Compare commits: v0.41.0 ... refactor/T (48 commits)
| Author | SHA1 | Date |
|---|---|---|
| | 85063ea5df | |
| | 31086fea53 | |
| | fd956095de | |
| | a046d9f84c | |
| | 2e0d5aeb51 | |
| | 28ab7c6f0c | |
| | d098eb58f3 | |
| | 5319a059ad | |
| | 163b8c4018 | |
| | 6322b95068 | |
| | 715ed1f9c2 | |
| | 82a759dd21 | |
| | fe913608c4 | |
| | 79f9c5d1c6 | |
| | 3d091129e2 | |
| | 1a978f786d | |
| | 51669d3c5f | |
| | d128dcb479 | |
| | 84286593f6 | |
| | 8d97f09e5e | |
| | 2748bc19be | |
| | 0b3c8fc774 | |
| | 7da18e0f00 | |
| | 49e38081ad | |
| | a14f993a31 | |
| | ae938f9909 | |
| | f91e0bb93a | |
| | d3f61005cf | |
| | 2923a3e88b | |
| | a73ced0067 | |
| | f89b91fe7f | |
| | 5950485d46 | |
| | f349927a63 | |
| | dfe8890598 | |
| | d224876a8e | |
| | 17e8c76b94 | |
| | 9034a31cd6 | |
| | 523e61c9f7 | |
| | cf575ded90 | |
| | 11a75d8532 | |
| | 6593e11332 | |
| | c310f669d6 | |
| | f327f47c3f | |
| | acd61e825e | |
| | 895701da59 | |
| | e0fb0db1f0 | |
| | dc7e56106e | |
| | 90e5179980 | |
.gitignore (vendored, 1 change)
@@ -4,3 +4,4 @@
 .mypy_cache/
 **/.env
 .coverage
+uv.lock
pyproject.toml

@@ -1,19 +1,30 @@
 # MARK: Project info
 [project]
 name = "corelibs"
-version = "0.41.0"
+version = "0.48.0"
 description = "Collection of utils for Python scripts"
 readme = "README.md"
 requires-python = ">=3.13"
 dependencies = [
     "corelibs-datetime>=1.0.1",
     "corelibs-debug>=1.0.0",
     "corelibs-dump-data>=1.0.0",
     "corelibs-encryption>=1.0.0",
     "corelibs-enum-base>=1.0.0",
     "corelibs-file>=1.0.0",
     "corelibs-hash>=1.0.0",
     "corelibs-iterator>=1.0.0",
     "corelibs-json>=1.0.0",
     "corelibs-regex-checks>=1.0.0",
     "corelibs-search>=1.0.0",
     "corelibs-stack-trace>=1.0.0",
     "corelibs-text-colors>=1.0.0",
     "corelibs-var>=1.0.0",
     "cryptography>=46.0.3",
     "jmespath>=1.0.1",
     "jsonpath-ng>=1.7.0",
     "psutil>=7.0.0",
-    "requests[proxy]>=2.32.4",
+    "requests[socks]>=2.32.5",
 ]

 # MARK: build system
@@ -33,12 +44,14 @@ publish-url = "https://git.egplusww.jp/api/packages/PyPI/pypi"
 corelibs-enum-base = { index = "opj-pypi" }
 corelibs-datetime = { index = "opj-pypi" }
 corelibs-var = { index = "opj-pypi" }
 corelibs-text-colors = { index = "opj-pypi" }

 [dependency-groups]
 dev = [
     "deepdiff>=8.6.1",
     "pytest>=8.4.1",
     "pytest-cov>=6.2.1",
     "typing-extensions>=4.15.0",
 ]

 # MARK: Python linting
src/corelibs/check_handling/regex_constants.py

@@ -3,8 +3,20 @@ List of regex compiled strings that can be used
 """

 import re
+from warnings import warn, deprecated
+from corelibs_regex_checks.regex_constants import (
+    compile_re as compile_re_ng,
+    SUB_EMAIL_BASIC_REGEX as SUB_EMAIL_BASIC_REGEX_NG,
+    EMAIL_BASIC_REGEX as EMAIL_BASIC_REGEX_NG,
+    NAME_EMAIL_SIMPLE_REGEX as NAME_EMAIL_SIMPLE_REGEX_NG,
+    NAME_EMAIL_BASIC_REGEX as NAME_EMAIL_BASIC_REGEX_NG,
+    DOMAIN_WITH_LOCALHOST_REGEX as DOMAIN_WITH_LOCALHOST_REGEX_NG,
+    DOMAIN_WITH_LOCALHOST_PORT_REGEX as DOMAIN_WITH_LOCALHOST_PORT_REGEX_NG,
+    DOMAIN_REGEX as DOMAIN_REGEX_NG
+)


+@deprecated("Use corelibs_regex_checks.regex_constants.compile_re instead")
 def compile_re(reg: str) -> re.Pattern[str]:
     """
     compile a regex with verbose flag
@@ -15,23 +27,25 @@ def compile_re(reg: str) -> re.Pattern[str]:
     Returns:
         re.Pattern[str] -- _description_
     """
-    return re.compile(reg, re.VERBOSE)
+    return compile_re_ng(reg)


 # email regex
-EMAIL_BASIC_REGEX: str = r"""
-^[A-Za-z0-9!#$%&'*+\-\/=?^_`{|}~][A-Za-z0-9!#$%:\(\)&'*+\-\/=?^_`{|}~\.]{0,63}
-@(?!-)[A-Za-z0-9-]{1,63}(?<!-)(?:\.[A-Za-z0-9-]{1,63}(?<!-))*\.[a-zA-Z]{2,6}$
-"""
+SUB_EMAIL_BASIC_REGEX = SUB_EMAIL_BASIC_REGEX_NG
+
+EMAIL_BASIC_REGEX = EMAIL_BASIC_REGEX_NG
 # name + email regex for email sending type like "foo bar" <email@mail.com>
+NAME_EMAIL_SIMPLE_REGEX = NAME_EMAIL_SIMPLE_REGEX_NG
 # name + email with the basic regex set
+NAME_EMAIL_BASIC_REGEX = NAME_EMAIL_BASIC_REGEX_NG
 # Domain regex with localhost
-DOMAIN_WITH_LOCALHOST_REGEX: str = r"""
-^(?:localhost|(?!-)[A-Za-z0-9-]{1,63}(?<!-)(?:\.[A-Za-z0-9-]{1,63}(?<!-))*\.[A-Za-z]{2,})$
-"""
+DOMAIN_WITH_LOCALHOST_REGEX = DOMAIN_WITH_LOCALHOST_REGEX_NG
 # domain regex with localhost and optional port
-DOMAIN_WITH_LOCALHOST_PORT_REGEX: str = r"""
-^(?:localhost|(?!-)[A-Za-z0-9-]{1,63}(?<!-)(?:\.[A-Za-z0-9-]{1,63}(?<!-))*\.[A-Za-z]{2,})(?::\d+)?$
-"""
+DOMAIN_WITH_LOCALHOST_PORT_REGEX = DOMAIN_WITH_LOCALHOST_PORT_REGEX_NG
 # Domain, no localhost
-DOMAIN_REGEX: str = r"^(?!-)[A-Za-z0-9-]{1,63}(?<!-)(?:\.[A-Za-z0-9-]{1,63}(?<!-))*\.[A-Za-z]{2,}$"
+DOMAIN_REGEX = DOMAIN_REGEX_NG

+# At the module level, issue a deprecation warning
+warn("Use corelibs_regex_checks.regex_constants instead", DeprecationWarning, stacklevel=2)

 # __END__
src/corelibs/check_handling/regex_constants_compiled.py (new file, 27 lines)
@@ -0,0 +1,27 @@
+"""
+List of regex compiled strings that can be used
+"""
+
+import warnings
+
+from corelibs_regex_checks.regex_constants_compiled import (
+    COMPILED_EMAIL_BASIC_REGEX as COMPILED_EMAIL_BASIC_REGEX_NG,
+    COMPILED_NAME_EMAIL_SIMPLE_REGEX as COMPILED_NAME_EMAIL_SIMPLE_REGEX_NG,
+    COMPILED_NAME_EMAIL_BASIC_REGEX as COMPILED_NAME_EMAIL_BASIC_REGEX_NG,
+    COMPILED_DOMAIN_WITH_LOCALHOST_REGEX as COMPILED_DOMAIN_WITH_LOCALHOST_REGEX_NG,
+    COMPILED_DOMAIN_WITH_LOCALHOST_PORT_REGEX as COMPILED_DOMAIN_WITH_LOCALHOST_PORT_REGEX_NG,
+    COMPILED_DOMAIN_REGEX as COMPILED_DOMAIN_REGEX_NG
+)
+
+# all above in compiled form
+COMPILED_EMAIL_BASIC_REGEX = COMPILED_EMAIL_BASIC_REGEX_NG
+COMPILED_NAME_EMAIL_SIMPLE_REGEX = COMPILED_NAME_EMAIL_SIMPLE_REGEX_NG
+COMPILED_NAME_EMAIL_BASIC_REGEX = COMPILED_NAME_EMAIL_BASIC_REGEX_NG
+COMPILED_DOMAIN_WITH_LOCALHOST_REGEX = COMPILED_DOMAIN_WITH_LOCALHOST_REGEX_NG
+COMPILED_DOMAIN_WITH_LOCALHOST_PORT_REGEX = COMPILED_DOMAIN_WITH_LOCALHOST_PORT_REGEX_NG
+COMPILED_DOMAIN_REGEX = COMPILED_DOMAIN_REGEX_NG
+
+# At the module level, issue a deprecation warning
+warnings.warn("Use corelibs_regex_checks.regex_constants_compiled instead", DeprecationWarning, stacklevel=2)
+
+# __END__
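Both regex shim modules keep the old import paths alive while steering callers to corelibs_regex_checks. A minimal sketch of what a consumer now sees on import (module path taken from the file header above; warning text from the `warn()` calls in the diff):

```python
# Importing the shim fires the module-level DeprecationWarning added above.
import warnings

with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")
    from corelibs.check_handling import regex_constants_compiled  # noqa: F401

for w in caught:
    if issubclass(w.category, DeprecationWarning):
        print(w.message)  # "Use corelibs_regex_checks.regex_constants_compiled instead"
```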
@@ -53,6 +53,9 @@ class SettingsLoader:
         # for check settings, abort flag
         self.__check_settings_abort: bool = False

+        # error messages for raise ValueError
+        self.__error_msg: list[str] = []
+
     # MARK: load settings
     def load_settings(
         self,
@@ -87,6 +90,8 @@ class SettingsLoader:
         Returns:
             dict[str, str]: key = value list
         """
+        # reset error message list before run
+        self.__error_msg = []
         # default set entries
         entry_set_empty: dict[str, str | None] = {}
         # entries that have to be split
@@ -109,7 +114,7 @@ class SettingsLoader:
             if allow_not_exist is True:
                 return {}
             raise ValueError(self.__print(
-                f"[!] Cannot read [{config_id}] block in the {self.config_file}: {e}",
+                f"[!] Cannot read [{config_id}] block in the file {self.config_file}: {e}",
                 'CRITICAL'
             )) from e
         try:
@@ -168,10 +173,13 @@ class SettingsLoader:
                         args_overrride.append(key)
                     if skip:
                         continue
-                    settings[config_id][key] = [
-                        __value.replace(" ", "")
-                        for __value in settings[config_id][key].split(split_char)
-                    ]
+                    if settings[config_id][key]:
+                        settings[config_id][key] = [
+                            __value.replace(" ", "")
+                            for __value in settings[config_id][key].split(split_char)
+                        ]
+                    else:
+                        settings[config_id][key] = []
             except KeyError as e:
                 raise ValueError(self.__print(
                     f"[!] Cannot read [{config_id}] block because the entry [{e}] could not be found",
@@ -181,9 +189,8 @@ class SettingsLoader:
             # ignore error if arguments are set
             if not self.__check_arguments(config_validate, True):
                 raise ValueError(self.__print(f"[!] Cannot find file: {self.config_file}", 'CRITICAL'))
-            else:
-                # base set
-                settings[config_id] = {}
+            # base set
+            settings[config_id] = {}
         # make sure all are set
         # if we have arguments set, this override config settings
         error: bool = False
@@ -274,7 +281,10 @@ class SettingsLoader:
                 error = True
                 self.__print(f"[!] Missing content entry for: {entry}", 'ERROR')
         if error is True:
-            raise ValueError(self.__print("[!] Missing or incorrect settings data. Cannot proceed", 'CRITICAL'))
+            self.__print("[!] Missing or incorrect settings data. Cannot proceed", 'CRITICAL')
+            raise ValueError(
+                "Missing or incorrect settings data. Cannot proceed: " + "; ".join(self.__error_msg)
+            )
         # set empty
         for [entry, empty_set] in entry_set_empty.items():
             # if set, skip, else set to empty value
@@ -567,7 +577,10 @@ class SettingsLoader:
             self.log.logger.log(Log.get_log_level_int(level), msg, stacklevel=2)
         if self.log is None or self.always_print:
             if print_error:
-                print(msg)
+                print(f"[SettingsLoader] {msg}")
+        if level == 'ERROR':
+            # remove any prefix [!] for error message list
+            self.__error_msg.append(msg.replace('[!] ', '').strip())
         return msg
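The net effect of the `__error_msg` changes: each problem is still reported as it is found, but the final `ValueError` now carries every collected message instead of only the last one. A hypothetical stand-alone reduction of the pattern (not the real class):

```python
# Sketch of the error-aggregation pattern introduced above.
error_msg: list[str] = []

def report(msg: str, level: str) -> str:
    print(f"[SettingsLoader] {msg}")
    if level == 'ERROR':
        # strip the [!] prefix before storing, as the diff does
        error_msg.append(msg.replace('[!] ', '').strip())
    return msg

report("[!] Missing content entry for: db_host", 'ERROR')
report("[!] Missing content entry for: db_port", 'ERROR')
if error_msg:
    raise ValueError(
        "Missing or incorrect settings data. Cannot proceed: " + "; ".join(error_msg)
    )
```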
@@ -7,10 +7,13 @@ from typing import Any, Sequence
 from pathlib import Path
 from collections import Counter
 import csv
+from corelibs.file_handling.file_bom_encoding import is_bom_encoded, is_bom_encoded_info
+from corelibs.exceptions.csv_exceptions import (
+    NoCsvReader, CompulsoryCsvHeaderCheckFailed, CsvHeaderDataMissing
+)

 ENCODING = 'utf-8'
+ENCODING_UTF8_SIG = 'utf-8-sig'
 DELIMITER = ","
 QUOTECHAR = '"'
 # type: _QuotingType
@@ -27,6 +30,7 @@ class CsvWriter:
         file_name: Path,
         header_mapping: dict[str, str],
         header_order: list[str] | None = None,
+        encoding: str = ENCODING,
         delimiter: str = DELIMITER,
         quotechar: str = QUOTECHAR,
         quoting: Any = QUOTING,
@@ -38,6 +42,7 @@ class CsvWriter:
         self.__delimiter = delimiter
         self.__quotechar = quotechar
         self.__quoting = quoting
+        self.__encoding = encoding
         self.csv_file_writer = self.__open_csv(header_order)

     def __open_csv(self, header_order: list[str] | None) -> csv.DictWriter[str]:
@@ -69,7 +74,8 @@ class CsvWriter:
         try:
             fp = open(
                 self.__file_name,
-                "w", encoding="utf-8"
+                "w",
+                encoding=self.__encoding
             )
             csv_file_writer = csv.DictWriter(
                 fp,
@@ -109,6 +115,7 @@ class CsvReader:
         self,
         file_name: Path,
         header_check: Sequence[str] | None = None,
+        encoding: str = ENCODING,
         delimiter: str = DELIMITER,
         quotechar: str = QUOTECHAR,
         quoting: Any = QUOTING,
@@ -118,6 +125,7 @@ class CsvReader:
         self.__delimiter = delimiter
         self.__quotechar = quotechar
         self.__quoting = quoting
+        self.__encoding = encoding
         self.header: Sequence[str] | None = None
         self.csv_file_reader = self.__open_csv()

@@ -129,9 +137,16 @@ class CsvReader:
             csv.DictReader | None: _description_
         """
         try:
+            # if UTF style check if this is BOM
+            if self.__encoding.lower().startswith('utf-') and is_bom_encoded(self.__file_name):
+                bom_info = is_bom_encoded_info(self.__file_name)
+                if bom_info['encoding'] == 'utf-8':
+                    self.__encoding = ENCODING_UTF8_SIG
+                else:
+                    self.__encoding = bom_info['encoding'] or self.__encoding
             fp = open(
                 self.__file_name,
-                "r", encoding="utf-8"
+                "r", encoding=self.__encoding
             )
             csv_file_reader = csv.DictReader(
                 fp,
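The reader's new BOM sniff upgrades a plain `utf-8` request to `utf-8-sig` so the BOM does not leak into the first header column. A stand-alone sketch of the same idea using only the standard library (covering just the UTF-8 BOM case, unlike the corelibs helpers used above):

```python
# Hypothetical stdlib-only version of the BOM handling added to CsvReader.
import codecs
import csv
from pathlib import Path

def open_csv_bom_aware(file_name: Path, encoding: str = "utf-8") -> csv.DictReader:
    """Open a CSV file, switching to 'utf-8-sig' when a UTF-8 BOM is present."""
    with open(file_name, "rb") as probe:
        header = probe.read(4)
    if encoding.lower().startswith("utf-") and header.startswith(codecs.BOM_UTF8):
        # utf-8-sig strips the BOM transparently on read
        encoding = "utf-8-sig"
    fp = open(file_name, "r", encoding=encoding, newline="")
    return csv.DictReader(fp)
```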
src/corelibs/db_handling/sql_main.py (new file, 76 lines)
@@ -0,0 +1,76 @@
+"""
+Main SQL base for any SQL calls
+This is a wrapper for SQLiteIO or other future DB Interfaces
+[Note: at the moment only SQLiteIO is implemented]
+- on class creation connection with ValueError on fail
+- connect method checks if already connected and warns
+- connection class fails with ValueError if no valid target is selected (SQL wrapper type)
+- connected check class method
+- a process class that returns data as list or False if end or error
+
+TODO: adapt more CoreLibs DB IO class flow here
+"""
+
+from typing import TYPE_CHECKING, Any, Literal
+from corelibs_stack_trace.stack import call_stack
+from corelibs.db_handling.sqlite_io import SQLiteIO
+if TYPE_CHECKING:
+    from corelibs.logging_handling.log import Logger
+
+
+IDENT_SPLIT_CHARACTER: str = ':'
+
+
+class SQLMain:
+    """Main SQL interface class"""
+    def __init__(self, log: 'Logger', db_ident: str):
+        self.log = log
+        self.dbh: SQLiteIO | None = None
+        self.db_target: str | None = None
+        self.connect(db_ident)
+        if not self.connected():
+            raise ValueError(f'Failed to connect to database [{call_stack()}]')
+
+    def connect(self, db_ident: str):
+        """setup basic connection"""
+        if self.dbh is not None and self.dbh.conn is not None:
+            self.log.warning(f"A database connection already exists for: {self.db_target} [{call_stack()}]")
+            return
+        self.db_target, db_dsn = db_ident.split(IDENT_SPLIT_CHARACTER)
+        match self.db_target:
+            case 'sqlite':
+                # this is a Path only at the moment
+                self.dbh = SQLiteIO(self.log, db_dsn, row_factory='Dict')
+            case _:
+                raise ValueError(f'SQL interface for {self.db_target} is not implemented [{call_stack()}]')
+        if not self.dbh.db_connected():
+            raise ValueError(f"DB Connection failed for: {self.db_target} [{call_stack()}]")
+
+    def close(self):
+        """close connection"""
+        if self.dbh is None or not self.connected():
+            return
+        # self.log.info(f"Close DB Connection: {self.db_target} [{call_stack()}]")
+        self.dbh.db_close()
+
+    def connected(self) -> bool:
+        """check connection"""
+        if self.dbh is None or not self.dbh.db_connected():
+            self.log.warning(f"No connection [{call_stack()}]")
+            return False
+        return True
+
+    def process_query(
+        self, query: str, params: tuple[Any, ...] | None = None
+    ) -> list[tuple[Any, ...]] | list[dict[str, Any]] | Literal[False]:
+        """mini wrapper for execute query"""
+        if self.dbh is not None:
+            result = self.dbh.execute_query(query, params)
+            if result is False:
+                return False
+        else:
+            self.log.error(f"Problem connecting to db: {self.db_target} [{call_stack()}]")
+            return False
+        return result
+
+# __END__
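A usage sketch for the new wrapper, assuming a corelibs `Logger` instance named `log` already exists. The `"<target>:<dsn>"` ident format follows `IDENT_SPLIT_CHARACTER = ':'` above; with the `'sqlite'` target the DSN is the database file path:

```python
from corelibs.db_handling.sql_main import SQLMain

db = SQLMain(log, "sqlite:/tmp/example.sqlite")  # raises ValueError if the connect fails
rows = db.process_query("SELECT id, name FROM users WHERE active = ?", (1,))
if rows is False:
    log.error("query failed")
else:
    for row in rows:  # row_factory='Dict' -> dicts keyed by column name
        print(row["id"], row["name"])
db.close()
```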
src/corelibs/db_handling/sqlite_io.py

@@ -8,7 +8,7 @@ also method names are subject to change
 from pathlib import Path
 from typing import Any, Literal, TYPE_CHECKING
 import sqlite3
-from corelibs.debug_handling.debug_helpers import call_stack
+from corelibs_stack_trace.stack import call_stack
 if TYPE_CHECKING:
     from corelibs.logging_handling.log import Logger
src/corelibs/debug_handling/debug_helpers.py

@@ -2,16 +2,16 @@
 Various debug helpers
 """

-import traceback
-import os
-import sys
+from warnings import deprecated
 from typing import Tuple, Type
 from types import TracebackType
+from corelibs_stack_trace.stack import call_stack as call_stack_ng, exception_stack as exception_stack_ng

 # _typeshed.OptExcInfo
 OptExcInfo = Tuple[None, None, None] | Tuple[Type[BaseException], BaseException, TracebackType]


+@deprecated("Use corelibs_stack_trace.stack.call_stack instead")
 def call_stack(
     start: int = 0,
     skip_last: int = -1,
@@ -31,23 +31,15 @@ def call_stack(
     Returns:
         str -- _description_
     """
-    # stack = traceback.extract_stack()[start:depth]
-    # how many of the last entries we skip (so we do not get self), default is -1
-    # start cannot be negative
-    if skip_last > 0:
-        skip_last = skip_last * -1
-    stack = traceback.extract_stack()
-    __stack = stack[start:skip_last]
-    # start possibly too high, reset start to 0
-    if not __stack and reset_start_if_empty:
-        start = 0
-        __stack = stack[start:skip_last]
-    if not separator:
-        separator = ' -> '
-    # print(f"* HERE: {dump_data(stack)}")
-    return f"{separator}".join(f"{os.path.basename(f.filename)}:{f.name}:{f.lineno}" for f in __stack)
+    return call_stack_ng(
+        start=start,
+        skip_last=skip_last,
+        separator=separator,
+        reset_start_if_empty=reset_start_if_empty
+    )


+@deprecated("Use corelibs_stack_trace.stack.exception_stack instead")
 def exception_stack(
     exc_stack: OptExcInfo | None = None,
     separator: str = ' -> '
@@ -62,15 +54,9 @@ def exception_stack(
     Returns:
         str -- _description_
     """
-    if exc_stack is not None:
-        _, _, exc_traceback = exc_stack
-    else:
-        exc_traceback = None
-        _, _, exc_traceback = sys.exc_info()
-    stack = traceback.extract_tb(exc_traceback)
-    if not separator:
-        separator = ' -> '
-    # print(f"* HERE: {dump_data(stack)}")
-    return f"{separator}".join(f"{os.path.basename(f.filename)}:{f.name}:{f.lineno}" for f in stack)
+    return exception_stack_ng(
+        exc_stack=exc_stack,
+        separator=separator
+    )

 # __END__
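The return format, a separator-joined trail of `file:function:line` entries, is visible in the removed implementation and is preserved by the delegated call. A quick sketch of what callers get:

```python
# Sketch of the call_stack() output; the shim now forwards the same
# arguments to corelibs_stack_trace.stack.call_stack.
from corelibs.debug_handling.debug_helpers import call_stack  # emits DeprecationWarning

def inner() -> str:
    return call_stack()

def outer() -> str:
    return inner()

print(outer())
# e.g. "main.py:<module>:11 -> main.py:outer:8 -> main.py:inner:5"
```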
@@ -2,10 +2,12 @@
 dict dump as JSON formatted
 """

-import json
+from warnings import deprecated
 from typing import Any
+from corelibs_dump_data.dump_data import dump_data as dump_data_ng


+@deprecated("Use corelibs_dump_data.dump_data.dump_data instead")
 def dump_data(data: Any, use_indent: bool = True) -> str:
     """
     dump formatted output from dict/list
@@ -16,7 +18,6 @@ def dump_data(data: Any, use_indent: bool = True) -> str:
     Returns:
         str: _description_
     """
-    indent = 4 if use_indent else None
-    return json.dumps(data, indent=indent, ensure_ascii=False, default=str)
+    return dump_data_ng(data=data, use_indent=use_indent)

 # __END__
@@ -4,123 +4,40 @@ Profile memory usage in Python

 # https://docs.python.org/3/library/tracemalloc.html

-import os
-import time
-import tracemalloc
-import linecache
-from typing import Tuple
-from tracemalloc import Snapshot
-import psutil
+from warnings import warn, deprecated
+from typing import TYPE_CHECKING
+from corelibs_debug.profiling import display_top as display_top_ng, display_top_str, Profiling as CoreLibsProfiling
+if TYPE_CHECKING:
+    from tracemalloc import Snapshot


-def display_top(snapshot: Snapshot, key_type: str = 'lineno', limit: int = 10) -> str:
+@deprecated("Use corelibs_debug.profiling.display_top_str with data from display_top instead")
+def display_top(snapshot: 'Snapshot', key_type: str = 'lineno', limit: int = 10) -> str:
     """
     Print tracemalloc stats
     https://docs.python.org/3/library/tracemalloc.html#pretty-top

     Args:
-        snapshot (Snapshot): _description_
+        snapshot ('Snapshot'): _description_
         key_type (str, optional): _description_. Defaults to 'lineno'.
         limit (int, optional): _description_. Defaults to 10.
     """
-    snapshot = snapshot.filter_traces((
-        tracemalloc.Filter(False, "<frozen importlib._bootstrap>"),
-        tracemalloc.Filter(False, "<unknown>"),
-    ))
-    top_stats = snapshot.statistics(key_type)
-
-    profiler_msg = f"Top {limit} lines"
-    for index, stat in enumerate(top_stats[:limit], 1):
-        frame = stat.traceback[0]
-        # replace "/path/to/module/file.py" with "module/file.py"
-        filename = os.sep.join(frame.filename.split(os.sep)[-2:])
-        profiler_msg += f"#{index}: {filename}:{frame.lineno}: {(stat.size / 1024):.1f} KiB"
-        line = linecache.getline(frame.filename, frame.lineno).strip()
-        if line:
-            profiler_msg += f" {line}"
-
-    other = top_stats[limit:]
-    if other:
-        size = sum(stat.size for stat in other)
-        profiler_msg += f"{len(other)} other: {(size / 1024):.1f} KiB"
-    total = sum(stat.size for stat in top_stats)
-    profiler_msg += f"Total allocated size: {(total / 1024):.1f} KiB"
-    return profiler_msg
+    return display_top_str(
+        display_top_ng(
+            snapshot=snapshot,
+            key_type=key_type,
+            limit=limit
+        )
+    )


-class Profiling:
+class Profiling(CoreLibsProfiling):
     """
     Profile memory usage and elapsed time for some block
     Based on: https://stackoverflow.com/a/53301648
     """

-    def __init__(self):
-        # profiling id
-        self.__ident: str = ''
-        # memory
-        self.__rss_before: int = 0
-        self.__vms_before: int = 0
-        # self.shared_before: int = 0
-        self.__rss_used: int = 0
-        self.__vms_used: int = 0
-        # self.shared_used: int = 0
-        # time
-        self.__call_start: float = 0
-        self.__elapsed = 0
-
-    def __get_process_memory(self) -> Tuple[int, int]:
-        process = psutil.Process(os.getpid())
-        mi = process.memory_info()
-        # macos does not have mi.shared
-        return mi.rss, mi.vms
-
-    def __elapsed_since(self) -> str:
-        elapsed = time.time() - self.__call_start
-        if elapsed < 1:
-            return str(round(elapsed * 1000, 2)) + "ms"
-        if elapsed < 60:
-            return str(round(elapsed, 2)) + "s"
-        if elapsed < 3600:
-            return str(round(elapsed / 60, 2)) + "min"
-        return str(round(elapsed / 3600, 2)) + "hrs"
-
-    def __format_bytes(self, bytes_data: int) -> str:
-        if abs(bytes_data) < 1000:
-            return str(bytes_data) + "B"
-        if abs(bytes_data) < 1e6:
-            return str(round(bytes_data / 1e3, 2)) + "kB"
-        if abs(bytes_data) < 1e9:
-            return str(round(bytes_data / 1e6, 2)) + "MB"
-        return str(round(bytes_data / 1e9, 2)) + "GB"
-
-    def start_profiling(self, ident: str) -> None:
-        """
-        start the profiling
-        """
-        self.__ident = ident
-        self.__rss_before, self.__vms_before = self.__get_process_memory()
-        self.__call_start = time.time()
-
-    def end_profiling(self) -> None:
-        """
-        end the profiling
-        """
-        if self.__rss_before == 0 and self.__vms_before == 0:
-            print("start_profile() was not called, output will be negative")
-        self.__elapsed = self.__elapsed_since()
-        __rss_after, __vms_after = self.__get_process_memory()
-        self.__rss_used = __rss_after - self.__rss_before
-        self.__vms_used = __vms_after - self.__vms_before
-
-    def print_profiling(self) -> str:
-        """
-        print the profiling time
-        """
-        return (
-            f"Profiling: {self.__ident:>20} "
-            f"RSS: {self.__format_bytes(self.__rss_used):>8} | "
-            f"VMS: {self.__format_bytes(self.__vms_used):>8} | "
-            f"time: {self.__elapsed:>8}"
-        )
+warn("Use corelibs_debug.profiling.Profiling instead", DeprecationWarning, stacklevel=2)

 # __END__
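A usage sketch for the shim; the start/end/print API is the one shown in the removed implementation and, per the diff, is now inherited from corelibs_debug.profiling.Profiling. The import path is assumed:

```python
from corelibs.debug_handling.profiling import Profiling  # module path assumed

prof = Profiling()
prof.start_profiling("load-data")
data = [x * x for x in range(1_000_000)]  # some block worth measuring
prof.end_profiling()
# one line with RSS/VMS deltas and elapsed time, per the removed format
print(prof.print_profiling())
```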
@@ -5,109 +5,16 @@ Returns:
     Timer: class timer for basic time run calculations
 """

-from datetime import datetime, timedelta
+from warnings import warn
+from corelibs_debug.timer import Timer as CorelibsTimer


-class Timer:
+class Timer(CorelibsTimer):
     """
     get difference between start and end date/time
     """

-    def __init__(self):
-        """
-        init new start time and set end time to None
-        """
-        self._overall_start_time = datetime.now()
-        self._overall_end_time = None
-        self._overall_run_time = None
-        self._start_time = datetime.now()
-        self._end_time = None
-        self._run_time = None
-
-    # MARK: overall run time
-    def overall_run_time(self) -> timedelta:
-        """
-        overall run time difference from class launch to call of this function
-
-        Returns:
-            timedelta: _description_
-        """
-        self._overall_end_time = datetime.now()
-        self._overall_run_time = self._overall_end_time - self._overall_start_time
-        return self._overall_run_time
-
-    def get_overall_start_time(self) -> datetime:
-        """
-        get set start time
-
-        Returns:
-            datetime: _description_
-        """
-        return self._overall_start_time
-
-    def get_overall_end_time(self) -> datetime | None:
-        """
-        get set end time or None for not set
-
-        Returns:
-            datetime|None: _description_
-        """
-        return self._overall_end_time
-
-    def get_overall_run_time(self) -> timedelta | None:
-        """
-        get run time or None if run time was not called
-
-        Returns:
-            datetime|None: _description_
-        """
-        return self._overall_run_time
-
-    # MARK: set run time
-    def run_time(self) -> timedelta:
-        """
-        difference between start time and current time
-
-        Returns:
-            datetime: _description_
-        """
-        self._end_time = datetime.now()
-        self._run_time = self._end_time - self._start_time
-        return self._run_time
-
-    def reset_run_time(self):
-        """
-        reset start/end and run time
-        """
-        self._start_time = datetime.now()
-        self._end_time = None
-        self._run_time = None
-
-    def get_start_time(self) -> datetime:
-        """
-        get set start time
-
-        Returns:
-            datetime: _description_
-        """
-        return self._start_time
-
-    def get_end_time(self) -> datetime | None:
-        """
-        get set end time or None for not set
-
-        Returns:
-            datetime|None: _description_
-        """
-        return self._end_time
-
-    def get_run_time(self) -> timedelta | None:
-        """
-        get run time or None if run time was not called
-
-        Returns:
-            datetime|None: _description_
-        """
-        return self._run_time
+warn("Use corelibs_debug.timer.Timer instead", DeprecationWarning, stacklevel=2)

 # __END__
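Usage sketch for the Timer shim; the method names come from the removed implementation and are assumed to be provided unchanged by corelibs_debug.timer.Timer. Import path assumed:

```python
import time
from corelibs.debug_handling.timer import Timer  # module path assumed; emits DeprecationWarning

timer = Timer()
time.sleep(0.2)
print(timer.run_time())          # timedelta since construction
timer.reset_run_time()           # restart the per-step clock
time.sleep(0.1)
print(timer.run_time())          # timedelta since the reset
print(timer.overall_run_time())  # timedelta since construction, unaffected by reset
```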
@@ -2,11 +2,18 @@
 Various small helpers for data writing
 """

+from warnings import deprecated
+from typing import TYPE_CHECKING
+from corelibs_debug.writeline import (
+    write_l as write_l_ng, pr_header as pr_header_ng,
+    pr_title as pr_title_ng, pr_open as pr_open_ng,
+    pr_close as pr_close_ng, pr_act as pr_act_ng
+)
+if TYPE_CHECKING:
+    from io import TextIOWrapper, StringIO


+@deprecated("Use corelibs_debug.writeline.write_l instead")
 def write_l(line: str, fpl: 'TextIOWrapper | StringIO | None' = None, print_line: bool = False):
     """
     Write a line to screen and to output file
@@ -15,23 +22,30 @@ def write_l(line: str, fpl: 'TextIOWrapper | StringIO | None' = None, print_line: bool = False):
         line (String): Line to write
         fpl (Resource): file handler resource, if none write only to console
     """
-    if print_line is True:
-        print(line)
-    if fpl is not None:
-        fpl.write(line + "\n")
+    return write_l_ng(
+        line=line,
+        fpl=fpl,
+        print_line=print_line
+    )


 # progress printers

+@deprecated("Use corelibs_debug.writeline.pr_header instead")
 def pr_header(tag: str, marker_string: str = '#', width: int = 35):
     """_summary_

     Args:
         tag (str): _description_
     """
-    print(f" {marker_string} {tag:^{width}} {marker_string}")
+    return pr_header_ng(
+        tag=tag,
+        marker_string=marker_string,
+        width=width
+    )


+@deprecated("Use corelibs_debug.writeline.pr_title instead")
 def pr_title(tag: str, prefix_string: str = '|', space_filler: str = '.', width: int = 35):
     """_summary_

@@ -39,9 +53,15 @@ def pr_title(tag: str, prefix_string: str = '|', space_filler: str = '.', width: int = 35):
         tag (str): _description_
         prefix_string (str, optional): _description_. Defaults to '|'.
     """
-    print(f" {prefix_string} {tag:{space_filler}<{width}}:", flush=True)
+    return pr_title_ng(
+        tag=tag,
+        prefix_string=prefix_string,
+        space_filler=space_filler,
+        width=width
+    )


+@deprecated("Use corelibs_debug.writeline.pr_open instead")
 def pr_open(tag: str, prefix_string: str = '|', space_filler: str = '.', width: int = 35):
     """
     write progress open line with tag
@@ -50,9 +70,15 @@ def pr_open(tag: str, prefix_string: str = '|', space_filler: str = '.', width: int = 35):
         tag (str): _description_
         prefix_string (str): prefix string. Default: '|'
     """
-    print(f" {prefix_string} {tag:{space_filler}<{width}} [", end="", flush=True)
+    return pr_open_ng(
+        tag=tag,
+        prefix_string=prefix_string,
+        space_filler=space_filler,
+        width=width
+    )


+@deprecated("Use corelibs_debug.writeline.pr_close instead")
 def pr_close(tag: str = ''):
     """
     write the close tag with new line
@@ -60,9 +86,10 @@ def pr_close(tag: str = ''):
     Args:
         tag (str, optional): _description_. Defaults to ''.
     """
-    print(f"{tag}]", flush=True)
+    return pr_close_ng(tag=tag)


+@deprecated("Use corelibs_debug.writeline.pr_act instead")
 def pr_act(act: str = "."):
     """
     write progress character
@@ -70,6 +97,6 @@ def pr_act(act: str = "."):
     Args:
         act (str, optional): _description_. Defaults to ".".
     """
-    print(f"{act}", end="", flush=True)
+    return pr_act_ng(act=act)

 # __END__
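For reference, the output shapes of the progress printers, taken from the removed `print` calls above. The import path is an assumption:

```python
from corelibs.debug_handling.writeline import pr_header, pr_open, pr_act, pr_close  # path assumed

pr_header("SYNC")        # " # <SYNC centered to width 35> #"
pr_open("load users")    # " | load users...<dot-padded to 35> [" with no newline
for _ in range(3):
    pr_act()             # one "." per processed item
pr_close()               # closes the open line with "]"
```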
@@ -4,6 +4,8 @@ Send email wrapper

 import smtplib
 from email.message import EmailMessage
+from email.header import Header
+from email.utils import formataddr, parseaddr
 from typing import TYPE_CHECKING, Any
 if TYPE_CHECKING:
     from corelibs.logging_handling.log import Logger
@@ -133,21 +135,30 @@ class SendEmail:
             _subject = template["subject"]
             _body = template["body"]
             for key, value in replace.items():
-                _subject = _subject.replace(f"{{{{{key}}}}}", value)
-                _body = _body.replace(f"{{{{{key}}}}}", value)
+                placeholder = f"{{{{{key}}}}}"
+                _subject = _subject.replace(placeholder, value)
+                _body = _body.replace(placeholder, value)
+            name, addr = parseaddr(from_email)
+            if name:
+                # Encode the name part with MIME encoding
+                encoded_name = str(Header(name, 'utf-8'))
+                from_email_encoded = formataddr((encoded_name, addr))
+            else:
+                from_email_encoded = from_email
             # create a simple email and add subject, from email
             msg_email = EmailMessage()
             # msg.set_content(_body, charset='utf-8', cte='quoted-printable')
             msg_email.set_content(_body, charset="utf-8")
             msg_email["Subject"] = _subject
-            msg_email["From"] = from_email
+            msg_email["From"] = from_email_encoded
             # push to array for sending
             msg.append(msg_email)
         return msg

     def send_email_list(
         self,
-        email: list[EmailMessage], receivers: list[str],
+        emails: list[EmailMessage],
+        receivers: list[str],
         combined_send: bool | None = None,
         test_only: bool | None = None
     ):
@@ -170,18 +181,27 @@ class SendEmail:
             smtp = smtplib.SMTP(smtp_host)
         except ConnectionRefusedError as e:
             self.log.error("Could not open SMTP connection to: %s, %s", smtp_host, e)
+        # prepare receiver list
+        receivers_encoded: list[str] = []
+        for __receiver in receivers:
+            to_name, to_addr = parseaddr(__receiver)
+            if to_name:
+                # Encode the name part with MIME encoding
+                encoded_to_name = str(Header(to_name, 'utf-8'))
+                receivers_encoded.append(formataddr((encoded_to_name, to_addr)))
+            else:
+                receivers_encoded.append(__receiver)
         # loop over messages and then over receivers
-        for msg in email:
+        for msg in emails:
             if combined_send is True:
-                msg["To"] = ", ".join(receivers)
+                msg["To"] = ", ".join(receivers_encoded)
                 if not self.settings.get('test'):
                     if smtp is not None:
-                        smtp.send_message(msg, msg["From"], receivers)
+                        smtp.send_message(msg, msg["From"], receivers_encoded)
                 else:
                     self.log.info(f"[EMAIL] Test, not sending email\n{msg}")
             else:
-                for receiver in receivers:
+                # send to
+                for receiver in receivers_encoded:
                     self.log.debug(f"===> Send to: {receiver}")
                     if "To" in msg:
                         msg.replace_header("To", receiver)
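The header-encoding step added above matters because display names in `From:`/`To:` headers may contain non-ASCII characters, which must be MIME-encoded. A stand-alone sketch using only the standard library, mirroring the pattern in the diff:

```python
from email.header import Header
from email.utils import formataddr, parseaddr

name, addr = parseaddr('"Grüße Müller" <mueller@example.com>')
if name:
    # str(Header(...)) yields the RFC 2047 encoded-word form for non-ASCII names
    encoded = formataddr((str(Header(name, "utf-8")), addr))
else:
    encoded = addr
print(encoded)  # e.g. =?utf-8?b?R3LDvMOfZSBNw7xsbGVy?= <mueller@example.com>
```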
@@ -4,24 +4,11 @@ Will be moved to CoreLibs
 TODO: set key per encryption run
 """

-import os
-import json
-import base64
-import hashlib
-from typing import TypedDict, cast
-from cryptography.fernet import Fernet
-from cryptography.hazmat.primitives import hashes
-from cryptography.hazmat.primitives.kdf.pbkdf2 import PBKDF2HMAC
+import warnings
+from corelibs_encryption.symmetric import SymmetricEncryption as CorelibsSymmetricEncryption


-class PackageData(TypedDict):
-    """encryption package"""
-    encrypted_data: str
-    salt: str
-    key_hash: str
-
-
-class SymmetricEncryption:
+class SymmetricEncryption(CorelibsSymmetricEncryption):
     """
     simple encryption
@@ -29,124 +16,7 @@
     key from the password to decrypt
     """

-    def __init__(self, password: str):
-        if not password:
-            raise ValueError("A password must be set")
-        self.password = password
-        self.password_hash = hashlib.sha256(password.encode('utf-8')).hexdigest()
-
-    def __derive_key_from_password(self, password: str, salt: bytes) -> bytes:
-        _password = password.encode('utf-8')
-        kdf = PBKDF2HMAC(
-            algorithm=hashes.SHA256(),
-            length=32,
-            salt=salt,
-            iterations=100000,
-        )
-        key = base64.urlsafe_b64encode(kdf.derive(_password))
-        return key
-
-    def __encrypt_with_metadata(self, data: str | bytes) -> PackageData:
-        """Encrypt data and include salt if password-based"""
-        # convert to bytes (for encoding)
-        if isinstance(data, str):
-            data = data.encode('utf-8')
-
-        # generate salt and key from password
-        salt = os.urandom(16)
-        key = self.__derive_key_from_password(self.password, salt)
-        # init the cipher suite
-        cipher_suite = Fernet(key)
-
-        encrypted_data = cipher_suite.encrypt(data)
-
-        # If using password, include salt in the result
-        return {
-            'encrypted_data': base64.urlsafe_b64encode(encrypted_data).decode('utf-8'),
-            'salt': base64.urlsafe_b64encode(salt).decode('utf-8'),
-            'key_hash': hashlib.sha256(key).hexdigest()
-        }
-
-    def encrypt_with_metadata(self, data: str | bytes, return_as: str = 'str') -> str | bytes | PackageData:
-        """encrypt with metadata, but returns data in string"""
-        match return_as:
-            case 'str':
-                return self.encrypt_with_metadata_return_str(data)
-            case 'json':
-                return self.encrypt_with_metadata_return_str(data)
-            case 'bytes':
-                return self.encrypt_with_metadata_return_bytes(data)
-            case 'dict':
-                return self.encrypt_with_metadata_return_dict(data)
-            case _:
-                # default is string json
-                return self.encrypt_with_metadata_return_str(data)
-
-    def encrypt_with_metadata_return_dict(self, data: str | bytes) -> PackageData:
-        """encrypt with metadata, but returns data as PackageData dict"""
-        return self.__encrypt_with_metadata(data)
-
-    def encrypt_with_metadata_return_str(self, data: str | bytes) -> str:
-        """encrypt with metadata, but returns data in string"""
-        return json.dumps(self.__encrypt_with_metadata(data))
-
-    def encrypt_with_metadata_return_bytes(self, data: str | bytes) -> bytes:
-        """encrypt with metadata, but returns data in bytes"""
-        return json.dumps(self.__encrypt_with_metadata(data)).encode('utf-8')
-
-    def decrypt_with_metadata(self, encrypted_package: str | bytes | PackageData, password: str | None = None) -> str:
-        """Decrypt data that may include metadata"""
-        try:
-            # Try to parse as JSON (password-based encryption)
-            if isinstance(encrypted_package, bytes):
-                package_data = cast(PackageData, json.loads(encrypted_package.decode('utf-8')))
-            elif isinstance(encrypted_package, str):
-                package_data = cast(PackageData, json.loads(str(encrypted_package)))
-            else:
-                package_data = encrypted_package
-
-            encrypted_data = base64.urlsafe_b64decode(package_data['encrypted_data'])
-            salt = base64.urlsafe_b64decode(package_data['salt'])
-            pwd = password or self.password
-            key = self.__derive_key_from_password(pwd, salt)
-            if package_data['key_hash'] != hashlib.sha256(key).hexdigest():
-                raise ValueError("Key hash is not matching, possible invalid password")
-            cipher_suite = Fernet(key)
-            decrypted_data = cipher_suite.decrypt(encrypted_data)
-
-        except (json.JSONDecodeError, KeyError, UnicodeDecodeError) as e:
-            raise ValueError(f"Invalid encrypted package format {e}") from e
-
-        return decrypted_data.decode('utf-8')
-
-    @staticmethod
-    def encrypt_data(data: str | bytes, password: str) -> str:
-        """
-        Static method to encrypt some data
-
-        Arguments:
-            data {str | bytes} -- _description_
-            password {str} -- _description_
-
-        Returns:
-            str -- _description_
-        """
-        encryptor = SymmetricEncryption(password)
-        return encryptor.encrypt_with_metadata_return_str(data)
-
-    @staticmethod
-    def decrypt_data(data: str | bytes | PackageData, password: str) -> str:
-        """
-        Static method to decrypt some data
-
-        Arguments:
-            data {str | bytes | PackageData} -- _description_
-            password {str} -- _description_
-
-        Returns:
-            str -- _description_
-        """
-        decryptor = SymmetricEncryption(password)
-        return decryptor.decrypt_with_metadata(data, password=password)
+warnings.warn("Use corelibs_encryption.symmetric.SymmetricEncryption instead", DeprecationWarning, stacklevel=2)

 # __END__
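A round-trip sketch using the static helpers shown in the removed code, assuming the corelibs parent class keeps the same `encrypt_data`/`decrypt_data` API and that the module path is as guessed:

```python
from corelibs.encryption_handling.symmetric import SymmetricEncryption  # path assumed

package = SymmetricEncryption.encrypt_data("top secret", password="pass phrase")
print(package)  # JSON string with encrypted_data, salt and key_hash fields
plain = SymmetricEncryption.decrypt_data(package, password="pass phrase")
assert plain == "top secret"
```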
src/corelibs/exceptions/__init__.py (new file, empty)
src/corelibs/file_handling/file_bom_encoding.py

@@ -2,19 +2,16 @@
 File check if BOM encoded, needed for CSV load
 """

+from warnings import deprecated
 from pathlib import Path
-from typing import TypedDict
-
-
-class BomEncodingInfo(TypedDict):
-    """BOM encoding info"""
-    has_bom: bool
-    bom_type: str | None
-    encoding: str | None
-    bom_length: int
-    bom_pattern: bytes | None
+from corelibs_file.file_bom_encoding import (
+    is_bom_encoded as is_bom_encoding_ng,
+    get_bom_encoding_info,
+    BomEncodingInfo
+)


+@deprecated("Use corelibs_file.file_bom_encoding.is_bom_encoded instead")
 def is_bom_encoded(file_path: Path) -> bool:
     """
     Detect if a file is BOM encoded
@@ -25,9 +22,10 @@ def is_bom_encoded(file_path: Path) -> bool:
     Returns:
         bool: True if file has BOM, False otherwise
     """
-    return is_bom_encoded_info(file_path)['has_bom']
+    return is_bom_encoding_ng(file_path)


+@deprecated("Use corelibs_file.file_bom_encoding.get_bom_encoding_info instead")
 def is_bom_encoded_info(file_path: Path) -> BomEncodingInfo:
     """
     Enhanced BOM detection with additional file analysis
@@ -38,38 +36,7 @@ def is_bom_encoded_info(file_path: Path) -> BomEncodingInfo:
     Returns:
         dict: Comprehensive BOM and encoding information
     """
-    try:
-        # Read first bytes for analysis
-        with open(file_path, 'rb') as f:
-            header = f.read(4)
-
-        bom_patterns = {
-            b'\xef\xbb\xbf': ('UTF-8', 'utf-8', 3),
-            b'\xff\xfe\x00\x00': ('UTF-32 LE', 'utf-32-le', 4),
-            b'\x00\x00\xfe\xff': ('UTF-32 BE', 'utf-32-be', 4),
-            b'\xff\xfe': ('UTF-16 LE', 'utf-16-le', 2),
-            b'\xfe\xff': ('UTF-16 BE', 'utf-16-be', 2),
-        }
-
-        for bom_pattern, (encoding_name, encoding, length) in bom_patterns.items():
-            if header.startswith(bom_pattern):
-                return {
-                    'has_bom': True,
-                    'bom_type': encoding_name,
-                    'encoding': encoding,
-                    'bom_length': length,
-                    'bom_pattern': bom_pattern
-                }
-
-        return {
-            'has_bom': False,
-            'bom_type': None,
-            'encoding': None,
-            'bom_length': 0,
-            'bom_pattern': None
-        }
-    except Exception as e:
-        raise ValueError(f"Error checking BOM encoding: {e}") from e
+    return get_bom_encoding_info(file_path)

 # __END__
@@ -2,10 +2,13 @@
 crc handlers for file CRC
 """

-import zlib
+from warnings import deprecated
 from pathlib import Path
+from corelibs_file.file_crc import file_crc as file_crc_ng
+from corelibs_file.file_handling import get_file_name


+@deprecated("Use corelibs_file.file_crc.file_crc instead")
 def file_crc(file_path: Path) -> str:
     """
     With for loop and buffer, create file crc32
@@ -16,13 +19,10 @@ def file_crc(file_path: Path) -> str:
     Returns:
         str: file crc32
     """
-    crc = 0
-    with open(file_path, 'rb', 65536) as ins:
-        for _ in range(int((file_path.stat().st_size / 65536)) + 1):
-            crc = zlib.crc32(ins.read(65536), crc)
-    return f"{crc & 0xFFFFFFFF:08X}"
+    return file_crc_ng(file_path)


+@deprecated("Use corelibs_file.file_handling.get_file_name instead")
 def file_name_crc(file_path: Path, add_parent_folder: bool = False) -> str:
     """
     either returns file name only from path
@@ -38,9 +38,6 @@ def file_name_crc(file_path: Path, add_parent_folder: bool = False) -> str:
     Returns:
         str: file name as string
     """
-    if add_parent_folder:
-        return str(Path(file_path.parent.name).joinpath(file_path.name))
-    else:
-        return file_path.name
+    return get_file_name(file_path, add_parent_folder=add_parent_folder)

 # __END__
@@ -2,11 +2,12 @@
 File handling utilities
 """

-import os
-import shutil
+from warnings import deprecated
 from pathlib import Path
+from corelibs_file.file_handling import remove_all_in_directory as remove_all_in_directory_ng


+@deprecated("Use corelibs_file.file_handling.remove_all_in_directory instead")
 def remove_all_in_directory(
     directory: Path,
     ignore_files: list[str] | None = None,
@@ -14,43 +15,24 @@ def remove_all_in_directory(
     dry_run: bool = False
 ) -> bool:
     """
     remove all files and folders in a directory
     can exclude files or folders
+    deprecated

-    Args:
-        directory (Path): _description_
-        ignore_files (list[str], optional): _description_. Defaults to None.
+    Arguments:
+        directory {Path} -- _description_
+
+    Keyword Arguments:
+        ignore_files {list[str] | None} -- _description_ (default: {None})
+        verbose {bool} -- _description_ (default: {False})
+        dry_run {bool} -- _description_ (default: {False})

     Returns:
-        bool: _description_
+        bool -- _description_
     """
-    if not directory.is_dir():
-        return False
-    if ignore_files is None:
-        ignore_files = []
-    if verbose:
-        print(
-            f"{'[DRY RUN] ' if dry_run else ''}Remove old files in: {directory.name} [",
-            end="", flush=True
-        )
-    # remove all files and folders in given directory by recursive globbing
-    for file in directory.rglob("*"):
-        # skip if in ignore files
-        if file.name in ignore_files:
-            continue
-        # remove one file, or a whole directory
-        if file.is_file():
-            if not dry_run:
-                os.remove(file)
-            if verbose:
-                print(".", end="", flush=True)
-        elif file.is_dir():
-            if not dry_run:
-                shutil.rmtree(file)
-            if verbose:
-                print("/", end="", flush=True)
-    if verbose:
-        print("]", flush=True)
-    return True
+    return remove_all_in_directory_ng(
+        directory,
+        ignore_files=ignore_files,
+        verbose=verbose,
+        dry_run=dry_run
+    )

 # __END__
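A dry-run sketch based on the keyword arguments shown above: preview what would be deleted without touching the disk. The import path is assumed:

```python
from pathlib import Path
from corelibs.file_handling.file_handling import remove_all_in_directory  # path assumed

remove_all_in_directory(
    Path("/tmp/build-cache"),
    ignore_files=[".gitkeep"],  # file or folder names to keep
    verbose=True,               # prints "." per file and "/" per directory
    dry_run=True                # nothing is actually removed
)
```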
@@ -2,27 +2,31 @@
 wrapper around search path
 """

-from typing import Any, TypedDict, NotRequired
+from typing import Any
 from warnings import deprecated
+from corelibs_search.data_search import (
+    ArraySearchList as CorelibsArraySearchList,
+    find_in_array_from_list as corelibs_find_in_array_from_list,
+    key_lookup as corelibs_key_lookup,
+    value_lookup as corelibs_value_lookup
+)


-class ArraySearchList(TypedDict):
+class ArraySearchList(CorelibsArraySearchList):
     """find in array from list search dict"""
-    key: str
-    value: str | bool | int | float | list[str | None]
-    case_sensitive: NotRequired[bool]


-@deprecated("Use find_in_array_from_list()")
+@deprecated("Use corelibs_search.data_search.find_in_array_from_list instead")
 def array_search(
     search_params: list[ArraySearchList],
     data: list[dict[str, Any]],
     return_index: bool = False
 ) -> list[dict[str, Any]]:
     """deprecated, old call order"""
-    return find_in_array_from_list(data, search_params, return_index)
+    return corelibs_find_in_array_from_list(data, search_params, return_index)


 @deprecated("Use corelibs_search.data_search.find_in_array_from_list instead")
 def find_in_array_from_list(
     data: list[dict[str, Any]],
     search_params: list[ArraySearchList],
@@ -48,69 +52,14 @@ def find_in_array_from_list(
         list: list of found elements, or if return index
             list of dicts with "index" and "data", where "data" holds the result list
     """
-    if not isinstance(search_params, list):  # type: ignore
-        raise ValueError("search_params must be a list")
-    keys: list[str] = []
-    # check that key and value exist and are set
-    for search in search_params:
-        if not search.get('key') or not search.get('value'):
-            raise KeyError(
-                f"Either Key '{search.get('key', '')}' or "
-                f"Value '{search.get('value', '')}' is missing or empty"
-            )
-        # if double key -> abort
-        if search.get("key") in keys:
-            raise KeyError(
-                f"Key {search.get('key', '')} already exists in search_params"
-            )
-        keys.append(str(search['key']))
-
-    return_items: list[dict[str, Any]] = []
-    for si_idx, search_item in enumerate(data):
-        # for each search entry, all must match
-        matching = 0
-        for search in search_params:
-            # either Value direct or if Value is list then any of those items can match
-            # values are compared in lower case if case sensitive is off
-            # lower case left side
-            # TODO: allow nested Keys. eg "Key: ["Key a", "key b"]" to be ["Key a"]["key b"]
-            if search.get("case_sensitive", True) is False:
-                search_value = search_item.get(str(search['key']), "").lower()
-            else:
-                search_value = search_item.get(str(search['key']), "")
-            # lower case right side
-            if isinstance(search['value'], list):
-                search_in = [
-                    str(k).lower()
-                    if search.get("case_sensitive", True) is False else k
-                    for k in search['value']
-                ]
-            elif search.get("case_sensitive", True) is False:
-                search_in = str(search['value']).lower()
-            else:
-                search_in = search['value']
-            # compare check
-            if (
-                (
-                    isinstance(search_in, list) and
-                    search_value in search_in
-                ) or
-                search_value == search_in
-            ):
-                matching += 1
-        if len(search_params) == matching:
-            if return_index is True:
-                # the data is now in "data sub set"
-                return_items.append({
-                    "index": si_idx,
-                    "data": search_item
-                })
-            else:
-                return_items.append(search_item)
-    # return all found or empty list
-    return return_items
+    return corelibs_find_in_array_from_list(
+        data,
+        search_params,
+        return_index
+    )


 @deprecated("Use corelibs_search.data_search.key_lookup instead")
 def key_lookup(haystack: dict[str, str], key: str) -> str:
     """
     simple key lookup in haystack, returns empty string if not found
@@ -122,9 +71,10 @@ def key_lookup(haystack: dict[str, str], key: str) -> str:
     Returns:
         str: _description_
     """
-    return haystack.get(key, "")
+    return corelibs_key_lookup(haystack, key)


 @deprecated("Use corelibs_search.data_search.value_lookup instead")
 def value_lookup(haystack: dict[str, str], value: str, raise_on_many: bool = False) -> str:
     """
     find by value, if not found returns empty, if not raise on many returns the first one
@@ -140,11 +90,6 @@ def value_lookup(haystack: dict[str, str], value: str, raise_on_many: bool = False) -> str:
     Returns:
         str: _description_
     """
-    keys = [__key for __key, __value in haystack.items() if __value == value]
-    if not keys:
-        return ""
-    if raise_on_many is True and len(keys) > 1:
-        raise ValueError("More than one element found with the same name")
-    return keys[0]
+    return corelibs_value_lookup(haystack, value, raise_on_many)

 # __END__
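Usage sketch for `find_in_array_from_list`, with semantics taken from the removed implementation: every search entry must match, a list value matches any of its items, and `case_sensitive=False` lowercases both sides. The module path is assumed:

```python
from corelibs.check_handling.data_search import find_in_array_from_list  # path assumed

data = [
    {"name": "Alpha", "env": "prod"},
    {"name": "beta", "env": "dev"},
]
hits = find_in_array_from_list(
    data,
    [{"key": "name", "value": ["alpha", "beta"], "case_sensitive": False}],
    return_index=True,
)
print(hits)  # [{'index': 0, 'data': {...}}, {'index': 1, 'data': {...}}]
```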
@@ -2,9 +2,16 @@
 Various helper functions for type data clean up
 """

-from typing import Any, cast
+from warnings import deprecated
+from typing import Any
+from corelibs_iterator.dict_support import (
+    delete_keys_from_set as corelibs_delete_keys_from_set,
+    convert_to_dict_type,
+    set_entry as corelibs_set_entry
+)


+@deprecated("Use corelibs_iterator.dict_support.delete_keys_from_set instead")
 def delete_keys_from_set(
     set_data: dict[str, Any] | list[Any] | str, keys: list[str]
 ) -> dict[str, Any] | list[Any] | Any:
@@ -19,24 +26,10 @@ def delete_keys_from_set(
     Returns:
         dict[str, Any] | list[Any] | None: _description_
     """
-    # skip everything if there is no keys list
-    if not keys:
-        return set_data
-    if isinstance(set_data, dict):
-        for key, value in set_data.copy().items():
-            if key in keys:
-                del set_data[key]
-            if isinstance(value, (dict, list)):
-                delete_keys_from_set(value, keys)  # type: ignore Partly unknown
-    elif isinstance(set_data, list):
-        for value in set_data:
-            if isinstance(value, (dict, list)):
-                delete_keys_from_set(value, keys)  # type: ignore Partly unknown
-    else:
-        set_data = [set_data]
-
-    return set_data
+    return corelibs_delete_keys_from_set(set_data, keys)


 @deprecated("Use corelibs_iterator.dict_support.convert_to_dict_type instead")
 def build_dict(
     any_dict: Any, ignore_entries: list[str] | None = None
 ) -> dict[str, Any | list[Any] | dict[Any, Any]]:
@@ -49,18 +42,10 @@ def build_dict(
     Returns:
         dict[str, Any | list[Any]]: _description_
     """
-    if ignore_entries is None:
-        return cast(dict[str, Any | list[Any] | dict[Any, Any]], any_dict)
-    # ignore entries can be one key or key nested
-    # return {
-    #     key: value for key, value in any_dict.items() if key not in ignore_entries
-    # }
-    return cast(
-        dict[str, Any | list[Any] | dict[Any, Any]],
-        delete_keys_from_set(any_dict, ignore_entries)
-    )
+    return convert_to_dict_type(any_dict, ignore_entries)


 @deprecated("Use corelibs_iterator.dict_support.set_entry instead")
 def set_entry(dict_set: dict[str, Any], key: str, value_set: Any) -> dict[str, Any]:
     """
     set a new entry in the dict set
@@ -73,9 +58,6 @@ def set_entry(dict_set: dict[str, Any], key: str, value_set: Any) -> dict[str, Any]:
     Returns:
         dict[str, Any] -- _description_
     """
-    if not dict_set.get(key):
-        dict_set[key] = {}
-    dict_set[key] = value_set
-    return dict_set
+    return corelibs_set_entry(dict_set, key, value_set)

 # __END__
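A small sketch of what `delete_keys_from_set` does, per the removed implementation: it strips the named keys recursively from nested dicts and lists. The import path is hypothetical:

```python
from corelibs.type_handling.dict_support import delete_keys_from_set  # path assumed

payload = {
    "user": {"name": "ann", "password": "hunter2"},
    "sessions": [{"token": "abc", "password": "x"}],
}
cleaned = delete_keys_from_set(payload, ["password"])
print(cleaned)  # {'user': {'name': 'ann'}, 'sessions': [{'token': 'abc'}]}
```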
@@ -2,8 +2,11 @@
 Dict helpers
 """

-from typing import TypeAlias, Union, Dict, List, Any, cast
+from warnings import deprecated
+from typing import TypeAlias, Union, Dict, List, Any
+from corelibs_dump_data.dict_mask import (
+    mask as corelibs_mask
+)

 # definitions for the mask run below
 MaskableValue: TypeAlias = Union[str, int, float, bool, None]
@@ -11,6 +14,7 @@ NestedDict: TypeAlias = Dict[str, Union[MaskableValue, List[Any], 'NestedDict']]
 ProcessableValue: TypeAlias = Union[MaskableValue, List[Any], NestedDict]


+@deprecated("use corelibs_dump_data.dict_mask.mask instead")
 def mask(
     data_set: dict[str, Any],
     mask_keys: list[str] | None = None,
@@ -26,7 +30,7 @@
     and end with '_', remove to search string in string

     Arguments:
-        data_set {dict[str, str]} -- _description_
+        data_set {dict[str, Any]} -- _description_

     Keyword Arguments:
         mask_keys {list[str] | None} -- _description_ (default: {None})
@@ -37,49 +41,12 @@
     Returns:
         dict[str, str] -- _description_
     """
-    if skip is True:
-        return data_set
-    if mask_keys is None:
-        mask_keys = ["encryption", "password", "secret"]
-    else:
-        # make sure it is lower case
-        mask_keys = [mask_key.lower() for mask_key in mask_keys]
-
-    def should_mask_key(key: str) -> bool:
-        """Check if a key should be masked"""
-        __key_lower = key.lower()
-        return any(
-            __key_lower.startswith(mask_key) or
-            __key_lower.endswith(mask_key) or
-            f"{mask_str_edges}{mask_key}{mask_str_edges}" in __key_lower
-            for mask_key in mask_keys
-        )
-
-    def mask_recursive(obj: ProcessableValue) -> ProcessableValue:
-        """Recursively mask values in nested structures"""
-        if isinstance(obj, dict):
-            return {
-                key: mask_value(value) if should_mask_key(key) else mask_recursive(value)
-                for key, value in obj.items()
-            }
-        if isinstance(obj, list):
-            return [mask_recursive(item) for item in obj]
-        return obj
-
-    def mask_value(value: Any) -> Any:
-        """Handle masking based on value type"""
-        if isinstance(value, list):
-            # Mask each individual value in the list
-            return [mask_str for _ in cast('list[Any]', value)]
-        if isinstance(value, dict):
-            # Recursively process the dictionary instead of masking the whole thing
-            return mask_recursive(cast('ProcessableValue', value))
-        # Mask primitive values
-        return mask_str
-
-    return {
-        key: mask_value(value) if should_mask_key(key) else mask_recursive(value)
-        for key, value in data_set.items()
-    }
+    return corelibs_mask(
+        data_set,
+        mask_keys,
+        mask_str,
+        mask_str_edges,
+        skip
+    )

 # __END__
|
||||
|
||||
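The masking behaviour delegated above boils down to: match a key by prefix, suffix, or an edge-delimited substring, then mask recursively through dicts and lists. A minimal self-contained sketch of the same idea, assuming nothing beyond the standard library (the mask string and sample data are made up for illustration):

import sys
from typing import Any

MASK = "***"  # illustrative mask string; the real default is not shown in this diff

def mask_sketch(data: dict[str, Any], keys: list[str]) -> dict[str, Any]:
    # lower-case the match keys, as the removed implementation did
    keys = [k.lower() for k in keys]

    def hit(key: str) -> bool:
        k = key.lower()
        return any(k.startswith(m) or k.endswith(m) for m in keys)

    def walk(obj: Any) -> Any:
        # recurse into dicts and lists, mask values whose key matches
        if isinstance(obj, dict):
            return {k: MASK if hit(k) else walk(v) for k, v in obj.items()}
        if isinstance(obj, list):
            return [walk(i) for i in obj]
        return obj

    return walk(data)

print(mask_sketch(
    {"user": "a", "password": "x", "nested": {"api_secret": "y"}},
    ["password", "secret"]
))
# -> {'user': 'a', 'password': '***', 'nested': {'api_secret': '***'}}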
@@ -2,13 +2,35 @@
Various dictionary, object and list hashers
"""

import json
import hashlib
from warnings import deprecated
from typing import Any
from corelibs_hash.fingerprint import (
    hash_object as corelibs_hash_object,
    dict_hash_frozen as corelibs_dict_hash_frozen,
    dict_hash_crc as corelibs_dict_hash_crc
)


@deprecated("use corelibs_hash.fingerprint.hash_object instead")
def hash_object(obj: Any) -> str:
    """
    RECOMMENDED for new use
    Create a hash for any dict or list with mixed key types

    Arguments:
        obj {Any} -- _description_

    Returns:
        str -- _description_
    """
    return corelibs_hash_object(obj)


@deprecated("use corelibs_hash.fingerprint.hash_object instead")
def dict_hash_frozen(data: dict[Any, Any]) -> int:
    """
    NOT RECOMMENDED, use dict_hash_crc or hash_object instead
    If used, DO NOT CHANGE
    hash a dict via freeze

    Args:
@@ -17,23 +39,23 @@ def dict_hash_frozen(data: dict[Any, Any]) -> int:
    Returns:
        str: _description_
    """
    return hash(frozenset(data.items()))
    return corelibs_dict_hash_frozen(data)


@deprecated("use corelibs_hash.fingerprint.dict_hash_crc and for new use hash_object instead")
def dict_hash_crc(data: dict[Any, Any] | list[Any]) -> str:
    """
    Create a sha256 hash over dict
    LEGACY METHOD, must be kept for fallback, if used by other code, DO NOT CHANGE
    Create a sha256 hash over dict or list
    alternative for
    dict_hash_frozen

    Args:
        data (dict | list): _description_
        data (dict[Any, Any] | list[Any]): _description_

    Returns:
        str: _description_
        str: sha256 hash, prefixed with HO_ if fallback used
    """
    return hashlib.sha256(
        json.dumps(data, sort_keys=True, ensure_ascii=True).encode('utf-8')
    ).hexdigest()
    return corelibs_dict_hash_crc(data)

# __END__
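The removed dict_hash_crc body is a stable fingerprint: sha256 over a JSON dump with sorted keys, so key order does not affect the hash. A minimal standalone sketch of the same computation:

import hashlib
import json

def sketch_dict_hash(data) -> str:
    # sorted keys make the JSON, and therefore the digest, order-independent
    return hashlib.sha256(
        json.dumps(data, sort_keys=True, ensure_ascii=True).encode('utf-8')
    ).hexdigest()

# key order does not change the fingerprint
assert sketch_dict_hash({"a": 1, "b": 2}) == sketch_dict_hash({"b": 2, "a": 1})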
@@ -2,9 +2,16 @@
List type helpers
"""

from warnings import deprecated
from typing import Any, Sequence
from corelibs_iterator.list_support import (
    convert_to_list as corelibs_convert_to_list,
    is_list_in_list as corelibs_is_list_in_list,
    make_unique_list_of_dicts as corelibs_make_unique_list_of_dicts
)


@deprecated("use corelibs_iterator.list_support.convert_to_list instead")
def convert_to_list(
    entry: str | int | float | bool | Sequence[str | int | float | bool | Sequence[Any]]
) -> Sequence[str | int | float | bool | Sequence[Any]]:
@@ -17,11 +24,10 @@ def convert_to_list(
    Returns:
        list[str | int | float | bool] -- _description_
    """
    if isinstance(entry, list):
        return entry
    return [entry]
    return corelibs_convert_to_list(entry)


@deprecated("use corelibs_iterator.list_support.is_list_in_list instead")
def is_list_in_list(
    list_a: Sequence[str | int | float | bool | Sequence[Any]],
    list_b: Sequence[str | int | float | bool | Sequence[Any]]
@@ -37,11 +43,20 @@ def is_list_in_list(
    Returns:
        list[Any] -- _description_
    """
    # Create sets of (value, type) tuples
    set_a = set((item, type(item)) for item in list_a)
    set_b = set((item, type(item)) for item in list_b)
    return corelibs_is_list_in_list(list_a, list_b)

    # Get the difference and extract just the values
    return [item for item, _ in set_a - set_b]

@deprecated("use corelibs_iterator.list_support.make_unique_list_of_dicts instead")
def make_unique_list_of_dicts(dict_list: list[Any]) -> list[Any]:
    """
    Create a list of unique dictionary entries

    Arguments:
        dict_list {list[Any]} -- _description_

    Returns:
        list[Any] -- _description_
    """
    return corelibs_make_unique_list_of_dicts(dict_list)

# __END__
@@ -2,11 +2,12 @@
helper functions for jmespath interfaces
"""

from warnings import deprecated
from typing import Any
import jmespath
import jmespath.exceptions
from corelibs_search.jmespath_search import jmespath_search as jmespath_search_ng


@deprecated("Use corelibs_search.jmespath_search.jmespath_search instead")
def jmespath_search(search_data: dict[Any, Any] | list[Any], search_params: str) -> Any:
    """
    jmespath search wrapper
@@ -22,18 +23,6 @@ def jmespath_search(search_data: dict[Any, Any] | list[Any], search_params: str)
    Returns:
        Any: dict/list/etc, None if nothing found
    """
    try:
        search_result = jmespath.search(search_params, search_data)
    except jmespath.exceptions.LexerError as excp:
        raise ValueError(f"Compile failed: {search_params}: {excp}") from excp
    except jmespath.exceptions.ParseError as excp:
        raise ValueError(f"Parse failed: {search_params}: {excp}") from excp
    except jmespath.exceptions.JMESPathTypeError as excp:
        raise ValueError(f"Search failed with JMESPathTypeError: {search_params}: {excp}") from excp
    except TypeError as excp:
        raise ValueError(f"Type error for search_params: {excp}") from excp
    return search_result

    # TODO: compile jmespath setup
    return jmespath_search_ng(search_data, search_params)

# __END__
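The wrapper above now forwards to corelibs_search; the underlying call is plain jmespath.search(expression, data). A short usage sketch with made-up data:

import jmespath

data = {"users": [{"name": "a", "age": 30}, {"name": "b", "age": 25}]}
# filter expression: backticks mark JMESPath literals
print(jmespath.search("users[?age > `26`].name", data))  # -> ['a']
print(jmespath.search("users[0].name", data))            # -> 'a'
print(jmespath.search("missing.path", data))             # -> None if nothing found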
@@ -2,35 +2,37 @@
json encoder for datetime
"""

from warnings import warn, deprecated
from typing import Any
from json import JSONEncoder, dumps
from datetime import datetime, date
import copy
from jsonpath_ng import parse  # pyright: ignore[reportMissingTypeStubs, reportUnknownVariableType]
from corelibs_json.json_support import (
    default_isoformat as default_isoformat_ng,
    DateTimeEncoder as DateTimeEncoderCoreLibs,
    json_dumps as json_dumps_ng,
    modify_with_jsonpath as modify_with_jsonpath_ng,
)


# subclass JSONEncoder
class DateTimeEncoder(JSONEncoder):
class DateTimeEncoder(DateTimeEncoderCoreLibs):
    """
    Override the default method
    dumps(..., cls=DateTimeEncoder, ...)
    """
    def default(self, o: Any) -> str | None:
        if isinstance(o, (date, datetime)):
            return o.isoformat()
        return None


warn("Use corelibs_json.json_support.DateTimeEncoder instead", DeprecationWarning, stacklevel=2)


@deprecated("Use corelibs_json.json_support.default_isoformat instead")
def default_isoformat(obj: Any) -> str | None:
    """
    default override
    dumps(..., default=default, ...)
    """
    if isinstance(obj, (date, datetime)):
        return obj.isoformat()
    return None
    return default_isoformat_ng(obj)


@deprecated("Use corelibs_json.json_support.json_dumps instead")
def json_dumps(data: Any):
    """
    wrapper for json.dumps that always dumps without throwing exceptions
@@ -41,22 +43,15 @@ def json_dumps(data: Any):
    Returns:
        _type_ -- _description_
    """
    return dumps(data, ensure_ascii=False, default=str)
    return json_dumps_ng(data)


@deprecated("Use corelibs_json.json_support.modify_with_jsonpath instead")
def modify_with_jsonpath(data: dict[Any, Any], path: str, new_value: Any):
    """
    Modify dictionary using JSONPath (more powerful than JMESPath for modifications)
    """
    result = copy.deepcopy(data)
    jsonpath_expr = parse(path)  # pyright: ignore[reportUnknownVariableType]

    # Find and update all matches
    matches = jsonpath_expr.find(result)  # pyright: ignore[reportUnknownMemberType, reportUnknownVariableType]
    for match in matches:  # pyright: ignore[reportUnknownVariableType]
        match.full_path.update(result, new_value)  # pyright: ignore[reportUnknownMemberType]

    return result
    return modify_with_jsonpath_ng(data, path, new_value)

# __END__
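Both helpers above wrap patterns that can be shown with the stdlib json module and jsonpath_ng alone; a minimal sketch (the sample data is made up):

from datetime import datetime
from json import dumps
from jsonpath_ng import parse

# isoformat default: datetimes become ISO strings instead of raising
print(dumps({"ts": datetime(2025, 1, 1, 12, 0)}, default=lambda o: o.isoformat()))
# -> {"ts": "2025-01-01T12:00:00"}

# jsonpath find-and-update, mirroring the removed modify_with_jsonpath body
data = {"a": {"b": 1}, "c": [{"b": 2}]}
for match in parse("$..b").find(data):
    match.full_path.update(data, 0)
print(data)  # -> {'a': {'b': 0}, 'c': [{'b': 0}]}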
@@ -13,9 +13,9 @@ from pathlib import Path
import atexit
from enum import Flag, auto
from typing import MutableMapping, TextIO, TypedDict, Any, TYPE_CHECKING, cast
from corelibs_stack_trace.stack import call_stack, exception_stack
from corelibs_text_colors.text_colors import Colors
from corelibs.logging_handling.logging_level_handling.logging_level import LoggingLevel
from corelibs.string_handling.text_colors import Colors
from corelibs.debug_handling.debug_helpers import call_stack, exception_stack

if TYPE_CHECKING:
    from multiprocessing import Queue
@@ -392,6 +392,24 @@ class LogParent:
        except IndexError:
            return LoggingLevel.NOTSET

    def any_handler_is_minimum_level(self, log_level: LoggingLevel) -> bool:
        """
        if any handler is set to minimum level

        Arguments:
            log_level {LoggingLevel} -- _description_

        Returns:
            bool -- _description_
        """
        for handler in self.handlers.values():
            try:
                if LoggingLevel.from_any(handler.level).includes(log_level):
                    return True
            except (IndexError, AttributeError):
                continue
        return False

    @staticmethod
    def validate_log_level(log_level: Any) -> bool:
        """
@@ -584,9 +602,9 @@ class Log(LogParent):
            __setting = self.DEFAULT_LOG_SETTINGS.get(__log_entry, True)
            default_log_settings[__log_entry] = __setting
        # check console log type
        default_log_settings['console_format_type'] = cast('ConsoleFormat', log_settings.get(
            'console_format_type', self.DEFAULT_LOG_SETTINGS['console_format_type']
        ))
        if (console_format_type := log_settings.get('console_format_type')) is None:
            console_format_type = self.DEFAULT_LOG_SETTINGS['console_format_type']
        default_log_settings['console_format_type'] = cast('ConsoleFormat', console_format_type)
        # check log queue
        __setting = log_settings.get('log_queue', self.DEFAULT_LOG_SETTINGS['log_queue'])
        if __setting is not None:
@@ -756,6 +774,16 @@ class Log(LogParent):
            self.__set_console_formatter(console_format_type)
        )

    def get_console_formatter(self) -> ConsoleFormat:
        """
        Get the current console formatter, that is, the settings type
        Note that if eg "ALL" is set it will return the combined information but not the ALL flag name itself

        Returns:
            ConsoleFormat -- _description_
        """
        return self.log_settings['console_format_type']

    # MARK: console handler
    def __create_console_handler(
        self, handler_name: str,
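any_handler_is_minimum_level asks whether at least one handler would let a record of the given level through. A rough standalone sketch of the same check against plain stdlib logging handlers; the corelibs version goes through LoggingLevel.from_any(...).includes(...) instead of a numeric compare:

import logging

def any_handler_at_least(logger: logging.Logger, level: int) -> bool:
    # a handler whose threshold is at or below the level will emit the record
    return any(
        h.level <= level and h.level != logging.NOTSET
        for h in logger.handlers
    )

log = logging.getLogger("demo")
log.addHandler(logging.StreamHandler())
log.handlers[0].setLevel(logging.WARNING)
print(any_handler_at_least(log, logging.ERROR))  # True: WARNING lets ERROR through
print(any_handler_at_least(log, logging.DEBUG))  # False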
0
src/corelibs/math_handling/__init__.py
Normal file
38
src/corelibs/math_handling/math_helpers.py
Normal file
@@ -0,0 +1,38 @@
"""
Various math helpers
"""

from warnings import deprecated
import math


@deprecated("Use math.gcd instead")
def gcd(a: int, b: int):
    """
    Calculate: Greatest Common Divisor

    Arguments:
        a {int} -- _description_
        b {int} -- _description_

    Returns:
        _type_ -- _description_
    """
    return math.gcd(a, b)


@deprecated("Use math.lcm instead")
def lcd(a: int, b: int):
    """
    Calculate: Least Common Multiple

    Arguments:
        a {int} -- _description_
        b {int} -- _description_

    Returns:
        _type_ -- _description_
    """
    return math.lcm(a, b)

# __END__
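Both shims only forward to the stdlib, which is why they are deprecated on arrival; for reference:

import math

print(math.gcd(12, 18))  # -> 6
print(math.lcm(4, 6))    # -> 12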
@@ -3,32 +3,61 @@ requests lib interface
V2 call type
"""

from typing import Any
import warnings
from typing import Any, TypedDict, cast
import requests
# to hide the verify warnings because of the bad SSL settings from Netskope, Akamai, etc
warnings.filterwarnings('ignore', message='Unverified HTTPS request')
from requests import exceptions


class ErrorResponse:
    """
    Error response structure. This is returned if a request could not be completed
    """
    def __init__(
        self,
        code: int,
        message: str,
        action: str,
        url: str,
        exception: exceptions.InvalidSchema | exceptions.ReadTimeout | exceptions.ConnectionError | None = None
    ) -> None:
        self.code = code
        self.message = message
        self.action = action
        self.url = url
        self.exception_name = type(exception).__name__ if exception is not None else None
        self.exception_trace = exception if exception is not None else None


class ProxyConfig(TypedDict):
    """
    Socks proxy settings
    """
    type: str
    host: str
    port: str


class Caller:
    """_summary_"""
    """
    requests lib interface
    """

    def __init__(
        self,
        header: dict[str, str],
        verify: bool = True,
        timeout: int = 20,
        proxy: dict[str, str] | None = None,
        proxy: ProxyConfig | None = None,
        verify: bool = True,
        ca_file: str | None = None
    ):
        self.headers = header
        self.timeout: int = timeout
        self.cafile = ca_file
        self.ca_file = ca_file
        self.verify = verify
        self.proxy = proxy
        self.proxy = cast(dict[str, str], proxy) if proxy is not None else None

    def __timeout(self, timeout: int | None) -> int:
        if timeout is not None:
        if timeout is not None and timeout >= 0:
            return timeout
        return self.timeout

@@ -39,7 +68,7 @@ class Caller:
        data: dict[str, Any] | None = None,
        params: dict[str, Any] | None = None,
        timeout: int | None = None
    ) -> requests.Response | None:
    ) -> requests.Response | ErrorResponse:
        """
        call wrapper, on error returns an ErrorResponse

@@ -56,67 +85,96 @@ class Caller:
        if data is None:
            data = {}
        try:
            response = None
            if action == "get":
                response = requests.get(
                return requests.get(
                    url,
                    params=params,
                    headers=self.headers,
                    timeout=self.__timeout(timeout),
                    verify=self.verify,
                    proxies=self.proxy
                    proxies=self.proxy,
                    cert=self.ca_file
                )
            elif action == "post":
                response = requests.post(
            if action == "post":
                return requests.post(
                    url,
                    params=params,
                    json=data,
                    headers=self.headers,
                    timeout=self.__timeout(timeout),
                    verify=self.verify,
                    proxies=self.proxy
                    proxies=self.proxy,
                    cert=self.ca_file
                )
            elif action == "put":
                response = requests.put(
            if action == "put":
                return requests.put(
                    url,
                    params=params,
                    json=data,
                    headers=self.headers,
                    timeout=self.__timeout(timeout),
                    verify=self.verify,
                    proxies=self.proxy
                    proxies=self.proxy,
                    cert=self.ca_file
                )
            elif action == "patch":
                response = requests.patch(
            if action == "patch":
                return requests.patch(
                    url,
                    params=params,
                    json=data,
                    headers=self.headers,
                    timeout=self.__timeout(timeout),
                    verify=self.verify,
                    proxies=self.proxy
                    proxies=self.proxy,
                    cert=self.ca_file
                )
            elif action == "delete":
                response = requests.delete(
            if action == "delete":
                return requests.delete(
                    url,
                    params=params,
                    headers=self.headers,
                    timeout=self.__timeout(timeout),
                    verify=self.verify,
                    proxies=self.proxy
                    proxies=self.proxy,
                    cert=self.ca_file
                )
            return response
        except requests.exceptions.InvalidSchema as e:
            print(f"Invalid URL during '{action}' for {url}:\n\t{e}")
            return None
        except requests.exceptions.ReadTimeout as e:
            print(f"Timeout ({self.timeout}s) during '{action}' for {url}:\n\t{e}")
            return None
        except requests.exceptions.ConnectionError as e:
            print(f"Connection error during '{action}' for {url}:\n\t{e}")
            return None
            return ErrorResponse(
                100,
                f"Unsupported action '{action}'",
                action,
                url
            )
        except exceptions.InvalidSchema as e:
            return ErrorResponse(
                200,
                f"Invalid URL during '{action}' for {url}",
                action,
                url,
                e
            )
        except exceptions.ReadTimeout as e:
            return ErrorResponse(
                300,
                f"Timeout ({self.timeout}s) during '{action}' for {url}",
                action,
                url,
                e
            )
        except exceptions.ConnectionError as e:
            return ErrorResponse(
                400,
                f"Connection error during '{action}' for {url}",
                action,
                url,
                e
            )

    def get(self, url: str, params: dict[str, Any] | None = None) -> requests.Response | None:
    def get(
        self,
        url: str,
        params: dict[str, Any] | None = None,
        timeout: int | None = None
    ) -> requests.Response | ErrorResponse:
        """
        get data

@@ -127,11 +185,15 @@ class Caller:
        Returns:
            requests.Response: _description_
        """
        return self.__call('get', url, params=params)
        return self.__call('get', url, params=params, timeout=timeout)

    def post(
        self, url: str, data: dict[str, Any] | None = None, params: dict[str, Any] | None = None
    ) -> requests.Response | None:
        self,
        url: str,
        data: dict[str, Any] | None = None,
        params: dict[str, Any] | None = None,
        timeout: int | None = None
    ) -> requests.Response | ErrorResponse:
        """
        post data

@@ -143,11 +205,15 @@ class Caller:
        Returns:
            requests.Response | None: _description_
        """
        return self.__call('post', url, data, params)
        return self.__call('post', url, data, params, timeout=timeout)

    def put(
        self, url: str, data: dict[str, Any] | None = None, params: dict[str, Any] | None = None
    ) -> requests.Response | None:
        self,
        url: str,
        data: dict[str, Any] | None = None,
        params: dict[str, Any] | None = None,
        timeout: int | None = None
    ) -> requests.Response | ErrorResponse:
        """_summary_

        Args:
@@ -158,11 +224,15 @@ class Caller:
        Returns:
            requests.Response | None: _description_
        """
        return self.__call('put', url, data, params)
        return self.__call('put', url, data, params, timeout=timeout)

    def patch(
        self, url: str, data: dict[str, Any] | None = None, params: dict[str, Any] | None = None
    ) -> requests.Response | None:
        self,
        url: str,
        data: dict[str, Any] | None = None,
        params: dict[str, Any] | None = None,
        timeout: int | None = None
    ) -> requests.Response | ErrorResponse:
        """_summary_

        Args:
@@ -173,9 +243,14 @@ class Caller:
        Returns:
            requests.Response | None: _description_
        """
        return self.__call('patch', url, data, params)
        return self.__call('patch', url, data, params, timeout=timeout)

    def delete(self, url: str, params: dict[str, Any] | None = None) -> requests.Response | None:
    def delete(
        self,
        url: str,
        params: dict[str, Any] | None = None,
        timeout: int | None = None
    ) -> requests.Response | ErrorResponse:
        """
        delete

@@ -186,6 +261,6 @@ class Caller:
        Returns:
            requests.Response | None: _description_
        """
        return self.__call('delete', url, params=params)
        return self.__call('delete', url, params=params, timeout=timeout)

# __END__
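With the ErrorResponse change above, call sites switch from None checks to an isinstance check. A usage sketch; the module import path is not shown in this diff, so Caller and ErrorResponse are used as defined above, and the URL is a placeholder:

caller = Caller(header={"Accept": "application/json"}, timeout=10)
result = caller.get("https://example.com/api", timeout=5)  # per-call timeout override
if isinstance(result, ErrorResponse):
    # request never completed: inspect the structured error
    print(f"[{result.code}] {result.message} ({result.exception_name})")
else:
    # a real requests.Response; HTTP status errors still need checking here
    print(result.status_code, result.text[:80])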
@@ -5,152 +5,14 @@ Set colors with print(f"something {Colors.yellow}colorful{Colors.end})
bold + underline + color combinations are possible.
"""

from warnings import deprecated
from corelibs_text_colors.text_colors import Colors as ColorsNew

class Colors:

@deprecated("Use src.corelibs_text_colors.text_colors instead")
class Colors(ColorsNew):
    """
    ANSI colors defined
    """
    # General sets, these should not be accessed
    __BOLD = '\033[1m'
    __UNDERLINE = '\033[4m'
    __END = '\033[0m'
    __RESET = '\033[0m'
    # Define ANSI color codes as class attributes
    __BLACK = "\033[30m"
    __RED = "\033[31m"
    __GREEN = "\033[32m"
    __YELLOW = "\033[33m"
    __BLUE = "\033[34m"
    __MAGENTA = "\033[35m"
    __CYAN = "\033[36m"
    __WHITE = "\033[37m"

    # Define bold/bright versions of the colors
    __BLACK_BOLD = "\033[1;30m"
    __RED_BOLD = "\033[1;31m"
    __GREEN_BOLD = "\033[1;32m"
    __YELLOW_BOLD = "\033[1;33m"
    __BLUE_BOLD = "\033[1;34m"
    __MAGENTA_BOLD = "\033[1;35m"
    __CYAN_BOLD = "\033[1;36m"
    __WHITE_BOLD = "\033[1;37m"

    # BRIGHT, alternative
    __BLACK_BRIGHT = '\033[90m'
    __RED_BRIGHT = '\033[91m'
    __GREEN_BRIGHT = '\033[92m'
    __YELLOW_BRIGHT = '\033[93m'
    __BLUE_BRIGHT = '\033[94m'
    __MAGENTA_BRIGHT = '\033[95m'
    __CYAN_BRIGHT = '\033[96m'
    __WHITE_BRIGHT = '\033[97m'

    # set access vars
    bold = __BOLD
    underline = __UNDERLINE
    end = __END
    reset = __RESET
    # normal
    black = __BLACK
    red = __RED
    green = __GREEN
    yellow = __YELLOW
    blue = __BLUE
    magenta = __MAGENTA
    cyan = __CYAN
    white = __WHITE
    # bold
    black_bold = __BLACK_BOLD
    red_bold = __RED_BOLD
    green_bold = __GREEN_BOLD
    yellow_bold = __YELLOW_BOLD
    blue_bold = __BLUE_BOLD
    magenta_bold = __MAGENTA_BOLD
    cyan_bold = __CYAN_BOLD
    white_bold = __WHITE_BOLD
    # bright
    black_bright = __BLACK_BRIGHT
    red_bright = __RED_BRIGHT
    green_bright = __GREEN_BRIGHT
    yellow_bright = __YELLOW_BRIGHT
    blue_bright = __BLUE_BRIGHT
    magenta_bright = __MAGENTA_BRIGHT
    cyan_bright = __CYAN_BRIGHT
    white_bright = __WHITE_BRIGHT

    @staticmethod
    def disable():
        """
        No colors
        """
        Colors.bold = ''
        Colors.underline = ''
        Colors.end = ''
        Colors.reset = ''
        # normal
        Colors.black = ''
        Colors.red = ''
        Colors.green = ''
        Colors.yellow = ''
        Colors.blue = ''
        Colors.magenta = ''
        Colors.cyan = ''
        Colors.white = ''
        # bold/bright
        Colors.black_bold = ''
        Colors.red_bold = ''
        Colors.green_bold = ''
        Colors.yellow_bold = ''
        Colors.blue_bold = ''
        Colors.magenta_bold = ''
        Colors.cyan_bold = ''
        Colors.white_bold = ''
        # bold/bright alt
        Colors.black_bright = ''
        Colors.red_bright = ''
        Colors.green_bright = ''
        Colors.yellow_bright = ''
        Colors.blue_bright = ''
        Colors.magenta_bright = ''
        Colors.cyan_bright = ''
        Colors.white_bright = ''

    @staticmethod
    def reset_colors():
        """
        reset colors to the original ones
        """
        # set access vars
        Colors.bold = Colors.__BOLD
        Colors.underline = Colors.__UNDERLINE
        Colors.end = Colors.__END
        Colors.reset = Colors.__RESET
        # normal
        Colors.black = Colors.__BLACK
        Colors.red = Colors.__RED
        Colors.green = Colors.__GREEN
        Colors.yellow = Colors.__YELLOW
        Colors.blue = Colors.__BLUE
        Colors.magenta = Colors.__MAGENTA
        Colors.cyan = Colors.__CYAN
        Colors.white = Colors.__WHITE
        # bold
        Colors.black_bold = Colors.__BLACK_BOLD
        Colors.red_bold = Colors.__RED_BOLD
        Colors.green_bold = Colors.__GREEN_BOLD
        Colors.yellow_bold = Colors.__YELLOW_BOLD
        Colors.blue_bold = Colors.__BLUE_BOLD
        Colors.magenta_bold = Colors.__MAGENTA_BOLD
        Colors.cyan_bold = Colors.__CYAN_BOLD
        Colors.white_bold = Colors.__WHITE_BOLD
        # bright
        Colors.black_bright = Colors.__BLACK_BRIGHT
        Colors.red_bright = Colors.__RED_BRIGHT
        Colors.green_bright = Colors.__GREEN_BRIGHT
        Colors.yellow_bright = Colors.__YELLOW_BRIGHT
        Colors.blue_bright = Colors.__BLUE_BRIGHT
        Colors.magenta_bright = Colors.__MAGENTA_BRIGHT
        Colors.cyan_bright = Colors.__CYAN_BRIGHT
        Colors.white_bright = Colors.__WHITE_BRIGHT

# __END__
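Usage stays as the module docstring describes; combinations simply concatenate ANSI sequences:

print(f"{Colors.yellow}colorful{Colors.end}")
print(f"{Colors.bold}{Colors.red}bold red{Colors.reset}")
Colors.disable()        # blank out all sequences, e.g. when not writing to a TTY
print(f"{Colors.green}plain{Colors.end}")
Colors.reset_colors()   # restore the original escape codes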
@@ -2,82 +2,24 @@
Enum base classes
"""

from enum import Enum
from warnings import deprecated
from typing import Any
# from corelibs_enum_base.enum_base import EnumBase as CorelibsEnumBase
import warnings
from corelibs_enum_base.enum_base import EnumBase as CorelibsEnumBase


class EnumBase(Enum):
class EnumBase(CorelibsEnumBase):
    """
    base for enum

    .. deprecated::
        Use corelibs_enum_base.EnumBase instead
    DEPRECATED: Use corelibs_enum_base.enum_base.EnumBase instead

    lookup_any and from_any will return "EnumBase" and the sub class name
    run the return again to "from_any" to get a clean value, or cast it
    """

    @classmethod
    @deprecated("Use corelibs_enum_base.EnumBase instead")
    def lookup_key(cls, enum_key: str):
        """Lookup from key side (must be string)"""
        # if there is a ":", then this is legacy, replace with ___
        if ":" in enum_key:
            enum_key = enum_key.replace(':', '___')
        try:
            return cls[enum_key.upper()]
        except KeyError as e:
            raise ValueError(f"Invalid key: {enum_key}") from e
        except AttributeError as e:
            raise ValueError(f"Invalid key: {enum_key}") from e

    @classmethod
    @deprecated("Use corelibs_enum_base.EnumBase instead")
    def lookup_value(cls, enum_value: Any):
        """Lookup through value side"""
        try:
            return cls(enum_value)
        except ValueError as e:
            raise ValueError(f"Invalid value: {enum_value}") from e
# At the module level, issue a deprecation warning
warnings.warn("Use corelibs_enum_base.enum_base.EnumBase instead", DeprecationWarning, stacklevel=2)

    @classmethod
    @deprecated("Use corelibs_enum_base.EnumBase instead")
    def from_any(cls, enum_any: Any):
        """
        This only works in the following order
        -> class itself, as is
        -> str, assume key lookup
        -> if failed try other

        Arguments:
            enum_any {Any} -- _description_

        Returns:
            _type_ -- _description_
        """
        if isinstance(enum_any, cls):
            return enum_any
        # try key first if it is string
        # if failed try value
        if isinstance(enum_any, str):
            try:
                return cls.lookup_key(enum_any)
            except (ValueError, AttributeError):
                try:
                    return cls.lookup_value(enum_any)
                except ValueError as e:
                    raise ValueError(f"Could not find as key or value: {enum_any}") from e
        return cls.lookup_value(enum_any)

    @deprecated("Use corelibs_enum_base.EnumBase instead")
    def to_value(self) -> Any:
        """Convert to value"""
        return self.value

    @deprecated("Use corelibs_enum_base.EnumBase instead")
    def to_lower_case(self) -> str:
        """return lower case"""
        return self.name.lower()

    @deprecated("Use corelibs_enum_base.EnumBase instead")
    def __str__(self) -> str:
        """return [Enum].NAME like it was called with .name"""
        return self.name
# __END__
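The from_any resolution order documented above (instance as-is, then key lookup for strings, then value lookup) can be sketched standalone with a throwaway enum:

from enum import Enum

class Fruit(Enum):
    APPLE = 1
    PEAR = 2

def from_any_sketch(enum_any):
    # 1) already an instance: return as-is
    if isinstance(enum_any, Fruit):
        return enum_any
    # 2) string: try the key side first, fall back to the value side
    if isinstance(enum_any, str):
        try:
            return Fruit[enum_any.upper()]
        except KeyError:
            return Fruit(enum_any)
    # 3) anything else: value lookup only
    return Fruit(enum_any)

print(from_any_sketch("apple"))  # -> Fruit.APPLE via key
print(from_any_sketch(2))        # -> Fruit.PEAR via value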
15
src/corelibs/var_handling/enum_base.pyi
Normal file
@@ -0,0 +1,15 @@
"""
Enum base classes [STPUB]
"""

from typing_extensions import deprecated
from corelibs_enum_base.enum_base import EnumBase as CorelibsEnumBase


@deprecated("Use corelibs_enum_base.enum_base.EnumBase instead")
class EnumBase(CorelibsEnumBase):
    """
    base for enum
    lookup_any and from_any will return "EnumBase" and the sub class name
    run the return again to "from_any" to get a clean value, or cast it
    """
109
test-run/check_handling/regex_checks.py
Normal file
@@ -0,0 +1,109 @@
"""
Test check handling for regex checks
"""

from corelibs_text_colors.text_colors import Colors
from corelibs.check_handling.regex_constants import (
    compile_re, DOMAIN_WITH_LOCALHOST_REGEX, EMAIL_BASIC_REGEX, NAME_EMAIL_BASIC_REGEX, SUB_EMAIL_BASIC_REGEX
)
from corelibs.check_handling.regex_constants_compiled import (
    COMPILED_DOMAIN_WITH_LOCALHOST_REGEX, COMPILED_EMAIL_BASIC_REGEX,
    COMPILED_NAME_EMAIL_SIMPLE_REGEX, COMPILED_NAME_EMAIL_BASIC_REGEX
)

NAME_EMAIL_SIMPLE_REGEX = r"""
^\s*(?:"(?P<name1>[^"]+)"\s*<(?P<email1>[^>]+)>|
(?P<name2>.+?)\s*<(?P<email2>[^>]+)>|
<(?P<email3>[^>]+)>|
(?P<email4>[^\s<>]+))\s*$
"""


def domain_test():
    """
    domain regex test
    """
    print("=" * 30)
    test_domains = [
        "example.com",
        "localhost",
        "subdomain.localhost",
        "test.localhost.com",
        "some-domain.org"
    ]

    regex_domain_check = COMPILED_DOMAIN_WITH_LOCALHOST_REGEX
    print(f"REGEX: {DOMAIN_WITH_LOCALHOST_REGEX}")
    print(f"Check regex: {regex_domain_check.search('localhost')}")

    for domain in test_domains:
        if regex_domain_check.search(domain):
            print(f"Matched: {domain}")
        else:
            print(f"Did not match: {domain}")


def email_test():
    """
    email regex test
    """
    print("=" * 30)
    email_list = """
    e@bar.com
    <f@foobar.com>
    "Master" <foobar@bar.com>
    "not valid" not@valid.com
    also not valid not@valid.com
    some header <something@bar.com>
    test master <master@master.com>
    日本語 <japan@jp.net>
    "ひほん カケ苦" <foo@bar.com>
    single@entry.com
    arsch@popsch.com
    test open <open@open.com>
    """

    print(f"REGEX: SUB_EMAIL_BASIC_REGEX: {SUB_EMAIL_BASIC_REGEX}")
    print(f"REGEX: EMAIL_BASIC_REGEX: {EMAIL_BASIC_REGEX}")
    print(f"REGEX: COMPILED_NAME_EMAIL_SIMPLE_REGEX: {COMPILED_NAME_EMAIL_SIMPLE_REGEX}")
    print(f"REGEX: NAME_EMAIL_BASIC_REGEX: {NAME_EMAIL_BASIC_REGEX}")

    basic_email = COMPILED_EMAIL_BASIC_REGEX
    sub_basic_email = compile_re(SUB_EMAIL_BASIC_REGEX)
    simple_name_email_regex = COMPILED_NAME_EMAIL_SIMPLE_REGEX
    full_name_email_regex = COMPILED_NAME_EMAIL_BASIC_REGEX
    for email in email_list.splitlines():
        email = email.strip()
        if not email:
            continue
        print(f">>> Testing: {email}")
        if not basic_email.match(email):
            print(f"{Colors.red}[EMAIL ] No match: {email}{Colors.reset}")
        else:
            print(f"{Colors.green}[EMAIL ] Matched : {email}{Colors.reset}")
        if not sub_basic_email.match(email):
            print(f"{Colors.red}[SUB   ] No match: {email}{Colors.reset}")
        else:
            print(f"{Colors.green}[SUB   ] Matched : {email}{Colors.reset}")
        if not simple_name_email_regex.match(email):
            print(f"{Colors.red}[SIMPLE] No match: {email}{Colors.reset}")
        else:
            print(f"{Colors.green}[SIMPLE] Matched : {email}{Colors.reset}")
        if not full_name_email_regex.match(email):
            print(f"{Colors.red}[FULL  ] No match: {email}{Colors.reset}")
        else:
            print(f"{Colors.green}[FULL  ] Matched : {email}{Colors.reset}")


def main():
    """
    Test regex checks
    """
    domain_test()
    email_test()


if __name__ == "__main__":
    main()

# __END__
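The named groups in NAME_EMAIL_SIMPLE_REGEX resolve one alternative per address form; compiled with re.VERBOSE (as compile_re does), a quoted-name entry lands in name1/email1. A self-contained check:

import re

# copy of NAME_EMAIL_SIMPLE_REGEX from the test file above
NAME_EMAIL_SIMPLE_REGEX = r"""
^\s*(?:"(?P<name1>[^"]+)"\s*<(?P<email1>[^>]+)>|
(?P<name2>.+?)\s*<(?P<email2>[^>]+)>|
<(?P<email3>[^>]+)>|
(?P<email4>[^\s<>]+))\s*$
"""
pattern = re.compile(NAME_EMAIL_SIMPLE_REGEX, re.VERBOSE)
m = pattern.match('"Master" <foobar@bar.com>')
if m:
    print(m.group("name1"), m.group("email1"))  # -> Master foobar@bar.com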
@@ -12,10 +12,12 @@ some_match_list=foo,bar
test_list=a,b,c,d f, g h
other_list=a|b|c|d|
third_list=xy|ab|df|fg
empty_list=
str_length=foobar
int_range=20
int_range_not_set=
int_range_not_set_empty_set=5
bool_var=True
#
match_target=foo
match_target_list=foo,bar,baz
@@ -37,3 +39,6 @@ email_bad=gii@bar.com
[LoadTest]
a.b.c=foo
d:e:f=bar

[ErrorTest]
some_value=42
@@ -4,7 +4,7 @@ Settings loader test

import re
from pathlib import Path
from corelibs.debug_handling.dump_data import dump_data
from corelibs_dump_data.dump_data import dump_data
from corelibs.logging_handling.log import Log
from corelibs.config_handling.settings_loader import SettingsLoader
from corelibs.config_handling.settings_loader_handling.settings_loader_check import SettingsLoaderCheck
@@ -21,11 +21,6 @@ def main():
    Main run
    """

    value = "2025/1/1"
    regex_c = re.compile(SettingsLoaderCheck.CHECK_SETTINGS['string.date']['regex'], re.VERBOSE)
    result = regex_c.search(value)
    print(f"regex {regex_c} check against {value} -> {result}")

    # for log testing
    log = Log(
        log_path=ROOT_PATH.joinpath(LOG_DIR, 'settings_loader.log'),
@@ -37,6 +32,11 @@ def main():
    )
    log.logger.info('Settings loader')

    value = "2025/1/1"
    regex_c = re.compile(SettingsLoaderCheck.CHECK_SETTINGS['string.date']['regex'], re.VERBOSE)
    result = regex_c.search(value)
    log.info(f"regex {regex_c} check against {value} -> {result}")

    sl = SettingsLoader(
        {
            'overload_from_args': 'OVERLOAD from ARGS',
@@ -69,6 +69,9 @@ def main():
                "split:|",
                "check:string.alphanumeric"
            ],
            "empty_list": [
                "split:,",
            ],
            "str_length": [
                "length:2-10"
            ],
@@ -81,6 +84,7 @@ def main():
            "int_range_not_set_empty_set": [
                "empty:"
            ],
            "bool_var": ["convert:bool"],
            "match_target": ["matching:foo"],
            "match_target_list": ["split:,", "matching:foo|bar|baz",],
            "match_source_a": ["in:match_target"],
@@ -125,6 +129,20 @@ def main():
    except ValueError as e:
        print(f"Could not load settings: {e}")

    try:
        config_load = 'ErrorTest'
        config_data = sl.load_settings(
            config_load,
            {
                "some_value": [
                    "check:string.email.basic",
                ],
            }
        )
        print(f"[{config_load}] Load: {config_load} -> {dump_data(config_data)}")
    except ValueError as e:
        print(f"Could not load settings: {e}")


if __name__ == "__main__":
    main()
139
test-run/db_handling/sql_main.py
Normal file
@@ -0,0 +1,139 @@
"""
SQL Main wrapper test
"""

from pathlib import Path
from uuid import uuid4
import json
from corelibs_dump_data.dump_data import dump_data
from corelibs.logging_handling.log import Log, Logger
from corelibs.db_handling.sql_main import SQLMain


SCRIPT_PATH: Path = Path(__file__).resolve().parent
ROOT_PATH: Path = SCRIPT_PATH
DATABASE_DIR: Path = Path("database")
LOG_DIR: Path = Path("log")


def main() -> None:
    """
    Comment
    """
    log = Log(
        log_path=ROOT_PATH.joinpath(LOG_DIR, 'sqlite_main.log'),
        log_name="SQLite Main",
        log_settings={
            "log_level_console": 'DEBUG',
            "log_level_file": 'DEBUG',
        }
    )
    sql_main = SQLMain(
        log=Logger(log.get_logger_settings()),
        db_ident=f"sqlite:{ROOT_PATH.joinpath(DATABASE_DIR, 'test_sqlite_main.db')}"
    )
    if sql_main.connected():
        log.info("SQL Main connected successfully")
    else:
        log.error('SQL Main connection failed')
    if sql_main.dbh is None:
        log.error('SQL Main DBH instance is None')
        return

    if sql_main.dbh.trigger_exists('trg_test_a_set_date_updated_on_update'):
        log.info("Trigger trg_test_a_set_date_updated_on_update exists")
    if sql_main.dbh.table_exists('test_a'):
        log.info("Table test_a exists, dropping for clean test")
        sql_main.dbh.execute_query("DROP TABLE test_a;")
    # create a dummy table
    table_sql = """
    CREATE TABLE IF NOT EXISTS test_a (
        test_a_id INTEGER PRIMARY KEY,
        date_created TEXT DEFAULT (strftime('%Y-%m-%d %H:%M:%f', 'now')),
        date_updated TEXT,
        uid TEXT NOT NULL UNIQUE,
        set_current_timestamp TEXT DEFAULT CURRENT_TIMESTAMP,
        text_a TEXT,
        content,
        int_a INTEGER,
        float_a REAL
    );
    """

    result = sql_main.dbh.execute_query(table_sql)
    log.debug(f"Create table result: {result}")
    trigger_sql = """
    CREATE TRIGGER trg_test_a_set_date_updated_on_update
    AFTER UPDATE ON test_a
    FOR EACH ROW
    WHEN OLD.date_updated IS NULL OR NEW.date_updated = OLD.date_updated
    BEGIN
        UPDATE test_a
        SET date_updated = (strftime('%Y-%m-%d %H:%M:%f', 'now'))
        WHERE test_a_id = NEW.test_a_id;
    END;
    """
    result = sql_main.dbh.execute_query(trigger_sql)
    log.debug(f"Create trigger result: {result}")
    result = sql_main.dbh.meta_data_detail('test_a')
    log.debug(f"Table meta data detail: {dump_data(result)}")
    # INSERT DATA
    sql = """
    INSERT INTO test_a (uid, text_a, content, int_a, float_a)
    VALUES (?, ?, ?, ?, ?)
    RETURNING test_a_id, uid;
    """
    result = sql_main.dbh.execute_query(
        sql,
        (
            str(uuid4()),
            'Some text A',
            json.dumps({'foo': 'bar', 'number': 42}),
            123,
            123.456,
        )
    )
    log.debug(f"[1] Insert data result: {dump_data(result)}")
    __uid: str = ''
    if result is not False:
        # first one only of interest
        result = dict(result[0])
        __uid = str(result.get('uid', ''))
    # second insert
    result = sql_main.dbh.execute_query(
        sql,
        (
            str(uuid4()),
            'Some text A',
            json.dumps({'foo': 'bar', 'number': 42}),
            123,
            123.456,
        )
    )
    log.debug(f"[2] Insert data result: {dump_data(result)}")
    result = sql_main.dbh.execute_query("SELECT * FROM test_a;")
    log.debug(f"Select data result: {dump_data(result)}")
    result = sql_main.dbh.return_one("SELECT * FROM test_a WHERE uid = ?;", (__uid,))
    log.debug(f"Fetch row result: {dump_data(result)}")
    sql = """
    UPDATE test_a
    SET text_a = ?
    WHERE uid = ?;
    """
    result = sql_main.dbh.execute_query(
        sql,
        (
            'Some updated text A',
            __uid,
        )
    )
    log.debug(f"Update data result: {dump_data(result)}")
    result = sql_main.dbh.return_one("SELECT * FROM test_a WHERE uid = ?;", (__uid,))
    log.debug(f"Fetch row after update result: {dump_data(result)}")

    sql_main.close()


if __name__ == "__main__":
    main()

# __END__
@@ -1,14 +1,12 @@
#!/usr/bin/env python3

"""
Main comment
SQLite IO test
"""

from pathlib import Path
from uuid import uuid4
import json
import sqlite3
from corelibs.debug_handling.dump_data import dump_data
from corelibs_dump_data.dump_data import dump_data
from corelibs.logging_handling.log import Log, Logger
from corelibs.db_handling.sqlite_io import SQLiteIO
@@ -5,7 +5,7 @@ Symmetric encryption test
"""

import json
from corelibs.debug_handling.dump_data import dump_data
from corelibs_dump_data.dump_data import dump_data
from corelibs.encryption_handling.symmetric_encryption import SymmetricEncryption
@@ -5,8 +5,8 @@ BOM check for files
"""

from pathlib import Path
from corelibs_dump_data.dump_data import dump_data
from corelibs.file_handling.file_bom_encoding import is_bom_encoded, is_bom_encoded_info
from corelibs.debug_handling.dump_data import dump_data


def main() -> None:
@@ -5,7 +5,7 @@ Search data tests
iterator_handling.data_search
"""

from corelibs.debug_handling.dump_data import dump_data
from corelibs_dump_data.dump_data import dump_data
from corelibs.iterator_handling.data_search import find_in_array_from_list, ArraySearchList


@@ -24,12 +24,19 @@ def main() -> None:
            "lookup_value_c": "B02",
            "replace_value": "R02",
        },
        {
            "lookup_value_p": "A03",
            "lookup_value_c": "B03",
            "replace_value": "R03",
        },
    ]
    test_foo = ArraySearchList(
        key = "lookup_value_p",
        value = "A01"
        key="lookup_value_p",
        value="A01"
    )
    print(test_foo)
    result = find_in_array_from_list(data, [test_foo])
    print(f"Search A: {dump_data(test_foo)} -> {dump_data(result)}")

    search: list[ArraySearchList] = [
        {
            "key": "lookup_value_p",
@@ -38,12 +45,122 @@ def main() -> None:
        {
            "key": "lookup_value_c",
            "value": "B01"
        },
    ]
    result = find_in_array_from_list(data, search)
    print(f"Search B: {dump_data(search)} -> {dump_data(result)}")

    search: list[ArraySearchList] = [
        {
            "key": "lookup_value_p",
            "value": "A01"
        },
        {
            "key": "lookup_value_c",
            "value": "B01"
        },
        {
            "key": "lookup_value_c",
            "value": "B02"
        },
    ]
    try:
        result = find_in_array_from_list(data, search)
        print(f"Search C: {dump_data(search)} -> {dump_data(result)}")
    except KeyError as e:
        print(f"Search C raised KeyError: {e}")

    search: list[ArraySearchList] = [
        {
            "key": "lookup_value_p",
            "value": "A01"
        },
        {
            "key": "lookup_value_c",
            "value": ["B01", "B02"]
        },
    ]
    try:
        result = find_in_array_from_list(data, search)
        print(f"Search D: {dump_data(search)} -> {dump_data(result)}")
    except KeyError as e:
        print(f"Search D raised KeyError: {e}")

    search: list[ArraySearchList] = [
        {
            "key": "lookup_value_p",
            "value": ["A01", "A03"]
        },
        {
            "key": "lookup_value_c",
            "value": ["B01", "B02"]
        },
    ]
    try:
        result = find_in_array_from_list(data, search)
        print(f"Search E: {dump_data(search)} -> {dump_data(result)}")
    except KeyError as e:
        print(f"Search E raised KeyError: {e}")

    search: list[ArraySearchList] = [
        {
            "key": "lookup_value_p",
            "value": "NOT FOUND"
        },
    ]
    try:
        result = find_in_array_from_list(data, search)
        print(f"Search F: {dump_data(search)} -> {dump_data(result)}")
    except KeyError as e:
        print(f"Search F raised KeyError: {e}")

    data = [
        {
            "sd_user_id": "1593",
            "email": "",
            "employee_id": ""
        },
        {
            "sd_user_id": "1592",
            "email": "",
            "employee_id": ""
        },
        {
            "sd_user_id": "1596",
            "email": "",
            "employee_id": ""
        },
        {
            "sd_user_id": "1594",
            "email": "",
            "employee_id": ""
        },
        {
            "sd_user_id": "1595",
            "email": "",
            "employee_id": ""
        },
        {
            "sd_user_id": "1861",
            "email": "",
            "employee_id": ""
        },
        {
            "sd_user_id": "1862",
            "email": "",
            "employee_id": ""
        },
        {
            "sd_user_id": "1860",
            "email": "",
            "employee_id": ""
        }
    ]

    result = find_in_array_from_list(data, search)

    print(f"Search {dump_data(search)} -> {dump_data(result)}")
    result = find_in_array_from_list(data, [ArraySearchList(
        key="sd_user_id",
        value="1593"
    )])
    print(f"Search G: -> {dump_data(result)}")


if __name__ == "__main__":
@@ -3,7 +3,7 @@ Iterator helper testing
"""

from typing import Any
from corelibs.debug_handling.dump_data import dump_data
from corelibs_dump_data.dump_data import dump_data
from corelibs.iterator_handling.dict_mask import mask
from corelibs.iterator_handling.dict_helpers import set_entry
@@ -2,7 +2,10 @@
test list helpers
"""

from corelibs.iterator_handling.list_helpers import is_list_in_list, convert_to_list
from typing import Any
from corelibs_dump_data.dump_data import dump_data
from corelibs.iterator_handling.list_helpers import is_list_in_list, convert_to_list, make_unique_list_of_dicts
from corelibs.iterator_handling.fingerprint import dict_hash_crc


def __test_is_list_in_list_a():
@@ -18,9 +21,66 @@ def __convert_list():
    print(f"IN: {source} -> {result}")


def __make_unique_list_of_dicts():
    dict_list = [
        {"a": 1, "b": 2, "nested": {"x": 10, "y": 20}},
        {"a": 1, "b": 2, "nested": {"x": 10, "y": 20}},
        {"b": 2, "a": 1, "nested": {"y": 20, "x": 10}},
        {"b": 2, "a": 1, "nested": {"y": 20, "x": 30}},
        {"a": 3, "b": 4, "nested": {"x": 30, "y": 40}}
    ]
    unique_dicts = make_unique_list_of_dicts(dict_list)
    dhf = dict_hash_crc(unique_dicts)
    print(f"Unique dicts: {dump_data(unique_dicts)} [{dhf}]")

    dict_list = [
        {"a": 1, 1: "one"},
        {1: "one", "a": 1},
        {"a": 2, 1: "one"}
    ]
    unique_dicts = make_unique_list_of_dicts(dict_list)
    dhf = dict_hash_crc(unique_dicts)
    print(f"Unique dicts: {dump_data(unique_dicts)} [{dhf}]")

    dict_list = [
        {"a": 1, "b": [1, 2, 3]},
        {"b": [1, 2, 3], "a": 1},
        {"a": 1, "b": [1, 2, 4]},
        1, 2, "String", 1, "Foobar"
    ]
    unique_dicts = make_unique_list_of_dicts(dict_list)
    dhf = dict_hash_crc(unique_dicts)
    print(f"Unique dicts: {dump_data(unique_dicts)} [{dhf}]")

    dict_list: list[Any] = [
        [],
        {},
        [],
        {},
        {"a": []},
        {"a": []},
        {"a": {}},
        {"a": {}},
    ]
    unique_dicts = make_unique_list_of_dicts(dict_list)
    dhf = dict_hash_crc(unique_dicts)
    print(f"Unique dicts: {dump_data(unique_dicts)} [{dhf}]")

    dict_list: list[Any] = [
        (1, 2),
        (1, 2),
        (2, 3),
    ]
    unique_dicts = make_unique_list_of_dicts(dict_list)
    dhf = dict_hash_crc(unique_dicts)
    print(f"Unique dicts: {dump_data(unique_dicts)} [{dhf}]")


def main():
    """List helpers test runner"""
    __test_is_list_in_list_a()
    __convert_list()
    __make_unique_list_of_dicts()


if __name__ == "__main__":
@@ -4,7 +4,7 @@
jmes path testing
"""

from corelibs.debug_handling.dump_data import dump_data
from corelibs_dump_data.dump_data import dump_data
from corelibs.json_handling.jmespath_helper import jmespath_search
@@ -5,7 +5,7 @@ JSON content replace tests
"""

from deepdiff import DeepDiff
from corelibs.debug_handling.dump_data import dump_data
from corelibs_dump_data.dump_data import dump_data
from corelibs.json_handling.json_helper import modify_with_jsonpath
@@ -6,8 +6,8 @@ Log logging_handling.log testing
import sys
from pathlib import Path
# this is for testing only
from corelibs_stack_trace.stack import exception_stack, call_stack
from corelibs.logging_handling.log import Log, Logger, ConsoleFormat, ConsoleFormatSettings
from corelibs.debug_handling.debug_helpers import exception_stack, call_stack
from corelibs.logging_handling.logging_level_handling.logging_level import LoggingLevel

@@ -27,7 +27,8 @@ def main():
        "per_run_log": True,
        # "console_format_type": ConsoleFormatSettings.NONE,
        # "console_format_type": ConsoleFormatSettings.MINIMAL,
        "console_format_type": ConsoleFormat.TIME_MICROSECONDS | ConsoleFormat.NAME | ConsoleFormat.LEVEL,
        # "console_format_type": ConsoleFormat.TIME_MICROSECONDS | ConsoleFormat.NAME | ConsoleFormat.LEVEL,
        "console_format_type": None,
        # "console_format_type": ConsoleFormat.NAME,
        # "console_format_type": (
        #     ConsoleFormat.TIME | ConsoleFormat.TIMEZONE | ConsoleFormat.LINENO | ConsoleFormat.LEVEL
@@ -108,13 +109,31 @@ def main():
    log.set_log_level(Log.CONSOLE_HANDLER, LoggingLevel.ERROR)
    log.logger.warning('[NORMAL] Invisible Warning test: %s', log.logger.name)
    log.logger.error('[NORMAL] Visible Error test: %s', log.logger.name)
    log.logger.debug('[NORMAL] Visible Debug test: %s', log.logger.name)
    print(f"*** Any handler is minimum level ERROR: {log.any_handler_is_minimum_level(LoggingLevel.ERROR)}")
    print(f"*** Any handler is minimum level DEBUG: {log.any_handler_is_minimum_level(LoggingLevel.DEBUG)}")
    for handler in log.handlers.values():
        print(
            f"*** Setting handler {handler} is level {LoggingLevel.from_any(handler.level).name} -> "
            f"*** INC {LoggingLevel.from_any(handler.level).includes(LoggingLevel.DEBUG)}")

    print(f"*** WARNING includes ERROR: {LoggingLevel.WARNING.includes(LoggingLevel.ERROR)}")
    print(f"*** ERROR includes WARNING: {LoggingLevel.ERROR.includes(LoggingLevel.WARNING)}")

    log.set_log_level(Log.CONSOLE_HANDLER, LoggingLevel.DEBUG)
    log.debug('Current logging format: %s', log.log_settings['console_format_type'])
    log.debug('Current console formatter: %s', log.get_console_formatter())
    log.update_console_formatter(ConsoleFormat.TIME | ConsoleFormat.LINENO)
    log.info('Does it show less')
    log.info('Does it show less A')
    log.debug('Current console formatter after A: %s', log.get_console_formatter())
    log.update_console_formatter(ConsoleFormat.TIME | ConsoleFormat.LINENO)
    log.info('Does it show less B')
    log.debug('Current console formatter after B: %s', log.get_console_formatter())
    log.update_console_formatter(ConsoleFormatSettings.ALL)
    log.info('Does it show less C')
    log.debug('Current console formatter after C: %s', log.get_console_formatter())
    print(f"*** Any handler is minimum level ERROR: {log.any_handler_is_minimum_level(LoggingLevel.ERROR)}")
    print(f"*** Any handler is minimum level DEBUG: {log.any_handler_is_minimum_level(LoggingLevel.DEBUG)}")


if __name__ == "__main__":
@@ -1 +0,0 @@
"""Unit tests for check_handling module."""
@@ -1,336 +0,0 @@
|
||||
"""
|
||||
Unit tests for regex_constants module.
|
||||
|
||||
Tests all regex patterns defined in the check_handling.regex_constants module.
|
||||
"""
|
||||
|
||||
import re
|
||||
import pytest
|
||||
from corelibs.check_handling.regex_constants import (
|
||||
compile_re,
|
||||
EMAIL_BASIC_REGEX,
|
||||
DOMAIN_WITH_LOCALHOST_REGEX,
|
||||
DOMAIN_WITH_LOCALHOST_PORT_REGEX,
|
||||
DOMAIN_REGEX,
|
||||
)
|
||||
|
||||
|
||||
class TestCompileRe:
|
||||
"""Test cases for the compile_re function."""
|
||||
|
||||
def test_compile_re_returns_pattern(self) -> None:
|
||||
"""Test that compile_re returns a compiled regex Pattern object."""
|
||||
pattern = compile_re(r"test")
|
||||
assert isinstance(pattern, re.Pattern)
|
||||
|
||||
def test_compile_re_with_verbose_flag(self) -> None:
|
||||
"""Test that compile_re compiles with VERBOSE flag."""
|
||||
# Verbose mode allows whitespace and comments in regex
|
||||
verbose_regex = r"""
|
||||
\d+ # digits
|
||||
\s+ # whitespace
|
||||
"""
|
||||
pattern = compile_re(verbose_regex)
|
||||
assert pattern.match("123 ")
|
||||
assert not pattern.match("abc")
|
||||
|
||||
def test_compile_re_simple_pattern(self) -> None:
|
||||
"""Test compile_re with a simple pattern."""
|
||||
pattern = compile_re(r"^\d{3}$")
|
||||
assert pattern.match("123")
|
||||
assert not pattern.match("12")
|
||||
assert not pattern.match("1234")


class TestEmailBasicRegex:
    """Test cases for EMAIL_BASIC_REGEX pattern."""

    @pytest.fixture
    def email_pattern(self) -> re.Pattern[str]:
        """Fixture that returns compiled email regex pattern."""
        return compile_re(EMAIL_BASIC_REGEX)

    @pytest.mark.parametrize("valid_email", [
        "user@example.com",
        "test.user@example.com",
        "user+tag@example.co.uk",
        "first.last@subdomain.example.com",
        "user123@test-domain.com",
        "a@example.com",
        "user_name@example.com",
        "user-name@example.com",
        "user@sub.domain.example.com",
        "test!#$%&'*+-/=?^_`{|}~@example.com",
        "1234567890@example.com",
        "user@example-domain.com",
        "user@domain.co",
        # Regex allows these (even if not strictly RFC compliant):
        "user.@example.com", # ends with dot before @
        "user..name@example.com", # consecutive dots in local part
    ])
    def test_valid_emails(
        self, email_pattern: re.Pattern[str], valid_email: str
    ) -> None:
        """Test that valid email addresses match the pattern."""
        assert email_pattern.match(valid_email), (
            f"Failed to match valid email: {valid_email}"
        )

    @pytest.mark.parametrize("invalid_email", [
        "", # empty string
        "@example.com", # missing local part
        "user@", # missing domain
        "user", # no @ symbol
        "user@.com", # domain starts with dot
        "user@domain", # no TLD
        "user @example.com", # space in local part
        "user@exam ple.com", # space in domain
        ".user@example.com", # starts with dot
        "user@-example.com", # domain starts with hyphen
        "user@example-.com", # domain part ends with hyphen
        "user@example.c", # TLD too short (1 char)
        "user@example.toolong", # TLD too long (>6 chars)
        "user@@example.com", # double @
        "user@example@com", # multiple @
        "user@.example.com", # domain starts with dot
        "user@example.com.", # ends with dot
        "user@123.456.789.012", # numeric TLD not allowed
    ])
    def test_invalid_emails(
        self, email_pattern: re.Pattern[str], invalid_email: str
    ) -> None:
        """Test that invalid email addresses do not match the pattern."""
        assert not email_pattern.match(invalid_email), (
            f"Incorrectly matched invalid email: {invalid_email}"
        )

    def test_email_max_local_part_length(
        self, email_pattern: re.Pattern[str]
    ) -> None:
        """Test email with maximum local part length (64 characters)."""
        # Local part can be up to 64 chars (first char + 63 more)
        local_part = "a" * 64
        email = f"{local_part}@example.com"
        assert email_pattern.match(email)

    def test_email_exceeds_local_part_length(
        self, email_pattern: re.Pattern[str]
    ) -> None:
        """Test email exceeding maximum local part length."""
        # 65 characters should not match
        local_part = "a" * 65
        email = f"{local_part}@example.com"
        assert not email_pattern.match(email)


class TestDomainWithLocalhostRegex:
    """Test cases for DOMAIN_WITH_LOCALHOST_REGEX pattern."""

    @pytest.fixture
    def domain_localhost_pattern(self) -> re.Pattern[str]:
        """Fixture that returns compiled domain with localhost regex pattern."""
        return compile_re(DOMAIN_WITH_LOCALHOST_REGEX)

    @pytest.mark.parametrize("valid_domain", [
        "localhost",
        "example.com",
        "subdomain.example.com",
        "sub.domain.example.com",
        "test-domain.com",
        "example.co.uk",
        "a.com",
        "test123.example.com",
        "my-site.example.org",
        "multi.level.subdomain.example.com",
    ])
    def test_valid_domains(
        self, domain_localhost_pattern: re.Pattern[str], valid_domain: str
    ) -> None:
        """Test that valid domains (including localhost) match the pattern."""
        assert domain_localhost_pattern.match(valid_domain), (
            f"Failed to match valid domain: {valid_domain}"
        )

    @pytest.mark.parametrize("invalid_domain", [
        "", # empty string
        "example", # no TLD
        "-example.com", # starts with hyphen
        "example-.com", # ends with hyphen
        ".example.com", # starts with dot
        "example.com.", # ends with dot
        "example..com", # consecutive dots
        "exam ple.com", # space in domain
        "example.c", # TLD too short
        "localhost:8080", # port not allowed in this pattern
        "example.com:8080", # port not allowed in this pattern
        "@example.com", # invalid character
        "example@com", # invalid character
    ])
    def test_invalid_domains(
        self, domain_localhost_pattern: re.Pattern[str], invalid_domain: str
    ) -> None:
        """Test that invalid domains do not match the pattern."""
        assert not domain_localhost_pattern.match(invalid_domain), (
            f"Incorrectly matched invalid domain: {invalid_domain}"
        )


class TestDomainWithLocalhostPortRegex:
    """Test cases for DOMAIN_WITH_LOCALHOST_PORT_REGEX pattern."""

    @pytest.fixture
    def domain_localhost_port_pattern(self) -> re.Pattern[str]:
        """Fixture that returns compiled domain and localhost with port pattern."""
        return compile_re(DOMAIN_WITH_LOCALHOST_PORT_REGEX)

    @pytest.mark.parametrize("valid_domain", [
        "localhost",
        "localhost:8080",
        "localhost:3000",
        "localhost:80",
        "localhost:443",
        "localhost:65535",
        "example.com",
        "example.com:8080",
        "subdomain.example.com:3000",
        "test-domain.com:443",
        "example.co.uk",
        "example.co.uk:8000",
        "a.com:1",
        "multi.level.subdomain.example.com:9999",
    ])
    def test_valid_domains_with_port(
        self, domain_localhost_port_pattern: re.Pattern[str], valid_domain: str
    ) -> None:
        """Test that valid domains with optional ports match the pattern."""
        assert domain_localhost_port_pattern.match(valid_domain), (
            f"Failed to match valid domain: {valid_domain}"
        )

    @pytest.mark.parametrize("invalid_domain", [
        "", # empty string
        "example", # no TLD
        "-example.com", # starts with hyphen
        "example-.com", # ends with hyphen
        ".example.com", # starts with dot
        "example.com.", # ends with dot
        "localhost:", # port without number
        "example.com:", # port without number
        "example.com:abc", # non-numeric port
        "example.com: 8080", # space before port
        "example.com:80 80", # space in port
        "exam ple.com", # space in domain
        "localhost :8080", # space before colon
    ])
    def test_invalid_domains_with_port(
        self,
        domain_localhost_port_pattern: re.Pattern[str],
        invalid_domain: str,
    ) -> None:
        """Test that invalid domains do not match the pattern."""
        assert not domain_localhost_port_pattern.match(invalid_domain), (
            f"Incorrectly matched invalid domain: {invalid_domain}"
        )

    def test_large_port_number(
        self, domain_localhost_port_pattern: re.Pattern[str]
    ) -> None:
        """Test domain with large port numbers."""
        assert domain_localhost_port_pattern.match("example.com:65535")
        # Regex doesn't validate port range
        assert domain_localhost_port_pattern.match("example.com:99999")
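
Since the pattern accepts out-of-range values such as :99999, the numeric range presumably has to be validated outside the regex. A minimal sketch, assuming the caller isolates the digits after the last colon (is_valid_port is a hypothetical helper, not part of corelibs):

def is_valid_port(port_str: str) -> bool:
    # The regex only guarantees digits; the 1-65535 range must be checked here.
    return port_str.isdigit() and 1 <= int(port_str) <= 65535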


class TestDomainRegex:
    """Test cases for DOMAIN_REGEX pattern (no localhost)."""

    @pytest.fixture
    def domain_pattern(self) -> re.Pattern[str]:
        """Fixture that returns compiled domain regex pattern."""
        return compile_re(DOMAIN_REGEX)

    @pytest.mark.parametrize("valid_domain", [
        "example.com",
        "subdomain.example.com",
        "sub.domain.example.com",
        "test-domain.com",
        "example.co.uk",
        "a.com",
        "test123.example.com",
        "my-site.example.org",
        "multi.level.subdomain.example.com",
        "example.co",
    ])
    def test_valid_domains_no_localhost(
        self, domain_pattern: re.Pattern[str], valid_domain: str
    ) -> None:
        """Test that valid domains match the pattern."""
        assert domain_pattern.match(valid_domain), (
            f"Failed to match valid domain: {valid_domain}"
        )

    @pytest.mark.parametrize("invalid_domain", [
        "", # empty string
        "localhost", # localhost not allowed
        "example", # no TLD
        "-example.com", # starts with hyphen
        "example-.com", # ends with hyphen
        ".example.com", # starts with dot
        "example.com.", # ends with dot
        "example..com", # consecutive dots
        "exam ple.com", # space in domain
        "example.c", # TLD too short
        "example.com:8080", # port not allowed
        "@example.com", # invalid character
        "example@com", # invalid character
    ])
    def test_invalid_domains_no_localhost(
        self, domain_pattern: re.Pattern[str], invalid_domain: str
    ) -> None:
        """Test that invalid domains do not match the pattern."""
        assert not domain_pattern.match(invalid_domain), (
            f"Incorrectly matched invalid domain: {invalid_domain}"
        )

    def test_localhost_not_allowed(
        self, domain_pattern: re.Pattern[str]
    ) -> None:
        """Test that localhost is explicitly not allowed in DOMAIN_REGEX."""
        assert not domain_pattern.match("localhost")


class TestRegexPatternConsistency:
    """Test cases for consistency across regex patterns."""

    def test_all_patterns_compile(self) -> None:
        """Test that all regex patterns can be compiled without errors."""
        patterns = [
            EMAIL_BASIC_REGEX,
            DOMAIN_WITH_LOCALHOST_REGEX,
            DOMAIN_WITH_LOCALHOST_PORT_REGEX,
            DOMAIN_REGEX,
        ]
        for pattern in patterns:
            compiled = compile_re(pattern)
            assert isinstance(compiled, re.Pattern)

    def test_domain_patterns_are_strings(self) -> None:
        """Test that all regex constants are strings."""
        assert isinstance(EMAIL_BASIC_REGEX, str)
        assert isinstance(DOMAIN_WITH_LOCALHOST_REGEX, str)
        assert isinstance(DOMAIN_WITH_LOCALHOST_PORT_REGEX, str)
        assert isinstance(DOMAIN_REGEX, str)

    def test_domain_patterns_hierarchy(self) -> None:
        """Test that domain patterns follow expected hierarchy."""
        # DOMAIN_WITH_LOCALHOST_PORT_REGEX should accept everything
        # that DOMAIN_WITH_LOCALHOST_REGEX accepts
        domain_localhost = compile_re(DOMAIN_WITH_LOCALHOST_REGEX)
        domain_localhost_port = compile_re(DOMAIN_WITH_LOCALHOST_PORT_REGEX)

        test_cases = ["example.com", "subdomain.example.com", "localhost"]
        for test_case in test_cases:
            if domain_localhost.match(test_case):
                assert domain_localhost_port.match(test_case), (
                    f"{test_case} should match both patterns"
                )
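
The same subset relation presumably holds one level down: anything DOMAIN_REGEX accepts should also pass DOMAIN_WITH_LOCALHOST_REGEX. A hypothetical companion test in the same style (not part of this diff):

def test_domain_subset_of_domain_with_localhost() -> None:
    # Assumption: DOMAIN_REGEX is the strictest pattern in the family.
    domain = compile_re(DOMAIN_REGEX)
    domain_localhost = compile_re(DOMAIN_WITH_LOCALHOST_REGEX)
    for candidate in ["example.com", "sub.example.org", "a.com"]:
        if domain.match(candidate):
            assert domain_localhost.match(candidate)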

@@ -16,7 +16,7 @@ class TestSettingsLoaderInit:

    def test_init_with_valid_config_file(self, tmp_path: Path):
        """Test initialization with a valid config file"""
        config_file = tmp_path / "test.ini"
        config_file = tmp_path.joinpath("test.ini")
        config_file.write_text("[Section]\nkey=value\n")

        loader = SettingsLoader(
@@ -35,7 +35,7 @@ class TestSettingsLoaderInit:

    def test_init_with_missing_config_file(self, tmp_path: Path):
        """Test initialization with missing config file"""
        config_file = tmp_path / "missing.ini"
        config_file = tmp_path.joinpath("missing.ini")

        loader = SettingsLoader(
            args={},
@@ -60,7 +60,7 @@ class TestSettingsLoaderInit:

    def test_init_with_log(self, tmp_path: Path):
        """Test initialization with Log object"""
        config_file = tmp_path / "test.ini"
        config_file = tmp_path.joinpath("test.ini")
        config_file.write_text("[Section]\nkey=value\n")
        mock_log = Mock(spec=Log)

@@ -80,7 +80,7 @@ class TestLoadSettings:

    def test_load_settings_basic(self, tmp_path: Path):
        """Test loading basic settings without validation"""
        config_file = tmp_path / "test.ini"
        config_file = tmp_path.joinpath("test.ini")
        config_file.write_text("[TestSection]\nkey1=value1\nkey2=value2\n")

        loader = SettingsLoader(args={}, config_file=config_file)
@@ -90,7 +90,7 @@ class TestLoadSettings:

    def test_load_settings_with_missing_section(self, tmp_path: Path):
        """Test loading settings with missing section"""
        config_file = tmp_path / "test.ini"
        config_file = tmp_path.joinpath("test.ini")
        config_file.write_text("[OtherSection]\nkey=value\n")

        loader = SettingsLoader(args={}, config_file=config_file)
@@ -100,7 +100,7 @@ class TestLoadSettings:

    def test_load_settings_allow_not_exist(self, tmp_path: Path):
        """Test loading settings with allow_not_exist flag"""
        config_file = tmp_path / "test.ini"
        config_file = tmp_path.joinpath("test.ini")
        config_file.write_text("[OtherSection]\nkey=value\n")

        loader = SettingsLoader(args={}, config_file=config_file)
@@ -110,7 +110,7 @@ class TestLoadSettings:

    def test_load_settings_mandatory_field_present(self, tmp_path: Path):
        """Test mandatory field validation when field is present"""
        config_file = tmp_path / "test.ini"
        config_file = tmp_path.joinpath("test.ini")
        config_file.write_text("[TestSection]\nrequired_field=value\n")

        loader = SettingsLoader(args={}, config_file=config_file)
@@ -123,7 +123,7 @@ class TestLoadSettings:

    def test_load_settings_mandatory_field_missing(self, tmp_path: Path):
        """Test mandatory field validation when field is missing"""
        config_file = tmp_path / "test.ini"
        config_file = tmp_path.joinpath("test.ini")
        config_file.write_text("[TestSection]\nother_field=value\n")

        loader = SettingsLoader(args={}, config_file=config_file)
@@ -136,7 +136,7 @@ class TestLoadSettings:

    def test_load_settings_mandatory_field_empty(self, tmp_path: Path):
        """Test mandatory field validation when field is empty"""
        config_file = tmp_path / "test.ini"
        config_file = tmp_path.joinpath("test.ini")
        config_file.write_text("[TestSection]\nrequired_field=\n")

        loader = SettingsLoader(args={}, config_file=config_file)
@@ -149,7 +149,7 @@ class TestLoadSettings:

    def test_load_settings_with_split(self, tmp_path: Path):
        """Test splitting values into lists"""
        config_file = tmp_path / "test.ini"
        config_file = tmp_path.joinpath("test.ini")
        config_file.write_text("[TestSection]\nlist_field=a,b,c,d\n")

        loader = SettingsLoader(args={}, config_file=config_file)
@@ -162,7 +162,7 @@ class TestLoadSettings:

    def test_load_settings_with_custom_split_char(self, tmp_path: Path):
        """Test splitting with custom delimiter"""
        config_file = tmp_path / "test.ini"
        config_file = tmp_path.joinpath("test.ini")
        config_file.write_text("[TestSection]\nlist_field=a|b|c|d\n")

        loader = SettingsLoader(args={}, config_file=config_file)
@@ -175,7 +175,7 @@ class TestLoadSettings:

    def test_load_settings_split_removes_spaces(self, tmp_path: Path):
        """Test that split removes spaces from values"""
        config_file = tmp_path / "test.ini"
        config_file = tmp_path.joinpath("test.ini")
        config_file.write_text("[TestSection]\nlist_field=a, b , c , d\n")

        loader = SettingsLoader(args={}, config_file=config_file)
@@ -188,7 +188,7 @@ class TestLoadSettings:

    def test_load_settings_empty_split_char_fallback(self, tmp_path: Path, capsys: CaptureFixture[str]):
        """Test fallback to default split char when empty"""
        config_file = tmp_path / "test.ini"
        config_file = tmp_path.joinpath("test.ini")
        config_file.write_text("[TestSection]\nlist_field=a,b,c\n")

        loader = SettingsLoader(args={}, config_file=config_file)
@@ -201,9 +201,22 @@ class TestLoadSettings:
        captured = capsys.readouterr()
        assert "fallback to:" in captured.out

    def test_load_settings_split_empty_value(self, tmp_path: Path):
        """Test that split on empty value results in empty list"""
        config_file = tmp_path.joinpath("test.ini")
        config_file.write_text("[TestSection]\nlist_field=\n")

        loader = SettingsLoader(args={}, config_file=config_file)
        result = loader.load_settings(
            "TestSection",
            {"list_field": ["split:,"]}
        )

        assert result["list_field"] == []

    def test_load_settings_convert_to_int(self, tmp_path: Path):
        """Test converting values to int"""
        config_file = tmp_path / "test.ini"
        config_file = tmp_path.joinpath("test.ini")
        config_file.write_text("[TestSection]\nnumber=123\n")

        loader = SettingsLoader(args={}, config_file=config_file)
@@ -217,7 +230,7 @@ class TestLoadSettings:

    def test_load_settings_convert_to_float(self, tmp_path: Path):
        """Test converting values to float"""
        config_file = tmp_path / "test.ini"
        config_file = tmp_path.joinpath("test.ini")
        config_file.write_text("[TestSection]\nnumber=123.45\n")

        loader = SettingsLoader(args={}, config_file=config_file)
@@ -231,7 +244,7 @@ class TestLoadSettings:

    def test_load_settings_convert_to_bool_true(self, tmp_path: Path):
        """Test converting values to boolean True"""
        config_file = tmp_path / "test.ini"
        config_file = tmp_path.joinpath("test.ini")
        config_file.write_text("[TestSection]\nflag1=true\nflag2=True\n")

        loader = SettingsLoader(args={}, config_file=config_file)
@@ -245,7 +258,7 @@ class TestLoadSettings:

    def test_load_settings_convert_to_bool_false(self, tmp_path: Path):
        """Test converting values to boolean False"""
        config_file = tmp_path / "test.ini"
        config_file = tmp_path.joinpath("test.ini")
        config_file.write_text("[TestSection]\nflag1=false\nflag2=False\n")

        loader = SettingsLoader(args={}, config_file=config_file)
@@ -259,7 +272,7 @@ class TestLoadSettings:

    def test_load_settings_convert_invalid_type(self, tmp_path: Path):
        """Test converting with invalid type raises error"""
        config_file = tmp_path / "test.ini"
        config_file = tmp_path.joinpath("test.ini")
        config_file.write_text("[TestSection]\nvalue=test\n")

        loader = SettingsLoader(args={}, config_file=config_file)
@@ -272,7 +285,7 @@ class TestLoadSettings:

    def test_load_settings_empty_set_to_none(self, tmp_path: Path):
        """Test setting empty values to None"""
        config_file = tmp_path / "test.ini"
        config_file = tmp_path.joinpath("test.ini")
        config_file.write_text("[TestSection]\nother=value\n")

        loader = SettingsLoader(args={}, config_file=config_file)
@@ -285,7 +298,7 @@ class TestLoadSettings:

    def test_load_settings_empty_set_to_custom_value(self, tmp_path: Path):
        """Test setting empty values to custom value"""
        config_file = tmp_path / "test.ini"
        config_file = tmp_path.joinpath("test.ini")
        config_file.write_text("[TestSection]\nother=value\n")

        loader = SettingsLoader(args={}, config_file=config_file)
@@ -298,7 +311,7 @@ class TestLoadSettings:

    def test_load_settings_matching_valid(self, tmp_path: Path):
        """Test matching validation with valid value"""
        config_file = tmp_path / "test.ini"
        config_file = tmp_path.joinpath("test.ini")
        config_file.write_text("[TestSection]\nmode=production\n")

        loader = SettingsLoader(args={}, config_file=config_file)
@@ -311,7 +324,7 @@ class TestLoadSettings:

    def test_load_settings_matching_invalid(self, tmp_path: Path):
        """Test matching validation with invalid value"""
        config_file = tmp_path / "test.ini"
        config_file = tmp_path.joinpath("test.ini")
        config_file.write_text("[TestSection]\nmode=invalid\n")

        loader = SettingsLoader(args={}, config_file=config_file)
@@ -324,7 +337,7 @@ class TestLoadSettings:

    def test_load_settings_in_valid(self, tmp_path: Path):
        """Test 'in' validation with valid value"""
        config_file = tmp_path / "test.ini"
        config_file = tmp_path.joinpath("test.ini")
        config_file.write_text("[TestSection]\nallowed=a,b,c\nvalue=b\n")

        loader = SettingsLoader(args={}, config_file=config_file)
@@ -340,7 +353,7 @@ class TestLoadSettings:

    def test_load_settings_in_invalid(self, tmp_path: Path):
        """Test 'in' validation with invalid value"""
        config_file = tmp_path / "test.ini"
        config_file = tmp_path.joinpath("test.ini")
        config_file.write_text("[TestSection]\nallowed=a,b,c\nvalue=d\n")

        loader = SettingsLoader(args={}, config_file=config_file)
@@ -356,7 +369,7 @@ class TestLoadSettings:

    def test_load_settings_in_missing_target(self, tmp_path: Path):
        """Test 'in' validation with missing target"""
        config_file = tmp_path / "test.ini"
        config_file = tmp_path.joinpath("test.ini")
        config_file.write_text("[TestSection]\nvalue=a\n")

        loader = SettingsLoader(args={}, config_file=config_file)
@@ -369,7 +382,7 @@ class TestLoadSettings:

    def test_load_settings_length_exact(self, tmp_path: Path):
        """Test length validation with exact match"""
        config_file = tmp_path / "test.ini"
        config_file = tmp_path.joinpath("test.ini")
        config_file.write_text("[TestSection]\nvalue=test\n")

        loader = SettingsLoader(args={}, config_file=config_file)
@@ -382,7 +395,7 @@ class TestLoadSettings:

    def test_load_settings_length_exact_invalid(self, tmp_path: Path):
        """Test length validation with exact match failure"""
        config_file = tmp_path / "test.ini"
        config_file = tmp_path.joinpath("test.ini")
        config_file.write_text("[TestSection]\nvalue=test\n")

        loader = SettingsLoader(args={}, config_file=config_file)
@@ -395,7 +408,7 @@ class TestLoadSettings:

    def test_load_settings_length_range(self, tmp_path: Path):
        """Test length validation with range"""
        config_file = tmp_path / "test.ini"
        config_file = tmp_path.joinpath("test.ini")
        config_file.write_text("[TestSection]\nvalue=testing\n")

        loader = SettingsLoader(args={}, config_file=config_file)
@@ -408,7 +421,7 @@ class TestLoadSettings:

    def test_load_settings_length_min_only(self, tmp_path: Path):
        """Test length validation with minimum only"""
        config_file = tmp_path / "test.ini"
        config_file = tmp_path.joinpath("test.ini")
        config_file.write_text("[TestSection]\nvalue=testing\n")

        loader = SettingsLoader(args={}, config_file=config_file)
@@ -421,7 +434,7 @@ class TestLoadSettings:

    def test_load_settings_length_max_only(self, tmp_path: Path):
        """Test length validation with maximum only"""
        config_file = tmp_path / "test.ini"
        config_file = tmp_path.joinpath("test.ini")
        config_file.write_text("[TestSection]\nvalue=test\n")

        loader = SettingsLoader(args={}, config_file=config_file)
@@ -434,7 +447,7 @@ class TestLoadSettings:

    def test_load_settings_range_valid(self, tmp_path: Path):
        """Test range validation with valid value"""
        config_file = tmp_path / "test.ini"
        config_file = tmp_path.joinpath("test.ini")
        config_file.write_text("[TestSection]\nnumber=25\n")

        loader = SettingsLoader(args={}, config_file=config_file)
@@ -447,7 +460,7 @@ class TestLoadSettings:

    def test_load_settings_range_invalid(self, tmp_path: Path):
        """Test range validation with invalid value"""
        config_file = tmp_path / "test.ini"
        config_file = tmp_path.joinpath("test.ini")
        config_file.write_text("[TestSection]\nnumber=100\n")

        loader = SettingsLoader(args={}, config_file=config_file)
@@ -460,7 +473,7 @@ class TestLoadSettings:

    def test_load_settings_check_int_valid(self, tmp_path: Path):
        """Test check:int with valid integer"""
        config_file = tmp_path / "test.ini"
        config_file = tmp_path.joinpath("test.ini")
        config_file.write_text("[TestSection]\nnumber=12345\n")

        loader = SettingsLoader(args={}, config_file=config_file)
@@ -473,7 +486,7 @@ class TestLoadSettings:

    def test_load_settings_check_int_cleanup(self, tmp_path: Path):
        """Test check:int with cleanup"""
        config_file = tmp_path / "test.ini"
        config_file = tmp_path.joinpath("test.ini")
        config_file.write_text("[TestSection]\nnumber=12a34b5\n")

        loader = SettingsLoader(args={}, config_file=config_file)
@@ -486,7 +499,7 @@ class TestLoadSettings:

    def test_load_settings_check_email_valid(self, tmp_path: Path):
        """Test check:string.email.basic with valid email"""
        config_file = tmp_path / "test.ini"
        config_file = tmp_path.joinpath("test.ini")
        config_file.write_text("[TestSection]\nemail=test@example.com\n")

        loader = SettingsLoader(args={}, config_file=config_file)
@@ -499,7 +512,7 @@ class TestLoadSettings:

    def test_load_settings_check_email_invalid(self, tmp_path: Path):
        """Test check:string.email.basic with invalid email"""
        config_file = tmp_path / "test.ini"
        config_file = tmp_path.joinpath("test.ini")
        config_file.write_text("[TestSection]\nemail=not-an-email\n")

        loader = SettingsLoader(args={}, config_file=config_file)
@@ -512,7 +525,7 @@ class TestLoadSettings:

    def test_load_settings_args_override(self, tmp_path: Path, capsys: CaptureFixture[str]):
        """Test command line arguments override config values"""
        config_file = tmp_path / "test.ini"
        config_file = tmp_path.joinpath("test.ini")
        config_file.write_text("[TestSection]\nvalue=config_value\n")

        loader = SettingsLoader(
@@ -530,7 +543,7 @@ class TestLoadSettings:

    def test_load_settings_args_no_flag(self, tmp_path: Path, capsys: CaptureFixture[str]):
        """Test default behavior (no args_override:yes) with list argument that has split"""
        config_file = tmp_path / "test.ini"
        config_file = tmp_path.joinpath("test.ini")
        config_file.write_text("[TestSection]\nvalue=a,b,c\n")

        loader = SettingsLoader(
@@ -550,7 +563,7 @@ class TestLoadSettings:

    def test_load_settings_args_list_no_split(self, tmp_path: Path, capsys: CaptureFixture[str]):
        """Test that list arguments without split entry are skipped"""
        config_file = tmp_path / "test.ini"
        config_file = tmp_path.joinpath("test.ini")
        config_file.write_text("[TestSection]\nvalue=config_value\n")

        loader = SettingsLoader(
@@ -570,7 +583,7 @@ class TestLoadSettings:

    def test_load_settings_args_list_with_split(self, tmp_path: Path, capsys: CaptureFixture[str]):
        """Test that list arguments with split entry and args_override:yes are applied"""
        config_file = tmp_path / "test.ini"
        config_file = tmp_path.joinpath("test.ini")
        config_file.write_text("[TestSection]\nvalue=a,b,c\n")

        loader = SettingsLoader(
@@ -589,7 +602,7 @@ class TestLoadSettings:

    def test_load_settings_args_no_with_mandatory(self, tmp_path: Path, capsys: CaptureFixture[str]):
        """Test default behavior (no args_override:yes) with mandatory field and list args with split"""
        config_file = tmp_path / "test.ini"
        config_file = tmp_path.joinpath("test.ini")
        config_file.write_text("[TestSection]\nvalue=config1,config2\n")

        loader = SettingsLoader(
@@ -609,7 +622,7 @@ class TestLoadSettings:

    def test_load_settings_args_no_with_mandatory_valid(self, tmp_path: Path, capsys: CaptureFixture[str]):
        """Test default behavior with string args (always overrides due to current logic)"""
        config_file = tmp_path / "test.ini"
        config_file = tmp_path.joinpath("test.ini")
        config_file.write_text("[TestSection]\nvalue=config_value\n")

        loader = SettingsLoader(
@@ -628,7 +641,7 @@ class TestLoadSettings:

    def test_load_settings_args_string_no_split(self, tmp_path: Path, capsys: CaptureFixture[str]):
        """Test that string arguments with args_override:yes work normally"""
        config_file = tmp_path / "test.ini"
        config_file = tmp_path.joinpath("test.ini")
        config_file.write_text("[TestSection]\nvalue=config_value\n")

        loader = SettingsLoader(
@@ -647,7 +660,7 @@ class TestLoadSettings:

    def test_load_settings_no_config_file_with_args(self, tmp_path: Path):
        """Test loading settings without config file but with mandatory args"""
        config_file = tmp_path / "missing.ini"
        config_file = tmp_path.joinpath("missing.ini")

        loader = SettingsLoader(
            args={"required": "value"},
@@ -662,7 +675,7 @@ class TestLoadSettings:

    def test_load_settings_no_config_file_missing_args(self, tmp_path: Path):
        """Test loading settings without config file and missing args"""
        config_file = tmp_path / "missing.ini"
        config_file = tmp_path.joinpath("missing.ini")

        loader = SettingsLoader(args={}, config_file=config_file)

@@ -674,7 +687,7 @@ class TestLoadSettings:

    def test_load_settings_check_list_with_split(self, tmp_path: Path):
        """Test check validation with list values"""
        config_file = tmp_path / "test.ini"
        config_file = tmp_path.joinpath("test.ini")
        config_file.write_text("[TestSection]\nlist=abc,def,ghi\n")

        loader = SettingsLoader(args={}, config_file=config_file)
@@ -687,7 +700,7 @@ class TestLoadSettings:

    def test_load_settings_check_list_cleanup(self, tmp_path: Path):
        """Test check validation cleans up list values"""
        config_file = tmp_path / "test.ini"
        config_file = tmp_path.joinpath("test.ini")
        config_file.write_text("[TestSection]\nlist=ab-c,de_f,gh!i\n")

        loader = SettingsLoader(args={}, config_file=config_file)
@@ -700,7 +713,7 @@ class TestLoadSettings:

    def test_load_settings_invalid_check_type(self, tmp_path: Path):
        """Test with invalid check type"""
        config_file = tmp_path / "test.ini"
        config_file = tmp_path.joinpath("test.ini")
        config_file.write_text("[TestSection]\nvalue=test\n")

        loader = SettingsLoader(args={}, config_file=config_file)
@@ -717,7 +730,7 @@ class TestComplexScenarios:

    def test_complex_validation_scenario(self, tmp_path: Path):
        """Test complex scenario with multiple validations"""
        config_file = tmp_path / "test.ini"
        config_file = tmp_path.joinpath("test.ini")
        config_file.write_text(
            "[Production]\n"
            "environment=production\n"
@@ -758,7 +771,7 @@ class TestComplexScenarios:

    def test_email_list_validation(self, tmp_path: Path):
        """Test email list with validation"""
        config_file = tmp_path / "test.ini"
        config_file = tmp_path.joinpath("test.ini")
        config_file.write_text(
            "[EmailConfig]\n"
            "emails=test@example.com,admin@domain.org,user+tag@site.co.uk\n"
@@ -775,7 +788,7 @@ class TestComplexScenarios:

    def test_mixed_args_and_config(self, tmp_path: Path):
        """Test mixing command line args and config file"""
        config_file = tmp_path / "test.ini"
        config_file = tmp_path.joinpath("test.ini")
        config_file.write_text(
            "[Settings]\n"
            "value1=config_value1\n"
@@ -796,7 +809,7 @@ class TestComplexScenarios:

    def test_multiple_check_types(self, tmp_path: Path):
        """Test multiple different check types"""
        config_file = tmp_path / "test.ini"
        config_file = tmp_path.joinpath("test.ini")
        config_file.write_text(
            "[Checks]\n"
            "numbers=123,456,789\n"
@@ -823,7 +836,7 @@ class TestComplexScenarios:

    def test_args_no_and_list_skip_combination(self, tmp_path: Path, capsys: CaptureFixture[str]):
        """Test combination of args_override:yes flag and list argument skip behavior"""
        config_file = tmp_path / "test.ini"
        config_file = tmp_path.joinpath("test.ini")
        config_file.write_text(
            "[Settings]\n"
            "no_override=a,b,c\n"

461
tests/unit/db_handling/test_sql_main.py
Normal file
@@ -0,0 +1,461 @@
"""
PyTest: db_handling/sql_main
Tests for SQLMain class - Main SQL interface wrapper

Note: Pylance warnings about "Redefining name from outer scope" in fixtures are expected.
This is standard pytest fixture behavior where fixture parameters shadow fixture definitions.
"""
# pylint: disable=redefined-outer-name,too-many-public-methods,protected-access
# pyright: reportUnknownParameterType=false, reportUnknownArgumentType=false
# pyright: reportMissingParameterType=false, reportUnknownVariableType=false
# pyright: reportArgumentType=false, reportGeneralTypeIssues=false

from pathlib import Path
from typing import Generator
from unittest.mock import MagicMock, patch
import pytest
from corelibs.db_handling.sql_main import SQLMain, IDENT_SPLIT_CHARACTER
from corelibs.db_handling.sqlite_io import SQLiteIO


# Test fixtures
@pytest.fixture
def mock_logger() -> MagicMock:
    """Create a mock logger for testing"""
    logger = MagicMock()
    logger.debug = MagicMock()
    logger.info = MagicMock()
    logger.warning = MagicMock()
    logger.error = MagicMock()
    return logger


@pytest.fixture
def temp_db_path(tmp_path: Path) -> Path:
    """Create a temporary database file path"""
    return tmp_path / "test_database.db"


@pytest.fixture
def mock_sqlite_io() -> Generator[MagicMock, None, None]:
    """Create a mock SQLiteIO instance"""
    mock_io = MagicMock(spec=SQLiteIO)
    mock_io.conn = MagicMock()
    mock_io.db_connected = MagicMock(return_value=True)
    mock_io.db_close = MagicMock()
    mock_io.execute_query = MagicMock(return_value=[])
    yield mock_io


# Test constant
class TestConstants:
    """Tests for module-level constants"""

    def test_ident_split_character(self):
        """Test that IDENT_SPLIT_CHARACTER is defined correctly"""
        assert IDENT_SPLIT_CHARACTER == ':'
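
IDENT_SPLIT_CHARACTER separates the backend name from its connection string in db_ident. A sketch of how that split presumably works (an assumption for illustration; the actual parsing lives inside SQLMain):

db_ident = 'sqlite:/tmp/app.db'
db_target, _, db_path = db_ident.partition(IDENT_SPLIT_CHARACTER)
# db_target == 'sqlite', db_path == '/tmp/app.db'; with no colon at all the
# path part comes back empty, consistent with the ValueError exercised in
# test_initialization_malformed_db_ident below.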


# Test SQLMain class initialization
class TestSQLMainInit:
    """Tests for SQLMain.__init__"""

    @patch('corelibs.db_handling.sql_main.SQLiteIO')
    def test_successful_initialization_sqlite(
        self, mock_sqlite_class: MagicMock, mock_logger: MagicMock, temp_db_path: Path
    ):
        """Test successful initialization with SQLite"""
        mock_sqlite_instance = MagicMock()
        mock_sqlite_instance.conn = MagicMock()
        mock_sqlite_instance.db_connected = MagicMock(return_value=True)
        mock_sqlite_class.return_value = mock_sqlite_instance

        db_ident = f'sqlite:{temp_db_path}'
        sql_main = SQLMain(mock_logger, db_ident)

        assert sql_main.log == mock_logger
        assert sql_main.dbh == mock_sqlite_instance
        assert sql_main.db_target == 'sqlite'
        mock_sqlite_class.assert_called_once_with(mock_logger, str(temp_db_path), row_factory='Dict')

    @patch('corelibs.db_handling.sql_main.SQLiteIO')
    def test_initialization_connection_failure(self, mock_sqlite_class: MagicMock, mock_logger: MagicMock):
        """Test initialization fails when connection cannot be established"""
        mock_sqlite_instance = MagicMock()
        mock_sqlite_instance.conn = None
        mock_sqlite_instance.db_connected = MagicMock(return_value=False)
        mock_sqlite_class.return_value = mock_sqlite_instance

        db_ident = 'sqlite:/path/to/db.db'
        with pytest.raises(ValueError, match='DB Connection failed for: sqlite'):
            SQLMain(mock_logger, db_ident)

    def test_initialization_invalid_db_target(self, mock_logger: MagicMock):
        """Test initialization with unsupported database target"""
        db_ident = 'postgresql:/path/to/db'
        with pytest.raises(ValueError, match='SQL interface for postgresql is not implemented'):
            SQLMain(mock_logger, db_ident)

    def test_initialization_malformed_db_ident(self, mock_logger: MagicMock):
        """Test initialization with malformed db_ident string"""
        db_ident = 'sqlite_no_colon'
        with pytest.raises(ValueError):
            SQLMain(mock_logger, db_ident)


# Test SQLMain.connect method
class TestSQLMainConnect:
    """Tests for SQLMain.connect"""

    @patch('corelibs.db_handling.sql_main.SQLiteIO')
    def test_connect_when_already_connected(
        self, mock_sqlite_class: MagicMock, mock_logger: MagicMock, temp_db_path: Path
    ):
        """Test connect warns when already connected"""
        mock_sqlite_instance = MagicMock()
        mock_sqlite_instance.conn = MagicMock()
        mock_sqlite_instance.db_connected = MagicMock(return_value=True)
        mock_sqlite_class.return_value = mock_sqlite_instance

        db_ident = f'sqlite:{temp_db_path}'
        sql_main = SQLMain(mock_logger, db_ident)

        # Reset mock to check second call
        mock_logger.warning.reset_mock()

        # Try to connect again
        sql_main.connect(f'sqlite:{temp_db_path}')

        # Should have warned about existing connection
        mock_logger.warning.assert_called_once()
        assert 'already exists' in str(mock_logger.warning.call_args)

    @patch('corelibs.db_handling.sql_main.SQLiteIO')
    def test_connect_sqlite_success(
        self, mock_sqlite_class: MagicMock, mock_logger: MagicMock, temp_db_path: Path
    ):
        """Test successful SQLite connection"""
        mock_sqlite_instance = MagicMock()
        mock_sqlite_instance.conn = MagicMock()
        mock_sqlite_instance.db_connected = MagicMock(return_value=True)
        mock_sqlite_class.return_value = mock_sqlite_instance

        sql_main = SQLMain.__new__(SQLMain)
        sql_main.log = mock_logger
        sql_main.dbh = None
        sql_main.db_target = None

        db_ident = f'sqlite:{temp_db_path}'
        sql_main.connect(db_ident)

        assert sql_main.db_target == 'sqlite'
        assert sql_main.dbh == mock_sqlite_instance
        mock_sqlite_class.assert_called_once_with(mock_logger, str(temp_db_path), row_factory='Dict')

    def test_connect_unsupported_database(self, mock_logger: MagicMock):
        """Test connect with unsupported database type"""
        sql_main = SQLMain.__new__(SQLMain)
        sql_main.log = mock_logger
        sql_main.dbh = None
        sql_main.db_target = None

        db_ident = 'mysql:/path/to/db'
        with pytest.raises(ValueError, match='SQL interface for mysql is not implemented'):
            sql_main.connect(db_ident)

    @patch('corelibs.db_handling.sql_main.SQLiteIO')
    def test_connect_db_connection_failed(
        self, mock_sqlite_class: MagicMock, mock_logger: MagicMock, temp_db_path: Path
    ):
        """Test connect raises error when DB connection fails"""
        mock_sqlite_instance = MagicMock()
        mock_sqlite_instance.db_connected = MagicMock(return_value=False)
        mock_sqlite_class.return_value = mock_sqlite_instance

        sql_main = SQLMain.__new__(SQLMain)
        sql_main.log = mock_logger
        sql_main.dbh = None
        sql_main.db_target = None

        db_ident = f'sqlite:{temp_db_path}'
        with pytest.raises(ValueError, match='DB Connection failed for: sqlite'):
            sql_main.connect(db_ident)


# Test SQLMain.close method
class TestSQLMainClose:
    """Tests for SQLMain.close"""

    @patch('corelibs.db_handling.sql_main.SQLiteIO')
    def test_close_successful(
        self, mock_sqlite_class: MagicMock, mock_logger: MagicMock, temp_db_path: Path
    ):
        """Test successful database close"""
        mock_sqlite_instance = MagicMock()
        mock_sqlite_instance.conn = MagicMock()
        mock_sqlite_instance.db_connected = MagicMock(return_value=True)
        mock_sqlite_instance.db_close = MagicMock()
        mock_sqlite_class.return_value = mock_sqlite_instance

        db_ident = f'sqlite:{temp_db_path}'
        sql_main = SQLMain(mock_logger, db_ident)

        sql_main.close()

        mock_sqlite_instance.db_close.assert_called_once()

    @patch('corelibs.db_handling.sql_main.SQLiteIO')
    def test_close_when_not_connected(
        self, mock_sqlite_class: MagicMock, mock_logger: MagicMock, temp_db_path: Path
    ):
        """Test close when not connected does nothing"""
        mock_sqlite_instance = MagicMock()
        mock_sqlite_instance.conn = MagicMock()
        mock_sqlite_instance.db_connected = MagicMock(return_value=True)
        mock_sqlite_instance.db_close = MagicMock()
        mock_sqlite_class.return_value = mock_sqlite_instance

        db_ident = f'sqlite:{temp_db_path}'
        sql_main = SQLMain(mock_logger, db_ident)

        # Change db_connected to return False to simulate disconnection
        mock_sqlite_instance.db_connected = MagicMock(return_value=False)

        sql_main.close()

        # Should not raise error and should exit early
        assert mock_sqlite_instance.db_close.call_count == 0

    def test_close_when_dbh_is_none(self, mock_logger: MagicMock):
        """Test close when dbh is None"""
        sql_main = SQLMain.__new__(SQLMain)
        sql_main.log = mock_logger
        sql_main.dbh = None
        sql_main.db_target = 'sqlite'

        # Should not raise error
        sql_main.close()


# Test SQLMain.connected method
class TestSQLMainConnected:
    """Tests for SQLMain.connected"""

    @patch('corelibs.db_handling.sql_main.SQLiteIO')
    def test_connected_returns_true(
        self, mock_sqlite_class: MagicMock, mock_logger: MagicMock, temp_db_path: Path
    ):
        """Test connected returns True when connected"""
        mock_sqlite_instance = MagicMock()
        mock_sqlite_instance.conn = MagicMock()
        mock_sqlite_instance.db_connected = MagicMock(return_value=True)
        mock_sqlite_class.return_value = mock_sqlite_instance

        db_ident = f'sqlite:{temp_db_path}'
        sql_main = SQLMain(mock_logger, db_ident)

        assert sql_main.connected() is True
        mock_logger.warning.assert_not_called()

    @patch('corelibs.db_handling.sql_main.SQLiteIO')
    def test_connected_returns_false_when_not_connected(
        self, mock_sqlite_class: MagicMock, mock_logger: MagicMock, temp_db_path: Path
    ):
        """Test connected returns False and warns when not connected"""
        mock_sqlite_instance = MagicMock()
        mock_sqlite_instance.conn = MagicMock()
        mock_sqlite_instance.db_connected = MagicMock(return_value=True)
        mock_sqlite_class.return_value = mock_sqlite_instance

        db_ident = f'sqlite:{temp_db_path}'
        sql_main = SQLMain(mock_logger, db_ident)

        # Reset warning calls from init
        mock_logger.warning.reset_mock()

        # Change db_connected to return False to simulate disconnection
        mock_sqlite_instance.db_connected = MagicMock(return_value=False)

        assert sql_main.connected() is False
        mock_logger.warning.assert_called_once()
        assert 'No connection' in str(mock_logger.warning.call_args)

    def test_connected_returns_false_when_dbh_is_none(self, mock_logger: MagicMock):
        """Test connected returns False when dbh is None"""
        sql_main = SQLMain.__new__(SQLMain)
        sql_main.log = mock_logger
        sql_main.dbh = None
        sql_main.db_target = 'sqlite'

        assert sql_main.connected() is False
        mock_logger.warning.assert_called_once()


# Test SQLMain.process_query method
class TestSQLMainProcessQuery:
    """Tests for SQLMain.process_query"""

    @patch('corelibs.db_handling.sql_main.SQLiteIO')
    def test_process_query_success_no_params(
        self, mock_sqlite_class: MagicMock, mock_logger: MagicMock, temp_db_path: Path
    ):
        """Test successful query execution without parameters"""
        mock_sqlite_instance = MagicMock()
        mock_sqlite_instance.conn = MagicMock()
        mock_sqlite_instance.db_connected = MagicMock(return_value=True)
        expected_result = [{'id': 1, 'name': 'test'}]
        mock_sqlite_instance.execute_query = MagicMock(return_value=expected_result)
        mock_sqlite_class.return_value = mock_sqlite_instance

        db_ident = f'sqlite:{temp_db_path}'
        sql_main = SQLMain(mock_logger, db_ident)

        query = "SELECT * FROM test"
        result = sql_main.process_query(query)

        assert result == expected_result
        mock_sqlite_instance.execute_query.assert_called_once_with(query, None)

    @patch('corelibs.db_handling.sql_main.SQLiteIO')
    def test_process_query_success_with_params(
        self, mock_sqlite_class: MagicMock, mock_logger: MagicMock, temp_db_path: Path
    ):
        """Test successful query execution with parameters"""
        mock_sqlite_instance = MagicMock()
        mock_sqlite_instance.conn = MagicMock()
        mock_sqlite_instance.db_connected = MagicMock(return_value=True)
        expected_result = [{'id': 1, 'name': 'test'}]
        mock_sqlite_instance.execute_query = MagicMock(return_value=expected_result)
        mock_sqlite_class.return_value = mock_sqlite_instance

        db_ident = f'sqlite:{temp_db_path}'
        sql_main = SQLMain(mock_logger, db_ident)

        query = "SELECT * FROM test WHERE id = ?"
        params = (1,)
        result = sql_main.process_query(query, params)

        assert result == expected_result
        mock_sqlite_instance.execute_query.assert_called_once_with(query, params)

    @patch('corelibs.db_handling.sql_main.SQLiteIO')
    def test_process_query_returns_false_on_error(
        self, mock_sqlite_class: MagicMock, mock_logger: MagicMock, temp_db_path: Path
    ):
        """Test query returns False when execute_query fails"""
        mock_sqlite_instance = MagicMock()
        mock_sqlite_instance.conn = MagicMock()
        mock_sqlite_instance.db_connected = MagicMock(return_value=True)
        mock_sqlite_instance.execute_query = MagicMock(return_value=False)
        mock_sqlite_class.return_value = mock_sqlite_instance

        db_ident = f'sqlite:{temp_db_path}'
        sql_main = SQLMain(mock_logger, db_ident)

        query = "SELECT * FROM nonexistent"
        result = sql_main.process_query(query)

        assert result is False

    @patch('corelibs.db_handling.sql_main.SQLiteIO')
    def test_process_query_dbh_is_none(
        self, mock_sqlite_class: MagicMock, mock_logger: MagicMock, temp_db_path: Path
    ):
        """Test query returns False when dbh is None"""
        mock_sqlite_instance = MagicMock()
        mock_sqlite_instance.conn = MagicMock()
        mock_sqlite_instance.db_connected = MagicMock(return_value=True)
        mock_sqlite_class.return_value = mock_sqlite_instance

        db_ident = f'sqlite:{temp_db_path}'
        sql_main = SQLMain(mock_logger, db_ident)

        # Manually set dbh to None
        sql_main.dbh = None

        query = "SELECT * FROM test"
        result = sql_main.process_query(query)

        assert result is False
        mock_logger.error.assert_called_once()
        assert 'Problem connecting to db' in str(mock_logger.error.call_args)

    @patch('corelibs.db_handling.sql_main.SQLiteIO')
    def test_process_query_returns_empty_list(
        self, mock_sqlite_class: MagicMock, mock_logger: MagicMock, temp_db_path: Path
    ):
        """Test query returns empty list when no results"""
        mock_sqlite_instance = MagicMock()
        mock_sqlite_instance.conn = MagicMock()
        mock_sqlite_instance.db_connected = MagicMock(return_value=True)
        mock_sqlite_instance.execute_query = MagicMock(return_value=[])
        mock_sqlite_class.return_value = mock_sqlite_instance

        db_ident = f'sqlite:{temp_db_path}'
        sql_main = SQLMain(mock_logger, db_ident)

        query = "SELECT * FROM test WHERE 1=0"
        result = sql_main.process_query(query)

        assert result == []


# Integration-like tests
class TestSQLMainIntegration:
    """Integration-like tests for complete workflows"""

    @patch('corelibs.db_handling.sql_main.SQLiteIO')
    def test_full_workflow_connect_query_close(
        self, mock_sqlite_class: MagicMock, mock_logger: MagicMock, temp_db_path: Path
    ):
        """Test complete workflow: connect, query, close"""
        mock_sqlite_instance = MagicMock()
        mock_sqlite_instance.conn = MagicMock()
        mock_sqlite_instance.db_connected = MagicMock(return_value=True)
        mock_sqlite_instance.execute_query = MagicMock(return_value=[{'count': 5}])
        mock_sqlite_instance.db_close = MagicMock()
        mock_sqlite_class.return_value = mock_sqlite_instance

        db_ident = f'sqlite:{temp_db_path}'
        sql_main = SQLMain(mock_logger, db_ident)

        # Execute query
        result = sql_main.process_query("SELECT COUNT(*) as count FROM test")
        assert result == [{'count': 5}]

        # Check connected
        assert sql_main.connected() is True

        # Close connection
        sql_main.close()
        mock_sqlite_instance.db_close.assert_called_once()

    @patch('corelibs.db_handling.sql_main.SQLiteIO')
    def test_multiple_queries_same_connection(
        self, mock_sqlite_class: MagicMock, mock_logger: MagicMock, temp_db_path: Path
    ):
        """Test multiple queries on the same connection"""
        mock_sqlite_instance = MagicMock()
        mock_sqlite_instance.conn = MagicMock()
        mock_sqlite_instance.db_connected = MagicMock(return_value=True)
        mock_sqlite_instance.execute_query = MagicMock(side_effect=[
            [{'id': 1}],
            [{'id': 2}],
            [{'id': 3}]
        ])
        mock_sqlite_class.return_value = mock_sqlite_instance

        db_ident = f'sqlite:{temp_db_path}'
        sql_main = SQLMain(mock_logger, db_ident)

        result1 = sql_main.process_query("SELECT * FROM test WHERE id = 1")
        result2 = sql_main.process_query("SELECT * FROM test WHERE id = 2")
        result3 = sql_main.process_query("SELECT * FROM test WHERE id = 3")

        assert result1 == [{'id': 1}]
        assert result2 == [{'id': 2}]
        assert result3 == [{'id': 3}]
        assert mock_sqlite_instance.execute_query.call_count == 3


# __END__
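
Beyond the mocked coverage above, a minimal end-to-end sketch of the wrapper (a sketch under the assumption that a standard logging.Logger satisfies the log parameter; the tests here only ever pass a MagicMock):

import logging

log = logging.getLogger("demo")
sql = SQLMain(log, "sqlite:/tmp/demo.db")
rows = sql.process_query("SELECT 1 AS one") # rows (possibly []) on success, False on failure
sql.close()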
|
||||
@@ -1,639 +0,0 @@
|
||||
"""
|
||||
Unit tests for debug_handling.debug_helpers module
|
||||
"""
|
||||
|
||||
import sys
|
||||
import pytest
|
||||
|
||||
from corelibs.debug_handling.debug_helpers import (
|
||||
call_stack,
|
||||
exception_stack,
|
||||
OptExcInfo
|
||||
)
|
||||
|
||||
|
||||
class TestCallStack:
|
||||
"""Test cases for call_stack function"""
|
||||
|
||||
def test_call_stack_basic(self):
|
||||
"""Test basic call_stack functionality"""
|
||||
result = call_stack()
|
||||
assert isinstance(result, str)
|
||||
assert "test_debug_helpers.py" in result
|
||||
assert "test_call_stack_basic" in result
|
||||
|
||||
def test_call_stack_with_default_separator(self):
|
||||
"""Test call_stack with default separator"""
|
||||
result = call_stack()
|
||||
assert " -> " in result
|
||||
|
||||
def test_call_stack_with_custom_separator(self):
|
||||
"""Test call_stack with custom separator"""
|
||||
result = call_stack(separator=" | ")
|
||||
assert " | " in result
|
||||
assert " -> " not in result
|
||||
|
||||
def test_call_stack_with_empty_separator(self):
|
||||
"""Test call_stack with empty separator (should default to ' -> ')"""
|
||||
result = call_stack(separator="")
|
||||
assert " -> " in result
|
||||
|
||||
def test_call_stack_format(self):
|
||||
"""Test call_stack output format (filename:function:lineno)"""
|
||||
result = call_stack()
|
||||
parts = result.split(" -> ")
|
||||
for part in parts:
|
||||
# Each part should have format: filename:function:lineno
|
||||
assert part.count(":") >= 2
|
||||
# Most parts should contain .py but some system frames might not
|
||||
# Just check that we have some .py files in the trace
|
||||
assert ".py" in result or "test_debug_helpers" in result
|
||||
|
||||
def test_call_stack_with_start_offset(self):
|
||||
"""Test call_stack with start offset"""
|
||||
result_no_offset = call_stack(start=0)
|
||||
result_with_offset = call_stack(start=2)
|
||||
|
||||
# With offset, we should get fewer frames
|
||||
parts_no_offset = result_no_offset.split(" -> ")
|
||||
parts_with_offset = result_with_offset.split(" -> ")
|
||||
|
||||
assert len(parts_with_offset) <= len(parts_no_offset)
|
||||
|
||||
def test_call_stack_with_skip_last(self):
|
||||
"""Test call_stack with skip_last parameter"""
|
||||
result_skip_default = call_stack(skip_last=-1)
|
||||
result_skip_more = call_stack(skip_last=-3)
|
||||
|
||||
# Skipping more should result in fewer frames
|
||||
parts_default = result_skip_default.split(" -> ")
|
||||
parts_more = result_skip_more.split(" -> ")
|
||||
|
||||
assert len(parts_more) <= len(parts_default)
|
||||
|
||||
def test_call_stack_skip_last_positive_converts_to_negative(self):
|
||||
"""Test that positive skip_last is converted to negative"""
|
||||
# Both should produce same result
|
||||
result_negative = call_stack(skip_last=-2)
|
||||
result_positive = call_stack(skip_last=2)
|
||||
|
||||
assert result_negative == result_positive
|
||||
|
||||
def test_call_stack_nested_calls(self):
|
||||
"""Test call_stack in nested function calls"""
|
||||
def level_one():
|
||||
return level_two()
|
||||
|
||||
def level_two():
|
||||
return level_three()
|
||||
|
||||
def level_three():
|
||||
return call_stack()
|
||||
|
||||
result = level_one()
|
||||
assert "level_one" in result
|
||||
assert "level_two" in result
|
||||
assert "level_three" in result
|
||||
|
||||
def test_call_stack_reset_start_if_empty_false(self):
|
||||
"""Test call_stack with high start value and reset_start_if_empty=False"""
|
||||
# Using a very high start value should result in empty stack
|
||||
result = call_stack(start=1000, reset_start_if_empty=False)
|
||||
assert result == ""
|
||||
|
||||
def test_call_stack_reset_start_if_empty_true(self):
|
||||
"""Test call_stack with high start value and reset_start_if_empty=True"""
|
||||
# Using a very high start value with reset should give non-empty result
|
||||
result = call_stack(start=1000, reset_start_if_empty=True)
|
||||
assert result != ""
|
||||
assert "test_debug_helpers.py" in result
|
||||
|
||||
def test_call_stack_contains_line_numbers(self):
|
||||
"""Test that call_stack includes line numbers"""
|
||||
result = call_stack()
|
||||
# Extract parts and check for numbers
|
||||
parts = result.split(" -> ")
|
||||
for part in parts:
|
||||
# Line numbers should be present (digits at the end)
|
||||
assert any(char.isdigit() for char in part)
|
||||
|
||||
def test_call_stack_separator_none(self):
|
||||
"""Test call_stack with None separator"""
|
||||
result = call_stack(separator="") # Use empty string instead of None
|
||||
# Empty string should be converted to default ' -> '
|
||||
assert " -> " in result
|
||||
|
||||
def test_call_stack_multiple_separators(self):
|
||||
"""Test call_stack with various custom separators"""
|
||||
separators = [" | ", " >> ", " => ", " / ", "\n"]
|
||||
|
||||
for sep in separators:
|
||||
result = call_stack(separator=sep)
|
||||
assert sep in result or result == "" # May be empty based on stack depth
|
||||
|
||||
|
||||
class TestExceptionStack:
    """Test cases for exception_stack function"""

    def test_exception_stack_with_active_exception(self):
        """Test exception_stack when an exception is active"""
        try:
            raise ValueError("Test exception")
        except ValueError:
            result = exception_stack()
            assert isinstance(result, str)
            assert "test_debug_helpers.py" in result
            assert "test_exception_stack_with_active_exception" in result

    def test_exception_stack_format(self):
        """Test exception_stack output format"""
        try:
            raise RuntimeError("Test error")
        except RuntimeError:
            result = exception_stack()
            parts = result.split(" -> ")
            for part in parts:
                # Each part should have format: filename:function:lineno
                assert part.count(":") >= 2

    def test_exception_stack_with_custom_separator(self):
        """Test exception_stack with custom separator"""
        def nested_call():
            def inner_call():
                raise TypeError("Test type error")
            inner_call()

        try:
            nested_call()
        except TypeError:
            result = exception_stack(separator=" | ")
            # Only check separator if there are multiple frames
            if " | " in result or result.count(":") == 2:
                # Single frame or has separator
                assert isinstance(result, str)
                assert " -> " not in result

    def test_exception_stack_with_empty_separator(self):
        """Test exception_stack with empty separator (should default to ' -> ')"""
        def nested_call():
            def inner_call():
                raise KeyError("Test key error")
            inner_call()

        try:
            nested_call()
        except KeyError:
            result = exception_stack(separator="")
            # Should use default separator if multiple frames exist
            assert isinstance(result, str)

    def test_exception_stack_separator_none(self):
        """Test exception_stack with empty separator"""
        def nested_call():
            def inner_call():
                raise IndexError("Test index error")
            inner_call()

        try:
            nested_call()
        except IndexError:
            result = exception_stack(separator="")  # Use empty string instead of None
            assert isinstance(result, str)

    def test_exception_stack_nested_exceptions(self):
        """Test exception_stack with nested function calls"""
        def level_one():
            level_two()

        def level_two():
            level_three()

        def level_three():
            raise ValueError("Nested exception")

        try:
            level_one()
        except ValueError:
            result = exception_stack()
            # Should contain all levels in the stack
            assert "level_one" in result or "level_two" in result or "level_three" in result

    def test_exception_stack_with_provided_exc_info(self):
        """Test exception_stack with explicitly provided exc_info"""
        try:
            raise AttributeError("Test attribute error")
        except AttributeError:
            exc_info = sys.exc_info()
            result = exception_stack(exc_stack=exc_info)
            assert isinstance(result, str)
            assert len(result) > 0

    def test_exception_stack_no_active_exception(self):
        """Test exception_stack when no exception is active"""
        # This should handle the case gracefully
        # When no exception is active, sys.exc_info() returns (None, None, None)
        result = exception_stack()
        # With no traceback, should return empty string or handle gracefully
        assert isinstance(result, str)

    def test_exception_stack_contains_line_numbers(self):
        """Test that exception_stack includes line numbers"""
        try:
            raise OSError("Test OS error")
        except OSError:
            result = exception_stack()
            if result:  # May be empty
                parts = result.split(" -> ")
                for part in parts:
                    # Line numbers should be present
                    assert any(char.isdigit() for char in part)

    def test_exception_stack_multiple_exceptions(self):
        """Test exception_stack captures the current exception only"""
        first_result = None
        second_result = None

        try:
            raise ValueError("First exception")
        except ValueError:
            first_result = exception_stack()

        try:
            raise TypeError("Second exception")
        except TypeError:
            second_result = exception_stack()

        # Both should be valid but may differ
        assert isinstance(first_result, str)
        assert isinstance(second_result, str)

    def test_exception_stack_with_multiple_separators(self):
        """Test exception_stack with various custom separators"""
        separators = [" | ", " >> ", " => ", " / ", "\n"]

        def nested_call():
            def inner_call():
                raise ValueError("Test exception")
            inner_call()

        for sep in separators:
            try:
                nested_call()
            except ValueError:
                result = exception_stack(separator=sep)
                assert isinstance(result, str)
                # Separator only appears if there are multiple frames


class TestOptExcInfo:
    """Test cases for OptExcInfo type definition"""

    def test_opt_exc_info_type_none_tuple(self):
        """Test OptExcInfo can be None tuple"""
        exc_info: OptExcInfo = (None, None, None)
        assert exc_info == (None, None, None)

    def test_opt_exc_info_type_exception_tuple(self):
        """Test OptExcInfo can be exception tuple"""
        try:
            raise ValueError("Test")
        except ValueError:
            exc_info: OptExcInfo = sys.exc_info()
            assert exc_info[0] is not None
            assert exc_info[1] is not None
            assert exc_info[2] is not None

    def test_opt_exc_info_with_exception_stack(self):
        """Test that OptExcInfo works with exception_stack function"""
        try:
            raise RuntimeError("Test runtime error")
        except RuntimeError:
            exc_info = sys.exc_info()
            result = exception_stack(exc_stack=exc_info)
            assert isinstance(result, str)


class TestIntegration:
    """Integration tests combining multiple scenarios"""

    def test_call_stack_and_exception_stack_together(self):
        """Test using both call_stack and exception_stack in error handling"""
        def faulty_function():
            _ = call_stack()  # Get call stack before exception
            raise ValueError("Intentional error")

        try:
            faulty_function()
        except ValueError:
            exception_trace = exception_stack()

        assert isinstance(exception_trace, str)
        assert "faulty_function" in exception_trace or "test_debug_helpers.py" in exception_trace

    def test_nested_exception_with_call_stack(self):
        """Test call_stack within exception handling"""
        def outer():
            return inner()

        def inner():
            try:
                raise RuntimeError("Inner error")
            except RuntimeError:
                return {
                    'call_stack': call_stack(),
                    'exception_stack': exception_stack()
                }

        result = outer()
        assert 'call_stack' in result
        assert 'exception_stack' in result
        assert isinstance(result['call_stack'], str)
        assert isinstance(result['exception_stack'], str)

    def test_multiple_nested_levels(self):
        """Test with multiple nested function levels"""
        def level_a():
            return level_b()

        def level_b():
            return level_c()

        def level_c():
            return level_d()

        def level_d():
            try:
                raise ValueError("Deep error")
            except ValueError:
                return {
                    'call': call_stack(),
                    'exception': exception_stack()
                }

        result = level_a()
        # Should contain information about the call chain
        assert result['call']
        assert result['exception']

    def test_different_separators_consistency(self):
        """Test that different separators work consistently"""
        separators = [" -> ", " | ", " / ", " >> "]

        def nested_call():
            def inner_call():
                raise ValueError("Test")
            inner_call()

        for sep in separators:
            try:
                nested_call()
            except ValueError:
                exc_result = exception_stack(separator=sep)
                call_result = call_stack(separator=sep)

                assert isinstance(exc_result, str)
                assert isinstance(call_result, str)
                # Both should be valid strings (separator check only if multiple frames)


class TestEdgeCases:
    """Test edge cases and boundary conditions"""

    def test_call_stack_with_zero_start(self):
        """Test call_stack with start=0 (should include all frames)"""
        result = call_stack(start=0)
        assert isinstance(result, str)
        assert len(result) > 0

    def test_call_stack_with_large_skip_last(self):
        """Test call_stack with very large skip_last value"""
        result = call_stack(skip_last=-100)
        # Should handle gracefully, may be empty
        assert isinstance(result, str)

    def test_exception_stack_none_exc_info(self):
        """Test exception_stack with None as exc_stack"""
        result = exception_stack(exc_stack=None)
        assert isinstance(result, str)

    def test_exception_stack_empty_tuple(self):
        """Test exception_stack with empty exception info"""
        exc_info: OptExcInfo = (None, None, None)
        result = exception_stack(exc_stack=exc_info)
        assert isinstance(result, str)

    def test_call_stack_special_characters_in_separator(self):
        """Test call_stack with special characters in separator"""
        special_separators = ["\n", "\t", "->", "||", "//"]

        for sep in special_separators:
            result = call_stack(separator=sep)
            assert isinstance(result, str)

    def test_very_deep_call_stack(self):
        """Test call_stack with very deep recursion (up to a limit)"""
        def recursive_call(depth: int, max_depth: int = 5) -> str:
            if depth >= max_depth:
                return call_stack()
            return recursive_call(depth + 1, max_depth)

        result = recursive_call(0)
        assert isinstance(result, str)
        # Should contain multiple recursive_call entries
        assert result.count("recursive_call") > 0

    def test_exception_stack_different_exception_types(self):
        """Test exception_stack with various exception types"""
        exception_types = [
            ValueError("value"),
            TypeError("type"),
            KeyError("key"),
            IndexError("index"),
            AttributeError("attr"),
            RuntimeError("runtime"),
        ]

        for exc in exception_types:
            try:
                raise exc
            except (ValueError, TypeError, KeyError, IndexError, AttributeError, RuntimeError):
                result = exception_stack()
                assert isinstance(result, str)


class TestRealWorldScenarios:
    """Test real-world debugging scenarios"""

    def test_debugging_workflow(self):
        """Test typical debugging workflow with both functions"""
        def process_data(data: str) -> str:
            _ = call_stack()  # Capture call stack for debugging
            if not data:
                raise ValueError("No data provided")
            return data.upper()

        # Success case
        result = process_data("test")
        assert result == "TEST"

        # Error case
        try:
            process_data("")
        except ValueError:
            exc_trace = exception_stack()
            assert isinstance(exc_trace, str)

    def test_logging_context(self):
        """Test using call_stack for logging context"""
        def get_logging_context():
            return {
                'timestamp': 'now',
                'stack': call_stack(start=1, separator=" > "),
                'function': 'get_logging_context'
            }

        context = get_logging_context()
        assert 'stack' in context
        assert 'timestamp' in context
        assert isinstance(context['stack'], str)

    def test_error_reporting(self):
        """Test comprehensive error reporting"""
        def dangerous_operation() -> dict[str, str]:
            try:
                # Simulate some operation
                _ = 1 / 0
            except ZeroDivisionError:
                return {
                    'error': 'Division by zero',
                    'call_stack': call_stack(),
                    'exception_stack': exception_stack(),
                }
            return {}  # Fallback return

        error_report = dangerous_operation()
        assert error_report is not None
        assert 'error' in error_report
        assert 'call_stack' in error_report
        assert 'exception_stack' in error_report
        assert error_report['error'] == 'Division by zero'

    def test_function_tracing(self):
        """Test function call tracing"""
        traces: list[str] = []

        def traced_function_a() -> str:
            traces.append(call_stack())
            return traced_function_b()

        def traced_function_b() -> str:
            traces.append(call_stack())
            return traced_function_c()

        def traced_function_c() -> str:
            traces.append(call_stack())
            return "done"

        result = traced_function_a()
        assert result == "done"
        assert len(traces) == 3
        # Each trace should be different (different call depths)
        assert all(isinstance(t, str) for t in traces)

    def test_exception_chain_tracking(self):
        """Test tracking exception chains"""
        exception_traces: list[str] = []

        def operation_one() -> None:
            try:
                operation_two()
            except ValueError:
                exception_traces.append(exception_stack())
                raise

        def operation_two() -> None:
            try:
                operation_three()
            except TypeError as exc:
                exception_traces.append(exception_stack())
                raise ValueError("Wrapped error") from exc

        def operation_three() -> None:
            raise TypeError("Original error")

        try:
            operation_one()
        except ValueError:
            exception_traces.append(exception_stack())

        # Should have captured multiple exception stacks
        assert len(exception_traces) > 0
        assert all(isinstance(t, str) for t in exception_traces)


class TestParametrized:
    """Parametrized tests for comprehensive coverage"""

    @pytest.mark.parametrize("start", [0, 1, 2, 5, 10])
    def test_call_stack_various_starts(self, start: int) -> None:
        """Test call_stack with various start values"""
        result = call_stack(start=start)
        assert isinstance(result, str)

    @pytest.mark.parametrize("skip_last", [-1, -2, -3, -5, 1, 2, 3, 5])
    def test_call_stack_various_skip_lasts(self, skip_last: int) -> None:
        """Test call_stack with various skip_last values"""
        result = call_stack(skip_last=skip_last)
        assert isinstance(result, str)

    @pytest.mark.parametrize("separator", [" -> ", " | ", " / ", " >> ", " => ", "\n", "\t"])
    def test_call_stack_various_separators(self, separator: str) -> None:
        """Test call_stack with various separators"""
        result = call_stack(separator=separator)
        assert isinstance(result, str)
        if result:
            assert separator in result

    @pytest.mark.parametrize("reset_start", [True, False])
    def test_call_stack_reset_start_variations(self, reset_start: bool) -> None:
        """Test call_stack with reset_start_if_empty variations"""
        result = call_stack(start=100, reset_start_if_empty=reset_start)
        assert isinstance(result, str)
        if reset_start:
            assert len(result) > 0  # Should have content after reset
        else:
            assert len(result) == 0  # Should be empty

    @pytest.mark.parametrize("separator", [" -> ", " | ", " / ", " >> ", "\n"])
    def test_exception_stack_various_separators(self, separator: str) -> None:
        """Test exception_stack with various separators"""
        def nested_call():
            def inner_call():
                raise ValueError("Test")
            inner_call()

        try:
            nested_call()
        except ValueError:
            result = exception_stack(separator=separator)
            assert isinstance(result, str)
            # Check that result is valid (separator only if multiple frames exist)

    @pytest.mark.parametrize("exception_type", [
        ValueError,
        TypeError,
        KeyError,
        IndexError,
        AttributeError,
        RuntimeError,
        OSError,
    ])
    def test_exception_stack_various_exception_types(self, exception_type: type[Exception]) -> None:
        """Test exception_stack with various exception types"""
        try:
            raise exception_type("Test exception")
        except (ValueError, TypeError, KeyError, IndexError, AttributeError, RuntimeError, OSError):
            result = exception_stack()
            assert isinstance(result, str)

# __END__

@@ -1,288 +0,0 @@
"""
Unit tests for debug_handling.dump_data module
"""

import json
from datetime import datetime, date
from decimal import Decimal
from typing import Any

import pytest

from corelibs.debug_handling.dump_data import dump_data


class TestDumpData:
    """Test cases for dump_data function"""

    def test_dump_simple_dict(self):
        """Test dumping a simple dictionary"""
        data = {"name": "John", "age": 30}
        result = dump_data(data)

        assert isinstance(result, str)
        parsed = json.loads(result)
        assert parsed == data

    def test_dump_simple_list(self):
        """Test dumping a simple list"""
        data = [1, 2, 3, 4, 5]
        result = dump_data(data)

        assert isinstance(result, str)
        parsed = json.loads(result)
        assert parsed == data

    def test_dump_nested_dict(self):
        """Test dumping a nested dictionary"""
        data = {
            "user": {
                "name": "Alice",
                "address": {
                    "city": "Tokyo",
                    "country": "Japan"
                }
            }
        }
        result = dump_data(data)

        assert isinstance(result, str)
        parsed = json.loads(result)
        assert parsed == data

    def test_dump_mixed_types(self):
        """Test dumping data with mixed types"""
        data = {
            "string": "test",
            "number": 42,
            "float": 3.14,
            "boolean": True,
            "null": None,
            "list": [1, 2, 3]
        }
        result = dump_data(data)

        assert isinstance(result, str)
        parsed = json.loads(result)
        assert parsed == data

    def test_dump_with_indent_default(self):
        """Test that indent is applied by default"""
        data = {"a": 1, "b": 2}
        result = dump_data(data)

        # With indent, result should contain newlines
        assert "\n" in result
        assert "    " in result  # 4 spaces for indent

    def test_dump_with_indent_true(self):
        """Test explicit indent=True"""
        data = {"a": 1, "b": 2}
        result = dump_data(data, use_indent=True)

        # With indent, result should contain newlines
        assert "\n" in result
        assert "    " in result  # 4 spaces for indent

    def test_dump_without_indent(self):
        """Test dumping without indentation"""
        data = {"a": 1, "b": 2}
        result = dump_data(data, use_indent=False)

        # Without indent, result should be compact
        assert "\n" not in result
        assert result == '{"a": 1, "b": 2}'

    def test_dump_unicode_characters(self):
        """Test that unicode characters are preserved (ensure_ascii=False)"""
        data = {"message": "こんにちは", "emoji": "😀", "german": "Müller"}
        result = dump_data(data)

        # Unicode characters should be preserved, not escaped
        assert "こんにちは" in result
        assert "😀" in result
        assert "Müller" in result

        parsed = json.loads(result)
        assert parsed == data

    def test_dump_datetime_object(self):
        """Test dumping data with datetime objects (using default=str)"""
        now = datetime(2023, 10, 15, 14, 30, 0)
        data = {"timestamp": now}
        result = dump_data(data)

        assert isinstance(result, str)
        # datetime should be converted to string
        assert "2023-10-15" in result

    def test_dump_date_object(self):
        """Test dumping data with date objects"""
        today = date(2023, 10, 15)
        data = {"date": today}
        result = dump_data(data)

        assert isinstance(result, str)
        assert "2023-10-15" in result

    def test_dump_decimal_object(self):
        """Test dumping data with Decimal objects"""
        data = {"amount": Decimal("123.45")}
        result = dump_data(data)

        assert isinstance(result, str)
        assert "123.45" in result

    def test_dump_empty_dict(self):
        """Test dumping an empty dictionary"""
        data = {}
        result = dump_data(data)

        assert isinstance(result, str)
        parsed = json.loads(result)
        assert parsed == {}

    def test_dump_empty_list(self):
        """Test dumping an empty list"""
        data = []
        result = dump_data(data)

        assert isinstance(result, str)
        parsed = json.loads(result)
        assert parsed == []

    def test_dump_string_directly(self):
        """Test dumping a string directly"""
        data = "Hello, World!"
        result = dump_data(data)

        assert isinstance(result, str)
        parsed = json.loads(result)
        assert parsed == data

    def test_dump_number_directly(self):
        """Test dumping a number directly"""
        data = 42
        result = dump_data(data)

        assert isinstance(result, str)
        parsed = json.loads(result)
        assert parsed == data

    def test_dump_boolean_directly(self):
        """Test dumping a boolean directly"""
        data = True
        result = dump_data(data)

        assert isinstance(result, str)
        parsed = json.loads(result)
        assert parsed is True

    def test_dump_none_directly(self):
        """Test dumping None directly"""
        data = None
        result = dump_data(data)

        assert isinstance(result, str)
        assert result == "null"
        parsed = json.loads(result)
        assert parsed is None

    def test_dump_complex_nested_structure(self):
        """Test dumping a complex nested structure"""
        data = {
            "users": [
                {
                    "id": 1,
                    "name": "Alice",
                    "tags": ["admin", "user"],
                    "metadata": {
                        "created": datetime(2023, 1, 1),
                        "active": True
                    }
                },
                {
                    "id": 2,
                    "name": "Bob",
                    "tags": ["user"],
                    "metadata": {
                        "created": datetime(2023, 6, 15),
                        "active": False
                    }
                }
            ],
            "total": 2
        }
        result = dump_data(data)

        assert isinstance(result, str)
        # Check that it's valid JSON
        parsed = json.loads(result)
        assert len(parsed["users"]) == 2
        assert parsed["total"] == 2

    def test_dump_special_characters(self):
        """Test dumping data with special characters"""
        data = {
            "quote": 'He said "Hello"',
            "backslash": "path\\to\\file",
            "newline": "line1\nline2",
            "tab": "col1\tcol2"
        }
        result = dump_data(data)

        assert isinstance(result, str)
        parsed = json.loads(result)
        assert parsed == data

    def test_dump_large_numbers(self):
        """Test dumping large numbers"""
        data = {
            "big_int": 123456789012345678901234567890,
            "big_float": 1.23456789e100
        }
        result = dump_data(data)

        assert isinstance(result, str)
        parsed = json.loads(result)
        assert parsed["big_int"] == data["big_int"]

    def test_dump_list_of_dicts(self):
        """Test dumping a list of dictionaries"""
        data = [
            {"id": 1, "name": "Item 1"},
            {"id": 2, "name": "Item 2"},
            {"id": 3, "name": "Item 3"}
        ]
        result = dump_data(data)

        assert isinstance(result, str)
        parsed = json.loads(result)
        assert parsed == data
        assert len(parsed) == 3


class CustomObject:
    """Custom class for testing default=str conversion"""
    def __init__(self, value: Any):
        self.value = value

    def __str__(self):
        return f"CustomObject({self.value})"


class TestDumpDataWithCustomObjects:
    """Test cases for dump_data with custom objects"""

    def test_dump_custom_object(self):
        """Test that custom objects are converted using str()"""
        obj = CustomObject("test")
        data = {"custom": obj}
        result = dump_data(data)

        assert isinstance(result, str)
        assert "CustomObject(test)" in result


if __name__ == "__main__":
    pytest.main([__file__, "-v"])

@@ -1,560 +0,0 @@
"""
Unit tests for corelibs.debug_handling.profiling module
"""

import time
import tracemalloc

from corelibs.debug_handling.profiling import display_top, Profiling


class TestDisplayTop:
    """Test display_top function"""

    def test_display_top_basic(self):
        """Test that display_top returns a string with basic stats"""
        tracemalloc.start()

        # Allocate some memory
        data = [0] * 10000

        snapshot = tracemalloc.take_snapshot()
        tracemalloc.stop()

        result = display_top(snapshot)

        assert isinstance(result, str)
        assert "Top 10 lines" in result
        assert "KiB" in result
        assert "Total allocated size:" in result

        # Clean up
        del data

    def test_display_top_with_custom_limit(self):
        """Test display_top with custom limit parameter"""
        tracemalloc.start()

        # Allocate some memory
        data = [0] * 10000

        snapshot = tracemalloc.take_snapshot()
        tracemalloc.stop()

        result = display_top(snapshot, limit=5)

        assert isinstance(result, str)
        assert "Top 5 lines" in result

        # Clean up
        del data

    def test_display_top_with_different_key_type(self):
        """Test display_top with different key_type parameter"""
        tracemalloc.start()

        # Allocate some memory
        data = [0] * 10000

        snapshot = tracemalloc.take_snapshot()
        tracemalloc.stop()

        result = display_top(snapshot, key_type='filename')

        assert isinstance(result, str)
        assert "Top 10 lines" in result

        # Clean up
        del data

    def test_display_top_filters_traces(self):
        """Test that display_top filters out bootstrap and unknown traces"""
        tracemalloc.start()

        # Allocate some memory
        data = [0] * 10000

        snapshot = tracemalloc.take_snapshot()
        tracemalloc.stop()

        result = display_top(snapshot)

        # Should not contain filtered traces
        assert "<frozen importlib._bootstrap>" not in result
        assert "<unknown>" not in result

        # Clean up
        del data

    def test_display_top_with_limit_larger_than_stats(self):
        """Test display_top when limit is larger than available stats"""
        tracemalloc.start()

        # Allocate some memory
        data = [0] * 100

        snapshot = tracemalloc.take_snapshot()
        tracemalloc.stop()

        result = display_top(snapshot, limit=1000)

        assert isinstance(result, str)
        assert "Top 1000 lines" in result
        assert "Total allocated size:" in result

        # Clean up
        del data

    def test_display_top_empty_snapshot(self):
        """Test display_top with a snapshot that has minimal traces"""
        tracemalloc.start()
        snapshot = tracemalloc.take_snapshot()
        tracemalloc.stop()

        result = display_top(snapshot, limit=1)

        assert isinstance(result, str)
        assert "Top 1 lines" in result


class TestProfilingInitialization:
    """Test Profiling class initialization"""

    def test_profiling_initialization(self):
        """Test that Profiling initializes correctly"""
        profiler = Profiling()

        # Should be able to create instance
        assert isinstance(profiler, Profiling)

    def test_profiling_initial_state(self):
        """Test that Profiling starts in a clean state"""
        profiler = Profiling()

        # Should not raise an error when calling end_profiling
        # even though start_profiling wasn't called
        profiler.end_profiling()

        result = profiler.print_profiling()
        assert isinstance(result, str)


class TestProfilingStartEnd:
    """Test start_profiling and end_profiling functionality"""

    def test_start_profiling(self):
        """Test that start_profiling can be called"""
        profiler = Profiling()

        # Should not raise an error
        profiler.start_profiling("test_operation")

    def test_end_profiling(self):
        """Test that end_profiling can be called"""
        profiler = Profiling()
        profiler.start_profiling("test_operation")

        # Should not raise an error
        profiler.end_profiling()

    def test_start_profiling_with_different_idents(self):
        """Test start_profiling with different identifier strings"""
        profiler = Profiling()

        identifiers = ["short", "longer_identifier", "very_long_identifier_with_many_chars"]

        for ident in identifiers:
            profiler.start_profiling(ident)
            profiler.end_profiling()
            result = profiler.print_profiling()

            assert ident in result

    def test_end_profiling_without_start(self):
        """Test that end_profiling can be called without start_profiling"""
        profiler = Profiling()

        # Should not raise an error but internal state should indicate warning
        profiler.end_profiling()

        result = profiler.print_profiling()
        assert isinstance(result, str)

    def test_profiling_measures_time(self):
        """Test that profiling measures elapsed time"""
        profiler = Profiling()
        profiler.start_profiling("time_test")

        sleep_duration = 0.05  # 50ms
        time.sleep(sleep_duration)

        profiler.end_profiling()
        result = profiler.print_profiling()

        assert isinstance(result, str)
        assert "time:" in result
        # Should have some time measurement
        assert "ms" in result or "s" in result

    def test_profiling_measures_memory(self):
        """Test that profiling measures memory usage"""
        profiler = Profiling()
        profiler.start_profiling("memory_test")

        # Allocate some memory
        data = [0] * 100000

        profiler.end_profiling()
        result = profiler.print_profiling()

        assert isinstance(result, str)
        assert "RSS:" in result
        assert "VMS:" in result
        assert "time:" in result

        # Clean up
        del data


class TestProfilingPrintProfiling:
    """Test print_profiling functionality"""

    def test_print_profiling_returns_string(self):
        """Test that print_profiling returns a string"""
        profiler = Profiling()
        profiler.start_profiling("test")
        profiler.end_profiling()

        result = profiler.print_profiling()

        assert isinstance(result, str)

    def test_print_profiling_contains_identifier(self):
        """Test that print_profiling includes the identifier"""
        profiler = Profiling()
        identifier = "my_test_operation"

        profiler.start_profiling(identifier)
        profiler.end_profiling()

        result = profiler.print_profiling()

        assert identifier in result

    def test_print_profiling_format(self):
        """Test that print_profiling has expected format"""
        profiler = Profiling()
        profiler.start_profiling("test")
        profiler.end_profiling()

        result = profiler.print_profiling()

        # Check for expected components
        assert "Profiling:" in result
        assert "RSS:" in result
        assert "VMS:" in result
        assert "time:" in result

    def test_print_profiling_multiple_calls(self):
        """Test that print_profiling can be called multiple times"""
        profiler = Profiling()
        profiler.start_profiling("test")
        profiler.end_profiling()

        result1 = profiler.print_profiling()
        result2 = profiler.print_profiling()

        # Should return the same result
        assert result1 == result2

    def test_print_profiling_time_formats(self):
        """Test different time format outputs"""
        profiler = Profiling()

        # Very short duration (milliseconds)
        profiler.start_profiling("ms_test")
        time.sleep(0.001)
        profiler.end_profiling()
        result = profiler.print_profiling()
        assert "ms" in result

        # Slightly longer duration (seconds)
        profiler.start_profiling("s_test")
        time.sleep(0.1)
        profiler.end_profiling()
        result = profiler.print_profiling()
        # Could be ms or s depending on timing
        assert ("ms" in result or "s" in result)

    def test_print_profiling_memory_formats(self):
        """Test different memory format outputs"""
        profiler = Profiling()
        profiler.start_profiling("memory_format_test")

        # Allocate some memory
        data = [0] * 50000

        profiler.end_profiling()
        result = profiler.print_profiling()

        # Should have some memory unit (B, kB, MB, GB)
        assert any(unit in result for unit in ["B", "kB", "MB", "GB"])

        # Clean up
        del data


class TestProfilingIntegration:
    """Integration tests for Profiling class"""

    def test_complete_profiling_cycle(self):
        """Test a complete profiling cycle from start to print"""
        profiler = Profiling()

        profiler.start_profiling("complete_cycle")

        # Do some work
        data = [i for i in range(10000)]
        time.sleep(0.01)

        profiler.end_profiling()
        result = profiler.print_profiling()

        assert isinstance(result, str)
        assert "complete_cycle" in result
        assert "RSS:" in result
        assert "VMS:" in result
        assert "time:" in result

        # Clean up
        del data

    def test_multiple_profiling_sessions(self):
        """Test running multiple profiling sessions"""
        profiler = Profiling()

        # First session
        profiler.start_profiling("session_1")
        time.sleep(0.01)
        profiler.end_profiling()
        result1 = profiler.print_profiling()

        # Second session (same profiler instance)
        profiler.start_profiling("session_2")
        data = [0] * 100000
        time.sleep(0.01)
        profiler.end_profiling()
        result2 = profiler.print_profiling()

        # Results should be different
        assert "session_1" in result1
        assert "session_2" in result2
        assert result1 != result2

        # Clean up
        del data

    def test_profiling_with_zero_work(self):
        """Test profiling with minimal work"""
        profiler = Profiling()

        profiler.start_profiling("zero_work")
        profiler.end_profiling()

        result = profiler.print_profiling()

        assert isinstance(result, str)
        assert "zero_work" in result

    def test_profiling_with_heavy_computation(self):
        """Test profiling with heavier computation"""
        profiler = Profiling()

        profiler.start_profiling("heavy_computation")

        # Do some computation
        result_data: list[list[int]] = []
        for _ in range(1000):
            result_data.append([j * 2 for j in range(100)])

        time.sleep(0.05)

        profiler.end_profiling()
        result = profiler.print_profiling()

        assert isinstance(result, str)
        assert "heavy_computation" in result
        # Should show measurable time and memory
        assert "time:" in result

        # Clean up
        del result_data

    def test_independent_profilers(self):
        """Test that multiple Profiling instances are independent"""
        profiler1 = Profiling()
        profiler2 = Profiling()

        profiler1.start_profiling("profiler_1")
        time.sleep(0.01)

        profiler2.start_profiling("profiler_2")
        data = [0] * 100000
        time.sleep(0.01)

        profiler1.end_profiling()
        profiler2.end_profiling()

        result1 = profiler1.print_profiling()
        result2 = profiler2.print_profiling()

        # Should have different identifiers
        assert "profiler_1" in result1
        assert "profiler_2" in result2

        # Results should be different
        assert result1 != result2

        # Clean up
        del data


class TestProfilingEdgeCases:
    """Test edge cases and boundary conditions"""

    def test_empty_identifier(self):
        """Test profiling with empty identifier"""
        profiler = Profiling()

        profiler.start_profiling("")
        profiler.end_profiling()

        result = profiler.print_profiling()

        assert isinstance(result, str)
        assert "Profiling:" in result

    def test_very_long_identifier(self):
        """Test profiling with very long identifier"""
        profiler = Profiling()

        long_ident = "a" * 100

        profiler.start_profiling(long_ident)
        profiler.end_profiling()

        result = profiler.print_profiling()

        assert isinstance(result, str)
        assert long_ident in result

    def test_special_characters_in_identifier(self):
        """Test profiling with special characters in identifier"""
        profiler = Profiling()

        special_ident = "test_@#$%_operation"

        profiler.start_profiling(special_ident)
        profiler.end_profiling()

        result = profiler.print_profiling()

        assert isinstance(result, str)
        assert special_ident in result

    def test_rapid_consecutive_profiling(self):
        """Test rapid consecutive profiling cycles"""
        profiler = Profiling()

        for i in range(5):
            profiler.start_profiling(f"rapid_{i}")
            profiler.end_profiling()
            result = profiler.print_profiling()

            assert isinstance(result, str)
            assert f"rapid_{i}" in result

    def test_profiling_negative_memory_change(self):
        """Test profiling when memory usage decreases"""
        profiler = Profiling()

        # Allocate some memory before profiling
        pre_data = [0] * 1000000

        profiler.start_profiling("memory_decrease")

        # Free the memory
        del pre_data

        profiler.end_profiling()
        result = profiler.print_profiling()

        assert isinstance(result, str)
        assert "memory_decrease" in result
        # Should handle negative memory change gracefully

    def test_very_short_duration(self):
        """Test profiling with extremely short duration"""
        profiler = Profiling()

        profiler.start_profiling("instant")
        profiler.end_profiling()

        result = profiler.print_profiling()

        assert isinstance(result, str)
        assert "instant" in result
        assert "ms" in result  # Should show milliseconds for very short duration


class TestProfilingContextManager:
    """Test profiling usage patterns similar to context managers"""

    def test_typical_usage_pattern(self):
        """Test typical usage pattern for profiling"""
        profiler = Profiling()

        # Typical pattern
        profiler.start_profiling("typical_operation")

        # Perform operation
        result_list: list[int] = []
        for _ in range(1000):
            result_list.append(_ * 2)

        profiler.end_profiling()

        # Get results
        output = profiler.print_profiling()

        assert isinstance(output, str)
        assert "typical_operation" in output

        # Clean up
        del result_list

    def test_profiling_without_end(self):
        """Test what happens when end_profiling is not called"""
        profiler = Profiling()

        profiler.start_profiling("no_end")

        # Don't call end_profiling

        result = profiler.print_profiling()

        # Should still return a string (though data might be incomplete)
        assert isinstance(result, str)

    def test_profiling_end_without_start(self):
        """Test calling end_profiling multiple times without start"""
        profiler = Profiling()

        profiler.end_profiling()
        profiler.end_profiling()

        result = profiler.print_profiling()

        assert isinstance(result, str)

# __END__

@@ -1,405 +0,0 @@
"""
Unit tests for corelibs.debug_handling.timer module
"""

import time
from datetime import datetime, timedelta

from corelibs.debug_handling.timer import Timer


class TestTimerInitialization:
    """Test Timer class initialization"""

    def test_timer_initialization(self):
        """Test that Timer initializes with correct default values"""
        timer = Timer()

        # Check that start times are set
        assert isinstance(timer.get_overall_start_time(), datetime)
        assert isinstance(timer.get_start_time(), datetime)

        # Check that end times are None
        assert timer.get_overall_end_time() is None
        assert timer.get_end_time() is None

        # Check that run times are None
        assert timer.get_overall_run_time() is None
        assert timer.get_run_time() is None

    def test_timer_start_times_are_recent(self):
        """Test that start times are set to current time on initialization"""
        before_init = datetime.now()
        timer = Timer()
        after_init = datetime.now()

        overall_start = timer.get_overall_start_time()
        start = timer.get_start_time()

        assert before_init <= overall_start <= after_init
        assert before_init <= start <= after_init

    def test_timer_start_times_are_same(self):
        """Test that overall_start_time and start_time are initialized to the same time"""
        timer = Timer()

        overall_start = timer.get_overall_start_time()
        start = timer.get_start_time()

        # They should be very close (within a few microseconds)
        time_diff = abs((overall_start - start).total_seconds())
        assert time_diff < 0.001  # Less than 1 millisecond


class TestOverallRunTime:
    """Test overall run time functionality"""

    def test_overall_run_time_returns_timedelta(self):
        """Test that overall_run_time returns a timedelta object"""
        timer = Timer()
        time.sleep(0.01)  # Sleep for 10ms

        result = timer.overall_run_time()

        assert isinstance(result, timedelta)

    def test_overall_run_time_sets_end_time(self):
        """Test that calling overall_run_time sets the end time"""
        timer = Timer()

        assert timer.get_overall_end_time() is None

        timer.overall_run_time()

        assert isinstance(timer.get_overall_end_time(), datetime)

    def test_overall_run_time_sets_run_time(self):
        """Test that calling overall_run_time sets the run time"""
        timer = Timer()

        assert timer.get_overall_run_time() is None

        timer.overall_run_time()

        assert isinstance(timer.get_overall_run_time(), timedelta)

    def test_overall_run_time_accuracy(self):
        """Test that overall_run_time calculates time difference accurately"""
        timer = Timer()
        sleep_duration = 0.05  # 50ms
        time.sleep(sleep_duration)

        result = timer.overall_run_time()

        # Allow for some variance (10ms tolerance)
        assert sleep_duration - 0.01 <= result.total_seconds() <= sleep_duration + 0.01

    def test_overall_run_time_multiple_calls(self):
        """Test that calling overall_run_time multiple times updates the values"""
        timer = Timer()
        time.sleep(0.01)

        first_result = timer.overall_run_time()
        first_end_time = timer.get_overall_end_time()

        time.sleep(0.01)

        second_result = timer.overall_run_time()
        second_end_time = timer.get_overall_end_time()

        # Second call should have longer runtime
        assert second_result > first_result
        assert second_end_time is not None
        assert first_end_time is not None
        # End time should be updated
        assert second_end_time > first_end_time

    def test_overall_run_time_consistency(self):
        """Test that get_overall_run_time returns the same value as overall_run_time"""
        timer = Timer()
        time.sleep(0.01)

        calculated_time = timer.overall_run_time()
        retrieved_time = timer.get_overall_run_time()

        assert calculated_time == retrieved_time


class TestRunTime:
    """Test run time functionality"""

    def test_run_time_returns_timedelta(self):
        """Test that run_time returns a timedelta object"""
        timer = Timer()
        time.sleep(0.01)

        result = timer.run_time()

        assert isinstance(result, timedelta)

    def test_run_time_sets_end_time(self):
        """Test that calling run_time sets the end time"""
        timer = Timer()

        assert timer.get_end_time() is None

        timer.run_time()

        assert isinstance(timer.get_end_time(), datetime)

    def test_run_time_sets_run_time(self):
        """Test that calling run_time sets the run time"""
        timer = Timer()

        assert timer.get_run_time() is None

        timer.run_time()

        assert isinstance(timer.get_run_time(), timedelta)

    def test_run_time_accuracy(self):
        """Test that run_time calculates time difference accurately"""
        timer = Timer()
        sleep_duration = 0.05  # 50ms
        time.sleep(sleep_duration)

        result = timer.run_time()

        # Allow for some variance (10ms tolerance)
        assert sleep_duration - 0.01 <= result.total_seconds() <= sleep_duration + 0.01

    def test_run_time_multiple_calls(self):
        """Test that calling run_time multiple times updates the values"""
        timer = Timer()
        time.sleep(0.01)

        first_result = timer.run_time()
        first_end_time = timer.get_end_time()

        time.sleep(0.01)

        second_result = timer.run_time()
        second_end_time = timer.get_end_time()

        # Second call should have longer runtime
        assert second_result > first_result
        assert second_end_time is not None
        assert first_end_time is not None
        # End time should be updated
        assert second_end_time > first_end_time

    def test_run_time_consistency(self):
        """Test that get_run_time returns the same value as run_time"""
        timer = Timer()
        time.sleep(0.01)

        calculated_time = timer.run_time()
        retrieved_time = timer.get_run_time()

        assert calculated_time == retrieved_time


class TestResetRunTime:
    """Test reset_run_time functionality"""

    def test_reset_run_time_resets_start_time(self):
        """Test that reset_run_time updates the start time"""
        timer = Timer()
        original_start = timer.get_start_time()

        time.sleep(0.02)
        timer.reset_run_time()

        new_start = timer.get_start_time()

        assert new_start > original_start

    def test_reset_run_time_clears_end_time(self):
        """Test that reset_run_time clears the end time"""
        timer = Timer()
        timer.run_time()

        assert timer.get_end_time() is not None

        timer.reset_run_time()

        assert timer.get_end_time() is None

    def test_reset_run_time_clears_run_time(self):
        """Test that reset_run_time clears the run time"""
        timer = Timer()
        timer.run_time()

        assert timer.get_run_time() is not None

        timer.reset_run_time()

        assert timer.get_run_time() is None

    def test_reset_run_time_does_not_affect_overall_times(self):
        """Test that reset_run_time does not affect overall times"""
        timer = Timer()

        overall_start = timer.get_overall_start_time()
        timer.overall_run_time()
        overall_end = timer.get_overall_end_time()
        overall_run = timer.get_overall_run_time()

        timer.reset_run_time()

        # Overall times should remain unchanged
        assert timer.get_overall_start_time() == overall_start
        assert timer.get_overall_end_time() == overall_end
        assert timer.get_overall_run_time() == overall_run

    def test_reset_run_time_allows_new_measurement(self):
        """Test that reset_run_time allows for new time measurements"""
        timer = Timer()
        time.sleep(0.02)
        timer.run_time()

        first_run_time = timer.get_run_time()

        timer.reset_run_time()
        time.sleep(0.01)
        timer.run_time()

        second_run_time = timer.get_run_time()

        assert second_run_time is not None
        assert first_run_time is not None
        # Second measurement should be shorter since we reset
        assert second_run_time < first_run_time


class TestTimerIntegration:
    """Integration tests for Timer class"""

    def test_independent_timers(self):
        """Test that multiple Timer instances are independent"""
        timer1 = Timer()
        time.sleep(0.01)
        timer2 = Timer()

        # timer1 should have earlier start time
        assert timer1.get_start_time() < timer2.get_start_time()
        assert timer1.get_overall_start_time() < timer2.get_overall_start_time()

    def test_overall_and_run_time_independence(self):
        """Test that overall time and run time are independent"""
        timer = Timer()
        time.sleep(0.02)

        # Reset run time but not overall
        timer.reset_run_time()
        time.sleep(0.01)

        run_time = timer.run_time()
        overall_time = timer.overall_run_time()

        # Overall time should be longer than run time
        assert overall_time > run_time

    def test_typical_usage_pattern(self):
        """Test a typical usage pattern of the Timer class"""
        timer = Timer()

        # Measure first operation
        time.sleep(0.01)
        first_operation = timer.run_time()
        assert first_operation.total_seconds() > 0

        # Reset and measure second operation
        timer.reset_run_time()
        time.sleep(0.01)
        second_operation = timer.run_time()
        assert second_operation.total_seconds() > 0

        # Get overall time
        overall = timer.overall_run_time()

        # Overall should be greater than individual operations
        assert overall > first_operation
        assert overall > second_operation

    def test_zero_sleep_timer(self):
        """Test timer with minimal sleep (edge case)"""
        timer = Timer()

        # Call run_time immediately
        result = timer.run_time()

        # Should still return a valid timedelta (very small)
        assert isinstance(result, timedelta)
        assert result.total_seconds() >= 0

    def test_getter_methods_before_calculation(self):
        """Test that getter methods return None before calculation methods are called"""
        timer = Timer()

        # Before calling run_time()
        assert timer.get_end_time() is None
        assert timer.get_run_time() is None

        # Before calling overall_run_time()
        assert timer.get_overall_end_time() is None
        assert timer.get_overall_run_time() is None

        # But start times should always be set
        assert timer.get_start_time() is not None
        assert timer.get_overall_start_time() is not None


class TestTimerEdgeCases:
    """Test edge cases and boundary conditions"""

    def test_rapid_consecutive_calls(self):
        """Test rapid consecutive calls to run_time"""
        timer = Timer()

        results: list[timedelta] = []
        for _ in range(5):
            results.append(timer.run_time())

        # Each result should be greater than or equal to the previous
        for i in range(1, len(results)):
            assert results[i] >= results[i - 1]

    def test_very_short_duration(self):
        """Test timer with very short duration"""
        timer = Timer()
        result = timer.run_time()

        # Should be a very small positive timedelta
        assert isinstance(result, timedelta)
        assert result.total_seconds() >= 0
        assert result.total_seconds() < 0.1  # Less than 100ms

    def test_reset_multiple_times(self):
        """Test resetting the timer multiple times"""
        timer = Timer()

        for _ in range(3):
            timer.reset_run_time()
            time.sleep(0.01)
            result = timer.run_time()

            assert isinstance(result, timedelta)
            assert result.total_seconds() > 0

    def test_overall_time_persists_through_resets(self):
        """Test that overall time continues even when run_time is reset"""
        timer = Timer()

        time.sleep(0.01)
        timer.reset_run_time()

        time.sleep(0.01)
        timer.reset_run_time()

        overall = timer.overall_run_time()

        # Overall time should reflect total elapsed time
        assert overall.total_seconds() >= 0.02

# __END__

@@ -1,975 +0,0 @@
"""
Unit tests for debug_handling.writeline module
"""

import io
import pytest
from pytest import CaptureFixture

from corelibs.debug_handling.writeline import (
    write_l,
    pr_header,
    pr_title,
    pr_open,
    pr_close,
    pr_act
)


class TestWriteL:
    """Test cases for write_l function"""

    def test_write_l_print_only(self, capsys: CaptureFixture[str]):
        """Test write_l with print_line=True and no file"""
        write_l("Test line", print_line=True)
        captured = capsys.readouterr()
        assert captured.out == "Test line\n"

    def test_write_l_no_print_no_file(self, capsys: CaptureFixture[str]):
        """Test write_l with print_line=False and no file (should do nothing)"""
        write_l("Test line", print_line=False)
        captured = capsys.readouterr()
        assert captured.out == ""

    def test_write_l_file_only(self, capsys: CaptureFixture[str]):
        """Test write_l with file handler only (no print)"""
        fpl = io.StringIO()
        write_l("Test line", fpl=fpl, print_line=False)
        captured = capsys.readouterr()
        assert captured.out == ""
        assert fpl.getvalue() == "Test line\n"
        fpl.close()

    def test_write_l_both_print_and_file(self, capsys: CaptureFixture[str]):
        """Test write_l with both print and file output"""
        fpl = io.StringIO()
        write_l("Test line", fpl=fpl, print_line=True)
        captured = capsys.readouterr()
        assert captured.out == "Test line\n"
        assert fpl.getvalue() == "Test line\n"
        fpl.close()

    def test_write_l_multiple_lines_to_file(self):
        """Test write_l writing multiple lines to file"""
        fpl = io.StringIO()
        write_l("Line 1", fpl=fpl, print_line=False)
        write_l("Line 2", fpl=fpl, print_line=False)
        write_l("Line 3", fpl=fpl, print_line=False)
        assert fpl.getvalue() == "Line 1\nLine 2\nLine 3\n"
        fpl.close()

    def test_write_l_empty_string(self, capsys: CaptureFixture[str]):
        """Test write_l with empty string"""
        fpl = io.StringIO()
        write_l("", fpl=fpl, print_line=True)
        captured = capsys.readouterr()
        assert captured.out == "\n"
        assert fpl.getvalue() == "\n"
        fpl.close()

    def test_write_l_special_characters(self):
        """Test write_l with special characters"""
        fpl = io.StringIO()
        special_line = "Special: \t\n\r\\ 特殊文字 €"
        write_l(special_line, fpl=fpl, print_line=False)
        assert special_line + "\n" in fpl.getvalue()
        fpl.close()

    def test_write_l_long_string(self):
        """Test write_l with long string"""
        fpl = io.StringIO()
        long_line = "A" * 1000
        write_l(long_line, fpl=fpl, print_line=False)
        assert fpl.getvalue() == long_line + "\n"
        fpl.close()

    def test_write_l_unicode_content(self):
        """Test write_l with unicode content"""
        fpl = io.StringIO()
        unicode_line = "Hello 世界 🌍 Привет"
        write_l(unicode_line, fpl=fpl, print_line=False)
        assert fpl.getvalue() == unicode_line + "\n"
        fpl.close()

    def test_write_l_default_parameters(self, capsys: CaptureFixture[str]):
        """Test write_l with default parameters"""
        write_l("Test")
        captured = capsys.readouterr()
        # Default print_line is False
        assert captured.out == ""

    def test_write_l_with_newline_in_string(self):
        """Test write_l with newline characters in the string"""
        fpl = io.StringIO()
        write_l("Line with\nnewline", fpl=fpl, print_line=False)
        assert fpl.getvalue() == "Line with\nnewline\n"
        fpl.close()


class TestPrHeader:
|
||||
"""Test cases for pr_header function"""
|
||||
|
||||
def test_pr_header_default(self, capsys: CaptureFixture[str]):
|
||||
"""Test pr_header with default parameters"""
|
||||
pr_header("TEST")
|
||||
captured = capsys.readouterr()
|
||||
assert "#" in captured.out
|
||||
assert "TEST" in captured.out
|
||||
|
||||
def test_pr_header_custom_marker(self, capsys: CaptureFixture[str]):
|
||||
"""Test pr_header with custom marker string"""
|
||||
pr_header("TEST", marker_string="*")
|
||||
captured = capsys.readouterr()
|
||||
assert "*" in captured.out
|
||||
assert "TEST" in captured.out
|
||||
assert "#" not in captured.out
|
||||
|
||||
def test_pr_header_custom_width(self, capsys: CaptureFixture[str]):
|
||||
"""Test pr_header with custom width"""
|
||||
pr_header("TEST", width=50)
|
||||
captured = capsys.readouterr()
|
||||
# Check that output is formatted
|
||||
assert "TEST" in captured.out
|
||||
|
||||
def test_pr_header_short_tag(self, capsys: CaptureFixture[str]):
|
||||
"""Test pr_header with short tag"""
|
||||
pr_header("X")
|
||||
captured = capsys.readouterr()
|
||||
assert "X" in captured.out
|
||||
assert "#" in captured.out
|
||||
|
||||
def test_pr_header_long_tag(self, capsys: CaptureFixture[str]):
|
||||
"""Test pr_header with long tag"""
|
||||
pr_header("This is a very long header tag")
|
||||
captured = capsys.readouterr()
|
||||
assert "This is a very long header tag" in captured.out
|
||||
|
||||
def test_pr_header_empty_tag(self, capsys: CaptureFixture[str]):
|
||||
"""Test pr_header with empty tag"""
|
||||
pr_header("")
|
||||
captured = capsys.readouterr()
|
||||
assert "#" in captured.out
|
||||
|
||||
def test_pr_header_special_characters(self, capsys: CaptureFixture[str]):
|
||||
"""Test pr_header with special characters in tag"""
|
||||
pr_header("TEST: 123! @#$")
|
||||
captured = capsys.readouterr()
|
||||
assert "TEST: 123! @#$" in captured.out
|
||||
|
||||
def test_pr_header_unicode(self, capsys: CaptureFixture[str]):
|
||||
"""Test pr_header with unicode characters"""
|
||||
pr_header("テスト 🎉")
|
||||
captured = capsys.readouterr()
|
||||
assert "テスト 🎉" in captured.out
|
||||
|
||||
def test_pr_header_various_markers(self, capsys: CaptureFixture[str]):
|
||||
"""Test pr_header with various marker strings"""
|
||||
markers = ["*", "=", "-", "+", "~", "@"]
|
||||
for marker in markers:
|
||||
pr_header("TEST", marker_string=marker)
|
||||
captured = capsys.readouterr()
|
||||
assert marker in captured.out
|
||||
assert "TEST" in captured.out
|
||||
|
||||
def test_pr_header_zero_width(self, capsys: CaptureFixture[str]):
|
||||
"""Test pr_header with width of 0"""
|
||||
pr_header("TEST", width=0)
|
||||
captured = capsys.readouterr()
|
||||
assert "TEST" in captured.out
|
||||
|
||||
def test_pr_header_large_width(self, capsys: CaptureFixture[str]):
|
||||
"""Test pr_header with large width"""
|
||||
pr_header("TEST", width=100)
|
||||
captured = capsys.readouterr()
|
||||
assert "TEST" in captured.out
|
||||
assert "#" in captured.out
|
||||
|
||||
def test_pr_header_format(self, capsys: CaptureFixture[str]):
|
||||
"""Test pr_header output format"""
|
||||
pr_header("CENTER", marker_string="#", width=20)
|
||||
captured = capsys.readouterr()
|
||||
# Should have spaces around centered text
|
||||
assert " CENTER " in captured.out or "CENTER" in captured.out
|
||||
|
||||
|
||||


class TestPrTitle:
    """Test cases for pr_title function"""

    def test_pr_title_default(self, capsys: CaptureFixture[str]):
        """Test pr_title with default parameters"""
        pr_title("Test Title")
        captured = capsys.readouterr()
        assert "Test Title" in captured.out
        assert "|" in captured.out
        assert "." in captured.out
        assert ":" in captured.out

    def test_pr_title_custom_prefix(self, capsys: CaptureFixture[str]):
        """Test pr_title with custom prefix string"""
        pr_title("Test", prefix_string=">")
        captured = capsys.readouterr()
        assert ">" in captured.out
        assert "Test" in captured.out
        assert "|" not in captured.out

    def test_pr_title_custom_space_filler(self, capsys: CaptureFixture[str]):
        """Test pr_title with custom space filler"""
        pr_title("Test", space_filler="-")
        captured = capsys.readouterr()
        assert "Test" in captured.out
        assert "-" in captured.out
        assert "." not in captured.out

    def test_pr_title_custom_width(self, capsys: CaptureFixture[str]):
        """Test pr_title with custom width"""
        pr_title("Test", width=50)
        captured = capsys.readouterr()
        assert "Test" in captured.out

    def test_pr_title_short_tag(self, capsys: CaptureFixture[str]):
        """Test pr_title with short tag"""
        pr_title("X")
        captured = capsys.readouterr()
        assert "X" in captured.out
        assert "." in captured.out

    def test_pr_title_long_tag(self, capsys: CaptureFixture[str]):
        """Test pr_title with long tag"""
        pr_title("This is a very long title tag")
        captured = capsys.readouterr()
        assert "This is a very long title tag" in captured.out

    def test_pr_title_empty_tag(self, capsys: CaptureFixture[str]):
        """Test pr_title with empty tag"""
        pr_title("")
        captured = capsys.readouterr()
        assert "|" in captured.out
        assert ":" in captured.out

    def test_pr_title_special_characters(self, capsys: CaptureFixture[str]):
        """Test pr_title with special characters"""
        pr_title("Task #123!")
        captured = capsys.readouterr()
        assert "Task #123!" in captured.out

    def test_pr_title_unicode(self, capsys: CaptureFixture[str]):
        """Test pr_title with unicode characters"""
        pr_title("タイトル 📝")
        captured = capsys.readouterr()
        assert "タイトル 📝" in captured.out

    def test_pr_title_various_fillers(self, capsys: CaptureFixture[str]):
        """Test pr_title with various space fillers"""
        fillers = [".", "-", "_", "*", " ", "~"]
        for filler in fillers:
            pr_title("Test", space_filler=filler)
            captured = capsys.readouterr()
            assert "Test" in captured.out

    def test_pr_title_zero_width(self, capsys: CaptureFixture[str]):
        """Test pr_title with width of 0"""
        pr_title("Test", width=0)
        captured = capsys.readouterr()
        assert "Test" in captured.out

    def test_pr_title_large_width(self, capsys: CaptureFixture[str]):
        """Test pr_title with large width"""
        pr_title("Test", width=100)
        captured = capsys.readouterr()
        assert "Test" in captured.out

    def test_pr_title_format_left_align(self, capsys: CaptureFixture[str]):
        """Test pr_title output format (should be left-aligned with filler)"""
        pr_title("Start", space_filler=".", width=10)
        captured = capsys.readouterr()
        # Should have the tag followed by dots
        assert "Start" in captured.out
        assert ":" in captured.out


class TestPrOpen:
    """Test cases for pr_open function"""

    def test_pr_open_default(self, capsys: CaptureFixture[str]):
        """Test pr_open with default parameters"""
        pr_open("Processing")
        captured = capsys.readouterr()
        assert "Processing" in captured.out
        assert "|" in captured.out
        assert "." in captured.out
        assert "[" in captured.out
        # Should not have newline at the end
        assert not captured.out.endswith("\n")

    def test_pr_open_custom_prefix(self, capsys: CaptureFixture[str]):
        """Test pr_open with custom prefix string"""
        pr_open("Task", prefix_string=">")
        captured = capsys.readouterr()
        assert ">" in captured.out
        assert "Task" in captured.out
        assert "|" not in captured.out

    def test_pr_open_custom_space_filler(self, capsys: CaptureFixture[str]):
        """Test pr_open with custom space filler"""
        pr_open("Task", space_filler="-")
        captured = capsys.readouterr()
        assert "Task" in captured.out
        assert "-" in captured.out
        assert "." not in captured.out

    def test_pr_open_custom_width(self, capsys: CaptureFixture[str]):
        """Test pr_open with custom width"""
        pr_open("Task", width=50)
        captured = capsys.readouterr()
        assert "Task" in captured.out
        assert "[" in captured.out

    def test_pr_open_short_tag(self, capsys: CaptureFixture[str]):
        """Test pr_open with short tag"""
        pr_open("X")
        captured = capsys.readouterr()
        assert "X" in captured.out
        assert "[" in captured.out

    def test_pr_open_long_tag(self, capsys: CaptureFixture[str]):
        """Test pr_open with long tag"""
        pr_open("This is a very long task tag")
        captured = capsys.readouterr()
        assert "This is a very long task tag" in captured.out

    def test_pr_open_empty_tag(self, capsys: CaptureFixture[str]):
        """Test pr_open with empty tag"""
        pr_open("")
        captured = capsys.readouterr()
        assert "[" in captured.out
        assert "|" in captured.out

    def test_pr_open_no_newline(self, capsys: CaptureFixture[str]):
        """Test pr_open doesn't end with newline"""
        pr_open("Test")
        captured = capsys.readouterr()
        # Output should not end with newline (uses end="")
        assert not captured.out.endswith("\n")

    def test_pr_open_special_characters(self, capsys: CaptureFixture[str]):
        """Test pr_open with special characters"""
        pr_open("Loading: 50%")
        captured = capsys.readouterr()
        assert "Loading: 50%" in captured.out

    def test_pr_open_unicode(self, capsys: CaptureFixture[str]):
        """Test pr_open with unicode characters"""
        pr_open("処理中 ⏳")
        captured = capsys.readouterr()
        assert "処理中 ⏳" in captured.out

    def test_pr_open_format(self, capsys: CaptureFixture[str]):
        """Test pr_open output format"""
        pr_open("Task", prefix_string="|", space_filler=".", width=20)
        captured = capsys.readouterr()
        assert "|" in captured.out
        assert "Task" in captured.out
        assert "[" in captured.out
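
# Behaviour implied by the assertions in this file (a sketch, not a verbatim
# copy of the implementation): pr_open() prints the prefixed, filler-padded
# tag plus an opening "[" with end="", pr_act() prints its character with
# end="", and pr_close() finishes the open bracket by printing "<tag>]\n".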


class TestPrClose:
    """Test cases for pr_close function"""

    def test_pr_close_default(self, capsys: CaptureFixture[str]):
        """Test pr_close with default (empty) tag"""
        pr_close()
        captured = capsys.readouterr()
        assert captured.out == "]\n"

    def test_pr_close_with_tag(self, capsys: CaptureFixture[str]):
        """Test pr_close with custom tag"""
        pr_close("DONE")
        captured = capsys.readouterr()
        assert "DONE" in captured.out
        assert "]" in captured.out
        assert captured.out.endswith("\n")

    def test_pr_close_with_space(self, capsys: CaptureFixture[str]):
        """Test pr_close with space in tag"""
        pr_close(" OK ")
        captured = capsys.readouterr()
        assert " OK " in captured.out
        assert "]" in captured.out

    def test_pr_close_empty_string(self, capsys: CaptureFixture[str]):
        """Test pr_close with empty string (same as default)"""
        pr_close("")
        captured = capsys.readouterr()
        assert captured.out == "]\n"

    def test_pr_close_special_characters(self, capsys: CaptureFixture[str]):
        """Test pr_close with special characters"""
        pr_close("✓")
        captured = capsys.readouterr()
        assert "✓" in captured.out
        assert "]" in captured.out

    def test_pr_close_unicode(self, capsys: CaptureFixture[str]):
        """Test pr_close with unicode characters"""
        pr_close("完了")
        captured = capsys.readouterr()
        assert "完了" in captured.out
        assert "]" in captured.out

    def test_pr_close_newline(self, capsys: CaptureFixture[str]):
        """Test pr_close ends with newline"""
        pr_close("OK")
        captured = capsys.readouterr()
        assert captured.out.endswith("\n")

    def test_pr_close_various_tags(self, capsys: CaptureFixture[str]):
        """Test pr_close with various tags"""
        tags = ["OK", "DONE", "✓", "✗", "SKIP", "PASS", "FAIL"]
        for tag in tags:
            pr_close(tag)
            captured = capsys.readouterr()
            assert tag in captured.out
            assert "]" in captured.out


class TestPrAct:
    """Test cases for pr_act function"""

    def test_pr_act_default(self, capsys: CaptureFixture[str]):
        """Test pr_act with default dot"""
        pr_act()
        captured = capsys.readouterr()
        assert captured.out == "."
        assert not captured.out.endswith("\n")

    def test_pr_act_custom_character(self, capsys: CaptureFixture[str]):
        """Test pr_act with custom character"""
        pr_act("#")
        captured = capsys.readouterr()
        assert captured.out == "#"

    def test_pr_act_multiple_calls(self, capsys: CaptureFixture[str]):
        """Test pr_act with multiple calls"""
        pr_act(".")
        pr_act(".")
        pr_act(".")
        captured = capsys.readouterr()
        assert captured.out == "..."

    def test_pr_act_various_characters(self, capsys: CaptureFixture[str]):
        """Test pr_act with various characters"""
        characters = [".", "#", "*", "+", "-", "=", ">", "~"]
        for char in characters:
            pr_act(char)
        captured = capsys.readouterr()
        assert "".join(characters) in captured.out

    def test_pr_act_empty_string(self, capsys: CaptureFixture[str]):
        """Test pr_act with empty string"""
        pr_act("")
        captured = capsys.readouterr()
        assert captured.out == ""

    def test_pr_act_special_character(self, capsys: CaptureFixture[str]):
        """Test pr_act with special characters"""
        pr_act("✓")
        captured = capsys.readouterr()
        assert captured.out == "✓"

    def test_pr_act_unicode(self, capsys: CaptureFixture[str]):
        """Test pr_act with unicode character"""
        pr_act("●")
        captured = capsys.readouterr()
        assert captured.out == "●"

    def test_pr_act_no_newline(self, capsys: CaptureFixture[str]):
        """Test pr_act doesn't add newline"""
        pr_act("x")
        captured = capsys.readouterr()
        assert not captured.out.endswith("\n")

    def test_pr_act_multiple_characters(self, capsys: CaptureFixture[str]):
        """Test pr_act with multiple characters in string"""
        pr_act("...")
        captured = capsys.readouterr()
        assert captured.out == "..."

    def test_pr_act_whitespace(self, capsys: CaptureFixture[str]):
        """Test pr_act with whitespace"""
        pr_act(" ")
        captured = capsys.readouterr()
        assert captured.out == " "


class TestProgressCombinations:
    """Test combinations of progress printer functions"""

    def test_complete_progress_flow(self, capsys: CaptureFixture[str]):
        """Test complete progress output flow"""
        pr_header("PROCESS")
        pr_title("Task 1")
        pr_open("Subtask")
        pr_act(".")
        pr_act(".")
        pr_act(".")
        pr_close(" OK")
        captured = capsys.readouterr()

        assert "PROCESS" in captured.out
        assert "Task 1" in captured.out
        assert "Subtask" in captured.out
        assert "..." in captured.out
        assert " OK]" in captured.out

    def test_multiple_tasks_progress(self, capsys: CaptureFixture[str]):
        """Test multiple tasks with progress"""
        pr_header("BATCH PROCESS")
        for i in range(3):
            pr_open(f"Task {i + 1}")
            for _ in range(5):
                pr_act(".")
            pr_close(" DONE")
        captured = capsys.readouterr()

        assert "BATCH PROCESS" in captured.out
        assert "Task 1" in captured.out
        assert "Task 2" in captured.out
        assert "Task 3" in captured.out
        assert " DONE]" in captured.out

    def test_nested_progress(self, capsys: CaptureFixture[str]):
        """Test nested progress indicators"""
        pr_header("MAIN TASK", marker_string="=")
        pr_title("Subtask A", prefix_string=">")
        pr_open("Processing")
        pr_act("#")
        pr_act("#")
        pr_close()
        pr_title("Subtask B", prefix_string=">")
        pr_open("Processing")
        pr_act("*")
        pr_act("*")
        pr_close(" OK")
        captured = capsys.readouterr()

        assert "MAIN TASK" in captured.out
        assert "Subtask A" in captured.out
        assert "Subtask B" in captured.out
        assert "##" in captured.out
        assert "**" in captured.out

    def test_progress_with_different_markers(self, capsys: CaptureFixture[str]):
        """Test progress with different marker styles"""
        pr_header("Process", marker_string="*")
        pr_title("Step 1", prefix_string=">>", space_filler="-")
        pr_open("Work", prefix_string=">>", space_filler="-")
        pr_act("+")
        pr_close(" ✓")
        captured = capsys.readouterr()

        assert "*" in captured.out
        assert ">>" in captured.out
        assert "-" in captured.out
        assert "+" in captured.out
        assert "✓" in captured.out

    def test_empty_progress_sequence(self, capsys: CaptureFixture[str]):
        """Test progress sequence with no actual progress"""
        pr_open("Quick task")
        pr_close(" SKIP")
        captured = capsys.readouterr()

        assert "Quick task" in captured.out
        assert " SKIP]" in captured.out
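
# Rough console shape implied by test_complete_progress_flow above (the
# markers and fillers are asserted; widths and padding are illustrative only):
#   ###### PROCESS ######
#   | Task 1 ...........:
#   | Subtask ..........[... OK]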


class TestIntegration:
    """Integration tests combining multiple scenarios"""

    def test_file_and_console_logging(self, capsys: CaptureFixture[str]):
        """Test logging to both file and console"""
        fpl = io.StringIO()

        write_l("Starting process", fpl=fpl, print_line=True)
        write_l("Processing item 1", fpl=fpl, print_line=True)
        write_l("Processing item 2", fpl=fpl, print_line=True)
        write_l("Complete", fpl=fpl, print_line=True)

        captured = capsys.readouterr()
        file_content = fpl.getvalue()

        # Check console output
        assert "Starting process\n" in captured.out
        assert "Processing item 1\n" in captured.out
        assert "Processing item 2\n" in captured.out
        assert "Complete\n" in captured.out

        # Check file output
        assert "Starting process\n" in file_content
        assert "Processing item 1\n" in file_content
        assert "Processing item 2\n" in file_content
        assert "Complete\n" in file_content

        fpl.close()

    def test_progress_with_logging(self, capsys: CaptureFixture[str]):
        """Test combining progress output with file logging"""
        fpl = io.StringIO()

        write_l("=== Process Start ===", fpl=fpl, print_line=True)
        pr_header("MAIN PROCESS")
        write_l("Header shown", fpl=fpl, print_line=False)

        pr_open("Task 1")
        pr_act(".")
        pr_act(".")
        pr_close(" OK")
        write_l("Task 1 completed", fpl=fpl, print_line=False)

        write_l("=== Process End ===", fpl=fpl, print_line=True)

        captured = capsys.readouterr()
        file_content = fpl.getvalue()

        assert "=== Process Start ===" in captured.out
        assert "MAIN PROCESS" in captured.out
        assert "Task 1" in captured.out
        assert "=== Process End ===" in captured.out

        assert "=== Process Start ===\n" in file_content
        assert "Header shown\n" in file_content
        assert "Task 1 completed\n" in file_content
        assert "=== Process End ===\n" in file_content

        fpl.close()

    def test_complex_workflow(self, capsys: CaptureFixture[str]):
        """Test complex workflow with all functions"""
        fpl = io.StringIO()

        write_l("Log: Starting batch process", fpl=fpl, print_line=False)
        pr_header("BATCH PROCESSOR", marker_string="=", width=40)

        for i in range(2):
            write_l(f"Log: Processing batch {i + 1}", fpl=fpl, print_line=False)
            pr_title(f"Batch {i + 1}", prefix_string="|", space_filler=".")

            pr_open(f"Item {i + 1}", prefix_string="|", space_filler=".")
            for j in range(3):
                pr_act("*")
                write_l(f"Log: Progress {j + 1}/3", fpl=fpl, print_line=False)
            pr_close(" ✓")

            write_l(f"Log: Batch {i + 1} complete", fpl=fpl, print_line=False)

        write_l("Log: All batches complete", fpl=fpl, print_line=False)

        captured = capsys.readouterr()
        file_content = fpl.getvalue()

        # Check console has progress indicators
        assert "BATCH PROCESSOR" in captured.out
        assert "Batch 1" in captured.out
        assert "Batch 2" in captured.out
        assert "***" in captured.out
        assert "✓" in captured.out

        # Check file has all log entries
        assert "Log: Starting batch process\n" in file_content
        assert "Log: Processing batch 1\n" in file_content
        assert "Log: Processing batch 2\n" in file_content
        assert "Log: Progress 1/3\n" in file_content
        assert "Log: Batch 1 complete\n" in file_content
        assert "Log: All batches complete\n" in file_content

        fpl.close()


class TestEdgeCases:
    """Test edge cases and boundary conditions"""

    def test_write_l_none_file_handler(self, capsys: CaptureFixture[str]):
        """Test write_l explicitly with None file handler"""
        write_l("Test", fpl=None, print_line=True)
        captured = capsys.readouterr()
        assert captured.out == "Test\n"

    def test_pr_header_negative_width(self):
        """Test pr_header with negative width raises ValueError"""
        with pytest.raises(ValueError):
            pr_header("Test", width=-10)

    def test_pr_title_negative_width(self):
        """Test pr_title with negative width raises ValueError"""
        with pytest.raises(ValueError):
            pr_title("Test", width=-10)

    def test_pr_open_negative_width(self):
        """Test pr_open with negative width raises ValueError"""
        with pytest.raises(ValueError):
            pr_open("Test", width=-10)

    def test_multiple_pr_act_no_close(self, capsys: CaptureFixture[str]):
        """Test multiple pr_act calls without pr_close"""
        pr_act(".")
        pr_act(".")
        pr_act(".")
        captured = capsys.readouterr()
        assert captured.out == "..."

    def test_pr_close_without_pr_open(self, capsys: CaptureFixture[str]):
        """Test pr_close without prior pr_open (should still work)"""
        pr_close(" OK")
        captured = capsys.readouterr()
        assert " OK]" in captured.out

    def test_very_long_strings(self):
        """Test with very long strings"""
        fpl = io.StringIO()
        long_str = "A" * 10000
        write_l(long_str, fpl=fpl, print_line=False)
        assert len(fpl.getvalue()) == 10001  # string + newline
        fpl.close()

    def test_pr_header_very_long_tag(self, capsys: CaptureFixture[str]):
        """Test pr_header with tag longer than width"""
        pr_header("This is a very long tag that exceeds the width", width=10)
        captured = capsys.readouterr()
        assert "This is a very long tag that exceeds the width" in captured.out

    def test_pr_title_very_long_tag(self, capsys: CaptureFixture[str]):
        """Test pr_title with tag longer than width"""
        pr_title("This is a very long tag that exceeds the width", width=10)
        captured = capsys.readouterr()
        assert "This is a very long tag that exceeds the width" in captured.out

    def test_write_l_closed_file(self):
        """Test write_l with closed file should raise error"""
        fpl = io.StringIO()
        fpl.close()

        with pytest.raises(ValueError):
            write_l("Test", fpl=fpl, print_line=False)


class TestParametrized:
    """Parametrized tests for comprehensive coverage"""

    @pytest.mark.parametrize("print_line", [True, False])
    def test_write_l_print_line_variations(self, print_line: bool, capsys: CaptureFixture[str]):
        """Test write_l with different print_line values"""
        write_l("Test", print_line=print_line)
        captured = capsys.readouterr()
        if print_line:
            assert captured.out == "Test\n"
        else:
            assert captured.out == ""

    @pytest.mark.parametrize("marker", ["#", "*", "=", "-", "+", "~", "@", "^"])
    def test_pr_header_various_markers_param(self, marker: str, capsys: CaptureFixture[str]):
        """Test pr_header with various markers"""
        pr_header("TEST", marker_string=marker)
        captured = capsys.readouterr()
        assert marker in captured.out
        assert "TEST" in captured.out

    @pytest.mark.parametrize("width", [0, 5, 10, 20, 35, 50, 100])
    def test_pr_header_various_widths(self, width: int, capsys: CaptureFixture[str]):
        """Test pr_header with various widths"""
        pr_header("TEST", width=width)
        captured = capsys.readouterr()
        assert "TEST" in captured.out

    @pytest.mark.parametrize("filler", [".", "-", "_", "*", " ", "~", "="])
    def test_pr_title_various_fillers_param(self, filler: str, capsys: CaptureFixture[str]):
        """Test pr_title with various space fillers"""
        pr_title("Test", space_filler=filler)
        captured = capsys.readouterr()
        assert "Test" in captured.out

    @pytest.mark.parametrize("prefix", ["|", ">", ">>", "*", "-", "+"])
    def test_pr_title_various_prefixes(self, prefix: str, capsys: CaptureFixture[str]):
        """Test pr_title with various prefix strings"""
        pr_title("Test", prefix_string=prefix)
        captured = capsys.readouterr()
        assert prefix in captured.out
        assert "Test" in captured.out

    @pytest.mark.parametrize("act_char", [".", "#", "*", "+", "-", "=", ">", "~", "✓", "●"])
    def test_pr_act_various_characters_param(self, act_char: str, capsys: CaptureFixture[str]):
        """Test pr_act with various characters"""
        pr_act(act_char)
        captured = capsys.readouterr()
        assert captured.out == act_char

    @pytest.mark.parametrize("close_tag", ["", " OK", " DONE", " ✓", " ✗", " SKIP", " PASS"])
    def test_pr_close_various_tags_param(self, close_tag: str, capsys: CaptureFixture[str]):
        """Test pr_close with various tags"""
        pr_close(close_tag)
        captured = capsys.readouterr()
        assert f"{close_tag}]" in captured.out

    @pytest.mark.parametrize("content", [
        "Simple text",
        "Text with 特殊文字",
        "Text with emoji 🎉",
        "Text\twith\ttabs",
        "Multiple\n\nNewlines",
        "",
        "A" * 100,
    ])
    def test_write_l_various_content(self, content: str, capsys: CaptureFixture[str]):
        """Test write_l with various content types"""
        fpl = io.StringIO()
        write_l(content, fpl=fpl, print_line=True)
        captured = capsys.readouterr()
        assert content in captured.out
        assert content + "\n" in fpl.getvalue()
        fpl.close()


class TestRealWorldScenarios:
    """Test real-world usage scenarios"""

    def test_batch_processing_output(self, capsys: CaptureFixture[str]):
        """Test typical batch processing output"""
        pr_header("BATCH PROCESSOR", marker_string="=", width=50)

        items = ["file1.txt", "file2.txt", "file3.txt"]
        for item in items:
            pr_open(f"Processing {item}")
            for _ in range(10):
                pr_act(".")
            pr_close(" ✓")

        captured = capsys.readouterr()
        assert "BATCH PROCESSOR" in captured.out
        for item in items:
            assert item in captured.out
        assert "✓" in captured.out

    def test_logging_workflow(self, capsys: CaptureFixture[str]):
        """Test typical logging workflow"""
        log_file = io.StringIO()

        # Simulate a workflow with logging
        write_l("[INFO] Starting process", fpl=log_file, print_line=True)
        write_l("[INFO] Initializing components", fpl=log_file, print_line=True)
        write_l("[DEBUG] Component A loaded", fpl=log_file, print_line=False)
        write_l("[DEBUG] Component B loaded", fpl=log_file, print_line=False)
        write_l("[INFO] Processing data", fpl=log_file, print_line=True)
        write_l("[INFO] Process complete", fpl=log_file, print_line=True)

        captured = capsys.readouterr()
        log_content = log_file.getvalue()

        # Console should only have INFO messages
        assert "[INFO] Starting process" in captured.out
        assert "[DEBUG] Component A loaded" not in captured.out

        # Log file should have all messages
        assert "[INFO] Starting process\n" in log_content
        assert "[DEBUG] Component A loaded\n" in log_content
        assert "[DEBUG] Component B loaded\n" in log_content

        log_file.close()
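
    # Note: print_line doubles as a crude verbosity switch in the workflow
    # above: [INFO] lines go to both console and file, [DEBUG] lines to the
    # file only.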

    def test_progress_indicator_for_long_task(self, capsys: CaptureFixture[str]):
        """Test progress indicator for a long-running task"""
        pr_header("DATA PROCESSING")
        pr_open("Loading data", width=50)

        # Simulate progress
        for i in range(20):
            if i % 5 == 0:
                pr_act(str(i // 5))
            else:
                pr_act(".")

        pr_close(" COMPLETE")

        captured = capsys.readouterr()
        assert "DATA PROCESSING" in captured.out
        assert "Loading data" in captured.out
        assert "COMPLETE" in captured.out

    def test_multi_stage_process(self, capsys: CaptureFixture[str]):
        """Test multi-stage process with titles and progress"""
        pr_header("DEPLOYMENT PIPELINE", marker_string="=")

        stages = ["Build", "Test", "Deploy"]
        for stage in stages:
            pr_title(stage)
            pr_open(f"Running {stage.lower()}")
            pr_act("#")
            pr_act("#")
            pr_act("#")
            pr_close(" OK")

        captured = capsys.readouterr()
        assert "DEPLOYMENT PIPELINE" in captured.out
        for stage in stages:
            assert stage in captured.out
        assert "###" in captured.out

    def test_error_reporting_with_logging(self, capsys: CaptureFixture[str]):
        """Test error reporting workflow"""
        error_log = io.StringIO()

        pr_header("VALIDATION", marker_string="!")
        pr_open("Checking files")

        write_l("[ERROR] File not found: data.csv", fpl=error_log, print_line=False)
        pr_act("✗")

        write_l("[ERROR] Permission denied: output.txt", fpl=error_log, print_line=False)
        pr_act("✗")

        pr_close(" FAILED")

        captured = capsys.readouterr()
        log_content = error_log.getvalue()

        assert "VALIDATION" in captured.out
        assert "Checking files" in captured.out
        assert "✗✗" in captured.out
        assert "FAILED" in captured.out

        assert "[ERROR] File not found: data.csv\n" in log_content
        assert "[ERROR] Permission denied: output.txt\n" in log_content

        error_log.close()

    def test_detailed_reporting(self, capsys: CaptureFixture[str]):
        """Test detailed reporting with mixed output"""
        report_file = io.StringIO()

        pr_header("SYSTEM REPORT", marker_string="#", width=60)
        write_l("=== System Report Generated ===", fpl=report_file, print_line=False)

        pr_title("Database Status", prefix_string=">>")
        write_l("Database: Connected", fpl=report_file, print_line=False)
        write_l("Tables: 15", fpl=report_file, print_line=False)
        write_l("Records: 1,234,567", fpl=report_file, print_line=False)

        pr_title("API Status", prefix_string=">>")
        write_l("API: Online", fpl=report_file, print_line=False)
        write_l("Requests/min: 1,500", fpl=report_file, print_line=False)

        write_l("=== Report Complete ===", fpl=report_file, print_line=False)

        captured = capsys.readouterr()
        report_content = report_file.getvalue()

        assert "SYSTEM REPORT" in captured.out
        assert "Database Status" in captured.out
        assert "API Status" in captured.out

        assert "=== System Report Generated ===\n" in report_content
        assert "Database: Connected\n" in report_content
        assert "API: Online\n" in report_content
        assert "=== Report Complete ===\n" in report_content

        report_file.close()

# __END__
@@ -1,3 +0,0 @@
"""
Unit tests for encryption_handling module
"""
@@ -1,665 +0,0 @@
"""
PyTest: encryption_handling/symmetric_encryption
"""
# pylint: disable=redefined-outer-name
# ^ Disabled because pytest fixtures intentionally redefine names

import os
import json
import base64
import hashlib
import pytest
from corelibs.encryption_handling.symmetric_encryption import (
    SymmetricEncryption
)
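
# Package layout inferred from the assertions in this file (illustrative
# sketch, not copied from the implementation):
#   {
#       "encrypted_data": "<urlsafe base64 Fernet-style token>",
#       "salt": "<urlsafe base64 of 16 random bytes>",
#       "key_hash": "<64-char sha256 hex>"
#   }
# A fresh salt is drawn per call, the encryption key is derived from
# password + salt, and key_hash lets decryption reject a wrong password
# with "Key hash is not matching".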


class TestSymmetricEncryptionInitialization:
    """Tests for SymmetricEncryption initialization"""

    def test_valid_password_initialization(self):
        """Test initialization with a valid password"""
        encryptor = SymmetricEncryption("test_password")
        assert encryptor.password == "test_password"
        assert encryptor.password_hash == hashlib.sha256("test_password".encode('utf-8')).hexdigest()

    def test_empty_password_raises_error(self):
        """Test that empty password raises ValueError"""
        with pytest.raises(ValueError, match="A password must be set"):
            SymmetricEncryption("")

    def test_password_hash_is_consistent(self):
        """Test that password hash is consistently generated"""
        encryptor1 = SymmetricEncryption("test_password")
        encryptor2 = SymmetricEncryption("test_password")
        assert encryptor1.password_hash == encryptor2.password_hash

    def test_different_passwords_different_hashes(self):
        """Test that different passwords produce different hashes"""
        encryptor1 = SymmetricEncryption("password1")
        encryptor2 = SymmetricEncryption("password2")
        assert encryptor1.password_hash != encryptor2.password_hash


class TestEncryptWithMetadataReturnDict:
    """Tests for encrypt_with_metadata_return_dict method"""

    def test_encrypt_string_returns_package_data(self):
        """Test encrypting a string returns PackageData dict"""
        encryptor = SymmetricEncryption("test_password")
        result = encryptor.encrypt_with_metadata_return_dict("test data")

        assert isinstance(result, dict)
        assert 'encrypted_data' in result
        assert 'salt' in result
        assert 'key_hash' in result

    def test_encrypt_bytes_returns_package_data(self):
        """Test encrypting bytes returns PackageData dict"""
        encryptor = SymmetricEncryption("test_password")
        result = encryptor.encrypt_with_metadata_return_dict(b"test data")

        assert isinstance(result, dict)
        assert 'encrypted_data' in result
        assert 'salt' in result
        assert 'key_hash' in result

    def test_encrypted_data_is_base64_encoded(self):
        """Test that encrypted_data is base64 encoded"""
        encryptor = SymmetricEncryption("test_password")
        result = encryptor.encrypt_with_metadata_return_dict("test data")

        # Should not raise exception when decoding
        base64.urlsafe_b64decode(result['encrypted_data'])

    def test_salt_is_base64_encoded(self):
        """Test that salt is base64 encoded"""
        encryptor = SymmetricEncryption("test_password")
        result = encryptor.encrypt_with_metadata_return_dict("test data")

        # Should not raise exception when decoding
        salt = base64.urlsafe_b64decode(result['salt'])
        # Salt should be 16 bytes
        assert len(salt) == 16

    def test_key_hash_is_valid_hex(self):
        """Test that key_hash is a valid hex string"""
        encryptor = SymmetricEncryption("test_password")
        result = encryptor.encrypt_with_metadata_return_dict("test data")

        # Should be 64 characters (SHA256 hex)
        assert len(result['key_hash']) == 64
        # Should only contain hex characters
        int(result['key_hash'], 16)

    def test_different_salts_for_each_encryption(self):
        """Test that each encryption uses a different salt"""
        encryptor = SymmetricEncryption("test_password")
        result1 = encryptor.encrypt_with_metadata_return_dict("test data")
        result2 = encryptor.encrypt_with_metadata_return_dict("test data")

        assert result1['salt'] != result2['salt']
        assert result1['encrypted_data'] != result2['encrypted_data']


class TestEncryptWithMetadataReturnStr:
    """Tests for encrypt_with_metadata_return_str method"""

    def test_returns_json_string(self):
        """Test that method returns a valid JSON string"""
        encryptor = SymmetricEncryption("test_password")
        result = encryptor.encrypt_with_metadata_return_str("test data")

        assert isinstance(result, str)
        # Should be valid JSON
        parsed = json.loads(result)
        assert 'encrypted_data' in parsed
        assert 'salt' in parsed
        assert 'key_hash' in parsed

    def test_json_string_parseable(self):
        """Test that returned JSON string can be parsed back"""
        encryptor = SymmetricEncryption("test_password")
        result = encryptor.encrypt_with_metadata_return_str("test data")

        parsed = json.loads(result)
        assert isinstance(parsed, dict)


class TestEncryptWithMetadataReturnBytes:
    """Tests for encrypt_with_metadata_return_bytes method"""

    def test_returns_bytes(self):
        """Test that method returns bytes"""
        encryptor = SymmetricEncryption("test_password")
        result = encryptor.encrypt_with_metadata_return_bytes("test data")

        assert isinstance(result, bytes)

    def test_bytes_contains_valid_json(self):
        """Test that returned bytes contain valid JSON"""
        encryptor = SymmetricEncryption("test_password")
        result = encryptor.encrypt_with_metadata_return_bytes("test data")

        # Should be valid JSON when decoded
        parsed = json.loads(result.decode('utf-8'))
        assert 'encrypted_data' in parsed
        assert 'salt' in parsed
        assert 'key_hash' in parsed


class TestEncryptWithMetadata:
    """Tests for encrypt_with_metadata method with different return types"""

    def test_return_as_str(self):
        """Test encrypt_with_metadata with return_as='str'"""
        encryptor = SymmetricEncryption("test_password")
        result = encryptor.encrypt_with_metadata("test data", return_as='str')

        assert isinstance(result, str)
        json.loads(result)  # Should be valid JSON

    def test_return_as_json(self):
        """Test encrypt_with_metadata with return_as='json'"""
        encryptor = SymmetricEncryption("test_password")
        result = encryptor.encrypt_with_metadata("test data", return_as='json')

        assert isinstance(result, str)
        json.loads(result)  # Should be valid JSON

    def test_return_as_bytes(self):
        """Test encrypt_with_metadata with return_as='bytes'"""
        encryptor = SymmetricEncryption("test_password")
        result = encryptor.encrypt_with_metadata("test data", return_as='bytes')

        assert isinstance(result, bytes)

    def test_return_as_dict(self):
        """Test encrypt_with_metadata with return_as='dict'"""
        encryptor = SymmetricEncryption("test_password")
        result = encryptor.encrypt_with_metadata("test data", return_as='dict')

        assert isinstance(result, dict)
        assert 'encrypted_data' in result

    def test_default_return_type(self):
        """Test encrypt_with_metadata default return type"""
        encryptor = SymmetricEncryption("test_password")
        result = encryptor.encrypt_with_metadata("test data")

        # Default should be 'str'
        assert isinstance(result, str)

    def test_invalid_return_type_defaults_to_str(self):
        """Test that invalid return_as defaults to str"""
        encryptor = SymmetricEncryption("test_password")
        result = encryptor.encrypt_with_metadata("test data", return_as='invalid')

        assert isinstance(result, str)


class TestDecryptWithMetadata:
    """Tests for decrypt_with_metadata method"""

    def test_decrypt_string_package(self):
        """Test decrypting a string JSON package"""
        encryptor = SymmetricEncryption("test_password")
        encrypted = encryptor.encrypt_with_metadata_return_str("test data")
        decrypted = encryptor.decrypt_with_metadata(encrypted)

        assert decrypted == "test data"

    def test_decrypt_bytes_package(self):
        """Test decrypting a bytes JSON package"""
        encryptor = SymmetricEncryption("test_password")
        encrypted = encryptor.encrypt_with_metadata_return_bytes("test data")
        decrypted = encryptor.decrypt_with_metadata(encrypted)

        assert decrypted == "test data"

    def test_decrypt_dict_package(self):
        """Test decrypting a dict PackageData"""
        encryptor = SymmetricEncryption("test_password")
        encrypted = encryptor.encrypt_with_metadata_return_dict("test data")
        decrypted = encryptor.decrypt_with_metadata(encrypted)

        assert decrypted == "test data"

    def test_decrypt_with_different_password_fails(self):
        """Test that decrypting with wrong password fails"""
        encryptor = SymmetricEncryption("password1")
        encrypted = encryptor.encrypt_with_metadata_return_str("test data")

        decryptor = SymmetricEncryption("password2")
        with pytest.raises(ValueError, match="Key hash is not matching"):
            decryptor.decrypt_with_metadata(encrypted)

    def test_decrypt_with_explicit_password(self):
        """Test decrypting with explicitly provided password"""
        encryptor = SymmetricEncryption("password1")
        encrypted = encryptor.encrypt_with_metadata_return_str("test data")

        # Decrypt with the password passed explicitly as a parameter
        decryptor = SymmetricEncryption("password1")
        decrypted = decryptor.decrypt_with_metadata(encrypted, password="password1")

        assert decrypted == "test data"

    def test_decrypt_invalid_json_raises_error(self):
        """Test that invalid JSON raises ValueError"""
        encryptor = SymmetricEncryption("test_password")

        with pytest.raises(ValueError, match="Invalid encrypted package format"):
            encryptor.decrypt_with_metadata("not valid json")

    def test_decrypt_missing_fields_raises_error(self):
        """Test that missing required fields raises ValueError"""
        encryptor = SymmetricEncryption("test_password")
        invalid_package = json.dumps({"encrypted_data": "test"})

        with pytest.raises(ValueError, match="Invalid encrypted package format"):
            encryptor.decrypt_with_metadata(invalid_package)

    def test_decrypt_unicode_data(self):
        """Test encrypting and decrypting unicode data"""
        encryptor = SymmetricEncryption("test_password")
        unicode_data = "Hello 世界 🌍"
        encrypted = encryptor.encrypt_with_metadata_return_str(unicode_data)
        decrypted = encryptor.decrypt_with_metadata(encrypted)

        assert decrypted == unicode_data

    def test_decrypt_empty_string(self):
        """Test encrypting and decrypting empty string"""
        encryptor = SymmetricEncryption("test_password")
        encrypted = encryptor.encrypt_with_metadata_return_str("")
        decrypted = encryptor.decrypt_with_metadata(encrypted)

        assert decrypted == ""

    def test_decrypt_long_data(self):
        """Test encrypting and decrypting long data"""
        encryptor = SymmetricEncryption("test_password")
        long_data = "A" * 10000
        encrypted = encryptor.encrypt_with_metadata_return_str(long_data)
        decrypted = encryptor.decrypt_with_metadata(encrypted)

        assert decrypted == long_data


class TestStaticMethods:
    """Tests for static methods encrypt_data and decrypt_data"""

    def test_encrypt_data_static_method(self):
        """Test static encrypt_data method"""
        encrypted = SymmetricEncryption.encrypt_data("test data", "test_password")

        assert isinstance(encrypted, str)
        # Should be valid JSON
        parsed = json.loads(encrypted)
        assert 'encrypted_data' in parsed
        assert 'salt' in parsed
        assert 'key_hash' in parsed

    def test_decrypt_data_static_method(self):
        """Test static decrypt_data method"""
        encrypted = SymmetricEncryption.encrypt_data("test data", "test_password")
        decrypted = SymmetricEncryption.decrypt_data(encrypted, "test_password")

        assert decrypted == "test data"

    def test_static_methods_roundtrip(self):
        """Test complete roundtrip using static methods"""
        original = "test data with special chars: !@#$%^&*()"
        encrypted = SymmetricEncryption.encrypt_data(original, "test_password")
        decrypted = SymmetricEncryption.decrypt_data(encrypted, "test_password")

        assert decrypted == original

    def test_static_decrypt_with_bytes(self):
        """Test static decrypt_data with bytes input"""
        encrypted = SymmetricEncryption.encrypt_data("test data", "test_password")
        encrypted_bytes = encrypted.encode('utf-8')
        decrypted = SymmetricEncryption.decrypt_data(encrypted_bytes, "test_password")

        assert decrypted == "test data"

    def test_static_decrypt_with_dict(self):
        """Test static decrypt_data with PackageData dict"""
        encryptor = SymmetricEncryption("test_password")
        encrypted_dict = encryptor.encrypt_with_metadata_return_dict("test data")
        decrypted = SymmetricEncryption.decrypt_data(encrypted_dict, "test_password")

        assert decrypted == "test data"

    def test_static_encrypt_bytes_data(self):
        """Test static encrypt_data with bytes input"""
        encrypted = SymmetricEncryption.encrypt_data(b"test data", "test_password")
        decrypted = SymmetricEncryption.decrypt_data(encrypted, "test_password")

        assert decrypted == "test data"
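
# As the interoperability tests elsewhere in this file show, the static
# encrypt_data/decrypt_data helpers emit and accept the same JSON package as
# the instance methods, so either API can decrypt the other's output.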


class TestEncryptionSecurity:
    """Security-related tests for encryption"""

    def test_same_data_different_encryption(self):
        """Test that same data produces different encrypted outputs due to salt"""
        encryptor = SymmetricEncryption("test_password")
        encrypted1 = encryptor.encrypt_with_metadata_return_str("test data")
        encrypted2 = encryptor.encrypt_with_metadata_return_str("test data")

        assert encrypted1 != encrypted2

    def test_password_not_recoverable_from_hash(self):
        """Test that password hash is one-way"""
        encryptor = SymmetricEncryption("secret_password")
        # The password_hash should be SHA256 hex (64 chars)
        assert len(encryptor.password_hash) == 64
        # Password should not be easily derivable from hash
        assert "secret_password" not in encryptor.password_hash

    def test_encrypted_data_not_plaintext(self):
        """Test that encrypted data doesn't contain plaintext"""
        encryptor = SymmetricEncryption("test_password")
        plaintext = "very_secret_data_12345"
        encrypted = encryptor.encrypt_with_metadata_return_str(plaintext)

        # Plaintext should not appear in encrypted output
        assert plaintext not in encrypted

    def test_modified_encrypted_data_fails_decryption(self):
        """Test that modified encrypted data fails to decrypt"""
        encryptor = SymmetricEncryption("test_password")
        encrypted = encryptor.encrypt_with_metadata_return_str("test data")

        # Modify the encrypted data
        encrypted_dict = json.loads(encrypted)
        encrypted_dict['encrypted_data'] = encrypted_dict['encrypted_data'][:-5] + "AAAAA"
        modified_encrypted = json.dumps(encrypted_dict)

        # Should fail to decrypt
        with pytest.raises(Exception):  # Fernet will raise an exception
            encryptor.decrypt_with_metadata(modified_encrypted)

    def test_modified_salt_fails_decryption(self):
        """Test that modified salt fails to decrypt"""
        encryptor = SymmetricEncryption("test_password")
        encrypted = encryptor.encrypt_with_metadata_return_str("test data")

        # Modify the salt
        encrypted_dict = json.loads(encrypted)
        original_salt = base64.urlsafe_b64decode(encrypted_dict['salt'])
        modified_salt = bytes([b ^ 1 for b in original_salt])
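        # Flipping the low bit of every salt byte guarantees a salt that
        # differs from the original, so the re-derived key (and its hash)
        # cannot match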
        encrypted_dict['salt'] = base64.urlsafe_b64encode(modified_salt).decode('utf-8')
        modified_encrypted = json.dumps(encrypted_dict)

        # Should fail to decrypt due to key hash mismatch
        with pytest.raises(ValueError, match="Key hash is not matching"):
            encryptor.decrypt_with_metadata(modified_encrypted)


class TestEdgeCases:
    """Edge case tests"""

    def test_very_long_password(self):
        """Test with very long password"""
        long_password = "a" * 1000
        encryptor = SymmetricEncryption(long_password)
        encrypted = encryptor.encrypt_with_metadata_return_str("test data")
        decrypted = encryptor.decrypt_with_metadata(encrypted)

        assert decrypted == "test data"

    def test_special_characters_in_data(self):
        """Test encryption of data with special characters"""
        special_data = "!@#$%^&*()_+-=[]{}|;':\",./<>?\n\t\r"
        encryptor = SymmetricEncryption("test_password")
        encrypted = encryptor.encrypt_with_metadata_return_str(special_data)
        decrypted = encryptor.decrypt_with_metadata(encrypted)

        assert decrypted == special_data

    def test_binary_data_utf8_bytes(self):
        """Test encryption of UTF-8 encoded bytes"""
        # Test with UTF-8 encoded bytes
        utf8_bytes = "Hello 世界 🌍".encode('utf-8')
        encryptor = SymmetricEncryption("test_password")
        encrypted = encryptor.encrypt_with_metadata_return_str(utf8_bytes)
        decrypted = encryptor.decrypt_with_metadata(encrypted)

        assert decrypted == "Hello 世界 🌍"

    def test_binary_data_with_base64_encoding(self):
        """Test encryption of arbitrary binary data using base64 encoding"""
        # For arbitrary binary data, encode to base64 first
        binary_data = bytes(range(256))
        base64_encoded = base64.b64encode(binary_data).decode('utf-8')

        encryptor = SymmetricEncryption("test_password")
        encrypted = encryptor.encrypt_with_metadata_return_str(base64_encoded)
        decrypted = encryptor.decrypt_with_metadata(encrypted)

        # Decode back to binary
        decoded_binary = base64.b64decode(decrypted)
        assert decoded_binary == binary_data
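        # The base64 wrapper above is needed because the round-trip hands
        # back a str; raw bytes that are not valid UTF-8 cannot survive it
        # directly, as this and the following tests demonstrate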

    def test_binary_data_image_simulation(self):
        """Test encryption of simulated binary image data"""
        # Simulate image binary data (random bytes)
        image_data = os.urandom(1024)  # 1KB of random binary data
        base64_encoded = base64.b64encode(image_data).decode('utf-8')

        encryptor = SymmetricEncryption("test_password")
        encrypted = encryptor.encrypt_with_metadata_return_str(base64_encoded)
        decrypted = encryptor.decrypt_with_metadata(encrypted)

        # Verify round-trip
        decoded_data = base64.b64decode(decrypted)
        assert decoded_data == image_data

    def test_binary_data_with_null_bytes(self):
        """Test encryption of data containing null bytes"""
        # Create data with null bytes
        data_with_nulls = "text\x00with\x00nulls\x00bytes"

        encryptor = SymmetricEncryption("test_password")
        encrypted = encryptor.encrypt_with_metadata_return_str(data_with_nulls)
        decrypted = encryptor.decrypt_with_metadata(encrypted)

        assert decrypted == data_with_nulls

    def test_binary_data_bytes_input(self):
        """Test encryption with bytes input directly"""
        # UTF-8 compatible bytes
        byte_data = b"Binary data test"

        encryptor = SymmetricEncryption("test_password")
        encrypted = encryptor.encrypt_with_metadata_return_str(byte_data)
        decrypted = encryptor.decrypt_with_metadata(encrypted)

        assert decrypted == "Binary data test"

    def test_binary_data_large_file_simulation(self):
        """Test encryption of large binary data (simulated file)"""
        # Simulate a larger binary file (10KB)
        large_data = os.urandom(10240)
        base64_encoded = base64.b64encode(large_data).decode('utf-8')

        encryptor = SymmetricEncryption("test_password")
        encrypted = encryptor.encrypt_with_metadata_return_str(base64_encoded)
        decrypted = encryptor.decrypt_with_metadata(encrypted)

        # Verify integrity
        decoded_data = base64.b64decode(decrypted)
        assert len(decoded_data) == 10240
        assert decoded_data == large_data

    def test_binary_data_json_with_base64(self):
        """Test encryption of JSON containing base64 encoded binary data"""
        binary_data = os.urandom(256)
        json_data = json.dumps({
            "filename": "test.bin",
            "data": base64.b64encode(binary_data).decode('utf-8'),
            "size": len(binary_data)
        })

        encryptor = SymmetricEncryption("test_password")
        encrypted = encryptor.encrypt_with_metadata_return_str(json_data)
        decrypted = encryptor.decrypt_with_metadata(encrypted)

        # Parse and verify
        parsed = json.loads(decrypted)
        assert parsed["filename"] == "test.bin"
        assert parsed["size"] == 256
        decoded_binary = base64.b64decode(parsed["data"])
        assert decoded_binary == binary_data

    def test_numeric_password(self):
        """Test with numeric string password"""
        encryptor = SymmetricEncryption("12345")
        encrypted = encryptor.encrypt_with_metadata_return_str("test data")
        decrypted = encryptor.decrypt_with_metadata(encrypted)

        assert decrypted == "test data"

    def test_unicode_password(self):
        """Test with unicode password"""
        encryptor = SymmetricEncryption("パスワード123")
        encrypted = encryptor.encrypt_with_metadata_return_str("test data")
        decrypted = encryptor.decrypt_with_metadata(encrypted)

        assert decrypted == "test data"


class TestIntegration:
    """Integration tests"""

    def test_multiple_encrypt_decrypt_cycles(self):
        """Test multiple encryption/decryption cycles"""
        encryptor = SymmetricEncryption("test_password")
        original = "test data"

        # Encrypt and decrypt multiple times
        for _ in range(5):
            encrypted = encryptor.encrypt_with_metadata_return_str(original)
            decrypted = encryptor.decrypt_with_metadata(encrypted)
            assert decrypted == original

    def test_different_return_types_interoperability(self):
        """Test that different return types can be decrypted"""
        encryptor = SymmetricEncryption("test_password")
        original = "test data"

        # Encrypt with different return types
        encrypted_str = encryptor.encrypt_with_metadata_return_str(original)
        encrypted_bytes = encryptor.encrypt_with_metadata_return_bytes(original)
        encrypted_dict = encryptor.encrypt_with_metadata_return_dict(original)

        # All should decrypt to the same value
        assert encryptor.decrypt_with_metadata(encrypted_str) == original
        assert encryptor.decrypt_with_metadata(encrypted_bytes) == original
        assert encryptor.decrypt_with_metadata(encrypted_dict) == original

    def test_cross_instance_encryption_decryption(self):
        """Test that different instances with same password can decrypt"""
        encryptor1 = SymmetricEncryption("test_password")
        encryptor2 = SymmetricEncryption("test_password")

        encrypted = encryptor1.encrypt_with_metadata_return_str("test data")
        decrypted = encryptor2.decrypt_with_metadata(encrypted)

        assert decrypted == "test data"

    def test_static_and_instance_methods_compatible(self):
        """Test that static and instance methods are compatible"""
        # Encrypt with static method
        encrypted = SymmetricEncryption.encrypt_data("test data", "test_password")

        # Decrypt with instance method
        decryptor = SymmetricEncryption("test_password")
        decrypted = decryptor.decrypt_with_metadata(encrypted)

        assert decrypted == "test data"

        # And vice versa
        encryptor = SymmetricEncryption("test_password")
        encrypted2 = encryptor.encrypt_with_metadata_return_str("test data 2")
        decrypted2 = SymmetricEncryption.decrypt_data(encrypted2, "test_password")

        assert decrypted2 == "test data 2"
# Parametrized tests
|
||||
@pytest.mark.parametrize("data", [
|
||||
"simple text",
|
||||
"text with spaces and punctuation!",
|
||||
"123456789",
|
||||
"unicode: こんにちは",
|
||||
"emoji: 🔐🔑",
|
||||
"",
|
||||
"a" * 1000, # Long string
|
||||
])
|
||||
def test_encrypt_decrypt_various_data(data: str):
|
||||
"""Parametrized test for various data types"""
|
||||
encryptor = SymmetricEncryption("test_password")
|
||||
encrypted = encryptor.encrypt_with_metadata_return_str(data)
|
||||
decrypted = encryptor.decrypt_with_metadata(encrypted)
|
||||
assert decrypted == data
|
||||
|
||||
|
||||
@pytest.mark.parametrize("password", [
|
||||
"simple",
|
||||
"with spaces",
|
||||
"special!@#$%",
|
||||
"unicode世界",
|
||||
"123456",
|
||||
"a" * 100, # Long password
|
||||
])
|
||||
def test_various_passwords(password: str):
|
||||
"""Parametrized test for various passwords"""
|
||||
encryptor = SymmetricEncryption(password)
|
||||
encrypted = encryptor.encrypt_with_metadata_return_str("test data")
|
||||
decrypted = encryptor.decrypt_with_metadata(encrypted)
|
||||
assert decrypted == "test data"
|
||||
|
||||
|
||||
@pytest.mark.parametrize("return_type,expected_type", [
|
||||
("str", str),
|
||||
("json", str),
|
||||
("bytes", bytes),
|
||||
("dict", dict),
|
||||
])
|
||||
def test_return_types_parametrized(return_type: str, expected_type: type):
|
||||
"""Parametrized test for different return types"""
|
||||
encryptor = SymmetricEncryption("test_password")
|
||||
result = encryptor.encrypt_with_metadata("test data", return_as=return_type)
|
||||
assert isinstance(result, expected_type)
|
||||
|
||||
|
||||
# Fixtures
|
||||
@pytest.fixture
|
||||
def encryptor() -> SymmetricEncryption:
|
||||
"""Fixture providing a basic encryptor instance"""
|
||||
return SymmetricEncryption("test_password")
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def sample_encrypted_data(encryptor: SymmetricEncryption) -> str:
|
||||
"""Fixture providing sample encrypted data"""
|
||||
return encryptor.encrypt_with_metadata_return_str("sample data")
|
||||
|
||||
|
||||
def test_with_encryptor_fixture(encryptor: SymmetricEncryption) -> None:
|
||||
"""Test using encryptor fixture"""
|
||||
encrypted: str = encryptor.encrypt_with_metadata_return_str("test")
|
||||
decrypted: str = encryptor.decrypt_with_metadata(encrypted)
|
||||
assert decrypted == "test"
|
||||
|
||||
|
||||
def test_with_encrypted_data_fixture(encryptor: SymmetricEncryption, sample_encrypted_data: str) -> None:
|
||||
"""Test using encrypted data fixture"""
|
||||
decrypted: str = encryptor.decrypt_with_metadata(sample_encrypted_data)
|
||||
assert decrypted == "sample data"
|
||||
|
||||
# __END__
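For orientation, a minimal usage sketch of the SymmetricEncryption surface these tests exercise. The method names and the return_as parameter are taken from the tests above; the password and payload values are illustrative only, and the real implementation ships elsewhere in corelibs.

# Usage sketch only -- names follow the tests above; values are illustrative.
enc = SymmetricEncryption("example password")
token = enc.encrypt_with_metadata_return_str("payload")  # str round-trip
assert enc.decrypt_with_metadata(token) == "payload"

# The generic entry point selects the output container via return_as:
as_bytes = enc.encrypt_with_metadata("payload", return_as="bytes")  # -> bytes
as_dict = enc.encrypt_with_metadata("payload", return_as="dict")    # -> dict
assert enc.decrypt_with_metadata(as_bytes) == "payload"
assert enc.decrypt_with_metadata(as_dict) == "payload"

# One-shot static helpers, interoperable with the instance methods:
token2 = SymmetricEncryption.encrypt_data("payload", "example password")
assert SymmetricEncryption.decrypt_data(token2, "example password") == "payload"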
@@ -1,538 +0,0 @@
"""
PyTest: file_handling/file_bom_encoding
"""

from pathlib import Path
import pytest

from corelibs.file_handling.file_bom_encoding import (
    is_bom_encoded,
    is_bom_encoded_info,
    BomEncodingInfo,
)


class TestIsBomEncoded:
    """Test suite for is_bom_encoded function"""

    def test_utf8_bom_file(self, tmp_path: Path):
        """Test detection of UTF-8 BOM encoded file"""
        test_file = tmp_path / "utf8_bom.txt"
        # UTF-8 BOM: EF BB BF
        content = b'\xef\xbb\xbfHello, World!'
        test_file.write_bytes(content)

        result = is_bom_encoded(test_file)
        assert result is True
        assert isinstance(result, bool)

    def test_utf16_le_bom_file(self, tmp_path: Path):
        """Test detection of UTF-16 LE BOM encoded file"""
        test_file = tmp_path / "utf16_le_bom.txt"
        # UTF-16 LE BOM: FF FE
        content = b'\xff\xfeH\x00e\x00l\x00l\x00o\x00'
        test_file.write_bytes(content)

        result = is_bom_encoded(test_file)
        assert result is True

    def test_utf16_be_bom_file(self, tmp_path: Path):
        """Test detection of UTF-16 BE BOM encoded file"""
        test_file = tmp_path / "utf16_be_bom.txt"
        # UTF-16 BE BOM: FE FF
        content = b'\xfe\xff\x00H\x00e\x00l\x00l\x00o'
        test_file.write_bytes(content)

        result = is_bom_encoded(test_file)
        assert result is True

    def test_utf32_le_bom_file(self, tmp_path: Path):
        """Test detection of UTF-32 LE BOM encoded file"""
        test_file = tmp_path / "utf32_le_bom.txt"
        # UTF-32 LE BOM: FF FE 00 00
        content = b'\xff\xfe\x00\x00H\x00\x00\x00e\x00\x00\x00'
        test_file.write_bytes(content)

        result = is_bom_encoded(test_file)
        assert result is True

    def test_utf32_be_bom_file(self, tmp_path: Path):
        """Test detection of UTF-32 BE BOM encoded file"""
        test_file = tmp_path / "utf32_be_bom.txt"
        # UTF-32 BE BOM: 00 00 FE FF
        content = b'\x00\x00\xfe\xff\x00\x00\x00H\x00\x00\x00e'
        test_file.write_bytes(content)

        result = is_bom_encoded(test_file)
        assert result is True

    def test_no_bom_ascii_file(self, tmp_path: Path):
        """Test detection of ASCII file without BOM"""
        test_file = tmp_path / "ascii.txt"
        content = b'Hello, World!'
        test_file.write_bytes(content)

        result = is_bom_encoded(test_file)
        assert result is False

    def test_no_bom_utf8_file(self, tmp_path: Path):
        """Test detection of UTF-8 file without BOM"""
        test_file = tmp_path / "utf8_no_bom.txt"
        content = 'Hello, 世界!'.encode('utf-8')
        test_file.write_bytes(content)

        result = is_bom_encoded(test_file)
        assert result is False

    def test_empty_file(self, tmp_path: Path):
        """Test detection on empty file"""
        test_file = tmp_path / "empty.txt"
        test_file.write_bytes(b'')

        result = is_bom_encoded(test_file)
        assert result is False

    def test_binary_file_no_bom(self, tmp_path: Path):
        """Test detection on binary file without BOM"""
        test_file = tmp_path / "binary.bin"
        content = bytes(range(256))
        test_file.write_bytes(content)

        result = is_bom_encoded(test_file)
        assert result is False

    def test_partial_bom_pattern(self, tmp_path: Path):
        """Test file with partial BOM pattern that shouldn't match"""
        test_file = tmp_path / "partial_bom.txt"
        # Only first two bytes of UTF-8 BOM
        content = b'\xef\xbbHello'
        test_file.write_bytes(content)

        result = is_bom_encoded(test_file)
        assert result is False

    def test_false_positive_bom_pattern(self, tmp_path: Path):
        """Test file that contains BOM-like bytes but not at the start"""
        test_file = tmp_path / "false_positive.txt"
        content = b'Hello\xef\xbb\xbfWorld'
        test_file.write_bytes(content)

        result = is_bom_encoded(test_file)
        assert result is False

    def test_nonexistent_file(self, tmp_path: Path):
        """Test that function raises error for non-existent file"""
        test_file = tmp_path / "nonexistent.txt"

        with pytest.raises(ValueError, match="Error checking BOM encoding"):
            is_bom_encoded(test_file)

    def test_very_small_file(self, tmp_path: Path):
        """Test file smaller than largest BOM pattern (4 bytes)"""
        test_file = tmp_path / "small.txt"
        content = b'Hi'
        test_file.write_bytes(content)

        result = is_bom_encoded(test_file)
        assert result is False

    def test_exactly_bom_size_utf8(self, tmp_path: Path):
        """Test file that is exactly the size of UTF-8 BOM"""
        test_file = tmp_path / "exact_bom.txt"
        content = b'\xef\xbb\xbf'
        test_file.write_bytes(content)

        result = is_bom_encoded(test_file)
        assert result is True

    def test_exactly_bom_size_utf32(self, tmp_path: Path):
        """Test file that is exactly the size of UTF-32 BOM"""
        test_file = tmp_path / "exact_bom_utf32.txt"
        content = b'\xff\xfe\x00\x00'
        test_file.write_bytes(content)

        result = is_bom_encoded(test_file)
        assert result is True


class TestIsBomEncodedInfo:
    """Test suite for is_bom_encoded_info function"""

    def test_utf8_bom_info(self, tmp_path: Path):
        """Test detailed info for UTF-8 BOM encoded file"""
        test_file = tmp_path / "utf8_bom.txt"
        content = b'\xef\xbb\xbfHello, UTF-8!'
        test_file.write_bytes(content)

        result = is_bom_encoded_info(test_file)

        assert isinstance(result, dict)
        assert result['has_bom'] is True
        assert result['bom_type'] == 'UTF-8'
        assert result['encoding'] == 'utf-8'
        assert result['bom_length'] == 3
        assert result['bom_pattern'] == b'\xef\xbb\xbf'

    def test_utf16_le_bom_info(self, tmp_path: Path):
        """Test detailed info for UTF-16 LE BOM encoded file"""
        test_file = tmp_path / "utf16_le_bom.txt"
        content = b'\xff\xfeH\x00e\x00l\x00l\x00o\x00'
        test_file.write_bytes(content)

        result = is_bom_encoded_info(test_file)

        assert result['has_bom'] is True
        assert result['bom_type'] == 'UTF-16 LE'
        assert result['encoding'] == 'utf-16-le'
        assert result['bom_length'] == 2
        assert result['bom_pattern'] == b'\xff\xfe'

    def test_utf16_be_bom_info(self, tmp_path: Path):
        """Test detailed info for UTF-16 BE BOM encoded file"""
        test_file = tmp_path / "utf16_be_bom.txt"
        content = b'\xfe\xff\x00H\x00e\x00l\x00l\x00o'
        test_file.write_bytes(content)

        result = is_bom_encoded_info(test_file)

        assert result['has_bom'] is True
        assert result['bom_type'] == 'UTF-16 BE'
        assert result['encoding'] == 'utf-16-be'
        assert result['bom_length'] == 2
        assert result['bom_pattern'] == b'\xfe\xff'

    def test_utf32_le_bom_info(self, tmp_path: Path):
        """Test detailed info for UTF-32 LE BOM encoded file"""
        test_file = tmp_path / "utf32_le_bom.txt"
        content = b'\xff\xfe\x00\x00H\x00\x00\x00e\x00\x00\x00'
        test_file.write_bytes(content)

        result = is_bom_encoded_info(test_file)

        assert result['has_bom'] is True
        assert result['bom_type'] == 'UTF-32 LE'
        assert result['encoding'] == 'utf-32-le'
        assert result['bom_length'] == 4
        assert result['bom_pattern'] == b'\xff\xfe\x00\x00'

    def test_utf32_be_bom_info(self, tmp_path: Path):
        """Test detailed info for UTF-32 BE BOM encoded file"""
        test_file = tmp_path / "utf32_be_bom.txt"
        content = b'\x00\x00\xfe\xff\x00\x00\x00H\x00\x00\x00e'
        test_file.write_bytes(content)

        result = is_bom_encoded_info(test_file)

        assert result['has_bom'] is True
        assert result['bom_type'] == 'UTF-32 BE'
        assert result['encoding'] == 'utf-32-be'
        assert result['bom_length'] == 4
        assert result['bom_pattern'] == b'\x00\x00\xfe\xff'

    def test_no_bom_info(self, tmp_path: Path):
        """Test detailed info for file without BOM"""
        test_file = tmp_path / "no_bom.txt"
        content = b'Hello, World!'
        test_file.write_bytes(content)

        result = is_bom_encoded_info(test_file)

        assert result['has_bom'] is False
        assert result['bom_type'] is None
        assert result['encoding'] is None
        assert result['bom_length'] == 0
        assert result['bom_pattern'] is None

    def test_empty_file_info(self, tmp_path: Path):
        """Test detailed info for empty file"""
        test_file = tmp_path / "empty.txt"
        test_file.write_bytes(b'')

        result = is_bom_encoded_info(test_file)

        assert result['has_bom'] is False
        assert result['bom_type'] is None
        assert result['encoding'] is None
        assert result['bom_length'] == 0
        assert result['bom_pattern'] is None

    def test_bom_precedence_utf32_vs_utf16(self, tmp_path: Path):
        """Test that UTF-32 LE BOM takes precedence over UTF-16 LE when both match"""
        test_file = tmp_path / "precedence.txt"
        # UTF-32 LE BOM starts with UTF-16 LE BOM pattern
        content = b'\xff\xfe\x00\x00Additional content'
        test_file.write_bytes(content)

        result = is_bom_encoded_info(test_file)

        # Should detect UTF-32 LE, not UTF-16 LE
        assert result['has_bom'] is True
        assert result['bom_type'] == 'UTF-32 LE'
        assert result['encoding'] == 'utf-32-le'
        assert result['bom_length'] == 4
        assert result['bom_pattern'] == b'\xff\xfe\x00\x00'

    def test_return_type_validation(self, tmp_path: Path):
        """Test that return type matches BomEncodingInfo TypedDict"""
        test_file = tmp_path / "test.txt"
        test_file.write_bytes(b'Test content')

        result = is_bom_encoded_info(test_file)

        # Check all required keys are present
        required_keys = {'has_bom', 'bom_type', 'encoding', 'bom_length', 'bom_pattern'}
        assert set(result.keys()) == required_keys

        # Check types
        assert isinstance(result['has_bom'], bool)
        assert result['bom_type'] is None or isinstance(result['bom_type'], str)
        assert result['encoding'] is None or isinstance(result['encoding'], str)
        assert isinstance(result['bom_length'], int)
        assert result['bom_pattern'] is None or isinstance(result['bom_pattern'], bytes)

    def test_nonexistent_file_error(self, tmp_path: Path):
        """Test that function raises ValueError for non-existent file"""
        test_file = tmp_path / "nonexistent.txt"

        with pytest.raises(ValueError) as exc_info:
            is_bom_encoded_info(test_file)

        assert "Error checking BOM encoding" in str(exc_info.value)

    def test_directory_instead_of_file(self, tmp_path: Path):
        """Test that function raises error when given a directory"""
        test_dir = tmp_path / "test_dir"
        test_dir.mkdir()

        with pytest.raises(ValueError, match="Error checking BOM encoding"):
            is_bom_encoded_info(test_dir)

    def test_large_file_with_bom(self, tmp_path: Path):
        """Test BOM detection on large file (only first 4 bytes matter)"""
        test_file = tmp_path / "large_bom.txt"
        # UTF-8 BOM followed by large content
        content = b'\xef\xbb\xbf' + b'A' * 100000
        test_file.write_bytes(content)

        result = is_bom_encoded_info(test_file)

        assert result['has_bom'] is True
        assert result['bom_type'] == 'UTF-8'
        assert result['encoding'] == 'utf-8'

    def test_bom_detection_priority_order(self, tmp_path: Path):
        """Test that BOM patterns are checked in the correct priority order"""
        # The function should check longer patterns first to avoid false matches
        test_cases = [
            (b'\xff\xfe\x00\x00', 'UTF-32 LE'),  # 4 bytes
            (b'\x00\x00\xfe\xff', 'UTF-32 BE'),  # 4 bytes
            (b'\xff\xfe', 'UTF-16 LE'),  # 2 bytes
            (b'\xfe\xff', 'UTF-16 BE'),  # 2 bytes
            (b'\xef\xbb\xbf', 'UTF-8'),  # 3 bytes
        ]

        for i, (bom_bytes, expected_type) in enumerate(test_cases):
            test_file = tmp_path / f"priority_test_{i}.txt"
            content = bom_bytes + b'Content'
            test_file.write_bytes(content)

            result = is_bom_encoded_info(test_file)
            assert result['bom_type'] == expected_type
            assert result['bom_pattern'] == bom_bytes

    def test_csv_file_with_utf8_bom(self, tmp_path: Path):
        """Test CSV file with UTF-8 BOM (common use case mentioned in docstring)"""
        test_file = tmp_path / "data.csv"
        content = b'\xef\xbb\xbf"Name","Age","City"\n"John",30,"New York"\n"Jane",25,"Tokyo"'
        test_file.write_bytes(content)

        result = is_bom_encoded_info(test_file)

        assert result['has_bom'] is True
        assert result['bom_type'] == 'UTF-8'
        assert result['encoding'] == 'utf-8'
        assert result['bom_length'] == 3

    def test_csv_file_without_bom(self, tmp_path: Path):
        """Test CSV file without BOM"""
        test_file = tmp_path / "data_no_bom.csv"
        content = b'"Name","Age","City"\n"John",30,"New York"\n"Jane",25,"Tokyo"'
        test_file.write_bytes(content)

        result = is_bom_encoded_info(test_file)

        assert result['has_bom'] is False
        assert result['bom_type'] is None
        assert result['encoding'] is None
        assert result['bom_length'] == 0


class TestBomEncodingInfo:
    """Test suite for BomEncodingInfo TypedDict"""

    def test_typed_dict_structure(self):
        """Test that BomEncodingInfo has correct structure"""
        # This is a type check - in actual usage, mypy would validate this
        sample_info: BomEncodingInfo = {
            'has_bom': True,
            'bom_type': 'UTF-8',
            'encoding': 'utf-8',
            'bom_length': 3,
            'bom_pattern': b'\xef\xbb\xbf'
        }

        assert sample_info['has_bom'] is True
        assert sample_info['bom_type'] == 'UTF-8'
        assert sample_info['encoding'] == 'utf-8'
        assert sample_info['bom_length'] == 3
        assert sample_info['bom_pattern'] == b'\xef\xbb\xbf'

    def test_typed_dict_none_values(self):
        """Test TypedDict with None values"""
        sample_info: BomEncodingInfo = {
            'has_bom': False,
            'bom_type': None,
            'encoding': None,
            'bom_length': 0,
            'bom_pattern': None
        }

        assert sample_info['has_bom'] is False
        assert sample_info['bom_type'] is None
        assert sample_info['encoding'] is None
        assert sample_info['bom_length'] == 0
        assert sample_info['bom_pattern'] is None


class TestIntegration:
    """Integration tests for BOM encoding detection"""

    def test_is_bom_encoded_uses_info_function(self, tmp_path: Path):
        """Test that is_bom_encoded uses is_bom_encoded_info internally"""
        test_file = tmp_path / "integration.txt"
        content = b'\xef\xbb\xbfIntegration test'
        test_file.write_bytes(content)

        # Both functions should return consistent results
        simple_result = is_bom_encoded(test_file)
        detailed_result = is_bom_encoded_info(test_file)

        assert simple_result == detailed_result['has_bom']
        assert simple_result is True

    def test_multiple_file_bom_detection_workflow(self, tmp_path: Path):
        """Test a workflow of detecting BOM across multiple files"""
        files = {
            'utf8_bom.csv': b'\xef\xbb\xbf"data","value"\n"test",123',
            'utf16_le.txt': b'\xff\xfeH\x00e\x00l\x00l\x00o\x00',
            'no_bom.txt': b'Plain ASCII text',
            'empty.txt': b'',
        }

        results = {}
        detailed_results = {}

        for filename, content in files.items():
            file_path = tmp_path / filename
            file_path.write_bytes(content)

            results[filename] = is_bom_encoded(file_path)
            detailed_results[filename] = is_bom_encoded_info(file_path)

        # Verify results
        assert results['utf8_bom.csv'] is True
        assert results['utf16_le.txt'] is True
        assert results['no_bom.txt'] is False
        assert results['empty.txt'] is False

        # Verify detailed results match simple results
        for filename in files:
            assert results[filename] == detailed_results[filename]['has_bom']

        # Verify specific encoding details
        assert detailed_results['utf8_bom.csv']['encoding'] == 'utf-8'
        assert detailed_results['utf16_le.txt']['encoding'] == 'utf-16-le'
        assert detailed_results['no_bom.txt']['encoding'] is None

    def test_csv_loading_workflow(self, tmp_path: Path):
        """Test BOM detection workflow for CSV loading (main use case)"""
        # Create CSV files with and without BOM
        csv_with_bom = tmp_path / "data_with_bom.csv"
        csv_without_bom = tmp_path / "data_without_bom.csv"

        # CSV with UTF-8 BOM
        bom_content = b'\xef\xbb\xbf"Name","Age"\n"Alice",30\n"Bob",25'
        csv_with_bom.write_bytes(bom_content)

        # CSV without BOM
        no_bom_content = b'"Name","Age"\n"Charlie",35\n"Diana",28'
        csv_without_bom.write_bytes(no_bom_content)

        # Simulate CSV loading workflow
        files_to_process = [csv_with_bom, csv_without_bom]
        processing_info: list[dict[str, str | bool | int]] = []

        for csv_file in files_to_process:
            bom_info = is_bom_encoded_info(csv_file)

            file_info: dict[str, str | bool | int] = {
                'file': csv_file.name,
                'has_bom': bom_info['has_bom'],
                'encoding': bom_info['encoding'] or 'default',
                'skip_bytes': bom_info['bom_length']
            }
            processing_info.append(file_info)

        # Verify workflow results
        assert len(processing_info) == 2

        bom_file_info = next(info for info in processing_info if info['file'] == 'data_with_bom.csv')
        no_bom_file_info = next(info for info in processing_info if info['file'] == 'data_without_bom.csv')

        assert bom_file_info['has_bom'] is True
        assert bom_file_info['encoding'] == 'utf-8'
        assert bom_file_info['skip_bytes'] == 3

        assert no_bom_file_info['has_bom'] is False
        assert no_bom_file_info['encoding'] == 'default'
        assert no_bom_file_info['skip_bytes'] == 0

    def test_error_handling_consistency(self, tmp_path: Path):
        """Test that both functions handle errors consistently"""
        nonexistent_file = tmp_path / "does_not_exist.txt"

        # Both functions should raise ValueError for non-existent files
        with pytest.raises(ValueError):
            is_bom_encoded(nonexistent_file)

        with pytest.raises(ValueError):
            is_bom_encoded_info(nonexistent_file)

    def test_all_supported_bom_types(self, tmp_path: Path):
        """Test detection of all supported BOM types"""
        bom_test_cases = [
            ('utf8', b'\xef\xbb\xbf', 'UTF-8', 'utf-8', 3),
            ('utf16_le', b'\xff\xfe', 'UTF-16 LE', 'utf-16-le', 2),
            ('utf16_be', b'\xfe\xff', 'UTF-16 BE', 'utf-16-be', 2),
            ('utf32_le', b'\xff\xfe\x00\x00', 'UTF-32 LE', 'utf-32-le', 4),
            ('utf32_be', b'\x00\x00\xfe\xff', 'UTF-32 BE', 'utf-32-be', 4),
        ]

        for name, bom_bytes, expected_type, expected_encoding, expected_length in bom_test_cases:
            test_file = tmp_path / f"{name}_test.txt"
            content = bom_bytes + b'Test content'
            test_file.write_bytes(content)

            # Test simple function
            assert is_bom_encoded(test_file) is True

            # Test detailed function
            info = is_bom_encoded_info(test_file)
            assert info['has_bom'] is True
            assert info['bom_type'] == expected_type
            assert info['encoding'] == expected_encoding
            assert info['bom_length'] == expected_length
            assert info['bom_pattern'] == bom_bytes


# __END__
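The tests above pin down the is_bom_encoded_info contract tightly: the BomEncodingInfo keys, longest-pattern-first precedence (UTF-32 LE begins with the UTF-16 LE bytes), and ValueError wrapping of I/O failures. A minimal sketch that satisfies that contract, assuming the implementation simply prefix-matches the first four bytes; this is not the shipped corelibs code:

from pathlib import Path

# Longest patterns first so UTF-32 LE (FF FE 00 00) wins over UTF-16 LE (FF FE).
_BOM_PATTERNS: list[tuple[bytes, str, str]] = [
    (b'\xff\xfe\x00\x00', 'UTF-32 LE', 'utf-32-le'),
    (b'\x00\x00\xfe\xff', 'UTF-32 BE', 'utf-32-be'),
    (b'\xef\xbb\xbf', 'UTF-8', 'utf-8'),
    (b'\xff\xfe', 'UTF-16 LE', 'utf-16-le'),
    (b'\xfe\xff', 'UTF-16 BE', 'utf-16-be'),
]

def is_bom_encoded_info_sketch(file_path: Path) -> dict:
    """Sketch: returns the same keys as BomEncodingInfo."""
    try:
        with open(file_path, 'rb') as handle:
            head = handle.read(4)  # longest BOM is 4 bytes
    except OSError as exc:  # missing file, directory instead of file, ...
        raise ValueError(f"Error checking BOM encoding: {exc}") from exc
    for pattern, bom_type, encoding in _BOM_PATTERNS:
        if head.startswith(pattern):
            return {'has_bom': True, 'bom_type': bom_type, 'encoding': encoding,
                    'bom_length': len(pattern), 'bom_pattern': pattern}
    return {'has_bom': False, 'bom_type': None, 'encoding': None,
            'bom_length': 0, 'bom_pattern': None}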
@@ -1,389 +0,0 @@
"""
PyTest: file_handling/file_crc
"""

import zlib
from pathlib import Path
import pytest

from corelibs.file_handling.file_crc import (
    file_crc,
    file_name_crc,
)


class TestFileCrc:
    """Test suite for file_crc function"""

    def test_file_crc_small_file(self, tmp_path: Path):
        """Test CRC calculation for a small file"""
        test_file = tmp_path / "test_small.txt"
        content = b"Hello, World!"
        test_file.write_bytes(content)

        # Calculate expected CRC
        expected_crc = f"{zlib.crc32(content) & 0xFFFFFFFF:08X}"

        result = file_crc(test_file)
        assert result == expected_crc
        assert isinstance(result, str)
        assert len(result) == 8 # CRC32 is 8 hex digits

    def test_file_crc_large_file(self, tmp_path: Path):
        """Test CRC calculation for a file larger than buffer size (65536 bytes)"""
        test_file = tmp_path / "test_large.bin"

        # Create a file larger than the buffer (65536 bytes)
        content = b"A" * 100000
        test_file.write_bytes(content)

        # Calculate expected CRC
        expected_crc = f"{zlib.crc32(content) & 0xFFFFFFFF:08X}"

        result = file_crc(test_file)
        assert result == expected_crc

    def test_file_crc_empty_file(self, tmp_path: Path):
        """Test CRC calculation for an empty file"""
        test_file = tmp_path / "test_empty.txt"
        test_file.write_bytes(b"")

        # CRC of empty data
        expected_crc = f"{zlib.crc32(b'') & 0xFFFFFFFF:08X}"

        result = file_crc(test_file)
        assert result == expected_crc
        assert result == "00000000"

    def test_file_crc_binary_file(self, tmp_path: Path):
        """Test CRC calculation for a binary file"""
        test_file = tmp_path / "test_binary.bin"
        content = bytes(range(256)) # All possible byte values
        test_file.write_bytes(content)

        expected_crc = f"{zlib.crc32(content) & 0xFFFFFFFF:08X}"

        result = file_crc(test_file)
        assert result == expected_crc

    def test_file_crc_exact_buffer_size(self, tmp_path: Path):
        """Test CRC calculation for a file exactly the buffer size"""
        test_file = tmp_path / "test_exact_buffer.bin"
        content = b"X" * 65536
        test_file.write_bytes(content)

        expected_crc = f"{zlib.crc32(content) & 0xFFFFFFFF:08X}"

        result = file_crc(test_file)
        assert result == expected_crc

    def test_file_crc_multiple_buffers(self, tmp_path: Path):
        """Test CRC calculation for a file requiring multiple buffer reads"""
        test_file = tmp_path / "test_multi_buffer.bin"
        content = b"TestData" * 20000 # ~160KB
        test_file.write_bytes(content)

        expected_crc = f"{zlib.crc32(content) & 0xFFFFFFFF:08X}"

        result = file_crc(test_file)
        assert result == expected_crc

    def test_file_crc_unicode_content(self, tmp_path: Path):
        """Test CRC calculation for a file with unicode content"""
        test_file = tmp_path / "test_unicode.txt"
        content = "Hello 世界! 🌍".encode('utf-8')
        test_file.write_bytes(content)

        expected_crc = f"{zlib.crc32(content) & 0xFFFFFFFF:08X}"

        result = file_crc(test_file)
        assert result == expected_crc

    def test_file_crc_deterministic(self, tmp_path: Path):
        """Test that CRC calculation is deterministic"""
        test_file = tmp_path / "test_deterministic.txt"
        content = b"Deterministic test content"
        test_file.write_bytes(content)

        result1 = file_crc(test_file)
        result2 = file_crc(test_file)

        assert result1 == result2

    def test_file_crc_different_files(self, tmp_path: Path):
        """Test that different files produce different CRCs"""
        file1 = tmp_path / "file1.txt"
        file2 = tmp_path / "file2.txt"

        file1.write_bytes(b"Content 1")
        file2.write_bytes(b"Content 2")

        crc1 = file_crc(file1)
        crc2 = file_crc(file2)

        assert crc1 != crc2

    def test_file_crc_same_content_different_names(self, tmp_path: Path):
        """Test that files with same content produce same CRC regardless of name"""
        file1 = tmp_path / "name1.txt"
        file2 = tmp_path / "name2.txt"

        content = b"Same content"
        file1.write_bytes(content)
        file2.write_bytes(content)

        crc1 = file_crc(file1)
        crc2 = file_crc(file2)

        assert crc1 == crc2

    def test_file_crc_nonexistent_file(self, tmp_path: Path):
        """Test that file_crc raises error for non-existent file"""
        test_file = tmp_path / "nonexistent.txt"

        with pytest.raises(FileNotFoundError):
            file_crc(test_file)

    def test_file_crc_with_path_object(self, tmp_path: Path):
        """Test file_crc works with Path object"""
        test_file = tmp_path / "test_path.txt"
        test_file.write_bytes(b"Test with Path")

        result = file_crc(test_file)
        assert isinstance(result, str)
        assert len(result) == 8


class TestFileNameCrc:
    """Test suite for file_name_crc function"""

    def test_file_name_crc_simple_filename(self, tmp_path: Path):
        """Test extracting simple filename without parent folder"""
        test_file = tmp_path / "testfile.csv"

        result = file_name_crc(test_file, add_parent_folder=False)
        assert result == "testfile.csv"

    def test_file_name_crc_with_parent_folder(self, tmp_path: Path):
        """Test extracting filename with parent folder"""
        parent = tmp_path / "parent_folder"
        parent.mkdir()
        test_file = parent / "testfile.csv"

        result = file_name_crc(test_file, add_parent_folder=True)
        assert result == "parent_folder/testfile.csv"

    def test_file_name_crc_nested_path_without_parent(self):
        """Test filename extraction from deeply nested path without parent"""
        test_path = Path("/foo/bar/baz/file.csv")

        result = file_name_crc(test_path, add_parent_folder=False)
        assert result == "file.csv"

    def test_file_name_crc_nested_path_with_parent(self):
        """Test filename extraction from deeply nested path with parent"""
        test_path = Path("/foo/bar/baz/file.csv")

        result = file_name_crc(test_path, add_parent_folder=True)
        assert result == "baz/file.csv"

    def test_file_name_crc_default_parameter(self, tmp_path: Path):
        """Test that add_parent_folder defaults to False"""
        test_file = tmp_path / "subdir" / "testfile.txt"
        test_file.parent.mkdir(parents=True)

        result = file_name_crc(test_file)
        assert result == "testfile.txt"

    def test_file_name_crc_different_extensions(self, tmp_path: Path):
        """Test with different file extensions"""
        extensions = [".txt", ".csv", ".json", ".xml", ".py"]

        for ext in extensions:
            test_file = tmp_path / f"testfile{ext}"
            result = file_name_crc(test_file, add_parent_folder=False)
            assert result == f"testfile{ext}"

    def test_file_name_crc_no_extension(self, tmp_path: Path):
        """Test with filename without extension"""
        test_file = tmp_path / "testfile"

        result = file_name_crc(test_file, add_parent_folder=False)
        assert result == "testfile"

    def test_file_name_crc_multiple_dots(self, tmp_path: Path):
        """Test with filename containing multiple dots"""
        test_file = tmp_path / "test.file.name.tar.gz"

        result = file_name_crc(test_file, add_parent_folder=False)
        assert result == "test.file.name.tar.gz"

    def test_file_name_crc_with_spaces(self, tmp_path: Path):
        """Test with filename containing spaces"""
        test_file = tmp_path / "test file name.txt"

        result = file_name_crc(test_file, add_parent_folder=False)
        assert result == "test file name.txt"

    def test_file_name_crc_with_special_chars(self, tmp_path: Path):
        """Test with filename containing special characters"""
        test_file = tmp_path / "test_file-name (1).txt"

        result = file_name_crc(test_file, add_parent_folder=False)
        assert result == "test_file-name (1).txt"

    def test_file_name_crc_unicode_filename(self, tmp_path: Path):
        """Test with unicode characters in filename"""
        test_file = tmp_path / "テストファイル.txt"

        result = file_name_crc(test_file, add_parent_folder=False)
        assert result == "テストファイル.txt"

    def test_file_name_crc_unicode_parent(self, tmp_path: Path):
        """Test with unicode characters in parent folder name"""
        parent = tmp_path / "親フォルダ"
        parent.mkdir()
        test_file = parent / "file.txt"

        result = file_name_crc(test_file, add_parent_folder=True)
        assert result == "親フォルダ/file.txt"

    def test_file_name_crc_path_separator(self, tmp_path: Path):
        """Test that result uses forward slash separator"""
        parent = tmp_path / "parent"
        parent.mkdir()
        test_file = parent / "file.txt"

        result = file_name_crc(test_file, add_parent_folder=True)
        assert "/" in result
        assert result == "parent/file.txt"

    def test_file_name_crc_return_type(self, tmp_path: Path):
        """Test that return type is always string"""
        test_file = tmp_path / "test.txt"

        result1 = file_name_crc(test_file, add_parent_folder=False)
        result2 = file_name_crc(test_file, add_parent_folder=True)

        assert isinstance(result1, str)
        assert isinstance(result2, str)

    def test_file_name_crc_root_level_file(self):
        """Test with file at root level"""
        test_path = Path("/file.txt")

        result_without_parent = file_name_crc(test_path, add_parent_folder=False)
        assert result_without_parent == "file.txt"

        result_with_parent = file_name_crc(test_path, add_parent_folder=True)
        # Parent of root-level file would be empty string or root
        assert "file.txt" in result_with_parent

    def test_file_name_crc_relative_path(self):
        """Test with relative path"""
        test_path = Path("folder/subfolder/file.txt")

        result = file_name_crc(test_path, add_parent_folder=True)
        assert result == "subfolder/file.txt"

    def test_file_name_crc_current_dir(self):
        """Test with file in current directory"""
        test_path = Path("file.txt")

        result = file_name_crc(test_path, add_parent_folder=False)
        assert result == "file.txt"

    def test_file_name_crc_nonexistent_file(self, tmp_path: Path):
        """Test that file_name_crc works even if file doesn't exist"""
        test_file = tmp_path / "parent" / "nonexistent.txt"

        # Should work without file existing
        result1 = file_name_crc(test_file, add_parent_folder=False)
        assert result1 == "nonexistent.txt"

        result2 = file_name_crc(test_file, add_parent_folder=True)
        assert result2 == "parent/nonexistent.txt"

    def test_file_name_crc_explicit_true(self, tmp_path: Path):
        """Test explicitly setting add_parent_folder to True"""
        parent = tmp_path / "mydir"
        parent.mkdir()
        test_file = parent / "myfile.dat"

        result = file_name_crc(test_file, add_parent_folder=True)
        assert result == "mydir/myfile.dat"

    def test_file_name_crc_explicit_false(self, tmp_path: Path):
        """Test explicitly setting add_parent_folder to False"""
        parent = tmp_path / "mydir"
        parent.mkdir()
        test_file = parent / "myfile.dat"

        result = file_name_crc(test_file, add_parent_folder=False)
        assert result == "myfile.dat"


class TestIntegration:
    """Integration tests combining both functions"""

    def test_crc_and_naming_together(self, tmp_path: Path):
        """Test using both functions on the same file"""
        parent = tmp_path / "data"
        parent.mkdir()
        test_file = parent / "testfile.csv"
        test_file.write_bytes(b"Sample data for integration test")

        # Get CRC
        crc = file_crc(test_file)
        assert len(crc) == 8

        # Get filename
        name_simple = file_name_crc(test_file, add_parent_folder=False)
        assert name_simple == "testfile.csv"

        name_with_parent = file_name_crc(test_file, add_parent_folder=True)
        assert name_with_parent == "data/testfile.csv"

    def test_multiple_files_crc_comparison(self, tmp_path: Path):
        """Test CRC comparison across multiple files"""
        files: dict[str, str] = {}
        for i in range(3):
            file_path = tmp_path / f"file{i}.txt"
            file_path.write_bytes(f"Content {i}".encode())
            files[f"file{i}.txt"] = file_crc(file_path)

        # All CRCs should be different
        assert len(set(files.values())) == 3

    def test_workflow_file_identification(self, tmp_path: Path):
        """Test a workflow of identifying files by name and verifying by CRC"""
        # Create directory structure
        dir1 = tmp_path / "dir1"
        dir2 = tmp_path / "dir2"
        dir1.mkdir()
        dir2.mkdir()

        # Create same-named files with different content
        file1 = dir1 / "data.csv"
        file2 = dir2 / "data.csv"

        file1.write_bytes(b"Data set 1")
        file2.write_bytes(b"Data set 2")

        # Get names (should be the same)
        name1 = file_name_crc(file1, add_parent_folder=False)
        name2 = file_name_crc(file2, add_parent_folder=False)
        assert name1 == name2 == "data.csv"

        # Get names with parent (should be different)
        full_name1 = file_name_crc(file1, add_parent_folder=True)
        full_name2 = file_name_crc(file2, add_parent_folder=True)
        assert full_name1 == "dir1/data.csv"
        assert full_name2 == "dir2/data.csv"

        # Get CRCs (should be different)
        crc1 = file_crc(file1)
        crc2 = file_crc(file2)
        assert crc1 != crc2

# __END__
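The expected values in these tests fully determine file_crc's observable behavior: buffered 65536-byte reads, a running zlib.crc32, and an 8-digit uppercase hex result, with FileNotFoundError left to propagate. A sketch consistent with that, together with the purely path-based file_name_crc; neither is the shipped implementation:

import zlib
from pathlib import Path

def file_crc_sketch(file_path: Path, buffer_size: int = 65536) -> str:
    crc = 0
    with open(file_path, 'rb') as handle:  # FileNotFoundError propagates, as tested
        while chunk := handle.read(buffer_size):
            crc = zlib.crc32(chunk, crc)  # running CRC equals whole-file CRC
    return f"{crc & 0xFFFFFFFF:08X}"  # 8 uppercase hex digits; empty file -> "00000000"

def file_name_crc_sketch(file_path: Path, add_parent_folder: bool = False) -> str:
    # Pure path manipulation -- the file does not have to exist.
    if add_parent_folder and file_path.parent.name:
        return f"{file_path.parent.name}/{file_path.name}"  # always "/" as separator
    return file_path.name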
@@ -1,522 +0,0 @@
"""
PyTest: file_handling/file_handling
"""

# pylint: disable=use-implicit-booleaness-not-comparison

from pathlib import Path
from pytest import CaptureFixture

from corelibs.file_handling.file_handling import (
    remove_all_in_directory,
)


class TestRemoveAllInDirectory:
    """Test suite for remove_all_in_directory function"""

    def test_remove_all_files_in_empty_directory(self, tmp_path: Path):
        """Test removing all files from an empty directory"""
        test_dir = tmp_path / "empty_dir"
        test_dir.mkdir()

        result = remove_all_in_directory(test_dir)
        assert result is True
        assert test_dir.exists() # Directory itself should still exist
        assert list(test_dir.iterdir()) == []

    def test_remove_all_files_in_directory(self, tmp_path: Path):
        """Test removing all files from a directory with files"""
        test_dir = tmp_path / "test_dir"
        test_dir.mkdir()

        # Create test files
        (test_dir / "file1.txt").write_text("content 1")
        (test_dir / "file2.txt").write_text("content 2")
        (test_dir / "file3.csv").write_text("csv,data")

        result = remove_all_in_directory(test_dir)
        assert result is True
        assert test_dir.exists()
        assert list(test_dir.iterdir()) == []

    def test_remove_all_subdirectories(self, tmp_path: Path):
        """Test removing subdirectories within a directory"""
        test_dir = tmp_path / "test_dir"
        test_dir.mkdir()

        # Create subdirectories
        subdir1 = test_dir / "subdir1"
        subdir2 = test_dir / "subdir2"
        subdir1.mkdir()
        subdir2.mkdir()

        # Add files to subdirectories
        (subdir1 / "file.txt").write_text("content")
        (subdir2 / "file.txt").write_text("content")

        result = remove_all_in_directory(test_dir)
        assert result is True
        assert test_dir.exists()
        assert list(test_dir.iterdir()) == []

    def test_remove_nested_structure(self, tmp_path: Path):
        """Test removing deeply nested directory structure"""
        test_dir = tmp_path / "test_dir"
        test_dir.mkdir()

        # Create nested structure
        nested = test_dir / "level1" / "level2" / "level3"
        nested.mkdir(parents=True)
        (nested / "deep_file.txt").write_text("deep content")
        (test_dir / "level1" / "mid_file.txt").write_text("mid content")
        (test_dir / "top_file.txt").write_text("top content")

        result = remove_all_in_directory(test_dir)
        assert result is True
        assert test_dir.exists()
        assert list(test_dir.iterdir()) == []

    def test_remove_with_ignore_files_single(self, tmp_path: Path):
        """Test removing files while ignoring specific files"""
        test_dir = tmp_path / "test_dir"
        test_dir.mkdir()

        # Create files
        (test_dir / "keep.txt").write_text("keep me")
        (test_dir / "remove1.txt").write_text("remove me")
        (test_dir / "remove2.txt").write_text("remove me too")

        result = remove_all_in_directory(test_dir, ignore_files=["keep.txt"])
        assert result is True
        assert test_dir.exists()
        remaining = list(test_dir.iterdir())
        assert len(remaining) == 1
        assert remaining[0].name == "keep.txt"

    def test_remove_with_ignore_files_multiple(self, tmp_path: Path):
        """Test removing files while ignoring multiple specific files"""
        test_dir = tmp_path / "test_dir"
        test_dir.mkdir()

        # Create files
        (test_dir / "keep1.txt").write_text("keep me")
        (test_dir / "keep2.log").write_text("keep me too")
        (test_dir / "remove.txt").write_text("remove me")

        result = remove_all_in_directory(
            test_dir,
            ignore_files=["keep1.txt", "keep2.log"]
        )
        assert result is True
        assert test_dir.exists()
        remaining = {f.name for f in test_dir.iterdir()}
        assert remaining == {"keep1.txt", "keep2.log"}

    def test_remove_with_ignore_directory(self, tmp_path: Path):
        """Test removing with ignored directory"""
        test_dir = tmp_path / "test_dir"
        test_dir.mkdir()

        # Create directories
        keep_dir = test_dir / "keep_dir"
        remove_dir = test_dir / "remove_dir"
        keep_dir.mkdir()
        remove_dir.mkdir()

        (keep_dir / "file.txt").write_text("keep")
        (remove_dir / "file.txt").write_text("remove")

        result = remove_all_in_directory(test_dir, ignore_files=["keep_dir"])
        assert result is True
        assert keep_dir.exists()
        assert not remove_dir.exists()

    def test_remove_with_ignore_nested_files(self, tmp_path: Path):
        """Test that ignore_files matches by name at any level"""
        test_dir = tmp_path / "test_dir"
        test_dir.mkdir()

        # Create files with same name at different levels
        (test_dir / "keep.txt").write_text("top level keep")
        (test_dir / "remove.txt").write_text("remove")
        subdir = test_dir / "subdir"
        subdir.mkdir()
        (subdir / "file.txt").write_text("nested")

        result = remove_all_in_directory(test_dir, ignore_files=["keep.txt"])
        assert result is True
        # keep.txt should be preserved at top level
        assert (test_dir / "keep.txt").exists()
        # Other files should be removed
        assert not (test_dir / "remove.txt").exists()
        # Subdirectory not in ignore list should be removed
        assert not subdir.exists()

    def test_remove_nonexistent_directory(self, tmp_path: Path):
        """Test removing from a non-existent directory returns False"""
        test_dir = tmp_path / "nonexistent"

        result = remove_all_in_directory(test_dir)
        assert result is False

    def test_remove_from_file_not_directory(self, tmp_path: Path):
        """Test that function returns False when given a file instead of directory"""
        test_file = tmp_path / "file.txt"
        test_file.write_text("content")

        result = remove_all_in_directory(test_file)
        assert result is False
        assert test_file.exists() # File should not be affected

    def test_remove_with_verbose_mode(self, tmp_path: Path, capsys: CaptureFixture[str]):
        """Test verbose mode produces output"""
        test_dir = tmp_path / "test_dir"
        test_dir.mkdir()

        # Create files and directories
        (test_dir / "file1.txt").write_text("content")
        (test_dir / "file2.txt").write_text("content")
        subdir = test_dir / "subdir"
        subdir.mkdir()
        (subdir / "nested.txt").write_text("content")

        result = remove_all_in_directory(test_dir, verbose=True)
        assert result is True

        captured = capsys.readouterr()
        assert "Remove old files in: test_dir [" in captured.out
        assert "]" in captured.out
        assert "." in captured.out # Files are marked with .
        assert "/" in captured.out # Directories are marked with /

    def test_remove_with_dry_run_mode(self, tmp_path: Path):
        """Test dry run mode doesn't actually remove files"""
        test_dir = tmp_path / "test_dir"
        test_dir.mkdir()

        # Create test files
        file1 = test_dir / "file1.txt"
        file2 = test_dir / "file2.txt"
        file1.write_text("content 1")
        file2.write_text("content 2")

        result = remove_all_in_directory(test_dir, dry_run=True)
        assert result is True
        # Files should still exist
        assert file1.exists()
        assert file2.exists()
        assert len(list(test_dir.iterdir())) == 2

    def test_remove_with_dry_run_and_verbose(self, tmp_path: Path, capsys: CaptureFixture[str]):
        """Test dry run with verbose mode shows [DRY RUN] prefix"""
        test_dir = tmp_path / "test_dir"
        test_dir.mkdir()
        (test_dir / "file.txt").write_text("content")

        result = remove_all_in_directory(test_dir, dry_run=True, verbose=True)
        assert result is True

        captured = capsys.readouterr()
        assert "[DRY RUN]" in captured.out

    def test_remove_mixed_content(self, tmp_path: Path):
        """Test removing mixed files and directories"""
        test_dir = tmp_path / "test_dir"
        test_dir.mkdir()

        # Create mixed content
        (test_dir / "file1.txt").write_text("content")
        (test_dir / "file2.csv").write_text("csv")
        subdir1 = test_dir / "subdir1"
        subdir2 = test_dir / "subdir2"
        subdir1.mkdir()
        subdir2.mkdir()
        (subdir1 / "nested_file.txt").write_text("nested")

        result = remove_all_in_directory(test_dir)
        assert result is True
        assert list(test_dir.iterdir()) == []

    def test_remove_with_none_ignore_files(self, tmp_path: Path):
        """Test that None as ignore_files works correctly"""
        test_dir = tmp_path / "test_dir"
        test_dir.mkdir()
        (test_dir / "file.txt").write_text("content")

        result = remove_all_in_directory(test_dir, ignore_files=None)
        assert result is True
        assert list(test_dir.iterdir()) == []

    def test_remove_with_empty_ignore_list(self, tmp_path: Path):
        """Test that empty ignore_files list works correctly"""
        test_dir = tmp_path / "test_dir"
        test_dir.mkdir()
        (test_dir / "file.txt").write_text("content")

        result = remove_all_in_directory(test_dir, ignore_files=[])
        assert result is True
        assert list(test_dir.iterdir()) == []

    def test_remove_special_characters_in_filenames(self, tmp_path: Path):
        """Test removing files with special characters in names"""
        test_dir = tmp_path / "test_dir"
        test_dir.mkdir()

        # Create files with special characters
        (test_dir / "file with spaces.txt").write_text("content")
        (test_dir / "file-with-dashes.txt").write_text("content")
        (test_dir / "file_with_underscores.txt").write_text("content")
        (test_dir / "file.multiple.dots.txt").write_text("content")

        result = remove_all_in_directory(test_dir)
        assert result is True
        assert list(test_dir.iterdir()) == []

    def test_remove_unicode_filenames(self, tmp_path: Path):
        """Test removing files with unicode characters in names"""
        test_dir = tmp_path / "test_dir"
        test_dir.mkdir()

        # Create files with unicode names
        (test_dir / "ファイル.txt").write_text("content")
        (test_dir / "文件.txt").write_text("content")
        (test_dir / "αρχείο.txt").write_text("content")

        result = remove_all_in_directory(test_dir)
        assert result is True
        assert list(test_dir.iterdir()) == []

    def test_remove_hidden_files(self, tmp_path: Path):
        """Test removing hidden files (dotfiles)"""
        test_dir = tmp_path / "test_dir"
        test_dir.mkdir()

        # Create hidden files
        (test_dir / ".hidden").write_text("content")
        (test_dir / ".gitignore").write_text("content")
        (test_dir / "normal.txt").write_text("content")

        result = remove_all_in_directory(test_dir)
        assert result is True
        assert list(test_dir.iterdir()) == []

    def test_remove_preserves_ignored_hidden_files(self, tmp_path: Path):
        """Test that ignored hidden files are preserved"""
        test_dir = tmp_path / "test_dir"
        test_dir.mkdir()

        (test_dir / ".gitkeep").write_text("keep")
        (test_dir / "file.txt").write_text("remove")

        result = remove_all_in_directory(test_dir, ignore_files=[".gitkeep"])
        assert result is True
        remaining = list(test_dir.iterdir())
        assert len(remaining) == 1
        assert remaining[0].name == ".gitkeep"

    def test_remove_large_number_of_files(self, tmp_path: Path):
        """Test removing a large number of files"""
        test_dir = tmp_path / "test_dir"
        test_dir.mkdir()

        # Create 100 files
        for i in range(100):
            (test_dir / f"file_{i:03d}.txt").write_text(f"content {i}")

        result = remove_all_in_directory(test_dir)
        assert result is True
        assert list(test_dir.iterdir()) == []

    def test_remove_deeply_nested_with_ignore(self, tmp_path: Path):
        """Test removing structure while preserving ignored items

        Note: rglob processes files depth-first, so files inside an ignored
        directory will be processed (and potentially removed) before the directory
        itself is checked. Only items at the same level or that share the same name
        as ignored items will be preserved.
        """
        test_dir = tmp_path / "test_dir"
        test_dir.mkdir()

        # Create structure
        level1 = test_dir / "level1"
        level1.mkdir()
        keep_file = test_dir / "keep.txt"
        (level1 / "file.txt").write_text("remove")
        keep_file.write_text("keep this file")
        (test_dir / "top.txt").write_text("remove")

        result = remove_all_in_directory(test_dir, ignore_files=["keep.txt"])
        assert result is True
        # Check that keep.txt is preserved
        assert keep_file.exists()
        assert keep_file.read_text() == "keep this file"
        # Other items should be removed
        assert not (test_dir / "top.txt").exists()
        assert not level1.exists()

    def test_remove_binary_files(self, tmp_path: Path):
        """Test removing binary files"""
        test_dir = tmp_path / "test_dir"
        test_dir.mkdir()

        # Create binary files
        (test_dir / "binary1.bin").write_bytes(bytes(range(256)))
        (test_dir / "binary2.dat").write_bytes(b"\x00\x01\x02\xff")

        result = remove_all_in_directory(test_dir)
        assert result is True
        assert list(test_dir.iterdir()) == []

    def test_remove_symlinks(self, tmp_path: Path):
        """Test removing symbolic links"""
        test_dir = tmp_path / "test_dir"
        test_dir.mkdir()

        # Create a file and a symlink to it
        original = tmp_path / "original.txt"
        original.write_text("original content")
        symlink = test_dir / "link.txt"
        symlink.symlink_to(original)

        result = remove_all_in_directory(test_dir)
        assert result is True
        assert list(test_dir.iterdir()) == []
        # Original file should still exist
        assert original.exists()

    def test_remove_with_permissions_variations(self, tmp_path: Path):
        """Test removing files with different permissions"""
        test_dir = tmp_path / "test_dir"
        test_dir.mkdir()

        # Create files
        file1 = test_dir / "readonly.txt"
        file2 = test_dir / "normal.txt"
        file1.write_text("readonly")
        file2.write_text("normal")

        # Make file1 read-only
        file1.chmod(0o444)

        result = remove_all_in_directory(test_dir)
        assert result is True
        assert list(test_dir.iterdir()) == []

    def test_remove_default_parameters(self, tmp_path: Path):
        """Test function with only required parameter"""
        test_dir = tmp_path / "test_dir"
        test_dir.mkdir()
        (test_dir / "file.txt").write_text("content")

        result = remove_all_in_directory(test_dir)
        assert result is True
        assert list(test_dir.iterdir()) == []

    def test_remove_return_value_true_when_successful(self, tmp_path: Path):
        """Test that function returns True on successful removal"""
        test_dir = tmp_path / "test_dir"
        test_dir.mkdir()
        (test_dir / "file.txt").write_text("content")

        result = remove_all_in_directory(test_dir)
        assert result is True
        assert isinstance(result, bool)

    def test_remove_return_value_false_when_not_directory(self, tmp_path: Path):
        """Test that function returns False when path is not a directory"""
        test_file = tmp_path / "file.txt"
        test_file.write_text("content")

        result = remove_all_in_directory(test_file)
        assert result is False
        assert isinstance(result, bool)

    def test_remove_directory_becomes_empty(self, tmp_path: Path):
        """Test that directory is empty after removal"""
        test_dir = tmp_path / "test_dir"
        test_dir.mkdir()

        # Create various items
        (test_dir / "file.txt").write_text("content")
        subdir = test_dir / "subdir"
        subdir.mkdir()
        (subdir / "nested.txt").write_text("nested")

        # Verify directory is not empty before
        assert len(list(test_dir.iterdir())) > 0

        result = remove_all_in_directory(test_dir)
        assert result is True

        # Verify directory is empty after
        assert len(list(test_dir.iterdir())) == 0
        assert test_dir.exists()
        assert test_dir.is_dir()


class TestIntegration:
    """Integration tests for file_handling module"""

    def test_multiple_remove_operations(self, tmp_path: Path):
        """Test multiple consecutive remove operations"""
        test_dir = tmp_path / "test_dir"
        test_dir.mkdir()

        # First batch of files
        (test_dir / "batch1_file1.txt").write_text("content")
        (test_dir / "batch1_file2.txt").write_text("content")

        result1 = remove_all_in_directory(test_dir)
        assert result1 is True
        assert list(test_dir.iterdir()) == []

        # Second batch of files
        (test_dir / "batch2_file1.txt").write_text("content")
        (test_dir / "batch2_file2.txt").write_text("content")

        result2 = remove_all_in_directory(test_dir)
        assert result2 is True
        assert list(test_dir.iterdir()) == []

    def test_remove_then_recreate(self, tmp_path: Path):
        """Test removing files then recreating them"""
        test_dir = tmp_path / "test_dir"
        test_dir.mkdir()

        # Create and remove
        original_file = test_dir / "file.txt"
        original_file.write_text("original")
        remove_all_in_directory(test_dir)
        assert not original_file.exists()

        # Recreate
        new_file = test_dir / "file.txt"
        new_file.write_text("new content")
        assert new_file.exists()
        assert new_file.read_text() == "new content"

    def test_cleanup_workflow(self, tmp_path: Path):
        """Test a typical cleanup workflow"""
        test_dir = tmp_path / "test_dir"
        test_dir.mkdir()

        # Simulate work directory
        (test_dir / "temp1.tmp").write_text("temp")
        (test_dir / "temp2.tmp").write_text("temp")
        (test_dir / "result.txt").write_text("important")

        # Clean up temp files, keep result
        result = remove_all_in_directory(
            test_dir,
            ignore_files=["result.txt"]
        )
        assert result is True

        remaining = list(test_dir.iterdir())
        assert len(remaining) == 1
        assert remaining[0].name == "result.txt"
        assert remaining[0].read_text() == "important"

# __END__
|
||||
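The suite above drives remove_all_in_directory purely through its observable behavior. For orientation, a minimal sketch that would satisfy these expectations could look as follows; the signature, the ignore_files parameter (named after the cleanup-workflow test), and the False-on-non-directory handling are assumptions read off the tests, not the shipped corelibs implementation:

from pathlib import Path
import shutil


def remove_all_in_directory(path: Path, ignore_files: list[str] | None = None) -> bool:
    """Empty a directory in place; keep entries listed in ignore_files (assumed sketch)."""
    if not path.is_dir():
        # Mirrors the tests: non-directories yield False instead of raising
        return False
    ignore = set(ignore_files or [])
    for entry in path.iterdir():
        if entry.name in ignore:
            continue
        if entry.is_dir() and not entry.is_symlink():
            # Remove whole subtrees; real code may chmod read-only entries first
            shutil.rmtree(entry)
        else:
            # unlink() removes files and symlinks without following the link target
            entry.unlink()
    return True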
@@ -1,601 +0,0 @@
"""
tests for corelibs.iterator_handling.data_search
"""

# pylint: disable=use-implicit-booleaness-not-comparison

from typing import Any
import pytest
from corelibs.iterator_handling.data_search import (
    find_in_array_from_list,
    key_lookup,
    value_lookup,
    ArraySearchList
)


class TestFindInArrayFromList:
    """Tests for find_in_array_from_list function"""

    def test_basic_single_key_match(self):
        """Test basic search with single key-value pair"""
        data = [
            {"name": "Alice", "age": 30},
            {"name": "Bob", "age": 25},
            {"name": "Charlie", "age": 35}
        ]
        search_params: list[ArraySearchList] = [
            {"key": "name", "value": "Bob"}
        ]

        result = find_in_array_from_list(data, search_params)

        assert len(result) == 1
        assert result[0]["name"] == "Bob"
        assert result[0]["age"] == 25

    def test_multiple_key_match(self):
        """Test search with multiple key-value pairs (AND logic)"""
        data = [
            {"name": "Alice", "age": 30, "city": "New York"},
            {"name": "Bob", "age": 25, "city": "London"},
            {"name": "Charlie", "age": 30, "city": "Paris"}
        ]
        search_params: list[ArraySearchList] = [
            {"key": "age", "value": 30},
            {"key": "city", "value": "New York"}
        ]

        result = find_in_array_from_list(data, search_params)

        assert len(result) == 1
        assert result[0]["name"] == "Alice"

    def test_value_list_or_match(self):
        """Test search with list of values (OR logic)"""
        data = [
            {"name": "Alice", "status": "active"},
            {"name": "Bob", "status": "inactive"},
            {"name": "Charlie", "status": "pending"}
        ]
        search_params: list[ArraySearchList] = [
            {"key": "status", "value": ["active", "pending"]}
        ]

        result = find_in_array_from_list(data, search_params)

        assert len(result) == 2
        assert result[0]["name"] == "Alice"
        assert result[1]["name"] == "Charlie"

    def test_case_sensitive_true(self):
        """Test case-sensitive search (default behavior)"""
        data = [
            {"name": "Alice"},
            {"name": "alice"},
            {"name": "ALICE"}
        ]
        search_params: list[ArraySearchList] = [
            {"key": "name", "value": "Alice"}
        ]

        result = find_in_array_from_list(data, search_params)

        assert len(result) == 1
        assert result[0]["name"] == "Alice"

    def test_case_insensitive_search(self):
        """Test case-insensitive search"""
        data = [
            {"name": "Alice"},
            {"name": "alice"},
            {"name": "ALICE"}
        ]
        search_params: list[ArraySearchList] = [
            {"key": "name", "value": "alice", "case_sensitive": False}
        ]

        result = find_in_array_from_list(data, search_params)

        assert len(result) == 3

    def test_case_insensitive_with_list_values(self):
        """Test case-insensitive search with list of values"""
        data = [
            {"status": "ACTIVE"},
            {"status": "Pending"},
            {"status": "inactive"}
        ]
        search_params: list[ArraySearchList] = [
            {"key": "status", "value": ["active", "pending"], "case_sensitive": False}
        ]

        result = find_in_array_from_list(data, search_params)

        assert len(result) == 2
        assert result[0]["status"] == "ACTIVE"
        assert result[1]["status"] == "Pending"

    def test_return_index_true(self):
        """Test returning results with index"""
        data = [
            {"name": "Alice"},
            {"name": "Bob"},
            {"name": "Charlie"}
        ]
        search_params: list[ArraySearchList] = [
            {"key": "name", "value": "Bob"}
        ]

        result = find_in_array_from_list(data, search_params, return_index=True)

        assert len(result) == 1
        assert result[0]["index"] == 1
        assert result[0]["data"]["name"] == "Bob"

    def test_return_index_multiple_results(self):
        """Test returning multiple results with indices"""
        data = [
            {"status": "active"},
            {"status": "inactive"},
            {"status": "active"}
        ]
        search_params: list[ArraySearchList] = [
            {"key": "status", "value": "active"}
        ]

        result = find_in_array_from_list(data, search_params, return_index=True)

        assert len(result) == 2
        assert result[0]["index"] == 0
        assert result[0]["data"]["status"] == "active"
        assert result[1]["index"] == 2
        assert result[1]["data"]["status"] == "active"

    def test_no_match_returns_empty_list(self):
        """Test that no match returns empty list"""
        data = [
            {"name": "Alice"},
            {"name": "Bob"}
        ]
        search_params: list[ArraySearchList] = [
            {"key": "name", "value": "Charlie"}
        ]

        result = find_in_array_from_list(data, search_params)

        assert result == []

    def test_empty_data_returns_empty_list(self):
        """Test that empty data list returns empty list"""
        data: list[dict[str, Any]] = []
        search_params: list[ArraySearchList] = [
            {"key": "name", "value": "Alice"}
        ]

        result = find_in_array_from_list(data, search_params)

        assert result == []

    def test_missing_key_in_data(self):
        """Test search when key doesn't exist in some data items"""
        data = [
            {"name": "Alice", "age": 30},
            {"name": "Bob"},  # Missing 'age' key
            {"name": "Charlie", "age": 30}
        ]
        search_params: list[ArraySearchList] = [
            {"key": "age", "value": 30}
        ]

        result = find_in_array_from_list(data, search_params)

        assert len(result) == 2
        assert result[0]["name"] == "Alice"
        assert result[1]["name"] == "Charlie"

    def test_numeric_values(self):
        """Test search with numeric values"""
        data = [
            {"id": 1, "score": 95},
            {"id": 2, "score": 87},
            {"id": 3, "score": 95}
        ]
        search_params: list[ArraySearchList] = [
            {"key": "score", "value": 95}
        ]

        result = find_in_array_from_list(data, search_params)

        assert len(result) == 2
        assert result[0]["id"] == 1
        assert result[1]["id"] == 3

    def test_boolean_values(self):
        """Test search with boolean values"""
        data = [
            {"name": "Alice", "active": True},
            {"name": "Bob", "active": False},
            {"name": "Charlie", "active": True}
        ]
        search_params: list[ArraySearchList] = [
            {"key": "active", "value": True}
        ]

        result = find_in_array_from_list(data, search_params)

        assert len(result) == 2
        assert result[0]["name"] == "Alice"
        assert result[1]["name"] == "Charlie"

    def test_float_values(self):
        """Test search with float values"""
        data = [
            {"name": "Product A", "price": 19.99},
            {"name": "Product B", "price": 29.99},
            {"name": "Product C", "price": 19.99}
        ]
        search_params: list[ArraySearchList] = [
            {"key": "price", "value": 19.99}
        ]

        result = find_in_array_from_list(data, search_params)

        assert len(result) == 2
        assert result[0]["name"] == "Product A"
        assert result[1]["name"] == "Product C"

    def test_mixed_value_types_in_list(self):
        """Test search with mixed types in value list"""
        data = [
            {"id": "1", "value": "active"},
            {"id": 2, "value": "pending"},
            {"id": "3", "value": "active"}
        ]
        search_params: list[ArraySearchList] = [
            {"key": "id", "value": ["1", "3"]}
        ]

        result = find_in_array_from_list(data, search_params)

        assert len(result) == 2
        assert result[0]["id"] == "1"
        assert result[1]["id"] == "3"

    def test_complex_multi_criteria_search(self):
        """Test complex search with multiple criteria"""
        data = [
            {"name": "Alice", "age": 30, "city": "New York", "status": "active"},
            {"name": "Bob", "age": 25, "city": "London", "status": "active"},
            {"name": "Charlie", "age": 30, "city": "Paris", "status": "inactive"},
            {"name": "David", "age": 30, "city": "New York", "status": "active"}
        ]
        search_params: list[ArraySearchList] = [
            {"key": "age", "value": 30},
            {"key": "city", "value": "New York"},
            {"key": "status", "value": "active"}
        ]

        result = find_in_array_from_list(data, search_params)

        assert len(result) == 2
        assert result[0]["name"] == "Alice"
        assert result[1]["name"] == "David"

    def test_invalid_search_params_not_list(self):
        """Test that non-list search_params raises ValueError"""
        data = [{"name": "Alice"}]
        search_params = {"key": "name", "value": "Alice"}  # type: ignore

        with pytest.raises(ValueError, match="search_params must be a list"):
            find_in_array_from_list(data, search_params)  # type: ignore

    def test_missing_key_in_search_params(self):
        """Test that missing 'key' in search_params raises KeyError"""
        data = [{"name": "Alice"}]
        search_params: list[dict[str, Any]] = [
            {"value": "Alice"}  # Missing 'key'
        ]

        with pytest.raises(KeyError, match="Either Key '' or Value 'Alice' is missing or empty"):
            find_in_array_from_list(data, search_params)  # type: ignore

    def test_missing_value_in_search_params(self):
        """Test that missing 'value' in search_params raises KeyError"""
        data = [{"name": "Alice"}]
        search_params: list[dict[str, Any]] = [
            {"key": "name"}  # Missing 'value'
        ]

        with pytest.raises(KeyError, match="Either Key 'name' or Value"):
            find_in_array_from_list(data, search_params)  # type: ignore

    def test_empty_key_in_search_params(self):
        """Test that empty 'key' in search_params raises KeyError"""
        data = [{"name": "Alice"}]
        search_params: list[dict[str, Any]] = [
            {"key": "", "value": "Alice"}
        ]

        with pytest.raises(KeyError, match="Either Key '' or Value 'Alice' is missing or empty"):
            find_in_array_from_list(data, search_params)  # type: ignore

    def test_empty_value_in_search_params(self):
        """Test that empty 'value' in search_params raises KeyError"""
        data = [{"name": "Alice"}]
        search_params: list[dict[str, Any]] = [
            {"key": "name", "value": ""}
        ]

        with pytest.raises(KeyError, match="Either Key 'name' or Value '' is missing or empty"):
            find_in_array_from_list(data, search_params)  # type: ignore

    def test_duplicate_key_in_search_params(self):
        """Test that duplicate keys in search_params raise KeyError"""
        data = [{"name": "Alice", "age": 30}]
        search_params: list[ArraySearchList] = [
            {"key": "name", "value": "Alice"},
            {"key": "name", "value": "Bob"}  # Duplicate key
        ]

        with pytest.raises(KeyError, match="Key name already exists in search_params"):
            find_in_array_from_list(data, search_params)

    def test_partial_match_fails(self):
        """Test that partial match (not all criteria) returns no result"""
        data = [
            {"name": "Alice", "age": 30, "city": "New York"}
        ]
        search_params: list[ArraySearchList] = [
            {"key": "name", "value": "Alice"},
            {"key": "age", "value": 25}  # Doesn't match
        ]

        result = find_in_array_from_list(data, search_params)

        assert result == []

    def test_none_value_in_list(self):
        """Test search with None in value list"""
        data = [
            {"name": "Alice", "nickname": "Ally"},
            {"name": "Bob", "nickname": None},
            {"name": "Charlie", "nickname": "Chuck"}
        ]
        search_params: list[ArraySearchList] = [
            {"key": "nickname", "value": [None, "Chuck"]}
        ]

        result = find_in_array_from_list(data, search_params)

        assert len(result) == 2
        assert result[0]["name"] == "Bob"
        assert result[1]["name"] == "Charlie"

    @pytest.mark.parametrize("test_value,expected_count", [
        ("active", 1),
        ("inactive", 1),
        ("pending", 1),
        ("archived", 0)
    ])
    def test_parametrized_status_search(self, test_value: str, expected_count: int):
        """Parametrized test for different status values"""
        data = [
            {"id": 1, "status": "active"},
            {"id": 2, "status": "inactive"},
            {"id": 3, "status": "pending"}
        ]
        search_params: list[ArraySearchList] = [
            {"key": "status", "value": test_value}
        ]

        result = find_in_array_from_list(data, search_params)

        assert len(result) == expected_count


class TestKeyLookup:
    """Tests for key_lookup function"""

    def test_key_exists(self):
        """Test lookup when key exists"""
        haystack = {"name": "Alice", "age": "30", "city": "New York"}

        result = key_lookup(haystack, "name")

        assert result == "Alice"

    def test_key_not_exists(self):
        """Test that lookup returns an empty string when the key doesn't exist"""
        haystack = {"name": "Alice", "age": "30"}

        result = key_lookup(haystack, "city")

        assert result == ""

    def test_empty_dict(self):
        """Test lookup in empty dictionary"""
        haystack: dict[str, str] = {}

        result = key_lookup(haystack, "name")

        assert result == ""

    def test_multiple_lookups(self):
        """Test multiple lookups in same dictionary"""
        haystack = {"first": "John", "last": "Doe", "email": "john@example.com"}

        assert key_lookup(haystack, "first") == "John"
        assert key_lookup(haystack, "last") == "Doe"
        assert key_lookup(haystack, "email") == "john@example.com"
        assert key_lookup(haystack, "phone") == ""

    def test_numeric_string_values(self):
        """Test lookup with numeric string values"""
        haystack = {"count": "42", "price": "19.99"}

        assert key_lookup(haystack, "count") == "42"
        assert key_lookup(haystack, "price") == "19.99"

    def test_empty_string_value(self):
        """Test lookup when value is empty string"""
        haystack = {"name": "", "city": "New York"}

        result = key_lookup(haystack, "name")

        assert result == ""

    def test_whitespace_value(self):
        """Test lookup when value contains whitespace"""
        haystack = {"name": " Alice ", "message": " "}

        assert key_lookup(haystack, "name") == " Alice "
        assert key_lookup(haystack, "message") == " "

    @pytest.mark.parametrize("key,expected", [
        ("a", "1"),
        ("b", "2"),
        ("c", "3"),
        ("d", "")
    ])
    def test_parametrized_lookup(self, key: str, expected: str):
        """Parametrized test for key lookup"""
        haystack = {"a": "1", "b": "2", "c": "3"}

        result = key_lookup(haystack, key)

        assert result == expected


class TestValueLookup:
    """Tests for value_lookup function"""

    def test_value_exists_single(self):
        """Test lookup when value exists once"""
        haystack = {"name": "Alice", "username": "alice123", "email": "alice@example.com"}

        result = value_lookup(haystack, "Alice")

        assert result == "name"

    def test_value_not_exists(self):
        """Test that lookup returns an empty string when the value doesn't exist"""
        haystack = {"name": "Alice", "username": "alice123"}

        result = value_lookup(haystack, "Bob")

        assert result == ""

    def test_value_exists_multiple_no_raise(self):
        """Test that lookup returns the first match when the value exists multiple times"""
        haystack = {"key1": "duplicate", "key2": "unique", "key3": "duplicate"}

        result = value_lookup(haystack, "duplicate")

        assert result in ["key1", "key3"]  # Order may vary in dict

    def test_value_exists_multiple_raise_on_many_false(self):
        """Test lookup with multiple matches and raise_on_many=False"""
        haystack = {"a": "same", "b": "same", "c": "different"}

        result = value_lookup(haystack, "same", raise_on_many=False)

        assert result in ["a", "b"]

    def test_value_exists_multiple_raise_on_many_true(self):
        """Test that lookup with multiple matches and raise_on_many=True raises ValueError"""
        haystack = {"a": "same", "b": "same", "c": "different"}

        with pytest.raises(ValueError, match="More than one element found with the same name"):
            value_lookup(haystack, "same", raise_on_many=True)

    def test_value_exists_single_raise_on_many_true(self):
        """Test that lookup with a single match and raise_on_many=True works fine"""
        haystack = {"name": "Alice", "username": "alice123"}

        result = value_lookup(haystack, "Alice", raise_on_many=True)

        assert result == "name"

    def test_empty_dict(self):
        """Test lookup in empty dictionary"""
        haystack: dict[str, str] = {}

        result = value_lookup(haystack, "Alice")

        assert result == ""

    def test_empty_dict_raise_on_many(self):
        """Test lookup in empty dictionary with raise_on_many=True"""
        haystack: dict[str, str] = {}

        result = value_lookup(haystack, "Alice", raise_on_many=True)

        assert result == ""

    def test_numeric_string_values(self):
        """Test lookup with numeric string values"""
        haystack = {"id": "123", "count": "456", "score": "123"}

        result = value_lookup(haystack, "456")

        assert result == "count"

    def test_empty_string_value(self):
        """Test lookup for empty string value"""
        haystack = {"name": "", "city": "New York", "country": ""}

        result = value_lookup(haystack, "")

        assert result in ["name", "country"]

    def test_whitespace_value(self):
        """Test lookup for whitespace value"""
        haystack = {"a": " spaces ", "b": "normal", "c": " spaces "}

        result = value_lookup(haystack, " spaces ")

        assert result in ["a", "c"]

    def test_case_sensitive_lookup(self):
        """Test that lookup is case-sensitive"""
        haystack = {"name": "Alice", "username": "alice", "email": "ALICE"}

        assert value_lookup(haystack, "Alice") == "name"
        assert value_lookup(haystack, "alice") == "username"
        assert value_lookup(haystack, "ALICE") == "email"
        assert value_lookup(haystack, "aLiCe") == ""

    def test_special_characters(self):
        """Test lookup with special characters"""
        haystack = {"key1": "test@example.com", "key2": "test#value", "key3": "test@example.com"}

        result = value_lookup(haystack, "test@example.com")

        assert result in ["key1", "key3"]

    @pytest.mark.parametrize("value,expected_key", [
        ("value1", "a"),
        ("value2", "b"),
        ("value3", "c"),
        ("nonexistent", "")
    ])
    def test_parametrized_lookup(self, value: str, expected_key: str):
        """Parametrized test for value lookup"""
        haystack = {"a": "value1", "b": "value2", "c": "value3"}

        result = value_lookup(haystack, value)

        assert result == expected_key

    def test_duplicate_values_consistent_return(self):
        """Test that lookup with duplicates consistently returns one of the keys"""
        haystack = {"x": "dup", "y": "dup", "z": "dup"}

        # Should return same key consistently
        result1 = value_lookup(haystack, "dup")
        result2 = value_lookup(haystack, "dup")
        result3 = value_lookup(haystack, "dup")

        assert result1 == result2 == result3
        assert result1 in ["x", "y", "z"]
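For reference, key_lookup and value_lookup are exercised above only through their return values. Minimal sketches consistent with those expectations might look like this (assumed shapes, not the actual corelibs code):

def key_lookup(haystack: dict[str, str], needle: str) -> str:
    """Return the value stored under needle, or an empty string if absent (assumed sketch)."""
    return haystack.get(needle, "")


def value_lookup(haystack: dict[str, str], needle: str, raise_on_many: bool = False) -> str:
    """Return the first key whose value equals needle, or an empty string (assumed sketch)."""
    matches = [key for key, value in haystack.items() if value == needle]
    if raise_on_many and len(matches) > 1:
        # Matches the error message the tests assert on
        raise ValueError("More than one element found with the same name")
    return matches[0] if matches else ""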
@@ -1,652 +0,0 @@
"""
iterator_handling.dict_helpers tests
"""

# pylint: disable=use-implicit-booleaness-not-comparison

from typing import Any
import pytest
from corelibs.iterator_handling.dict_helpers import (
    delete_keys_from_set,
    build_dict,
    set_entry,
)


class TestDeleteKeysFromSet:
    """Test cases for delete_keys_from_set function"""

    def test_delete_single_key_from_dict(self):
        """Test deleting a single key from a dictionary"""
        set_data = {"a": 1, "b": 2, "c": 3}
        keys = ["b"]
        result = delete_keys_from_set(set_data, keys)
        assert result == {"a": 1, "c": 3}
        assert "b" not in result

    def test_delete_multiple_keys_from_dict(self):
        """Test deleting multiple keys from a dictionary"""
        set_data = {"a": 1, "b": 2, "c": 3, "d": 4}
        keys = ["b", "d"]
        result = delete_keys_from_set(set_data, keys)
        assert result == {"a": 1, "c": 3}
        assert "b" not in result
        assert "d" not in result

    def test_delete_all_keys_from_dict(self):
        """Test deleting all keys from a dictionary"""
        set_data = {"a": 1, "b": 2}
        keys = ["a", "b"]
        result = delete_keys_from_set(set_data, keys)
        assert result == {}

    def test_delete_nonexistent_key(self):
        """Test deleting a key that doesn't exist"""
        set_data = {"a": 1, "b": 2}
        keys = ["c", "d"]
        result = delete_keys_from_set(set_data, keys)
        assert result == {"a": 1, "b": 2}

    def test_delete_keys_from_nested_dict(self):
        """Test deleting keys from nested dictionaries"""
        set_data = {
            "a": 1,
            "b": {"c": 2, "d": 3, "e": 4},
            "f": 5
        }
        keys = ["d", "f"]
        result = delete_keys_from_set(set_data, keys)
        assert result == {"a": 1, "b": {"c": 2, "e": 4}}
        assert "d" not in result["b"]  # type: ignore
        assert "f" not in result

    def test_delete_keys_from_deeply_nested_dict(self):
        """Test deleting keys from deeply nested structures"""
        set_data = {
            "a": 1,
            "b": {
                "c": 2,
                "d": {
                    "e": 3,
                    "f": 4
                }
            },
            "g": 5
        }
        keys = ["f", "g"]
        result = delete_keys_from_set(set_data, keys)
        assert result == {"a": 1, "b": {"c": 2, "d": {"e": 3}}}
        assert "g" not in result

    def test_delete_keys_from_list(self):
        """Test with list containing dictionaries"""
        set_data = [
            {"a": 1, "b": 2},
            {"c": 3, "d": 4},
            {"e": 5, "f": 6}
        ]
        keys = ["b", "d", "f"]
        result = delete_keys_from_set(set_data, keys)
        assert result == [
            {"a": 1},
            {"c": 3},
            {"e": 5}
        ]

    def test_delete_keys_from_list_with_nested_dicts(self):
        """Test with list containing nested dictionaries"""
        set_data = [
            {"a": 1, "b": {"c": 2, "d": 3}},
            {"e": 4, "f": {"g": 5, "h": 6}}
        ]
        keys = ["d", "h"]
        result = delete_keys_from_set(set_data, keys)
        assert result == [
            {"a": 1, "b": {"c": 2}},
            {"e": 4, "f": {"g": 5}}
        ]

    def test_delete_keys_from_dict_with_list_values(self):
        """Test with dictionary containing list values"""
        set_data = {
            "a": [{"b": 1, "c": 2}, {"d": 3, "e": 4}],
            "f": 5
        }
        keys = ["c", "e"]
        result = delete_keys_from_set(set_data, keys)
        assert result == {
            "a": [{"b": 1}, {"d": 3}],
            "f": 5
        }

    def test_empty_keys_list(self):
        """Test with empty keys list - should return data unchanged"""
        set_data = {"a": 1, "b": 2, "c": 3}
        keys: list[str] = []
        result = delete_keys_from_set(set_data, keys)
        assert result == set_data

    def test_empty_dict(self):
        """Test with empty dictionary"""
        set_data: dict[str, Any] = {}
        keys = ["a", "b"]
        result = delete_keys_from_set(set_data, keys)
        assert result == {}

    def test_empty_list(self):
        """Test with empty list"""
        set_data: list[Any] = []
        keys = ["a", "b"]
        result = delete_keys_from_set(set_data, keys)
        assert result == []

    def test_string_input(self):
        """Test with string input - should convert to list"""
        set_data = "hello"
        keys = ["a"]
        result = delete_keys_from_set(set_data, keys)
        assert result == ["hello"]

    def test_complex_mixed_structure(self):
        """Test with complex mixed structure"""
        set_data = {
            "users": [
                {
                    "name": "Alice",
                    "age": 30,
                    "password": "secret1",
                    "profile": {
                        "email": "alice@example.com",
                        "password": "secret2"
                    }
                },
                {
                    "name": "Bob",
                    "age": 25,
                    "password": "secret3",
                    "profile": {
                        "email": "bob@example.com",
                        "password": "secret4"
                    }
                }
            ],
            "metadata": {
                "count": 2,
                "password": "admin"
            }
        }
        keys = ["password"]
        result = delete_keys_from_set(set_data, keys)

        # Check that all password fields are removed
        assert "password" not in result["metadata"]  # type: ignore
        for user in result["users"]:  # type: ignore
            assert "password" not in user
            assert "password" not in user["profile"]

        # Check that other fields remain
        assert result["users"][0]["name"] == "Alice"  # type: ignore
        assert result["users"][1]["name"] == "Bob"  # type: ignore
        assert result["metadata"]["count"] == 2  # type: ignore

    def test_dict_with_none_values(self):
        """Test with dictionary containing None values"""
        set_data = {"a": 1, "b": None, "c": 3}
        keys = ["b"]
        result = delete_keys_from_set(set_data, keys)
        assert result == {"a": 1, "c": 3}

    def test_dict_with_various_value_types(self):
        """Test with dictionary containing various value types"""
        set_data = {
            "int": 42,
            "float": 3.14,
            "bool": True,
            "str": "hello",
            "list": [1, 2, 3],
            "dict": {"nested": "value"},
            "none": None
        }
        keys = ["bool", "none"]
        result = delete_keys_from_set(set_data, keys)
        assert "bool" not in result
        assert "none" not in result
        assert len(result) == 5


class TestBuildDict:
    """Test cases for build_dict function"""

    def test_build_dict_without_ignore_entries(self):
        """Test build_dict without ignore_entries (None)"""
        input_dict = {"a": 1, "b": 2, "c": 3}
        result = build_dict(input_dict)
        assert result == input_dict
        assert result is input_dict  # Should return same object

    def test_build_dict_with_ignore_entries_single(self):
        """Test build_dict with single ignore entry"""
        input_dict = {"a": 1, "b": 2, "c": 3}
        ignore = ["b"]
        result = build_dict(input_dict, ignore)
        assert result == {"a": 1, "c": 3}
        assert "b" not in result

    def test_build_dict_with_ignore_entries_multiple(self):
        """Test build_dict with multiple ignore entries"""
        input_dict = {"a": 1, "b": 2, "c": 3, "d": 4}
        ignore = ["b", "d"]
        result = build_dict(input_dict, ignore)
        assert result == {"a": 1, "c": 3}

    def test_build_dict_with_nested_ignore(self):
        """Test build_dict with nested structures"""
        input_dict = {
            "a": 1,
            "b": {"c": 2, "d": 3},
            "e": 4
        }
        ignore = ["d", "e"]
        result = build_dict(input_dict, ignore)
        assert result == {"a": 1, "b": {"c": 2}}
        assert "e" not in result
        assert "d" not in result["b"]  # type: ignore

    def test_build_dict_with_empty_ignore_list(self):
        """Test build_dict with empty ignore list"""
        input_dict = {"a": 1, "b": 2}
        ignore: list[str] = []
        result = build_dict(input_dict, ignore)
        assert result == input_dict

    def test_build_dict_with_nonexistent_ignore_keys(self):
        """Test build_dict with keys that don't exist"""
        input_dict = {"a": 1, "b": 2}
        ignore = ["c", "d"]
        result = build_dict(input_dict, ignore)
        assert result == {"a": 1, "b": 2}

    def test_build_dict_ignore_all_keys(self):
        """Test build_dict ignoring all keys"""
        input_dict = {"a": 1, "b": 2}
        ignore = ["a", "b"]
        result = build_dict(input_dict, ignore)
        assert result == {}

    def test_build_dict_with_complex_structure(self):
        """Test build_dict with complex nested structure"""
        input_dict = {
            "ResponseMetadata": {
                "RequestId": "12345",
                "HTTPStatusCode": 200,
                "RetryAttempts": 0
            },
            "data": {
                "id": 1,
                "name": "Test",
                "ResponseMetadata": {"internal": "value"}
            },
            "status": "success"
        }
        ignore = ["ResponseMetadata", "RetryAttempts"]
        result = build_dict(input_dict, ignore)

        # ResponseMetadata should be removed at all levels
        assert "ResponseMetadata" not in result
        assert "ResponseMetadata" not in result["data"]  # type: ignore
        assert result["data"]["name"] == "Test"  # type: ignore
        assert result["status"] == "success"  # type: ignore

    def test_build_dict_with_list_values(self):
        """Test build_dict with lists containing dictionaries"""
        input_dict = {
            "items": [
                {"id": 1, "temp": "remove"},
                {"id": 2, "temp": "remove"}
            ],
            "temp": "also_remove"
        }
        ignore = ["temp"]
        result = build_dict(input_dict, ignore)

        assert "temp" not in result
        assert "temp" not in result["items"][0]  # type: ignore
        assert "temp" not in result["items"][1]  # type: ignore
        assert result["items"][0]["id"] == 1  # type: ignore
        assert result["items"][1]["id"] == 2  # type: ignore

    def test_build_dict_empty_input(self):
        """Test build_dict with empty dictionary"""
        input_dict: dict[str, Any] = {}
        result = build_dict(input_dict, ["a", "b"])
        assert result == {}

    def test_build_dict_preserves_type_annotation(self):
        """Test that build_dict preserves proper type"""
        input_dict = {"a": 1, "b": [1, 2, 3], "c": {"nested": "value"}}
        result = build_dict(input_dict)
        assert isinstance(result, dict)
        assert isinstance(result["b"], list)
        assert isinstance(result["c"], dict)


class TestSetEntry:
    """Test cases for set_entry function"""

    def test_set_entry_new_key(self):
        """Test setting a new key in dictionary"""
        dict_set: dict[str, Any] = {}
        key = "new_key"
        value = "new_value"
        result = set_entry(dict_set, key, value)
        assert result[key] == value
        assert len(result) == 1

    def test_set_entry_existing_key(self):
        """Test overwriting an existing key"""
        dict_set = {"key": "old_value"}
        key = "key"
        value = "new_value"
        result = set_entry(dict_set, key, value)
        assert result[key] == value
        assert result[key] != "old_value"

    def test_set_entry_with_dict_value(self):
        """Test setting a dictionary as value"""
        dict_set: dict[str, Any] = {}
        key = "config"
        value = {"setting1": True, "setting2": "value"}
        result = set_entry(dict_set, key, value)
        assert result[key] == value
        assert isinstance(result[key], dict)

    def test_set_entry_with_list_value(self):
        """Test setting a list as value"""
        dict_set: dict[str, Any] = {}
        key = "items"
        value = [1, 2, 3, 4]
        result = set_entry(dict_set, key, value)
        assert result[key] == value
        assert isinstance(result[key], list)

    def test_set_entry_with_none_value(self):
        """Test setting None as value"""
        dict_set: dict[str, Any] = {}
        key = "nullable"
        value = None
        result = set_entry(dict_set, key, value)
        assert result[key] is None
        assert key in result

    def test_set_entry_with_integer_value(self):
        """Test setting integer value"""
        dict_set: dict[str, Any] = {}
        key = "count"
        value = 42
        result = set_entry(dict_set, key, value)
        assert result[key] == 42
        assert isinstance(result[key], int)

    def test_set_entry_with_float_value(self):
        """Test setting float value"""
        dict_set: dict[str, Any] = {}
        key = "price"
        value = 19.99
        result = set_entry(dict_set, key, value)
        assert result[key] == 19.99
        assert isinstance(result[key], float)

    def test_set_entry_with_boolean_value(self):
        """Test setting boolean value"""
        dict_set: dict[str, Any] = {}
        key = "enabled"
        value = True
        result = set_entry(dict_set, key, value)
        assert result[key] is True
        assert isinstance(result[key], bool)

    def test_set_entry_multiple_times(self):
        """Test setting multiple entries"""
        dict_set: dict[str, Any] = {}
        set_entry(dict_set, "key1", "value1")
        set_entry(dict_set, "key2", "value2")
        set_entry(dict_set, "key3", "value3")

        assert len(dict_set) == 3
        assert dict_set["key1"] == "value1"
        assert dict_set["key2"] == "value2"
        assert dict_set["key3"] == "value3"

    def test_set_entry_overwrites_existing(self):
        """Test that setting an existing key overwrites it"""
        dict_set = {"key": {"old": "data"}}
        value = {"new": "data"}
        result = set_entry(dict_set, "key", value)
        assert result["key"] == {"new": "data"}
        assert "old" not in result["key"]

    def test_set_entry_modifies_original_dict(self):
        """Test that set_entry modifies the original dictionary"""
        dict_set: dict[str, Any] = {}
        result = set_entry(dict_set, "key", "value")
        assert result is dict_set
        assert dict_set["key"] == "value"

    def test_set_entry_with_empty_string_value(self):
        """Test setting empty string as value"""
        dict_set: dict[str, Any] = {}
        key = "empty"
        value = ""
        result = set_entry(dict_set, key, value)
        assert result[key] == ""
        assert key in result

    def test_set_entry_with_complex_nested_structure(self):
        """Test setting complex nested structure"""
        dict_set: dict[str, Any] = {}
        key = "complex"
        value = {
            "level1": {
                "level2": {
                    "level3": ["a", "b", "c"]
                }
            }
        }
        result = set_entry(dict_set, key, value)
        assert result[key]["level1"]["level2"]["level3"] == ["a", "b", "c"]


# Parametrized tests for more comprehensive coverage
class TestParametrized:
    """Parametrized tests for better coverage"""

    @pytest.mark.parametrize("set_data,keys,expected", [
        ({"a": 1, "b": 2}, ["b"], {"a": 1}),
        ({"a": 1, "b": 2, "c": 3}, ["a", "c"], {"b": 2}),
        ({"a": 1}, ["a"], {}),
        ({"a": 1, "b": 2}, ["c"], {"a": 1, "b": 2}),
        ({}, ["a"], {}),
        ({"a": {"b": 1, "c": 2}}, ["c"], {"a": {"b": 1}}),
    ])
    def test_delete_keys_parametrized(
        self,
        set_data: dict[str, Any],
        keys: list[str],
        expected: dict[str, Any]
    ):
        """Test delete_keys_from_set with various inputs"""
        result = delete_keys_from_set(set_data, keys)
        assert result == expected

    @pytest.mark.parametrize("input_dict,ignore,expected", [
        ({"a": 1, "b": 2}, ["b"], {"a": 1}),
        ({"a": 1, "b": 2}, ["c"], {"a": 1, "b": 2}),
        ({"a": 1, "b": 2}, [], {"a": 1, "b": 2}),
        ({"a": 1}, ["a"], {}),
        ({}, ["a"], {}),
    ])
    def test_build_dict_parametrized(
        self,
        input_dict: dict[str, Any],
        ignore: list[str],
        expected: dict[str, Any]
    ):
        """Test build_dict with various inputs"""
        result = build_dict(input_dict, ignore)
        assert result == expected

    @pytest.mark.parametrize("key,value", [
        ("string_key", "string_value"),
        ("int_key", 42),
        ("float_key", 3.14),
        ("bool_key", True),
        ("list_key", [1, 2, 3]),
        ("dict_key", {"nested": "value"}),
        ("none_key", None),
        ("empty_key", ""),
        ("zero_key", 0),
        ("false_key", False),
    ])
    def test_set_entry_parametrized(self, key: str, value: Any):
        """Test set_entry with various value types"""
        dict_set: dict[str, Any] = {}
        result = set_entry(dict_set, key, value)
        assert result[key] == value


# Edge cases and integration tests
class TestEdgeCases:
    """Test edge cases and special scenarios"""

    def test_delete_keys_preserves_modification(self):
        """Test that original dict is modified"""
        set_data = {"a": 1, "b": 2, "c": 3}
        keys = ["b"]
        result = delete_keys_from_set(set_data, keys)
        # The function modifies the original dict
        assert result is set_data
        assert "b" not in set_data

    def test_build_dict_with_aws_typedef_scenario(self):
        """Test build_dict mimicking AWS TypedDict usage"""
        # Simulating AWS response with ResponseMetadata
        aws_response: dict[str, Any] = {
            "Items": [
                {"id": "1", "name": "Item1"},
                {"id": "2", "name": "Item2"}
            ],
            "Count": 2,
            "ScannedCount": 2,
            "ResponseMetadata": {
                "RequestId": "abc123",
                "HTTPStatusCode": 200,
                "HTTPHeaders": {},
                "RetryAttempts": 0
            }
        }
        result = build_dict(aws_response, ["ResponseMetadata"])

        assert "ResponseMetadata" not in result
        assert result["Count"] == 2  # type: ignore
        assert len(result["Items"]) == 2  # type: ignore

    def test_set_entry_idempotency(self):
        """Test that calling set_entry multiple times with same value is idempotent"""
        dict_set: dict[str, Any] = {}
        value = "test_value"

        result1 = set_entry(dict_set, "key", value)
        result2 = set_entry(dict_set, "key", value)
        result3 = set_entry(dict_set, "key", value)

        assert result1 is result2 is result3
        assert result1["key"] == value
        assert len(result1) == 1

    def test_delete_keys_with_circular_reference_protection(self):
        """Test that function handles normal cases without circular issues"""
        # Python dicts can't have true circular references easily
        # but we can test deep nesting
        set_data = {
            "level1": {
                "level2": {
                    "level3": {
                        "level4": {
                            "data": "value",
                            "remove": "this"
                        }
                    }
                }
            }
        }
        keys = ["remove"]
        result = delete_keys_from_set(set_data, keys)
        assert "remove" not in result["level1"]["level2"]["level3"]["level4"]  # type: ignore
        assert result["level1"]["level2"]["level3"]["level4"]["data"] == "value"  # type: ignore

    def test_build_dict_none_ignore_vs_empty_ignore(self):
        """Test difference between None and empty list for ignore_entries"""
        input_dict = {"a": 1, "b": 2}

        result_none = build_dict(input_dict, None)
        result_empty = build_dict(input_dict, [])

        assert result_none == input_dict
        assert result_empty == input_dict
        # With None, it returns the same object
        assert result_none is input_dict
        # With empty list, it goes through delete_keys_from_set
        assert result_empty is input_dict


# Integration tests
class TestIntegration:
    """Integration tests combining multiple functions"""

    def test_build_dict_then_set_entry(self):
        """Test using build_dict followed by set_entry"""
        original = {
            "a": 1,
            "b": 2,
            "remove_me": "gone"
        }
        cleaned = build_dict(original, ["remove_me"])
        result = set_entry(cleaned, "c", 3)

        assert result == {"a": 1, "b": 2, "c": 3}
        assert "remove_me" not in result

    def test_delete_keys_then_set_entry(self):
        """Test using delete_keys_from_set followed by set_entry"""
        data = {"a": 1, "b": 2, "c": 3}
        cleaned = delete_keys_from_set(data, ["b"])
        result = set_entry(cleaned, "d", 4)  # type: ignore

        assert result == {"a": 1, "c": 3, "d": 4}

    def test_multiple_operations_chain(self):
        """Test chaining multiple operations"""
        data = {
            "user": {
                "name": "Alice",
                "password": "secret",
                "email": "alice@example.com"
            },
            "metadata": {
                "created": "2024-01-01",
                "password": "admin"
            }
        }

        # Remove passwords
        cleaned = build_dict(data, ["password"])

        # Add new field
        result = set_entry(cleaned, "processed", True)

        assert "password" not in result["user"]  # type: ignore
        assert "password" not in result["metadata"]  # type: ignore
        assert result["processed"] is True  # type: ignore
        assert result["user"]["name"] == "Alice"  # type: ignore

# __END__
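Reading the expectations above: set_entry mutates and returns the same dict, and build_dict either passes the input through untouched (ignore_entries=None) or strips the ignored keys recursively. A hedged sketch, assuming build_dict delegates to delete_keys_from_set as the None-vs-empty-list test suggests; the real corelibs code may differ:

from typing import Any


def set_entry(dict_set: dict[str, Any], key: str, value: Any) -> dict[str, Any]:
    """Set key to value in place and return the same dict object (assumed sketch)."""
    dict_set[key] = value
    return dict_set


def build_dict(
    input_dict: dict[str, Any],
    ignore_entries: list[str] | None = None
) -> dict[str, Any] | list[Any]:
    """Return input_dict with ignore_entries removed at every nesting level (assumed sketch)."""
    if ignore_entries is None:
        # None short-circuits; the same object comes back untouched
        return input_dict
    # delete_keys_from_set (tested above) walks dicts and lists recursively
    return delete_keys_from_set(input_dict, ignore_entries)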
@@ -1,291 +0,0 @@
"""
tests for corelibs.iterator_handling.dict_mask
"""

from typing import Any
import pytest
from corelibs.iterator_handling.dict_mask import mask


def test_mask_default_behavior():
    """Test masking with default mask_keys"""
    data = {
        "username": "john_doe",
        "password": "secret123",
        "email": "john@example.com",
        "api_secret": "abc123",
        "encryption_key": "xyz789"
    }

    result = mask(data)

    assert result["username"] == "john_doe"
    assert result["password"] == "***"
    assert result["email"] == "john@example.com"
    assert result["api_secret"] == "***"
    assert result["encryption_key"] == "***"


def test_mask_custom_keys():
    """Test masking with custom mask_keys"""
    data = {
        "username": "john_doe",
        "token": "abc123",
        "api_key": "xyz789",
        "password": "secret123"
    }

    result = mask(data, mask_keys=["token", "api"])

    assert result["username"] == "john_doe"
    assert result["token"] == "***"
    assert result["api_key"] == "***"
    assert result["password"] == "secret123"  # Not masked with custom keys


def test_mask_custom_mask_string():
    """Test masking with custom mask string"""
    data = {"password": "secret123"}

    result = mask(data, mask_str="[HIDDEN]")

    assert result["password"] == "[HIDDEN]"


def test_mask_case_insensitive():
    """Test that masking is case-insensitive"""
    data = {
        "PASSWORD": "secret123",
        "Secret_Key": "abc123",
        "ENCRYPTION_data": "xyz789"
    }

    result = mask(data)

    assert result["PASSWORD"] == "***"
    assert result["Secret_Key"] == "***"
    assert result["ENCRYPTION_data"] == "***"


def test_mask_key_patterns():
    """Test different key matching patterns (start, end, contains)"""
    data = {
        "password_hash": "hash123",  # starts with
        "user_password": "secret123",  # ends with
        "my_secret_key": "abc123",  # contains with edges
        "secretvalue": "xyz789",  # contains without edges
        "startsecretvalue": "xyz123",  # contains without edges
        "normal_key": "normal_value"
    }

    result = mask(data)

    assert result["password_hash"] == "***"
    assert result["user_password"] == "***"
    assert result["my_secret_key"] == "***"
    assert result["secretvalue"] == "***"  # will mask because it starts with "secret"
    assert result["startsecretvalue"] == "xyz123"  # will not mask
    assert result["normal_key"] == "normal_value"


def test_mask_custom_edges():
    """Test masking with custom edge characters"""
    data = {
        "my-secret-key": "abc123",
        "my_secret_key": "xyz789"
    }

    result = mask(data, mask_str_edges="-")

    assert result["my-secret-key"] == "***"
    assert result["my_secret_key"] == "xyz789"  # Underscore edges don't match


def test_mask_empty_edges():
    """Test masking with empty edge characters (substring matching)"""
    data = {
        "secretvalue": "abc123",
        "mysecretkey": "xyz789",
        "normal_key": "normal_value"
    }

    result = mask(data, mask_str_edges="")

    assert result["secretvalue"] == "***"
    assert result["mysecretkey"] == "***"
    assert result["normal_key"] == "normal_value"


def test_mask_nested_dict():
    """Test masking nested dictionaries"""
    data = {
        "user": {
            "name": "john",
            "password": "secret123",
            "profile": {
                "email": "john@example.com",
                "encryption_key": "abc123"
            }
        },
        "api_secret": "xyz789"
    }

    result = mask(data)

    assert result["user"]["name"] == "john"
    assert result["user"]["password"] == "***"
    assert result["user"]["profile"]["email"] == "john@example.com"
    assert result["user"]["profile"]["encryption_key"] == "***"
    assert result["api_secret"] == "***"


def test_mask_lists():
    """Test masking lists and nested structures with lists"""
    data = {
        "users": [
            {"name": "john", "password": "secret1"},
            {"name": "jane", "password": "secret2"}
        ],
        "secrets": ["secret1", "secret2", "secret3"]
    }

    result = mask(data)

    assert result["users"][0]["name"] == "john"
    assert result["users"][0]["password"] == "***"
    assert result["users"][1]["name"] == "jane"
    assert result["users"][1]["password"] == "***"
    assert result["secrets"] == ["***", "***", "***"]


def test_mask_mixed_types():
    """Test masking with different value types"""
    data = {
        "password": "string_value",
        "secret_number": 12345,
        "encryption_flag": True,
        "secret_float": 3.14,
        "password_none": None,
        "normal_key": "normal_value"
    }

    result = mask(data)

    assert result["password"] == "***"
    assert result["secret_number"] == "***"
    assert result["encryption_flag"] == "***"
    assert result["secret_float"] == "***"
    assert result["password_none"] == "***"
    assert result["normal_key"] == "normal_value"


def test_mask_skip_true():
    """Test that skip=True returns original data unchanged"""
    data = {
        "password": "secret123",
        "encryption_key": "abc123",
        "normal_key": "normal_value"
    }

    result = mask(data, skip=True)

    assert result == data
    assert result is data  # Should return the same object


def test_mask_empty_dict():
    """Test masking empty dictionary"""
    data: dict[str, Any] = {}

    result = mask(data)

    assert result == {}


def test_mask_none_mask_keys():
    """Test that explicit None mask_keys uses the defaults"""
    data = {"password": "secret123", "token": "abc123"}

    result = mask(data, mask_keys=None)

    assert result["password"] == "***"
    assert result["token"] == "abc123"  # Not in default keys


def test_mask_empty_mask_keys():
    """Test empty mask_keys list"""
    data = {"password": "secret123", "secret": "abc123"}

    result = mask(data, mask_keys=[])

    assert result["password"] == "secret123"
    assert result["secret"] == "abc123"


def test_mask_complex_nested_structure():
    """Test masking complex nested structure"""
    data = {
        "config": {
            "database": {
                "host": "localhost",
                "password": "db_secret",
                "users": [
                    {"name": "admin", "password": "admin123"},
                    {"name": "user", "secret_key": "user456"}
                ]
            },
            "api": {
                "endpoints": ["api1", "api2"],
                "encryption_settings": {
                    "enabled": True,
                    "secret": "api_secret"
                }
            }
        }
    }

    result = mask(data)

    assert result["config"]["database"]["host"] == "localhost"
    assert result["config"]["database"]["password"] == "***"
    assert result["config"]["database"]["users"][0]["name"] == "admin"
    assert result["config"]["database"]["users"][0]["password"] == "***"
    assert result["config"]["database"]["users"][1]["name"] == "user"
    assert result["config"]["database"]["users"][1]["secret_key"] == "***"
    assert result["config"]["api"]["endpoints"] == ["api1", "api2"]
    assert result["config"]["api"]["encryption_settings"]["enabled"] is True
    assert result["config"]["api"]["encryption_settings"]["secret"] == "***"


def test_mask_preserves_original_data():
    """Test that original data is not modified"""
    original_data = {
        "password": "secret123",
        "username": "john_doe"
    }
    data_copy = original_data.copy()

    result = mask(original_data)

    assert original_data == data_copy  # Original unchanged
    assert result != original_data  # Result is different
    assert result["password"] == "***"
    assert original_data["password"] == "secret123"


@pytest.mark.parametrize("mask_key,expected_keys", [
    (["pass"], ["password", "user_pass", "my_pass_key"]),
    (["key"], ["api_key", "secret_key", "my_key_value"]),
    (["token"], ["token", "auth_token", "my_token_here"]),
])
def test_mask_parametrized_keys(mask_key: list[str], expected_keys: list[str]):
    """Parametrized test for different mask key patterns"""
    data = {key: "value" for key in expected_keys}
    data["normal_entry"] = "normal_value"

    result = mask(data, mask_keys=mask_key)

    for key in expected_keys:
        assert result[key] == "***"
    assert result["normal_entry"] == "normal_value"
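The pattern tests above pin down the key-matching rule for mask: a key is masked when it starts or ends with a mask word, or contains the word wrapped in edge characters (underscore by default); empty edges fall back to plain substring matching. A sketch of that predicate, with an illustrative name (should_mask is not part of the library's API):

def should_mask(key: str, word: str, edges: str = "_") -> bool:
    """Case-insensitive key check mirroring the patterns exercised above (assumed sketch)."""
    key_lc, word_lc = key.lower(), word.lower()
    if not edges:
        # Empty edges: plain substring matching
        return word_lc in key_lc
    return (
        key_lc.startswith(word_lc)
        or key_lc.endswith(word_lc)
        # "contains" only counts when wrapped in the edge characters
        or f"{edges}{word_lc}{edges}" in key_lc
    )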
@@ -1,361 +0,0 @@
"""
tests for corelibs.iterator_handling.fingerprint
"""

from typing import Any
import pytest
from corelibs.iterator_handling.fingerprint import dict_hash_frozen, dict_hash_crc


class TestDictHashFrozen:
    """Tests for dict_hash_frozen function"""

    def test_dict_hash_frozen_simple_dict(self):
        """Test hashing a simple dictionary"""
        data = {"key1": "value1", "key2": "value2"}
        result = dict_hash_frozen(data)

        assert isinstance(result, int)
        assert result != 0

    def test_dict_hash_frozen_consistency(self):
        """Test that same dict produces same hash"""
        data = {"name": "John", "age": 30, "city": "Tokyo"}
        hash1 = dict_hash_frozen(data)
        hash2 = dict_hash_frozen(data)

        assert hash1 == hash2

    def test_dict_hash_frozen_order_independence(self):
        """Test that dict order doesn't affect hash"""
        data1 = {"a": 1, "b": 2, "c": 3}
        data2 = {"c": 3, "a": 1, "b": 2}
        hash1 = dict_hash_frozen(data1)
        hash2 = dict_hash_frozen(data2)

        assert hash1 == hash2

    def test_dict_hash_frozen_empty_dict(self):
        """Test hashing an empty dictionary"""
        data: dict[Any, Any] = {}
        result = dict_hash_frozen(data)

        assert isinstance(result, int)

    def test_dict_hash_frozen_different_dicts(self):
        """Test that different dicts produce different hashes"""
        data1 = {"key1": "value1"}
        data2 = {"key2": "value2"}
        hash1 = dict_hash_frozen(data1)
        hash2 = dict_hash_frozen(data2)

        assert hash1 != hash2

    def test_dict_hash_frozen_various_types(self):
        """Test hashing dict with various value types"""
        data = {
            "string": "value",
            "int": 42,
            "float": 3.14,
            "bool": True,
            "none": None
        }
        result = dict_hash_frozen(data)

        assert isinstance(result, int)

    def test_dict_hash_frozen_numeric_keys(self):
        """Test hashing dict with numeric keys"""
        data = {1: "one", 2: "two", 3: "three"}
        result = dict_hash_frozen(data)

        assert isinstance(result, int)

    def test_dict_hash_frozen_tuple_values(self):
        """Test hashing dict with tuple values"""
        data = {"coord1": (1, 2), "coord2": (3, 4)}
        result = dict_hash_frozen(data)

        assert isinstance(result, int)

    def test_dict_hash_frozen_value_change_changes_hash(self):
        """Test that changing a value changes the hash"""
        data1 = {"key": "value1"}
        data2 = {"key": "value2"}
        hash1 = dict_hash_frozen(data1)
        hash2 = dict_hash_frozen(data2)

        assert hash1 != hash2


class TestDictHashCrc:
    """Tests for dict_hash_crc function"""

    def test_dict_hash_crc_simple_dict(self):
        """Test hashing a simple dictionary"""
        data = {"key1": "value1", "key2": "value2"}
        result = dict_hash_crc(data)

        assert isinstance(result, str)
        assert len(result) == 64  # SHA256 produces 64 hex characters

    def test_dict_hash_crc_simple_list(self):
        """Test hashing a simple list"""
        data = ["item1", "item2", "item3"]
        result = dict_hash_crc(data)

        assert isinstance(result, str)
        assert len(result) == 64

    def test_dict_hash_crc_consistency_dict(self):
        """Test that same dict produces same hash"""
        data = {"name": "John", "age": 30, "city": "Tokyo"}
        hash1 = dict_hash_crc(data)
        hash2 = dict_hash_crc(data)

        assert hash1 == hash2

    def test_dict_hash_crc_consistency_list(self):
        """Test that same list produces same hash"""
        data = [1, 2, 3, 4, 5]
        hash1 = dict_hash_crc(data)
        hash2 = dict_hash_crc(data)

        assert hash1 == hash2

    def test_dict_hash_crc_order_independence_dict(self):
        """Test that dict order doesn't affect hash (sort_keys=True)"""
        data1 = {"a": 1, "b": 2, "c": 3}
        data2 = {"c": 3, "a": 1, "b": 2}
        hash1 = dict_hash_crc(data1)
        hash2 = dict_hash_crc(data2)

        assert hash1 == hash2

    def test_dict_hash_crc_order_dependence_list(self):
        """Test that list order affects hash"""
        data1 = [1, 2, 3]
        data2 = [3, 2, 1]
        hash1 = dict_hash_crc(data1)
        hash2 = dict_hash_crc(data2)

        assert hash1 != hash2

    def test_dict_hash_crc_empty_dict(self):
        """Test hashing an empty dictionary"""
        data: dict[Any, Any] = {}
        result = dict_hash_crc(data)

        assert isinstance(result, str)
        assert len(result) == 64

    def test_dict_hash_crc_empty_list(self):
        """Test hashing an empty list"""
        data: list[Any] = []
        result = dict_hash_crc(data)

        assert isinstance(result, str)
        assert len(result) == 64

    def test_dict_hash_crc_different_dicts(self):
        """Test that different dicts produce different hashes"""
        data1 = {"key1": "value1"}
        data2 = {"key2": "value2"}
        hash1 = dict_hash_crc(data1)
        hash2 = dict_hash_crc(data2)

        assert hash1 != hash2

    def test_dict_hash_crc_different_lists(self):
        """Test that different lists produce different hashes"""
        data1 = ["item1", "item2"]
        data2 = ["item3", "item4"]
        hash1 = dict_hash_crc(data1)
        hash2 = dict_hash_crc(data2)

        assert hash1 != hash2

    def test_dict_hash_crc_nested_dict(self):
        """Test hashing nested dictionaries"""
        data = {
            "user": {
                "name": "John",
                "address": {
                    "city": "Tokyo",
                    "country": "Japan"
                }
            }
        }
        result = dict_hash_crc(data)

        assert isinstance(result, str)
        assert len(result) == 64

    def test_dict_hash_crc_nested_list(self):
        """Test hashing nested lists"""
        data = [[1, 2, 3], [4, 5, 6], [7, 8, 9]]
        result = dict_hash_crc(data)

        assert isinstance(result, str)
        assert len(result) == 64

    def test_dict_hash_crc_mixed_nested(self):
        """Test hashing mixed nested structures"""
        data = {
            "items": [1, 2, 3],
            "meta": {
                "count": 3,
                "tags": ["a", "b", "c"]
            }
        }
        result = dict_hash_crc(data)

        assert isinstance(result, str)
        assert len(result) == 64

    def test_dict_hash_crc_various_types_dict(self):
        """Test hashing dict with various value types"""
        data = {
            "string": "value",
            "int": 42,
            "float": 3.14,
            "bool": True,
            "none": None,
            "list": [1, 2, 3],
            "nested_dict": {"inner": "value"}
        }
        result = dict_hash_crc(data)

        assert isinstance(result, str)
        assert len(result) == 64

    def test_dict_hash_crc_various_types_list(self):
        """Test hashing list with various value types"""
        data = ["string", 42, 3.14, True, None, [1, 2], {"key": "value"}]
        result = dict_hash_crc(data)

        assert isinstance(result, str)
        assert len(result) == 64

    def test_dict_hash_crc_value_change_changes_hash(self):
        """Test that changing a value changes the hash"""
        data1 = {"key": "value1"}
        data2 = {"key": "value2"}
        hash1 = dict_hash_crc(data1)
        hash2 = dict_hash_crc(data2)

        assert hash1 != hash2

    def test_dict_hash_crc_hex_format(self):
        """Test that hash is in hexadecimal format"""
        data = {"test": "data"}
        result = dict_hash_crc(data)

        # All characters should be valid hex
        assert all(c in "0123456789abcdef" for c in result)

    def test_dict_hash_crc_unicode_handling(self):
        """Test hashing dict with unicode characters"""
        data = {
            "japanese": "日本語",
            "emoji": "🎉",
            "chinese": "中文"
        }
        result = dict_hash_crc(data)

        assert isinstance(result, str)
        assert len(result) == 64

    def test_dict_hash_crc_special_characters(self):
        """Test hashing dict with special characters"""
        data = {
            "quotes": "\"quoted\"",
            "newline": "line1\nline2",
            "tab": "col1\tcol2",
            "backslash": "path\\to\\file"
        }
        result = dict_hash_crc(data)

        assert isinstance(result, str)
        assert len(result) == 64


class TestComparisonBetweenHashFunctions:
    """Tests comparing dict_hash_frozen and dict_hash_crc"""

    def test_both_functions_are_deterministic(self):
        """Test that both functions produce consistent results"""
        data = {"a": 1, "b": 2, "c": 3}

        frozen_hash1 = dict_hash_frozen(data)
        frozen_hash2 = dict_hash_frozen(data)
        crc_hash1 = dict_hash_crc(data)
        crc_hash2 = dict_hash_crc(data)

        assert frozen_hash1 == frozen_hash2
        assert crc_hash1 == crc_hash2

    def test_both_functions_handle_empty_dict(self):
        """Test that both functions can hash empty dict"""
        data: dict[Any, Any] = {}

        frozen_result = dict_hash_frozen(data)
        crc_result = dict_hash_crc(data)

        assert isinstance(frozen_result, int)
        assert isinstance(crc_result, str)

    def test_both_functions_detect_changes(self):
        """Test that both functions detect value changes"""
        data1 = {"key": "value1"}
        data2 = {"key": "value2"}

        frozen_hash1 = dict_hash_frozen(data1)
        frozen_hash2 = dict_hash_frozen(data2)
        crc_hash1 = dict_hash_crc(data1)
        crc_hash2 = dict_hash_crc(data2)

        assert frozen_hash1 != frozen_hash2
        assert crc_hash1 != crc_hash2

    def test_both_functions_handle_order_independence(self):
        """Test that both functions are order-independent for dicts"""
        data1 = {"x": 10, "y": 20, "z": 30}
        data2 = {"z": 30, "x": 10, "y": 20}

        frozen_hash1 = dict_hash_frozen(data1)
        frozen_hash2 = dict_hash_frozen(data2)
        crc_hash1 = dict_hash_crc(data1)
        crc_hash2 = dict_hash_crc(data2)

        assert frozen_hash1 == frozen_hash2
        assert crc_hash1 == crc_hash2


@pytest.mark.parametrize("data,expected_type,expected_length", [
    ({"key": "value"}, str, 64),
    ([1, 2, 3], str, 64),
    ({"nested": {"key": "value"}}, str, 64),
    ([[1, 2], [3, 4]], str, 64),
    ({}, str, 64),
    ([], str, 64),
])
def test_dict_hash_crc_parametrized(data: dict[Any, Any] | list[Any], expected_type: type, expected_length: int):
    """Parametrized test for dict_hash_crc with various inputs"""
    result = dict_hash_crc(data)

    assert isinstance(result, expected_type)
    assert len(result) == expected_length


@pytest.mark.parametrize("data", [
    {"key": "value"},
    {"a": 1, "b": 2},
    {"x": 10, "y": 20, "z": 30},
    {},
])
def test_dict_hash_frozen_parametrized(data: dict[Any, Any]):
    """Parametrized test for dict_hash_frozen with various inputs"""
    result = dict_hash_frozen(data)

    assert isinstance(result, int)
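The deleted tests above fix the observable behaviour of the two hash helpers: an order-independent int for `dict_hash_frozen`, and a 64-character SHA-256 hex digest (order-independent for dicts, order-sensitive for lists) for `dict_hash_crc`. A minimal sketch consistent with those assertions — an assumption, not the removed corelibs source:

import hashlib
import json
from typing import Any


def dict_hash_frozen(data: dict[Any, Any]) -> int:
    # frozenset of items is order-independent; values must themselves be hashable
    # (strings, numbers, tuples pass the tests above; lists would not)
    return hash(frozenset(data.items()))


def dict_hash_crc(data: dict[Any, Any] | list[Any]) -> str:
    # sort_keys=True makes dict key order irrelevant; list order still matters
    payload = json.dumps(data, sort_keys=True, ensure_ascii=False)
    # SHA-256 hex digest is always 64 characters of [0-9a-f]
    return hashlib.sha256(payload.encode("utf-8")).hexdigest()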
@@ -1,300 +0,0 @@
"""
iterator_handling.list_helpers tests
"""

from typing import Any
import pytest
from corelibs.iterator_handling.list_helpers import convert_to_list, is_list_in_list


class TestConvertToList:
    """Test cases for convert_to_list function"""

    def test_string_input(self):
        """Test with string inputs"""
        assert convert_to_list("hello") == ["hello"]
        assert convert_to_list("") == [""]
        assert convert_to_list("123") == ["123"]
        assert convert_to_list("true") == ["true"]

    def test_integer_input(self):
        """Test with integer inputs"""
        assert convert_to_list(42) == [42]
        assert convert_to_list(0) == [0]
        assert convert_to_list(-10) == [-10]
        assert convert_to_list(999999) == [999999]

    def test_float_input(self):
        """Test with float inputs"""
        assert convert_to_list(3.14) == [3.14]
        assert convert_to_list(0.0) == [0.0]
        assert convert_to_list(-2.5) == [-2.5]
        assert convert_to_list(1.0) == [1.0]

    def test_boolean_input(self):
        """Test with boolean inputs"""
        assert convert_to_list(True) == [True]
        assert convert_to_list(False) == [False]

    def test_list_input_unchanged(self):
        """Test that list inputs are returned unchanged"""
        # String lists
        str_list = ["a", "b", "c"]
        assert convert_to_list(str_list) == str_list
        assert convert_to_list(str_list) is str_list  # Same object reference

        # Integer lists
        int_list = [1, 2, 3]
        assert convert_to_list(int_list) == int_list
        assert convert_to_list(int_list) is int_list

        # Float lists
        float_list = [1.1, 2.2, 3.3]
        assert convert_to_list(float_list) == float_list
        assert convert_to_list(float_list) is float_list

        # Boolean lists
        bool_list = [True, False, True]
        assert convert_to_list(bool_list) == bool_list
        assert convert_to_list(bool_list) is bool_list

        # Mixed lists
        mixed_list = [1, "hello", 3.14, True]
        assert convert_to_list(mixed_list) == mixed_list
        assert convert_to_list(mixed_list) is mixed_list

        # Empty list
        empty_list: list[int] = []
        assert convert_to_list(empty_list) == empty_list
        assert convert_to_list(empty_list) is empty_list

    def test_nested_lists(self):
        """Test with nested lists (should still return the same list)"""
        nested_list: list[list[int]] = [[1, 2], [3, 4]]
        assert convert_to_list(nested_list) == nested_list
        assert convert_to_list(nested_list) is nested_list

    def test_single_element_lists(self):
        """Test with single element lists"""
        single_str = ["hello"]
        assert convert_to_list(single_str) == single_str
        assert convert_to_list(single_str) is single_str

        single_int = [42]
        assert convert_to_list(single_int) == single_int
        assert convert_to_list(single_int) is single_int


class TestIsListInList:
    """Test cases for is_list_in_list function"""

    def test_string_lists(self):
        """Test with string lists"""
        list_a = ["a", "b", "c", "d"]
        list_b = ["b", "d", "e"]
        result = is_list_in_list(list_a, list_b)
        assert set(result) == {"a", "c"}
        assert isinstance(result, list)

    def test_integer_lists(self):
        """Test with integer lists"""
        list_a = [1, 2, 3, 4, 5]
        list_b = [2, 4, 6]
        result = is_list_in_list(list_a, list_b)
        assert set(result) == {1, 3, 5}
        assert isinstance(result, list)

    def test_float_lists(self):
        """Test with float lists"""
        list_a = [1.1, 2.2, 3.3, 4.4]
        list_b = [2.2, 4.4, 5.5]
        result = is_list_in_list(list_a, list_b)
        assert set(result) == {1.1, 3.3}
        assert isinstance(result, list)

    def test_boolean_lists(self):
        """Test with boolean lists"""
        list_a = [True, False, True]
        list_b = [True]
        result = is_list_in_list(list_a, list_b)
        assert set(result) == {False}
        assert isinstance(result, list)

    def test_mixed_type_lists(self):
        """Test with mixed type lists"""
        list_a = [1, "hello", 3.14, True, "world"]
        list_b = ["hello", True, 42]
        result = is_list_in_list(list_a, list_b)
        assert set(result) == {1, 3.14, "world"}
        assert isinstance(result, list)

    def test_empty_lists(self):
        """Test with empty lists"""
        # Empty list_a
        assert is_list_in_list([], [1, 2, 3]) == []

        # Empty list_b
        list_a = [1, 2, 3]
        result = is_list_in_list(list_a, [])
        assert set(result) == {1, 2, 3}

        # Both empty
        assert is_list_in_list([], []) == []

    def test_no_common_elements(self):
        """Test when lists have no common elements"""
        list_a = [1, 2, 3]
        list_b = [4, 5, 6]
        result = is_list_in_list(list_a, list_b)
        assert set(result) == {1, 2, 3}

    def test_all_elements_common(self):
        """Test when all elements in list_a are in list_b"""
        list_a = [1, 2, 3]
        list_b = [1, 2, 3, 4, 5]
        result = is_list_in_list(list_a, list_b)
        assert result == []

    def test_identical_lists(self):
        """Test with identical lists"""
        list_a = [1, 2, 3]
        list_b = [1, 2, 3]
        result = is_list_in_list(list_a, list_b)
        assert result == []

    def test_duplicate_elements(self):
        """Test with duplicate elements in lists"""
        list_a = [1, 2, 2, 3, 3, 3]
        list_b = [2, 4]
        result = is_list_in_list(list_a, list_b)
        # Should return unique elements only (set behavior)
        assert set(result) == {1, 3}
        assert isinstance(result, list)

    def test_list_b_larger_than_list_a(self):
        """Test when list_b is larger than list_a"""
        list_a = [1, 2]
        list_b = [2, 3, 4, 5, 6, 7, 8]
        result = is_list_in_list(list_a, list_b)
        assert set(result) == {1}

    def test_order_independence(self):
        """Test that order doesn't matter due to set operations"""
        list_a = [3, 1, 4, 1, 5]
        list_b = [1, 2, 6]
        result = is_list_in_list(list_a, list_b)
        assert set(result) == {3, 4, 5}


# Parametrized tests for more comprehensive coverage
class TestParametrized:
    """Parametrized tests for better coverage"""

    @pytest.mark.parametrize("input_value,expected", [
        ("hello", ["hello"]),
        (42, [42]),
        (3.14, [3.14]),
        (True, [True]),
        (False, [False]),
        ("", [""]),
        (0, [0]),
        (0.0, [0.0]),
        (-1, [-1]),
        (-2.5, [-2.5]),
    ])
    def test_convert_to_list_parametrized(self, input_value: Any, expected: Any):
        """Test convert_to_list with various single values"""
        assert convert_to_list(input_value) == expected

    @pytest.mark.parametrize("input_list", [
        [1, 2, 3],
        ["a", "b", "c"],
        [1.1, 2.2, 3.3],
        [True, False],
        [1, "hello", 3.14, True],
        [],
        [42],
        [[1, 2], [3, 4]],
    ])
    def test_convert_to_list_with_lists_parametrized(self, input_list: Any):
        """Test convert_to_list with various list inputs"""
        result = convert_to_list(input_list)
        assert result == input_list
        assert result is input_list  # Same object reference

    @pytest.mark.parametrize("list_a,list_b,expected_set", [
        ([1, 2, 3], [2], {1, 3}),
        (["a", "b", "c"], ["b", "d"], {"a", "c"}),
        ([1, 2, 3], [4, 5, 6], {1, 2, 3}),
        ([1, 2, 3], [1, 2, 3], set[int]()),
        ([], [1, 2, 3], set[int]()),
        ([1, 2, 3], [], {1, 2, 3}),
        ([True, False], [True], {False}),
        ([1.1, 2.2, 3.3], [2.2], {1.1, 3.3}),
    ])
    def test_is_list_in_list_parametrized(self, list_a: list[Any], list_b: list[Any], expected_set: Any):
        """Test is_list_in_list with various input combinations"""
        result = is_list_in_list(list_a, list_b)
        assert set(result) == expected_set
        assert isinstance(result, list)


# Edge cases and special scenarios
class TestEdgeCases:
    """Test edge cases and special scenarios"""

    def test_convert_to_list_with_none_like_values(self):
        """Test convert_to_list with None-like values (if function supports them)"""
        # Note: Based on type hints, None is not supported, but testing behavior
        # This test might need to be adjusted based on actual function behavior
        pass

    def test_is_list_in_list_preserves_type_distinctions(self):
        """Test that different types are treated as different"""
        list_a = [1, "1", 1.0, True]
        list_b = [1]  # Only integer 1
        result = is_list_in_list(list_a, list_b)

        # Note: This test depends on how Python's set handles type equality
        # 1, 1.0, and True are considered equal in sets
        # "1" is different from 1
        # expected_items = {"1"}  # String "1" should remain
        assert "1" in result
        assert isinstance(result, list)

    def test_large_lists(self):
        """Test with large lists"""
        large_list_a = list(range(1000))
        large_list_b = list(range(500, 1500))
        result = is_list_in_list(large_list_a, large_list_b)
        expected = list(range(500))  # 0 to 499
        assert set(result) == set(expected)

    def test_memory_efficiency(self):
        """Test that convert_to_list doesn't create unnecessary copies"""
        original_list = [1, 2, 3, 4, 5]
        result = convert_to_list(original_list)

        # Should be the same object, not a copy
        assert result is original_list

        # Modifying the original should affect the result
        original_list.append(6)
        assert 6 in result


# Performance tests (optional)
class TestPerformance:
    """Performance-related tests"""

    def test_is_list_in_list_with_duplicates_performance(self):
        """Test that function handles duplicates efficiently"""
        # List with many duplicates
        list_a = [1, 2, 3] * 100  # 300 elements, many duplicates
        list_b = [2] * 50  # 50 elements, all the same

        result = is_list_in_list(list_a, list_b)

        # Should still work correctly despite duplicates
        assert set(result) == {1, 3}
        assert isinstance(result, list)
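Both helpers are fully pinned down by the assertions above: `convert_to_list` returns list inputs unchanged (same object) and wraps scalars, while `is_list_in_list` returns the unique elements of `list_a` that are missing from `list_b`. A sketch of that contract, assumed rather than copied from corelibs:

from typing import Any


def convert_to_list(value: Any) -> list[Any]:
    # Lists pass through untouched (identity preserved, no copy); scalars get wrapped
    return value if isinstance(value, list) else [value]


def is_list_in_list(list_a: list[Any], list_b: list[Any]) -> list[Any]:
    # Set difference: unique, order unspecified; 1, 1.0 and True collapse to one element
    return list(set(list_a) - set(list_b))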
@@ -1,3 +0,0 @@
"""
tests for json_handling module
"""
@@ -1,869 +0,0 @@
"""
tests for corelibs.json_handling.jmespath_helper
"""

from typing import Any
import pytest
from corelibs.json_handling.jmespath_helper import jmespath_search


# MARK: jmespath_search tests
class TestJmespathSearch:
    """Test cases for jmespath_search function"""

    def test_simple_key_lookup(self):
        """Test simple key lookup in dictionary"""
        data = {"name": "John", "age": 30}
        result = jmespath_search(data, "name")
        assert result == "John"

    def test_nested_key_lookup(self):
        """Test nested key lookup"""
        data = {
            "user": {
                "profile": {
                    "name": "John",
                    "age": 30
                }
            }
        }
        result = jmespath_search(data, "user.profile.name")
        assert result == "John"

    def test_array_index_access(self):
        """Test accessing array element by index"""
        data = {
            "items": [
                {"id": 1, "name": "Item 1"},
                {"id": 2, "name": "Item 2"},
                {"id": 3, "name": "Item 3"}
            ]
        }
        result = jmespath_search(data, "items[1].name")
        assert result == "Item 2"

    def test_array_slice(self):
        """Test array slicing"""
        data = {"numbers": [1, 2, 3, 4, 5]}
        result = jmespath_search(data, "numbers[1:3]")
        assert result == [2, 3]

    def test_wildcard_projection(self):
        """Test wildcard projection on array"""
        data = {
            "users": [
                {"name": "Alice", "age": 25},
                {"name": "Bob", "age": 30},
                {"name": "Charlie", "age": 35}
            ]
        }
        result = jmespath_search(data, "users[*].name")
        assert result == ["Alice", "Bob", "Charlie"]

    def test_filter_expression(self):
        """Test filter expression"""
        data = {
            "products": [
                {"name": "Product 1", "price": 100, "stock": 5},
                {"name": "Product 2", "price": 200, "stock": 0},
                {"name": "Product 3", "price": 150, "stock": 10}
            ]
        }
        result = jmespath_search(data, "products[?stock > `0`].name")
        assert result == ["Product 1", "Product 3"]

    def test_pipe_expression(self):
        """Test pipe expression"""
        data = {
            "items": [
                {"name": "Item 1", "value": 10},
                {"name": "Item 2", "value": 20},
                {"name": "Item 3", "value": 30}
            ]
        }
        result = jmespath_search(data, "items[*].value | [0]")
        assert result == 10

    def test_multi_select_hash(self):
        """Test multi-select hash"""
        data = {"name": "John", "age": 30, "city": "New York", "country": "USA"}
        result = jmespath_search(data, "{name: name, age: age}")
        assert result == {"name": "John", "age": 30}

    def test_multi_select_list(self):
        """Test multi-select list"""
        data = {"first": "John", "last": "Doe", "age": 30}
        result = jmespath_search(data, "[first, last]")
        assert result == ["John", "Doe"]

    def test_flatten_projection(self):
        """Test flatten projection"""
        data = {
            "groups": [
                {"items": [1, 2, 3]},
                {"items": [4, 5, 6]}
            ]
        }
        result = jmespath_search(data, "groups[].items[]")
        assert result == [1, 2, 3, 4, 5, 6]

    def test_function_length(self):
        """Test length function"""
        data = {"items": [1, 2, 3, 4, 5]}
        result = jmespath_search(data, "length(items)")
        assert result == 5

    def test_function_max(self):
        """Test max function"""
        data = {"numbers": [10, 5, 20, 15]}
        result = jmespath_search(data, "max(numbers)")
        assert result == 20

    def test_function_min(self):
        """Test min function"""
        data = {"numbers": [10, 5, 20, 15]}
        result = jmespath_search(data, "min(numbers)")
        assert result == 5

    def test_function_sort(self):
        """Test sort function"""
        data = {"numbers": [3, 1, 4, 1, 5, 9, 2, 6]}
        result = jmespath_search(data, "sort(numbers)")
        assert result == [1, 1, 2, 3, 4, 5, 6, 9]

    def test_function_sort_by(self):
        """Test sort_by function"""
        data = {
            "people": [
                {"name": "Charlie", "age": 35},
                {"name": "Alice", "age": 25},
                {"name": "Bob", "age": 30}
            ]
        }
        result = jmespath_search(data, "sort_by(people, &age)[*].name")
        assert result == ["Alice", "Bob", "Charlie"]

    def test_function_join(self):
        """Test join function"""
        data = {"names": ["Alice", "Bob", "Charlie"]}
        result = jmespath_search(data, "join(', ', names)")
        assert result == "Alice, Bob, Charlie"

    def test_function_keys(self):
        """Test keys function"""
        data = {"name": "John", "age": 30, "city": "New York"}
        result = jmespath_search(data, "keys(@)")
        assert sorted(result) == ["age", "city", "name"]

    def test_function_values(self):
        """Test values function"""
        data = {"a": 1, "b": 2, "c": 3}
        result = jmespath_search(data, "values(@)")
        assert sorted(result) == [1, 2, 3]

    def test_function_type(self):
        """Test type function"""
        data = {"string": "test", "number": 42, "array": [1, 2, 3]}
        result = jmespath_search(data, "type(string)")
        assert result == "string"

    def test_function_contains(self):
        """Test contains function"""
        data = {"items": [1, 2, 3, 4, 5]}
        result = jmespath_search(data, "contains(items, `3`)")
        assert result is True

    def test_current_node_reference(self):
        """Test current node @ reference"""
        data = [1, 2, 3, 4, 5]
        result = jmespath_search(data, "@")
        assert result == [1, 2, 3, 4, 5]

    def test_not_null_expression(self):
        """Test not_null expression"""
        data = {
            "items": [
                {"name": "Item 1", "description": "Desc 1"},
                {"name": "Item 2", "description": None},
                {"name": "Item 3"}
            ]
        }
        result = jmespath_search(data, "items[*].description | [?@ != null]")
        assert result == ["Desc 1"]

    def test_search_returns_none_for_missing_key(self):
        """Test that searching for non-existent key returns None"""
        data = {"name": "John", "age": 30}
        result = jmespath_search(data, "nonexistent")
        assert result is None

    def test_search_with_list_input(self):
        """Test search with list as input"""
        data = [
            {"name": "Alice", "score": 85},
            {"name": "Bob", "score": 92},
            {"name": "Charlie", "score": 78}
        ]
        result = jmespath_search(data, "[?score > `80`].name")
        assert result == ["Alice", "Bob"]

    def test_deeply_nested_structure(self):
        """Test searching deeply nested structure"""
        data = {
            "level1": {
                "level2": {
                    "level3": {
                        "level4": {
                            "level5": {
                                "value": "deep_value"
                            }
                        }
                    }
                }
            }
        }
        result = jmespath_search(data, "level1.level2.level3.level4.level5.value")
        assert result == "deep_value"

    def test_complex_filter_expression(self):
        """Test complex filter with multiple conditions"""
        data = {
            "products": [
                {"name": "Product 1", "price": 100, "stock": 5, "category": "A"},
                {"name": "Product 2", "price": 200, "stock": 0, "category": "B"},
                {"name": "Product 3", "price": 150, "stock": 10, "category": "A"},
                {"name": "Product 4", "price": 120, "stock": 3, "category": "A"}
            ]
        }
        result = jmespath_search(
            data,
            "products[?category == 'A' && stock > `0`].name"
        )
        assert result == ["Product 1", "Product 3", "Product 4"]

    def test_recursive_descent(self):
        """Test recursive descent operator"""
        data = {
            "store": {
                "book": [
                    {"title": "Book 1", "price": 10},
                    {"title": "Book 2", "price": 20}
                ],
                "bicycle": {
                    "price": 100
                }
            }
        }
        # Note: JMESPath doesn't have a true recursive descent like JSONPath's '..'
        # but we can test nested projections
        result = jmespath_search(data, "store.book[*].price")
        assert result == [10, 20]

    def test_empty_dict_input(self):
        """Test search on empty dictionary"""
        data: dict[Any, Any] = {}
        result = jmespath_search(data, "key")
        assert result is None

    def test_empty_list_input(self):
        """Test search on empty list"""
        data: list[Any] = []
        result = jmespath_search(data, "[0]")
        assert result is None

    def test_unicode_keys_and_values(self):
        """Test search with unicode keys and values"""
        data = {
            "日本語": "テスト",
            "emoji_🎉": "🚀",
            "nested": {
                "中文": "测试"
            }
        }
        # JMESPath requires quoted identifiers for unicode keys
        result = jmespath_search(data, '"日本語"')
        assert result == "テスト"

        result2 = jmespath_search(data, 'nested."中文"')
        assert result2 == "测试"

    def test_numeric_values(self):
        """Test search with various numeric values"""
        data = {
            "int": 42,
            "float": 3.14,
            "negative": -10,
            "zero": 0,
            "scientific": 1e10
        }
        result = jmespath_search(data, "float")
        assert result == 3.14

    def test_boolean_values(self):
        """Test search with boolean values"""
        data = {
            "items": [
                {"name": "Item 1", "active": True},
                {"name": "Item 2", "active": False},
                {"name": "Item 3", "active": True}
            ]
        }
        result = jmespath_search(data, "items[?active].name")
        assert result == ["Item 1", "Item 3"]

    def test_null_values(self):
        """Test search with null/None values"""
        data = {
            "name": "John",
            "middle_name": None,
            "last_name": "Doe"
        }
        result = jmespath_search(data, "middle_name")
        assert result is None

    def test_mixed_types_in_array(self):
        """Test search on array with mixed types"""
        data = {"mixed": [1, "two", 3.0, True, None, {"key": "value"}]}
        result = jmespath_search(data, "mixed[5].key")
        assert result == "value"

    def test_expression_with_literals(self):
        """Test expression with literal values"""
        data = {
            "items": [
                {"name": "Item 1", "price": 100},
                {"name": "Item 2", "price": 200}
            ]
        }
        result = jmespath_search(data, "items[?price == `100`].name")
        assert result == ["Item 1"]

    def test_comparison_operators(self):
        """Test various comparison operators"""
        data = {
            "numbers": [
                {"value": 10},
                {"value": 20},
                {"value": 30},
                {"value": 40}
            ]
        }
        result = jmespath_search(data, "numbers[?value >= `20` && value <= `30`].value")
        assert result == [20, 30]

    def test_logical_operators(self):
        """Test logical operators (and, or, not)"""
        data = {
            "items": [
                {"name": "A", "active": True, "stock": 5},
                {"name": "B", "active": False, "stock": 0},
                {"name": "C", "active": True, "stock": 0},
                {"name": "D", "active": False, "stock": 10}
            ]
        }
        result = jmespath_search(data, "items[?active || stock > `0`].name")
        assert result == ["A", "C", "D"]


# MARK: Error handling tests
class TestJmespathSearchErrors:
    """Test error handling in jmespath_search function"""

    def test_lexer_error_invalid_syntax(self):
        """Test LexerError is converted to ValueError for invalid syntax"""
        data = {"name": "John"}

        with pytest.raises(ValueError) as exc_info:
            jmespath_search(data, "name[")

        # This actually raises a ParseError, not LexerError
        assert "Parse failed" in str(exc_info.value)

    def test_lexer_error_unclosed_bracket(self):
        """Test LexerError for unclosed bracket"""
        data = {"items": [1, 2, 3]}

        with pytest.raises(ValueError) as exc_info:
            jmespath_search(data, "items[0")

        # This actually raises a ParseError, not LexerError
        assert "Parse failed" in str(exc_info.value)

    def test_parse_error_invalid_expression(self):
        """Test ParseError is converted to ValueError"""
        data = {"name": "John"}

        with pytest.raises(ValueError) as exc_info:
            jmespath_search(data, "name..age")

        assert "Parse failed" in str(exc_info.value)

    def test_parse_error_invalid_filter(self):
        """Test ParseError for invalid filter syntax"""
        data = {"items": [1, 2, 3]}

        with pytest.raises(ValueError) as exc_info:
            jmespath_search(data, "items[?@")

        assert "Parse failed" in str(exc_info.value)

    def test_type_error_invalid_function_usage(self):
        """Test JMESPathTypeError for invalid function usage"""
        data = {"name": "John", "age": 30}

        # Trying to use length on a string (in some contexts this might cause type errors)
        # Note: This might not always raise an error depending on JMESPath version
        # Using a more reliable example: trying to use max on non-array
        with pytest.raises(ValueError) as exc_info:
            jmespath_search(data, "max(name)")

        assert "Search failed with JMESPathTypeError" in str(exc_info.value)

    def test_type_error_with_none_search_params(self):
        """Test TypeError when search_params is None"""
        data = {"name": "John"}

        # None or empty string raises EmptyExpressionError from jmespath
        with pytest.raises(Exception) as exc_info:  # Catches any exception
            jmespath_search(data, None)  # type: ignore

        # The error message should indicate an empty expression issue
        assert "empty" in str(exc_info.value).lower() or "Type error" in str(exc_info.value)

    def test_type_error_with_invalid_search_params_type(self):
        """Test TypeError when search_params is not a string"""
        data = {"name": "John"}

        with pytest.raises(ValueError) as exc_info:
            jmespath_search(data, 123)  # type: ignore

        assert "Type error for search_params" in str(exc_info.value)

    def test_type_error_with_dict_search_params(self):
        """Test TypeError when search_params is a dict"""
        data = {"name": "John"}

        with pytest.raises(ValueError) as exc_info:
            jmespath_search(data, {"key": "value"})  # type: ignore

        assert "Type error for search_params" in str(exc_info.value)

    def test_error_message_includes_search_params(self):
        """Test that error messages include the search parameters"""
        data = {"name": "John"}
        invalid_query = "name["

        with pytest.raises(ValueError) as exc_info:
            jmespath_search(data, invalid_query)

        error_message = str(exc_info.value)
        assert invalid_query in error_message
        # This raises ParseError, not LexerError
        assert "Parse failed" in error_message

    def test_error_message_includes_exception_details(self):
        """Test that error messages include original exception details"""
        data = {"items": [1, 2, 3]}
        invalid_query = "items[?"

        with pytest.raises(ValueError) as exc_info:
            jmespath_search(data, invalid_query)

        error_message = str(exc_info.value)
        # Should contain both the query and some indication of what went wrong
        assert invalid_query in error_message


# MARK: Edge cases
class TestJmespathSearchEdgeCases:
    """Test edge cases for jmespath_search function"""

    def test_very_large_array(self):
        """Test searching large array"""
        data = {"items": [{"id": i, "value": i * 10} for i in range(1000)]}
        result = jmespath_search(data, "items[500].value")
        assert result == 5000

    def test_very_deep_nesting(self):
        """Test very deep nesting"""
        # Create 20-level deep nested structure
        data: dict[str, Any] = {"level0": {}}
        current = data["level0"]
        for i in range(1, 20):
            current[f"level{i}"] = {}
            current = current[f"level{i}"]
        current["value"] = "deep"

        # Build the search path
        path = ".".join([f"level{i}" for i in range(20)]) + ".value"
        result = jmespath_search(data, path)
        assert result == "deep"

    def test_special_characters_in_keys(self):
        """Test keys with special characters (requires escaping)"""
        data = {"my-key": "value", "my.key": "value2"}

        # JMESPath requires quoting for keys with special characters
        result = jmespath_search(data, '"my-key"')
        assert result == "value"

        result2 = jmespath_search(data, '"my.key"')
        assert result2 == "value2"

    def test_numeric_string_keys(self):
        """Test keys that look like numbers"""
        data = {"123": "numeric_key", "456": "another"}
        result = jmespath_search(data, '"123"')
        assert result == "numeric_key"

    def test_empty_string_key(self):
        """Test empty string as key"""
        data = {"": "empty_key_value", "normal": "normal_value"}
        result = jmespath_search(data, '""')
        assert result == "empty_key_value"

    def test_whitespace_in_keys(self):
        """Test keys with whitespace"""
        data = {"my key": "value", " trimmed ": "value2"}
        result = jmespath_search(data, '"my key"')
        assert result == "value"

    def test_array_with_negative_index(self):
        """Test negative array indexing"""
        data = {"items": [1, 2, 3, 4, 5]}
        # JMESPath actually supports negative indexing
        result = jmespath_search(data, "items[-1]")
        assert result == 5

    def test_out_of_bounds_array_index(self):
        """Test out of bounds array access"""
        data = {"items": [1, 2, 3]}
        result = jmespath_search(data, "items[10]")
        assert result is None

    def test_chaining_multiple_operations(self):
        """Test chaining multiple JMESPath operations"""
        data: dict[str, Any] = {
            "users": [
                {"name": "Alice", "posts": [{"id": 1}, {"id": 2}]},
                {"name": "Bob", "posts": [{"id": 3}, {"id": 4}, {"id": 5}]},
                {"name": "Charlie", "posts": []}
            ]
        }
        result = jmespath_search(data, "users[*].posts[].id")
        assert result == [1, 2, 3, 4, 5]

    def test_projection_on_non_array(self):
        """Test projection on non-array (should handle gracefully)"""
        data = {"value": "not_an_array"}
        result = jmespath_search(data, "value[*]")
        assert result is None

    def test_filter_on_non_array(self):
        """Test filter on non-array"""
        data = {"value": "string"}
        result = jmespath_search(data, "value[?@ == 'x']")
        assert result is None

    def test_combining_filters_and_projections(self):
        """Test combining filters with projections"""
        data = {
            "products": [
                {
                    "name": "Product 1",
                    "variants": [
                        {"color": "red", "stock": 5},
                        {"color": "blue", "stock": 0}
                    ]
                },
                {
                    "name": "Product 2",
                    "variants": [
                        {"color": "green", "stock": 10},
                        {"color": "yellow", "stock": 3}
                    ]
                }
            ]
        }
        result = jmespath_search(
            data,
            "products[*].variants[?stock > `0`].color"
        )
        assert result == [["red"], ["green", "yellow"]]

    def test_search_with_root_array(self):
        """Test search when root is an array"""
        data = [
            {"name": "Alice", "age": 25},
            {"name": "Bob", "age": 30}
        ]
        result = jmespath_search(data, "[0].name")
        assert result == "Alice"

    def test_search_with_primitive_root(self):
        """Test search when root is a primitive value"""
        # When root is primitive, only @ should work
        data_str = "simple_string"
        result = jmespath_search(data_str, "@")  # type: ignore
        assert result == "simple_string"

    def test_function_with_empty_array(self):
        """Test functions on empty arrays"""
        data: dict[str, list[Any]] = {"items": []}
        result = jmespath_search(data, "length(items)")
        assert result == 0

    def test_nested_multi_select(self):
        """Test nested multi-select operations"""
        data = {
            "person": {
                "name": "John",
                "age": 30,
                "address": {
                    "city": "New York",
                    "country": "USA"
                }
            }
        }
        result = jmespath_search(
            data,
            "person.{name: name, city: address.city}"
        )
        assert result == {"name": "John", "city": "New York"}


# MARK: Integration tests
class TestJmespathSearchIntegration:
    """Integration tests for complex real-world scenarios"""

    def test_api_response_parsing(self):
        """Test parsing typical API response structure"""
        api_response = {
            "status": "success",
            "data": {
                "users": [
                    {
                        "id": 1,
                        "name": "Alice",
                        "email": "alice@example.com",
                        "active": True,
                        "metadata": {
                            "created_at": "2025-01-01",
                            "last_login": "2025-10-23"
                        }
                    },
                    {
                        "id": 2,
                        "name": "Bob",
                        "email": "bob@example.com",
                        "active": False,
                        "metadata": {
                            "created_at": "2025-02-01",
                            "last_login": "2025-05-15"
                        }
                    },
                    {
                        "id": 3,
                        "name": "Charlie",
                        "email": "charlie@example.com",
                        "active": True,
                        "metadata": {
                            "created_at": "2025-03-01",
                            "last_login": "2025-10-20"
                        }
                    }
                ]
            },
            "metadata": {
                "total": 3,
                "page": 1
            }
        }

        # Get all active user emails
        result = jmespath_search(api_response, "data.users[?active].email")
        assert result == ["alice@example.com", "charlie@example.com"]

        # Get user names and creation dates
        result2 = jmespath_search(
            api_response,
            "data.users[*].{name: name, created: metadata.created_at}"
        )
        assert len(result2) == 3
        assert result2[0]["name"] == "Alice"
        assert result2[0]["created"] == "2025-01-01"

    def test_config_file_parsing(self):
        """Test parsing configuration-like structure"""
        config = {
            "version": "1.0",
            "environments": {
                "development": {
                    "database": {
                        "host": "localhost",
                        "port": 5432,
                        "name": "dev_db"
                    },
                    "cache": {
                        "enabled": True,
                        "ttl": 300
                    }
                },
                "production": {
                    "database": {
                        "host": "prod.example.com",
                        "port": 5432,
                        "name": "prod_db"
                    },
                    "cache": {
                        "enabled": True,
                        "ttl": 3600
                    }
                }
            }
        }

        # Get production database host
        result = jmespath_search(config, "environments.production.database.host")
        assert result == "prod.example.com"

        # Get all database names using values() - object wildcard returns an object
        # Need to convert to list for sorting
        result2 = jmespath_search(config, "values(environments)[*].database.name")
        assert result2 is not None
        assert sorted(result2) == ["dev_db", "prod_db"]

    def test_nested_filtering_and_transformation(self):
        """Test complex nested filtering and transformation"""
        data = {
            "departments": [
                {
                    "name": "Engineering",
                    "employees": [
                        {"name": "Alice", "salary": 100000, "level": "Senior"},
                        {"name": "Bob", "salary": 80000, "level": "Mid"},
                        {"name": "Charlie", "salary": 120000, "level": "Senior"}
                    ]
                },
                {
                    "name": "Marketing",
                    "employees": [
                        {"name": "Dave", "salary": 70000, "level": "Junior"},
                        {"name": "Eve", "salary": 90000, "level": "Mid"}
                    ]
                }
            ]
        }

        # Get all senior employees with salary > 100k
        result = jmespath_search(
            data,
            "departments[*].employees[?level == 'Senior' && salary > `100000`].name"
        )
        # Note: 100000 is not > 100000, so Alice is excluded
        assert result == [["Charlie"], []]

        # Get flattened list (using >= instead and flatten operator)
        result2 = jmespath_search(
            data,
            "departments[].employees[?level == 'Senior' && salary >= `100000`].name | []"
        )
        assert sorted(result2) == ["Alice", "Charlie"]

    def test_working_with_timestamps(self):
        """Test searching and filtering timestamp-like data"""
        data = {
            "events": [
                {"name": "Event 1", "timestamp": "2025-10-20T10:00:00"},
                {"name": "Event 2", "timestamp": "2025-10-21T15:30:00"},
                {"name": "Event 3", "timestamp": "2025-10-23T08:45:00"},
                {"name": "Event 4", "timestamp": "2025-10-24T12:00:00"}
            ]
        }

        # Get events after a certain date (string comparison)
        result = jmespath_search(
            data,
            "events[?timestamp > '2025-10-22'].name"
        )
        assert result == ["Event 3", "Event 4"]

    def test_aggregation_operations(self):
        """Test aggregation-like operations"""
        data = {
            "sales": [
                {"product": "A", "quantity": 10, "price": 100},
                {"product": "B", "quantity": 5, "price": 200},
                {"product": "C", "quantity": 8, "price": 150}
            ]
        }

        # Get all quantities
        quantities = jmespath_search(data, "sales[*].quantity")
        assert quantities == [10, 5, 8]

        # Get max quantity
        max_quantity = jmespath_search(data, "max(sales[*].quantity)")
        assert max_quantity == 10

        # Get min price
        min_price = jmespath_search(data, "min(sales[*].price)")
        assert min_price == 100

        # Get sorted products by price
        sorted_products = jmespath_search(
            data,
            "sort_by(sales, &price)[*].product"
        )
        assert sorted_products == ["A", "C", "B"]

    def test_data_transformation_pipeline(self):
        """Test data transformation pipeline"""
        raw_data = {
            "response": {
                "items": [
                    {
                        "id": "item-1",
                        "attributes": {
                            "name": "Product A",
                            "specs": {"weight": 100, "color": "red"}
                        },
                        "available": True
                    },
                    {
                        "id": "item-2",
                        "attributes": {
                            "name": "Product B",
                            "specs": {"weight": 200, "color": "blue"}
                        },
                        "available": False
                    },
                    {
                        "id": "item-3",
                        "attributes": {
                            "name": "Product C",
                            "specs": {"weight": 150, "color": "red"}
                        },
                        "available": True
                    }
                ]
            }
        }

        # Get available red products
        result = jmespath_search(
            raw_data,
            "response.items[?available && attributes.specs.color == 'red'].attributes.name"
        )
        assert result == ["Product A", "Product C"]

        # Transform to simplified structure
        result2 = jmespath_search(
            raw_data,
            "response.items[*].{id: id, name: attributes.name, weight: attributes.specs.weight}"
        )
        assert len(result2) == 3
        assert result2[0] == {"id": "item-1", "name": "Product A", "weight": 100}


# __END__
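The error-handling tests above imply that `jmespath_search` is a thin wrapper around the `jmespath` package that normalises its exceptions to ValueError. A sketch of that shape, assuming the public `jmespath` API — the message fragments come from the assertions above, the rest is guesswork rather than the removed corelibs source:

from typing import Any

import jmespath
from jmespath.exceptions import JMESPathTypeError, LexerError, ParseError


def jmespath_search(data: dict[Any, Any] | list[Any], search_params: str) -> Any:
    """Illustrative sketch: search `data` with a JMESPath expression, raising ValueError on bad input."""
    if not isinstance(search_params, str):
        raise ValueError(f"Type error for search_params: {search_params!r}")
    try:
        # jmespath.search returns None for missing keys instead of raising
        return jmespath.search(search_params, data)
    except (LexerError, ParseError) as exc:
        # LexerError subclasses ParseError, so both surface as "Parse failed"
        raise ValueError(f"Parse failed: {search_params}: {exc}") from exc
    except JMESPathTypeError as exc:
        raise ValueError(f"Search failed with JMESPathTypeError: {search_params}: {exc}") from exc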
@@ -1,698 +0,0 @@
|
||||
"""
|
||||
tests for corelibs.json_handling.json_helper
|
||||
"""
|
||||
|
||||
import json
|
||||
from datetime import datetime, date
|
||||
from typing import Any
|
||||
from corelibs.json_handling.json_helper import (
|
||||
DateTimeEncoder,
|
||||
default_isoformat,
|
||||
json_dumps,
|
||||
modify_with_jsonpath
|
||||
)
|
||||
|
||||
|
||||
# MARK: DateTimeEncoder tests
|
||||
class TestDateTimeEncoder:
|
||||
"""Test cases for DateTimeEncoder class"""
|
||||
|
||||
def test_datetime_encoding(self):
|
||||
"""Test encoding datetime objects"""
|
||||
dt = datetime(2025, 10, 23, 15, 30, 45, 123456)
|
||||
data = {"timestamp": dt}
|
||||
|
||||
result = json.dumps(data, cls=DateTimeEncoder)
|
||||
decoded = json.loads(result)
|
||||
|
||||
assert decoded["timestamp"] == "2025-10-23T15:30:45.123456"
|
||||
|
||||
def test_date_encoding(self):
|
||||
"""Test encoding date objects"""
|
||||
d = date(2025, 10, 23)
|
||||
data = {"date": d}
|
||||
|
||||
result = json.dumps(data, cls=DateTimeEncoder)
|
||||
decoded = json.loads(result)
|
||||
|
||||
assert decoded["date"] == "2025-10-23"
|
||||
|
||||
def test_mixed_datetime_date_encoding(self):
|
||||
"""Test encoding mixed datetime and date objects"""
|
||||
dt = datetime(2025, 10, 23, 15, 30, 45)
|
||||
d = date(2025, 10, 23)
|
||||
data = {
|
||||
"timestamp": dt,
|
||||
"date": d,
|
||||
"name": "test"
|
||||
}
|
||||
|
||||
result = json.dumps(data, cls=DateTimeEncoder)
|
||||
decoded = json.loads(result)
|
||||
|
||||
assert decoded["timestamp"] == "2025-10-23T15:30:45"
|
||||
assert decoded["date"] == "2025-10-23"
|
||||
assert decoded["name"] == "test"
|
||||
|
||||
def test_nested_datetime_encoding(self):
|
||||
"""Test encoding nested structures with datetime objects"""
|
||||
data = {
|
||||
"event": {
|
||||
"name": "Meeting",
|
||||
"start": datetime(2025, 10, 23, 10, 0, 0),
|
||||
"end": datetime(2025, 10, 23, 11, 0, 0),
|
||||
"participants": [
|
||||
{"name": "Alice", "joined": datetime(2025, 10, 23, 10, 5, 0)},
|
||||
{"name": "Bob", "joined": datetime(2025, 10, 23, 10, 10, 0)}
|
||||
]
|
||||
}
|
||||
}
|
||||
|
||||
result = json.dumps(data, cls=DateTimeEncoder)
|
||||
decoded = json.loads(result)
|
||||
|
||||
assert decoded["event"]["start"] == "2025-10-23T10:00:00"
|
||||
assert decoded["event"]["end"] == "2025-10-23T11:00:00"
|
||||
assert decoded["event"]["participants"][0]["joined"] == "2025-10-23T10:05:00"
|
||||
assert decoded["event"]["participants"][1]["joined"] == "2025-10-23T10:10:00"
|
||||
|
||||
def test_list_of_datetimes(self):
|
||||
"""Test encoding list of datetime objects"""
|
||||
data = {
|
||||
"timestamps": [
|
||||
datetime(2025, 10, 23, 10, 0, 0),
|
||||
datetime(2025, 10, 23, 11, 0, 0),
|
||||
datetime(2025, 10, 23, 12, 0, 0)
|
||||
]
|
||||
}
|
||||
|
||||
result = json.dumps(data, cls=DateTimeEncoder)
|
||||
decoded = json.loads(result)
|
||||
|
||||
assert decoded["timestamps"][0] == "2025-10-23T10:00:00"
|
||||
assert decoded["timestamps"][1] == "2025-10-23T11:00:00"
|
||||
assert decoded["timestamps"][2] == "2025-10-23T12:00:00"
|
||||
|
||||
def test_encoder_with_normal_types(self):
|
||||
"""Test that encoder works with standard JSON types"""
|
||||
data = {
|
||||
"string": "test",
|
||||
"number": 42,
|
||||
"float": 3.14,
|
||||
"boolean": True,
|
||||
"null": None,
|
||||
"list": [1, 2, 3],
|
||||
"dict": {"key": "value"}
|
||||
}
|
||||
|
||||
result = json.dumps(data, cls=DateTimeEncoder)
|
||||
decoded = json.loads(result)
|
||||
|
||||
assert decoded == data
|
||||
|
||||
def test_encoder_returns_none_for_unsupported_types(self):
|
||||
"""Test that encoder default method returns None for unsupported types"""
|
||||
encoder = DateTimeEncoder()
|
||||
|
||||
# The default method should return None for non-date/datetime objects
|
||||
result = encoder.default("string")
|
||||
assert result is None
|
||||
|
||||
result = encoder.default(42)
|
||||
assert result is None
|
||||
|
||||
result = encoder.default([1, 2, 3])
|
||||
assert result is None
|
||||
|
||||
|
||||
# MARK: default function tests
|
||||
class TestDefaultFunction:
|
||||
"""Test cases for the default function"""
|
||||
|
||||
def test_default_datetime(self):
|
||||
"""Test default function with datetime"""
|
||||
dt = datetime(2025, 10, 23, 15, 30, 45)
|
||||
result = default_isoformat(dt)
|
||||
assert result == "2025-10-23T15:30:45"
|
||||
|
||||
def test_default_date(self):
|
||||
"""Test default function with date"""
|
||||
d = date(2025, 10, 23)
|
||||
result = default_isoformat(d)
|
||||
assert result == "2025-10-23"
|
||||
|
||||
def test_default_with_microseconds(self):
|
||||
"""Test default function with datetime including microseconds"""
|
||||
dt = datetime(2025, 10, 23, 15, 30, 45, 123456)
|
||||
result = default_isoformat(dt)
|
||||
assert result == "2025-10-23T15:30:45.123456"
|
||||
|
||||
def test_default_returns_none_for_other_types(self):
|
||||
"""Test that default returns None for non-date/datetime objects"""
|
||||
assert default_isoformat("string") is None
|
||||
assert default_isoformat(42) is None
|
||||
assert default_isoformat(3.14) is None
|
||||
assert default_isoformat(True) is None
|
||||
assert default_isoformat(None) is None
|
||||
assert default_isoformat([1, 2, 3]) is None
|
||||
assert default_isoformat({"key": "value"}) is None
|
||||
|
||||
def test_default_as_json_default_parameter(self):
|
||||
"""Test using default function as default parameter in json.dumps"""
|
||||
data = {
|
||||
"timestamp": datetime(2025, 10, 23, 15, 30, 45),
|
||||
"date": date(2025, 10, 23),
|
||||
"name": "test"
|
||||
}
|
||||
|
||||
result = json.dumps(data, default=default_isoformat)
|
||||
decoded = json.loads(result)
|
||||
|
||||
assert decoded["timestamp"] == "2025-10-23T15:30:45"
|
||||
assert decoded["date"] == "2025-10-23"
|
||||
assert decoded["name"] == "test"
|
||||
|
||||
|
||||
# MARK: json_dumps tests
|
||||
class TestJsonDumps:
|
||||
"""Test cases for json_dumps function"""
|
||||
|
||||
def test_basic_dict(self):
|
||||
"""Test json_dumps with basic dictionary"""
|
||||
data = {"name": "test", "value": 42}
|
||||
result = json_dumps(data)
|
||||
        decoded = json.loads(result)
        assert decoded == data

    def test_unicode_characters(self):
        """Test json_dumps preserves unicode characters (ensure_ascii=False)"""
        data = {"name": "テスト", "emoji": "🎉", "chinese": "测试"}
        result = json_dumps(data)

        # ensure_ascii=False means unicode characters should be preserved
        assert "テスト" in result
        assert "🎉" in result
        assert "测试" in result

        decoded = json.loads(result)
        assert decoded == data

    def test_datetime_objects_as_string(self):
        """Test json_dumps converts datetime to string (default=str)"""
        dt = datetime(2025, 10, 23, 15, 30, 45)
        data = {"timestamp": dt}

        result = json_dumps(data)
        decoded = json.loads(result)

        # default=str will convert datetime to its string representation
        assert isinstance(decoded["timestamp"], str)
        assert "2025-10-23" in decoded["timestamp"]

    def test_date_objects_as_string(self):
        """Test json_dumps converts date to string"""
        d = date(2025, 10, 23)
        data = {"date": d}

        result = json_dumps(data)
        decoded = json.loads(result)

        assert isinstance(decoded["date"], str)
        assert "2025-10-23" in decoded["date"]

    def test_complex_nested_structure(self):
        """Test json_dumps with complex nested structures"""
        data = {
            "user": {
                "name": "John",
                "age": 30,
                "active": True,
                "balance": 100.50,
                "tags": ["admin", "user"],
                "metadata": {
                    "created": datetime(2025, 1, 1, 0, 0, 0),
                    "updated": date(2025, 10, 23)
                }
            },
            "items": [
                {"id": 1, "name": "Item 1"},
                {"id": 2, "name": "Item 2"}
            ]
        }

        result = json_dumps(data)
        decoded = json.loads(result)

        assert decoded["user"]["name"] == "John"
        assert decoded["user"]["age"] == 30
        assert decoded["user"]["active"] is True
        assert decoded["user"]["balance"] == 100.50
        assert decoded["user"]["tags"] == ["admin", "user"]
        assert decoded["items"][0]["id"] == 1

    def test_empty_dict(self):
        """Test json_dumps with empty dictionary"""
        data: dict[str, Any] = {}
        result = json_dumps(data)
        assert result == "{}"

    def test_empty_list(self):
        """Test json_dumps with empty list"""
        data: list[Any] = []
        result = json_dumps(data)
        assert result == "[]"

    def test_list_data(self):
        """Test json_dumps with list as root element"""
        data = [1, 2, 3, "test", True, None]
        result = json_dumps(data)
        decoded = json.loads(result)
        assert decoded == data

    def test_none_value(self):
        """Test json_dumps with None"""
        data = None
        result = json_dumps(data)
        assert result == "null"

    def test_boolean_values(self):
        """Test json_dumps with boolean values"""
        data = {"true_val": True, "false_val": False}
        result = json_dumps(data)
        decoded = json.loads(result)
        assert decoded["true_val"] is True
        assert decoded["false_val"] is False

    def test_numeric_values(self):
        """Test json_dumps with various numeric values"""
        data = {
            "int": 42,
            "float": 3.14,
            "negative": -10,
            "zero": 0,
            "scientific": 1e10
        }
        result = json_dumps(data)
        decoded = json.loads(result)
        assert decoded == data

    def test_custom_object_conversion(self):
        """Test json_dumps with custom objects (converted via str)"""
        class CustomObject:
            """test class"""
            def __str__(self):
                return "custom_value"

        data = {"custom": CustomObject()}
        result = json_dumps(data)
        decoded = json.loads(result)
        assert decoded["custom"] == "custom_value"

    def test_special_float_values(self):
        """Test json_dumps handles special float values"""
        data = {
            "infinity": float('inf'),
            "neg_infinity": float('-inf'),
            "nan": float('nan')
        }
        result = json_dumps(data)
        # json serializes floats natively, so inf/nan come out as the
        # non-standard tokens Infinity/-Infinity/NaN (default=str only
        # applies to types json cannot encode on its own)
        assert "Infinity" in result or "inf" in result.lower()


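# Illustrative sketch, not the library source: the behaviour pinned down by
# the TestJsonDumps cases above is consistent with a thin wrapper like this
# one (the name is hypothetical; only the keyword arguments are implied by
# the tests).
def _json_dumps_sketch(data: Any) -> str:
    """Hypothetical equivalent of json_dumps as exercised by these tests"""
    return json.dumps(data, ensure_ascii=False, default=str)

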
# MARK: modify_with_jsonpath tests
class TestModifyWithJsonpath:
    """Test cases for modify_with_jsonpath function"""

    def test_simple_path_modification(self):
        """Test modifying a simple path"""
        data = {"name": "old_name", "age": 30}
        result = modify_with_jsonpath(data, "$.name", "new_name")

        assert result["name"] == "new_name"
        assert result["age"] == 30
        # Original data should not be modified
        assert data["name"] == "old_name"

    def test_nested_path_modification(self):
        """Test modifying nested path"""
        data = {
            "user": {
                "profile": {
                    "name": "John",
                    "age": 30
                }
            }
        }

        result = modify_with_jsonpath(data, "$.user.profile.name", "Jane")

        assert result["user"]["profile"]["name"] == "Jane"
        assert result["user"]["profile"]["age"] == 30
        # Original should be unchanged
        assert data["user"]["profile"]["name"] == "John"

    def test_array_index_modification(self):
        """Test modifying array element by index"""
        data = {
            "items": [
                {"id": 1, "name": "Item 1"},
                {"id": 2, "name": "Item 2"},
                {"id": 3, "name": "Item 3"}
            ]
        }

        result = modify_with_jsonpath(data, "$.items[1].name", "Updated Item 2")

        assert result["items"][1]["name"] == "Updated Item 2"
        assert result["items"][0]["name"] == "Item 1"
        assert result["items"][2]["name"] == "Item 3"
        # Original unchanged
        assert data["items"][1]["name"] == "Item 2"

    def test_wildcard_modification(self):
        """Test modifying multiple elements with wildcard"""
        data = {
            "users": [
                {"name": "Alice", "active": True},
                {"name": "Bob", "active": True},
                {"name": "Charlie", "active": True}
            ]
        }

        result = modify_with_jsonpath(data, "$.users[*].active", False)

        # All active fields should be updated
        for user in result["users"]:
            assert user["active"] is False
        # Original unchanged
        for user in data["users"]:
            assert user["active"] is True

    def test_deep_copy_behavior(self):
        """Test that modifications don't affect the original data"""
        original = {
            "level1": {
                "level2": {
                    "level3": {
                        "value": "original"
                    }
                }
            }
        }

        result = modify_with_jsonpath(original, "$.level1.level2.level3.value", "modified")

        assert result["level1"]["level2"]["level3"]["value"] == "modified"
        assert original["level1"]["level2"]["level3"]["value"] == "original"

        # Verify deep copy by modifying nested dict in result
        result["level1"]["level2"]["new_key"] = "new_value"
        assert "new_key" not in original["level1"]["level2"]

    def test_modify_to_different_type(self):
        """Test changing value to different type"""
        data = {"count": "10"}
        result = modify_with_jsonpath(data, "$.count", 10)

        assert result["count"] == 10
        assert isinstance(result["count"], int)
        assert data["count"] == "10"

    def test_modify_to_complex_object(self):
        """Test replacing value with complex object"""
        data = {"simple": "value"}
        new_value = {"complex": {"nested": "structure"}}

        result = modify_with_jsonpath(data, "$.simple", new_value)

        assert result["simple"] == new_value
        assert result["simple"]["complex"]["nested"] == "structure"

    def test_modify_to_list(self):
        """Test replacing value with list"""
        data = {"items": None}
        result = modify_with_jsonpath(data, "$.items", [1, 2, 3])

        assert result["items"] == [1, 2, 3]
        assert data["items"] is None

    def test_modify_to_none(self):
        """Test setting value to None"""
        data = {"value": "something"}
        result = modify_with_jsonpath(data, "$.value", None)

        assert result["value"] is None
        assert data["value"] == "something"

    def test_recursive_descent(self):
        """Test using recursive descent operator"""
        data: dict[str, Any] = {
            "store": {
                "book": [
                    {"title": "Book 1", "price": 10},
                    {"title": "Book 2", "price": 20}
                ],
                "bicycle": {
                    "price": 100
                }
            }
        }

        # Update all prices
        result = modify_with_jsonpath(data, "$..price", 0)

        assert result["store"]["book"][0]["price"] == 0
        assert result["store"]["book"][1]["price"] == 0
        assert result["store"]["bicycle"]["price"] == 0
        # Original unchanged
        assert data["store"]["book"][0]["price"] == 10

    def test_specific_array_elements(self):
        """Test updating specific array elements by index"""
        data = {
            "products": [
                {"name": "Product 1", "price": 100, "stock": 5},
                {"name": "Product 2", "price": 200, "stock": 0},
                {"name": "Product 3", "price": 150, "stock": 10}
            ]
        }

        # Update first product's price
        result = modify_with_jsonpath(data, "$.products[0].price", 0)

        assert result["products"][0]["price"] == 0
        assert result["products"][1]["price"] == 200  # not modified
        assert result["products"][2]["price"] == 150  # not modified

    def test_empty_dict(self):
        """Test modifying empty dictionary"""
        data: dict[str, Any] = {}
        result = modify_with_jsonpath(data, "$.nonexistent", "value")

        # Should return the original empty dict since path doesn't exist
        assert result == {}

    def test_complex_real_world_scenario(self):
        """Test complex real-world modification scenario"""
        data: dict[str, Any] = {
            "api_version": "1.0",
            "config": {
                "database": {
                    "host": "localhost",
                    "port": 5432,
                    "credentials": {
                        "username": "admin",
                        "password": "secret"
                    }
                },
                "services": [
                    {"name": "auth", "enabled": True, "port": 8001},
                    {"name": "api", "enabled": True, "port": 8002},
                    {"name": "cache", "enabled": False, "port": 8003}
                ]
            }
        }

        # Update database port
        result = modify_with_jsonpath(data, "$.config.database.port", 5433)
        assert result["config"]["database"]["port"] == 5433

        # Enable all services
        result2 = modify_with_jsonpath(result, "$.config.services[*].enabled", True)
        assert all(service["enabled"] for service in result2["config"]["services"])

        # Original unchanged
        assert data["config"]["database"]["port"] == 5432
        assert data["config"]["services"][2]["enabled"] is False

    def test_list_slice_modification(self):
        """Test modifying list slice"""
        data = {"numbers": [1, 2, 3, 4, 5]}

        # Modify first three elements
        result = modify_with_jsonpath(data, "$.numbers[0:3]", 0)

        assert result["numbers"][0] == 0
        assert result["numbers"][1] == 0
        assert result["numbers"][2] == 0
        assert result["numbers"][3] == 4
        assert result["numbers"][4] == 5

    def test_modify_with_datetime_value(self):
        """Test modifying with datetime value"""
        data = {"timestamp": "2025-01-01T00:00:00"}
        new_datetime = datetime(2025, 10, 23, 15, 30, 45)

        result = modify_with_jsonpath(data, "$.timestamp", new_datetime)

        assert result["timestamp"] == new_datetime
        assert isinstance(result["timestamp"], datetime)


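# Illustrative sketch, not the library source: the copy-then-update semantics
# checked by TestModifyWithJsonpath match a helper built on jsonpath_ng (a
# dependency of this package); the name and body here are assumptions.
def _modify_with_jsonpath_sketch(data: Any, path: str, value: Any) -> Any:
    """Hypothetical equivalent of modify_with_jsonpath as exercised above"""
    from copy import deepcopy

    from jsonpath_ng.ext import parse

    result = deepcopy(data)  # the original must stay untouched
    parse(path).update(result, value)  # a no-op when the path matches nothing
    return result

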
# MARK: Integration tests
class TestIntegration:
    """Integration tests combining multiple functions"""

    def test_encoder_and_json_dumps_comparison(self):
        """Test that DateTimeEncoder and json_dumps handle datetimes differently"""
        dt = datetime(2025, 10, 23, 15, 30, 45)
        data = {"timestamp": dt}

        # Using DateTimeEncoder produces ISO format
        with_encoder = json.dumps(data, cls=DateTimeEncoder)
        decoded_encoder = json.loads(with_encoder)
        assert decoded_encoder["timestamp"] == "2025-10-23T15:30:45"

        # Using json_dumps (default=str) produces string representation
        with_dumps = json_dumps(data)
        decoded_dumps = json.loads(with_dumps)
        assert isinstance(decoded_dumps["timestamp"], str)
        assert "2025-10-23" in decoded_dumps["timestamp"]

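    # Sketch (assumption, not the library source): the ISO strings asserted
    # above are what a json.JSONEncoder subclass along these lines would emit.
    class _DateTimeEncoderSketch(json.JSONEncoder):
        """Hypothetical equivalent of DateTimeEncoder"""
        def default(self, o: Any) -> Any:
            if isinstance(o, (datetime, date)):
                return o.isoformat()  # datetime -> "T"-separated, date -> YYYY-MM-DD
            return super().default(o)
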
    def test_modify_and_serialize(self):
        """Test modifying data and then serializing it"""
        data = {
            "event": {
                "name": "Meeting",
                "date": date(2025, 10, 23),
                "attendees": [
                    {"name": "Alice", "confirmed": False},
                    {"name": "Bob", "confirmed": False}
                ]
            }
        }

        # Modify confirmation status
        modified = modify_with_jsonpath(data, "$.event.attendees[*].confirmed", True)

        # Serialize with datetime handling
        serialized = json.dumps(modified, cls=DateTimeEncoder)
        decoded = json.loads(serialized)

        assert decoded["event"]["date"] == "2025-10-23"
        assert decoded["event"]["attendees"][0]["confirmed"] is True
        assert decoded["event"]["attendees"][1]["confirmed"] is True

    def test_round_trip_with_modification(self):
        """Test full round trip: serialize -> modify -> serialize"""
        original = {
            "config": {
                "updated": datetime(2025, 10, 23, 15, 30, 45),
                "version": "1.0"
            }
        }

        # Serialize
        json_str = json.dumps(original, cls=DateTimeEncoder)

        # Deserialize
        deserialized = json.loads(json_str)

        # Modify
        modified = modify_with_jsonpath(deserialized, "$.config.version", "2.0")

        # Serialize again
        final_json = json_dumps(modified)
        final_data = json.loads(final_json)

        assert final_data["config"]["version"] == "2.0"
        assert final_data["config"]["updated"] == "2025-10-23T15:30:45"


# MARK: Edge cases
class TestEdgeCases:
    """Test edge cases and error scenarios"""

    def test_circular_reference_in_modify(self):
        """Test that modify_with_jsonpath handles data without circular references"""
        # Note: JSON doesn't support circular references, so we test normal nested data
        data = {
            "a": {
                "b": {
                    "c": "value"
                }
            }
        }

        result = modify_with_jsonpath(data, "$.a.b.c", "new_value")
        assert result["a"]["b"]["c"] == "new_value"

    def test_unicode_in_keys_and_values(self):
        """Test handling unicode in both keys and values"""
        data = {
            "日本語": "テスト",
            "emoji_🎉": "🚀",
            "normal": "value"
        }

        result = json_dumps(data)
        decoded = json.loads(result)

        assert decoded["日本語"] == "テスト"
        assert decoded["emoji_🎉"] == "🚀"
        assert decoded["normal"] == "value"

    def test_very_nested_structure(self):
        """Test deeply nested structure"""
        # Create a 10-level deep nested structure
        data: dict[str, Any] = {"level0": {}}
        current = data["level0"]
        for i in range(1, 10):
            current[f"level{i}"] = {}
            current = current[f"level{i}"]
        current["value"] = "deep_value"

        result = modify_with_jsonpath(data, "$..value", "modified_deep_value")

        # Navigate to the deep value
        current = result["level0"]
        for i in range(1, 10):
            current = current[f"level{i}"]
        assert current["value"] == "modified_deep_value"

    def test_large_list_modification(self):
        """Test modifying large list"""
        data = {"items": [{"id": i, "value": i * 10} for i in range(100)]}

        result = modify_with_jsonpath(data, "$.items[*].value", 0)

        assert all(item["value"] == 0 for item in result["items"])
        assert len(result["items"]) == 100

    def test_mixed_date_types_encoding(self):
        """Test encoding with both date and datetime in same structure"""
        data = {
            "created_date": date(2025, 10, 23),
            "created_datetime": datetime(2025, 10, 23, 15, 30, 45),
            "updated_date": date(2025, 10, 24),
            "updated_datetime": datetime(2025, 10, 24, 16, 45, 30)
        }

        result = json.dumps(data, cls=DateTimeEncoder)
        decoded = json.loads(result)

        assert decoded["created_date"] == "2025-10-23"
        assert decoded["created_datetime"] == "2025-10-23T15:30:45"
        assert decoded["updated_date"] == "2025-10-24"
        assert decoded["updated_datetime"] == "2025-10-24T16:45:30"
@@ -438,4 +438,81 @@ class TestLogLevelHandling:
        level = log_instance.get_log_level("file_handler")
        assert level == LoggingLevel.DEBUG


class DummyHandler:
    """Dummy log level handler"""
    def __init__(self, level: LoggingLevel):
        self.level = level


@pytest.fixture
def log_instance_level() -> Log:
    """
    Minimal log instance with dummy handlers

    Returns:
        Log -- minimal Log instance with console output and run info disabled
    """
    log = Log(
        log_path=Path("/tmp/test.log"),
        log_name="test",
        log_settings={
            "log_level_console": LoggingLevel.DEBUG,
            "log_level_file": LoggingLevel.DEBUG,
            "console_enabled": False,
            "console_color_output_enabled": False,
            "console_format_type": None,
            "per_run_log": False,
            "add_start_info": False,
            "add_end_info": False,
            "log_queue": None,
        }
    )
    return log


def test_any_handler_is_minimum_level_true(log_instance_level: Log):
    """Test any_handler_is_minimum_level returns True when a handler meets the level"""
    # Handler with DEBUG level, should include INFO
    log_instance_level.handlers = {
        "h1": DummyHandler(LoggingLevel.DEBUG)
    }
    assert log_instance_level.any_handler_is_minimum_level(LoggingLevel.INFO) is True


def test_any_handler_is_minimum_level_false(log_instance_level: Log):
    """Test any_handler_is_minimum_level returns False when no handler meets the level"""
    # Handler with ERROR level, should not include DEBUG
    log_instance_level.handlers = {
        "h1": DummyHandler(LoggingLevel.ERROR)
    }
    assert log_instance_level.any_handler_is_minimum_level(LoggingLevel.DEBUG) is False


def test_any_handler_is_minimum_level_multiple(log_instance_level: Log):
    """Test any_handler_is_minimum_level with multiple handlers"""
    # Multiple handlers, one matches
    log_instance_level.handlers = {
        "h1": DummyHandler(LoggingLevel.ERROR),
        "h2": DummyHandler(LoggingLevel.DEBUG)
    }
    assert log_instance_level.any_handler_is_minimum_level(LoggingLevel.INFO) is True
    # None matches
    log_instance_level.handlers = {
        "h1": DummyHandler(LoggingLevel.ERROR),
        "h2": DummyHandler(LoggingLevel.CRITICAL)
    }
    assert log_instance_level.any_handler_is_minimum_level(LoggingLevel.DEBUG) is False


def test_any_handler_is_minimum_level_handles_exceptions(log_instance_level: Log):
    """Test any_handler_is_minimum_level handles exceptions gracefully"""
    # Handler with missing level attribute
    class BadHandler:
        pass

    log_instance_level.handlers = {
        "h1": BadHandler()
    }
    # Should not raise, just return False
    assert log_instance_level.any_handler_is_minimum_level(LoggingLevel.DEBUG) is False
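

# Sketch (assumption, not the library source) of the semantics the four tests
# above pin down: a handler "meets" the minimum when its own level is at or
# below the requested one, and handlers without a usable level are skipped.
def _any_handler_is_minimum_level_sketch(handlers: dict[str, Any], level: LoggingLevel) -> bool:
    """Hypothetical equivalent of Log.any_handler_is_minimum_level"""
    for handler in handlers.values():
        try:
            if handler.level <= level:
                return True
        except (AttributeError, TypeError):
            continue  # e.g. BadHandler above has no level attribute
    return False
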
# __END__

@@ -28,6 +28,7 @@ def tmp_log_path(tmp_path: Path) -> Path:
@pytest.fixture
def basic_log_settings() -> LogSettings:
    """Basic log settings for testing"""
    # Return a new dict each time to avoid state pollution
    return {
        "log_level_console": LoggingLevel.WARNING,
        "log_level_file": LoggingLevel.DEBUG,
@@ -308,4 +309,54 @@ class TestUpdateConsoleFormatter:
        # Verify message was logged
        assert "Test warning message" in caplog.text

    def test_log_console_format_option_set_to_none(
        self, tmp_log_path: Path
    ):
        """Test that a console_format_type option set to None falls back to ConsoleFormatSettings.ALL"""
        # Save the original DEFAULT_LOG_SETTINGS to restore it after test
        original_default = Log.DEFAULT_LOG_SETTINGS.copy()

        try:
            # Reset DEFAULT_LOG_SETTINGS to ensure clean state
            Log.DEFAULT_LOG_SETTINGS = {
                "log_level_console": Log.DEFAULT_LOG_LEVEL_CONSOLE,
                "log_level_file": Log.DEFAULT_LOG_LEVEL_FILE,
                "per_run_log": False,
                "console_enabled": True,
                "console_color_output_enabled": True,
                "console_format_type": ConsoleFormatSettings.ALL,
                "add_start_info": True,
                "add_end_info": False,
                "log_queue": None,
            }

            # Create a fresh settings dict with console_format_type explicitly set to None
            settings: LogSettings = {
                "log_level_console": LoggingLevel.WARNING,
                "log_level_file": LoggingLevel.DEBUG,
                "per_run_log": False,
                "console_enabled": True,
                "console_color_output_enabled": False,
                "console_format_type": None,  # type: ignore
                "add_start_info": False,
                "add_end_info": False,
                "log_queue": None,
            }

            # Verify that None is explicitly set in the input
            assert settings['console_format_type'] is None

            log = Log(
                log_path=tmp_log_path,
                log_name="test_log",
                log_settings=settings
            )

            # Verify that None was replaced with ConsoleFormatSettings.ALL
            # The Log class should replace None with the default value (ALL)
            assert log.log_settings['console_format_type'] == ConsoleFormatSettings.ALL
        finally:
            # Restore original DEFAULT_LOG_SETTINGS
            Log.DEFAULT_LOG_SETTINGS = original_default

# __END__

@@ -2,11 +2,10 @@
PyTest: requests_handling/caller
"""

from typing import Any
from unittest.mock import Mock, patch
import pytest
import requests
from corelibs.requests_handling.caller import Caller
from corelibs.requests_handling.caller import Caller, ErrorResponse, ProxyConfig


class TestCallerInit:
@@ -21,13 +20,17 @@ class TestCallerInit:
        assert caller.timeout == 20
        assert caller.verify is True
        assert caller.proxy is None
        assert caller.cafile is None
        assert caller.ca_file is None

    def test_init_with_all_params(self):
        """Test Caller initialization with all parameters"""
        header = {"Authorization": "Bearer token", "Content-Type": "application/json"}
        proxy = {"http": "http://proxy.example.com:8080", "https": "https://proxy.example.com:8080"}
        caller = Caller(header=header, verify=False, timeout=30, proxy=proxy)
        proxy: ProxyConfig = {
            "type": "socks5",
            "host": "proxy.example.com:8080",
            "port": "8080"
        }
        caller = Caller(header=header, timeout=30, proxy=proxy, verify=False)

        assert caller.headers == header
        assert caller.timeout == 30
@@ -58,7 +61,7 @@ class TestCallerInit:
        ca_file_path = "/path/to/ca/cert.pem"
        caller = Caller(header={}, ca_file=ca_file_path)

        assert caller.cafile == ca_file_path
        assert caller.ca_file == ca_file_path


class TestCallerGet:
@@ -81,7 +84,8 @@ class TestCallerGet:
            headers={"Authorization": "Bearer token"},
            timeout=20,
            verify=True,
            proxies=None
            proxies=None,
            cert=None
        )

    @patch('corelibs.requests_handling.caller.requests.get')
@@ -101,7 +105,8 @@ class TestCallerGet:
            headers={},
            timeout=20,
            verify=True,
            proxies=None
            proxies=None,
            cert=None
        )

    @patch('corelibs.requests_handling.caller.requests.get')
@@ -134,7 +139,11 @@ class TestCallerGet:
        mock_response = Mock(spec=requests.Response)
        mock_get.return_value = mock_response

        proxy = {"http": "http://proxy.example.com:8080"}
        proxy: ProxyConfig = {
            "type": "socks5",
            "host": "proxy.example.com:8080",
            "port": "8080"
        }
        caller = Caller(header={}, proxy=proxy)
        caller.get("https://api.example.com/data")

@@ -142,40 +151,46 @@ class TestCallerGet:
        assert mock_get.call_args[1]["proxies"] == proxy

    @patch('corelibs.requests_handling.caller.requests.get')
    def test_get_invalid_schema_returns_none(self, mock_get: Mock, capsys: Any):
        """Test GET request with invalid URL schema returns None"""
    def test_get_invalid_schema_returns_none(self, mock_get: Mock):
        """Test GET request with invalid URL schema returns ErrorResponse"""
        mock_get.side_effect = requests.exceptions.InvalidSchema("Invalid URL")

        caller = Caller(header={})
        response = caller.get("invalid://example.com")

        assert response is None
        captured = capsys.readouterr()
        assert "Invalid URL during 'get'" in captured.out
        assert isinstance(response, ErrorResponse)
        assert response.code == 200
        assert "Invalid URL during 'get'" in response.message
        assert response.action == "get"
        assert response.url == "invalid://example.com"

    @patch('corelibs.requests_handling.caller.requests.get')
    def test_get_timeout_returns_none(self, mock_get: Mock, capsys: Any):
        """Test GET request timeout returns None"""
    def test_get_timeout_returns_none(self, mock_get: Mock):
        """Test GET request timeout returns ErrorResponse"""
        mock_get.side_effect = requests.exceptions.ReadTimeout("Timeout")

        caller = Caller(header={})
        response = caller.get("https://api.example.com/data")

        assert response is None
        captured = capsys.readouterr()
        assert "Timeout (20s) during 'get'" in captured.out
        assert isinstance(response, ErrorResponse)
        assert response.code == 300
        assert "Timeout (20s) during 'get'" in response.message
        assert response.action == "get"
        assert response.url == "https://api.example.com/data"

    @patch('corelibs.requests_handling.caller.requests.get')
    def test_get_connection_error_returns_none(self, mock_get: Mock, capsys: Any):
        """Test GET request connection error returns None"""
    def test_get_connection_error_returns_none(self, mock_get: Mock):
        """Test GET request connection error returns ErrorResponse"""
        mock_get.side_effect = requests.exceptions.ConnectionError("Connection failed")

        caller = Caller(header={})
        response = caller.get("https://api.example.com/data")

        assert response is None
        captured = capsys.readouterr()
        assert "Connection error during 'get'" in captured.out
        assert isinstance(response, ErrorResponse)
        assert response.code == 400
        assert "Connection error during 'get'" in response.message
        assert response.action == "get"
        assert response.url == "https://api.example.com/data"


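# Sketch (assumption, inferred purely from the assertions above): the shape of
# ErrorResponse these tests rely on. The field names and code values (200
# invalid URL, 300 timeout, 400 connection error) come straight from the
# asserts; the dataclass form itself is illustrative.
from dataclasses import dataclass


@dataclass
class _ErrorResponseSketch:
    """Hypothetical shape of corelibs' ErrorResponse"""
    code: int
    message: str
    action: str
    url: str

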
class TestCallerPost:
@@ -200,7 +215,8 @@ class TestCallerPost:
            headers={"Content-Type": "application/json"},
            timeout=20,
            verify=True,
            proxies=None
            proxies=None,
            cert=None
        )

    @patch('corelibs.requests_handling.caller.requests.post')
@@ -234,40 +250,46 @@ class TestCallerPost:
        assert mock_post.call_args[1]["json"] == data

    @patch('corelibs.requests_handling.caller.requests.post')
    def test_post_invalid_schema_returns_none(self, mock_post: Mock, capsys: Any):
        """Test POST request with invalid URL schema returns None"""
    def test_post_invalid_schema_returns_none(self, mock_post: Mock):
        """Test POST request with invalid URL schema returns ErrorResponse"""
        mock_post.side_effect = requests.exceptions.InvalidSchema("Invalid URL")

        caller = Caller(header={})
        response = caller.post("invalid://example.com", data={"test": "data"})

        assert response is None
        captured = capsys.readouterr()
        assert "Invalid URL during 'post'" in captured.out
        assert isinstance(response, ErrorResponse)
        assert response.code == 200
        assert "Invalid URL during 'post'" in response.message
        assert response.action == "post"
        assert response.url == "invalid://example.com"

    @patch('corelibs.requests_handling.caller.requests.post')
    def test_post_timeout_returns_none(self, mock_post: Mock, capsys: Any):
        """Test POST request timeout returns None"""
    def test_post_timeout_returns_none(self, mock_post: Mock):
        """Test POST request timeout returns ErrorResponse"""
        mock_post.side_effect = requests.exceptions.ReadTimeout("Timeout")

        caller = Caller(header={})
        response = caller.post("https://api.example.com/data", data={"test": "data"})

        assert response is None
        captured = capsys.readouterr()
        assert "Timeout (20s) during 'post'" in captured.out
        assert isinstance(response, ErrorResponse)
        assert response.code == 300
        assert "Timeout (20s) during 'post'" in response.message
        assert response.action == "post"
        assert response.url == "https://api.example.com/data"

    @patch('corelibs.requests_handling.caller.requests.post')
    def test_post_connection_error_returns_none(self, mock_post: Mock, capsys: Any):
        """Test POST request connection error returns None"""
    def test_post_connection_error_returns_none(self, mock_post: Mock):
        """Test POST request connection error returns ErrorResponse"""
        mock_post.side_effect = requests.exceptions.ConnectionError("Connection failed")

        caller = Caller(header={})
        response = caller.post("https://api.example.com/data", data={"test": "data"})

        assert response is None
        captured = capsys.readouterr()
        assert "Connection error during 'post'" in captured.out
        assert isinstance(response, ErrorResponse)
        assert response.code == 400
        assert "Connection error during 'post'" in response.message
        assert response.action == "post"
        assert response.url == "https://api.example.com/data"


class TestCallerPut:
@@ -292,7 +314,8 @@ class TestCallerPut:
            headers={"Content-Type": "application/json"},
            timeout=20,
            verify=True,
            proxies=None
            proxies=None,
            cert=None
        )

    @patch('corelibs.requests_handling.caller.requests.put')
@@ -311,16 +334,18 @@ class TestCallerPut:
        assert mock_put.call_args[1]["params"] == params

    @patch('corelibs.requests_handling.caller.requests.put')
    def test_put_timeout_returns_none(self, mock_put: Mock, capsys: Any):
        """Test PUT request timeout returns None"""
    def test_put_timeout_returns_none(self, mock_put: Mock):
        """Test PUT request timeout returns ErrorResponse"""
        mock_put.side_effect = requests.exceptions.ReadTimeout("Timeout")

        caller = Caller(header={})
        response = caller.put("https://api.example.com/data/1", data={"test": "data"})

        assert response is None
        captured = capsys.readouterr()
        assert "Timeout (20s) during 'put'" in captured.out
        assert isinstance(response, ErrorResponse)
        assert response.code == 300
        assert "Timeout (20s) during 'put'" in response.message
        assert response.action == "put"
        assert response.url == "https://api.example.com/data/1"


class TestCallerPatch:
@@ -345,7 +370,8 @@ class TestCallerPatch:
            headers={"Content-Type": "application/json"},
            timeout=20,
            verify=True,
            proxies=None
            proxies=None,
            cert=None
        )

    @patch('corelibs.requests_handling.caller.requests.patch')
@@ -364,16 +390,18 @@ class TestCallerPatch:
        assert mock_patch.call_args[1]["params"] == params

    @patch('corelibs.requests_handling.caller.requests.patch')
    def test_patch_connection_error_returns_none(self, mock_patch: Mock, capsys: Any):
        """Test PATCH request connection error returns None"""
    def test_patch_connection_error_returns_none(self, mock_patch: Mock):
        """Test PATCH request connection error returns ErrorResponse"""
        mock_patch.side_effect = requests.exceptions.ConnectionError("Connection failed")

        caller = Caller(header={})
        response = caller.patch("https://api.example.com/data/1", data={"test": "data"})

        assert response is None
        captured = capsys.readouterr()
        assert "Connection error during 'patch'" in captured.out
        assert isinstance(response, ErrorResponse)
        assert response.code == 400
        assert "Connection error during 'patch'" in response.message
        assert response.action == "patch"
        assert response.url == "https://api.example.com/data/1"


class TestCallerDelete:
@@ -396,7 +424,8 @@ class TestCallerDelete:
            headers={"Authorization": "Bearer token"},
            timeout=20,
            verify=True,
            proxies=None
            proxies=None,
            cert=None
        )

    @patch('corelibs.requests_handling.caller.requests.delete')
@@ -414,16 +443,18 @@ class TestCallerDelete:
        assert mock_delete.call_args[1]["params"] == params

    @patch('corelibs.requests_handling.caller.requests.delete')
    def test_delete_invalid_schema_returns_none(self, mock_delete: Mock, capsys: Any):
        """Test DELETE request with invalid URL schema returns None"""
    def test_delete_invalid_schema_returns_none(self, mock_delete: Mock):
        """Test DELETE request with invalid URL schema returns ErrorResponse"""
        mock_delete.side_effect = requests.exceptions.InvalidSchema("Invalid URL")

        caller = Caller(header={})
        response = caller.delete("invalid://example.com/data/1")

        assert response is None
        captured = capsys.readouterr()
        assert "Invalid URL during 'delete'" in captured.out
        assert isinstance(response, ErrorResponse)
        assert response.code == 200
        assert "Invalid URL during 'delete'" in response.message
        assert response.action == "delete"
        assert response.url == "invalid://example.com/data/1"


class TestCallerParametrized:
@@ -492,7 +523,7 @@ class TestCallerParametrized:
    ])
    @patch('corelibs.requests_handling.caller.requests.get')
    def test_exception_handling(
        self, mock_get: Mock, exception_class: type, expected_message: str, capsys: Any
        self, mock_get: Mock, exception_class: type, expected_message: str
    ):
        """Test exception handling for all exception types"""
        mock_get.side_effect = exception_class("Test error")
@@ -500,9 +531,8 @@ class TestCallerParametrized:
        caller = Caller(header={})
        response = caller.get("https://api.example.com/data")

        assert response is None
        captured = capsys.readouterr()
        assert expected_message in captured.out
        assert isinstance(response, ErrorResponse)
        assert expected_message in response.message


class TestCallerIntegration:
@@ -599,7 +629,8 @@ class TestCallerEdgeCases:
            headers={},
            timeout=20,
            verify=True,
            proxies=None
            proxies=None,
            cert=None
        )

    @patch('corelibs.requests_handling.caller.requests.post')
@@ -659,7 +690,8 @@ class TestCallerEdgeCases:
            headers={},
            timeout=20,
            verify=True,
            proxies=None
            proxies=None,
            cert=None
        )

    @patch('corelibs.requests_handling.caller.requests.get')
@@ -679,7 +711,8 @@ class TestCallerEdgeCases:
            headers={},
            timeout=20,
            verify=True,
            proxies=None
            proxies=None,
            cert=None
        )

    def test_timeout_zero(self):
@@ -730,9 +763,10 @@ class TestCallerProxyHandling:
        mock_response = Mock(spec=requests.Response)
        mock_get.return_value = mock_response

        proxy = {
            "http": "http://proxy.example.com:8080",
            "https": "https://proxy.example.com:8080"
        proxy: ProxyConfig = {
            "type": "socks5",
            "host": "proxy.example.com:8080",
            "port": "8080"
        }
        caller = Caller(header={}, proxy=proxy)
        caller.get("https://api.example.com/data")
@@ -746,9 +780,10 @@ class TestCallerProxyHandling:
        mock_response = Mock(spec=requests.Response)
        mock_post.return_value = mock_response

        proxy = {
            "http": "http://user:pass@proxy.example.com:8080",
            "https": "https://user:pass@proxy.example.com:8080"
        proxy: ProxyConfig = {
            "type": "socks5",
            "host": "proxy.example.com:8080",
            "port": "8080"
        }
        caller = Caller(header={}, proxy=proxy)
        caller.post("https://api.example.com/data", data={"test": "data"})
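# Note (assumption, inferred from the literals above): ProxyConfig appears to
# be a mapping with "type" (e.g. "socks5"), "host" and "port" keys, which
# Caller presumably passes through as the proxies= argument, given the
# mock_get.call_args[1]["proxies"] == proxy assertion earlier in this file.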
@@ -789,7 +824,7 @@ class TestCallerResponseHandling:
        caller = Caller(header={})
        response = caller.get("https://api.example.com/data")

        assert response is not None
        assert not isinstance(response, ErrorResponse)
        assert response.status_code == 200
        assert response.text == "Success"
        assert response.json() == {"status": "ok"}
@@ -805,7 +840,7 @@ class TestCallerResponseHandling:
        caller = Caller(header={})
        response = caller.get("https://api.example.com/data")

        assert response is not None
        assert not isinstance(response, ErrorResponse)
        assert response.status_code == status_code


@@ -1,516 +0,0 @@
|
||||
"""
|
||||
PyTest: string_handling/text_colors
|
||||
"""
|
||||
|
||||
import pytest
|
||||
from corelibs.string_handling.text_colors import Colors
|
||||
|
||||
|
||||
class TestColorsInitialState:
|
||||
"""Tests for Colors class initial state"""
|
||||
|
||||
def test_bold_initial_value(self):
|
||||
"""Test that bold has correct ANSI code"""
|
||||
assert Colors.bold == '\033[1m'
|
||||
|
||||
def test_underline_initial_value(self):
|
||||
"""Test that underline has correct ANSI code"""
|
||||
assert Colors.underline == '\033[4m'
|
||||
|
||||
def test_end_initial_value(self):
|
||||
"""Test that end has correct ANSI code"""
|
||||
assert Colors.end == '\033[0m'
|
||||
|
||||
def test_reset_initial_value(self):
|
||||
"""Test that reset has correct ANSI code"""
|
||||
assert Colors.reset == '\033[0m'
|
||||
|
||||
|
||||
class TestColorsNormal:
|
||||
"""Tests for normal color ANSI codes"""
|
||||
|
||||
def test_black_normal(self):
|
||||
"""Test black color code"""
|
||||
assert Colors.black == "\033[30m"
|
||||
|
||||
def test_red_normal(self):
|
||||
"""Test red color code"""
|
||||
assert Colors.red == "\033[31m"
|
||||
|
||||
def test_green_normal(self):
|
||||
"""Test green color code"""
|
||||
assert Colors.green == "\033[32m"
|
||||
|
||||
def test_yellow_normal(self):
|
||||
"""Test yellow color code"""
|
||||
assert Colors.yellow == "\033[33m"
|
||||
|
||||
def test_blue_normal(self):
|
||||
"""Test blue color code"""
|
||||
assert Colors.blue == "\033[34m"
|
||||
|
||||
def test_magenta_normal(self):
|
||||
"""Test magenta color code"""
|
||||
assert Colors.magenta == "\033[35m"
|
||||
|
||||
def test_cyan_normal(self):
|
||||
"""Test cyan color code"""
|
||||
assert Colors.cyan == "\033[36m"
|
||||
|
||||
def test_white_normal(self):
|
||||
"""Test white color code"""
|
||||
assert Colors.white == "\033[37m"
|
||||
|
||||
|
||||
class TestColorsBold:
|
||||
"""Tests for bold color ANSI codes"""
|
||||
|
||||
def test_black_bold(self):
|
||||
"""Test black bold color code"""
|
||||
assert Colors.black_bold == "\033[1;30m"
|
||||
|
||||
def test_red_bold(self):
|
||||
"""Test red bold color code"""
|
||||
assert Colors.red_bold == "\033[1;31m"
|
||||
|
||||
def test_green_bold(self):
|
||||
"""Test green bold color code"""
|
||||
assert Colors.green_bold == "\033[1;32m"
|
||||
|
||||
def test_yellow_bold(self):
|
||||
"""Test yellow bold color code"""
|
||||
assert Colors.yellow_bold == "\033[1;33m"
|
||||
|
||||
def test_blue_bold(self):
|
||||
"""Test blue bold color code"""
|
||||
assert Colors.blue_bold == "\033[1;34m"
|
||||
|
||||
def test_magenta_bold(self):
|
||||
"""Test magenta bold color code"""
|
||||
assert Colors.magenta_bold == "\033[1;35m"
|
||||
|
||||
def test_cyan_bold(self):
|
||||
"""Test cyan bold color code"""
|
||||
assert Colors.cyan_bold == "\033[1;36m"
|
||||
|
||||
def test_white_bold(self):
|
||||
"""Test white bold color code"""
|
||||
assert Colors.white_bold == "\033[1;37m"
|
||||
|
||||
|
||||
class TestColorsBright:
|
||||
"""Tests for bright color ANSI codes"""
|
||||
|
||||
def test_black_bright(self):
|
||||
"""Test black bright color code"""
|
||||
assert Colors.black_bright == '\033[90m'
|
||||
|
||||
def test_red_bright(self):
|
||||
"""Test red bright color code"""
|
||||
assert Colors.red_bright == '\033[91m'
|
||||
|
||||
def test_green_bright(self):
|
||||
"""Test green bright color code"""
|
||||
assert Colors.green_bright == '\033[92m'
|
||||
|
||||
def test_yellow_bright(self):
|
||||
"""Test yellow bright color code"""
|
||||
assert Colors.yellow_bright == '\033[93m'
|
||||
|
||||
def test_blue_bright(self):
|
||||
"""Test blue bright color code"""
|
||||
assert Colors.blue_bright == '\033[94m'
|
||||
|
||||
def test_magenta_bright(self):
|
||||
"""Test magenta bright color code"""
|
||||
assert Colors.magenta_bright == '\033[95m'
|
||||
|
||||
def test_cyan_bright(self):
|
||||
"""Test cyan bright color code"""
|
||||
assert Colors.cyan_bright == '\033[96m'
|
||||
|
||||
def test_white_bright(self):
|
||||
"""Test white bright color code"""
|
||||
assert Colors.white_bright == '\033[97m'
|
||||
|
||||
|
||||
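# Informational note (not from the library): every code asserted above follows
# the standard ANSI SGR scheme "\033[<n>m", where 1=bold, 4=underline, 0=reset,
# 30-37 = normal foreground, 90-97 = bright foreground, and "1;3x" combines
# bold with a normal colour.

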
class TestColorsDisable:
    """Tests for Colors.disable() method"""

    def setup_method(self):
        """Reset colors before each test"""
        Colors.reset_colors()

    def teardown_method(self):
        """Reset colors after each test"""
        Colors.reset_colors()

    def test_disable_bold_and_underline(self):
        """Test that disable() sets bold and underline to empty strings"""
        Colors.disable()
        assert Colors.bold == ''
        assert Colors.underline == ''

    def test_disable_end_and_reset(self):
        """Test that disable() sets end and reset to empty strings"""
        Colors.disable()
        assert Colors.end == ''
        assert Colors.reset == ''

    def test_disable_normal_colors(self):
        """Test that disable() sets all normal colors to empty strings"""
        Colors.disable()
        assert Colors.black == ''
        assert Colors.red == ''
        assert Colors.green == ''
        assert Colors.yellow == ''
        assert Colors.blue == ''
        assert Colors.magenta == ''
        assert Colors.cyan == ''
        assert Colors.white == ''

    def test_disable_bold_colors(self):
        """Test that disable() sets all bold colors to empty strings"""
        Colors.disable()
        assert Colors.black_bold == ''
        assert Colors.red_bold == ''
        assert Colors.green_bold == ''
        assert Colors.yellow_bold == ''
        assert Colors.blue_bold == ''
        assert Colors.magenta_bold == ''
        assert Colors.cyan_bold == ''
        assert Colors.white_bold == ''

    def test_disable_bright_colors(self):
        """Test that disable() sets all bright colors to empty strings"""
        Colors.disable()
        assert Colors.black_bright == ''
        assert Colors.red_bright == ''
        assert Colors.green_bright == ''
        assert Colors.yellow_bright == ''
        assert Colors.blue_bright == ''
        assert Colors.magenta_bright == ''
        assert Colors.cyan_bright == ''
        assert Colors.white_bright == ''

    def test_disable_all_colors_at_once(self):
        """Test that all color attributes are empty after disable()"""
        Colors.disable()
        # Check that all public attributes are empty strings
        for attr in dir(Colors):
            if not attr.startswith('_') and attr not in ['disable', 'reset_colors']:
                assert getattr(Colors, attr) == '', f"{attr} should be empty after disable()"


class TestColorsResetColors:
    """Tests for Colors.reset_colors() method"""

    def setup_method(self):
        """Disable colors before each test"""
        Colors.disable()

    def teardown_method(self):
        """Reset colors after each test"""
        Colors.reset_colors()

    def test_reset_bold_and_underline(self):
        """Test that reset_colors() restores bold and underline"""
        Colors.reset_colors()
        assert Colors.bold == '\033[1m'
        assert Colors.underline == '\033[4m'

    def test_reset_end_and_reset(self):
        """Test that reset_colors() restores end and reset"""
        Colors.reset_colors()
        assert Colors.end == '\033[0m'
        assert Colors.reset == '\033[0m'

    def test_reset_normal_colors(self):
        """Test that reset_colors() restores all normal colors"""
        Colors.reset_colors()
        assert Colors.black == "\033[30m"
        assert Colors.red == "\033[31m"
        assert Colors.green == "\033[32m"
        assert Colors.yellow == "\033[33m"
        assert Colors.blue == "\033[34m"
        assert Colors.magenta == "\033[35m"
        assert Colors.cyan == "\033[36m"
        assert Colors.white == "\033[37m"

    def test_reset_bold_colors(self):
        """Test that reset_colors() restores all bold colors"""
        Colors.reset_colors()
        assert Colors.black_bold == "\033[1;30m"
        assert Colors.red_bold == "\033[1;31m"
        assert Colors.green_bold == "\033[1;32m"
        assert Colors.yellow_bold == "\033[1;33m"
        assert Colors.blue_bold == "\033[1;34m"
        assert Colors.magenta_bold == "\033[1;35m"
        assert Colors.cyan_bold == "\033[1;36m"
        assert Colors.white_bold == "\033[1;37m"

    def test_reset_bright_colors(self):
        """Test that reset_colors() restores all bright colors"""
        Colors.reset_colors()
        assert Colors.black_bright == '\033[90m'
        assert Colors.red_bright == '\033[91m'
        assert Colors.green_bright == '\033[92m'
        assert Colors.yellow_bright == '\033[93m'
        assert Colors.blue_bright == '\033[94m'
        assert Colors.magenta_bright == '\033[95m'
        assert Colors.cyan_bright == '\033[96m'
        assert Colors.white_bright == '\033[97m'


class TestColorsDisableAndReset:
    """Tests for disable and reset cycle"""

    def setup_method(self):
        """Reset colors before each test"""
        Colors.reset_colors()

    def teardown_method(self):
        """Reset colors after each test"""
        Colors.reset_colors()

    def test_disable_then_reset_cycle(self):
        """Test that colors can be disabled and then reset multiple times"""
        # Initial state
        original_red = Colors.red

        # Disable
        Colors.disable()
        assert Colors.red == ''

        # Reset
        Colors.reset_colors()
        assert Colors.red == original_red

        # Disable again
        Colors.disable()
        assert Colors.red == ''

        # Reset again
        Colors.reset_colors()
        assert Colors.red == original_red

    def test_multiple_disables(self):
        """Test that calling disable() multiple times is safe"""
        Colors.disable()
        Colors.disable()
        Colors.disable()
        assert Colors.red == ''
        assert Colors.blue == ''

    def test_multiple_resets(self):
        """Test that calling reset_colors() multiple times is safe"""
        Colors.reset_colors()
        Colors.reset_colors()
        Colors.reset_colors()
        assert Colors.red == "\033[31m"
        assert Colors.blue == "\033[34m"


class TestColorsUsage:
    """Tests for practical usage of Colors class"""

    def setup_method(self):
        """Reset colors before each test"""
        Colors.reset_colors()

    def teardown_method(self):
        """Reset colors after each test"""
        Colors.reset_colors()

    def test_colored_string_with_reset(self):
        """Test creating a colored string with reset"""
        result = f"{Colors.red}Error{Colors.end}"
        assert result == "\033[31mError\033[0m"

    def test_bold_colored_string(self):
        """Test creating a bold colored string"""
        result = f"{Colors.bold}{Colors.yellow}Warning{Colors.end}"
        assert result == "\033[1m\033[33mWarning\033[0m"

    def test_underline_colored_string(self):
        """Test creating an underlined colored string"""
        result = f"{Colors.underline}{Colors.blue}Info{Colors.end}"
        assert result == "\033[4m\033[34mInfo\033[0m"

    def test_bold_underline_colored_string(self):
        """Test creating a bold and underlined colored string"""
        result = f"{Colors.bold}{Colors.underline}{Colors.green}Success{Colors.end}"
        assert result == "\033[1m\033[4m\033[32mSuccess\033[0m"

    def test_multiple_colors_in_string(self):
        """Test using multiple colors in one string"""
        result = f"{Colors.red}Red{Colors.end} {Colors.blue}Blue{Colors.end}"
        assert result == "\033[31mRed\033[0m \033[34mBlue\033[0m"

    def test_bright_color_usage(self):
        """Test using bright color variants"""
        result = f"{Colors.cyan_bright}Bright Cyan{Colors.end}"
        assert result == "\033[96mBright Cyan\033[0m"

    def test_bold_color_shortcut(self):
        """Test using bold color shortcuts"""
        result = f"{Colors.red_bold}Bold Red{Colors.end}"
        assert result == "\033[1;31mBold Red\033[0m"

    def test_disabled_colors_produce_plain_text(self):
        """Test that disabled colors produce plain text without ANSI codes"""
        Colors.disable()
        result = f"{Colors.red}Error{Colors.end}"
        assert result == "Error"
        assert "\033[" not in result

    def test_disabled_bold_underline_produce_plain_text(self):
        """Test that disabled formatting produces plain text"""
        Colors.disable()
        result = f"{Colors.bold}{Colors.underline}{Colors.green}Success{Colors.end}"
        assert result == "Success"
        assert "\033[" not in result


class TestColorsPrivateAttributes:
    """Tests to ensure private attributes are not directly accessible"""

    def test_private_bold_not_accessible(self):
        """Test that __BOLD is private"""
        with pytest.raises(AttributeError):
            _ = Colors.__BOLD

    def test_private_colors_not_accessible(self):
        """Test that private color attributes are not accessible"""
        with pytest.raises(AttributeError):
            _ = Colors.__RED
        with pytest.raises(AttributeError):
            _ = Colors.__GREEN


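# Sketch (assumption, not the library source) of the disable()/reset_colors()
# pattern the tests above imply: private uppercase constants keep the real
# ANSI codes, and the public class attributes are rebound to '' or back to
# the constants.
class _ColorsSketch:
    """Hypothetical minimal version of the Colors on/off mechanism"""
    __RED = "\033[31m"  # survives disable() because only the public alias is cleared
    red = __RED

    @staticmethod
    def disable() -> None:
        _ColorsSketch.red = ''

    @staticmethod
    def reset_colors() -> None:
        _ColorsSketch.red = _ColorsSketch.__RED  # name-mangled lookup still works here

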
# Parametrized tests
@pytest.mark.parametrize("color_attr,expected_code", [
    ("black", "\033[30m"),
    ("red", "\033[31m"),
    ("green", "\033[32m"),
    ("yellow", "\033[33m"),
    ("blue", "\033[34m"),
    ("magenta", "\033[35m"),
    ("cyan", "\033[36m"),
    ("white", "\033[37m"),
])
def test_normal_colors_parametrized(color_attr: str, expected_code: str):
    """Parametrized test for normal colors"""
    Colors.reset_colors()
    assert getattr(Colors, color_attr) == expected_code


@pytest.mark.parametrize("color_attr,expected_code", [
    ("black_bold", "\033[1;30m"),
    ("red_bold", "\033[1;31m"),
    ("green_bold", "\033[1;32m"),
    ("yellow_bold", "\033[1;33m"),
    ("blue_bold", "\033[1;34m"),
    ("magenta_bold", "\033[1;35m"),
    ("cyan_bold", "\033[1;36m"),
    ("white_bold", "\033[1;37m"),
])
def test_bold_colors_parametrized(color_attr: str, expected_code: str):
    """Parametrized test for bold colors"""
    Colors.reset_colors()
    assert getattr(Colors, color_attr) == expected_code


@pytest.mark.parametrize("color_attr,expected_code", [
    ("black_bright", '\033[90m'),
    ("red_bright", '\033[91m'),
    ("green_bright", '\033[92m'),
    ("yellow_bright", '\033[93m'),
    ("blue_bright", '\033[94m'),
    ("magenta_bright", '\033[95m'),
    ("cyan_bright", '\033[96m'),
    ("white_bright", '\033[97m'),
])
def test_bright_colors_parametrized(color_attr: str, expected_code: str):
    """Parametrized test for bright colors"""
    Colors.reset_colors()
    assert getattr(Colors, color_attr) == expected_code


@pytest.mark.parametrize("color_attr", [
    "bold", "underline", "end", "reset",
    "black", "red", "green", "yellow", "blue", "magenta", "cyan", "white",
    "black_bold", "red_bold", "green_bold", "yellow_bold",
    "blue_bold", "magenta_bold", "cyan_bold", "white_bold",
    "black_bright", "red_bright", "green_bright", "yellow_bright",
    "blue_bright", "magenta_bright", "cyan_bright", "white_bright",
])
def test_disable_all_attributes_parametrized(color_attr: str):
    """Parametrized test that all color attributes are disabled"""
    Colors.reset_colors()
    Colors.disable()
    assert getattr(Colors, color_attr) == ''


@pytest.mark.parametrize("color_attr", [
    "bold", "underline", "end", "reset",
    "black", "red", "green", "yellow", "blue", "magenta", "cyan", "white",
    "black_bold", "red_bold", "green_bold", "yellow_bold",
    "blue_bold", "magenta_bold", "cyan_bold", "white_bold",
    "black_bright", "red_bright", "green_bright", "yellow_bright",
    "blue_bright", "magenta_bright", "cyan_bright", "white_bright",
])
def test_reset_all_attributes_parametrized(color_attr: str):
    """Parametrized test that all color attributes are reset"""
    Colors.disable()
    Colors.reset_colors()
    assert getattr(Colors, color_attr) != ''
    assert '\033[' in getattr(Colors, color_attr)


# Edge case tests
class TestColorsEdgeCases:
    """Tests for edge cases and special scenarios"""

    def setup_method(self):
        """Reset colors before each test"""
        Colors.reset_colors()

    def teardown_method(self):
        """Reset colors after each test"""
        Colors.reset_colors()

    def test_colors_class_is_instantiable(self):
        """Test that the Colors class can be instantiated (it's not abstract)"""
        # The class uses static methods, but can be instantiated
        instance = Colors()
        assert isinstance(instance, Colors)

    def test_static_methods_work_on_instance(self):
        """Test that static methods work when called on instance"""
        instance = Colors()
        instance.disable()
        assert Colors.red == ''
        instance.reset_colors()
        assert Colors.red == "\033[31m"

    def test_concatenation_of_multiple_effects(self):
        """Test concatenating multiple color effects"""
        result = f"{Colors.bold}{Colors.underline}{Colors.red_bright}Test{Colors.reset}"
        assert "\033[1m" in result  # bold
        assert "\033[4m" in result  # underline
        assert "\033[91m" in result  # red bright
        assert "\033[0m" in result  # reset

    def test_empty_string_with_colors(self):
        """Test applying colors to empty string"""
        result = f"{Colors.red}{Colors.end}"
        assert result == "\033[31m\033[0m"

    def test_nested_color_changes(self):
        """Test nested color changes in string"""
        result = f"{Colors.red}Red {Colors.blue}Blue{Colors.end} Red again{Colors.end}"
        assert result == "\033[31mRed \033[34mBlue\033[0m Red again\033[0m"


# __END__

480
uv.lock
generated
@@ -1,480 +0,0 @@
|
||||
version = 1
|
||||
revision = 3
|
||||
requires-python = ">=3.13"
|
||||
|
||||
[[package]]
|
||||
name = "certifi"
|
||||
version = "2025.11.12"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/a2/8c/58f469717fa48465e4a50c014a0400602d3c437d7c0c468e17ada824da3a/certifi-2025.11.12.tar.gz", hash = "sha256:d8ab5478f2ecd78af242878415affce761ca6bc54a22a27e026d7c25357c3316", size = 160538, upload-time = "2025-11-12T02:54:51.517Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/70/7d/9bc192684cea499815ff478dfcdc13835ddf401365057044fb721ec6bddb/certifi-2025.11.12-py3-none-any.whl", hash = "sha256:97de8790030bbd5c2d96b7ec782fc2f7820ef8dba6db909ccf95449f2d062d4b", size = 159438, upload-time = "2025-11-12T02:54:49.735Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "cffi"
|
||||
version = "2.0.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "pycparser", marker = "implementation_name != 'PyPy'" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/eb/56/b1ba7935a17738ae8453301356628e8147c79dbb825bcbc73dc7401f9846/cffi-2.0.0.tar.gz", hash = "sha256:44d1b5909021139fe36001ae048dbdde8214afa20200eda0f64c068cac5d5529", size = 523588, upload-time = "2025-09-08T23:24:04.541Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/4b/8d/a0a47a0c9e413a658623d014e91e74a50cdd2c423f7ccfd44086ef767f90/cffi-2.0.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:00bdf7acc5f795150faa6957054fbbca2439db2f775ce831222b66f192f03beb", size = 185230, upload-time = "2025-09-08T23:23:00.879Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/4a/d2/a6c0296814556c68ee32009d9c2ad4f85f2707cdecfd7727951ec228005d/cffi-2.0.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:45d5e886156860dc35862657e1494b9bae8dfa63bf56796f2fb56e1679fc0bca", size = 181043, upload-time = "2025-09-08T23:23:02.231Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/b0/1e/d22cc63332bd59b06481ceaac49d6c507598642e2230f201649058a7e704/cffi-2.0.0-cp313-cp313-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:07b271772c100085dd28b74fa0cd81c8fb1a3ba18b21e03d7c27f3436a10606b", size = 212446, upload-time = "2025-09-08T23:23:03.472Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/a9/f5/a2c23eb03b61a0b8747f211eb716446c826ad66818ddc7810cc2cc19b3f2/cffi-2.0.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d48a880098c96020b02d5a1f7d9251308510ce8858940e6fa99ece33f610838b", size = 220101, upload-time = "2025-09-08T23:23:04.792Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f2/7f/e6647792fc5850d634695bc0e6ab4111ae88e89981d35ac269956605feba/cffi-2.0.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:f93fd8e5c8c0a4aa1f424d6173f14a892044054871c771f8566e4008eaa359d2", size = 207948, upload-time = "2025-09-08T23:23:06.127Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/cb/1e/a5a1bd6f1fb30f22573f76533de12a00bf274abcdc55c8edab639078abb6/cffi-2.0.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:dd4f05f54a52fb558f1ba9f528228066954fee3ebe629fc1660d874d040ae5a3", size = 206422, upload-time = "2025-09-08T23:23:07.753Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/98/df/0a1755e750013a2081e863e7cd37e0cdd02664372c754e5560099eb7aa44/cffi-2.0.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:c8d3b5532fc71b7a77c09192b4a5a200ea992702734a2e9279a37f2478236f26", size = 219499, upload-time = "2025-09-08T23:23:09.648Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/50/e1/a969e687fcf9ea58e6e2a928ad5e2dd88cc12f6f0ab477e9971f2309b57c/cffi-2.0.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:d9b29c1f0ae438d5ee9acb31cadee00a58c46cc9c0b2f9038c6b0b3470877a8c", size = 222928, upload-time = "2025-09-08T23:23:10.928Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/36/54/0362578dd2c9e557a28ac77698ed67323ed5b9775ca9d3fe73fe191bb5d8/cffi-2.0.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6d50360be4546678fc1b79ffe7a66265e28667840010348dd69a314145807a1b", size = 221302, upload-time = "2025-09-08T23:23:12.42Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/eb/6d/bf9bda840d5f1dfdbf0feca87fbdb64a918a69bca42cfa0ba7b137c48cb8/cffi-2.0.0-cp313-cp313-win32.whl", hash = "sha256:74a03b9698e198d47562765773b4a8309919089150a0bb17d829ad7b44b60d27", size = 172909, upload-time = "2025-09-08T23:23:14.32Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/37/18/6519e1ee6f5a1e579e04b9ddb6f1676c17368a7aba48299c3759bbc3c8b3/cffi-2.0.0-cp313-cp313-win_amd64.whl", hash = "sha256:19f705ada2530c1167abacb171925dd886168931e0a7b78f5bffcae5c6b5be75", size = 183402, upload-time = "2025-09-08T23:23:15.535Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/cb/0e/02ceeec9a7d6ee63bb596121c2c8e9b3a9e150936f4fbef6ca1943e6137c/cffi-2.0.0-cp313-cp313-win_arm64.whl", hash = "sha256:256f80b80ca3853f90c21b23ee78cd008713787b1b1e93eae9f3d6a7134abd91", size = 177780, upload-time = "2025-09-08T23:23:16.761Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/92/c4/3ce07396253a83250ee98564f8d7e9789fab8e58858f35d07a9a2c78de9f/cffi-2.0.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:fc33c5141b55ed366cfaad382df24fe7dcbc686de5be719b207bb248e3053dc5", size = 185320, upload-time = "2025-09-08T23:23:18.087Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/59/dd/27e9fa567a23931c838c6b02d0764611c62290062a6d4e8ff7863daf9730/cffi-2.0.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:c654de545946e0db659b3400168c9ad31b5d29593291482c43e3564effbcee13", size = 181487, upload-time = "2025-09-08T23:23:19.622Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d6/43/0e822876f87ea8a4ef95442c3d766a06a51fc5298823f884ef87aaad168c/cffi-2.0.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:24b6f81f1983e6df8db3adc38562c83f7d4a0c36162885ec7f7b77c7dcbec97b", size = 220049, upload-time = "2025-09-08T23:23:20.853Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/b4/89/76799151d9c2d2d1ead63c2429da9ea9d7aac304603de0c6e8764e6e8e70/cffi-2.0.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:12873ca6cb9b0f0d3a0da705d6086fe911591737a59f28b7936bdfed27c0d47c", size = 207793, upload-time = "2025-09-08T23:23:22.08Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/bb/dd/3465b14bb9e24ee24cb88c9e3730f6de63111fffe513492bf8c808a3547e/cffi-2.0.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:d9b97165e8aed9272a6bb17c01e3cc5871a594a446ebedc996e2397a1c1ea8ef", size = 206300, upload-time = "2025-09-08T23:23:23.314Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/47/d9/d83e293854571c877a92da46fdec39158f8d7e68da75bf73581225d28e90/cffi-2.0.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:afb8db5439b81cf9c9d0c80404b60c3cc9c3add93e114dcae767f1477cb53775", size = 219244, upload-time = "2025-09-08T23:23:24.541Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/2b/0f/1f177e3683aead2bb00f7679a16451d302c436b5cbf2505f0ea8146ef59e/cffi-2.0.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:737fe7d37e1a1bffe70bd5754ea763a62a066dc5913ca57e957824b72a85e205", size = 222828, upload-time = "2025-09-08T23:23:26.143Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/c6/0f/cafacebd4b040e3119dcb32fed8bdef8dfe94da653155f9d0b9dc660166e/cffi-2.0.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:38100abb9d1b1435bc4cc340bb4489635dc2f0da7456590877030c9b3d40b0c1", size = 220926, upload-time = "2025-09-08T23:23:27.873Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/3e/aa/df335faa45b395396fcbc03de2dfcab242cd61a9900e914fe682a59170b1/cffi-2.0.0-cp314-cp314-win32.whl", hash = "sha256:087067fa8953339c723661eda6b54bc98c5625757ea62e95eb4898ad5e776e9f", size = 175328, upload-time = "2025-09-08T23:23:44.61Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/bb/92/882c2d30831744296ce713f0feb4c1cd30f346ef747b530b5318715cc367/cffi-2.0.0-cp314-cp314-win_amd64.whl", hash = "sha256:203a48d1fb583fc7d78a4c6655692963b860a417c0528492a6bc21f1aaefab25", size = 185650, upload-time = "2025-09-08T23:23:45.848Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/9f/2c/98ece204b9d35a7366b5b2c6539c350313ca13932143e79dc133ba757104/cffi-2.0.0-cp314-cp314-win_arm64.whl", hash = "sha256:dbd5c7a25a7cb98f5ca55d258b103a2054f859a46ae11aaf23134f9cc0d356ad", size = 180687, upload-time = "2025-09-08T23:23:47.105Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/3e/61/c768e4d548bfa607abcda77423448df8c471f25dbe64fb2ef6d555eae006/cffi-2.0.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:9a67fc9e8eb39039280526379fb3a70023d77caec1852002b4da7e8b270c4dd9", size = 188773, upload-time = "2025-09-08T23:23:29.347Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/2c/ea/5f76bce7cf6fcd0ab1a1058b5af899bfbef198bea4d5686da88471ea0336/cffi-2.0.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:7a66c7204d8869299919db4d5069a82f1561581af12b11b3c9f48c584eb8743d", size = 185013, upload-time = "2025-09-08T23:23:30.63Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/be/b4/c56878d0d1755cf9caa54ba71e5d049479c52f9e4afc230f06822162ab2f/cffi-2.0.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7cc09976e8b56f8cebd752f7113ad07752461f48a58cbba644139015ac24954c", size = 221593, upload-time = "2025-09-08T23:23:31.91Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/e0/0d/eb704606dfe8033e7128df5e90fee946bbcb64a04fcdaa97321309004000/cffi-2.0.0-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:92b68146a71df78564e4ef48af17551a5ddd142e5190cdf2c5624d0c3ff5b2e8", size = 209354, upload-time = "2025-09-08T23:23:33.214Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d8/19/3c435d727b368ca475fb8742ab97c9cb13a0de600ce86f62eab7fa3eea60/cffi-2.0.0-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:b1e74d11748e7e98e2f426ab176d4ed720a64412b6a15054378afdb71e0f37dc", size = 208480, upload-time = "2025-09-08T23:23:34.495Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d0/44/681604464ed9541673e486521497406fadcc15b5217c3e326b061696899a/cffi-2.0.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:28a3a209b96630bca57cce802da70c266eb08c6e97e5afd61a75611ee6c64592", size = 221584, upload-time = "2025-09-08T23:23:36.096Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/25/8e/342a504ff018a2825d395d44d63a767dd8ebc927ebda557fecdaca3ac33a/cffi-2.0.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:7553fb2090d71822f02c629afe6042c299edf91ba1bf94951165613553984512", size = 224443, upload-time = "2025-09-08T23:23:37.328Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/e1/5e/b666bacbbc60fbf415ba9988324a132c9a7a0448a9a8f125074671c0f2c3/cffi-2.0.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:6c6c373cfc5c83a975506110d17457138c8c63016b563cc9ed6e056a82f13ce4", size = 223437, upload-time = "2025-09-08T23:23:38.945Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/a0/1d/ec1a60bd1a10daa292d3cd6bb0b359a81607154fb8165f3ec95fe003b85c/cffi-2.0.0-cp314-cp314t-win32.whl", hash = "sha256:1fc9ea04857caf665289b7a75923f2c6ed559b8298a1b8c49e59f7dd95c8481e", size = 180487, upload-time = "2025-09-08T23:23:40.423Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/bf/41/4c1168c74fac325c0c8156f04b6749c8b6a8f405bbf91413ba088359f60d/cffi-2.0.0-cp314-cp314t-win_amd64.whl", hash = "sha256:d68b6cef7827e8641e8ef16f4494edda8b36104d79773a334beaa1e3521430f6", size = 191726, upload-time = "2025-09-08T23:23:41.742Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ae/3a/dbeec9d1ee0844c679f6bb5d6ad4e9f198b1224f4e7a32825f47f6192b0c/cffi-2.0.0-cp314-cp314t-win_arm64.whl", hash = "sha256:0a1527a803f0a659de1af2e1fd700213caba79377e27e4693648c2923da066f9", size = 184195, upload-time = "2025-09-08T23:23:43.004Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "charset-normalizer"
|
||||
version = "3.4.4"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/13/69/33ddede1939fdd074bce5434295f38fae7136463422fe4fd3e0e89b98062/charset_normalizer-3.4.4.tar.gz", hash = "sha256:94537985111c35f28720e43603b8e7b43a6ecfb2ce1d3058bbe955b73404e21a", size = 129418, upload-time = "2025-10-14T04:42:32.879Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/97/45/4b3a1239bbacd321068ea6e7ac28875b03ab8bc0aa0966452db17cd36714/charset_normalizer-3.4.4-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:e1f185f86a6f3403aa2420e815904c67b2f9ebc443f045edd0de921108345794", size = 208091, upload-time = "2025-10-14T04:41:13.346Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/7d/62/73a6d7450829655a35bb88a88fca7d736f9882a27eacdca2c6d505b57e2e/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6b39f987ae8ccdf0d2642338faf2abb1862340facc796048b604ef14919e55ed", size = 147936, upload-time = "2025-10-14T04:41:14.461Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/89/c5/adb8c8b3d6625bef6d88b251bbb0d95f8205831b987631ab0c8bb5d937c2/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3162d5d8ce1bb98dd51af660f2121c55d0fa541b46dff7bb9b9f86ea1d87de72", size = 144180, upload-time = "2025-10-14T04:41:15.588Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/91/ed/9706e4070682d1cc219050b6048bfd293ccf67b3d4f5a4f39207453d4b99/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:81d5eb2a312700f4ecaa977a8235b634ce853200e828fbadf3a9c50bab278328", size = 161346, upload-time = "2025-10-14T04:41:16.738Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d5/0d/031f0d95e4972901a2f6f09ef055751805ff541511dc1252ba3ca1f80cf5/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5bd2293095d766545ec1a8f612559f6b40abc0eb18bb2f5d1171872d34036ede", size = 158874, upload-time = "2025-10-14T04:41:17.923Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f5/83/6ab5883f57c9c801ce5e5677242328aa45592be8a00644310a008d04f922/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a8a8b89589086a25749f471e6a900d3f662d1d3b6e2e59dcecf787b1cc3a1894", size = 153076, upload-time = "2025-10-14T04:41:19.106Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/75/1e/5ff781ddf5260e387d6419959ee89ef13878229732732ee73cdae01800f2/charset_normalizer-3.4.4-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:bc7637e2f80d8530ee4a78e878bce464f70087ce73cf7c1caf142416923b98f1", size = 150601, upload-time = "2025-10-14T04:41:20.245Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d7/57/71be810965493d3510a6ca79b90c19e48696fb1ff964da319334b12677f0/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f8bf04158c6b607d747e93949aa60618b61312fe647a6369f88ce2ff16043490", size = 150376, upload-time = "2025-10-14T04:41:21.398Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/e5/d5/c3d057a78c181d007014feb7e9f2e65905a6c4ef182c0ddf0de2924edd65/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:554af85e960429cf30784dd47447d5125aaa3b99a6f0683589dbd27e2f45da44", size = 144825, upload-time = "2025-10-14T04:41:22.583Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/e6/8c/d0406294828d4976f275ffbe66f00266c4b3136b7506941d87c00cab5272/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:74018750915ee7ad843a774364e13a3db91682f26142baddf775342c3f5b1133", size = 162583, upload-time = "2025-10-14T04:41:23.754Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d7/24/e2aa1f18c8f15c4c0e932d9287b8609dd30ad56dbe41d926bd846e22fb8d/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:c0463276121fdee9c49b98908b3a89c39be45d86d1dbaa22957e38f6321d4ce3", size = 150366, upload-time = "2025-10-14T04:41:25.27Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/e4/5b/1e6160c7739aad1e2df054300cc618b06bf784a7a164b0f238360721ab86/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:362d61fd13843997c1c446760ef36f240cf81d3ebf74ac62652aebaf7838561e", size = 160300, upload-time = "2025-10-14T04:41:26.725Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/7a/10/f882167cd207fbdd743e55534d5d9620e095089d176d55cb22d5322f2afd/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9a26f18905b8dd5d685d6d07b0cdf98a79f3c7a918906af7cc143ea2e164c8bc", size = 154465, upload-time = "2025-10-14T04:41:28.322Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/89/66/c7a9e1b7429be72123441bfdbaf2bc13faab3f90b933f664db506dea5915/charset_normalizer-3.4.4-cp313-cp313-win32.whl", hash = "sha256:9b35f4c90079ff2e2edc5b26c0c77925e5d2d255c42c74fdb70fb49b172726ac", size = 99404, upload-time = "2025-10-14T04:41:29.95Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/c4/26/b9924fa27db384bdcd97ab83b4f0a8058d96ad9626ead570674d5e737d90/charset_normalizer-3.4.4-cp313-cp313-win_amd64.whl", hash = "sha256:b435cba5f4f750aa6c0a0d92c541fb79f69a387c91e61f1795227e4ed9cece14", size = 107092, upload-time = "2025-10-14T04:41:31.188Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/af/8f/3ed4bfa0c0c72a7ca17f0380cd9e4dd842b09f664e780c13cff1dcf2ef1b/charset_normalizer-3.4.4-cp313-cp313-win_arm64.whl", hash = "sha256:542d2cee80be6f80247095cc36c418f7bddd14f4a6de45af91dfad36d817bba2", size = 100408, upload-time = "2025-10-14T04:41:32.624Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/2a/35/7051599bd493e62411d6ede36fd5af83a38f37c4767b92884df7301db25d/charset_normalizer-3.4.4-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:da3326d9e65ef63a817ecbcc0df6e94463713b754fe293eaa03da99befb9a5bd", size = 207746, upload-time = "2025-10-14T04:41:33.773Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/10/9a/97c8d48ef10d6cd4fcead2415523221624bf58bcf68a802721a6bc807c8f/charset_normalizer-3.4.4-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8af65f14dc14a79b924524b1e7fffe304517b2bff5a58bf64f30b98bbc5079eb", size = 147889, upload-time = "2025-10-14T04:41:34.897Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/10/bf/979224a919a1b606c82bd2c5fa49b5c6d5727aa47b4312bb27b1734f53cd/charset_normalizer-3.4.4-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:74664978bb272435107de04e36db5a9735e78232b85b77d45cfb38f758efd33e", size = 143641, upload-time = "2025-10-14T04:41:36.116Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ba/33/0ad65587441fc730dc7bd90e9716b30b4702dc7b617e6ba4997dc8651495/charset_normalizer-3.4.4-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:752944c7ffbfdd10c074dc58ec2d5a8a4cd9493b314d367c14d24c17684ddd14", size = 160779, upload-time = "2025-10-14T04:41:37.229Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/67/ed/331d6b249259ee71ddea93f6f2f0a56cfebd46938bde6fcc6f7b9a3d0e09/charset_normalizer-3.4.4-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d1f13550535ad8cff21b8d757a3257963e951d96e20ec82ab44bc64aeb62a191", size = 159035, upload-time = "2025-10-14T04:41:38.368Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/67/ff/f6b948ca32e4f2a4576aa129d8bed61f2e0543bf9f5f2b7fc3758ed005c9/charset_normalizer-3.4.4-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ecaae4149d99b1c9e7b88bb03e3221956f68fd6d50be2ef061b2381b61d20838", size = 152542, upload-time = "2025-10-14T04:41:39.862Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/16/85/276033dcbcc369eb176594de22728541a925b2632f9716428c851b149e83/charset_normalizer-3.4.4-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:cb6254dc36b47a990e59e1068afacdcd02958bdcce30bb50cc1700a8b9d624a6", size = 149524, upload-time = "2025-10-14T04:41:41.319Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/9e/f2/6a2a1f722b6aba37050e626530a46a68f74e63683947a8acff92569f979a/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:c8ae8a0f02f57a6e61203a31428fa1d677cbe50c93622b4149d5c0f319c1d19e", size = 150395, upload-time = "2025-10-14T04:41:42.539Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/60/bb/2186cb2f2bbaea6338cad15ce23a67f9b0672929744381e28b0592676824/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:47cc91b2f4dd2833fddaedd2893006b0106129d4b94fdb6af1f4ce5a9965577c", size = 143680, upload-time = "2025-10-14T04:41:43.661Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/7d/a5/bf6f13b772fbb2a90360eb620d52ed8f796f3c5caee8398c3b2eb7b1c60d/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:82004af6c302b5d3ab2cfc4cc5f29db16123b1a8417f2e25f9066f91d4411090", size = 162045, upload-time = "2025-10-14T04:41:44.821Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/df/c5/d1be898bf0dc3ef9030c3825e5d3b83f2c528d207d246cbabe245966808d/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:2b7d8f6c26245217bd2ad053761201e9f9680f8ce52f0fcd8d0755aeae5b2152", size = 149687, upload-time = "2025-10-14T04:41:46.442Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/a5/42/90c1f7b9341eef50c8a1cb3f098ac43b0508413f33affd762855f67a410e/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:799a7a5e4fb2d5898c60b640fd4981d6a25f1c11790935a44ce38c54e985f828", size = 160014, upload-time = "2025-10-14T04:41:47.631Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/76/be/4d3ee471e8145d12795ab655ece37baed0929462a86e72372fd25859047c/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:99ae2cffebb06e6c22bdc25801d7b30f503cc87dbd283479e7b606f70aff57ec", size = 154044, upload-time = "2025-10-14T04:41:48.81Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/b0/6f/8f7af07237c34a1defe7defc565a9bc1807762f672c0fde711a4b22bf9c0/charset_normalizer-3.4.4-cp314-cp314-win32.whl", hash = "sha256:f9d332f8c2a2fcbffe1378594431458ddbef721c1769d78e2cbc06280d8155f9", size = 99940, upload-time = "2025-10-14T04:41:49.946Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/4b/51/8ade005e5ca5b0d80fb4aff72a3775b325bdc3d27408c8113811a7cbe640/charset_normalizer-3.4.4-cp314-cp314-win_amd64.whl", hash = "sha256:8a6562c3700cce886c5be75ade4a5db4214fda19fede41d9792d100288d8f94c", size = 107104, upload-time = "2025-10-14T04:41:51.051Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/da/5f/6b8f83a55bb8278772c5ae54a577f3099025f9ade59d0136ac24a0df4bde/charset_normalizer-3.4.4-cp314-cp314-win_arm64.whl", hash = "sha256:de00632ca48df9daf77a2c65a484531649261ec9f25489917f09e455cb09ddb2", size = 100743, upload-time = "2025-10-14T04:41:52.122Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/0a/4c/925909008ed5a988ccbb72dcc897407e5d6d3bd72410d69e051fc0c14647/charset_normalizer-3.4.4-py3-none-any.whl", hash = "sha256:7a32c560861a02ff789ad905a2fe94e3f840803362c84fecf1851cb4cf3dc37f", size = 53402, upload-time = "2025-10-14T04:42:31.76Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "colorama"
|
||||
version = "0.4.6"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697, upload-time = "2022-10-25T02:36:22.414Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335, upload-time = "2022-10-25T02:36:20.889Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "corelibs"
|
||||
version = "0.41.0"
|
||||
source = { editable = "." }
|
||||
dependencies = [
|
||||
{ name = "corelibs-datetime" },
|
||||
{ name = "corelibs-enum-base" },
|
||||
{ name = "corelibs-var" },
|
||||
{ name = "cryptography" },
|
||||
{ name = "jmespath" },
|
||||
{ name = "jsonpath-ng" },
|
||||
{ name = "psutil" },
|
||||
{ name = "requests" },
|
||||
]
|
||||
|
||||
[package.dev-dependencies]
|
||||
dev = [
|
||||
{ name = "deepdiff" },
|
||||
{ name = "pytest" },
|
||||
{ name = "pytest-cov" },
|
||||
]
|
||||
|
||||
[package.metadata]
|
||||
requires-dist = [
|
||||
{ name = "corelibs-datetime", specifier = ">=1.0.1", index = "https://git.egplusww.jp/api/packages/PyPI/pypi/simple/" },
|
||||
{ name = "corelibs-enum-base", specifier = ">=1.0.0", index = "https://git.egplusww.jp/api/packages/PyPI/pypi/simple/" },
|
||||
{ name = "corelibs-var", specifier = ">=1.0.0", index = "https://git.egplusww.jp/api/packages/PyPI/pypi/simple/" },
|
||||
{ name = "cryptography", specifier = ">=46.0.3" },
|
||||
{ name = "jmespath", specifier = ">=1.0.1" },
|
||||
{ name = "jsonpath-ng", specifier = ">=1.7.0" },
|
||||
{ name = "psutil", specifier = ">=7.0.0" },
|
||||
{ name = "requests", extras = ["proxy"], specifier = ">=2.32.4" },
|
||||
]
|
||||
|
||||
[package.metadata.requires-dev]
|
||||
dev = [
|
||||
{ name = "deepdiff", specifier = ">=8.6.1" },
|
||||
{ name = "pytest", specifier = ">=8.4.1" },
|
||||
{ name = "pytest-cov", specifier = ">=6.2.1" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "corelibs-datetime"
|
||||
version = "1.0.1"
|
||||
source = { registry = "https://git.egplusww.jp/api/packages/PyPI/pypi/simple/" }
|
||||
dependencies = [
|
||||
{ name = "corelibs-var" },
|
||||
]
|
||||
sdist = { url = "https://git.egplusww.jp/api/packages/PyPI/pypi/files/corelibs-datetime/1.0.1/corelibs_datetime-1.0.1.tar.gz", hash = "sha256:ff58c6f824f35b87b1a5c153f65fdd82b65e42bb5a649d46d9115dc5fa61042f" }
|
||||
wheels = [
|
||||
{ url = "https://git.egplusww.jp/api/packages/PyPI/pypi/files/corelibs-datetime/1.0.1/corelibs_datetime-1.0.1-py3-none-any.whl", hash = "sha256:f1a4d431f9f913dd39976a119ff8a2db34e966c61b1775c26b0da72a8bdb5ec1" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "corelibs-enum-base"
|
||||
version = "1.0.0"
|
||||
source = { registry = "https://git.egplusww.jp/api/packages/PyPI/pypi/simple/" }
|
||||
sdist = { url = "https://git.egplusww.jp/api/packages/PyPI/pypi/files/corelibs-enum-base/1.0.0/corelibs_enum_base-1.0.0.tar.gz", hash = "sha256:c696a297d88f674d40e5d190f396909b5f663a995ac735e545ceb5bb4907121d" }
|
||||
wheels = [
|
||||
{ url = "https://git.egplusww.jp/api/packages/PyPI/pypi/files/corelibs-enum-base/1.0.0/corelibs_enum_base-1.0.0-py3-none-any.whl", hash = "sha256:c305d4063c69021aaf9ef75fbcce961039dae3c3de7820febeac7082c998a1f8" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "corelibs-var"
|
||||
version = "1.0.0"
|
||||
source = { registry = "https://git.egplusww.jp/api/packages/PyPI/pypi/simple/" }
|
||||
sdist = { url = "https://git.egplusww.jp/api/packages/PyPI/pypi/files/corelibs-var/1.0.0/corelibs_var-1.0.0.tar.gz", hash = "sha256:b85d6fd3802a1b687290666e4b1dbb47cf9723aa72bf73eb004e9e4936776364" }
|
||||
wheels = [
|
||||
{ url = "https://git.egplusww.jp/api/packages/PyPI/pypi/files/corelibs-var/1.0.0/corelibs_var-1.0.0-py3-none-any.whl", hash = "sha256:a3546785bf9c94eec08b5c500b69b971e83e11d92bc0e4d3cbd9411a561fdbc2" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "coverage"
|
||||
version = "7.12.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/89/26/4a96807b193b011588099c3b5c89fbb05294e5b90e71018e065465f34eb6/coverage-7.12.0.tar.gz", hash = "sha256:fc11e0a4e372cb5f282f16ef90d4a585034050ccda536451901abfb19a57f40c", size = 819341, upload-time = "2025-11-18T13:34:20.766Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/b8/14/771700b4048774e48d2c54ed0c674273702713c9ee7acdfede40c2666747/coverage-7.12.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:47324fffca8d8eae7e185b5bb20c14645f23350f870c1649003618ea91a78941", size = 217725, upload-time = "2025-11-18T13:32:49.22Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/17/a7/3aa4144d3bcb719bf67b22d2d51c2d577bf801498c13cb08f64173e80497/coverage-7.12.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:ccf3b2ede91decd2fb53ec73c1f949c3e034129d1e0b07798ff1d02ea0c8fa4a", size = 218098, upload-time = "2025-11-18T13:32:50.78Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/fc/9c/b846bbc774ff81091a12a10203e70562c91ae71badda00c5ae5b613527b1/coverage-7.12.0-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:b365adc70a6936c6b0582dc38746b33b2454148c02349345412c6e743efb646d", size = 249093, upload-time = "2025-11-18T13:32:52.554Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/76/b6/67d7c0e1f400b32c883e9342de4a8c2ae7c1a0b57c5de87622b7262e2309/coverage-7.12.0-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:bc13baf85cd8a4cfcf4a35c7bc9d795837ad809775f782f697bf630b7e200211", size = 251686, upload-time = "2025-11-18T13:32:54.862Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/cc/75/b095bd4b39d49c3be4bffbb3135fea18a99a431c52dd7513637c0762fecb/coverage-7.12.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:099d11698385d572ceafb3288a5b80fe1fc58bf665b3f9d362389de488361d3d", size = 252930, upload-time = "2025-11-18T13:32:56.417Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/6e/f3/466f63015c7c80550bead3093aacabf5380c1220a2a93c35d374cae8f762/coverage-7.12.0-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:473dc45d69694069adb7680c405fb1e81f60b2aff42c81e2f2c3feaf544d878c", size = 249296, upload-time = "2025-11-18T13:32:58.074Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/27/86/eba2209bf2b7e28c68698fc13437519a295b2d228ba9e0ec91673e09fa92/coverage-7.12.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:583f9adbefd278e9de33c33d6846aa8f5d164fa49b47144180a0e037f0688bb9", size = 251068, upload-time = "2025-11-18T13:32:59.646Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ec/55/ca8ae7dbba962a3351f18940b359b94c6bafdd7757945fdc79ec9e452dc7/coverage-7.12.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:b2089cc445f2dc0af6f801f0d1355c025b76c24481935303cf1af28f636688f0", size = 249034, upload-time = "2025-11-18T13:33:01.481Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/7a/d7/39136149325cad92d420b023b5fd900dabdd1c3a0d1d5f148ef4a8cedef5/coverage-7.12.0-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:950411f1eb5d579999c5f66c62a40961f126fc71e5e14419f004471957b51508", size = 248853, upload-time = "2025-11-18T13:33:02.935Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/fe/b6/76e1add8b87ef60e00643b0b7f8f7bb73d4bf5249a3be19ebefc5793dd25/coverage-7.12.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:b1aab7302a87bafebfe76b12af681b56ff446dc6f32ed178ff9c092ca776e6bc", size = 250619, upload-time = "2025-11-18T13:33:04.336Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/95/87/924c6dc64f9203f7a3c1832a6a0eee5a8335dbe5f1bdadcc278d6f1b4d74/coverage-7.12.0-cp313-cp313-win32.whl", hash = "sha256:d7e0d0303c13b54db495eb636bc2465b2fb8475d4c8bcec8fe4b5ca454dfbae8", size = 220261, upload-time = "2025-11-18T13:33:06.493Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/91/77/dd4aff9af16ff776bf355a24d87eeb48fc6acde54c907cc1ea89b14a8804/coverage-7.12.0-cp313-cp313-win_amd64.whl", hash = "sha256:ce61969812d6a98a981d147d9ac583a36ac7db7766f2e64a9d4d059c2fe29d07", size = 221072, upload-time = "2025-11-18T13:33:07.926Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/70/49/5c9dc46205fef31b1b226a6e16513193715290584317fd4df91cdaf28b22/coverage-7.12.0-cp313-cp313-win_arm64.whl", hash = "sha256:bcec6f47e4cb8a4c2dc91ce507f6eefc6a1b10f58df32cdc61dff65455031dfc", size = 219702, upload-time = "2025-11-18T13:33:09.631Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/9b/62/f87922641c7198667994dd472a91e1d9b829c95d6c29529ceb52132436ad/coverage-7.12.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:459443346509476170d553035e4a3eed7b860f4fe5242f02de1010501956ce87", size = 218420, upload-time = "2025-11-18T13:33:11.153Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/85/dd/1cc13b2395ef15dbb27d7370a2509b4aee77890a464fb35d72d428f84871/coverage-7.12.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:04a79245ab2b7a61688958f7a855275997134bc84f4a03bc240cf64ff132abf6", size = 218773, upload-time = "2025-11-18T13:33:12.569Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/74/40/35773cc4bb1e9d4658d4fb669eb4195b3151bef3bbd6f866aba5cd5dac82/coverage-7.12.0-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:09a86acaaa8455f13d6a99221d9654df249b33937b4e212b4e5a822065f12aa7", size = 260078, upload-time = "2025-11-18T13:33:14.037Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ec/ee/231bb1a6ffc2905e396557585ebc6bdc559e7c66708376d245a1f1d330fc/coverage-7.12.0-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:907e0df1b71ba77463687a74149c6122c3f6aac56c2510a5d906b2f368208560", size = 262144, upload-time = "2025-11-18T13:33:15.601Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/28/be/32f4aa9f3bf0b56f3971001b56508352c7753915345d45fab4296a986f01/coverage-7.12.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9b57e2d0ddd5f0582bae5437c04ee71c46cd908e7bc5d4d0391f9a41e812dd12", size = 264574, upload-time = "2025-11-18T13:33:17.354Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/68/7c/00489fcbc2245d13ab12189b977e0cf06ff3351cb98bc6beba8bd68c5902/coverage-7.12.0-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:58c1c6aa677f3a1411fe6fb28ec3a942e4f665df036a3608816e0847fad23296", size = 259298, upload-time = "2025-11-18T13:33:18.958Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/96/b4/f0760d65d56c3bea95b449e02570d4abd2549dc784bf39a2d4721a2d8ceb/coverage-7.12.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:4c589361263ab2953e3c4cd2a94db94c4ad4a8e572776ecfbad2389c626e4507", size = 262150, upload-time = "2025-11-18T13:33:20.644Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/c5/71/9a9314df00f9326d78c1e5a910f520d599205907432d90d1c1b7a97aa4b1/coverage-7.12.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:91b810a163ccad2e43b1faa11d70d3cf4b6f3d83f9fd5f2df82a32d47b648e0d", size = 259763, upload-time = "2025-11-18T13:33:22.189Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/10/34/01a0aceed13fbdf925876b9a15d50862eb8845454301fe3cdd1df08b2182/coverage-7.12.0-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:40c867af715f22592e0d0fb533a33a71ec9e0f73a6945f722a0c85c8c1cbe3a2", size = 258653, upload-time = "2025-11-18T13:33:24.239Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/8d/04/81d8fd64928acf1574bbb0181f66901c6c1c6279c8ccf5f84259d2c68ae9/coverage-7.12.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:68b0d0a2d84f333de875666259dadf28cc67858bc8fd8b3f1eae84d3c2bec455", size = 260856, upload-time = "2025-11-18T13:33:26.365Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f2/76/fa2a37bfaeaf1f766a2d2360a25a5297d4fb567098112f6517475eee120b/coverage-7.12.0-cp313-cp313t-win32.whl", hash = "sha256:73f9e7fbd51a221818fd11b7090eaa835a353ddd59c236c57b2199486b116c6d", size = 220936, upload-time = "2025-11-18T13:33:28.165Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f9/52/60f64d932d555102611c366afb0eb434b34266b1d9266fc2fe18ab641c47/coverage-7.12.0-cp313-cp313t-win_amd64.whl", hash = "sha256:24cff9d1f5743f67db7ba46ff284018a6e9aeb649b67aa1e70c396aa1b7cb23c", size = 222001, upload-time = "2025-11-18T13:33:29.656Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/77/df/c303164154a5a3aea7472bf323b7c857fed93b26618ed9fc5c2955566bb0/coverage-7.12.0-cp313-cp313t-win_arm64.whl", hash = "sha256:c87395744f5c77c866d0f5a43d97cc39e17c7f1cb0115e54a2fe67ca75c5d14d", size = 220273, upload-time = "2025-11-18T13:33:31.415Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/bf/2e/fc12db0883478d6e12bbd62d481210f0c8daf036102aa11434a0c5755825/coverage-7.12.0-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:a1c59b7dc169809a88b21a936eccf71c3895a78f5592051b1af8f4d59c2b4f92", size = 217777, upload-time = "2025-11-18T13:33:32.86Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/1f/c1/ce3e525d223350c6ec16b9be8a057623f54226ef7f4c2fee361ebb6a02b8/coverage-7.12.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:8787b0f982e020adb732b9f051f3e49dd5054cebbc3f3432061278512a2b1360", size = 218100, upload-time = "2025-11-18T13:33:34.532Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/15/87/113757441504aee3808cb422990ed7c8bcc2d53a6779c66c5adef0942939/coverage-7.12.0-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:5ea5a9f7dc8877455b13dd1effd3202e0bca72f6f3ab09f9036b1bcf728f69ac", size = 249151, upload-time = "2025-11-18T13:33:36.135Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d9/1d/9529d9bd44049b6b05bb319c03a3a7e4b0a8a802d28fa348ad407e10706d/coverage-7.12.0-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:fdba9f15849534594f60b47c9a30bc70409b54947319a7c4fd0e8e3d8d2f355d", size = 251667, upload-time = "2025-11-18T13:33:37.996Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/11/bb/567e751c41e9c03dc29d3ce74b8c89a1e3396313e34f255a2a2e8b9ebb56/coverage-7.12.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a00594770eb715854fb1c57e0dea08cce6720cfbc531accdb9850d7c7770396c", size = 253003, upload-time = "2025-11-18T13:33:39.553Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/e4/b3/c2cce2d8526a02fb9e9ca14a263ca6fc074449b33a6afa4892838c903528/coverage-7.12.0-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:5560c7e0d82b42eb1951e4f68f071f8017c824ebfd5a6ebe42c60ac16c6c2434", size = 249185, upload-time = "2025-11-18T13:33:42.086Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/0e/a7/967f93bb66e82c9113c66a8d0b65ecf72fc865adfba5a145f50c7af7e58d/coverage-7.12.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:d6c2e26b481c9159c2773a37947a9718cfdc58893029cdfb177531793e375cfc", size = 251025, upload-time = "2025-11-18T13:33:43.634Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/b9/b2/f2f6f56337bc1af465d5b2dc1ee7ee2141b8b9272f3bf6213fcbc309a836/coverage-7.12.0-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:6e1a8c066dabcde56d5d9fed6a66bc19a2883a3fe051f0c397a41fc42aedd4cc", size = 248979, upload-time = "2025-11-18T13:33:46.04Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f4/7a/bf4209f45a4aec09d10a01a57313a46c0e0e8f4c55ff2965467d41a92036/coverage-7.12.0-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:f7ba9da4726e446d8dd8aae5a6cd872511184a5d861de80a86ef970b5dacce3e", size = 248800, upload-time = "2025-11-18T13:33:47.546Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/b8/b7/1e01b8696fb0521810f60c5bbebf699100d6754183e6cc0679bf2ed76531/coverage-7.12.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:e0f483ab4f749039894abaf80c2f9e7ed77bbf3c737517fb88c8e8e305896a17", size = 250460, upload-time = "2025-11-18T13:33:49.537Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/71/ae/84324fb9cb46c024760e706353d9b771a81b398d117d8c1fe010391c186f/coverage-7.12.0-cp314-cp314-win32.whl", hash = "sha256:76336c19a9ef4a94b2f8dc79f8ac2da3f193f625bb5d6f51a328cd19bfc19933", size = 220533, upload-time = "2025-11-18T13:33:51.16Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/e2/71/1033629deb8460a8f97f83e6ac4ca3b93952e2b6f826056684df8275e015/coverage-7.12.0-cp314-cp314-win_amd64.whl", hash = "sha256:7c1059b600aec6ef090721f8f633f60ed70afaffe8ecab85b59df748f24b31fe", size = 221348, upload-time = "2025-11-18T13:33:52.776Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/0a/5f/ac8107a902f623b0c251abdb749be282dc2ab61854a8a4fcf49e276fce2f/coverage-7.12.0-cp314-cp314-win_arm64.whl", hash = "sha256:172cf3a34bfef42611963e2b661302a8931f44df31629e5b1050567d6b90287d", size = 219922, upload-time = "2025-11-18T13:33:54.316Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/79/6e/f27af2d4da367f16077d21ef6fe796c874408219fa6dd3f3efe7751bd910/coverage-7.12.0-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:aa7d48520a32cb21c7a9b31f81799e8eaec7239db36c3b670be0fa2403828d1d", size = 218511, upload-time = "2025-11-18T13:33:56.343Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/67/dd/65fd874aa460c30da78f9d259400d8e6a4ef457d61ab052fd248f0050558/coverage-7.12.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:90d58ac63bc85e0fb919f14d09d6caa63f35a5512a2205284b7816cafd21bb03", size = 218771, upload-time = "2025-11-18T13:33:57.966Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/55/e0/7c6b71d327d8068cb79c05f8f45bf1b6145f7a0de23bbebe63578fe5240a/coverage-7.12.0-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:ca8ecfa283764fdda3eae1bdb6afe58bf78c2c3ec2b2edcb05a671f0bba7b3f9", size = 260151, upload-time = "2025-11-18T13:33:59.597Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/49/ce/4697457d58285b7200de6b46d606ea71066c6e674571a946a6ea908fb588/coverage-7.12.0-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:874fe69a0785d96bd066059cd4368022cebbec1a8958f224f0016979183916e6", size = 262257, upload-time = "2025-11-18T13:34:01.166Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/2f/33/acbc6e447aee4ceba88c15528dbe04a35fb4d67b59d393d2e0d6f1e242c1/coverage-7.12.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5b3c889c0b8b283a24d721a9eabc8ccafcfc3aebf167e4cd0d0e23bf8ec4e339", size = 264671, upload-time = "2025-11-18T13:34:02.795Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/87/ec/e2822a795c1ed44d569980097be839c5e734d4c0c1119ef8e0a073496a30/coverage-7.12.0-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:8bb5b894b3ec09dcd6d3743229dc7f2c42ef7787dc40596ae04c0edda487371e", size = 259231, upload-time = "2025-11-18T13:34:04.397Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/72/c5/a7ec5395bb4a49c9b7ad97e63f0c92f6bf4a9e006b1393555a02dae75f16/coverage-7.12.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:79a44421cd5fba96aa57b5e3b5a4d3274c449d4c622e8f76882d76635501fd13", size = 262137, upload-time = "2025-11-18T13:34:06.068Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/67/0c/02c08858b764129f4ecb8e316684272972e60777ae986f3865b10940bdd6/coverage-7.12.0-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:33baadc0efd5c7294f436a632566ccc1f72c867f82833eb59820ee37dc811c6f", size = 259745, upload-time = "2025-11-18T13:34:08.04Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/5a/04/4fd32b7084505f3829a8fe45c1a74a7a728cb251aaadbe3bec04abcef06d/coverage-7.12.0-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:c406a71f544800ef7e9e0000af706b88465f3573ae8b8de37e5f96c59f689ad1", size = 258570, upload-time = "2025-11-18T13:34:09.676Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/48/35/2365e37c90df4f5342c4fa202223744119fe31264ee2924f09f074ea9b6d/coverage-7.12.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:e71bba6a40883b00c6d571599b4627f50c360b3d0d02bfc658168936be74027b", size = 260899, upload-time = "2025-11-18T13:34:11.259Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/05/56/26ab0464ca733fa325e8e71455c58c1c374ce30f7c04cebb88eabb037b18/coverage-7.12.0-cp314-cp314t-win32.whl", hash = "sha256:9157a5e233c40ce6613dead4c131a006adfda70e557b6856b97aceed01b0e27a", size = 221313, upload-time = "2025-11-18T13:34:12.863Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/da/1c/017a3e1113ed34d998b27d2c6dba08a9e7cb97d362f0ec988fcd873dcf81/coverage-7.12.0-cp314-cp314t-win_amd64.whl", hash = "sha256:e84da3a0fd233aeec797b981c51af1cabac74f9bd67be42458365b30d11b5291", size = 222423, upload-time = "2025-11-18T13:34:15.14Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/4c/36/bcc504fdd5169301b52568802bb1b9cdde2e27a01d39fbb3b4b508ab7c2c/coverage-7.12.0-cp314-cp314t-win_arm64.whl", hash = "sha256:01d24af36fedda51c2b1aca56e4330a3710f83b02a5ff3743a6b015ffa7c9384", size = 220459, upload-time = "2025-11-18T13:34:17.222Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ce/a3/43b749004e3c09452e39bb56347a008f0a0668aad37324a99b5c8ca91d9e/coverage-7.12.0-py3-none-any.whl", hash = "sha256:159d50c0b12e060b15ed3d39f87ed43d4f7f7ad40b8a534f4dd331adbb51104a", size = 209503, upload-time = "2025-11-18T13:34:18.892Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "cryptography"
|
||||
version = "46.0.3"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "cffi", marker = "platform_python_implementation != 'PyPy'" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/9f/33/c00162f49c0e2fe8064a62cb92b93e50c74a72bc370ab92f86112b33ff62/cryptography-46.0.3.tar.gz", hash = "sha256:a8b17438104fed022ce745b362294d9ce35b4c2e45c1d958ad4a4b019285f4a1", size = 749258, upload-time = "2025-10-15T23:18:31.74Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/1d/42/9c391dd801d6cf0d561b5890549d4b27bafcc53b39c31a817e69d87c625b/cryptography-46.0.3-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:109d4ddfadf17e8e7779c39f9b18111a09efb969a301a31e987416a0191ed93a", size = 7225004, upload-time = "2025-10-15T23:16:52.239Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/1c/67/38769ca6b65f07461eb200e85fc1639b438bdc667be02cf7f2cd6a64601c/cryptography-46.0.3-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:09859af8466b69bc3c27bdf4f5d84a665e0f7ab5088412e9e2ec49758eca5cbc", size = 4296667, upload-time = "2025-10-15T23:16:54.369Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/5c/49/498c86566a1d80e978b42f0d702795f69887005548c041636df6ae1ca64c/cryptography-46.0.3-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:01ca9ff2885f3acc98c29f1860552e37f6d7c7d013d7334ff2a9de43a449315d", size = 4450807, upload-time = "2025-10-15T23:16:56.414Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/4b/0a/863a3604112174c8624a2ac3c038662d9e59970c7f926acdcfaed8d61142/cryptography-46.0.3-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:6eae65d4c3d33da080cff9c4ab1f711b15c1d9760809dad6ea763f3812d254cb", size = 4299615, upload-time = "2025-10-15T23:16:58.442Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/64/02/b73a533f6b64a69f3cd3872acb6ebc12aef924d8d103133bb3ea750dc703/cryptography-46.0.3-cp311-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:e5bf0ed4490068a2e72ac03d786693adeb909981cc596425d09032d372bcc849", size = 4016800, upload-time = "2025-10-15T23:17:00.378Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/25/d5/16e41afbfa450cde85a3b7ec599bebefaef16b5c6ba4ec49a3532336ed72/cryptography-46.0.3-cp311-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:5ecfccd2329e37e9b7112a888e76d9feca2347f12f37918facbb893d7bb88ee8", size = 4984707, upload-time = "2025-10-15T23:17:01.98Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/c9/56/e7e69b427c3878352c2fb9b450bd0e19ed552753491d39d7d0a2f5226d41/cryptography-46.0.3-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:a2c0cd47381a3229c403062f764160d57d4d175e022c1df84e168c6251a22eec", size = 4482541, upload-time = "2025-10-15T23:17:04.078Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/78/f6/50736d40d97e8483172f1bb6e698895b92a223dba513b0ca6f06b2365339/cryptography-46.0.3-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:549e234ff32571b1f4076ac269fcce7a808d3bf98b76c8dd560e42dbc66d7d91", size = 4299464, upload-time = "2025-10-15T23:17:05.483Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/00/de/d8e26b1a855f19d9994a19c702fa2e93b0456beccbcfe437eda00e0701f2/cryptography-46.0.3-cp311-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:c0a7bb1a68a5d3471880e264621346c48665b3bf1c3759d682fc0864c540bd9e", size = 4950838, upload-time = "2025-10-15T23:17:07.425Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/8f/29/798fc4ec461a1c9e9f735f2fc58741b0daae30688f41b2497dcbc9ed1355/cryptography-46.0.3-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:10b01676fc208c3e6feeb25a8b83d81767e8059e1fe86e1dc62d10a3018fa926", size = 4481596, upload-time = "2025-10-15T23:17:09.343Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/15/8d/03cd48b20a573adfff7652b76271078e3045b9f49387920e7f1f631d125e/cryptography-46.0.3-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:0abf1ffd6e57c67e92af68330d05760b7b7efb243aab8377e583284dbab72c71", size = 4426782, upload-time = "2025-10-15T23:17:11.22Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/fa/b1/ebacbfe53317d55cf33165bda24c86523497a6881f339f9aae5c2e13e57b/cryptography-46.0.3-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:a04bee9ab6a4da801eb9b51f1b708a1b5b5c9eb48c03f74198464c66f0d344ac", size = 4698381, upload-time = "2025-10-15T23:17:12.829Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/96/92/8a6a9525893325fc057a01f654d7efc2c64b9de90413adcf605a85744ff4/cryptography-46.0.3-cp311-abi3-win32.whl", hash = "sha256:f260d0d41e9b4da1ed1e0f1ce571f97fe370b152ab18778e9e8f67d6af432018", size = 3055988, upload-time = "2025-10-15T23:17:14.65Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/7e/bf/80fbf45253ea585a1e492a6a17efcb93467701fa79e71550a430c5e60df0/cryptography-46.0.3-cp311-abi3-win_amd64.whl", hash = "sha256:a9a3008438615669153eb86b26b61e09993921ebdd75385ddd748702c5adfddb", size = 3514451, upload-time = "2025-10-15T23:17:16.142Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/2e/af/9b302da4c87b0beb9db4e756386a7c6c5b8003cd0e742277888d352ae91d/cryptography-46.0.3-cp311-abi3-win_arm64.whl", hash = "sha256:5d7f93296ee28f68447397bf5198428c9aeeab45705a55d53a6343455dcb2c3c", size = 2928007, upload-time = "2025-10-15T23:17:18.04Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f5/e2/a510aa736755bffa9d2f75029c229111a1d02f8ecd5de03078f4c18d91a3/cryptography-46.0.3-cp314-cp314t-macosx_10_9_universal2.whl", hash = "sha256:00a5e7e87938e5ff9ff5447ab086a5706a957137e6e433841e9d24f38a065217", size = 7158012, upload-time = "2025-10-15T23:17:19.982Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/73/dc/9aa866fbdbb95b02e7f9d086f1fccfeebf8953509b87e3f28fff927ff8a0/cryptography-46.0.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:c8daeb2d2174beb4575b77482320303f3d39b8e81153da4f0fb08eb5fe86a6c5", size = 4288728, upload-time = "2025-10-15T23:17:21.527Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/c5/fd/bc1daf8230eaa075184cbbf5f8cd00ba9db4fd32d63fb83da4671b72ed8a/cryptography-46.0.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:39b6755623145ad5eff1dab323f4eae2a32a77a7abef2c5089a04a3d04366715", size = 4435078, upload-time = "2025-10-15T23:17:23.042Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/82/98/d3bd5407ce4c60017f8ff9e63ffee4200ab3e23fe05b765cab805a7db008/cryptography-46.0.3-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:db391fa7c66df6762ee3f00c95a89e6d428f4d60e7abc8328f4fe155b5ac6e54", size = 4293460, upload-time = "2025-10-15T23:17:24.885Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/26/e9/e23e7900983c2b8af7a08098db406cf989d7f09caea7897e347598d4cd5b/cryptography-46.0.3-cp314-cp314t-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:78a97cf6a8839a48c49271cdcbd5cf37ca2c1d6b7fdd86cc864f302b5e9bf459", size = 3995237, upload-time = "2025-10-15T23:17:26.449Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/91/15/af68c509d4a138cfe299d0d7ddb14afba15233223ebd933b4bbdbc7155d3/cryptography-46.0.3-cp314-cp314t-manylinux_2_28_ppc64le.whl", hash = "sha256:dfb781ff7eaa91a6f7fd41776ec37c5853c795d3b358d4896fdbb5df168af422", size = 4967344, upload-time = "2025-10-15T23:17:28.06Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ca/e3/8643d077c53868b681af077edf6b3cb58288b5423610f21c62aadcbe99f4/cryptography-46.0.3-cp314-cp314t-manylinux_2_28_x86_64.whl", hash = "sha256:6f61efb26e76c45c4a227835ddeae96d83624fb0d29eb5df5b96e14ed1a0afb7", size = 4466564, upload-time = "2025-10-15T23:17:29.665Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/0e/43/c1e8726fa59c236ff477ff2b5dc071e54b21e5a1e51aa2cee1676f1c986f/cryptography-46.0.3-cp314-cp314t-manylinux_2_34_aarch64.whl", hash = "sha256:23b1a8f26e43f47ceb6d6a43115f33a5a37d57df4ea0ca295b780ae8546e8044", size = 4292415, upload-time = "2025-10-15T23:17:31.686Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/42/f9/2f8fefdb1aee8a8e3256a0568cffc4e6d517b256a2fe97a029b3f1b9fe7e/cryptography-46.0.3-cp314-cp314t-manylinux_2_34_ppc64le.whl", hash = "sha256:b419ae593c86b87014b9be7396b385491ad7f320bde96826d0dd174459e54665", size = 4931457, upload-time = "2025-10-15T23:17:33.478Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/79/30/9b54127a9a778ccd6d27c3da7563e9f2d341826075ceab89ae3b41bf5be2/cryptography-46.0.3-cp314-cp314t-manylinux_2_34_x86_64.whl", hash = "sha256:50fc3343ac490c6b08c0cf0d704e881d0d660be923fd3076db3e932007e726e3", size = 4466074, upload-time = "2025-10-15T23:17:35.158Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ac/68/b4f4a10928e26c941b1b6a179143af9f4d27d88fe84a6a3c53592d2e76bf/cryptography-46.0.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:22d7e97932f511d6b0b04f2bfd818d73dcd5928db509460aaf48384778eb6d20", size = 4420569, upload-time = "2025-10-15T23:17:37.188Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/a3/49/3746dab4c0d1979888f125226357d3262a6dd40e114ac29e3d2abdf1ec55/cryptography-46.0.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:d55f3dffadd674514ad19451161118fd010988540cee43d8bc20675e775925de", size = 4681941, upload-time = "2025-10-15T23:17:39.236Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/fd/30/27654c1dbaf7e4a3531fa1fc77986d04aefa4d6d78259a62c9dc13d7ad36/cryptography-46.0.3-cp314-cp314t-win32.whl", hash = "sha256:8a6e050cb6164d3f830453754094c086ff2d0b2f3a897a1d9820f6139a1f0914", size = 3022339, upload-time = "2025-10-15T23:17:40.888Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f6/30/640f34ccd4d2a1bc88367b54b926b781b5a018d65f404d409aba76a84b1c/cryptography-46.0.3-cp314-cp314t-win_amd64.whl", hash = "sha256:760f83faa07f8b64e9c33fc963d790a2edb24efb479e3520c14a45741cd9b2db", size = 3494315, upload-time = "2025-10-15T23:17:42.769Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ba/8b/88cc7e3bd0a8e7b861f26981f7b820e1f46aa9d26cc482d0feba0ecb4919/cryptography-46.0.3-cp314-cp314t-win_arm64.whl", hash = "sha256:516ea134e703e9fe26bcd1277a4b59ad30586ea90c365a87781d7887a646fe21", size = 2919331, upload-time = "2025-10-15T23:17:44.468Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/fd/23/45fe7f376a7df8daf6da3556603b36f53475a99ce4faacb6ba2cf3d82021/cryptography-46.0.3-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:cb3d760a6117f621261d662bccc8ef5bc32ca673e037c83fbe565324f5c46936", size = 7218248, upload-time = "2025-10-15T23:17:46.294Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/27/32/b68d27471372737054cbd34c84981f9edbc24fe67ca225d389799614e27f/cryptography-46.0.3-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:4b7387121ac7d15e550f5cb4a43aef2559ed759c35df7336c402bb8275ac9683", size = 4294089, upload-time = "2025-10-15T23:17:48.269Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/26/42/fa8389d4478368743e24e61eea78846a0006caffaf72ea24a15159215a14/cryptography-46.0.3-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:15ab9b093e8f09daab0f2159bb7e47532596075139dd74365da52ecc9cb46c5d", size = 4440029, upload-time = "2025-10-15T23:17:49.837Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/5f/eb/f483db0ec5ac040824f269e93dd2bd8a21ecd1027e77ad7bdf6914f2fd80/cryptography-46.0.3-cp38-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:46acf53b40ea38f9c6c229599a4a13f0d46a6c3fa9ef19fc1a124d62e338dfa0", size = 4297222, upload-time = "2025-10-15T23:17:51.357Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/fd/cf/da9502c4e1912cb1da3807ea3618a6829bee8207456fbbeebc361ec38ba3/cryptography-46.0.3-cp38-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:10ca84c4668d066a9878890047f03546f3ae0a6b8b39b697457b7757aaf18dbc", size = 4012280, upload-time = "2025-10-15T23:17:52.964Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/6b/8f/9adb86b93330e0df8b3dcf03eae67c33ba89958fc2e03862ef1ac2b42465/cryptography-46.0.3-cp38-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:36e627112085bb3b81b19fed209c05ce2a52ee8b15d161b7c643a7d5a88491f3", size = 4978958, upload-time = "2025-10-15T23:17:54.965Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d1/a0/5fa77988289c34bdb9f913f5606ecc9ada1adb5ae870bd0d1054a7021cc4/cryptography-46.0.3-cp38-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:1000713389b75c449a6e979ffc7dcc8ac90b437048766cef052d4d30b8220971", size = 4473714, upload-time = "2025-10-15T23:17:56.754Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/14/e5/fc82d72a58d41c393697aa18c9abe5ae1214ff6f2a5c18ac470f92777895/cryptography-46.0.3-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:b02cf04496f6576afffef5ddd04a0cb7d49cf6be16a9059d793a30b035f6b6ac", size = 4296970, upload-time = "2025-10-15T23:17:58.588Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/78/06/5663ed35438d0b09056973994f1aec467492b33bd31da36e468b01ec1097/cryptography-46.0.3-cp38-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:71e842ec9bc7abf543b47cf86b9a743baa95f4677d22baa4c7d5c69e49e9bc04", size = 4940236, upload-time = "2025-10-15T23:18:00.897Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/fc/59/873633f3f2dcd8a053b8dd1d38f783043b5fce589c0f6988bf55ef57e43e/cryptography-46.0.3-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:402b58fc32614f00980b66d6e56a5b4118e6cb362ae8f3fda141ba4689bd4506", size = 4472642, upload-time = "2025-10-15T23:18:02.749Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/3d/39/8e71f3930e40f6877737d6f69248cf74d4e34b886a3967d32f919cc50d3b/cryptography-46.0.3-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:ef639cb3372f69ec44915fafcd6698b6cc78fbe0c2ea41be867f6ed612811963", size = 4423126, upload-time = "2025-10-15T23:18:04.85Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/cd/c7/f65027c2810e14c3e7268353b1681932b87e5a48e65505d8cc17c99e36ae/cryptography-46.0.3-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:3b51b8ca4f1c6453d8829e1eb7299499ca7f313900dd4d89a24b8b87c0a780d4", size = 4686573, upload-time = "2025-10-15T23:18:06.908Z" },
{ url = "https://files.pythonhosted.org/packages/0a/6e/1c8331ddf91ca4730ab3086a0f1be19c65510a33b5a441cb334e7a2d2560/cryptography-46.0.3-cp38-abi3-win32.whl", hash = "sha256:6276eb85ef938dc035d59b87c8a7dc559a232f954962520137529d77b18ff1df", size = 3036695, upload-time = "2025-10-15T23:18:08.672Z" },
{ url = "https://files.pythonhosted.org/packages/90/45/b0d691df20633eff80955a0fc7695ff9051ffce8b69741444bd9ed7bd0db/cryptography-46.0.3-cp38-abi3-win_amd64.whl", hash = "sha256:416260257577718c05135c55958b674000baef9a1c7d9e8f306ec60d71db850f", size = 3501720, upload-time = "2025-10-15T23:18:10.632Z" },
{ url = "https://files.pythonhosted.org/packages/e8/cb/2da4cc83f5edb9c3257d09e1e7ab7b23f049c7962cae8d842bbef0a9cec9/cryptography-46.0.3-cp38-abi3-win_arm64.whl", hash = "sha256:d89c3468de4cdc4f08a57e214384d0471911a3830fcdaf7a8cc587e42a866372", size = 2918740, upload-time = "2025-10-15T23:18:12.277Z" },
]
[[package]]
name = "deepdiff"
version = "8.6.1"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "orderly-set" },
]
sdist = { url = "https://files.pythonhosted.org/packages/19/76/36c9aab3d5c19a94091f7c6c6e784efca50d87b124bf026c36e94719f33c/deepdiff-8.6.1.tar.gz", hash = "sha256:ec56d7a769ca80891b5200ec7bd41eec300ced91ebcc7797b41eb2b3f3ff643a", size = 634054, upload-time = "2025-09-03T19:40:41.461Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/f7/e6/efe534ef0952b531b630780e19cabd416e2032697019d5295defc6ef9bd9/deepdiff-8.6.1-py3-none-any.whl", hash = "sha256:ee8708a7f7d37fb273a541fa24ad010ed484192cd0c4ffc0fa0ed5e2d4b9e78b", size = 91378, upload-time = "2025-09-03T19:40:39.679Z" },
]
[[package]]
name = "idna"
version = "3.11"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/6f/6d/0703ccc57f3a7233505399edb88de3cbd678da106337b9fcde432b65ed60/idna-3.11.tar.gz", hash = "sha256:795dafcc9c04ed0c1fb032c2aa73654d8e8c5023a7df64a53f39190ada629902", size = 194582, upload-time = "2025-10-12T14:55:20.501Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/0e/61/66938bbb5fc52dbdf84594873d5b51fb1f7c7794e9c0f5bd885f30bc507b/idna-3.11-py3-none-any.whl", hash = "sha256:771a87f49d9defaf64091e6e6fe9c18d4833f140bd19464795bc32d966ca37ea", size = 71008, upload-time = "2025-10-12T14:55:18.883Z" },
]
[[package]]
name = "iniconfig"
version = "2.3.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/72/34/14ca021ce8e5dfedc35312d08ba8bf51fdd999c576889fc2c24cb97f4f10/iniconfig-2.3.0.tar.gz", hash = "sha256:c76315c77db068650d49c5b56314774a7804df16fee4402c1f19d6d15d8c4730", size = 20503, upload-time = "2025-10-18T21:55:43.219Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/cb/b1/3846dd7f199d53cb17f49cba7e651e9ce294d8497c8c150530ed11865bb8/iniconfig-2.3.0-py3-none-any.whl", hash = "sha256:f631c04d2c48c52b84d0d0549c99ff3859c98df65b3101406327ecc7d53fbf12", size = 7484, upload-time = "2025-10-18T21:55:41.639Z" },
]
[[package]]
name = "jmespath"
version = "1.0.1"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/00/2a/e867e8531cf3e36b41201936b7fa7ba7b5702dbef42922193f05c8976cd6/jmespath-1.0.1.tar.gz", hash = "sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe", size = 25843, upload-time = "2022-06-17T18:00:12.224Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/31/b4/b9b800c45527aadd64d5b442f9b932b00648617eb5d63d2c7a6587b7cafc/jmespath-1.0.1-py3-none-any.whl", hash = "sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980", size = 20256, upload-time = "2022-06-17T18:00:10.251Z" },
]
[[package]]
name = "jsonpath-ng"
version = "1.7.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "ply" },
]
sdist = { url = "https://files.pythonhosted.org/packages/6d/86/08646239a313f895186ff0a4573452038eed8c86f54380b3ebac34d32fb2/jsonpath-ng-1.7.0.tar.gz", hash = "sha256:f6f5f7fd4e5ff79c785f1573b394043b39849fb2bb47bcead935d12b00beab3c", size = 37838, upload-time = "2024-10-11T15:41:42.404Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/35/5a/73ecb3d82f8615f32ccdadeb9356726d6cae3a4bbc840b437ceb95708063/jsonpath_ng-1.7.0-py3-none-any.whl", hash = "sha256:f3d7f9e848cba1b6da28c55b1c26ff915dc9e0b1ba7e752a53d6da8d5cbd00b6", size = 30105, upload-time = "2024-11-20T17:58:30.418Z" },
]
[[package]]
name = "orderly-set"
version = "5.5.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/4a/88/39c83c35d5e97cc203e9e77a4f93bf87ec89cf6a22ac4818fdcc65d66584/orderly_set-5.5.0.tar.gz", hash = "sha256:e87185c8e4d8afa64e7f8160ee2c542a475b738bc891dc3f58102e654125e6ce", size = 27414, upload-time = "2025-07-10T20:10:55.885Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/12/27/fb8d7338b4d551900fa3e580acbe7a0cf655d940e164cb5c00ec31961094/orderly_set-5.5.0-py3-none-any.whl", hash = "sha256:46f0b801948e98f427b412fcabb831677194c05c3b699b80de260374baa0b1e7", size = 13068, upload-time = "2025-07-10T20:10:54.377Z" },
]
[[package]]
name = "packaging"
version = "25.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/a1/d4/1fc4078c65507b51b96ca8f8c3ba19e6a61c8253c72794544580a7b6c24d/packaging-25.0.tar.gz", hash = "sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f", size = 165727, upload-time = "2025-04-19T11:48:59.673Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/20/12/38679034af332785aac8774540895e234f4d07f7545804097de4b666afd8/packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484", size = 66469, upload-time = "2025-04-19T11:48:57.875Z" },
]
[[package]]
name = "pluggy"
version = "1.6.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/f9/e2/3e91f31a7d2b083fe6ef3fa267035b518369d9511ffab804f839851d2779/pluggy-1.6.0.tar.gz", hash = "sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3", size = 69412, upload-time = "2025-05-15T12:30:07.975Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/54/20/4d324d65cc6d9205fabedc306948156824eb9f0ee1633355a8f7ec5c66bf/pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746", size = 20538, upload-time = "2025-05-15T12:30:06.134Z" },
]
[[package]]
name = "ply"
version = "3.11"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/e5/69/882ee5c9d017149285cab114ebeab373308ef0f874fcdac9beb90e0ac4da/ply-3.11.tar.gz", hash = "sha256:00c7c1aaa88358b9c765b6d3000c6eec0ba42abca5351b095321aef446081da3", size = 159130, upload-time = "2018-02-15T19:01:31.097Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/a3/58/35da89ee790598a0700ea49b2a66594140f44dec458c07e8e3d4979137fc/ply-3.11-py2.py3-none-any.whl", hash = "sha256:096f9b8350b65ebd2fd1346b12452efe5b9607f7482813ffca50c22722a807ce", size = 49567, upload-time = "2018-02-15T19:01:27.172Z" },
]
[[package]]
name = "psutil"
version = "7.1.3"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/e1/88/bdd0a41e5857d5d703287598cbf08dad90aed56774ea52ae071bae9071b6/psutil-7.1.3.tar.gz", hash = "sha256:6c86281738d77335af7aec228328e944b30930899ea760ecf33a4dba66be5e74", size = 489059, upload-time = "2025-11-02T12:25:54.619Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/bd/93/0c49e776b8734fef56ec9c5c57f923922f2cf0497d62e0f419465f28f3d0/psutil-7.1.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:0005da714eee687b4b8decd3d6cc7c6db36215c9e74e5ad2264b90c3df7d92dc", size = 239751, upload-time = "2025-11-02T12:25:58.161Z" },
{ url = "https://files.pythonhosted.org/packages/6f/8d/b31e39c769e70780f007969815195a55c81a63efebdd4dbe9e7a113adb2f/psutil-7.1.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:19644c85dcb987e35eeeaefdc3915d059dac7bd1167cdcdbf27e0ce2df0c08c0", size = 240368, upload-time = "2025-11-02T12:26:00.491Z" },
{ url = "https://files.pythonhosted.org/packages/62/61/23fd4acc3c9eebbf6b6c78bcd89e5d020cfde4acf0a9233e9d4e3fa698b4/psutil-7.1.3-cp313-cp313t-manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:95ef04cf2e5ba0ab9eaafc4a11eaae91b44f4ef5541acd2ee91d9108d00d59a7", size = 287134, upload-time = "2025-11-02T12:26:02.613Z" },
{ url = "https://files.pythonhosted.org/packages/30/1c/f921a009ea9ceb51aa355cb0cc118f68d354db36eae18174bab63affb3e6/psutil-7.1.3-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1068c303be3a72f8e18e412c5b2a8f6d31750fb152f9cb106b54090296c9d251", size = 289904, upload-time = "2025-11-02T12:26:05.207Z" },
{ url = "https://files.pythonhosted.org/packages/a6/82/62d68066e13e46a5116df187d319d1724b3f437ddd0f958756fc052677f4/psutil-7.1.3-cp313-cp313t-win_amd64.whl", hash = "sha256:18349c5c24b06ac5612c0428ec2a0331c26443d259e2a0144a9b24b4395b58fa", size = 249642, upload-time = "2025-11-02T12:26:07.447Z" },
{ url = "https://files.pythonhosted.org/packages/df/ad/c1cd5fe965c14a0392112f68362cfceb5230819dbb5b1888950d18a11d9f/psutil-7.1.3-cp313-cp313t-win_arm64.whl", hash = "sha256:c525ffa774fe4496282fb0b1187725793de3e7c6b29e41562733cae9ada151ee", size = 245518, upload-time = "2025-11-02T12:26:09.719Z" },
{ url = "https://files.pythonhosted.org/packages/2e/bb/6670bded3e3236eb4287c7bcdc167e9fae6e1e9286e437f7111caed2f909/psutil-7.1.3-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:b403da1df4d6d43973dc004d19cee3b848e998ae3154cc8097d139b77156c353", size = 239843, upload-time = "2025-11-02T12:26:11.968Z" },
{ url = "https://files.pythonhosted.org/packages/b8/66/853d50e75a38c9a7370ddbeefabdd3d3116b9c31ef94dc92c6729bc36bec/psutil-7.1.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:ad81425efc5e75da3f39b3e636293360ad8d0b49bed7df824c79764fb4ba9b8b", size = 240369, upload-time = "2025-11-02T12:26:14.358Z" },
{ url = "https://files.pythonhosted.org/packages/41/bd/313aba97cb5bfb26916dc29cf0646cbe4dd6a89ca69e8c6edce654876d39/psutil-7.1.3-cp314-cp314t-manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8f33a3702e167783a9213db10ad29650ebf383946e91bc77f28a5eb083496bc9", size = 288210, upload-time = "2025-11-02T12:26:16.699Z" },
{ url = "https://files.pythonhosted.org/packages/c2/fa/76e3c06e760927a0cfb5705eb38164254de34e9bd86db656d4dbaa228b04/psutil-7.1.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:fac9cd332c67f4422504297889da5ab7e05fd11e3c4392140f7370f4208ded1f", size = 291182, upload-time = "2025-11-02T12:26:18.848Z" },
{ url = "https://files.pythonhosted.org/packages/0f/1d/5774a91607035ee5078b8fd747686ebec28a962f178712de100d00b78a32/psutil-7.1.3-cp314-cp314t-win_amd64.whl", hash = "sha256:3792983e23b69843aea49c8f5b8f115572c5ab64c153bada5270086a2123c7e7", size = 250466, upload-time = "2025-11-02T12:26:21.183Z" },
{ url = "https://files.pythonhosted.org/packages/00/ca/e426584bacb43a5cb1ac91fae1937f478cd8fbe5e4ff96574e698a2c77cd/psutil-7.1.3-cp314-cp314t-win_arm64.whl", hash = "sha256:31d77fcedb7529f27bb3a0472bea9334349f9a04160e8e6e5020f22c59893264", size = 245756, upload-time = "2025-11-02T12:26:23.148Z" },
{ url = "https://files.pythonhosted.org/packages/ef/94/46b9154a800253e7ecff5aaacdf8ebf43db99de4a2dfa18575b02548654e/psutil-7.1.3-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:2bdbcd0e58ca14996a42adf3621a6244f1bb2e2e528886959c72cf1e326677ab", size = 238359, upload-time = "2025-11-02T12:26:25.284Z" },
{ url = "https://files.pythonhosted.org/packages/68/3a/9f93cff5c025029a36d9a92fef47220ab4692ee7f2be0fba9f92813d0cb8/psutil-7.1.3-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:bc31fa00f1fbc3c3802141eede66f3a2d51d89716a194bf2cd6fc68310a19880", size = 239171, upload-time = "2025-11-02T12:26:27.23Z" },
{ url = "https://files.pythonhosted.org/packages/ce/b1/5f49af514f76431ba4eea935b8ad3725cdeb397e9245ab919dbc1d1dc20f/psutil-7.1.3-cp36-abi3-manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3bb428f9f05c1225a558f53e30ccbad9930b11c3fc206836242de1091d3e7dd3", size = 263261, upload-time = "2025-11-02T12:26:29.48Z" },
{ url = "https://files.pythonhosted.org/packages/e0/95/992c8816a74016eb095e73585d747e0a8ea21a061ed3689474fabb29a395/psutil-7.1.3-cp36-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:56d974e02ca2c8eb4812c3f76c30e28836fffc311d55d979f1465c1feeb2b68b", size = 264635, upload-time = "2025-11-02T12:26:31.74Z" },
{ url = "https://files.pythonhosted.org/packages/55/4c/c3ed1a622b6ae2fd3c945a366e64eb35247a31e4db16cf5095e269e8eb3c/psutil-7.1.3-cp37-abi3-win_amd64.whl", hash = "sha256:f39c2c19fe824b47484b96f9692932248a54c43799a84282cfe58d05a6449efd", size = 247633, upload-time = "2025-11-02T12:26:33.887Z" },
{ url = "https://files.pythonhosted.org/packages/c9/ad/33b2ccec09bf96c2b2ef3f9a6f66baac8253d7565d8839e024a6b905d45d/psutil-7.1.3-cp37-abi3-win_arm64.whl", hash = "sha256:bd0d69cee829226a761e92f28140bec9a5ee9d5b4fb4b0cc589068dbfff559b1", size = 244608, upload-time = "2025-11-02T12:26:36.136Z" },
]
[[package]]
name = "pycparser"
version = "2.23"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/fe/cf/d2d3b9f5699fb1e4615c8e32ff220203e43b248e1dfcc6736ad9057731ca/pycparser-2.23.tar.gz", hash = "sha256:78816d4f24add8f10a06d6f05b4d424ad9e96cfebf68a4ddc99c65c0720d00c2", size = 173734, upload-time = "2025-09-09T13:23:47.91Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/a0/e3/59cd50310fc9b59512193629e1984c1f95e5c8ae6e5d8c69532ccc65a7fe/pycparser-2.23-py3-none-any.whl", hash = "sha256:e5c6e8d3fbad53479cab09ac03729e0a9faf2bee3db8208a550daf5af81a5934", size = 118140, upload-time = "2025-09-09T13:23:46.651Z" },
]
[[package]]
name = "pygments"
version = "2.19.2"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/b0/77/a5b8c569bf593b0140bde72ea885a803b82086995367bf2037de0159d924/pygments-2.19.2.tar.gz", hash = "sha256:636cb2477cec7f8952536970bc533bc43743542f70392ae026374600add5b887", size = 4968631, upload-time = "2025-06-21T13:39:12.283Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/c7/21/705964c7812476f378728bdf590ca4b771ec72385c533964653c68e86bdc/pygments-2.19.2-py3-none-any.whl", hash = "sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b", size = 1225217, upload-time = "2025-06-21T13:39:07.939Z" },
]
[[package]]
name = "pytest"
version = "9.0.1"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "colorama", marker = "sys_platform == 'win32'" },
{ name = "iniconfig" },
{ name = "packaging" },
{ name = "pluggy" },
{ name = "pygments" },
]
sdist = { url = "https://files.pythonhosted.org/packages/07/56/f013048ac4bc4c1d9be45afd4ab209ea62822fb1598f40687e6bf45dcea4/pytest-9.0.1.tar.gz", hash = "sha256:3e9c069ea73583e255c3b21cf46b8d3c56f6e3a1a8f6da94ccb0fcf57b9d73c8", size = 1564125, upload-time = "2025-11-12T13:05:09.333Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/0b/8b/6300fb80f858cda1c51ffa17075df5d846757081d11ab4aa35cef9e6258b/pytest-9.0.1-py3-none-any.whl", hash = "sha256:67be0030d194df2dfa7b556f2e56fb3c3315bd5c8822c6951162b92b32ce7dad", size = 373668, upload-time = "2025-11-12T13:05:07.379Z" },
]
[[package]]
name = "pytest-cov"
version = "7.0.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "coverage" },
{ name = "pluggy" },
{ name = "pytest" },
]
sdist = { url = "https://files.pythonhosted.org/packages/5e/f7/c933acc76f5208b3b00089573cf6a2bc26dc80a8aece8f52bb7d6b1855ca/pytest_cov-7.0.0.tar.gz", hash = "sha256:33c97eda2e049a0c5298e91f519302a1334c26ac65c1a483d6206fd458361af1", size = 54328, upload-time = "2025-09-09T10:57:02.113Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/ee/49/1377b49de7d0c1ce41292161ea0f721913fa8722c19fb9c1e3aa0367eecb/pytest_cov-7.0.0-py3-none-any.whl", hash = "sha256:3b8e9558b16cc1479da72058bdecf8073661c7f57f7d3c5f22a1c23507f2d861", size = 22424, upload-time = "2025-09-09T10:57:00.695Z" },
]
[[package]]
name = "requests"
version = "2.32.5"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "certifi" },
{ name = "charset-normalizer" },
{ name = "idna" },
{ name = "urllib3" },
]
sdist = { url = "https://files.pythonhosted.org/packages/c9/74/b3ff8e6c8446842c3f5c837e9c3dfcfe2018ea6ecef224c710c85ef728f4/requests-2.32.5.tar.gz", hash = "sha256:dbba0bac56e100853db0ea71b82b4dfd5fe2bf6d3754a8893c3af500cec7d7cf", size = 134517, upload-time = "2025-08-18T20:46:02.573Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/1e/db/4254e3eabe8020b458f1a747140d32277ec7a271daf1d235b70dc0b4e6e3/requests-2.32.5-py3-none-any.whl", hash = "sha256:2462f94637a34fd532264295e186976db0f5d453d1cdd31473c85a6a161affb6", size = 64738, upload-time = "2025-08-18T20:46:00.542Z" },
]
[[package]]
name = "urllib3"
version = "2.5.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/15/22/9ee70a2574a4f4599c47dd506532914ce044817c7752a79b6a51286319bc/urllib3-2.5.0.tar.gz", hash = "sha256:3fc47733c7e419d4bc3f6b3dc2b4f890bb743906a30d56ba4a5bfa4bbff92760", size = 393185, upload-time = "2025-06-18T14:07:41.644Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/a7/c2/fe1e52489ae3122415c51f387e221dd0773709bad6c6cdaa599e8a2c5185/urllib3-2.5.0-py3-none-any.whl", hash = "sha256:e6b01673c0fa6a13e374b50871808eb3bf7046c4b125b216f6bf1cc604cff0dc", size = 129795, upload-time = "2025-06-18T14:07:40.39Z" },
]