Compare commits
46 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
d098eb58f3 | ||
|
|
5319a059ad | ||
|
|
163b8c4018 | ||
|
|
6322b95068 | ||
|
|
715ed1f9c2 | ||
|
|
82a759dd21 | ||
|
|
fe913608c4 | ||
|
|
79f9c5d1c6 | ||
|
|
3d091129e2 | ||
|
|
1a978f786d | ||
|
|
51669d3c5f | ||
|
|
d128dcb479 | ||
|
|
84286593f6 | ||
|
|
8d97f09e5e | ||
|
|
2748bc19be | ||
|
|
0b3c8fc774 | ||
|
|
7da18e0f00 | ||
|
|
49e38081ad | ||
|
|
a14f993a31 | ||
|
|
ae938f9909 | ||
|
|
f91e0bb93a | ||
|
|
d3f61005cf | ||
|
|
2923a3e88b | ||
|
|
a73ced0067 | ||
|
|
f89b91fe7f | ||
|
|
5950485d46 | ||
|
|
f349927a63 | ||
|
|
dfe8890598 | ||
|
|
d224876a8e | ||
|
|
17e8c76b94 | ||
|
|
9034a31cd6 | ||
|
|
523e61c9f7 | ||
|
|
cf575ded90 | ||
|
|
11a75d8532 | ||
|
|
6593e11332 | ||
|
|
c310f669d6 | ||
|
|
f327f47c3f | ||
|
|
acd61e825e | ||
|
|
895701da59 | ||
|
|
e0fb0db1f0 | ||
|
|
dc7e56106e | ||
|
|
90e5179980 | ||
|
|
9db39003c4 | ||
|
|
4ffe372434 | ||
|
|
a00c27c465 | ||
|
|
1f7f4b8d53 |
1
.gitignore
vendored
1
.gitignore
vendored
@@ -4,3 +4,4 @@
|
|||||||
.mypy_cache/
|
.mypy_cache/
|
||||||
**/.env
|
**/.env
|
||||||
.coverage
|
.coverage
|
||||||
|
uv.lock
|
||||||
|
|||||||
@@ -1,19 +1,20 @@
|
|||||||
# MARK: Project info
|
# MARK: Project info
|
||||||
[project]
|
[project]
|
||||||
name = "corelibs"
|
name = "corelibs"
|
||||||
version = "0.39.2"
|
version = "0.48.0"
|
||||||
description = "Collection of utils for Python scripts"
|
description = "Collection of utils for Python scripts"
|
||||||
readme = "README.md"
|
readme = "README.md"
|
||||||
requires-python = ">=3.13"
|
requires-python = ">=3.13"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"corelibs-datetime>=1.0.1",
|
"corelibs-datetime>=1.0.1",
|
||||||
"corelibs-enum-base>=1.0.0",
|
"corelibs-enum-base>=1.0.0",
|
||||||
|
"corelibs-text-colors>=1.0.0",
|
||||||
"corelibs-var>=1.0.0",
|
"corelibs-var>=1.0.0",
|
||||||
"cryptography>=46.0.3",
|
"cryptography>=46.0.3",
|
||||||
"jmespath>=1.0.1",
|
"jmespath>=1.0.1",
|
||||||
"jsonpath-ng>=1.7.0",
|
"jsonpath-ng>=1.7.0",
|
||||||
"psutil>=7.0.0",
|
"psutil>=7.0.0",
|
||||||
"requests>=2.32.4",
|
"requests[socks]>=2.32.5",
|
||||||
]
|
]
|
||||||
|
|
||||||
# MARK: build system
|
# MARK: build system
|
||||||
@@ -33,12 +34,14 @@ publish-url = "https://git.egplusww.jp/api/packages/PyPI/pypi"
|
|||||||
corelibs-enum-base = { index = "opj-pypi" }
|
corelibs-enum-base = { index = "opj-pypi" }
|
||||||
corelibs-datetime = { index = "opj-pypi" }
|
corelibs-datetime = { index = "opj-pypi" }
|
||||||
corelibs-var = { index = "opj-pypi" }
|
corelibs-var = { index = "opj-pypi" }
|
||||||
|
corelibs-text-colors = { index = "opj-pypi" }
|
||||||
|
|
||||||
[dependency-groups]
|
[dependency-groups]
|
||||||
dev = [
|
dev = [
|
||||||
"deepdiff>=8.6.1",
|
"deepdiff>=8.6.1",
|
||||||
"pytest>=8.4.1",
|
"pytest>=8.4.1",
|
||||||
"pytest-cov>=6.2.1",
|
"pytest-cov>=6.2.1",
|
||||||
|
"typing-extensions>=4.15.0",
|
||||||
]
|
]
|
||||||
|
|
||||||
# MARK: Python linting
|
# MARK: Python linting
|
||||||
|
|||||||
@@ -19,9 +19,26 @@ def compile_re(reg: str) -> re.Pattern[str]:
|
|||||||
|
|
||||||
|
|
||||||
# email regex
|
# email regex
|
||||||
EMAIL_BASIC_REGEX: str = r"""
|
SUB_EMAIL_BASIC_REGEX: str = r"""
|
||||||
^[A-Za-z0-9!#$%&'*+\-\/=?^_`{|}~][A-Za-z0-9!#$%:\(\)&'*+\-\/=?^_`{|}~\.]{0,63}
|
[A-Za-z0-9!#$%&'*+\-\/=?^_`{|}~][A-Za-z0-9!#$%:\(\)&'*+\-\/=?^_`{|}~\.]{0,63}
|
||||||
@(?!-)[A-Za-z0-9-]{1,63}(?<!-)(?:\.[A-Za-z0-9-]{1,63}(?<!-))*\.[a-zA-Z]{2,6}$
|
@(?!-)[A-Za-z0-9-]{1,63}(?<!-)(?:\.[A-Za-z0-9-]{1,63}(?<!-))*\.[a-zA-Z]{2,6}
|
||||||
|
"""
|
||||||
|
EMAIL_BASIC_REGEX = rf"^{SUB_EMAIL_BASIC_REGEX}$"
|
||||||
|
# name + email regex for email sending type like "foo bar" <email@mail.com>
|
||||||
|
NAME_EMAIL_SIMPLE_REGEX = r"""
|
||||||
|
^\s*(?:"(?P<name1>[^"]+)"\s*<(?P<email1>[^>]+)>|
|
||||||
|
(?P<name2>.+?)\s*<(?P<email2>[^>]+)>|
|
||||||
|
<(?P<email3>[^>]+)>|
|
||||||
|
(?P<email4>[^\s<>]+))\s*$
|
||||||
|
"""
|
||||||
|
# name + email with the basic regex set
|
||||||
|
NAME_EMAIL_BASIC_REGEX = rf"""
|
||||||
|
^\s*(?:
|
||||||
|
"(?P<name1>[^"]+)"\s*<(?P<email1>{SUB_EMAIL_BASIC_REGEX})>|
|
||||||
|
(?P<name2>.+?)\s*<(?P<email2>{SUB_EMAIL_BASIC_REGEX})>|
|
||||||
|
<(?P<email3>{SUB_EMAIL_BASIC_REGEX})>|
|
||||||
|
(?P<email4>{SUB_EMAIL_BASIC_REGEX})
|
||||||
|
)\s*$
|
||||||
"""
|
"""
|
||||||
# Domain regex with localhost
|
# Domain regex with localhost
|
||||||
DOMAIN_WITH_LOCALHOST_REGEX: str = r"""
|
DOMAIN_WITH_LOCALHOST_REGEX: str = r"""
|
||||||
|
|||||||
23
src/corelibs/check_handling/regex_constants_compiled.py
Normal file
23
src/corelibs/check_handling/regex_constants_compiled.py
Normal file
@@ -0,0 +1,23 @@
|
|||||||
|
"""
|
||||||
|
List of regex compiled strings that can be used
|
||||||
|
"""
|
||||||
|
|
||||||
|
from corelibs.check_handling.regex_constants import (
|
||||||
|
compile_re,
|
||||||
|
EMAIL_BASIC_REGEX,
|
||||||
|
NAME_EMAIL_SIMPLE_REGEX,
|
||||||
|
NAME_EMAIL_BASIC_REGEX,
|
||||||
|
DOMAIN_WITH_LOCALHOST_REGEX,
|
||||||
|
DOMAIN_WITH_LOCALHOST_PORT_REGEX,
|
||||||
|
DOMAIN_REGEX
|
||||||
|
)
|
||||||
|
|
||||||
|
# all above in compiled form
|
||||||
|
COMPILED_EMAIL_BASIC_REGEX = compile_re(EMAIL_BASIC_REGEX)
|
||||||
|
COMPILED_NAME_EMAIL_SIMPLE_REGEX = compile_re(NAME_EMAIL_SIMPLE_REGEX)
|
||||||
|
COMPILED_NAME_EMAIL_BASIC_REGEX = compile_re(NAME_EMAIL_BASIC_REGEX)
|
||||||
|
COMPILED_DOMAIN_WITH_LOCALHOST_REGEX = compile_re(DOMAIN_WITH_LOCALHOST_REGEX)
|
||||||
|
COMPILED_DOMAIN_WITH_LOCALHOST_PORT_REGEX = compile_re(DOMAIN_WITH_LOCALHOST_PORT_REGEX)
|
||||||
|
COMPILED_DOMAIN_REGEX = compile_re(DOMAIN_REGEX)
|
||||||
|
|
||||||
|
# __END__
|
||||||
@@ -53,6 +53,9 @@ class SettingsLoader:
|
|||||||
# for check settings, abort flag
|
# for check settings, abort flag
|
||||||
self.__check_settings_abort: bool = False
|
self.__check_settings_abort: bool = False
|
||||||
|
|
||||||
|
# error messages for raise ValueError
|
||||||
|
self.__error_msg: list[str] = []
|
||||||
|
|
||||||
# MARK: load settings
|
# MARK: load settings
|
||||||
def load_settings(
|
def load_settings(
|
||||||
self,
|
self,
|
||||||
@@ -87,12 +90,16 @@ class SettingsLoader:
|
|||||||
Returns:
|
Returns:
|
||||||
dict[str, str]: key = value list
|
dict[str, str]: key = value list
|
||||||
"""
|
"""
|
||||||
|
# reset error message list before run
|
||||||
|
self.__error_msg = []
|
||||||
# default set entries
|
# default set entries
|
||||||
entry_set_empty: dict[str, str | None] = {}
|
entry_set_empty: dict[str, str | None] = {}
|
||||||
# entries that have to be split
|
# entries that have to be split
|
||||||
entry_split_char: dict[str, str] = {}
|
entry_split_char: dict[str, str] = {}
|
||||||
# entries that should be converted
|
# entries that should be converted
|
||||||
entry_convert: dict[str, str] = {}
|
entry_convert: dict[str, str] = {}
|
||||||
|
# no args to set
|
||||||
|
args_overrride: list[str] = []
|
||||||
# all the settings for the config id given
|
# all the settings for the config id given
|
||||||
settings: dict[str, dict[str, Any]] = {
|
settings: dict[str, dict[str, Any]] = {
|
||||||
config_id: {},
|
config_id: {},
|
||||||
@@ -107,7 +114,7 @@ class SettingsLoader:
|
|||||||
if allow_not_exist is True:
|
if allow_not_exist is True:
|
||||||
return {}
|
return {}
|
||||||
raise ValueError(self.__print(
|
raise ValueError(self.__print(
|
||||||
f"[!] Cannot read [{config_id}] block in the {self.config_file}: {e}",
|
f"[!] Cannot read [{config_id}] block in the file {self.config_file}: {e}",
|
||||||
'CRITICAL'
|
'CRITICAL'
|
||||||
)) from e
|
)) from e
|
||||||
try:
|
try:
|
||||||
@@ -162,12 +169,17 @@ class SettingsLoader:
|
|||||||
f"[!] In [{config_id}] the split character setup for entry failed: {check}: {e}",
|
f"[!] In [{config_id}] the split character setup for entry failed: {check}: {e}",
|
||||||
'CRITICAL'
|
'CRITICAL'
|
||||||
)) from e
|
)) from e
|
||||||
|
if check == "args_override:yes":
|
||||||
|
args_overrride.append(key)
|
||||||
if skip:
|
if skip:
|
||||||
continue
|
continue
|
||||||
settings[config_id][key] = [
|
if settings[config_id][key]:
|
||||||
__value.replace(" ", "")
|
settings[config_id][key] = [
|
||||||
for __value in settings[config_id][key].split(split_char)
|
__value.replace(" ", "")
|
||||||
]
|
for __value in settings[config_id][key].split(split_char)
|
||||||
|
]
|
||||||
|
else:
|
||||||
|
settings[config_id][key] = []
|
||||||
except KeyError as e:
|
except KeyError as e:
|
||||||
raise ValueError(self.__print(
|
raise ValueError(self.__print(
|
||||||
f"[!] Cannot read [{config_id}] block because the entry [{e}] could not be found",
|
f"[!] Cannot read [{config_id}] block because the entry [{e}] could not be found",
|
||||||
@@ -177,17 +189,23 @@ class SettingsLoader:
|
|||||||
# ignore error if arguments are set
|
# ignore error if arguments are set
|
||||||
if not self.__check_arguments(config_validate, True):
|
if not self.__check_arguments(config_validate, True):
|
||||||
raise ValueError(self.__print(f"[!] Cannot find file: {self.config_file}", 'CRITICAL'))
|
raise ValueError(self.__print(f"[!] Cannot find file: {self.config_file}", 'CRITICAL'))
|
||||||
else:
|
# base set
|
||||||
# base set
|
settings[config_id] = {}
|
||||||
settings[config_id] = {}
|
|
||||||
# make sure all are set
|
# make sure all are set
|
||||||
# if we have arguments set, this override config settings
|
# if we have arguments set, this override config settings
|
||||||
error: bool = False
|
error: bool = False
|
||||||
for entry, validate in config_validate.items():
|
for entry, validate in config_validate.items():
|
||||||
# if we have command line option set, this one overrides config
|
# if we have command line option set, this one overrides config
|
||||||
if self.__get_arg(entry):
|
if (args_entry := self.__get_arg(entry)) is not None:
|
||||||
self.__print(f"[*] Command line option override for: {entry}", 'WARNING')
|
self.__print(f"[*] Command line option override for: {entry}", 'WARNING')
|
||||||
settings[config_id][entry] = self.args.get(entry)
|
if (
|
||||||
|
# only set if flagged as allowed override from args
|
||||||
|
entry in args_overrride and
|
||||||
|
(isinstance(args_entry, list) and entry_split_char.get(entry)) or
|
||||||
|
(not isinstance(args_entry, list) and not entry_split_char.get(entry))
|
||||||
|
):
|
||||||
|
# args is list, but entry has not split, do not set
|
||||||
|
settings[config_id][entry] = args_entry
|
||||||
# validate checks
|
# validate checks
|
||||||
for check in validate:
|
for check in validate:
|
||||||
# CHECKS
|
# CHECKS
|
||||||
@@ -263,7 +281,10 @@ class SettingsLoader:
|
|||||||
error = True
|
error = True
|
||||||
self.__print(f"[!] Missing content entry for: {entry}", 'ERROR')
|
self.__print(f"[!] Missing content entry for: {entry}", 'ERROR')
|
||||||
if error is True:
|
if error is True:
|
||||||
raise ValueError(self.__print("[!] Missing or incorrect settings data. Cannot proceed", 'CRITICAL'))
|
self.__print("[!] Missing or incorrect settings data. Cannot proceed", 'CRITICAL')
|
||||||
|
raise ValueError(
|
||||||
|
"Missing or incorrect settings data. Cannot proceed: " + "; ".join(self.__error_msg)
|
||||||
|
)
|
||||||
# set empty
|
# set empty
|
||||||
for [entry, empty_set] in entry_set_empty.items():
|
for [entry, empty_set] in entry_set_empty.items():
|
||||||
# if set, skip, else set to empty value
|
# if set, skip, else set to empty value
|
||||||
@@ -277,10 +298,8 @@ class SettingsLoader:
|
|||||||
elif convert_type in ["float", "any"] and is_float(settings[config_id][entry]):
|
elif convert_type in ["float", "any"] and is_float(settings[config_id][entry]):
|
||||||
settings[config_id][entry] = float(settings[config_id][entry])
|
settings[config_id][entry] = float(settings[config_id][entry])
|
||||||
elif convert_type in ["bool", "any"] and (
|
elif convert_type in ["bool", "any"] and (
|
||||||
settings[config_id][entry] == "true" or
|
settings[config_id][entry].lower() == "true" or
|
||||||
settings[config_id][entry] == "True" or
|
settings[config_id][entry].lower() == "false"
|
||||||
settings[config_id][entry] == "false" or
|
|
||||||
settings[config_id][entry] == "False"
|
|
||||||
):
|
):
|
||||||
try:
|
try:
|
||||||
settings[config_id][entry] = str_to_bool(settings[config_id][entry])
|
settings[config_id][entry] = str_to_bool(settings[config_id][entry])
|
||||||
@@ -558,7 +577,10 @@ class SettingsLoader:
|
|||||||
self.log.logger.log(Log.get_log_level_int(level), msg, stacklevel=2)
|
self.log.logger.log(Log.get_log_level_int(level), msg, stacklevel=2)
|
||||||
if self.log is None or self.always_print:
|
if self.log is None or self.always_print:
|
||||||
if print_error:
|
if print_error:
|
||||||
print(msg)
|
print(f"[SettingsLoader] {msg}")
|
||||||
|
if level == 'ERROR':
|
||||||
|
# remove any prefix [!] for error message list
|
||||||
|
self.__error_msg.append(msg.replace('[!] ', '').strip())
|
||||||
return msg
|
return msg
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
@@ -7,10 +7,13 @@ from typing import Any, Sequence
|
|||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
from collections import Counter
|
from collections import Counter
|
||||||
import csv
|
import csv
|
||||||
|
from corelibs.file_handling.file_bom_encoding import is_bom_encoded, is_bom_encoded_info
|
||||||
from corelibs.exceptions.csv_exceptions import (
|
from corelibs.exceptions.csv_exceptions import (
|
||||||
NoCsvReader, CompulsoryCsvHeaderCheckFailed, CsvHeaderDataMissing
|
NoCsvReader, CompulsoryCsvHeaderCheckFailed, CsvHeaderDataMissing
|
||||||
)
|
)
|
||||||
|
|
||||||
|
ENCODING = 'utf-8'
|
||||||
|
ENCODING_UTF8_SIG = 'utf-8-sig'
|
||||||
DELIMITER = ","
|
DELIMITER = ","
|
||||||
QUOTECHAR = '"'
|
QUOTECHAR = '"'
|
||||||
# type: _QuotingType
|
# type: _QuotingType
|
||||||
@@ -27,6 +30,7 @@ class CsvWriter:
|
|||||||
file_name: Path,
|
file_name: Path,
|
||||||
header_mapping: dict[str, str],
|
header_mapping: dict[str, str],
|
||||||
header_order: list[str] | None = None,
|
header_order: list[str] | None = None,
|
||||||
|
encoding: str = ENCODING,
|
||||||
delimiter: str = DELIMITER,
|
delimiter: str = DELIMITER,
|
||||||
quotechar: str = QUOTECHAR,
|
quotechar: str = QUOTECHAR,
|
||||||
quoting: Any = QUOTING,
|
quoting: Any = QUOTING,
|
||||||
@@ -38,6 +42,7 @@ class CsvWriter:
|
|||||||
self.__delimiter = delimiter
|
self.__delimiter = delimiter
|
||||||
self.__quotechar = quotechar
|
self.__quotechar = quotechar
|
||||||
self.__quoting = quoting
|
self.__quoting = quoting
|
||||||
|
self.__encoding = encoding
|
||||||
self.csv_file_writer = self.__open_csv(header_order)
|
self.csv_file_writer = self.__open_csv(header_order)
|
||||||
|
|
||||||
def __open_csv(self, header_order: list[str] | None) -> csv.DictWriter[str]:
|
def __open_csv(self, header_order: list[str] | None) -> csv.DictWriter[str]:
|
||||||
@@ -69,7 +74,8 @@ class CsvWriter:
|
|||||||
try:
|
try:
|
||||||
fp = open(
|
fp = open(
|
||||||
self.__file_name,
|
self.__file_name,
|
||||||
"w", encoding="utf-8"
|
"w",
|
||||||
|
encoding=self.__encoding
|
||||||
)
|
)
|
||||||
csv_file_writer = csv.DictWriter(
|
csv_file_writer = csv.DictWriter(
|
||||||
fp,
|
fp,
|
||||||
@@ -109,6 +115,7 @@ class CsvReader:
|
|||||||
self,
|
self,
|
||||||
file_name: Path,
|
file_name: Path,
|
||||||
header_check: Sequence[str] | None = None,
|
header_check: Sequence[str] | None = None,
|
||||||
|
encoding: str = ENCODING,
|
||||||
delimiter: str = DELIMITER,
|
delimiter: str = DELIMITER,
|
||||||
quotechar: str = QUOTECHAR,
|
quotechar: str = QUOTECHAR,
|
||||||
quoting: Any = QUOTING,
|
quoting: Any = QUOTING,
|
||||||
@@ -118,6 +125,7 @@ class CsvReader:
|
|||||||
self.__delimiter = delimiter
|
self.__delimiter = delimiter
|
||||||
self.__quotechar = quotechar
|
self.__quotechar = quotechar
|
||||||
self.__quoting = quoting
|
self.__quoting = quoting
|
||||||
|
self.__encoding = encoding
|
||||||
self.header: Sequence[str] | None = None
|
self.header: Sequence[str] | None = None
|
||||||
self.csv_file_reader = self.__open_csv()
|
self.csv_file_reader = self.__open_csv()
|
||||||
|
|
||||||
@@ -129,9 +137,16 @@ class CsvReader:
|
|||||||
csv.DictReader | None: _description_
|
csv.DictReader | None: _description_
|
||||||
"""
|
"""
|
||||||
try:
|
try:
|
||||||
|
# if UTF style check if this is BOM
|
||||||
|
if self.__encoding.lower().startswith('utf-') and is_bom_encoded(self.__file_name):
|
||||||
|
bom_info = is_bom_encoded_info(self.__file_name)
|
||||||
|
if bom_info['encoding'] == 'utf-8':
|
||||||
|
self.__encoding = ENCODING_UTF8_SIG
|
||||||
|
else:
|
||||||
|
self.__encoding = bom_info['encoding'] or self.__encoding
|
||||||
fp = open(
|
fp = open(
|
||||||
self.__file_name,
|
self.__file_name,
|
||||||
"r", encoding="utf-8"
|
"r", encoding=self.__encoding
|
||||||
)
|
)
|
||||||
csv_file_reader = csv.DictReader(
|
csv_file_reader = csv.DictReader(
|
||||||
fp,
|
fp,
|
||||||
|
|||||||
76
src/corelibs/db_handling/sql_main.py
Normal file
76
src/corelibs/db_handling/sql_main.py
Normal file
@@ -0,0 +1,76 @@
|
|||||||
|
"""
|
||||||
|
Main SQL base for any SQL calls
|
||||||
|
This is a wrapper for SQLiteIO or other future DB Interfaces
|
||||||
|
[Note: at the moment only SQLiteIO is implemented]
|
||||||
|
- on class creation connection with ValueError on fail
|
||||||
|
- connect method checks if already connected and warns
|
||||||
|
- connection class fails with ValueError if not valid target is selected (SQL wrapper type)
|
||||||
|
- connected check class method
|
||||||
|
- a process class that returns data as list or False if end or error
|
||||||
|
|
||||||
|
TODO: adapt more CoreLibs DB IO class flow here
|
||||||
|
"""
|
||||||
|
|
||||||
|
from typing import TYPE_CHECKING, Any, Literal
|
||||||
|
from corelibs.debug_handling.debug_helpers import call_stack
|
||||||
|
from corelibs.db_handling.sqlite_io import SQLiteIO
|
||||||
|
if TYPE_CHECKING:
|
||||||
|
from corelibs.logging_handling.log import Logger
|
||||||
|
|
||||||
|
|
||||||
|
IDENT_SPLIT_CHARACTER: str = ':'
|
||||||
|
|
||||||
|
|
||||||
|
class SQLMain:
|
||||||
|
"""Main SQL interface class"""
|
||||||
|
def __init__(self, log: 'Logger', db_ident: str):
|
||||||
|
self.log = log
|
||||||
|
self.dbh: SQLiteIO | None = None
|
||||||
|
self.db_target: str | None = None
|
||||||
|
self.connect(db_ident)
|
||||||
|
if not self.connected():
|
||||||
|
raise ValueError(f'Failed to connect to database [{call_stack()}]')
|
||||||
|
|
||||||
|
def connect(self, db_ident: str):
|
||||||
|
"""setup basic connection"""
|
||||||
|
if self.dbh is not None and self.dbh.conn is not None:
|
||||||
|
self.log.warning(f"A database connection already exists for: {self.db_target} [{call_stack()}]")
|
||||||
|
return
|
||||||
|
self.db_target, db_dsn = db_ident.split(IDENT_SPLIT_CHARACTER)
|
||||||
|
match self.db_target:
|
||||||
|
case 'sqlite':
|
||||||
|
# this is a Path only at the moment
|
||||||
|
self.dbh = SQLiteIO(self.log, db_dsn, row_factory='Dict')
|
||||||
|
case _:
|
||||||
|
raise ValueError(f'SQL interface for {self.db_target} is not implemented [{call_stack()}]')
|
||||||
|
if not self.dbh.db_connected():
|
||||||
|
raise ValueError(f"DB Connection failed for: {self.db_target} [{call_stack()}]")
|
||||||
|
|
||||||
|
def close(self):
|
||||||
|
"""close connection"""
|
||||||
|
if self.dbh is None or not self.connected():
|
||||||
|
return
|
||||||
|
# self.log.info(f"Close DB Connection: {self.db_target} [{call_stack()}]")
|
||||||
|
self.dbh.db_close()
|
||||||
|
|
||||||
|
def connected(self) -> bool:
|
||||||
|
"""check connectuon"""
|
||||||
|
if self.dbh is None or not self.dbh.db_connected():
|
||||||
|
self.log.warning(f"No connection [{call_stack()}]")
|
||||||
|
return False
|
||||||
|
return True
|
||||||
|
|
||||||
|
def process_query(
|
||||||
|
self, query: str, params: tuple[Any, ...] | None = None
|
||||||
|
) -> list[tuple[Any, ...]] | list[dict[str, Any]] | Literal[False]:
|
||||||
|
"""mini wrapper for execute query"""
|
||||||
|
if self.dbh is not None:
|
||||||
|
result = self.dbh.execute_query(query, params)
|
||||||
|
if result is False:
|
||||||
|
return False
|
||||||
|
else:
|
||||||
|
self.log.error(f"Problem connecting to db: {self.db_target} [{call_stack()}]")
|
||||||
|
return False
|
||||||
|
return result
|
||||||
|
|
||||||
|
# __END__
|
||||||
@@ -4,6 +4,8 @@ Send email wrapper
|
|||||||
|
|
||||||
import smtplib
|
import smtplib
|
||||||
from email.message import EmailMessage
|
from email.message import EmailMessage
|
||||||
|
from email.header import Header
|
||||||
|
from email.utils import formataddr, parseaddr
|
||||||
from typing import TYPE_CHECKING, Any
|
from typing import TYPE_CHECKING, Any
|
||||||
if TYPE_CHECKING:
|
if TYPE_CHECKING:
|
||||||
from corelibs.logging_handling.log import Logger
|
from corelibs.logging_handling.log import Logger
|
||||||
@@ -133,21 +135,30 @@ class SendEmail:
|
|||||||
_subject = template["subject"]
|
_subject = template["subject"]
|
||||||
_body = template["body"]
|
_body = template["body"]
|
||||||
for key, value in replace.items():
|
for key, value in replace.items():
|
||||||
_subject = _subject.replace(f"{{{{{key}}}}}", value)
|
placeholder = f"{{{{{key}}}}}"
|
||||||
_body = _body.replace(f"{{{{{key}}}}}", value)
|
_subject = _subject.replace(placeholder, value)
|
||||||
|
_body = _body.replace(placeholder, value)
|
||||||
|
name, addr = parseaddr(from_email)
|
||||||
|
if name:
|
||||||
|
# Encode the name part with MIME encoding
|
||||||
|
encoded_name = str(Header(name, 'utf-8'))
|
||||||
|
from_email_encoded = formataddr((encoded_name, addr))
|
||||||
|
else:
|
||||||
|
from_email_encoded = from_email
|
||||||
# create a simple email and add subhect, from email
|
# create a simple email and add subhect, from email
|
||||||
msg_email = EmailMessage()
|
msg_email = EmailMessage()
|
||||||
# msg.set_content(_body, charset='utf-8', cte='quoted-printable')
|
# msg.set_content(_body, charset='utf-8', cte='quoted-printable')
|
||||||
msg_email.set_content(_body, charset="utf-8")
|
msg_email.set_content(_body, charset="utf-8")
|
||||||
msg_email["Subject"] = _subject
|
msg_email["Subject"] = _subject
|
||||||
msg_email["From"] = from_email
|
msg_email["From"] = from_email_encoded
|
||||||
# push to array for sening
|
# push to array for sening
|
||||||
msg.append(msg_email)
|
msg.append(msg_email)
|
||||||
return msg
|
return msg
|
||||||
|
|
||||||
def send_email_list(
|
def send_email_list(
|
||||||
self,
|
self,
|
||||||
email: list[EmailMessage], receivers: list[str],
|
emails: list[EmailMessage],
|
||||||
|
receivers: list[str],
|
||||||
combined_send: bool | None = None,
|
combined_send: bool | None = None,
|
||||||
test_only: bool | None = None
|
test_only: bool | None = None
|
||||||
):
|
):
|
||||||
@@ -170,18 +181,27 @@ class SendEmail:
|
|||||||
smtp = smtplib.SMTP(smtp_host)
|
smtp = smtplib.SMTP(smtp_host)
|
||||||
except ConnectionRefusedError as e:
|
except ConnectionRefusedError as e:
|
||||||
self.log.error("Could not open SMTP connection to: %s, %s", smtp_host, e)
|
self.log.error("Could not open SMTP connection to: %s, %s", smtp_host, e)
|
||||||
|
# prepare receiver list
|
||||||
|
receivers_encoded: list[str] = []
|
||||||
|
for __receiver in receivers:
|
||||||
|
to_name, to_addr = parseaddr(__receiver)
|
||||||
|
if to_name:
|
||||||
|
# Encode the name part with MIME encoding
|
||||||
|
encoded_to_name = str(Header(to_name, 'utf-8'))
|
||||||
|
receivers_encoded.append(formataddr((encoded_to_name, to_addr)))
|
||||||
|
else:
|
||||||
|
receivers_encoded.append(__receiver)
|
||||||
# loop over messages and then over recievers
|
# loop over messages and then over recievers
|
||||||
for msg in email:
|
for msg in emails:
|
||||||
if combined_send is True:
|
if combined_send is True:
|
||||||
msg["To"] = ", ".join(receivers)
|
msg["To"] = ", ".join(receivers_encoded)
|
||||||
if not self.settings.get('test'):
|
if not self.settings.get('test'):
|
||||||
if smtp is not None:
|
if smtp is not None:
|
||||||
smtp.send_message(msg, msg["From"], receivers)
|
smtp.send_message(msg, msg["From"], receivers_encoded)
|
||||||
else:
|
else:
|
||||||
self.log.info(f"[EMAIL] Test, not sending email\n{msg}")
|
self.log.info(f"[EMAIL] Test, not sending email\n{msg}")
|
||||||
else:
|
else:
|
||||||
for receiver in receivers:
|
for receiver in receivers_encoded:
|
||||||
# send to
|
|
||||||
self.log.debug(f"===> Send to: {receiver}")
|
self.log.debug(f"===> Send to: {receiver}")
|
||||||
if "To" in msg:
|
if "To" in msg:
|
||||||
msg.replace_header("To", receiver)
|
msg.replace_header("To", receiver)
|
||||||
|
|||||||
@@ -4,11 +4,38 @@ Various dictionary, object and list hashers
|
|||||||
|
|
||||||
import json
|
import json
|
||||||
import hashlib
|
import hashlib
|
||||||
from typing import Any
|
from typing import Any, cast, Sequence
|
||||||
|
|
||||||
|
|
||||||
|
def hash_object(obj: Any) -> str:
|
||||||
|
"""
|
||||||
|
RECOMMENDED for new use
|
||||||
|
Create a hash for any dict or list with mixed key types
|
||||||
|
|
||||||
|
Arguments:
|
||||||
|
obj {Any} -- _description_
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
str -- _description_
|
||||||
|
"""
|
||||||
|
def normalize(o: Any) -> Any:
|
||||||
|
if isinstance(o, dict):
|
||||||
|
# Sort by repr of keys to handle mixed types (str, int, etc.)
|
||||||
|
o = cast(dict[Any, Any], o)
|
||||||
|
return tuple(sorted((repr(k), normalize(v)) for k, v in o.items()))
|
||||||
|
if isinstance(o, (list, tuple)):
|
||||||
|
o = cast(Sequence[Any], o)
|
||||||
|
return tuple(normalize(item) for item in o)
|
||||||
|
return repr(o)
|
||||||
|
|
||||||
|
normalized = normalize(obj)
|
||||||
|
return hashlib.sha256(str(normalized).encode()).hexdigest()
|
||||||
|
|
||||||
|
|
||||||
def dict_hash_frozen(data: dict[Any, Any]) -> int:
|
def dict_hash_frozen(data: dict[Any, Any]) -> int:
|
||||||
"""
|
"""
|
||||||
|
NOT RECOMMENDED, use dict_hash_crc or hash_object instead
|
||||||
|
If used, DO NOT CHANGE
|
||||||
hash a dict via freeze
|
hash a dict via freeze
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
@@ -22,18 +49,25 @@ def dict_hash_frozen(data: dict[Any, Any]) -> int:
|
|||||||
|
|
||||||
def dict_hash_crc(data: dict[Any, Any] | list[Any]) -> str:
|
def dict_hash_crc(data: dict[Any, Any] | list[Any]) -> str:
|
||||||
"""
|
"""
|
||||||
Create a sha256 hash over dict
|
LEGACY METHOD, must be kept for fallback, if used by other code, DO NOT CHANGE
|
||||||
|
Create a sha256 hash over dict or list
|
||||||
alternative for
|
alternative for
|
||||||
dict_hash_frozen
|
dict_hash_frozen
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
data (dict | list): _description_
|
data (dict[Any, Any] | list[Any]): _description_
|
||||||
|
|
||||||
Returns:
|
Returns:
|
||||||
str: _description_
|
str: sha256 hash, prefiex with HO_ if fallback used
|
||||||
"""
|
"""
|
||||||
return hashlib.sha256(
|
try:
|
||||||
json.dumps(data, sort_keys=True, ensure_ascii=True).encode('utf-8')
|
return hashlib.sha256(
|
||||||
).hexdigest()
|
# IT IS IMPORTANT THAT THE BELOW CALL STAYS THE SAME AND DOES NOT CHANGE OR WE WILL GET DIFFERENT HASHES
|
||||||
|
# separators=(',', ':') to get rid of spaces, but if this is used the hash will be different, DO NOT ADD
|
||||||
|
json.dumps(data, sort_keys=True, ensure_ascii=True, default=str).encode('utf-8')
|
||||||
|
).hexdigest()
|
||||||
|
except TypeError:
|
||||||
|
# Fallback tod different hasher, will return DIFFERENT hash than above, so only usable in int/str key mixes
|
||||||
|
return "HO_" + hash_object(data)
|
||||||
|
|
||||||
# __END__
|
# __END__
|
||||||
|
|||||||
@@ -2,6 +2,7 @@
|
|||||||
List type helpers
|
List type helpers
|
||||||
"""
|
"""
|
||||||
|
|
||||||
|
import json
|
||||||
from typing import Any, Sequence
|
from typing import Any, Sequence
|
||||||
|
|
||||||
|
|
||||||
@@ -44,4 +45,31 @@ def is_list_in_list(
|
|||||||
# Get the difference and extract just the values
|
# Get the difference and extract just the values
|
||||||
return [item for item, _ in set_a - set_b]
|
return [item for item, _ in set_a - set_b]
|
||||||
|
|
||||||
|
|
||||||
|
def make_unique_list_of_dicts(dict_list: list[Any]) -> list[Any]:
|
||||||
|
"""
|
||||||
|
Create a list of unique dictionary entries
|
||||||
|
|
||||||
|
Arguments:
|
||||||
|
dict_list {list[Any]} -- _description_
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
list[Any] -- _description_
|
||||||
|
"""
|
||||||
|
try:
|
||||||
|
# try json dumps, can fail with int and str index types
|
||||||
|
return list(
|
||||||
|
{
|
||||||
|
json.dumps(d, sort_keys=True, ensure_ascii=True, separators=(',', ':')): d
|
||||||
|
for d in dict_list
|
||||||
|
}.values()
|
||||||
|
)
|
||||||
|
except TypeError:
|
||||||
|
# Fallback for non-serializable entries, slow but works
|
||||||
|
unique: list[Any] = []
|
||||||
|
for d in dict_list:
|
||||||
|
if d not in unique:
|
||||||
|
unique.append(d)
|
||||||
|
return unique
|
||||||
|
|
||||||
# __END__
|
# __END__
|
||||||
|
|||||||
@@ -13,8 +13,8 @@ from pathlib import Path
|
|||||||
import atexit
|
import atexit
|
||||||
from enum import Flag, auto
|
from enum import Flag, auto
|
||||||
from typing import MutableMapping, TextIO, TypedDict, Any, TYPE_CHECKING, cast
|
from typing import MutableMapping, TextIO, TypedDict, Any, TYPE_CHECKING, cast
|
||||||
|
from corelibs_text_colors.text_colors import Colors
|
||||||
from corelibs.logging_handling.logging_level_handling.logging_level import LoggingLevel
|
from corelibs.logging_handling.logging_level_handling.logging_level import LoggingLevel
|
||||||
from corelibs.string_handling.text_colors import Colors
|
|
||||||
from corelibs.debug_handling.debug_helpers import call_stack, exception_stack
|
from corelibs.debug_handling.debug_helpers import call_stack, exception_stack
|
||||||
|
|
||||||
if TYPE_CHECKING:
|
if TYPE_CHECKING:
|
||||||
@@ -392,6 +392,24 @@ class LogParent:
|
|||||||
except IndexError:
|
except IndexError:
|
||||||
return LoggingLevel.NOTSET
|
return LoggingLevel.NOTSET
|
||||||
|
|
||||||
|
def any_handler_is_minimum_level(self, log_level: LoggingLevel) -> bool:
|
||||||
|
"""
|
||||||
|
if any handler is set to minimum level
|
||||||
|
|
||||||
|
Arguments:
|
||||||
|
log_level {LoggingLevel} -- _description_
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
bool -- _description_
|
||||||
|
"""
|
||||||
|
for handler in self.handlers.values():
|
||||||
|
try:
|
||||||
|
if LoggingLevel.from_any(handler.level).includes(log_level):
|
||||||
|
return True
|
||||||
|
except (IndexError, AttributeError):
|
||||||
|
continue
|
||||||
|
return False
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def validate_log_level(log_level: Any) -> bool:
|
def validate_log_level(log_level: Any) -> bool:
|
||||||
"""
|
"""
|
||||||
@@ -584,9 +602,9 @@ class Log(LogParent):
|
|||||||
__setting = self.DEFAULT_LOG_SETTINGS.get(__log_entry, True)
|
__setting = self.DEFAULT_LOG_SETTINGS.get(__log_entry, True)
|
||||||
default_log_settings[__log_entry] = __setting
|
default_log_settings[__log_entry] = __setting
|
||||||
# check console log type
|
# check console log type
|
||||||
default_log_settings['console_format_type'] = cast('ConsoleFormat', log_settings.get(
|
if (console_format_type := log_settings.get('console_format_type')) is None:
|
||||||
'console_format_type', self.DEFAULT_LOG_SETTINGS['console_format_type']
|
console_format_type = self.DEFAULT_LOG_SETTINGS['console_format_type']
|
||||||
))
|
default_log_settings['console_format_type'] = cast('ConsoleFormat', console_format_type)
|
||||||
# check log queue
|
# check log queue
|
||||||
__setting = log_settings.get('log_queue', self.DEFAULT_LOG_SETTINGS['log_queue'])
|
__setting = log_settings.get('log_queue', self.DEFAULT_LOG_SETTINGS['log_queue'])
|
||||||
if __setting is not None:
|
if __setting is not None:
|
||||||
@@ -756,6 +774,16 @@ class Log(LogParent):
|
|||||||
self.__set_console_formatter(console_format_type)
|
self.__set_console_formatter(console_format_type)
|
||||||
)
|
)
|
||||||
|
|
||||||
|
def get_console_formatter(self) -> ConsoleFormat:
|
||||||
|
"""
|
||||||
|
Get the current console formatter, this the settings type
|
||||||
|
Note that if eg "ALL" is set it will return the combined information but not the ALL flag name itself
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
ConsoleFormat -- _description_
|
||||||
|
"""
|
||||||
|
return self.log_settings['console_format_type']
|
||||||
|
|
||||||
# MARK: console handler
|
# MARK: console handler
|
||||||
def __create_console_handler(
|
def __create_console_handler(
|
||||||
self, handler_name: str,
|
self, handler_name: str,
|
||||||
|
|||||||
0
src/corelibs/math_handling/__init__.py
Normal file
0
src/corelibs/math_handling/__init__.py
Normal file
35
src/corelibs/math_handling/math_helpers.py
Normal file
35
src/corelibs/math_handling/math_helpers.py
Normal file
@@ -0,0 +1,35 @@
|
|||||||
|
"""
|
||||||
|
Various math helpers
|
||||||
|
"""
|
||||||
|
|
||||||
|
import math
|
||||||
|
|
||||||
|
|
||||||
|
def gcd(a: int, b: int):
|
||||||
|
"""
|
||||||
|
Calculate: Greatest Common Divisor
|
||||||
|
|
||||||
|
Arguments:
|
||||||
|
a {int} -- _description_
|
||||||
|
b {int} -- _description_
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
_type_ -- _description_
|
||||||
|
"""
|
||||||
|
return math.gcd(a, b)
|
||||||
|
|
||||||
|
|
||||||
|
def lcd(a: int, b: int):
|
||||||
|
"""
|
||||||
|
Calculate: Least Common Denominator
|
||||||
|
|
||||||
|
Arguments:
|
||||||
|
a {int} -- _description_
|
||||||
|
b {int} -- _description_
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
_type_ -- _description_
|
||||||
|
"""
|
||||||
|
return math.lcm(a, b)
|
||||||
|
|
||||||
|
# __END__
|
||||||
@@ -3,32 +3,61 @@ requests lib interface
|
|||||||
V2 call type
|
V2 call type
|
||||||
"""
|
"""
|
||||||
|
|
||||||
from typing import Any
|
from typing import Any, TypedDict, cast
|
||||||
import warnings
|
|
||||||
import requests
|
import requests
|
||||||
# to hide the verfiy warnings because of the bad SSL settings from Netskope, Akamai, etc
|
from requests import exceptions
|
||||||
warnings.filterwarnings('ignore', message='Unverified HTTPS request')
|
|
||||||
|
|
||||||
|
class ErrorResponse:
|
||||||
|
"""
|
||||||
|
Error response structure. This is returned if a request could not be completed
|
||||||
|
"""
|
||||||
|
def __init__(
|
||||||
|
self,
|
||||||
|
code: int,
|
||||||
|
message: str,
|
||||||
|
action: str,
|
||||||
|
url: str,
|
||||||
|
exception: exceptions.InvalidSchema | exceptions.ReadTimeout | exceptions.ConnectionError | None = None
|
||||||
|
) -> None:
|
||||||
|
self.code = code
|
||||||
|
self.message = message
|
||||||
|
self.action = action
|
||||||
|
self.url = url
|
||||||
|
self.exception_name = type(exception).__name__ if exception is not None else None
|
||||||
|
self.exception_trace = exception if exception is not None else None
|
||||||
|
|
||||||
|
|
||||||
|
class ProxyConfig(TypedDict):
|
||||||
|
"""
|
||||||
|
Socks proxy settings
|
||||||
|
"""
|
||||||
|
type: str
|
||||||
|
host: str
|
||||||
|
port: str
|
||||||
|
|
||||||
|
|
||||||
class Caller:
|
class Caller:
|
||||||
"""_summary_"""
|
"""
|
||||||
|
requests lib interface
|
||||||
|
"""
|
||||||
|
|
||||||
def __init__(
|
def __init__(
|
||||||
self,
|
self,
|
||||||
header: dict[str, str],
|
header: dict[str, str],
|
||||||
verify: bool = True,
|
|
||||||
timeout: int = 20,
|
timeout: int = 20,
|
||||||
proxy: dict[str, str] | None = None,
|
proxy: ProxyConfig | None = None,
|
||||||
|
verify: bool = True,
|
||||||
ca_file: str | None = None
|
ca_file: str | None = None
|
||||||
):
|
):
|
||||||
self.headers = header
|
self.headers = header
|
||||||
self.timeout: int = timeout
|
self.timeout: int = timeout
|
||||||
self.cafile = ca_file
|
self.ca_file = ca_file
|
||||||
self.verify = verify
|
self.verify = verify
|
||||||
self.proxy = proxy
|
self.proxy = cast(dict[str, str], proxy) if proxy is not None else None
|
||||||
|
|
||||||
def __timeout(self, timeout: int | None) -> int:
|
def __timeout(self, timeout: int | None) -> int:
|
||||||
if timeout is not None:
|
if timeout is not None and timeout >= 0:
|
||||||
return timeout
|
return timeout
|
||||||
return self.timeout
|
return self.timeout
|
||||||
|
|
||||||
@@ -39,7 +68,7 @@ class Caller:
|
|||||||
data: dict[str, Any] | None = None,
|
data: dict[str, Any] | None = None,
|
||||||
params: dict[str, Any] | None = None,
|
params: dict[str, Any] | None = None,
|
||||||
timeout: int | None = None
|
timeout: int | None = None
|
||||||
) -> requests.Response | None:
|
) -> requests.Response | ErrorResponse:
|
||||||
"""
|
"""
|
||||||
call wrapper, on error returns None
|
call wrapper, on error returns None
|
||||||
|
|
||||||
@@ -56,67 +85,96 @@ class Caller:
|
|||||||
if data is None:
|
if data is None:
|
||||||
data = {}
|
data = {}
|
||||||
try:
|
try:
|
||||||
response = None
|
|
||||||
if action == "get":
|
if action == "get":
|
||||||
response = requests.get(
|
return requests.get(
|
||||||
url,
|
url,
|
||||||
params=params,
|
params=params,
|
||||||
headers=self.headers,
|
headers=self.headers,
|
||||||
timeout=self.__timeout(timeout),
|
timeout=self.__timeout(timeout),
|
||||||
verify=self.verify,
|
verify=self.verify,
|
||||||
proxies=self.proxy
|
proxies=self.proxy,
|
||||||
|
cert=self.ca_file
|
||||||
)
|
)
|
||||||
elif action == "post":
|
if action == "post":
|
||||||
response = requests.post(
|
return requests.post(
|
||||||
url,
|
url,
|
||||||
params=params,
|
params=params,
|
||||||
json=data,
|
json=data,
|
||||||
headers=self.headers,
|
headers=self.headers,
|
||||||
timeout=self.__timeout(timeout),
|
timeout=self.__timeout(timeout),
|
||||||
verify=self.verify,
|
verify=self.verify,
|
||||||
proxies=self.proxy
|
proxies=self.proxy,
|
||||||
|
cert=self.ca_file
|
||||||
)
|
)
|
||||||
elif action == "put":
|
if action == "put":
|
||||||
response = requests.put(
|
return requests.put(
|
||||||
url,
|
url,
|
||||||
params=params,
|
params=params,
|
||||||
json=data,
|
json=data,
|
||||||
headers=self.headers,
|
headers=self.headers,
|
||||||
timeout=self.__timeout(timeout),
|
timeout=self.__timeout(timeout),
|
||||||
verify=self.verify,
|
verify=self.verify,
|
||||||
proxies=self.proxy
|
proxies=self.proxy,
|
||||||
|
cert=self.ca_file
|
||||||
)
|
)
|
||||||
elif action == "patch":
|
if action == "patch":
|
||||||
response = requests.patch(
|
return requests.patch(
|
||||||
url,
|
url,
|
||||||
params=params,
|
params=params,
|
||||||
json=data,
|
json=data,
|
||||||
headers=self.headers,
|
headers=self.headers,
|
||||||
timeout=self.__timeout(timeout),
|
timeout=self.__timeout(timeout),
|
||||||
verify=self.verify,
|
verify=self.verify,
|
||||||
proxies=self.proxy
|
proxies=self.proxy,
|
||||||
|
cert=self.ca_file
|
||||||
)
|
)
|
||||||
elif action == "delete":
|
if action == "delete":
|
||||||
response = requests.delete(
|
return requests.delete(
|
||||||
url,
|
url,
|
||||||
params=params,
|
params=params,
|
||||||
headers=self.headers,
|
headers=self.headers,
|
||||||
timeout=self.__timeout(timeout),
|
timeout=self.__timeout(timeout),
|
||||||
verify=self.verify,
|
verify=self.verify,
|
||||||
proxies=self.proxy
|
proxies=self.proxy,
|
||||||
|
cert=self.ca_file
|
||||||
)
|
)
|
||||||
return response
|
return ErrorResponse(
|
||||||
except requests.exceptions.InvalidSchema as e:
|
100,
|
||||||
print(f"Invalid URL during '{action}' for {url}:\n\t{e}")
|
f"Unsupported action '{action}'",
|
||||||
return None
|
action,
|
||||||
except requests.exceptions.ReadTimeout as e:
|
url
|
||||||
print(f"Timeout ({self.timeout}s) during '{action}' for {url}:\n\t{e}")
|
)
|
||||||
return None
|
except exceptions.InvalidSchema as e:
|
||||||
except requests.exceptions.ConnectionError as e:
|
return ErrorResponse(
|
||||||
print(f"Connection error during '{action}' for {url}:\n\t{e}")
|
200,
|
||||||
return None
|
f"Invalid URL during '{action}' for {url}",
|
||||||
|
action,
|
||||||
|
url,
|
||||||
|
e
|
||||||
|
)
|
||||||
|
except exceptions.ReadTimeout as e:
|
||||||
|
return ErrorResponse(
|
||||||
|
300,
|
||||||
|
f"Timeout ({self.timeout}s) during '{action}' for {url}",
|
||||||
|
action,
|
||||||
|
url,
|
||||||
|
e
|
||||||
|
)
|
||||||
|
except exceptions.ConnectionError as e:
|
||||||
|
return ErrorResponse(
|
||||||
|
400,
|
||||||
|
f"Connection error during '{action}' for {url}",
|
||||||
|
action,
|
||||||
|
url,
|
||||||
|
e
|
||||||
|
)
|
||||||
|
|
||||||
def get(self, url: str, params: dict[str, Any] | None = None) -> requests.Response | None:
|
def get(
|
||||||
|
self,
|
||||||
|
url: str,
|
||||||
|
params: dict[str, Any] | None = None,
|
||||||
|
timeout: int | None = None
|
||||||
|
) -> requests.Response | ErrorResponse:
|
||||||
"""
|
"""
|
||||||
get data
|
get data
|
||||||
|
|
||||||
@@ -127,11 +185,15 @@ class Caller:
|
|||||||
Returns:
|
Returns:
|
||||||
requests.Response: _description_
|
requests.Response: _description_
|
||||||
"""
|
"""
|
||||||
return self.__call('get', url, params=params)
|
return self.__call('get', url, params=params, timeout=timeout)
|
||||||
|
|
||||||
def post(
|
def post(
|
||||||
self, url: str, data: dict[str, Any] | None = None, params: dict[str, Any] | None = None
|
self,
|
||||||
) -> requests.Response | None:
|
url: str,
|
||||||
|
data: dict[str, Any] | None = None,
|
||||||
|
params: dict[str, Any] | None = None,
|
||||||
|
timeout: int | None = None
|
||||||
|
) -> requests.Response | ErrorResponse:
|
||||||
"""
|
"""
|
||||||
post data
|
post data
|
||||||
|
|
||||||
@@ -143,11 +205,15 @@ class Caller:
|
|||||||
Returns:
|
Returns:
|
||||||
requests.Response | None: _description_
|
requests.Response | None: _description_
|
||||||
"""
|
"""
|
||||||
return self.__call('post', url, data, params)
|
return self.__call('post', url, data, params, timeout=timeout)
|
||||||
|
|
||||||
def put(
|
def put(
|
||||||
self, url: str, data: dict[str, Any] | None = None, params: dict[str, Any] | None = None
|
self,
|
||||||
) -> requests.Response | None:
|
url: str,
|
||||||
|
data: dict[str, Any] | None = None,
|
||||||
|
params: dict[str, Any] | None = None,
|
||||||
|
timeout: int | None = None
|
||||||
|
) -> requests.Response | ErrorResponse:
|
||||||
"""_summary_
|
"""_summary_
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
@@ -158,11 +224,15 @@ class Caller:
|
|||||||
Returns:
|
Returns:
|
||||||
requests.Response | None: _description_
|
requests.Response | None: _description_
|
||||||
"""
|
"""
|
||||||
return self.__call('put', url, data, params)
|
return self.__call('put', url, data, params, timeout=timeout)
|
||||||
|
|
||||||
def patch(
|
def patch(
|
||||||
self, url: str, data: dict[str, Any] | None = None, params: dict[str, Any] | None = None
|
self,
|
||||||
) -> requests.Response | None:
|
url: str,
|
||||||
|
data: dict[str, Any] | None = None,
|
||||||
|
params: dict[str, Any] | None = None,
|
||||||
|
timeout: int | None = None
|
||||||
|
) -> requests.Response | ErrorResponse:
|
||||||
"""_summary_
|
"""_summary_
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
@@ -173,9 +243,14 @@ class Caller:
|
|||||||
Returns:
|
Returns:
|
||||||
requests.Response | None: _description_
|
requests.Response | None: _description_
|
||||||
"""
|
"""
|
||||||
return self.__call('patch', url, data, params)
|
return self.__call('patch', url, data, params, timeout=timeout)
|
||||||
|
|
||||||
def delete(self, url: str, params: dict[str, Any] | None = None) -> requests.Response | None:
|
def delete(
|
||||||
|
self,
|
||||||
|
url: str,
|
||||||
|
params: dict[str, Any] | None = None,
|
||||||
|
timeout: int | None = None
|
||||||
|
) -> requests.Response | ErrorResponse:
|
||||||
"""
|
"""
|
||||||
delete
|
delete
|
||||||
|
|
||||||
@@ -186,6 +261,6 @@ class Caller:
|
|||||||
Returns:
|
Returns:
|
||||||
requests.Response | None: _description_
|
requests.Response | None: _description_
|
||||||
"""
|
"""
|
||||||
return self.__call('delete', url, params=params)
|
return self.__call('delete', url, params=params, timeout=timeout)
|
||||||
|
|
||||||
# __END__
|
# __END__
|
||||||
|
|||||||
@@ -5,152 +5,14 @@ Set colors with print(f"something {Colors.yellow}colorful{Colors.end})
|
|||||||
bold + underline + color combinations are possible.
|
bold + underline + color combinations are possible.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
|
from warnings import deprecated
|
||||||
|
from corelibs_text_colors.text_colors import Colors as ColorsNew
|
||||||
|
|
||||||
class Colors:
|
|
||||||
|
@deprecated("Use src.corelibs_text_colors.text_colors instead")
|
||||||
|
class Colors(ColorsNew):
|
||||||
"""
|
"""
|
||||||
ANSI colors defined
|
ANSI colors defined
|
||||||
"""
|
"""
|
||||||
# General sets, these should not be accessd
|
|
||||||
__BOLD = '\033[1m'
|
|
||||||
__UNDERLINE = '\033[4m'
|
|
||||||
__END = '\033[0m'
|
|
||||||
__RESET = '\033[0m'
|
|
||||||
# Define ANSI color codes as class attributes
|
|
||||||
__BLACK = "\033[30m"
|
|
||||||
__RED = "\033[31m"
|
|
||||||
__GREEN = "\033[32m"
|
|
||||||
__YELLOW = "\033[33m"
|
|
||||||
__BLUE = "\033[34m"
|
|
||||||
__MAGENTA = "\033[35m"
|
|
||||||
__CYAN = "\033[36m"
|
|
||||||
__WHITE = "\033[37m"
|
|
||||||
|
|
||||||
# Define bold/bright versions of the colors
|
|
||||||
__BLACK_BOLD = "\033[1;30m"
|
|
||||||
__RED_BOLD = "\033[1;31m"
|
|
||||||
__GREEN_BOLD = "\033[1;32m"
|
|
||||||
__YELLOW_BOLD = "\033[1;33m"
|
|
||||||
__BLUE_BOLD = "\033[1;34m"
|
|
||||||
__MAGENTA_BOLD = "\033[1;35m"
|
|
||||||
__CYAN_BOLD = "\033[1;36m"
|
|
||||||
__WHITE_BOLD = "\033[1;37m"
|
|
||||||
|
|
||||||
# BRIGHT, alternative
|
|
||||||
__BLACK_BRIGHT = '\033[90m'
|
|
||||||
__RED_BRIGHT = '\033[91m'
|
|
||||||
__GREEN_BRIGHT = '\033[92m'
|
|
||||||
__YELLOW_BRIGHT = '\033[93m'
|
|
||||||
__BLUE_BRIGHT = '\033[94m'
|
|
||||||
__MAGENTA_BRIGHT = '\033[95m'
|
|
||||||
__CYAN_BRIGHT = '\033[96m'
|
|
||||||
__WHITE_BRIGHT = '\033[97m'
|
|
||||||
|
|
||||||
# set access vars
|
|
||||||
bold = __BOLD
|
|
||||||
underline = __UNDERLINE
|
|
||||||
end = __END
|
|
||||||
reset = __RESET
|
|
||||||
# normal
|
|
||||||
black = __BLACK
|
|
||||||
red = __RED
|
|
||||||
green = __GREEN
|
|
||||||
yellow = __YELLOW
|
|
||||||
blue = __BLUE
|
|
||||||
magenta = __MAGENTA
|
|
||||||
cyan = __CYAN
|
|
||||||
white = __WHITE
|
|
||||||
# bold
|
|
||||||
black_bold = __BLACK_BOLD
|
|
||||||
red_bold = __RED_BOLD
|
|
||||||
green_bold = __GREEN_BOLD
|
|
||||||
yellow_bold = __YELLOW_BOLD
|
|
||||||
blue_bold = __BLUE_BOLD
|
|
||||||
magenta_bold = __MAGENTA_BOLD
|
|
||||||
cyan_bold = __CYAN_BOLD
|
|
||||||
white_bold = __WHITE_BOLD
|
|
||||||
# bright
|
|
||||||
black_bright = __BLACK_BRIGHT
|
|
||||||
red_bright = __RED_BRIGHT
|
|
||||||
green_bright = __GREEN_BRIGHT
|
|
||||||
yellow_bright = __YELLOW_BRIGHT
|
|
||||||
blue_bright = __BLUE_BRIGHT
|
|
||||||
magenta_bright = __MAGENTA_BRIGHT
|
|
||||||
cyan_bright = __CYAN_BRIGHT
|
|
||||||
white_bright = __WHITE_BRIGHT
|
|
||||||
|
|
||||||
@staticmethod
|
|
||||||
def disable():
|
|
||||||
"""
|
|
||||||
No colors
|
|
||||||
"""
|
|
||||||
Colors.bold = ''
|
|
||||||
Colors.underline = ''
|
|
||||||
Colors.end = ''
|
|
||||||
Colors.reset = ''
|
|
||||||
# normal
|
|
||||||
Colors.black = ''
|
|
||||||
Colors.red = ''
|
|
||||||
Colors.green = ''
|
|
||||||
Colors.yellow = ''
|
|
||||||
Colors.blue = ''
|
|
||||||
Colors.magenta = ''
|
|
||||||
Colors.cyan = ''
|
|
||||||
Colors.white = ''
|
|
||||||
# bold/bright
|
|
||||||
Colors.black_bold = ''
|
|
||||||
Colors.red_bold = ''
|
|
||||||
Colors.green_bold = ''
|
|
||||||
Colors.yellow_bold = ''
|
|
||||||
Colors.blue_bold = ''
|
|
||||||
Colors.magenta_bold = ''
|
|
||||||
Colors.cyan_bold = ''
|
|
||||||
Colors.white_bold = ''
|
|
||||||
# bold/bright alt
|
|
||||||
Colors.black_bright = ''
|
|
||||||
Colors.red_bright = ''
|
|
||||||
Colors.green_bright = ''
|
|
||||||
Colors.yellow_bright = ''
|
|
||||||
Colors.blue_bright = ''
|
|
||||||
Colors.magenta_bright = ''
|
|
||||||
Colors.cyan_bright = ''
|
|
||||||
Colors.white_bright = ''
|
|
||||||
|
|
||||||
@staticmethod
|
|
||||||
def reset_colors():
|
|
||||||
"""
|
|
||||||
reset colors to the original ones
|
|
||||||
"""
|
|
||||||
# set access vars
|
|
||||||
Colors.bold = Colors.__BOLD
|
|
||||||
Colors.underline = Colors.__UNDERLINE
|
|
||||||
Colors.end = Colors.__END
|
|
||||||
Colors.reset = Colors.__RESET
|
|
||||||
# normal
|
|
||||||
Colors.black = Colors.__BLACK
|
|
||||||
Colors.red = Colors.__RED
|
|
||||||
Colors.green = Colors.__GREEN
|
|
||||||
Colors.yellow = Colors.__YELLOW
|
|
||||||
Colors.blue = Colors.__BLUE
|
|
||||||
Colors.magenta = Colors.__MAGENTA
|
|
||||||
Colors.cyan = Colors.__CYAN
|
|
||||||
Colors.white = Colors.__WHITE
|
|
||||||
# bold
|
|
||||||
Colors.black_bold = Colors.__BLACK_BOLD
|
|
||||||
Colors.red_bold = Colors.__RED_BOLD
|
|
||||||
Colors.green_bold = Colors.__GREEN_BOLD
|
|
||||||
Colors.yellow_bold = Colors.__YELLOW_BOLD
|
|
||||||
Colors.blue_bold = Colors.__BLUE_BOLD
|
|
||||||
Colors.magenta_bold = Colors.__MAGENTA_BOLD
|
|
||||||
Colors.cyan_bold = Colors.__CYAN_BOLD
|
|
||||||
Colors.white_bold = Colors.__WHITE_BOLD
|
|
||||||
# bright
|
|
||||||
Colors.black_bright = Colors.__BLACK_BRIGHT
|
|
||||||
Colors.red_bright = Colors.__RED_BRIGHT
|
|
||||||
Colors.green_bright = Colors.__GREEN_BRIGHT
|
|
||||||
Colors.yellow_bright = Colors.__YELLOW_BRIGHT
|
|
||||||
Colors.blue_bright = Colors.__BLUE_BRIGHT
|
|
||||||
Colors.magenta_bright = Colors.__MAGENTA_BRIGHT
|
|
||||||
Colors.cyan_bright = Colors.__CYAN_BRIGHT
|
|
||||||
Colors.white_bright = Colors.__WHITE_BRIGHT
|
|
||||||
|
|
||||||
# __END__
|
# __END__
|
||||||
|
|||||||
@@ -2,82 +2,24 @@
|
|||||||
Enum base classes
|
Enum base classes
|
||||||
"""
|
"""
|
||||||
|
|
||||||
from enum import Enum
|
import warnings
|
||||||
from warnings import deprecated
|
from corelibs_enum_base.enum_base import EnumBase as CorelibsEnumBase
|
||||||
from typing import Any
|
|
||||||
# from corelibs_enum_base.enum_base import EnumBase as CorelibsEnumBase
|
|
||||||
|
|
||||||
|
|
||||||
class EnumBase(Enum):
|
class EnumBase(CorelibsEnumBase):
|
||||||
"""
|
"""
|
||||||
base for enum
|
base for enum
|
||||||
|
|
||||||
|
.. deprecated::
|
||||||
|
Use corelibs_enum_base.EnumBase instead
|
||||||
|
DEPRECATED: Use corelibs_enum_base.enum_base.EnumBase instead
|
||||||
|
|
||||||
lookup_any and from_any will return "EnumBase" and the sub class name
|
lookup_any and from_any will return "EnumBase" and the sub class name
|
||||||
run the return again to "from_any" to get a clean value, or cast it
|
run the return again to "from_any" to get a clean value, or cast it
|
||||||
"""
|
"""
|
||||||
|
|
||||||
@classmethod
|
|
||||||
@deprecated("Use corelibs_enum_base.EnumBase instead")
|
|
||||||
def lookup_key(cls, enum_key: str):
|
|
||||||
"""Lookup from key side (must be string)"""
|
|
||||||
# if there is a ":", then this is legacy, replace with ___
|
|
||||||
if ":" in enum_key:
|
|
||||||
enum_key = enum_key.replace(':', '___')
|
|
||||||
try:
|
|
||||||
return cls[enum_key.upper()]
|
|
||||||
except KeyError as e:
|
|
||||||
raise ValueError(f"Invalid key: {enum_key}") from e
|
|
||||||
except AttributeError as e:
|
|
||||||
raise ValueError(f"Invalid key: {enum_key}") from e
|
|
||||||
|
|
||||||
@classmethod
|
# At the module level, issue a deprecation warning
|
||||||
@deprecated("Use corelibs_enum_base.EnumBase instead")
|
warnings.warn("Use corelibs_enum_base.enum_base.EnumBase instead", DeprecationWarning, stacklevel=2)
|
||||||
def lookup_value(cls, enum_value: Any):
|
|
||||||
"""Lookup through value side"""
|
|
||||||
try:
|
|
||||||
return cls(enum_value)
|
|
||||||
except ValueError as e:
|
|
||||||
raise ValueError(f"Invalid value: {enum_value}") from e
|
|
||||||
|
|
||||||
@classmethod
|
# __EMD__
|
||||||
@deprecated("Use corelibs_enum_base.EnumBase instead")
|
|
||||||
def from_any(cls, enum_any: Any):
|
|
||||||
"""
|
|
||||||
This only works in the following order
|
|
||||||
-> class itself, as is
|
|
||||||
-> str, assume key lookup
|
|
||||||
-> if failed try other
|
|
||||||
|
|
||||||
Arguments:
|
|
||||||
enum_any {Any} -- _description_
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
_type_ -- _description_
|
|
||||||
"""
|
|
||||||
if isinstance(enum_any, cls):
|
|
||||||
return enum_any
|
|
||||||
# try key first if it is string
|
|
||||||
# if failed try value
|
|
||||||
if isinstance(enum_any, str):
|
|
||||||
try:
|
|
||||||
return cls.lookup_key(enum_any)
|
|
||||||
except (ValueError, AttributeError):
|
|
||||||
try:
|
|
||||||
return cls.lookup_value(enum_any)
|
|
||||||
except ValueError as e:
|
|
||||||
raise ValueError(f"Could not find as key or value: {enum_any}") from e
|
|
||||||
return cls.lookup_value(enum_any)
|
|
||||||
|
|
||||||
@deprecated("Use corelibs_enum_base.EnumBase instead")
|
|
||||||
def to_value(self) -> Any:
|
|
||||||
"""Convert to value"""
|
|
||||||
return self.value
|
|
||||||
|
|
||||||
@deprecated("Use corelibs_enum_base.EnumBase instead")
|
|
||||||
def to_lower_case(self) -> str:
|
|
||||||
"""return lower case"""
|
|
||||||
return self.name.lower()
|
|
||||||
|
|
||||||
@deprecated("Use corelibs_enum_base.EnumBase instead")
|
|
||||||
def __str__(self) -> str:
|
|
||||||
"""return [Enum].NAME like it was called with .name"""
|
|
||||||
return self.name
|
|
||||||
|
|||||||
15
src/corelibs/var_handling/enum_base.pyi
Normal file
15
src/corelibs/var_handling/enum_base.pyi
Normal file
@@ -0,0 +1,15 @@
|
|||||||
|
"""
|
||||||
|
Enum base classes [STPUB]
|
||||||
|
"""
|
||||||
|
|
||||||
|
from typing_extensions import deprecated
|
||||||
|
from corelibs_enum_base.enum_base import EnumBase as CorelibsEnumBase
|
||||||
|
|
||||||
|
|
||||||
|
@deprecated("Use corelibs_enum_base.enum_base.EnumBase instead")
|
||||||
|
class EnumBase(CorelibsEnumBase):
|
||||||
|
"""
|
||||||
|
base for enum
|
||||||
|
lookup_any and from_any will return "EnumBase" and the sub class name
|
||||||
|
run the return again to "from_any" to get a clean value, or cast it
|
||||||
|
"""
|
||||||
109
test-run/check_handling/regex_checks.py
Normal file
109
test-run/check_handling/regex_checks.py
Normal file
@@ -0,0 +1,109 @@
|
|||||||
|
"""
|
||||||
|
Test check andling for regex checks
|
||||||
|
"""
|
||||||
|
|
||||||
|
from corelibs_text_colors.text_colors import Colors
|
||||||
|
from corelibs.check_handling.regex_constants import (
|
||||||
|
compile_re, DOMAIN_WITH_LOCALHOST_REGEX, EMAIL_BASIC_REGEX, NAME_EMAIL_BASIC_REGEX, SUB_EMAIL_BASIC_REGEX
|
||||||
|
)
|
||||||
|
from corelibs.check_handling.regex_constants_compiled import (
|
||||||
|
COMPILED_DOMAIN_WITH_LOCALHOST_REGEX, COMPILED_EMAIL_BASIC_REGEX,
|
||||||
|
COMPILED_NAME_EMAIL_SIMPLE_REGEX, COMPILED_NAME_EMAIL_BASIC_REGEX
|
||||||
|
)
|
||||||
|
|
||||||
|
NAME_EMAIL_SIMPLE_REGEX = r"""
|
||||||
|
^\s*(?:"(?P<name1>[^"]+)"\s*<(?P<email1>[^>]+)>|
|
||||||
|
(?P<name2>.+?)\s*<(?P<email2>[^>]+)>|
|
||||||
|
<(?P<email3>[^>]+)>|
|
||||||
|
(?P<email4>[^\s<>]+))\s*$
|
||||||
|
"""
|
||||||
|
|
||||||
|
|
||||||
|
def domain_test():
|
||||||
|
"""
|
||||||
|
domain regex test
|
||||||
|
"""
|
||||||
|
print("=" * 30)
|
||||||
|
test_domains = [
|
||||||
|
"example.com",
|
||||||
|
"localhost",
|
||||||
|
"subdomain.localhost",
|
||||||
|
"test.localhost.com",
|
||||||
|
"some-domain.org"
|
||||||
|
]
|
||||||
|
|
||||||
|
regex_domain_check = COMPILED_DOMAIN_WITH_LOCALHOST_REGEX
|
||||||
|
print(f"REGEX: {DOMAIN_WITH_LOCALHOST_REGEX}")
|
||||||
|
print(f"Check regex: {regex_domain_check.search('localhost')}")
|
||||||
|
|
||||||
|
for domain in test_domains:
|
||||||
|
if regex_domain_check.search(domain):
|
||||||
|
print(f"Matched: {domain}")
|
||||||
|
else:
|
||||||
|
print(f"Did not match: {domain}")
|
||||||
|
|
||||||
|
|
||||||
|
def email_test():
|
||||||
|
"""
|
||||||
|
email regex test
|
||||||
|
"""
|
||||||
|
print("=" * 30)
|
||||||
|
email_list = """
|
||||||
|
e@bar.com
|
||||||
|
<f@foobar.com>
|
||||||
|
"Master" <foobar@bar.com>
|
||||||
|
"not valid" not@valid.com
|
||||||
|
also not valid not@valid.com
|
||||||
|
some header <something@bar.com>
|
||||||
|
test master <master@master.com>
|
||||||
|
日本語 <japan@jp.net>
|
||||||
|
"ひほん カケ苦" <foo@bar.com>
|
||||||
|
single@entry.com
|
||||||
|
arsch@popsch.com
|
||||||
|
test open <open@open.com>
|
||||||
|
"""
|
||||||
|
|
||||||
|
print(f"REGEX: SUB_EMAIL_BASIC_REGEX: {SUB_EMAIL_BASIC_REGEX}")
|
||||||
|
print(f"REGEX: EMAIL_BASIC_REGEX: {EMAIL_BASIC_REGEX}")
|
||||||
|
print(f"REGEX: COMPILED_NAME_EMAIL_SIMPLE_REGEX: {COMPILED_NAME_EMAIL_SIMPLE_REGEX}")
|
||||||
|
print(f"REGEX: NAME_EMAIL_BASIC_REGEX: {NAME_EMAIL_BASIC_REGEX}")
|
||||||
|
|
||||||
|
basic_email = COMPILED_EMAIL_BASIC_REGEX
|
||||||
|
sub_basic_email = compile_re(SUB_EMAIL_BASIC_REGEX)
|
||||||
|
simple_name_email_regex = COMPILED_NAME_EMAIL_SIMPLE_REGEX
|
||||||
|
full_name_email_regex = COMPILED_NAME_EMAIL_BASIC_REGEX
|
||||||
|
for email in email_list.splitlines():
|
||||||
|
email = email.strip()
|
||||||
|
if not email:
|
||||||
|
continue
|
||||||
|
print(f">>> Testing: {email}")
|
||||||
|
if not basic_email.match(email):
|
||||||
|
print(f"{Colors.red}[EMAIL ] No match: {email}{Colors.reset}")
|
||||||
|
else:
|
||||||
|
print(f"{Colors.green}[EMAIL ] Matched : {email}{Colors.reset}")
|
||||||
|
if not sub_basic_email.match(email):
|
||||||
|
print(f"{Colors.red}[SUB ] No match: {email}{Colors.reset}")
|
||||||
|
else:
|
||||||
|
print(f"{Colors.green}[SUB ] Matched : {email}{Colors.reset}")
|
||||||
|
if not simple_name_email_regex.match(email):
|
||||||
|
print(f"{Colors.red}[SIMPLE] No match: {email}{Colors.reset}")
|
||||||
|
else:
|
||||||
|
print(f"{Colors.green}[SIMPLE] Matched : {email}{Colors.reset}")
|
||||||
|
if not full_name_email_regex.match(email):
|
||||||
|
print(f"{Colors.red}[FULL ] No match: {email}{Colors.reset}")
|
||||||
|
else:
|
||||||
|
print(f"{Colors.green}[FULL ] Matched : {email}{Colors.reset}")
|
||||||
|
|
||||||
|
|
||||||
|
def main():
|
||||||
|
"""
|
||||||
|
Test regex checks
|
||||||
|
"""
|
||||||
|
domain_test()
|
||||||
|
email_test()
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
main()
|
||||||
|
|
||||||
|
# __END__
|
||||||
@@ -1,16 +1,23 @@
|
|||||||
[TestA]
|
[TestA]
|
||||||
foo=bar
|
foo=bar
|
||||||
|
overload_from_args=bar
|
||||||
foobar=1
|
foobar=1
|
||||||
bar=st
|
bar=st
|
||||||
|
arg_overload=should_not_be_set_because_of_command_line_is_list
|
||||||
|
arg_overload_list=too,be,long
|
||||||
|
arg_overload_not_set=this should not be set because of override flag
|
||||||
|
just_values=too,be,long
|
||||||
some_match=foo
|
some_match=foo
|
||||||
some_match_list=foo,bar
|
some_match_list=foo,bar
|
||||||
test_list=a,b,c,d f, g h
|
test_list=a,b,c,d f, g h
|
||||||
other_list=a|b|c|d|
|
other_list=a|b|c|d|
|
||||||
third_list=xy|ab|df|fg
|
third_list=xy|ab|df|fg
|
||||||
|
empty_list=
|
||||||
str_length=foobar
|
str_length=foobar
|
||||||
int_range=20
|
int_range=20
|
||||||
int_range_not_set=
|
int_range_not_set=
|
||||||
int_range_not_set_empty_set=5
|
int_range_not_set_empty_set=5
|
||||||
|
bool_var=True
|
||||||
#
|
#
|
||||||
match_target=foo
|
match_target=foo
|
||||||
match_target_list=foo,bar,baz
|
match_target_list=foo,bar,baz
|
||||||
@@ -32,3 +39,6 @@ email_bad=gii@bar.com
|
|||||||
[LoadTest]
|
[LoadTest]
|
||||||
a.b.c=foo
|
a.b.c=foo
|
||||||
d:e:f=bar
|
d:e:f=bar
|
||||||
|
|
||||||
|
[ErrorTest]
|
||||||
|
some_value=42
|
||||||
|
|||||||
@@ -21,11 +21,6 @@ def main():
|
|||||||
Main run
|
Main run
|
||||||
"""
|
"""
|
||||||
|
|
||||||
value = "2025/1/1"
|
|
||||||
regex_c = re.compile(SettingsLoaderCheck.CHECK_SETTINGS['string.date']['regex'], re.VERBOSE)
|
|
||||||
result = regex_c.search(value)
|
|
||||||
print(f"regex {regex_c} check against {value} -> {result}")
|
|
||||||
|
|
||||||
# for log testing
|
# for log testing
|
||||||
log = Log(
|
log = Log(
|
||||||
log_path=ROOT_PATH.joinpath(LOG_DIR, 'settings_loader.log'),
|
log_path=ROOT_PATH.joinpath(LOG_DIR, 'settings_loader.log'),
|
||||||
@@ -37,9 +32,17 @@ def main():
|
|||||||
)
|
)
|
||||||
log.logger.info('Settings loader')
|
log.logger.info('Settings loader')
|
||||||
|
|
||||||
|
value = "2025/1/1"
|
||||||
|
regex_c = re.compile(SettingsLoaderCheck.CHECK_SETTINGS['string.date']['regex'], re.VERBOSE)
|
||||||
|
result = regex_c.search(value)
|
||||||
|
log.info(f"regex {regex_c} check against {value} -> {result}")
|
||||||
|
|
||||||
sl = SettingsLoader(
|
sl = SettingsLoader(
|
||||||
{
|
{
|
||||||
'foo': 'OVERLOAD'
|
'overload_from_args': 'OVERLOAD from ARGS',
|
||||||
|
'arg_overload': ['should', 'not', 'be', 'set'],
|
||||||
|
'arg_overload_list': ['overload', 'this', 'list'],
|
||||||
|
'arg_overload_not_set': "DO_NOT_SET",
|
||||||
},
|
},
|
||||||
ROOT_PATH.joinpath(CONFIG_DIR, CONFIG_FILE),
|
ROOT_PATH.joinpath(CONFIG_DIR, CONFIG_FILE),
|
||||||
log=log
|
log=log
|
||||||
@@ -50,9 +53,11 @@ def main():
|
|||||||
config_load,
|
config_load,
|
||||||
{
|
{
|
||||||
# "doesnt": ["split:,"],
|
# "doesnt": ["split:,"],
|
||||||
"foo": ["mandatory:yes"],
|
"overload_from_args": ["args_override:yes", "mandatory:yes"],
|
||||||
"foobar": ["check:int"],
|
"foobar": ["check:int"],
|
||||||
"bar": ["mandatory:yes"],
|
"bar": ["mandatory:yes"],
|
||||||
|
"arg_overload_list": ["args_override:yes", "split:,",],
|
||||||
|
"arg_overload_not_set": [],
|
||||||
"some_match": ["matching:foo|bar"],
|
"some_match": ["matching:foo|bar"],
|
||||||
"some_match_list": ["split:,", "matching:foo|bar"],
|
"some_match_list": ["split:,", "matching:foo|bar"],
|
||||||
"test_list": [
|
"test_list": [
|
||||||
@@ -64,6 +69,9 @@ def main():
|
|||||||
"split:|",
|
"split:|",
|
||||||
"check:string.alphanumeric"
|
"check:string.alphanumeric"
|
||||||
],
|
],
|
||||||
|
"empty_list": [
|
||||||
|
"split:,",
|
||||||
|
],
|
||||||
"str_length": [
|
"str_length": [
|
||||||
"length:2-10"
|
"length:2-10"
|
||||||
],
|
],
|
||||||
@@ -76,6 +84,7 @@ def main():
|
|||||||
"int_range_not_set_empty_set": [
|
"int_range_not_set_empty_set": [
|
||||||
"empty:"
|
"empty:"
|
||||||
],
|
],
|
||||||
|
"bool_var": ["convert:bool"],
|
||||||
"match_target": ["matching:foo"],
|
"match_target": ["matching:foo"],
|
||||||
"match_target_list": ["split:,", "matching:foo|bar|baz",],
|
"match_target_list": ["split:,", "matching:foo|bar|baz",],
|
||||||
"match_source_a": ["in:match_target"],
|
"match_source_a": ["in:match_target"],
|
||||||
@@ -120,6 +129,20 @@ def main():
|
|||||||
except ValueError as e:
|
except ValueError as e:
|
||||||
print(f"Could not load settings: {e}")
|
print(f"Could not load settings: {e}")
|
||||||
|
|
||||||
|
try:
|
||||||
|
config_load = 'ErrorTest'
|
||||||
|
config_data = sl.load_settings(
|
||||||
|
config_load,
|
||||||
|
{
|
||||||
|
"some_value": [
|
||||||
|
"check:string.email.basic",
|
||||||
|
],
|
||||||
|
}
|
||||||
|
)
|
||||||
|
print(f"[{config_load}] Load: {config_load} -> {dump_data(config_data)}")
|
||||||
|
except ValueError as e:
|
||||||
|
print(f"Could not load settings: {e}")
|
||||||
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
if __name__ == "__main__":
|
||||||
main()
|
main()
|
||||||
|
|||||||
139
test-run/db_handling/sql_main.py
Normal file
139
test-run/db_handling/sql_main.py
Normal file
@@ -0,0 +1,139 @@
|
|||||||
|
"""
|
||||||
|
SQL Main wrapper test
|
||||||
|
"""
|
||||||
|
|
||||||
|
from pathlib import Path
|
||||||
|
from uuid import uuid4
|
||||||
|
import json
|
||||||
|
from corelibs.debug_handling.dump_data import dump_data
|
||||||
|
from corelibs.logging_handling.log import Log, Logger
|
||||||
|
from corelibs.db_handling.sql_main import SQLMain
|
||||||
|
|
||||||
|
SCRIPT_PATH: Path = Path(__file__).resolve().parent
|
||||||
|
ROOT_PATH: Path = SCRIPT_PATH
|
||||||
|
DATABASE_DIR: Path = Path("database")
|
||||||
|
LOG_DIR: Path = Path("log")
|
||||||
|
|
||||||
|
|
||||||
|
def main() -> None:
|
||||||
|
"""
|
||||||
|
Comment
|
||||||
|
"""
|
||||||
|
log = Log(
|
||||||
|
log_path=ROOT_PATH.joinpath(LOG_DIR, 'sqlite_main.log'),
|
||||||
|
log_name="SQLite Main",
|
||||||
|
log_settings={
|
||||||
|
"log_level_console": 'DEBUG',
|
||||||
|
"log_level_file": 'DEBUG',
|
||||||
|
}
|
||||||
|
)
|
||||||
|
sql_main = SQLMain(
|
||||||
|
log=Logger(log.get_logger_settings()),
|
||||||
|
db_ident=f"sqlite:{ROOT_PATH.joinpath(DATABASE_DIR, 'test_sqlite_main.db')}"
|
||||||
|
)
|
||||||
|
if sql_main.connected():
|
||||||
|
log.info("SQL Main connected successfully")
|
||||||
|
else:
|
||||||
|
log.error('SQL Main connection failed')
|
||||||
|
if sql_main.dbh is None:
|
||||||
|
log.error('SQL Main DBH instance is None')
|
||||||
|
return
|
||||||
|
|
||||||
|
if sql_main.dbh.trigger_exists('trg_test_a_set_date_updated_on_update'):
|
||||||
|
log.info("Trigger trg_test_a_set_date_updated_on_update exists")
|
||||||
|
if sql_main.dbh.table_exists('test_a'):
|
||||||
|
log.info("Table test_a exists, dropping for clean test")
|
||||||
|
sql_main.dbh.execute_query("DROP TABLE test_a;")
|
||||||
|
# create a dummy table
|
||||||
|
table_sql = """
|
||||||
|
CREATE TABLE IF NOT EXISTS test_a (
|
||||||
|
test_a_id INTEGER PRIMARY KEY,
|
||||||
|
date_created TEXT DEFAULT (strftime('%Y-%m-%d %H:%M:%f', 'now')),
|
||||||
|
date_updated TEXT,
|
||||||
|
uid TEXT NOT NULL UNIQUE,
|
||||||
|
set_current_timestamp TEXT DEFAULT CURRENT_TIMESTAMP,
|
||||||
|
text_a TEXT,
|
||||||
|
content,
|
||||||
|
int_a INTEGER,
|
||||||
|
float_a REAL
|
||||||
|
);
|
||||||
|
"""
|
||||||
|
|
||||||
|
result = sql_main.dbh.execute_query(table_sql)
|
||||||
|
log.debug(f"Create table result: {result}")
|
||||||
|
trigger_sql = """
|
||||||
|
CREATE TRIGGER trg_test_a_set_date_updated_on_update
|
||||||
|
AFTER UPDATE ON test_a
|
||||||
|
FOR EACH ROW
|
||||||
|
WHEN OLD.date_updated IS NULL OR NEW.date_updated = OLD.date_updated
|
||||||
|
BEGIN
|
||||||
|
UPDATE test_a
|
||||||
|
SET date_updated = (strftime('%Y-%m-%d %H:%M:%f', 'now'))
|
||||||
|
WHERE test_a_id = NEW.test_a_id;
|
||||||
|
END;
|
||||||
|
"""
|
||||||
|
result = sql_main.dbh.execute_query(trigger_sql)
|
||||||
|
log.debug(f"Create trigger result: {result}")
|
||||||
|
result = sql_main.dbh.meta_data_detail('test_a')
|
||||||
|
log.debug(f"Table meta data detail: {dump_data(result)}")
|
||||||
|
# INSERT DATA
|
||||||
|
sql = """
|
||||||
|
INSERT INTO test_a (uid, text_a, content, int_a, float_a)
|
||||||
|
VALUES (?, ?, ?, ?, ?)
|
||||||
|
RETURNING test_a_id, uid;
|
||||||
|
"""
|
||||||
|
result = sql_main.dbh.execute_query(
|
||||||
|
sql,
|
||||||
|
(
|
||||||
|
str(uuid4()),
|
||||||
|
'Some text A',
|
||||||
|
json.dumps({'foo': 'bar', 'number': 42}),
|
||||||
|
123,
|
||||||
|
123.456,
|
||||||
|
)
|
||||||
|
)
|
||||||
|
log.debug(f"[1] Insert data result: {dump_data(result)}")
|
||||||
|
__uid: str = ''
|
||||||
|
if result is not False:
|
||||||
|
# first one only of interest
|
||||||
|
result = dict(result[0])
|
||||||
|
__uid = str(result.get('uid', ''))
|
||||||
|
# second insert
|
||||||
|
result = sql_main.dbh.execute_query(
|
||||||
|
sql,
|
||||||
|
(
|
||||||
|
str(uuid4()),
|
||||||
|
'Some text A',
|
||||||
|
json.dumps({'foo': 'bar', 'number': 42}),
|
||||||
|
123,
|
||||||
|
123.456,
|
||||||
|
)
|
||||||
|
)
|
||||||
|
log.debug(f"[2] Insert data result: {dump_data(result)}")
|
||||||
|
result = sql_main.dbh.execute_query("SELECT * FROM test_a;")
|
||||||
|
log.debug(f"Select data result: {dump_data(result)}")
|
||||||
|
result = sql_main.dbh.return_one("SELECT * FROM test_a WHERE uid = ?;", (__uid,))
|
||||||
|
log.debug(f"Fetch row result: {dump_data(result)}")
|
||||||
|
sql = """
|
||||||
|
UPDATE test_a
|
||||||
|
SET text_a = ?
|
||||||
|
WHERE uid = ?;
|
||||||
|
"""
|
||||||
|
result = sql_main.dbh.execute_query(
|
||||||
|
sql,
|
||||||
|
(
|
||||||
|
'Some updated text A',
|
||||||
|
__uid,
|
||||||
|
)
|
||||||
|
)
|
||||||
|
log.debug(f"Update data result: {dump_data(result)}")
|
||||||
|
result = sql_main.dbh.return_one("SELECT * FROM test_a WHERE uid = ?;", (__uid,))
|
||||||
|
log.debug(f"Fetch row after update result: {dump_data(result)}")
|
||||||
|
|
||||||
|
sql_main.close()
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
main()
|
||||||
|
|
||||||
|
# __END__
|
||||||
@@ -1,7 +1,5 @@
|
|||||||
#!/usr/bin/env python3
|
|
||||||
|
|
||||||
"""
|
"""
|
||||||
Main comment
|
SQLite IO test
|
||||||
"""
|
"""
|
||||||
|
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
|
|||||||
@@ -24,12 +24,19 @@ def main() -> None:
|
|||||||
"lookup_value_c": "B02",
|
"lookup_value_c": "B02",
|
||||||
"replace_value": "R02",
|
"replace_value": "R02",
|
||||||
},
|
},
|
||||||
|
{
|
||||||
|
"lookup_value_p": "A03",
|
||||||
|
"lookup_value_c": "B03",
|
||||||
|
"replace_value": "R03",
|
||||||
|
},
|
||||||
]
|
]
|
||||||
test_foo = ArraySearchList(
|
test_foo = ArraySearchList(
|
||||||
key = "lookup_value_p",
|
key="lookup_value_p",
|
||||||
value = "A01"
|
value="A01"
|
||||||
)
|
)
|
||||||
print(test_foo)
|
result = find_in_array_from_list(data, [test_foo])
|
||||||
|
print(f"Search A: {dump_data(test_foo)} -> {dump_data(result)}")
|
||||||
|
|
||||||
search: list[ArraySearchList] = [
|
search: list[ArraySearchList] = [
|
||||||
{
|
{
|
||||||
"key": "lookup_value_p",
|
"key": "lookup_value_p",
|
||||||
@@ -38,12 +45,122 @@ def main() -> None:
|
|||||||
{
|
{
|
||||||
"key": "lookup_value_c",
|
"key": "lookup_value_c",
|
||||||
"value": "B01"
|
"value": "B01"
|
||||||
|
},
|
||||||
|
]
|
||||||
|
result = find_in_array_from_list(data, search)
|
||||||
|
print(f"Search B: {dump_data(search)} -> {dump_data(result)}")
|
||||||
|
|
||||||
|
search: list[ArraySearchList] = [
|
||||||
|
{
|
||||||
|
"key": "lookup_value_p",
|
||||||
|
"value": "A01"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"key": "lookup_value_c",
|
||||||
|
"value": "B01"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"key": "lookup_value_c",
|
||||||
|
"value": "B02"
|
||||||
|
},
|
||||||
|
]
|
||||||
|
try:
|
||||||
|
result = find_in_array_from_list(data, search)
|
||||||
|
print(f"Search C: {dump_data(search)} -> {dump_data(result)}")
|
||||||
|
except KeyError as e:
|
||||||
|
print(f"Search C raised KeyError: {e}")
|
||||||
|
|
||||||
|
search: list[ArraySearchList] = [
|
||||||
|
{
|
||||||
|
"key": "lookup_value_p",
|
||||||
|
"value": "A01"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"key": "lookup_value_c",
|
||||||
|
"value": ["B01", "B02"]
|
||||||
|
},
|
||||||
|
]
|
||||||
|
try:
|
||||||
|
result = find_in_array_from_list(data, search)
|
||||||
|
print(f"Search D: {dump_data(search)} -> {dump_data(result)}")
|
||||||
|
except KeyError as e:
|
||||||
|
print(f"Search D raised KeyError: {e}")
|
||||||
|
|
||||||
|
search: list[ArraySearchList] = [
|
||||||
|
{
|
||||||
|
"key": "lookup_value_p",
|
||||||
|
"value": ["A01", "A03"]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"key": "lookup_value_c",
|
||||||
|
"value": ["B01", "B02"]
|
||||||
|
},
|
||||||
|
]
|
||||||
|
try:
|
||||||
|
result = find_in_array_from_list(data, search)
|
||||||
|
print(f"Search E: {dump_data(search)} -> {dump_data(result)}")
|
||||||
|
except KeyError as e:
|
||||||
|
print(f"Search E raised KeyError: {e}")
|
||||||
|
|
||||||
|
search: list[ArraySearchList] = [
|
||||||
|
{
|
||||||
|
"key": "lookup_value_p",
|
||||||
|
"value": "NOT FOUND"
|
||||||
|
},
|
||||||
|
]
|
||||||
|
try:
|
||||||
|
result = find_in_array_from_list(data, search)
|
||||||
|
print(f"Search F: {dump_data(search)} -> {dump_data(result)}")
|
||||||
|
except KeyError as e:
|
||||||
|
print(f"Search F raised KeyError: {e}")
|
||||||
|
|
||||||
|
data = [
|
||||||
|
{
|
||||||
|
"sd_user_id": "1593",
|
||||||
|
"email": "",
|
||||||
|
"employee_id": ""
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"sd_user_id": "1592",
|
||||||
|
"email": "",
|
||||||
|
"employee_id": ""
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"sd_user_id": "1596",
|
||||||
|
"email": "",
|
||||||
|
"employee_id": ""
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"sd_user_id": "1594",
|
||||||
|
"email": "",
|
||||||
|
"employee_id": ""
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"sd_user_id": "1595",
|
||||||
|
"email": "",
|
||||||
|
"employee_id": ""
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"sd_user_id": "1861",
|
||||||
|
"email": "",
|
||||||
|
"employee_id": ""
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"sd_user_id": "1862",
|
||||||
|
"email": "",
|
||||||
|
"employee_id": ""
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"sd_user_id": "1860",
|
||||||
|
"email": "",
|
||||||
|
"employee_id": ""
|
||||||
}
|
}
|
||||||
]
|
]
|
||||||
|
result = find_in_array_from_list(data, [ArraySearchList(
|
||||||
result = find_in_array_from_list(data, search)
|
key="sd_user_id",
|
||||||
|
value="1593"
|
||||||
print(f"Search {dump_data(search)} -> {dump_data(result)}")
|
)])
|
||||||
|
print(f"Search F: -> {dump_data(result)}")
|
||||||
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
if __name__ == "__main__":
|
||||||
|
|||||||
@@ -2,7 +2,10 @@
|
|||||||
test list helpers
|
test list helpers
|
||||||
"""
|
"""
|
||||||
|
|
||||||
from corelibs.iterator_handling.list_helpers import is_list_in_list, convert_to_list
|
from typing import Any
|
||||||
|
from corelibs.debug_handling.dump_data import dump_data
|
||||||
|
from corelibs.iterator_handling.list_helpers import is_list_in_list, convert_to_list, make_unique_list_of_dicts
|
||||||
|
from corelibs.iterator_handling.fingerprint import dict_hash_crc
|
||||||
|
|
||||||
|
|
||||||
def __test_is_list_in_list_a():
|
def __test_is_list_in_list_a():
|
||||||
@@ -18,9 +21,66 @@ def __convert_list():
|
|||||||
print(f"IN: {source} -> {result}")
|
print(f"IN: {source} -> {result}")
|
||||||
|
|
||||||
|
|
||||||
|
def __make_unique_list_of_dicts():
|
||||||
|
dict_list = [
|
||||||
|
{"a": 1, "b": 2, "nested": {"x": 10, "y": 20}},
|
||||||
|
{"a": 1, "b": 2, "nested": {"x": 10, "y": 20}},
|
||||||
|
{"b": 2, "a": 1, "nested": {"y": 20, "x": 10}},
|
||||||
|
{"b": 2, "a": 1, "nested": {"y": 20, "x": 30}},
|
||||||
|
{"a": 3, "b": 4, "nested": {"x": 30, "y": 40}}
|
||||||
|
]
|
||||||
|
unique_dicts = make_unique_list_of_dicts(dict_list)
|
||||||
|
dhf = dict_hash_crc(unique_dicts)
|
||||||
|
print(f"Unique dicts: {dump_data(unique_dicts)} [{dhf}]")
|
||||||
|
|
||||||
|
dict_list = [
|
||||||
|
{"a": 1, 1: "one"},
|
||||||
|
{1: "one", "a": 1},
|
||||||
|
{"a": 2, 1: "one"}
|
||||||
|
]
|
||||||
|
unique_dicts = make_unique_list_of_dicts(dict_list)
|
||||||
|
dhf = dict_hash_crc(unique_dicts)
|
||||||
|
print(f"Unique dicts: {dump_data(unique_dicts)} [{dhf}]")
|
||||||
|
|
||||||
|
dict_list = [
|
||||||
|
{"a": 1, "b": [1, 2, 3]},
|
||||||
|
{"b": [1, 2, 3], "a": 1},
|
||||||
|
{"a": 1, "b": [1, 2, 4]},
|
||||||
|
1, 2, "String", 1, "Foobar"
|
||||||
|
]
|
||||||
|
unique_dicts = make_unique_list_of_dicts(dict_list)
|
||||||
|
dhf = dict_hash_crc(unique_dicts)
|
||||||
|
print(f"Unique dicts: {dump_data(unique_dicts)} [{dhf}]")
|
||||||
|
|
||||||
|
dict_list: list[Any] = [
|
||||||
|
[],
|
||||||
|
{},
|
||||||
|
[],
|
||||||
|
{},
|
||||||
|
{"a": []},
|
||||||
|
{"a": []},
|
||||||
|
{"a": {}},
|
||||||
|
{"a": {}},
|
||||||
|
]
|
||||||
|
unique_dicts = make_unique_list_of_dicts(dict_list)
|
||||||
|
dhf = dict_hash_crc(unique_dicts)
|
||||||
|
print(f"Unique dicts: {dump_data(unique_dicts)} [{dhf}]")
|
||||||
|
|
||||||
|
dict_list: list[Any] = [
|
||||||
|
(1, 2),
|
||||||
|
(1, 2),
|
||||||
|
(2, 3),
|
||||||
|
]
|
||||||
|
unique_dicts = make_unique_list_of_dicts(dict_list)
|
||||||
|
dhf = dict_hash_crc(unique_dicts)
|
||||||
|
print(f"Unique dicts: {dump_data(unique_dicts)} [{dhf}]")
|
||||||
|
|
||||||
|
|
||||||
def main():
|
def main():
|
||||||
|
"""List helpers test runner"""
|
||||||
__test_is_list_in_list_a()
|
__test_is_list_in_list_a()
|
||||||
__convert_list()
|
__convert_list()
|
||||||
|
__make_unique_list_of_dicts()
|
||||||
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
if __name__ == "__main__":
|
||||||
|
|||||||
@@ -27,7 +27,8 @@ def main():
|
|||||||
"per_run_log": True,
|
"per_run_log": True,
|
||||||
# "console_format_type": ConsoleFormatSettings.NONE,
|
# "console_format_type": ConsoleFormatSettings.NONE,
|
||||||
# "console_format_type": ConsoleFormatSettings.MINIMAL,
|
# "console_format_type": ConsoleFormatSettings.MINIMAL,
|
||||||
"console_format_type": ConsoleFormat.TIME_MICROSECONDS | ConsoleFormat.NAME | ConsoleFormat.LEVEL,
|
# "console_format_type": ConsoleFormat.TIME_MICROSECONDS | ConsoleFormat.NAME | ConsoleFormat.LEVEL,
|
||||||
|
"console_format_type": None,
|
||||||
# "console_format_type": ConsoleFormat.NAME,
|
# "console_format_type": ConsoleFormat.NAME,
|
||||||
# "console_format_type": (
|
# "console_format_type": (
|
||||||
# ConsoleFormat.TIME | ConsoleFormat.TIMEZONE | ConsoleFormat.LINENO | ConsoleFormat.LEVEL
|
# ConsoleFormat.TIME | ConsoleFormat.TIMEZONE | ConsoleFormat.LINENO | ConsoleFormat.LEVEL
|
||||||
@@ -108,13 +109,31 @@ def main():
|
|||||||
log.set_log_level(Log.CONSOLE_HANDLER, LoggingLevel.ERROR)
|
log.set_log_level(Log.CONSOLE_HANDLER, LoggingLevel.ERROR)
|
||||||
log.logger.warning('[NORMAL] Invisible Warning test: %s', log.logger.name)
|
log.logger.warning('[NORMAL] Invisible Warning test: %s', log.logger.name)
|
||||||
log.logger.error('[NORMAL] Visible Error test: %s', log.logger.name)
|
log.logger.error('[NORMAL] Visible Error test: %s', log.logger.name)
|
||||||
|
log.logger.debug('[NORMAL] Visible Debug test: %s', log.logger.name)
|
||||||
|
print(f"*** Any handler is minimum level ERROR: {log.any_handler_is_minimum_level(LoggingLevel.ERROR)}")
|
||||||
|
print(f"*** Any handler is minimum level DEBUG: {log.any_handler_is_minimum_level(LoggingLevel.DEBUG)}")
|
||||||
|
for handler in log.handlers.values():
|
||||||
|
print(
|
||||||
|
f"*** Setting handler {handler} is level {LoggingLevel.from_any(handler.level).name} -> "
|
||||||
|
f"*** INC {LoggingLevel.from_any(handler.level).includes(LoggingLevel.DEBUG)}")
|
||||||
|
|
||||||
|
print(f"*** WARNING includes ERROR: {LoggingLevel.WARNING.includes(LoggingLevel.ERROR)}")
|
||||||
|
print(f"*** ERROR includes WARNING: {LoggingLevel.ERROR.includes(LoggingLevel.WARNING)}")
|
||||||
|
|
||||||
log.set_log_level(Log.CONSOLE_HANDLER, LoggingLevel.DEBUG)
|
log.set_log_level(Log.CONSOLE_HANDLER, LoggingLevel.DEBUG)
|
||||||
log.debug('Current logging format: %s', log.log_settings['console_format_type'])
|
log.debug('Current logging format: %s', log.log_settings['console_format_type'])
|
||||||
|
log.debug('Current console formatter: %s', log.get_console_formatter())
|
||||||
log.update_console_formatter(ConsoleFormat.TIME | ConsoleFormat.LINENO)
|
log.update_console_formatter(ConsoleFormat.TIME | ConsoleFormat.LINENO)
|
||||||
log.info('Does hit show less')
|
log.info('Does hit show less A')
|
||||||
|
log.debug('Current console formatter after A: %s', log.get_console_formatter())
|
||||||
log.update_console_formatter(ConsoleFormat.TIME | ConsoleFormat.LINENO)
|
log.update_console_formatter(ConsoleFormat.TIME | ConsoleFormat.LINENO)
|
||||||
log.info('Does hit show less B')
|
log.info('Does hit show less B')
|
||||||
|
log.debug('Current console formatter after B: %s', log.get_console_formatter())
|
||||||
|
log.update_console_formatter(ConsoleFormatSettings.ALL)
|
||||||
|
log.info('Does hit show less C')
|
||||||
|
log.debug('Current console formatter after C: %s', log.get_console_formatter())
|
||||||
|
print(f"*** Any handler is minimum level ERROR: {log.any_handler_is_minimum_level(LoggingLevel.ERROR)}")
|
||||||
|
print(f"*** Any handler is minimum level DEBUG: {log.any_handler_is_minimum_level(LoggingLevel.DEBUG)}")
|
||||||
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
if __name__ == "__main__":
|
||||||
|
|||||||
@@ -8,10 +8,21 @@ import re
|
|||||||
import pytest
|
import pytest
|
||||||
from corelibs.check_handling.regex_constants import (
|
from corelibs.check_handling.regex_constants import (
|
||||||
compile_re,
|
compile_re,
|
||||||
|
SUB_EMAIL_BASIC_REGEX,
|
||||||
EMAIL_BASIC_REGEX,
|
EMAIL_BASIC_REGEX,
|
||||||
|
NAME_EMAIL_SIMPLE_REGEX,
|
||||||
|
NAME_EMAIL_BASIC_REGEX,
|
||||||
DOMAIN_WITH_LOCALHOST_REGEX,
|
DOMAIN_WITH_LOCALHOST_REGEX,
|
||||||
DOMAIN_WITH_LOCALHOST_PORT_REGEX,
|
DOMAIN_WITH_LOCALHOST_PORT_REGEX,
|
||||||
DOMAIN_REGEX,
|
DOMAIN_REGEX
|
||||||
|
)
|
||||||
|
from corelibs.check_handling.regex_constants_compiled import (
|
||||||
|
COMPILED_EMAIL_BASIC_REGEX,
|
||||||
|
COMPILED_NAME_EMAIL_SIMPLE_REGEX,
|
||||||
|
COMPILED_NAME_EMAIL_BASIC_REGEX,
|
||||||
|
COMPILED_DOMAIN_WITH_LOCALHOST_REGEX,
|
||||||
|
COMPILED_DOMAIN_WITH_LOCALHOST_PORT_REGEX,
|
||||||
|
COMPILED_DOMAIN_REGEX,
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
@@ -48,7 +59,7 @@ class TestEmailBasicRegex:
|
|||||||
@pytest.fixture
|
@pytest.fixture
|
||||||
def email_pattern(self) -> re.Pattern[str]:
|
def email_pattern(self) -> re.Pattern[str]:
|
||||||
"""Fixture that returns compiled email regex pattern."""
|
"""Fixture that returns compiled email regex pattern."""
|
||||||
return compile_re(EMAIL_BASIC_REGEX)
|
return COMPILED_EMAIL_BASIC_REGEX
|
||||||
|
|
||||||
@pytest.mark.parametrize("valid_email", [
|
@pytest.mark.parametrize("valid_email", [
|
||||||
"user@example.com",
|
"user@example.com",
|
||||||
@@ -123,13 +134,272 @@ class TestEmailBasicRegex:
|
|||||||
assert not email_pattern.match(email)
|
assert not email_pattern.match(email)
|
||||||
|
|
||||||
|
|
||||||
|
class TestSubEmailBasicRegex:
|
||||||
|
"""Test cases for SUB_EMAIL_BASIC_REGEX pattern (without anchors)."""
|
||||||
|
|
||||||
|
@pytest.fixture
|
||||||
|
def sub_email_pattern(self) -> re.Pattern[str]:
|
||||||
|
"""Fixture that returns compiled sub email regex pattern."""
|
||||||
|
return compile_re(rf"^{SUB_EMAIL_BASIC_REGEX}$")
|
||||||
|
|
||||||
|
@pytest.mark.parametrize("valid_email", [
|
||||||
|
"user@example.com",
|
||||||
|
"test.user@example.com",
|
||||||
|
"user+tag@example.co.uk",
|
||||||
|
"first.last@subdomain.example.com",
|
||||||
|
"user123@test-domain.com",
|
||||||
|
"a@example.com",
|
||||||
|
"user_name@example.com",
|
||||||
|
"user-name@example.com",
|
||||||
|
"user@sub.domain.example.com",
|
||||||
|
"test!#$%&'*+-/=?^_`{|}~@example.com",
|
||||||
|
"1234567890@example.com",
|
||||||
|
])
|
||||||
|
def test_valid_emails_match(self, sub_email_pattern: re.Pattern[str], valid_email: str) -> None:
|
||||||
|
"""Test that valid email addresses match SUB_EMAIL_BASIC_REGEX."""
|
||||||
|
assert sub_email_pattern.match(valid_email), (
|
||||||
|
f"Failed to match valid email: {valid_email}"
|
||||||
|
)
|
||||||
|
|
||||||
|
@pytest.mark.parametrize("invalid_email", [
|
||||||
|
"",
|
||||||
|
"@example.com",
|
||||||
|
"user@",
|
||||||
|
"user",
|
||||||
|
"user@.com",
|
||||||
|
"user@domain",
|
||||||
|
"user @example.com",
|
||||||
|
".user@example.com",
|
||||||
|
"user@-example.com",
|
||||||
|
"user@example-.com",
|
||||||
|
"user@example.c",
|
||||||
|
"user@example.toolong",
|
||||||
|
])
|
||||||
|
def test_invalid_emails_no_match(self, sub_email_pattern: re.Pattern[str], invalid_email: str) -> None:
|
||||||
|
"""Test that invalid emails don't match SUB_EMAIL_BASIC_REGEX."""
|
||||||
|
assert not sub_email_pattern.match(invalid_email), (
|
||||||
|
f"Incorrectly matched invalid email: {invalid_email}"
|
||||||
|
)
|
||||||
|
|
||||||
|
def test_sub_email_max_local_part_length(self, sub_email_pattern: re.Pattern[str]) -> None:
|
||||||
|
"""Test email with maximum local part length (64 characters)."""
|
||||||
|
local_part = "a" * 64
|
||||||
|
email = f"{local_part}@example.com"
|
||||||
|
assert sub_email_pattern.match(email)
|
||||||
|
|
||||||
|
def test_sub_email_exceeds_local_part_length(self, sub_email_pattern: re.Pattern[str]) -> None:
|
||||||
|
"""Test email exceeding maximum local part length."""
|
||||||
|
local_part = "a" * 65
|
||||||
|
email = f"{local_part}@example.com"
|
||||||
|
assert not sub_email_pattern.match(email)
|
||||||
|
|
||||||
|
|
||||||
|
class TestNameEmailSimpleRegex:
|
||||||
|
"""Test cases for NAME_EMAIL_SIMPLE_REGEX pattern."""
|
||||||
|
|
||||||
|
@pytest.fixture
|
||||||
|
def name_email_simple_pattern(self) -> re.Pattern[str]:
|
||||||
|
"""Fixture that returns compiled name+email simple regex pattern."""
|
||||||
|
return COMPILED_NAME_EMAIL_SIMPLE_REGEX
|
||||||
|
|
||||||
|
@pytest.mark.parametrize("test_input,expected_groups", [
|
||||||
|
('"John Doe" <john@example.com>', {'name1': 'John Doe', 'email1': 'john@example.com'}),
|
||||||
|
('John Doe <john@example.com>', {'name2': 'John Doe', 'email2': 'john@example.com'}),
|
||||||
|
('<john@example.com>', {'email3': 'john@example.com'}),
|
||||||
|
('john@example.com', {'email4': 'john@example.com'}),
|
||||||
|
(' "Jane Smith" <jane@test.com> ', {'name1': 'Jane Smith', 'email1': 'jane@test.com'}),
|
||||||
|
('Bob <bob@test.org>', {'name2': 'Bob', 'email2': 'bob@test.org'}),
|
||||||
|
])
|
||||||
|
def test_valid_name_email_combinations(
|
||||||
|
self, name_email_simple_pattern: re.Pattern[str], test_input: str, expected_groups: dict[str, str]
|
||||||
|
) -> None:
|
||||||
|
"""Test that valid name+email combinations match and extract correct groups."""
|
||||||
|
match = name_email_simple_pattern.match(test_input)
|
||||||
|
assert match is not None, f"Failed to match: {test_input}"
|
||||||
|
|
||||||
|
# Check that expected groups are present and match
|
||||||
|
for group_name, expected_value in expected_groups.items():
|
||||||
|
assert match.group(group_name) == expected_value, (
|
||||||
|
f"Group {group_name} expected '{expected_value}', got '{match.group(group_name)}'"
|
||||||
|
)
|
||||||
|
|
||||||
|
@pytest.mark.parametrize("invalid_input", [
|
||||||
|
"",
|
||||||
|
"not an email",
|
||||||
|
"<>",
|
||||||
|
'"Name Only"',
|
||||||
|
'Name <',
|
||||||
|
'<email',
|
||||||
|
'Name <<email@test.com>>',
|
||||||
|
'Name <email@test.com',
|
||||||
|
'Name email@test.com>',
|
||||||
|
])
|
||||||
|
def test_invalid_name_email_combinations(
|
||||||
|
self, name_email_simple_pattern: re.Pattern[str], invalid_input: str
|
||||||
|
) -> None:
|
||||||
|
"""Test that invalid inputs don't match NAME_EMAIL_SIMPLE_REGEX."""
|
||||||
|
assert not name_email_simple_pattern.match(invalid_input), (
|
||||||
|
f"Incorrectly matched invalid input: {invalid_input}"
|
||||||
|
)
|
||||||
|
|
||||||
|
def test_extract_name_from_quoted(
|
||||||
|
self, name_email_simple_pattern: re.Pattern[str]
|
||||||
|
) -> None:
|
||||||
|
"""Test extracting name from quoted format."""
|
||||||
|
match = name_email_simple_pattern.match('"Alice Wonder" <alice@example.com>')
|
||||||
|
assert match is not None
|
||||||
|
assert match.group('name1') == 'Alice Wonder'
|
||||||
|
assert match.group('email1') == 'alice@example.com'
|
||||||
|
|
||||||
|
def test_extract_name_from_unquoted(
|
||||||
|
self, name_email_simple_pattern: re.Pattern[str]
|
||||||
|
) -> None:
|
||||||
|
"""Test extracting name from unquoted format."""
|
||||||
|
match = name_email_simple_pattern.match('Bob Builder <bob@example.com>')
|
||||||
|
assert match is not None
|
||||||
|
assert match.group('name2') == 'Bob Builder'
|
||||||
|
assert match.group('email2') == 'bob@example.com'
|
||||||
|
|
||||||
|
def test_email_only_in_brackets(
|
||||||
|
self, name_email_simple_pattern: re.Pattern[str]
|
||||||
|
) -> None:
|
||||||
|
"""Test email-only format in angle brackets."""
|
||||||
|
match = name_email_simple_pattern.match('<charlie@example.com>')
|
||||||
|
assert match is not None
|
||||||
|
assert match.group('email3') == 'charlie@example.com'
|
||||||
|
|
||||||
|
def test_email_only_plain(
|
||||||
|
self, name_email_simple_pattern: re.Pattern[str]
|
||||||
|
) -> None:
|
||||||
|
"""Test plain email format without brackets."""
|
||||||
|
match = name_email_simple_pattern.match('dave@example.com')
|
||||||
|
assert match is not None
|
||||||
|
assert match.group('email4') == 'dave@example.com'
|
||||||
|
|
||||||
|
def test_whitespace_handling(
|
||||||
|
self, name_email_simple_pattern: re.Pattern[str]
|
||||||
|
) -> None:
|
||||||
|
"""Test that leading/trailing whitespace is handled correctly."""
|
||||||
|
match = name_email_simple_pattern.match(' "User Name" <user@example.com> ')
|
||||||
|
assert match is not None
|
||||||
|
assert match.group('name1') == 'User Name'
|
||||||
|
assert match.group('email1') == 'user@example.com'
|
||||||
|
|
||||||
|
|
||||||
|
class TestNameEmailBasicRegex:
|
||||||
|
"""Test cases for NAME_EMAIL_BASIC_REGEX pattern with strict email validation."""
|
||||||
|
|
||||||
|
@pytest.fixture
|
||||||
|
def name_email_basic_pattern(self) -> re.Pattern[str]:
|
||||||
|
"""Fixture that returns compiled name+email basic regex pattern."""
|
||||||
|
return COMPILED_NAME_EMAIL_BASIC_REGEX
|
||||||
|
|
||||||
|
@pytest.mark.parametrize("test_input,expected_name,expected_email", [
|
||||||
|
('"John Doe" <john@example.com>', 'John Doe', 'john@example.com'),
|
||||||
|
('John Doe <john@example.com>', 'John Doe', 'john@example.com'),
|
||||||
|
('<john@example.com>', None, 'john@example.com'),
|
||||||
|
('john@example.com', None, 'john@example.com'),
|
||||||
|
(' "Jane Smith" <jane.smith@test.co.uk> ', 'Jane Smith', 'jane.smith@test.co.uk'),
|
||||||
|
('Alice Wonder <alice+tag@example.com>', 'Alice Wonder', 'alice+tag@example.com'),
|
||||||
|
])
|
||||||
|
def test_valid_name_email_with_validation(
|
||||||
|
self,
|
||||||
|
name_email_basic_pattern: re.Pattern[str],
|
||||||
|
test_input: str,
|
||||||
|
expected_name: str | None,
|
||||||
|
expected_email: str,
|
||||||
|
) -> None:
|
||||||
|
"""Test valid name+email with strict email validation."""
|
||||||
|
match = name_email_basic_pattern.match(test_input)
|
||||||
|
assert match is not None, f"Failed to match: {test_input}"
|
||||||
|
|
||||||
|
# Extract name and email from whichever group matched
|
||||||
|
name = match.group('name1') or match.group('name2')
|
||||||
|
email = (
|
||||||
|
match.group('email1') or match.group('email2') or
|
||||||
|
match.group('email3') or match.group('email4')
|
||||||
|
)
|
||||||
|
|
||||||
|
assert name == expected_name, f"Expected name '{expected_name}', got '{name}'"
|
||||||
|
assert email == expected_email, f"Expected email '{expected_email}', got '{email}'"
|
||||||
|
|
||||||
|
@pytest.mark.parametrize("invalid_input", [
|
||||||
|
'"John Doe" <invalid.email>', # invalid email format
|
||||||
|
'John Doe <@example.com>', # missing local part
|
||||||
|
'<user@>', # missing domain
|
||||||
|
'user@domain', # no TLD
|
||||||
|
'"Name" <user @example.com>', # space in email
|
||||||
|
'<.user@example.com>', # starts with dot
|
||||||
|
'user@-example.com', # domain starts with hyphen
|
||||||
|
'Name <user@example.c>', # TLD too short
|
||||||
|
'Name <user@example.toolongdomain>', # TLD too long
|
||||||
|
])
|
||||||
|
def test_invalid_email_format_rejected(
|
||||||
|
self, name_email_basic_pattern: re.Pattern[str], invalid_input: str
|
||||||
|
) -> None:
|
||||||
|
"""Test that inputs with invalid email formats are rejected."""
|
||||||
|
assert not name_email_basic_pattern.match(invalid_input), (
|
||||||
|
f"Incorrectly matched invalid input: {invalid_input}"
|
||||||
|
)
|
||||||
|
|
||||||
|
def test_quoted_name_with_valid_email(
|
||||||
|
self, name_email_basic_pattern: re.Pattern[str]
|
||||||
|
) -> None:
|
||||||
|
"""Test quoted name format with valid email."""
|
||||||
|
match = name_email_basic_pattern.match('"Alice Wonder" <alice@example.com>')
|
||||||
|
assert match is not None
|
||||||
|
assert match.group('name1') == 'Alice Wonder'
|
||||||
|
assert match.group('email1') == 'alice@example.com'
|
||||||
|
|
||||||
|
def test_unquoted_name_with_valid_email(
|
||||||
|
self, name_email_basic_pattern: re.Pattern[str]
|
||||||
|
) -> None:
|
||||||
|
"""Test unquoted name format with valid email."""
|
||||||
|
match = name_email_basic_pattern.match('Bob Builder <bob@example.com>')
|
||||||
|
assert match is not None
|
||||||
|
assert match.group('name2') == 'Bob Builder'
|
||||||
|
assert match.group('email2') == 'bob@example.com'
|
||||||
|
|
||||||
|
def test_email_only_formats(
|
||||||
|
self, name_email_basic_pattern: re.Pattern[str]
|
||||||
|
) -> None:
|
||||||
|
"""Test email-only formats (with and without brackets)."""
|
||||||
|
# With brackets
|
||||||
|
match1 = name_email_basic_pattern.match('<charlie@example.com>')
|
||||||
|
assert match1 is not None
|
||||||
|
assert match1.group('email3') == 'charlie@example.com'
|
||||||
|
|
||||||
|
# Without brackets
|
||||||
|
match2 = name_email_basic_pattern.match('dave@example.com')
|
||||||
|
assert match2 is not None
|
||||||
|
assert match2.group('email4') == 'dave@example.com'
|
||||||
|
|
||||||
|
def test_whitespace_handling(
|
||||||
|
self, name_email_basic_pattern: re.Pattern[str]
|
||||||
|
) -> None:
|
||||||
|
"""Test that leading/trailing whitespace is handled correctly."""
|
||||||
|
match = name_email_basic_pattern.match(' "User" <user@example.com> ')
|
||||||
|
assert match is not None
|
||||||
|
assert match.group('name1') == 'User'
|
||||||
|
assert match.group('email1') == 'user@example.com'
|
||||||
|
|
||||||
|
def test_special_characters_in_local_part(
|
||||||
|
self, name_email_basic_pattern: re.Pattern[str]
|
||||||
|
) -> None:
|
||||||
|
"""Test email with special characters in local part."""
|
||||||
|
match = name_email_basic_pattern.match('Test User <test!#$%&\'*+-/=?^_`{|}~@example.com>')
|
||||||
|
assert match is not None
|
||||||
|
assert match.group('name2') == 'Test User'
|
||||||
|
assert match.group('email2') == 'test!#$%&\'*+-/=?^_`{|}~@example.com'
|
||||||
|
|
||||||
|
|
||||||
class TestDomainWithLocalhostRegex:
|
class TestDomainWithLocalhostRegex:
|
||||||
"""Test cases for DOMAIN_WITH_LOCALHOST_REGEX pattern."""
|
"""Test cases for DOMAIN_WITH_LOCALHOST_REGEX pattern."""
|
||||||
|
|
||||||
@pytest.fixture
|
@pytest.fixture
|
||||||
def domain_localhost_pattern(self) -> re.Pattern[str]:
|
def domain_localhost_pattern(self) -> re.Pattern[str]:
|
||||||
"""Fixture that returns compiled domain with localhost regex pattern."""
|
"""Fixture that returns compiled domain with localhost regex pattern."""
|
||||||
return compile_re(DOMAIN_WITH_LOCALHOST_REGEX)
|
return COMPILED_DOMAIN_WITH_LOCALHOST_REGEX
|
||||||
|
|
||||||
@pytest.mark.parametrize("valid_domain", [
|
@pytest.mark.parametrize("valid_domain", [
|
||||||
"localhost",
|
"localhost",
|
||||||
@@ -181,7 +451,7 @@ class TestDomainWithLocalhostPortRegex:
|
|||||||
@pytest.fixture
|
@pytest.fixture
|
||||||
def domain_localhost_port_pattern(self) -> re.Pattern[str]:
|
def domain_localhost_port_pattern(self) -> re.Pattern[str]:
|
||||||
"""Fixture that returns compiled domain and localhost with port pattern."""
|
"""Fixture that returns compiled domain and localhost with port pattern."""
|
||||||
return compile_re(DOMAIN_WITH_LOCALHOST_PORT_REGEX)
|
return COMPILED_DOMAIN_WITH_LOCALHOST_PORT_REGEX
|
||||||
|
|
||||||
@pytest.mark.parametrize("valid_domain", [
|
@pytest.mark.parametrize("valid_domain", [
|
||||||
"localhost",
|
"localhost",
|
||||||
@@ -247,7 +517,7 @@ class TestDomainRegex:
|
|||||||
@pytest.fixture
|
@pytest.fixture
|
||||||
def domain_pattern(self) -> re.Pattern[str]:
|
def domain_pattern(self) -> re.Pattern[str]:
|
||||||
"""Fixture that returns compiled domain regex pattern."""
|
"""Fixture that returns compiled domain regex pattern."""
|
||||||
return compile_re(DOMAIN_REGEX)
|
return COMPILED_DOMAIN_REGEX
|
||||||
|
|
||||||
@pytest.mark.parametrize("valid_domain", [
|
@pytest.mark.parametrize("valid_domain", [
|
||||||
"example.com",
|
"example.com",
|
||||||
@@ -306,6 +576,8 @@ class TestRegexPatternConsistency:
|
|||||||
"""Test that all regex patterns can be compiled without errors."""
|
"""Test that all regex patterns can be compiled without errors."""
|
||||||
patterns = [
|
patterns = [
|
||||||
EMAIL_BASIC_REGEX,
|
EMAIL_BASIC_REGEX,
|
||||||
|
NAME_EMAIL_SIMPLE_REGEX,
|
||||||
|
NAME_EMAIL_BASIC_REGEX,
|
||||||
DOMAIN_WITH_LOCALHOST_REGEX,
|
DOMAIN_WITH_LOCALHOST_REGEX,
|
||||||
DOMAIN_WITH_LOCALHOST_PORT_REGEX,
|
DOMAIN_WITH_LOCALHOST_PORT_REGEX,
|
||||||
DOMAIN_REGEX,
|
DOMAIN_REGEX,
|
||||||
@@ -314,9 +586,24 @@ class TestRegexPatternConsistency:
|
|||||||
compiled = compile_re(pattern)
|
compiled = compile_re(pattern)
|
||||||
assert isinstance(compiled, re.Pattern)
|
assert isinstance(compiled, re.Pattern)
|
||||||
|
|
||||||
|
def test_compiled_patterns_are_patterns(self) -> None:
|
||||||
|
"""Test that all COMPILED_ constants are Pattern objects."""
|
||||||
|
compiled_patterns = [
|
||||||
|
COMPILED_EMAIL_BASIC_REGEX,
|
||||||
|
COMPILED_NAME_EMAIL_SIMPLE_REGEX,
|
||||||
|
COMPILED_NAME_EMAIL_BASIC_REGEX,
|
||||||
|
COMPILED_DOMAIN_WITH_LOCALHOST_REGEX,
|
||||||
|
COMPILED_DOMAIN_WITH_LOCALHOST_PORT_REGEX,
|
||||||
|
COMPILED_DOMAIN_REGEX,
|
||||||
|
]
|
||||||
|
for pattern in compiled_patterns:
|
||||||
|
assert isinstance(pattern, re.Pattern)
|
||||||
|
|
||||||
def test_domain_patterns_are_strings(self) -> None:
|
def test_domain_patterns_are_strings(self) -> None:
|
||||||
"""Test that all regex constants are strings."""
|
"""Test that all regex constants are strings."""
|
||||||
assert isinstance(EMAIL_BASIC_REGEX, str)
|
assert isinstance(EMAIL_BASIC_REGEX, str)
|
||||||
|
assert isinstance(NAME_EMAIL_SIMPLE_REGEX, str)
|
||||||
|
assert isinstance(NAME_EMAIL_BASIC_REGEX, str)
|
||||||
assert isinstance(DOMAIN_WITH_LOCALHOST_REGEX, str)
|
assert isinstance(DOMAIN_WITH_LOCALHOST_REGEX, str)
|
||||||
assert isinstance(DOMAIN_WITH_LOCALHOST_PORT_REGEX, str)
|
assert isinstance(DOMAIN_WITH_LOCALHOST_PORT_REGEX, str)
|
||||||
assert isinstance(DOMAIN_REGEX, str)
|
assert isinstance(DOMAIN_REGEX, str)
|
||||||
@@ -325,8 +612,8 @@ class TestRegexPatternConsistency:
|
|||||||
"""Test that domain patterns follow expected hierarchy."""
|
"""Test that domain patterns follow expected hierarchy."""
|
||||||
# DOMAIN_WITH_LOCALHOST_PORT_REGEX should accept everything
|
# DOMAIN_WITH_LOCALHOST_PORT_REGEX should accept everything
|
||||||
# DOMAIN_WITH_LOCALHOST_REGEX accepts
|
# DOMAIN_WITH_LOCALHOST_REGEX accepts
|
||||||
domain_localhost = compile_re(DOMAIN_WITH_LOCALHOST_REGEX)
|
domain_localhost = COMPILED_DOMAIN_WITH_LOCALHOST_REGEX
|
||||||
domain_localhost_port = compile_re(DOMAIN_WITH_LOCALHOST_PORT_REGEX)
|
domain_localhost_port = COMPILED_DOMAIN_WITH_LOCALHOST_PORT_REGEX
|
||||||
|
|
||||||
test_cases = ["example.com", "subdomain.example.com", "localhost"]
|
test_cases = ["example.com", "subdomain.example.com", "localhost"]
|
||||||
for test_case in test_cases:
|
for test_case in test_cases:
|
||||||
|
|||||||
@@ -16,7 +16,7 @@ class TestSettingsLoaderInit:
|
|||||||
|
|
||||||
def test_init_with_valid_config_file(self, tmp_path: Path):
|
def test_init_with_valid_config_file(self, tmp_path: Path):
|
||||||
"""Test initialization with a valid config file"""
|
"""Test initialization with a valid config file"""
|
||||||
config_file = tmp_path / "test.ini"
|
config_file = tmp_path.joinpath("test.ini")
|
||||||
config_file.write_text("[Section]\nkey=value\n")
|
config_file.write_text("[Section]\nkey=value\n")
|
||||||
|
|
||||||
loader = SettingsLoader(
|
loader = SettingsLoader(
|
||||||
@@ -35,7 +35,7 @@ class TestSettingsLoaderInit:
|
|||||||
|
|
||||||
def test_init_with_missing_config_file(self, tmp_path: Path):
|
def test_init_with_missing_config_file(self, tmp_path: Path):
|
||||||
"""Test initialization with missing config file"""
|
"""Test initialization with missing config file"""
|
||||||
config_file = tmp_path / "missing.ini"
|
config_file = tmp_path.joinpath("missing.ini")
|
||||||
|
|
||||||
loader = SettingsLoader(
|
loader = SettingsLoader(
|
||||||
args={},
|
args={},
|
||||||
@@ -60,7 +60,7 @@ class TestSettingsLoaderInit:
|
|||||||
|
|
||||||
def test_init_with_log(self, tmp_path: Path):
|
def test_init_with_log(self, tmp_path: Path):
|
||||||
"""Test initialization with Log object"""
|
"""Test initialization with Log object"""
|
||||||
config_file = tmp_path / "test.ini"
|
config_file = tmp_path.joinpath("test.ini")
|
||||||
config_file.write_text("[Section]\nkey=value\n")
|
config_file.write_text("[Section]\nkey=value\n")
|
||||||
mock_log = Mock(spec=Log)
|
mock_log = Mock(spec=Log)
|
||||||
|
|
||||||
@@ -80,7 +80,7 @@ class TestLoadSettings:
|
|||||||
|
|
||||||
def test_load_settings_basic(self, tmp_path: Path):
|
def test_load_settings_basic(self, tmp_path: Path):
|
||||||
"""Test loading basic settings without validation"""
|
"""Test loading basic settings without validation"""
|
||||||
config_file = tmp_path / "test.ini"
|
config_file = tmp_path.joinpath("test.ini")
|
||||||
config_file.write_text("[TestSection]\nkey1=value1\nkey2=value2\n")
|
config_file.write_text("[TestSection]\nkey1=value1\nkey2=value2\n")
|
||||||
|
|
||||||
loader = SettingsLoader(args={}, config_file=config_file)
|
loader = SettingsLoader(args={}, config_file=config_file)
|
||||||
@@ -90,7 +90,7 @@ class TestLoadSettings:
|
|||||||
|
|
||||||
def test_load_settings_with_missing_section(self, tmp_path: Path):
|
def test_load_settings_with_missing_section(self, tmp_path: Path):
|
||||||
"""Test loading settings with missing section"""
|
"""Test loading settings with missing section"""
|
||||||
config_file = tmp_path / "test.ini"
|
config_file = tmp_path.joinpath("test.ini")
|
||||||
config_file.write_text("[OtherSection]\nkey=value\n")
|
config_file.write_text("[OtherSection]\nkey=value\n")
|
||||||
|
|
||||||
loader = SettingsLoader(args={}, config_file=config_file)
|
loader = SettingsLoader(args={}, config_file=config_file)
|
||||||
@@ -100,7 +100,7 @@ class TestLoadSettings:
|
|||||||
|
|
||||||
def test_load_settings_allow_not_exist(self, tmp_path: Path):
|
def test_load_settings_allow_not_exist(self, tmp_path: Path):
|
||||||
"""Test loading settings with allow_not_exist flag"""
|
"""Test loading settings with allow_not_exist flag"""
|
||||||
config_file = tmp_path / "test.ini"
|
config_file = tmp_path.joinpath("test.ini")
|
||||||
config_file.write_text("[OtherSection]\nkey=value\n")
|
config_file.write_text("[OtherSection]\nkey=value\n")
|
||||||
|
|
||||||
loader = SettingsLoader(args={}, config_file=config_file)
|
loader = SettingsLoader(args={}, config_file=config_file)
|
||||||
@@ -110,7 +110,7 @@ class TestLoadSettings:
|
|||||||
|
|
||||||
def test_load_settings_mandatory_field_present(self, tmp_path: Path):
|
def test_load_settings_mandatory_field_present(self, tmp_path: Path):
|
||||||
"""Test mandatory field validation when field is present"""
|
"""Test mandatory field validation when field is present"""
|
||||||
config_file = tmp_path / "test.ini"
|
config_file = tmp_path.joinpath("test.ini")
|
||||||
config_file.write_text("[TestSection]\nrequired_field=value\n")
|
config_file.write_text("[TestSection]\nrequired_field=value\n")
|
||||||
|
|
||||||
loader = SettingsLoader(args={}, config_file=config_file)
|
loader = SettingsLoader(args={}, config_file=config_file)
|
||||||
@@ -123,7 +123,7 @@ class TestLoadSettings:
|
|||||||
|
|
||||||
def test_load_settings_mandatory_field_missing(self, tmp_path: Path):
|
def test_load_settings_mandatory_field_missing(self, tmp_path: Path):
|
||||||
"""Test mandatory field validation when field is missing"""
|
"""Test mandatory field validation when field is missing"""
|
||||||
config_file = tmp_path / "test.ini"
|
config_file = tmp_path.joinpath("test.ini")
|
||||||
config_file.write_text("[TestSection]\nother_field=value\n")
|
config_file.write_text("[TestSection]\nother_field=value\n")
|
||||||
|
|
||||||
loader = SettingsLoader(args={}, config_file=config_file)
|
loader = SettingsLoader(args={}, config_file=config_file)
|
||||||
@@ -136,7 +136,7 @@ class TestLoadSettings:
|
|||||||
|
|
||||||
def test_load_settings_mandatory_field_empty(self, tmp_path: Path):
|
def test_load_settings_mandatory_field_empty(self, tmp_path: Path):
|
||||||
"""Test mandatory field validation when field is empty"""
|
"""Test mandatory field validation when field is empty"""
|
||||||
config_file = tmp_path / "test.ini"
|
config_file = tmp_path.joinpath("test.ini")
|
||||||
config_file.write_text("[TestSection]\nrequired_field=\n")
|
config_file.write_text("[TestSection]\nrequired_field=\n")
|
||||||
|
|
||||||
loader = SettingsLoader(args={}, config_file=config_file)
|
loader = SettingsLoader(args={}, config_file=config_file)
|
||||||
@@ -149,7 +149,7 @@ class TestLoadSettings:
|
|||||||
|
|
||||||
def test_load_settings_with_split(self, tmp_path: Path):
|
def test_load_settings_with_split(self, tmp_path: Path):
|
||||||
"""Test splitting values into lists"""
|
"""Test splitting values into lists"""
|
||||||
config_file = tmp_path / "test.ini"
|
config_file = tmp_path.joinpath("test.ini")
|
||||||
config_file.write_text("[TestSection]\nlist_field=a,b,c,d\n")
|
config_file.write_text("[TestSection]\nlist_field=a,b,c,d\n")
|
||||||
|
|
||||||
loader = SettingsLoader(args={}, config_file=config_file)
|
loader = SettingsLoader(args={}, config_file=config_file)
|
||||||
@@ -162,7 +162,7 @@ class TestLoadSettings:
|
|||||||
|
|
||||||
def test_load_settings_with_custom_split_char(self, tmp_path: Path):
|
def test_load_settings_with_custom_split_char(self, tmp_path: Path):
|
||||||
"""Test splitting with custom delimiter"""
|
"""Test splitting with custom delimiter"""
|
||||||
config_file = tmp_path / "test.ini"
|
config_file = tmp_path.joinpath("test.ini")
|
||||||
config_file.write_text("[TestSection]\nlist_field=a|b|c|d\n")
|
config_file.write_text("[TestSection]\nlist_field=a|b|c|d\n")
|
||||||
|
|
||||||
loader = SettingsLoader(args={}, config_file=config_file)
|
loader = SettingsLoader(args={}, config_file=config_file)
|
||||||
@@ -175,7 +175,7 @@ class TestLoadSettings:
|
|||||||
|
|
||||||
def test_load_settings_split_removes_spaces(self, tmp_path: Path):
|
def test_load_settings_split_removes_spaces(self, tmp_path: Path):
|
||||||
"""Test that split removes spaces from values"""
|
"""Test that split removes spaces from values"""
|
||||||
config_file = tmp_path / "test.ini"
|
config_file = tmp_path.joinpath("test.ini")
|
||||||
config_file.write_text("[TestSection]\nlist_field=a, b , c , d\n")
|
config_file.write_text("[TestSection]\nlist_field=a, b , c , d\n")
|
||||||
|
|
||||||
loader = SettingsLoader(args={}, config_file=config_file)
|
loader = SettingsLoader(args={}, config_file=config_file)
|
||||||
@@ -188,7 +188,7 @@ class TestLoadSettings:
|
|||||||
|
|
||||||
def test_load_settings_empty_split_char_fallback(self, tmp_path: Path, capsys: CaptureFixture[str]):
|
def test_load_settings_empty_split_char_fallback(self, tmp_path: Path, capsys: CaptureFixture[str]):
|
||||||
"""Test fallback to default split char when empty"""
|
"""Test fallback to default split char when empty"""
|
||||||
config_file = tmp_path / "test.ini"
|
config_file = tmp_path.joinpath("test.ini")
|
||||||
config_file.write_text("[TestSection]\nlist_field=a,b,c\n")
|
config_file.write_text("[TestSection]\nlist_field=a,b,c\n")
|
||||||
|
|
||||||
loader = SettingsLoader(args={}, config_file=config_file)
|
loader = SettingsLoader(args={}, config_file=config_file)
|
||||||
@@ -201,9 +201,22 @@ class TestLoadSettings:
|
|||||||
captured = capsys.readouterr()
|
captured = capsys.readouterr()
|
||||||
assert "fallback to:" in captured.out
|
assert "fallback to:" in captured.out
|
||||||
|
|
||||||
|
def test_load_settings_split_empty_value(self, tmp_path: Path):
|
||||||
|
"""Test that split on empty value results in empty list"""
|
||||||
|
config_file = tmp_path.joinpath("test.ini")
|
||||||
|
config_file.write_text("[TestSection]\nlist_field=\n")
|
||||||
|
|
||||||
|
loader = SettingsLoader(args={}, config_file=config_file)
|
||||||
|
result = loader.load_settings(
|
||||||
|
"TestSection",
|
||||||
|
{"list_field": ["split:,"]}
|
||||||
|
)
|
||||||
|
|
||||||
|
assert result["list_field"] == []
|
||||||
|
|
||||||
def test_load_settings_convert_to_int(self, tmp_path: Path):
|
def test_load_settings_convert_to_int(self, tmp_path: Path):
|
||||||
"""Test converting values to int"""
|
"""Test converting values to int"""
|
||||||
config_file = tmp_path / "test.ini"
|
config_file = tmp_path.joinpath("test.ini")
|
||||||
config_file.write_text("[TestSection]\nnumber=123\n")
|
config_file.write_text("[TestSection]\nnumber=123\n")
|
||||||
|
|
||||||
loader = SettingsLoader(args={}, config_file=config_file)
|
loader = SettingsLoader(args={}, config_file=config_file)
|
||||||
@@ -217,7 +230,7 @@ class TestLoadSettings:
|
|||||||
|
|
||||||
def test_load_settings_convert_to_float(self, tmp_path: Path):
|
def test_load_settings_convert_to_float(self, tmp_path: Path):
|
||||||
"""Test converting values to float"""
|
"""Test converting values to float"""
|
||||||
config_file = tmp_path / "test.ini"
|
config_file = tmp_path.joinpath("test.ini")
|
||||||
config_file.write_text("[TestSection]\nnumber=123.45\n")
|
config_file.write_text("[TestSection]\nnumber=123.45\n")
|
||||||
|
|
||||||
loader = SettingsLoader(args={}, config_file=config_file)
|
loader = SettingsLoader(args={}, config_file=config_file)
|
||||||
@@ -231,7 +244,7 @@ class TestLoadSettings:
|
|||||||
|
|
||||||
def test_load_settings_convert_to_bool_true(self, tmp_path: Path):
|
def test_load_settings_convert_to_bool_true(self, tmp_path: Path):
|
||||||
"""Test converting values to boolean True"""
|
"""Test converting values to boolean True"""
|
||||||
config_file = tmp_path / "test.ini"
|
config_file = tmp_path.joinpath("test.ini")
|
||||||
config_file.write_text("[TestSection]\nflag1=true\nflag2=True\n")
|
config_file.write_text("[TestSection]\nflag1=true\nflag2=True\n")
|
||||||
|
|
||||||
loader = SettingsLoader(args={}, config_file=config_file)
|
loader = SettingsLoader(args={}, config_file=config_file)
|
||||||
@@ -245,7 +258,7 @@ class TestLoadSettings:
|
|||||||
|
|
||||||
def test_load_settings_convert_to_bool_false(self, tmp_path: Path):
|
def test_load_settings_convert_to_bool_false(self, tmp_path: Path):
|
||||||
"""Test converting values to boolean False"""
|
"""Test converting values to boolean False"""
|
||||||
config_file = tmp_path / "test.ini"
|
config_file = tmp_path.joinpath("test.ini")
|
||||||
config_file.write_text("[TestSection]\nflag1=false\nflag2=False\n")
|
config_file.write_text("[TestSection]\nflag1=false\nflag2=False\n")
|
||||||
|
|
||||||
loader = SettingsLoader(args={}, config_file=config_file)
|
loader = SettingsLoader(args={}, config_file=config_file)
|
||||||
@@ -259,7 +272,7 @@ class TestLoadSettings:
|
|||||||
|
|
||||||
def test_load_settings_convert_invalid_type(self, tmp_path: Path):
|
def test_load_settings_convert_invalid_type(self, tmp_path: Path):
|
||||||
"""Test converting with invalid type raises error"""
|
"""Test converting with invalid type raises error"""
|
||||||
config_file = tmp_path / "test.ini"
|
config_file = tmp_path.joinpath("test.ini")
|
||||||
config_file.write_text("[TestSection]\nvalue=test\n")
|
config_file.write_text("[TestSection]\nvalue=test\n")
|
||||||
|
|
||||||
loader = SettingsLoader(args={}, config_file=config_file)
|
loader = SettingsLoader(args={}, config_file=config_file)
|
||||||
@@ -272,7 +285,7 @@ class TestLoadSettings:
|
|||||||
|
|
||||||
def test_load_settings_empty_set_to_none(self, tmp_path: Path):
|
def test_load_settings_empty_set_to_none(self, tmp_path: Path):
|
||||||
"""Test setting empty values to None"""
|
"""Test setting empty values to None"""
|
||||||
config_file = tmp_path / "test.ini"
|
config_file = tmp_path.joinpath("test.ini")
|
||||||
config_file.write_text("[TestSection]\nother=value\n")
|
config_file.write_text("[TestSection]\nother=value\n")
|
||||||
|
|
||||||
loader = SettingsLoader(args={}, config_file=config_file)
|
loader = SettingsLoader(args={}, config_file=config_file)
|
||||||
@@ -285,7 +298,7 @@ class TestLoadSettings:
|
|||||||
|
|
||||||
def test_load_settings_empty_set_to_custom_value(self, tmp_path: Path):
|
def test_load_settings_empty_set_to_custom_value(self, tmp_path: Path):
|
||||||
"""Test setting empty values to custom value"""
|
"""Test setting empty values to custom value"""
|
||||||
config_file = tmp_path / "test.ini"
|
config_file = tmp_path.joinpath("test.ini")
|
||||||
config_file.write_text("[TestSection]\nother=value\n")
|
config_file.write_text("[TestSection]\nother=value\n")
|
||||||
|
|
||||||
loader = SettingsLoader(args={}, config_file=config_file)
|
loader = SettingsLoader(args={}, config_file=config_file)
|
||||||
@@ -298,7 +311,7 @@ class TestLoadSettings:
|
|||||||
|
|
||||||
def test_load_settings_matching_valid(self, tmp_path: Path):
|
def test_load_settings_matching_valid(self, tmp_path: Path):
|
||||||
"""Test matching validation with valid value"""
|
"""Test matching validation with valid value"""
|
||||||
config_file = tmp_path / "test.ini"
|
config_file = tmp_path.joinpath("test.ini")
|
||||||
config_file.write_text("[TestSection]\nmode=production\n")
|
config_file.write_text("[TestSection]\nmode=production\n")
|
||||||
|
|
||||||
loader = SettingsLoader(args={}, config_file=config_file)
|
loader = SettingsLoader(args={}, config_file=config_file)
|
||||||
@@ -311,7 +324,7 @@ class TestLoadSettings:
|
|||||||
|
|
||||||
def test_load_settings_matching_invalid(self, tmp_path: Path):
|
def test_load_settings_matching_invalid(self, tmp_path: Path):
|
||||||
"""Test matching validation with invalid value"""
|
"""Test matching validation with invalid value"""
|
||||||
config_file = tmp_path / "test.ini"
|
config_file = tmp_path.joinpath("test.ini")
|
||||||
config_file.write_text("[TestSection]\nmode=invalid\n")
|
config_file.write_text("[TestSection]\nmode=invalid\n")
|
||||||
|
|
||||||
loader = SettingsLoader(args={}, config_file=config_file)
|
loader = SettingsLoader(args={}, config_file=config_file)
|
||||||
@@ -324,7 +337,7 @@ class TestLoadSettings:
|
|||||||
|
|
||||||
def test_load_settings_in_valid(self, tmp_path: Path):
|
def test_load_settings_in_valid(self, tmp_path: Path):
|
||||||
"""Test 'in' validation with valid value"""
|
"""Test 'in' validation with valid value"""
|
||||||
config_file = tmp_path / "test.ini"
|
config_file = tmp_path.joinpath("test.ini")
|
||||||
config_file.write_text("[TestSection]\nallowed=a,b,c\nvalue=b\n")
|
config_file.write_text("[TestSection]\nallowed=a,b,c\nvalue=b\n")
|
||||||
|
|
||||||
loader = SettingsLoader(args={}, config_file=config_file)
|
loader = SettingsLoader(args={}, config_file=config_file)
|
||||||
@@ -340,7 +353,7 @@ class TestLoadSettings:
|
|||||||
|
|
||||||
def test_load_settings_in_invalid(self, tmp_path: Path):
|
def test_load_settings_in_invalid(self, tmp_path: Path):
|
||||||
"""Test 'in' validation with invalid value"""
|
"""Test 'in' validation with invalid value"""
|
||||||
config_file = tmp_path / "test.ini"
|
config_file = tmp_path.joinpath("test.ini")
|
||||||
config_file.write_text("[TestSection]\nallowed=a,b,c\nvalue=d\n")
|
config_file.write_text("[TestSection]\nallowed=a,b,c\nvalue=d\n")
|
||||||
|
|
||||||
loader = SettingsLoader(args={}, config_file=config_file)
|
loader = SettingsLoader(args={}, config_file=config_file)
|
||||||
@@ -356,7 +369,7 @@ class TestLoadSettings:
|
|||||||
|
|
||||||
def test_load_settings_in_missing_target(self, tmp_path: Path):
|
def test_load_settings_in_missing_target(self, tmp_path: Path):
|
||||||
"""Test 'in' validation with missing target"""
|
"""Test 'in' validation with missing target"""
|
||||||
config_file = tmp_path / "test.ini"
|
config_file = tmp_path.joinpath("test.ini")
|
||||||
config_file.write_text("[TestSection]\nvalue=a\n")
|
config_file.write_text("[TestSection]\nvalue=a\n")
|
||||||
|
|
||||||
loader = SettingsLoader(args={}, config_file=config_file)
|
loader = SettingsLoader(args={}, config_file=config_file)
|
||||||
@@ -369,7 +382,7 @@ class TestLoadSettings:
|
|||||||
|
|
||||||
def test_load_settings_length_exact(self, tmp_path: Path):
|
def test_load_settings_length_exact(self, tmp_path: Path):
|
||||||
"""Test length validation with exact match"""
|
"""Test length validation with exact match"""
|
||||||
config_file = tmp_path / "test.ini"
|
config_file = tmp_path.joinpath("test.ini")
|
||||||
config_file.write_text("[TestSection]\nvalue=test\n")
|
config_file.write_text("[TestSection]\nvalue=test\n")
|
||||||
|
|
||||||
loader = SettingsLoader(args={}, config_file=config_file)
|
loader = SettingsLoader(args={}, config_file=config_file)
|
||||||
@@ -382,7 +395,7 @@ class TestLoadSettings:
|
|||||||
|
|
||||||
def test_load_settings_length_exact_invalid(self, tmp_path: Path):
|
def test_load_settings_length_exact_invalid(self, tmp_path: Path):
|
||||||
"""Test length validation with exact match failure"""
|
"""Test length validation with exact match failure"""
|
||||||
config_file = tmp_path / "test.ini"
|
config_file = tmp_path.joinpath("test.ini")
|
||||||
config_file.write_text("[TestSection]\nvalue=test\n")
|
config_file.write_text("[TestSection]\nvalue=test\n")
|
||||||
|
|
||||||
loader = SettingsLoader(args={}, config_file=config_file)
|
loader = SettingsLoader(args={}, config_file=config_file)
|
||||||
@@ -395,7 +408,7 @@ class TestLoadSettings:
|
|||||||
|
|
||||||
def test_load_settings_length_range(self, tmp_path: Path):
|
def test_load_settings_length_range(self, tmp_path: Path):
|
||||||
"""Test length validation with range"""
|
"""Test length validation with range"""
|
||||||
config_file = tmp_path / "test.ini"
|
config_file = tmp_path.joinpath("test.ini")
|
||||||
config_file.write_text("[TestSection]\nvalue=testing\n")
|
config_file.write_text("[TestSection]\nvalue=testing\n")
|
||||||
|
|
||||||
loader = SettingsLoader(args={}, config_file=config_file)
|
loader = SettingsLoader(args={}, config_file=config_file)
|
||||||
@@ -408,7 +421,7 @@ class TestLoadSettings:
|
|||||||
|
|
||||||
def test_load_settings_length_min_only(self, tmp_path: Path):
|
def test_load_settings_length_min_only(self, tmp_path: Path):
|
||||||
"""Test length validation with minimum only"""
|
"""Test length validation with minimum only"""
|
||||||
config_file = tmp_path / "test.ini"
|
config_file = tmp_path.joinpath("test.ini")
|
||||||
config_file.write_text("[TestSection]\nvalue=testing\n")
|
config_file.write_text("[TestSection]\nvalue=testing\n")
|
||||||
|
|
||||||
loader = SettingsLoader(args={}, config_file=config_file)
|
loader = SettingsLoader(args={}, config_file=config_file)
|
||||||
@@ -421,7 +434,7 @@ class TestLoadSettings:
|
|||||||
|
|
||||||
def test_load_settings_length_max_only(self, tmp_path: Path):
|
def test_load_settings_length_max_only(self, tmp_path: Path):
|
||||||
"""Test length validation with maximum only"""
|
"""Test length validation with maximum only"""
|
||||||
config_file = tmp_path / "test.ini"
|
config_file = tmp_path.joinpath("test.ini")
|
||||||
config_file.write_text("[TestSection]\nvalue=test\n")
|
config_file.write_text("[TestSection]\nvalue=test\n")
|
||||||
|
|
||||||
loader = SettingsLoader(args={}, config_file=config_file)
|
loader = SettingsLoader(args={}, config_file=config_file)
|
||||||
@@ -434,7 +447,7 @@ class TestLoadSettings:
|
|||||||
|
|
||||||
def test_load_settings_range_valid(self, tmp_path: Path):
|
def test_load_settings_range_valid(self, tmp_path: Path):
|
||||||
"""Test range validation with valid value"""
|
"""Test range validation with valid value"""
|
||||||
config_file = tmp_path / "test.ini"
|
config_file = tmp_path.joinpath("test.ini")
|
||||||
config_file.write_text("[TestSection]\nnumber=25\n")
|
config_file.write_text("[TestSection]\nnumber=25\n")
|
||||||
|
|
||||||
loader = SettingsLoader(args={}, config_file=config_file)
|
loader = SettingsLoader(args={}, config_file=config_file)
|
||||||
@@ -447,7 +460,7 @@ class TestLoadSettings:
|
|||||||
|
|
||||||
def test_load_settings_range_invalid(self, tmp_path: Path):
|
def test_load_settings_range_invalid(self, tmp_path: Path):
|
||||||
"""Test range validation with invalid value"""
|
"""Test range validation with invalid value"""
|
||||||
config_file = tmp_path / "test.ini"
|
config_file = tmp_path.joinpath("test.ini")
|
||||||
config_file.write_text("[TestSection]\nnumber=100\n")
|
config_file.write_text("[TestSection]\nnumber=100\n")
|
||||||
|
|
||||||
loader = SettingsLoader(args={}, config_file=config_file)
|
loader = SettingsLoader(args={}, config_file=config_file)
|
||||||
@@ -460,7 +473,7 @@ class TestLoadSettings:
|
|||||||
|
|
||||||
def test_load_settings_check_int_valid(self, tmp_path: Path):
|
def test_load_settings_check_int_valid(self, tmp_path: Path):
|
||||||
"""Test check:int with valid integer"""
|
"""Test check:int with valid integer"""
|
||||||
config_file = tmp_path / "test.ini"
|
config_file = tmp_path.joinpath("test.ini")
|
||||||
config_file.write_text("[TestSection]\nnumber=12345\n")
|
config_file.write_text("[TestSection]\nnumber=12345\n")
|
||||||
|
|
||||||
loader = SettingsLoader(args={}, config_file=config_file)
|
loader = SettingsLoader(args={}, config_file=config_file)
|
||||||
@@ -473,7 +486,7 @@ class TestLoadSettings:
|
|||||||
|
|
||||||
def test_load_settings_check_int_cleanup(self, tmp_path: Path):
|
def test_load_settings_check_int_cleanup(self, tmp_path: Path):
|
||||||
"""Test check:int with cleanup"""
|
"""Test check:int with cleanup"""
|
||||||
config_file = tmp_path / "test.ini"
|
config_file = tmp_path.joinpath("test.ini")
|
||||||
config_file.write_text("[TestSection]\nnumber=12a34b5\n")
|
config_file.write_text("[TestSection]\nnumber=12a34b5\n")
|
||||||
|
|
||||||
loader = SettingsLoader(args={}, config_file=config_file)
|
loader = SettingsLoader(args={}, config_file=config_file)
|
||||||
@@ -486,7 +499,7 @@ class TestLoadSettings:
|
|||||||
|
|
||||||
def test_load_settings_check_email_valid(self, tmp_path: Path):
|
def test_load_settings_check_email_valid(self, tmp_path: Path):
|
||||||
"""Test check:string.email.basic with valid email"""
|
"""Test check:string.email.basic with valid email"""
|
||||||
config_file = tmp_path / "test.ini"
|
config_file = tmp_path.joinpath("test.ini")
|
||||||
config_file.write_text("[TestSection]\nemail=test@example.com\n")
|
config_file.write_text("[TestSection]\nemail=test@example.com\n")
|
||||||
|
|
||||||
loader = SettingsLoader(args={}, config_file=config_file)
|
loader = SettingsLoader(args={}, config_file=config_file)
|
||||||
@@ -499,7 +512,7 @@ class TestLoadSettings:
|
|||||||
|
|
||||||
def test_load_settings_check_email_invalid(self, tmp_path: Path):
|
def test_load_settings_check_email_invalid(self, tmp_path: Path):
|
||||||
"""Test check:string.email.basic with invalid email"""
|
"""Test check:string.email.basic with invalid email"""
|
||||||
config_file = tmp_path / "test.ini"
|
config_file = tmp_path.joinpath("test.ini")
|
||||||
config_file.write_text("[TestSection]\nemail=not-an-email\n")
|
config_file.write_text("[TestSection]\nemail=not-an-email\n")
|
||||||
|
|
||||||
loader = SettingsLoader(args={}, config_file=config_file)
|
loader = SettingsLoader(args={}, config_file=config_file)
|
||||||
@@ -512,7 +525,7 @@ class TestLoadSettings:
|
|||||||
|
|
||||||
def test_load_settings_args_override(self, tmp_path: Path, capsys: CaptureFixture[str]):
|
def test_load_settings_args_override(self, tmp_path: Path, capsys: CaptureFixture[str]):
|
||||||
"""Test command line arguments override config values"""
|
"""Test command line arguments override config values"""
|
||||||
config_file = tmp_path / "test.ini"
|
config_file = tmp_path.joinpath("test.ini")
|
||||||
config_file.write_text("[TestSection]\nvalue=config_value\n")
|
config_file.write_text("[TestSection]\nvalue=config_value\n")
|
||||||
|
|
||||||
loader = SettingsLoader(
|
loader = SettingsLoader(
|
||||||
@@ -528,9 +541,126 @@ class TestLoadSettings:
|
|||||||
captured = capsys.readouterr()
|
captured = capsys.readouterr()
|
||||||
assert "Command line option override" in captured.out
|
assert "Command line option override" in captured.out
|
||||||
|
|
||||||
|
def test_load_settings_args_no_flag(self, tmp_path: Path, capsys: CaptureFixture[str]):
|
||||||
|
"""Test default behavior (no args_override:yes) with list argument that has split"""
|
||||||
|
config_file = tmp_path.joinpath("test.ini")
|
||||||
|
config_file.write_text("[TestSection]\nvalue=a,b,c\n")
|
||||||
|
|
||||||
|
loader = SettingsLoader(
|
||||||
|
args={"value": ["x", "y", "z"]},
|
||||||
|
config_file=config_file
|
||||||
|
)
|
||||||
|
result = loader.load_settings(
|
||||||
|
"TestSection",
|
||||||
|
{"value": ["split:,"]}
|
||||||
|
)
|
||||||
|
|
||||||
|
# Without args_override:yes flag, should use config value (no override)
|
||||||
|
assert result["value"] == ["a", "b", "c"]
|
||||||
|
captured = capsys.readouterr()
|
||||||
|
# Message is printed but without args_override:yes flag, override doesn't happen
|
||||||
|
assert "Command line option override" in captured.out
|
||||||
|
|
||||||
|
def test_load_settings_args_list_no_split(self, tmp_path: Path, capsys: CaptureFixture[str]):
|
||||||
|
"""Test that list arguments without split entry are skipped"""
|
||||||
|
config_file = tmp_path.joinpath("test.ini")
|
||||||
|
config_file.write_text("[TestSection]\nvalue=config_value\n")
|
||||||
|
|
||||||
|
loader = SettingsLoader(
|
||||||
|
args={"value": ["arg1", "arg2", "arg3"]},
|
||||||
|
config_file=config_file
|
||||||
|
)
|
||||||
|
result = loader.load_settings(
|
||||||
|
"TestSection",
|
||||||
|
{"value": []}
|
||||||
|
)
|
||||||
|
|
||||||
|
# Should keep config value since args is list but no split defined
|
||||||
|
assert result["value"] == "config_value"
|
||||||
|
captured = capsys.readouterr()
|
||||||
|
# Message is printed but list without split prevents the override
|
||||||
|
assert "Command line option override" in captured.out
|
||||||
|
|
||||||
|
def test_load_settings_args_list_with_split(self, tmp_path: Path, capsys: CaptureFixture[str]):
|
||||||
|
"""Test that list arguments with split entry and args_override:yes are applied"""
|
||||||
|
config_file = tmp_path.joinpath("test.ini")
|
||||||
|
config_file.write_text("[TestSection]\nvalue=a,b,c\n")
|
||||||
|
|
||||||
|
loader = SettingsLoader(
|
||||||
|
args={"value": ["arg1", "arg2", "arg3"]},
|
||||||
|
config_file=config_file
|
||||||
|
)
|
||||||
|
result = loader.load_settings(
|
||||||
|
"TestSection",
|
||||||
|
{"value": ["split:,", "args_override:yes"]}
|
||||||
|
)
|
||||||
|
|
||||||
|
# Should use args value because split is defined AND args_override:yes is set
|
||||||
|
assert result["value"] == ["arg1", "arg2", "arg3"]
|
||||||
|
captured = capsys.readouterr()
|
||||||
|
assert "Command line option override" in captured.out
|
||||||
|
|
||||||
|
def test_load_settings_args_no_with_mandatory(self, tmp_path: Path, capsys: CaptureFixture[str]):
|
||||||
|
"""Test default behavior (no args_override:yes) with mandatory field and list args with split"""
|
||||||
|
config_file = tmp_path.joinpath("test.ini")
|
||||||
|
config_file.write_text("[TestSection]\nvalue=config1,config2\n")
|
||||||
|
|
||||||
|
loader = SettingsLoader(
|
||||||
|
args={"value": ["arg1", "arg2"]},
|
||||||
|
config_file=config_file
|
||||||
|
)
|
||||||
|
result = loader.load_settings(
|
||||||
|
"TestSection",
|
||||||
|
{"value": ["mandatory:yes", "split:,"]}
|
||||||
|
)
|
||||||
|
|
||||||
|
# Should use config value because args_override:yes is not set (default: no override)
|
||||||
|
assert result["value"] == ["config1", "config2"]
|
||||||
|
captured = capsys.readouterr()
|
||||||
|
# Message is printed but without args_override:yes flag, override doesn't happen
|
||||||
|
assert "Command line option override" in captured.out
|
||||||
|
|
||||||
|
def test_load_settings_args_no_with_mandatory_valid(self, tmp_path: Path, capsys: CaptureFixture[str]):
|
||||||
|
"""Test default behavior with string args (always overrides due to current logic)"""
|
||||||
|
config_file = tmp_path.joinpath("test.ini")
|
||||||
|
config_file.write_text("[TestSection]\nvalue=config_value\n")
|
||||||
|
|
||||||
|
loader = SettingsLoader(
|
||||||
|
args={"value": "arg_value"},
|
||||||
|
config_file=config_file
|
||||||
|
)
|
||||||
|
result = loader.load_settings(
|
||||||
|
"TestSection",
|
||||||
|
{"value": ["mandatory:yes"]}
|
||||||
|
)
|
||||||
|
|
||||||
|
# Current behavior: string args without split always override (regardless of args_override:yes)
|
||||||
|
assert result["value"] == "arg_value"
|
||||||
|
captured = capsys.readouterr()
|
||||||
|
assert "Command line option override" in captured.out
|
||||||
|
|
||||||
|
def test_load_settings_args_string_no_split(self, tmp_path: Path, capsys: CaptureFixture[str]):
|
||||||
|
"""Test that string arguments with args_override:yes work normally"""
|
||||||
|
config_file = tmp_path.joinpath("test.ini")
|
||||||
|
config_file.write_text("[TestSection]\nvalue=config_value\n")
|
||||||
|
|
||||||
|
loader = SettingsLoader(
|
||||||
|
args={"value": "arg_value"},
|
||||||
|
config_file=config_file
|
||||||
|
)
|
||||||
|
result = loader.load_settings(
|
||||||
|
"TestSection",
|
||||||
|
{"value": ["args_override:yes"]}
|
||||||
|
)
|
||||||
|
|
||||||
|
# Should use args value for non-list args with args_override:yes
|
||||||
|
assert result["value"] == "arg_value"
|
||||||
|
captured = capsys.readouterr()
|
||||||
|
assert "Command line option override" in captured.out
|
||||||
|
|
||||||
def test_load_settings_no_config_file_with_args(self, tmp_path: Path):
|
def test_load_settings_no_config_file_with_args(self, tmp_path: Path):
|
||||||
"""Test loading settings without config file but with mandatory args"""
|
"""Test loading settings without config file but with mandatory args"""
|
||||||
config_file = tmp_path / "missing.ini"
|
config_file = tmp_path.joinpath("missing.ini")
|
||||||
|
|
||||||
loader = SettingsLoader(
|
loader = SettingsLoader(
|
||||||
args={"required": "value"},
|
args={"required": "value"},
|
||||||
@@ -545,7 +675,7 @@ class TestLoadSettings:
|
|||||||
|
|
||||||
def test_load_settings_no_config_file_missing_args(self, tmp_path: Path):
|
def test_load_settings_no_config_file_missing_args(self, tmp_path: Path):
|
||||||
"""Test loading settings without config file and missing args"""
|
"""Test loading settings without config file and missing args"""
|
||||||
config_file = tmp_path / "missing.ini"
|
config_file = tmp_path.joinpath("missing.ini")
|
||||||
|
|
||||||
loader = SettingsLoader(args={}, config_file=config_file)
|
loader = SettingsLoader(args={}, config_file=config_file)
|
||||||
|
|
||||||
@@ -557,7 +687,7 @@ class TestLoadSettings:
|
|||||||
|
|
||||||
def test_load_settings_check_list_with_split(self, tmp_path: Path):
|
def test_load_settings_check_list_with_split(self, tmp_path: Path):
|
||||||
"""Test check validation with list values"""
|
"""Test check validation with list values"""
|
||||||
config_file = tmp_path / "test.ini"
|
config_file = tmp_path.joinpath("test.ini")
|
||||||
config_file.write_text("[TestSection]\nlist=abc,def,ghi\n")
|
config_file.write_text("[TestSection]\nlist=abc,def,ghi\n")
|
||||||
|
|
||||||
loader = SettingsLoader(args={}, config_file=config_file)
|
loader = SettingsLoader(args={}, config_file=config_file)
|
||||||
@@ -570,7 +700,7 @@ class TestLoadSettings:
|
|||||||
|
|
||||||
def test_load_settings_check_list_cleanup(self, tmp_path: Path):
|
def test_load_settings_check_list_cleanup(self, tmp_path: Path):
|
||||||
"""Test check validation cleans up list values"""
|
"""Test check validation cleans up list values"""
|
||||||
config_file = tmp_path / "test.ini"
|
config_file = tmp_path.joinpath("test.ini")
|
||||||
config_file.write_text("[TestSection]\nlist=ab-c,de_f,gh!i\n")
|
config_file.write_text("[TestSection]\nlist=ab-c,de_f,gh!i\n")
|
||||||
|
|
||||||
loader = SettingsLoader(args={}, config_file=config_file)
|
loader = SettingsLoader(args={}, config_file=config_file)
|
||||||
@@ -583,7 +713,7 @@ class TestLoadSettings:
|
|||||||
|
|
||||||
def test_load_settings_invalid_check_type(self, tmp_path: Path):
|
def test_load_settings_invalid_check_type(self, tmp_path: Path):
|
||||||
"""Test with invalid check type"""
|
"""Test with invalid check type"""
|
||||||
config_file = tmp_path / "test.ini"
|
config_file = tmp_path.joinpath("test.ini")
|
||||||
config_file.write_text("[TestSection]\nvalue=test\n")
|
config_file.write_text("[TestSection]\nvalue=test\n")
|
||||||
|
|
||||||
loader = SettingsLoader(args={}, config_file=config_file)
|
loader = SettingsLoader(args={}, config_file=config_file)
|
||||||
@@ -600,7 +730,7 @@ class TestComplexScenarios:
|
|||||||
|
|
||||||
def test_complex_validation_scenario(self, tmp_path: Path):
|
def test_complex_validation_scenario(self, tmp_path: Path):
|
||||||
"""Test complex scenario with multiple validations"""
|
"""Test complex scenario with multiple validations"""
|
||||||
config_file = tmp_path / "test.ini"
|
config_file = tmp_path.joinpath("test.ini")
|
||||||
config_file.write_text(
|
config_file.write_text(
|
||||||
"[Production]\n"
|
"[Production]\n"
|
||||||
"environment=production\n"
|
"environment=production\n"
|
||||||
@@ -641,7 +771,7 @@ class TestComplexScenarios:
|
|||||||
|
|
||||||
def test_email_list_validation(self, tmp_path: Path):
|
def test_email_list_validation(self, tmp_path: Path):
|
||||||
"""Test email list with validation"""
|
"""Test email list with validation"""
|
||||||
config_file = tmp_path / "test.ini"
|
config_file = tmp_path.joinpath("test.ini")
|
||||||
config_file.write_text(
|
config_file.write_text(
|
||||||
"[EmailConfig]\n"
|
"[EmailConfig]\n"
|
||||||
"emails=test@example.com,admin@domain.org,user+tag@site.co.uk\n"
|
"emails=test@example.com,admin@domain.org,user+tag@site.co.uk\n"
|
||||||
@@ -658,7 +788,7 @@ class TestComplexScenarios:
|
|||||||
|
|
||||||
def test_mixed_args_and_config(self, tmp_path: Path):
|
def test_mixed_args_and_config(self, tmp_path: Path):
|
||||||
"""Test mixing command line args and config file"""
|
"""Test mixing command line args and config file"""
|
||||||
config_file = tmp_path / "test.ini"
|
config_file = tmp_path.joinpath("test.ini")
|
||||||
config_file.write_text(
|
config_file.write_text(
|
||||||
"[Settings]\n"
|
"[Settings]\n"
|
||||||
"value1=config_value1\n"
|
"value1=config_value1\n"
|
||||||
@@ -679,7 +809,7 @@ class TestComplexScenarios:
|
|||||||
|
|
||||||
def test_multiple_check_types(self, tmp_path: Path):
|
def test_multiple_check_types(self, tmp_path: Path):
|
||||||
"""Test multiple different check types"""
|
"""Test multiple different check types"""
|
||||||
config_file = tmp_path / "test.ini"
|
config_file = tmp_path.joinpath("test.ini")
|
||||||
config_file.write_text(
|
config_file.write_text(
|
||||||
"[Checks]\n"
|
"[Checks]\n"
|
||||||
"numbers=123,456,789\n"
|
"numbers=123,456,789\n"
|
||||||
@@ -704,5 +834,48 @@ class TestComplexScenarios:
|
|||||||
assert result["emails"] == "test@example.com"
|
assert result["emails"] == "test@example.com"
|
||||||
assert result["date"] == "2025-01-15"
|
assert result["date"] == "2025-01-15"
|
||||||
|
|
||||||
|
def test_args_no_and_list_skip_combination(self, tmp_path: Path, capsys: CaptureFixture[str]):
|
||||||
|
"""Test combination of args_override:yes flag and list argument skip behavior"""
|
||||||
|
config_file = tmp_path.joinpath("test.ini")
|
||||||
|
config_file.write_text(
|
||||||
|
"[Settings]\n"
|
||||||
|
"no_override=a,b,c\n"
|
||||||
|
"list_no_split=config_list\n"
|
||||||
|
"list_with_split=x,y,z\n"
|
||||||
|
"normal=config_normal\n"
|
||||||
|
)
|
||||||
|
|
||||||
|
loader = SettingsLoader(
|
||||||
|
args={
|
||||||
|
"no_override": ["arg1", "arg2"],
|
||||||
|
"list_no_split": ["arg1", "arg2"],
|
||||||
|
"list_with_split": ["p", "q", "r"],
|
||||||
|
"normal": "arg_normal"
|
||||||
|
},
|
||||||
|
config_file=config_file
|
||||||
|
)
|
||||||
|
result = loader.load_settings(
|
||||||
|
"Settings",
|
||||||
|
{
|
||||||
|
"no_override": ["split:,"],
|
||||||
|
"list_no_split": [],
|
||||||
|
"list_with_split": ["split:,", "args_override:yes"],
|
||||||
|
"normal": ["args_override:yes"]
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
|
# Should use config value (no args_override:yes flag for list with split)
|
||||||
|
assert result["no_override"] == ["a", "b", "c"]
|
||||||
|
# Should use config value because args is list without split
|
||||||
|
assert result["list_no_split"] == "config_list"
|
||||||
|
# Should use args value because split is defined AND args_override:yes is set
|
||||||
|
assert result["list_with_split"] == ["p", "q", "r"]
|
||||||
|
# Should use args value (args_override:yes set for string arg)
|
||||||
|
assert result["normal"] == "arg_normal"
|
||||||
|
|
||||||
|
captured = capsys.readouterr()
|
||||||
|
# Should see override messages (even though list_no_split prints, it doesn't apply)
|
||||||
|
assert "Command line option override" in captured.out
|
||||||
|
|
||||||
|
|
||||||
# __END__
|
# __END__
|
||||||
|
|||||||
461
tests/unit/db_handling/test_sql_main.py
Normal file
461
tests/unit/db_handling/test_sql_main.py
Normal file
@@ -0,0 +1,461 @@
|
|||||||
|
"""
|
||||||
|
PyTest: db_handling/sql_main
|
||||||
|
Tests for SQLMain class - Main SQL interface wrapper
|
||||||
|
|
||||||
|
Note: Pylance warnings about "Redefining name from outer scope" in fixtures are expected.
|
||||||
|
This is standard pytest fixture behavior where fixture parameters shadow fixture definitions.
|
||||||
|
"""
|
||||||
|
# pylint: disable=redefined-outer-name,too-many-public-methods,protected-access
|
||||||
|
# pyright: reportUnknownParameterType=false, reportUnknownArgumentType=false
|
||||||
|
# pyright: reportMissingParameterType=false, reportUnknownVariableType=false
|
||||||
|
# pyright: reportArgumentType=false, reportGeneralTypeIssues=false
|
||||||
|
|
||||||
|
from pathlib import Path
|
||||||
|
from typing import Generator
|
||||||
|
from unittest.mock import MagicMock, patch
|
||||||
|
import pytest
|
||||||
|
from corelibs.db_handling.sql_main import SQLMain, IDENT_SPLIT_CHARACTER
|
||||||
|
from corelibs.db_handling.sqlite_io import SQLiteIO
|
||||||
|
|
||||||
|
|
||||||
|
# Test fixtures
|
||||||
|
@pytest.fixture
|
||||||
|
def mock_logger() -> MagicMock:
|
||||||
|
"""Create a mock logger for testing"""
|
||||||
|
logger = MagicMock()
|
||||||
|
logger.debug = MagicMock()
|
||||||
|
logger.info = MagicMock()
|
||||||
|
logger.warning = MagicMock()
|
||||||
|
logger.error = MagicMock()
|
||||||
|
return logger
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture
|
||||||
|
def temp_db_path(tmp_path: Path) -> Path:
|
||||||
|
"""Create a temporary database file path"""
|
||||||
|
return tmp_path / "test_database.db"
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture
|
||||||
|
def mock_sqlite_io() -> Generator[MagicMock, None, None]:
|
||||||
|
"""Create a mock SQLiteIO instance"""
|
||||||
|
mock_io = MagicMock(spec=SQLiteIO)
|
||||||
|
mock_io.conn = MagicMock()
|
||||||
|
mock_io.db_connected = MagicMock(return_value=True)
|
||||||
|
mock_io.db_close = MagicMock()
|
||||||
|
mock_io.execute_query = MagicMock(return_value=[])
|
||||||
|
yield mock_io
|
||||||
|
|
||||||
|
|
||||||
|
# Test constant
|
||||||
|
class TestConstants:
|
||||||
|
"""Tests for module-level constants"""
|
||||||
|
|
||||||
|
def test_ident_split_character(self):
|
||||||
|
"""Test that IDENT_SPLIT_CHARACTER is defined correctly"""
|
||||||
|
assert IDENT_SPLIT_CHARACTER == ':'
|
||||||
|
|
||||||
|
|
||||||
|
# Test SQLMain class initialization
|
||||||
|
class TestSQLMainInit:
|
||||||
|
"""Tests for SQLMain.__init__"""
|
||||||
|
|
||||||
|
@patch('corelibs.db_handling.sql_main.SQLiteIO')
|
||||||
|
def test_successful_initialization_sqlite(
|
||||||
|
self, mock_sqlite_class: MagicMock, mock_logger: MagicMock, temp_db_path: Path
|
||||||
|
):
|
||||||
|
"""Test successful initialization with SQLite"""
|
||||||
|
mock_sqlite_instance = MagicMock()
|
||||||
|
mock_sqlite_instance.conn = MagicMock()
|
||||||
|
mock_sqlite_instance.db_connected = MagicMock(return_value=True)
|
||||||
|
mock_sqlite_class.return_value = mock_sqlite_instance
|
||||||
|
|
||||||
|
db_ident = f'sqlite:{temp_db_path}'
|
||||||
|
sql_main = SQLMain(mock_logger, db_ident)
|
||||||
|
|
||||||
|
assert sql_main.log == mock_logger
|
||||||
|
assert sql_main.dbh == mock_sqlite_instance
|
||||||
|
assert sql_main.db_target == 'sqlite'
|
||||||
|
mock_sqlite_class.assert_called_once_with(mock_logger, str(temp_db_path), row_factory='Dict')
|
||||||
|
|
||||||
|
@patch('corelibs.db_handling.sql_main.SQLiteIO')
|
||||||
|
def test_initialization_connection_failure(self, mock_sqlite_class: MagicMock, mock_logger: MagicMock):
|
||||||
|
"""Test initialization fails when connection cannot be established"""
|
||||||
|
mock_sqlite_instance = MagicMock()
|
||||||
|
mock_sqlite_instance.conn = None
|
||||||
|
mock_sqlite_instance.db_connected = MagicMock(return_value=False)
|
||||||
|
mock_sqlite_class.return_value = mock_sqlite_instance
|
||||||
|
|
||||||
|
db_ident = 'sqlite:/path/to/db.db'
|
||||||
|
with pytest.raises(ValueError, match='DB Connection failed for: sqlite'):
|
||||||
|
SQLMain(mock_logger, db_ident)
|
||||||
|
|
||||||
|
def test_initialization_invalid_db_target(self, mock_logger: MagicMock):
|
||||||
|
"""Test initialization with unsupported database target"""
|
||||||
|
db_ident = 'postgresql:/path/to/db'
|
||||||
|
with pytest.raises(ValueError, match='SQL interface for postgresql is not implemented'):
|
||||||
|
SQLMain(mock_logger, db_ident)
|
||||||
|
|
||||||
|
def test_initialization_malformed_db_ident(self, mock_logger: MagicMock):
|
||||||
|
"""Test initialization with malformed db_ident string"""
|
||||||
|
db_ident = 'sqlite_no_colon'
|
||||||
|
with pytest.raises(ValueError):
|
||||||
|
SQLMain(mock_logger, db_ident)
|
||||||
|
|
||||||
|
|
||||||
|
# Test SQLMain.connect method
|
||||||
|
class TestSQLMainConnect:
|
||||||
|
"""Tests for SQLMain.connect"""
|
||||||
|
|
||||||
|
@patch('corelibs.db_handling.sql_main.SQLiteIO')
|
||||||
|
def test_connect_when_already_connected(
|
||||||
|
self, mock_sqlite_class: MagicMock, mock_logger: MagicMock, temp_db_path: Path
|
||||||
|
):
|
||||||
|
"""Test connect warns when already connected"""
|
||||||
|
mock_sqlite_instance = MagicMock()
|
||||||
|
mock_sqlite_instance.conn = MagicMock()
|
||||||
|
mock_sqlite_instance.db_connected = MagicMock(return_value=True)
|
||||||
|
mock_sqlite_class.return_value = mock_sqlite_instance
|
||||||
|
|
||||||
|
db_ident = f'sqlite:{temp_db_path}'
|
||||||
|
sql_main = SQLMain(mock_logger, db_ident)
|
||||||
|
|
||||||
|
# Reset mock to check second call
|
||||||
|
mock_logger.warning.reset_mock()
|
||||||
|
|
||||||
|
# Try to connect again
|
||||||
|
sql_main.connect(f'sqlite:{temp_db_path}')
|
||||||
|
|
||||||
|
# Should have warned about existing connection
|
||||||
|
mock_logger.warning.assert_called_once()
|
||||||
|
assert 'already exists' in str(mock_logger.warning.call_args)
|
||||||
|
|
||||||
|
@patch('corelibs.db_handling.sql_main.SQLiteIO')
|
||||||
|
def test_connect_sqlite_success(
|
||||||
|
self, mock_sqlite_class: MagicMock, mock_logger: MagicMock, temp_db_path: Path
|
||||||
|
):
|
||||||
|
"""Test successful SQLite connection"""
|
||||||
|
mock_sqlite_instance = MagicMock()
|
||||||
|
mock_sqlite_instance.conn = MagicMock()
|
||||||
|
mock_sqlite_instance.db_connected = MagicMock(return_value=True)
|
||||||
|
mock_sqlite_class.return_value = mock_sqlite_instance
|
||||||
|
|
||||||
|
sql_main = SQLMain.__new__(SQLMain)
|
||||||
|
sql_main.log = mock_logger
|
||||||
|
sql_main.dbh = None
|
||||||
|
sql_main.db_target = None
|
||||||
|
|
||||||
|
db_ident = f'sqlite:{temp_db_path}'
|
||||||
|
sql_main.connect(db_ident)
|
||||||
|
|
||||||
|
assert sql_main.db_target == 'sqlite'
|
||||||
|
assert sql_main.dbh == mock_sqlite_instance
|
||||||
|
mock_sqlite_class.assert_called_once_with(mock_logger, str(temp_db_path), row_factory='Dict')
|
||||||
|
|
||||||
|
def test_connect_unsupported_database(self, mock_logger: MagicMock):
|
||||||
|
"""Test connect with unsupported database type"""
|
||||||
|
sql_main = SQLMain.__new__(SQLMain)
|
||||||
|
sql_main.log = mock_logger
|
||||||
|
sql_main.dbh = None
|
||||||
|
sql_main.db_target = None
|
||||||
|
|
||||||
|
db_ident = 'mysql:/path/to/db'
|
||||||
|
with pytest.raises(ValueError, match='SQL interface for mysql is not implemented'):
|
||||||
|
sql_main.connect(db_ident)
|
||||||
|
|
||||||
|
@patch('corelibs.db_handling.sql_main.SQLiteIO')
|
||||||
|
def test_connect_db_connection_failed(
|
||||||
|
self, mock_sqlite_class: MagicMock, mock_logger: MagicMock, temp_db_path: Path
|
||||||
|
):
|
||||||
|
"""Test connect raises error when DB connection fails"""
|
||||||
|
mock_sqlite_instance = MagicMock()
|
||||||
|
mock_sqlite_instance.db_connected = MagicMock(return_value=False)
|
||||||
|
mock_sqlite_class.return_value = mock_sqlite_instance
|
||||||
|
|
||||||
|
sql_main = SQLMain.__new__(SQLMain)
|
||||||
|
sql_main.log = mock_logger
|
||||||
|
sql_main.dbh = None
|
||||||
|
sql_main.db_target = None
|
||||||
|
|
||||||
|
db_ident = f'sqlite:{temp_db_path}'
|
||||||
|
with pytest.raises(ValueError, match='DB Connection failed for: sqlite'):
|
||||||
|
sql_main.connect(db_ident)
|
||||||
|
|
||||||
|
|
||||||
|
# Test SQLMain.close method
|
||||||
|
class TestSQLMainClose:
|
||||||
|
"""Tests for SQLMain.close"""
|
||||||
|
|
||||||
|
@patch('corelibs.db_handling.sql_main.SQLiteIO')
|
||||||
|
def test_close_successful(
|
||||||
|
self, mock_sqlite_class: MagicMock, mock_logger: MagicMock, temp_db_path: Path
|
||||||
|
):
|
||||||
|
"""Test successful database close"""
|
||||||
|
mock_sqlite_instance = MagicMock()
|
||||||
|
mock_sqlite_instance.conn = MagicMock()
|
||||||
|
mock_sqlite_instance.db_connected = MagicMock(return_value=True)
|
||||||
|
mock_sqlite_instance.db_close = MagicMock()
|
||||||
|
mock_sqlite_class.return_value = mock_sqlite_instance
|
||||||
|
|
||||||
|
db_ident = f'sqlite:{temp_db_path}'
|
||||||
|
sql_main = SQLMain(mock_logger, db_ident)
|
||||||
|
|
||||||
|
sql_main.close()
|
||||||
|
|
||||||
|
mock_sqlite_instance.db_close.assert_called_once()
|
||||||
|
|
||||||
|
@patch('corelibs.db_handling.sql_main.SQLiteIO')
|
||||||
|
def test_close_when_not_connected(
|
||||||
|
self, mock_sqlite_class: MagicMock, mock_logger: MagicMock, temp_db_path: Path
|
||||||
|
):
|
||||||
|
"""Test close when not connected does nothing"""
|
||||||
|
mock_sqlite_instance = MagicMock()
|
||||||
|
mock_sqlite_instance.conn = MagicMock()
|
||||||
|
mock_sqlite_instance.db_connected = MagicMock(return_value=True)
|
||||||
|
mock_sqlite_instance.db_close = MagicMock()
|
||||||
|
mock_sqlite_class.return_value = mock_sqlite_instance
|
||||||
|
|
||||||
|
db_ident = f'sqlite:{temp_db_path}'
|
||||||
|
sql_main = SQLMain(mock_logger, db_ident)
|
||||||
|
|
||||||
|
# Change db_connected to return False to simulate disconnection
|
||||||
|
mock_sqlite_instance.db_connected = MagicMock(return_value=False)
|
||||||
|
|
||||||
|
sql_main.close()
|
||||||
|
|
||||||
|
# Should not raise error and should exit early
|
||||||
|
assert mock_sqlite_instance.db_close.call_count == 0
|
||||||
|
|
||||||
|
def test_close_when_dbh_is_none(self, mock_logger: MagicMock):
|
||||||
|
"""Test close when dbh is None"""
|
||||||
|
sql_main = SQLMain.__new__(SQLMain)
|
||||||
|
sql_main.log = mock_logger
|
||||||
|
sql_main.dbh = None
|
||||||
|
sql_main.db_target = 'sqlite'
|
||||||
|
|
||||||
|
# Should not raise error
|
||||||
|
sql_main.close()
|
||||||
|
|
||||||
|
|
||||||
|
# Test SQLMain.connected method
|
||||||
|
class TestSQLMainConnected:
|
||||||
|
"""Tests for SQLMain.connected"""
|
||||||
|
|
||||||
|
@patch('corelibs.db_handling.sql_main.SQLiteIO')
|
||||||
|
def test_connected_returns_true(
|
||||||
|
self, mock_sqlite_class: MagicMock, mock_logger: MagicMock, temp_db_path: Path
|
||||||
|
):
|
||||||
|
"""Test connected returns True when connected"""
|
||||||
|
mock_sqlite_instance = MagicMock()
|
||||||
|
mock_sqlite_instance.conn = MagicMock()
|
||||||
|
mock_sqlite_instance.db_connected = MagicMock(return_value=True)
|
||||||
|
mock_sqlite_class.return_value = mock_sqlite_instance
|
||||||
|
|
||||||
|
db_ident = f'sqlite:{temp_db_path}'
|
||||||
|
sql_main = SQLMain(mock_logger, db_ident)
|
||||||
|
|
||||||
|
assert sql_main.connected() is True
|
||||||
|
mock_logger.warning.assert_not_called()
|
||||||
|
|
||||||
|
@patch('corelibs.db_handling.sql_main.SQLiteIO')
|
||||||
|
def test_connected_returns_false_when_not_connected(
|
||||||
|
self, mock_sqlite_class: MagicMock, mock_logger: MagicMock, temp_db_path: Path
|
||||||
|
):
|
||||||
|
"""Test connected returns False and warns when not connected"""
|
||||||
|
mock_sqlite_instance = MagicMock()
|
||||||
|
mock_sqlite_instance.conn = MagicMock()
|
||||||
|
mock_sqlite_instance.db_connected = MagicMock(return_value=True)
|
||||||
|
mock_sqlite_class.return_value = mock_sqlite_instance
|
||||||
|
|
||||||
|
db_ident = f'sqlite:{temp_db_path}'
|
||||||
|
sql_main = SQLMain(mock_logger, db_ident)
|
||||||
|
|
||||||
|
# Reset warning calls from init
|
||||||
|
mock_logger.warning.reset_mock()
|
||||||
|
|
||||||
|
# Change db_connected to return False to simulate disconnection
|
||||||
|
mock_sqlite_instance.db_connected = MagicMock(return_value=False)
|
||||||
|
|
||||||
|
assert sql_main.connected() is False
|
||||||
|
mock_logger.warning.assert_called_once()
|
||||||
|
assert 'No connection' in str(mock_logger.warning.call_args)
|
||||||
|
|
||||||
|
def test_connected_returns_false_when_dbh_is_none(self, mock_logger: MagicMock):
|
||||||
|
"""Test connected returns False when dbh is None"""
|
||||||
|
sql_main = SQLMain.__new__(SQLMain)
|
||||||
|
sql_main.log = mock_logger
|
||||||
|
sql_main.dbh = None
|
||||||
|
sql_main.db_target = 'sqlite'
|
||||||
|
|
||||||
|
assert sql_main.connected() is False
|
||||||
|
mock_logger.warning.assert_called_once()
|
||||||
|
|
||||||
|
|
||||||
|
# Test SQLMain.process_query method
|
||||||
|
class TestSQLMainProcessQuery:
|
||||||
|
"""Tests for SQLMain.process_query"""
|
||||||
|
|
||||||
|
@patch('corelibs.db_handling.sql_main.SQLiteIO')
|
||||||
|
def test_process_query_success_no_params(
|
||||||
|
self, mock_sqlite_class: MagicMock, mock_logger: MagicMock, temp_db_path: Path
|
||||||
|
):
|
||||||
|
"""Test successful query execution without parameters"""
|
||||||
|
mock_sqlite_instance = MagicMock()
|
||||||
|
mock_sqlite_instance.conn = MagicMock()
|
||||||
|
mock_sqlite_instance.db_connected = MagicMock(return_value=True)
|
||||||
|
expected_result = [{'id': 1, 'name': 'test'}]
|
||||||
|
mock_sqlite_instance.execute_query = MagicMock(return_value=expected_result)
|
||||||
|
mock_sqlite_class.return_value = mock_sqlite_instance
|
||||||
|
|
||||||
|
db_ident = f'sqlite:{temp_db_path}'
|
||||||
|
sql_main = SQLMain(mock_logger, db_ident)
|
||||||
|
|
||||||
|
query = "SELECT * FROM test"
|
||||||
|
result = sql_main.process_query(query)
|
||||||
|
|
||||||
|
assert result == expected_result
|
||||||
|
mock_sqlite_instance.execute_query.assert_called_once_with(query, None)
|
||||||
|
|
||||||
|
@patch('corelibs.db_handling.sql_main.SQLiteIO')
|
||||||
|
def test_process_query_success_with_params(
|
||||||
|
self, mock_sqlite_class: MagicMock, mock_logger: MagicMock, temp_db_path: Path
|
||||||
|
):
|
||||||
|
"""Test successful query execution with parameters"""
|
||||||
|
mock_sqlite_instance = MagicMock()
|
||||||
|
mock_sqlite_instance.conn = MagicMock()
|
||||||
|
mock_sqlite_instance.db_connected = MagicMock(return_value=True)
|
||||||
|
expected_result = [{'id': 1, 'name': 'test'}]
|
||||||
|
mock_sqlite_instance.execute_query = MagicMock(return_value=expected_result)
|
||||||
|
mock_sqlite_class.return_value = mock_sqlite_instance
|
||||||
|
|
||||||
|
db_ident = f'sqlite:{temp_db_path}'
|
||||||
|
sql_main = SQLMain(mock_logger, db_ident)
|
||||||
|
|
||||||
|
query = "SELECT * FROM test WHERE id = ?"
|
||||||
|
params = (1,)
|
||||||
|
result = sql_main.process_query(query, params)
|
||||||
|
|
||||||
|
assert result == expected_result
|
||||||
|
mock_sqlite_instance.execute_query.assert_called_once_with(query, params)
|
||||||
|
|
||||||
|
@patch('corelibs.db_handling.sql_main.SQLiteIO')
|
||||||
|
def test_process_query_returns_false_on_error(
|
||||||
|
self, mock_sqlite_class: MagicMock, mock_logger: MagicMock, temp_db_path: Path
|
||||||
|
):
|
||||||
|
"""Test query returns False when execute_query fails"""
|
||||||
|
mock_sqlite_instance = MagicMock()
|
||||||
|
mock_sqlite_instance.conn = MagicMock()
|
||||||
|
mock_sqlite_instance.db_connected = MagicMock(return_value=True)
|
||||||
|
mock_sqlite_instance.execute_query = MagicMock(return_value=False)
|
||||||
|
mock_sqlite_class.return_value = mock_sqlite_instance
|
||||||
|
|
||||||
|
db_ident = f'sqlite:{temp_db_path}'
|
||||||
|
sql_main = SQLMain(mock_logger, db_ident)
|
||||||
|
|
||||||
|
query = "SELECT * FROM nonexistent"
|
||||||
|
result = sql_main.process_query(query)
|
||||||
|
|
||||||
|
assert result is False
|
||||||
|
|
||||||
|
@patch('corelibs.db_handling.sql_main.SQLiteIO')
|
||||||
|
def test_process_query_dbh_is_none(
|
||||||
|
self, mock_sqlite_class: MagicMock, mock_logger: MagicMock, temp_db_path: Path
|
||||||
|
):
|
||||||
|
"""Test query returns False when dbh is None"""
|
||||||
|
mock_sqlite_instance = MagicMock()
|
||||||
|
mock_sqlite_instance.conn = MagicMock()
|
||||||
|
mock_sqlite_instance.db_connected = MagicMock(return_value=True)
|
||||||
|
mock_sqlite_class.return_value = mock_sqlite_instance
|
||||||
|
|
||||||
|
db_ident = f'sqlite:{temp_db_path}'
|
||||||
|
sql_main = SQLMain(mock_logger, db_ident)
|
||||||
|
|
||||||
|
# Manually set dbh to None
|
||||||
|
sql_main.dbh = None
|
||||||
|
|
||||||
|
query = "SELECT * FROM test"
|
||||||
|
result = sql_main.process_query(query)
|
||||||
|
|
||||||
|
assert result is False
|
||||||
|
mock_logger.error.assert_called_once()
|
||||||
|
assert 'Problem connecting to db' in str(mock_logger.error.call_args)
|
||||||
|
|
||||||
|
@patch('corelibs.db_handling.sql_main.SQLiteIO')
|
||||||
|
def test_process_query_returns_empty_list(
|
||||||
|
self, mock_sqlite_class: MagicMock, mock_logger: MagicMock, temp_db_path: Path
|
||||||
|
):
|
||||||
|
"""Test query returns empty list when no results"""
|
||||||
|
mock_sqlite_instance = MagicMock()
|
||||||
|
mock_sqlite_instance.conn = MagicMock()
|
||||||
|
mock_sqlite_instance.db_connected = MagicMock(return_value=True)
|
||||||
|
mock_sqlite_instance.execute_query = MagicMock(return_value=[])
|
||||||
|
mock_sqlite_class.return_value = mock_sqlite_instance
|
||||||
|
|
||||||
|
db_ident = f'sqlite:{temp_db_path}'
|
||||||
|
sql_main = SQLMain(mock_logger, db_ident)
|
||||||
|
|
||||||
|
query = "SELECT * FROM test WHERE 1=0"
|
||||||
|
result = sql_main.process_query(query)
|
||||||
|
|
||||||
|
assert result == []
|
||||||
|
|
||||||
|
|
||||||
|
# Integration-like tests
|
||||||
|
class TestSQLMainIntegration:
|
||||||
|
"""Integration-like tests for complete workflows"""
|
||||||
|
|
||||||
|
@patch('corelibs.db_handling.sql_main.SQLiteIO')
|
||||||
|
def test_full_workflow_connect_query_close(
|
||||||
|
self, mock_sqlite_class: MagicMock, mock_logger: MagicMock, temp_db_path: Path
|
||||||
|
):
|
||||||
|
"""Test complete workflow: connect, query, close"""
|
||||||
|
mock_sqlite_instance = MagicMock()
|
||||||
|
mock_sqlite_instance.conn = MagicMock()
|
||||||
|
mock_sqlite_instance.db_connected = MagicMock(return_value=True)
|
||||||
|
mock_sqlite_instance.execute_query = MagicMock(return_value=[{'count': 5}])
|
||||||
|
mock_sqlite_instance.db_close = MagicMock()
|
||||||
|
mock_sqlite_class.return_value = mock_sqlite_instance
|
||||||
|
|
||||||
|
db_ident = f'sqlite:{temp_db_path}'
|
||||||
|
sql_main = SQLMain(mock_logger, db_ident)
|
||||||
|
|
||||||
|
# Execute query
|
||||||
|
result = sql_main.process_query("SELECT COUNT(*) as count FROM test")
|
||||||
|
assert result == [{'count': 5}]
|
||||||
|
|
||||||
|
# Check connected
|
||||||
|
assert sql_main.connected() is True
|
||||||
|
|
||||||
|
# Close connection
|
||||||
|
sql_main.close()
|
||||||
|
mock_sqlite_instance.db_close.assert_called_once()
|
||||||
|
|
||||||
|
@patch('corelibs.db_handling.sql_main.SQLiteIO')
|
||||||
|
def test_multiple_queries_same_connection(
|
||||||
|
self, mock_sqlite_class: MagicMock, mock_logger: MagicMock, temp_db_path: Path
|
||||||
|
):
|
||||||
|
"""Test multiple queries on the same connection"""
|
||||||
|
mock_sqlite_instance = MagicMock()
|
||||||
|
mock_sqlite_instance.conn = MagicMock()
|
||||||
|
mock_sqlite_instance.db_connected = MagicMock(return_value=True)
|
||||||
|
mock_sqlite_instance.execute_query = MagicMock(side_effect=[
|
||||||
|
[{'id': 1}],
|
||||||
|
[{'id': 2}],
|
||||||
|
[{'id': 3}]
|
||||||
|
])
|
||||||
|
mock_sqlite_class.return_value = mock_sqlite_instance
|
||||||
|
|
||||||
|
db_ident = f'sqlite:{temp_db_path}'
|
||||||
|
sql_main = SQLMain(mock_logger, db_ident)
|
||||||
|
|
||||||
|
result1 = sql_main.process_query("SELECT * FROM test WHERE id = 1")
|
||||||
|
result2 = sql_main.process_query("SELECT * FROM test WHERE id = 2")
|
||||||
|
result3 = sql_main.process_query("SELECT * FROM test WHERE id = 3")
|
||||||
|
|
||||||
|
assert result1 == [{'id': 1}]
|
||||||
|
assert result2 == [{'id': 2}]
|
||||||
|
assert result3 == [{'id': 3}]
|
||||||
|
assert mock_sqlite_instance.execute_query.call_count == 3
|
||||||
|
|
||||||
|
|
||||||
|
# __END__
|
||||||
@@ -4,7 +4,101 @@ tests for corelibs.iterator_handling.fingerprint
|
|||||||
|
|
||||||
from typing import Any
|
from typing import Any
|
||||||
import pytest
|
import pytest
|
||||||
from corelibs.iterator_handling.fingerprint import dict_hash_frozen, dict_hash_crc
|
from corelibs.iterator_handling.fingerprint import dict_hash_frozen, dict_hash_crc, hash_object
|
||||||
|
|
||||||
|
|
||||||
|
class TestHashObject:
|
||||||
|
"""Tests for hash_object function"""
|
||||||
|
|
||||||
|
def test_hash_object_simple_dict(self):
|
||||||
|
"""Test hashing a simple dictionary with hash_object"""
|
||||||
|
data = {"key1": "value1", "key2": "value2"}
|
||||||
|
result = hash_object(data)
|
||||||
|
|
||||||
|
assert isinstance(result, str)
|
||||||
|
assert len(result) == 64 # SHA256 produces 64 hex characters
|
||||||
|
|
||||||
|
def test_hash_object_mixed_keys(self):
|
||||||
|
"""Test hash_object with mixed int and string keys"""
|
||||||
|
data = {"key1": "value1", 1: "value2", 2: "value3"}
|
||||||
|
result = hash_object(data)
|
||||||
|
|
||||||
|
assert isinstance(result, str)
|
||||||
|
assert len(result) == 64
|
||||||
|
|
||||||
|
def test_hash_object_consistency(self):
|
||||||
|
"""Test that hash_object produces consistent results"""
|
||||||
|
data = {"str_key": "value", 123: "number_key"}
|
||||||
|
hash1 = hash_object(data)
|
||||||
|
hash2 = hash_object(data)
|
||||||
|
|
||||||
|
assert hash1 == hash2
|
||||||
|
|
||||||
|
def test_hash_object_order_independence(self):
|
||||||
|
"""Test that hash_object is order-independent"""
|
||||||
|
data1 = {"a": 1, 1: "one", "b": 2, 2: "two"}
|
||||||
|
data2 = {2: "two", "b": 2, 1: "one", "a": 1}
|
||||||
|
hash1 = hash_object(data1)
|
||||||
|
hash2 = hash_object(data2)
|
||||||
|
|
||||||
|
assert hash1 == hash2
|
||||||
|
|
||||||
|
def test_hash_object_list_of_dicts_mixed_keys(self):
|
||||||
|
"""Test hash_object with list of dicts containing mixed keys"""
|
||||||
|
data = [
|
||||||
|
{"name": "item1", 1: "value1"},
|
||||||
|
{"name": "item2", 2: "value2"}
|
||||||
|
]
|
||||||
|
result = hash_object(data)
|
||||||
|
|
||||||
|
assert isinstance(result, str)
|
||||||
|
assert len(result) == 64
|
||||||
|
|
||||||
|
def test_hash_object_nested_mixed_keys(self):
|
||||||
|
"""Test hash_object with nested structures containing mixed keys"""
|
||||||
|
data = {
|
||||||
|
"outer": {
|
||||||
|
"inner": "value",
|
||||||
|
1: "mixed_key"
|
||||||
|
},
|
||||||
|
2: "another_mixed"
|
||||||
|
}
|
||||||
|
result = hash_object(data)
|
||||||
|
|
||||||
|
assert isinstance(result, str)
|
||||||
|
assert len(result) == 64
|
||||||
|
|
||||||
|
def test_hash_object_different_data(self):
|
||||||
|
"""Test that different data produces different hashes"""
|
||||||
|
data1 = {"key": "value", 1: "one"}
|
||||||
|
data2 = {"key": "value", 2: "two"}
|
||||||
|
hash1 = hash_object(data1)
|
||||||
|
hash2 = hash_object(data2)
|
||||||
|
|
||||||
|
assert hash1 != hash2
|
||||||
|
|
||||||
|
def test_hash_object_complex_nested(self):
|
||||||
|
"""Test hash_object with complex nested structures"""
|
||||||
|
data = {
|
||||||
|
"level1": {
|
||||||
|
"level2": {
|
||||||
|
1: "value",
|
||||||
|
"key": [1, 2, {"nested": "deep", 3: "int_key"}]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
result = hash_object(data)
|
||||||
|
|
||||||
|
assert isinstance(result, str)
|
||||||
|
assert len(result) == 64
|
||||||
|
|
||||||
|
def test_hash_object_list_with_tuples(self):
|
||||||
|
"""Test hash_object with lists containing tuples"""
|
||||||
|
data = [("a", 1), ("b", 2), {1: "mixed", "key": "value"}]
|
||||||
|
result = hash_object(data)
|
||||||
|
|
||||||
|
assert isinstance(result, str)
|
||||||
|
assert len(result) == 64
|
||||||
|
|
||||||
|
|
||||||
class TestDictHashFrozen:
|
class TestDictHashFrozen:
|
||||||
@@ -279,6 +373,116 @@ class TestDictHashCrc:
|
|||||||
assert isinstance(result, str)
|
assert isinstance(result, str)
|
||||||
assert len(result) == 64
|
assert len(result) == 64
|
||||||
|
|
||||||
|
def test_dict_hash_crc_fallback_mixed_keys(self):
|
||||||
|
"""Test dict_hash_crc fallback with mixed int and string keys"""
|
||||||
|
data = {"key1": "value1", 1: "value2", 2: "value3"}
|
||||||
|
result = dict_hash_crc(data)
|
||||||
|
|
||||||
|
assert isinstance(result, str)
|
||||||
|
# Fallback prefixes with "HO_"
|
||||||
|
assert result.startswith("HO_")
|
||||||
|
# Hash should be 64 chars + 3 char prefix = 67 total
|
||||||
|
assert len(result) == 67
|
||||||
|
|
||||||
|
def test_dict_hash_crc_fallback_consistency(self):
|
||||||
|
"""Test that fallback produces consistent hashes"""
|
||||||
|
data = {"str_key": "value", 123: "number_key", 456: "another"}
|
||||||
|
hash1 = dict_hash_crc(data)
|
||||||
|
hash2 = dict_hash_crc(data)
|
||||||
|
|
||||||
|
assert hash1 == hash2
|
||||||
|
assert hash1.startswith("HO_")
|
||||||
|
|
||||||
|
def test_dict_hash_crc_fallback_order_independence(self):
|
||||||
|
"""Test that fallback is order-independent for mixed-key dicts"""
|
||||||
|
data1 = {"a": 1, 1: "one", "b": 2, 2: "two"}
|
||||||
|
data2 = {2: "two", "b": 2, 1: "one", "a": 1}
|
||||||
|
hash1 = dict_hash_crc(data1)
|
||||||
|
hash2 = dict_hash_crc(data2)
|
||||||
|
|
||||||
|
assert hash1 == hash2
|
||||||
|
assert hash1.startswith("HO_")
|
||||||
|
|
||||||
|
def test_dict_hash_crc_fallback_list_of_dicts_mixed_keys(self):
|
||||||
|
"""Test fallback with list of dicts containing mixed keys"""
|
||||||
|
data = [
|
||||||
|
{"name": "item1", 1: "value1"},
|
||||||
|
{"name": "item2", 2: "value2"},
|
||||||
|
{3: "value3", "type": "mixed"}
|
||||||
|
]
|
||||||
|
result = dict_hash_crc(data)
|
||||||
|
|
||||||
|
assert isinstance(result, str)
|
||||||
|
assert result.startswith("HO_")
|
||||||
|
assert len(result) == 67
|
||||||
|
|
||||||
|
def test_dict_hash_crc_fallback_nested_mixed_keys(self):
|
||||||
|
"""Test fallback with nested dicts containing mixed keys"""
|
||||||
|
data = {
|
||||||
|
"outer": {
|
||||||
|
"inner": "value",
|
||||||
|
1: "mixed_key"
|
||||||
|
},
|
||||||
|
2: "another_mixed"
|
||||||
|
}
|
||||||
|
result = dict_hash_crc(data)
|
||||||
|
|
||||||
|
assert isinstance(result, str)
|
||||||
|
assert result.startswith("HO_")
|
||||||
|
assert len(result) == 67
|
||||||
|
|
||||||
|
def test_dict_hash_crc_fallback_different_data(self):
|
||||||
|
"""Test that different mixed-key data produces different hashes"""
|
||||||
|
data1 = {"key": "value", 1: "one"}
|
||||||
|
data2 = {"key": "value", 2: "two"}
|
||||||
|
hash1 = dict_hash_crc(data1)
|
||||||
|
hash2 = dict_hash_crc(data2)
|
||||||
|
|
||||||
|
assert hash1 != hash2
|
||||||
|
assert hash1.startswith("HO_")
|
||||||
|
assert hash2.startswith("HO_")
|
||||||
|
|
||||||
|
def test_dict_hash_crc_fallback_complex_structure(self):
|
||||||
|
"""Test fallback with complex nested structure with mixed keys"""
|
||||||
|
data = [
|
||||||
|
{
|
||||||
|
"id": 1,
|
||||||
|
1: "first",
|
||||||
|
"data": {
|
||||||
|
"nested": "value",
|
||||||
|
100: "nested_int_key"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"id": 2,
|
||||||
|
2: "second",
|
||||||
|
"items": [1, 2, 3]
|
||||||
|
}
|
||||||
|
]
|
||||||
|
result = dict_hash_crc(data)
|
||||||
|
|
||||||
|
assert isinstance(result, str)
|
||||||
|
assert result.startswith("HO_")
|
||||||
|
assert len(result) == 67
|
||||||
|
|
||||||
|
def test_dict_hash_crc_no_fallback_string_keys_only(self):
|
||||||
|
"""Test that string-only keys don't trigger fallback"""
|
||||||
|
data = {"key1": "value1", "key2": "value2", "key3": "value3"}
|
||||||
|
result = dict_hash_crc(data)
|
||||||
|
|
||||||
|
assert isinstance(result, str)
|
||||||
|
assert not result.startswith("HO_")
|
||||||
|
assert len(result) == 64
|
||||||
|
|
||||||
|
def test_dict_hash_crc_no_fallback_int_keys_only(self):
|
||||||
|
"""Test that int-only keys don't trigger fallback"""
|
||||||
|
data = {1: "one", 2: "two", 3: "three"}
|
||||||
|
result = dict_hash_crc(data)
|
||||||
|
|
||||||
|
assert isinstance(result, str)
|
||||||
|
assert not result.startswith("HO_")
|
||||||
|
assert len(result) == 64
|
||||||
|
|
||||||
|
|
||||||
class TestComparisonBetweenHashFunctions:
|
class TestComparisonBetweenHashFunctions:
|
||||||
"""Tests comparing dict_hash_frozen and dict_hash_crc"""
|
"""Tests comparing dict_hash_frozen and dict_hash_crc"""
|
||||||
|
|||||||
@@ -4,7 +4,7 @@ iterator_handling.list_helepr tests
|
|||||||
|
|
||||||
from typing import Any
|
from typing import Any
|
||||||
import pytest
|
import pytest
|
||||||
from corelibs.iterator_handling.list_helpers import convert_to_list, is_list_in_list
|
from corelibs.iterator_handling.list_helpers import convert_to_list, is_list_in_list, make_unique_list_of_dicts
|
||||||
|
|
||||||
|
|
||||||
class TestConvertToList:
|
class TestConvertToList:
|
||||||
@@ -298,3 +298,225 @@ class TestPerformance:
|
|||||||
# Should still work correctly despite duplicates
|
# Should still work correctly despite duplicates
|
||||||
assert set(result) == {1, 3}
|
assert set(result) == {1, 3}
|
||||||
assert isinstance(result, list)
|
assert isinstance(result, list)
|
||||||
|
|
||||||
|
|
||||||
|
class TestMakeUniqueListOfDicts:
|
||||||
|
"""Test cases for make_unique_list_of_dicts function"""
|
||||||
|
|
||||||
|
def test_basic_duplicate_removal(self):
|
||||||
|
"""Test basic removal of duplicate dictionaries"""
|
||||||
|
dict_list = [
|
||||||
|
{"a": 1, "b": 2},
|
||||||
|
{"a": 1, "b": 2},
|
||||||
|
{"a": 3, "b": 4}
|
||||||
|
]
|
||||||
|
result = make_unique_list_of_dicts(dict_list)
|
||||||
|
assert len(result) == 2
|
||||||
|
assert {"a": 1, "b": 2} in result
|
||||||
|
assert {"a": 3, "b": 4} in result
|
||||||
|
|
||||||
|
def test_order_independent_duplicates(self):
|
||||||
|
"""Test that dictionaries with different key orders are treated as duplicates"""
|
||||||
|
dict_list = [
|
||||||
|
{"a": 1, "b": 2},
|
||||||
|
{"b": 2, "a": 1}, # Same content, different order
|
||||||
|
{"a": 3, "b": 4}
|
||||||
|
]
|
||||||
|
result = make_unique_list_of_dicts(dict_list)
|
||||||
|
assert len(result) == 2
|
||||||
|
assert {"a": 1, "b": 2} in result
|
||||||
|
assert {"a": 3, "b": 4} in result
|
||||||
|
|
||||||
|
def test_empty_list(self):
|
||||||
|
"""Test with empty list"""
|
||||||
|
result = make_unique_list_of_dicts([])
|
||||||
|
assert result == []
|
||||||
|
assert isinstance(result, list)
|
||||||
|
|
||||||
|
def test_single_dict(self):
|
||||||
|
"""Test with single dictionary"""
|
||||||
|
dict_list = [{"a": 1, "b": 2}]
|
||||||
|
result = make_unique_list_of_dicts(dict_list)
|
||||||
|
assert result == [{"a": 1, "b": 2}]
|
||||||
|
|
||||||
|
def test_all_unique(self):
|
||||||
|
"""Test when all dictionaries are unique"""
|
||||||
|
dict_list = [
|
||||||
|
{"a": 1},
|
||||||
|
{"b": 2},
|
||||||
|
{"c": 3},
|
||||||
|
{"d": 4}
|
||||||
|
]
|
||||||
|
result = make_unique_list_of_dicts(dict_list)
|
||||||
|
assert len(result) == 4
|
||||||
|
for d in dict_list:
|
||||||
|
assert d in result
|
||||||
|
|
||||||
|
def test_all_duplicates(self):
|
||||||
|
"""Test when all dictionaries are duplicates"""
|
||||||
|
dict_list = [
|
||||||
|
{"a": 1, "b": 2},
|
||||||
|
{"a": 1, "b": 2},
|
||||||
|
{"a": 1, "b": 2},
|
||||||
|
{"b": 2, "a": 1}
|
||||||
|
]
|
||||||
|
result = make_unique_list_of_dicts(dict_list)
|
||||||
|
assert len(result) == 1
|
||||||
|
assert result[0] == {"a": 1, "b": 2}
|
||||||
|
|
||||||
|
def test_nested_values(self):
|
||||||
|
"""Test with nested structures as values"""
|
||||||
|
dict_list = [
|
||||||
|
{"a": [1, 2], "b": 3},
|
||||||
|
{"a": [1, 2], "b": 3},
|
||||||
|
{"a": [1, 3], "b": 3}
|
||||||
|
]
|
||||||
|
result = make_unique_list_of_dicts(dict_list)
|
||||||
|
assert len(result) == 2
|
||||||
|
assert {"a": [1, 2], "b": 3} in result
|
||||||
|
assert {"a": [1, 3], "b": 3} in result
|
||||||
|
|
||||||
|
def test_different_value_types(self):
|
||||||
|
"""Test with different value types"""
|
||||||
|
dict_list = [
|
||||||
|
{"str": "hello", "int": 42, "float": 3.14, "bool": True},
|
||||||
|
{"str": "hello", "int": 42, "float": 3.14, "bool": True},
|
||||||
|
{"str": "world", "int": 99, "float": 2.71, "bool": False}
|
||||||
|
]
|
||||||
|
result = make_unique_list_of_dicts(dict_list)
|
||||||
|
assert len(result) == 2
|
||||||
|
|
||||||
|
def test_empty_dicts(self):
|
||||||
|
"""Test with empty dictionaries"""
|
||||||
|
dict_list: list[Any] = [
|
||||||
|
{},
|
||||||
|
{},
|
||||||
|
{"a": 1}
|
||||||
|
]
|
||||||
|
result = make_unique_list_of_dicts(dict_list)
|
||||||
|
assert len(result) == 2
|
||||||
|
assert {} in result
|
||||||
|
assert {"a": 1} in result
|
||||||
|
|
||||||
|
def test_single_key_dicts(self):
|
||||||
|
"""Test with single key dictionaries"""
|
||||||
|
dict_list = [
|
||||||
|
{"a": 1},
|
||||||
|
{"a": 1},
|
||||||
|
{"a": 2},
|
||||||
|
{"b": 1}
|
||||||
|
]
|
||||||
|
result = make_unique_list_of_dicts(dict_list)
|
||||||
|
assert len(result) == 3
|
||||||
|
assert {"a": 1} in result
|
||||||
|
assert {"a": 2} in result
|
||||||
|
assert {"b": 1} in result
|
||||||
|
|
||||||
|
def test_many_keys(self):
|
||||||
|
"""Test with dictionaries containing many keys"""
|
||||||
|
dict1 = {"a": 1, "b": 2, "c": 3, "d": 4, "e": 5}
|
||||||
|
dict2 = {"e": 5, "d": 4, "c": 3, "b": 2, "a": 1} # Same, different order
|
||||||
|
dict3 = {"a": 1, "b": 2, "c": 3, "d": 4, "e": 6} # Different value
|
||||||
|
dict_list = [dict1, dict2, dict3]
|
||||||
|
result = make_unique_list_of_dicts(dict_list)
|
||||||
|
assert len(result) == 2
|
||||||
|
|
||||||
|
def test_numeric_keys(self):
|
||||||
|
"""Test with numeric keys"""
|
||||||
|
dict_list = [
|
||||||
|
{1: "one", 2: "two"},
|
||||||
|
{2: "two", 1: "one"},
|
||||||
|
{1: "one", 2: "three"}
|
||||||
|
]
|
||||||
|
result = make_unique_list_of_dicts(dict_list)
|
||||||
|
assert len(result) == 2
|
||||||
|
|
||||||
|
def test_none_values(self):
|
||||||
|
"""Test with None values"""
|
||||||
|
dict_list = [
|
||||||
|
{"a": None, "b": 2},
|
||||||
|
{"a": None, "b": 2},
|
||||||
|
{"a": 1, "b": None}
|
||||||
|
]
|
||||||
|
result = make_unique_list_of_dicts(dict_list)
|
||||||
|
assert len(result) == 2
|
||||||
|
assert {"a": None, "b": 2} in result
|
||||||
|
assert {"a": 1, "b": None} in result
|
||||||
|
|
||||||
|
def test_mixed_key_types(self):
|
||||||
|
"""Test with mixed key types (string and numeric)"""
|
||||||
|
dict_list = [
|
||||||
|
{"a": 1, 1: "one"},
|
||||||
|
{1: "one", "a": 1},
|
||||||
|
{"a": 2, 1: "one"}
|
||||||
|
]
|
||||||
|
result = make_unique_list_of_dicts(dict_list)
|
||||||
|
assert len(result) == 2
|
||||||
|
|
||||||
|
@pytest.mark.parametrize("dict_list,expected_length", [
|
||||||
|
([{"a": 1}, {"a": 1}, {"a": 1}], 1),
|
||||||
|
([{"a": 1}, {"a": 2}, {"a": 3}], 3),
|
||||||
|
([{"a": 1, "b": 2}, {"b": 2, "a": 1}], 1),
|
||||||
|
([{}, {}], 1),
|
||||||
|
([{"x": [1, 2]}, {"x": [1, 2]}], 1),
|
||||||
|
([{"a": 1}, {"b": 2}, {"c": 3}], 3),
|
||||||
|
]) # pyright: ignore[reportUnknownArgumentType]
|
||||||
|
def test_parametrized_unique_dicts(self, dict_list: list[Any], expected_length: int):
|
||||||
|
"""Test make_unique_list_of_dicts with various input combinations"""
|
||||||
|
result = make_unique_list_of_dicts(dict_list)
|
||||||
|
assert len(result) == expected_length
|
||||||
|
assert isinstance(result, list)
|
||||||
|
|
||||||
|
def test_large_list(self):
|
||||||
|
"""Test with a large list of dictionaries"""
|
||||||
|
dict_list = [{"id": i % 100, "value": f"val_{i % 100}"} for i in range(1000)]
|
||||||
|
result = make_unique_list_of_dicts(dict_list)
|
||||||
|
# Should have 100 unique dicts (0-99)
|
||||||
|
assert len(result) == 100
|
||||||
|
|
||||||
|
def test_preserves_last_occurrence(self):
|
||||||
|
"""Test behavior with duplicate entries"""
|
||||||
|
# The function uses dict comprehension, which keeps last occurrence
|
||||||
|
dict_list = [
|
||||||
|
{"a": 1, "b": 2},
|
||||||
|
{"a": 3, "b": 4},
|
||||||
|
{"a": 1, "b": 2}
|
||||||
|
]
|
||||||
|
result = make_unique_list_of_dicts(dict_list)
|
||||||
|
assert len(result) == 2
|
||||||
|
# Just verify correct unique count, order may vary
|
||||||
|
|
||||||
|
def test_nested_dicts(self):
|
||||||
|
"""Test with nested dictionaries"""
|
||||||
|
dict_list = [
|
||||||
|
{"outer": {"inner": 1}},
|
||||||
|
{"outer": {"inner": 1}},
|
||||||
|
{"outer": {"inner": 2}}
|
||||||
|
]
|
||||||
|
result = make_unique_list_of_dicts(dict_list)
|
||||||
|
assert len(result) == 2
|
||||||
|
|
||||||
|
def test_string_values_case_sensitive(self):
|
||||||
|
"""Test that string values are case-sensitive"""
|
||||||
|
dict_list = [
|
||||||
|
{"name": "John"},
|
||||||
|
{"name": "john"},
|
||||||
|
{"name": "JOHN"},
|
||||||
|
{"name": "John"}
|
||||||
|
]
|
||||||
|
result = make_unique_list_of_dicts(dict_list)
|
||||||
|
assert len(result) == 3
|
||||||
|
|
||||||
|
def test_boolean_values(self):
|
||||||
|
"""Test with boolean values"""
|
||||||
|
dict_list = [
|
||||||
|
{"flag": True, "count": 1},
|
||||||
|
{"count": 1, "flag": True},
|
||||||
|
{"flag": False, "count": 1}
|
||||||
|
]
|
||||||
|
result = make_unique_list_of_dicts(dict_list)
|
||||||
|
assert len(result) == 2
|
||||||
|
assert {"flag": True, "count": 1} in result
|
||||||
|
assert {"flag": False, "count": 1} in result
|
||||||
|
|
||||||
|
# __END__
|
||||||
|
|||||||
@@ -438,4 +438,81 @@ class TestLogLevelHandling:
|
|||||||
level = log_instance.get_log_level("file_handler")
|
level = log_instance.get_log_level("file_handler")
|
||||||
assert level == LoggingLevel.DEBUG
|
assert level == LoggingLevel.DEBUG
|
||||||
|
|
||||||
|
|
||||||
|
class DummyHandler:
|
||||||
|
"""Dummy log level handler"""
|
||||||
|
def __init__(self, level: LoggingLevel):
|
||||||
|
self.level = level
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture
|
||||||
|
def log_instance_level() -> Log:
|
||||||
|
"""
|
||||||
|
Minimal log instance with dummy handlers
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Log -- _description_
|
||||||
|
"""
|
||||||
|
log = Log(
|
||||||
|
log_path=Path("/tmp/test.log"),
|
||||||
|
log_name="test",
|
||||||
|
log_settings={
|
||||||
|
"log_level_console": LoggingLevel.DEBUG,
|
||||||
|
"log_level_file": LoggingLevel.DEBUG,
|
||||||
|
"console_enabled": False,
|
||||||
|
"console_color_output_enabled": False,
|
||||||
|
"console_format_type": None,
|
||||||
|
"per_run_log": False,
|
||||||
|
"add_start_info": False,
|
||||||
|
"add_end_info": False,
|
||||||
|
"log_queue": None,
|
||||||
|
}
|
||||||
|
)
|
||||||
|
return log
|
||||||
|
|
||||||
|
|
||||||
|
def test_any_handler_is_minimum_level_true(log_instance_level: Log):
|
||||||
|
"""Test any_handler_is_minimum_level returns True when a handler meets the level"""
|
||||||
|
# Handler with DEBUG level, should include INFO
|
||||||
|
log_instance_level.handlers = {
|
||||||
|
"h1": DummyHandler(LoggingLevel.DEBUG)
|
||||||
|
}
|
||||||
|
assert log_instance_level.any_handler_is_minimum_level(LoggingLevel.INFO) is True
|
||||||
|
|
||||||
|
|
||||||
|
def test_any_handler_is_minimum_level_false(log_instance_level: Log):
|
||||||
|
"""Test any_handler_is_minimum_level returns False when no handler meets the level"""
|
||||||
|
# Handler with WARNING level, should include ERROR
|
||||||
|
log_instance_level.handlers = {
|
||||||
|
"h1": DummyHandler(LoggingLevel.WARNING)
|
||||||
|
}
|
||||||
|
assert log_instance_level.any_handler_is_minimum_level(LoggingLevel.ERROR) is True
|
||||||
|
|
||||||
|
|
||||||
|
def test_any_handler_is_minimum_level_multiple(log_instance_level: Log):
|
||||||
|
"""Test any_handler_is_minimum_level with multiple handlers"""
|
||||||
|
# Multiple handlers, one matches
|
||||||
|
log_instance_level.handlers = {
|
||||||
|
"h1": DummyHandler(LoggingLevel.ERROR),
|
||||||
|
"h2": DummyHandler(LoggingLevel.DEBUG)
|
||||||
|
}
|
||||||
|
assert log_instance_level.any_handler_is_minimum_level(LoggingLevel.INFO) is True
|
||||||
|
# None matches
|
||||||
|
log_instance_level.handlers = {
|
||||||
|
"h1": DummyHandler(LoggingLevel.ERROR),
|
||||||
|
"h2": DummyHandler(LoggingLevel.CRITICAL)
|
||||||
|
}
|
||||||
|
assert log_instance_level.any_handler_is_minimum_level(LoggingLevel.DEBUG) is False
|
||||||
|
|
||||||
|
|
||||||
|
def test_any_handler_is_minimum_level_handles_exceptions(log_instance_level: Log):
|
||||||
|
"""Test any_handler_is_minimum_level handles exceptions gracefully"""
|
||||||
|
# Handler with missing level attribute
|
||||||
|
class BadHandler:
|
||||||
|
pass
|
||||||
|
log_instance_level.handlers = {
|
||||||
|
"h1": BadHandler()
|
||||||
|
}
|
||||||
|
# Should not raise, just return False
|
||||||
|
assert log_instance_level.any_handler_is_minimum_level(LoggingLevel.DEBUG) is False
|
||||||
# __END__
|
# __END__
|
||||||
|
|||||||
@@ -28,6 +28,7 @@ def tmp_log_path(tmp_path: Path) -> Path:
|
|||||||
@pytest.fixture
|
@pytest.fixture
|
||||||
def basic_log_settings() -> LogSettings:
|
def basic_log_settings() -> LogSettings:
|
||||||
"""Basic log settings for testing"""
|
"""Basic log settings for testing"""
|
||||||
|
# Return a new dict each time to avoid state pollution
|
||||||
return {
|
return {
|
||||||
"log_level_console": LoggingLevel.WARNING,
|
"log_level_console": LoggingLevel.WARNING,
|
||||||
"log_level_file": LoggingLevel.DEBUG,
|
"log_level_file": LoggingLevel.DEBUG,
|
||||||
@@ -308,4 +309,54 @@ class TestUpdateConsoleFormatter:
|
|||||||
# Verify message was logged
|
# Verify message was logged
|
||||||
assert "Test warning message" in caplog.text
|
assert "Test warning message" in caplog.text
|
||||||
|
|
||||||
|
def test_log_console_format_option_set_to_none(
|
||||||
|
self, tmp_log_path: Path
|
||||||
|
):
|
||||||
|
"""Test that when log_console_format option is set to None, it uses ConsoleFormatSettings.ALL"""
|
||||||
|
# Save the original DEFAULT_LOG_SETTINGS to restore it after test
|
||||||
|
original_default = Log.DEFAULT_LOG_SETTINGS.copy()
|
||||||
|
|
||||||
|
try:
|
||||||
|
# Reset DEFAULT_LOG_SETTINGS to ensure clean state
|
||||||
|
Log.DEFAULT_LOG_SETTINGS = {
|
||||||
|
"log_level_console": Log.DEFAULT_LOG_LEVEL_CONSOLE,
|
||||||
|
"log_level_file": Log.DEFAULT_LOG_LEVEL_FILE,
|
||||||
|
"per_run_log": False,
|
||||||
|
"console_enabled": True,
|
||||||
|
"console_color_output_enabled": True,
|
||||||
|
"console_format_type": ConsoleFormatSettings.ALL,
|
||||||
|
"add_start_info": True,
|
||||||
|
"add_end_info": False,
|
||||||
|
"log_queue": None,
|
||||||
|
}
|
||||||
|
|
||||||
|
# Create a fresh settings dict with console_format_type explicitly set to None
|
||||||
|
settings: LogSettings = {
|
||||||
|
"log_level_console": LoggingLevel.WARNING,
|
||||||
|
"log_level_file": LoggingLevel.DEBUG,
|
||||||
|
"per_run_log": False,
|
||||||
|
"console_enabled": True,
|
||||||
|
"console_color_output_enabled": False,
|
||||||
|
"console_format_type": None, # type: ignore
|
||||||
|
"add_start_info": False,
|
||||||
|
"add_end_info": False,
|
||||||
|
"log_queue": None,
|
||||||
|
}
|
||||||
|
|
||||||
|
# Verify that None is explicitly set in the input
|
||||||
|
assert settings['console_format_type'] is None
|
||||||
|
|
||||||
|
log = Log(
|
||||||
|
log_path=tmp_log_path,
|
||||||
|
log_name="test_log",
|
||||||
|
log_settings=settings
|
||||||
|
)
|
||||||
|
|
||||||
|
# Verify that None was replaced with ConsoleFormatSettings.ALL
|
||||||
|
# The Log class should replace None with the default value (ALL)
|
||||||
|
assert log.log_settings['console_format_type'] == ConsoleFormatSettings.ALL
|
||||||
|
finally:
|
||||||
|
# Restore original DEFAULT_LOG_SETTINGS
|
||||||
|
Log.DEFAULT_LOG_SETTINGS = original_default
|
||||||
|
|
||||||
# __END__
|
# __END__
|
||||||
|
|||||||
0
tests/unit/math_handling/__init__.py
Normal file
0
tests/unit/math_handling/__init__.py
Normal file
121
tests/unit/math_handling/test_math_helpers.py
Normal file
121
tests/unit/math_handling/test_math_helpers.py
Normal file
@@ -0,0 +1,121 @@
|
|||||||
|
"""
|
||||||
|
Unit tests for math_helpers module
|
||||||
|
"""
|
||||||
|
|
||||||
|
from corelibs.math_handling.math_helpers import gcd, lcd
|
||||||
|
|
||||||
|
|
||||||
|
class TestGcd:
|
||||||
|
"""Test cases for the gcd (Greatest Common Divisor) function"""
|
||||||
|
|
||||||
|
def test_gcd_basic_positive_numbers(self):
|
||||||
|
"""Test GCD with basic positive numbers"""
|
||||||
|
assert gcd(12, 8) == 4
|
||||||
|
assert gcd(15, 10) == 5
|
||||||
|
assert gcd(21, 14) == 7
|
||||||
|
|
||||||
|
def test_gcd_coprime_numbers(self):
|
||||||
|
"""Test GCD with coprime numbers (GCD should be 1)"""
|
||||||
|
assert gcd(13, 7) == 1
|
||||||
|
assert gcd(17, 19) == 1
|
||||||
|
assert gcd(25, 49) == 1
|
||||||
|
|
||||||
|
def test_gcd_same_numbers(self):
|
||||||
|
"""Test GCD with same numbers"""
|
||||||
|
assert gcd(5, 5) == 5
|
||||||
|
assert gcd(100, 100) == 100
|
||||||
|
|
||||||
|
def test_gcd_with_zero(self):
|
||||||
|
"""Test GCD when one or both numbers are zero"""
|
||||||
|
assert gcd(0, 5) == 5
|
||||||
|
assert gcd(5, 0) == 5
|
||||||
|
assert gcd(0, 0) == 0
|
||||||
|
|
||||||
|
def test_gcd_with_one(self):
|
||||||
|
"""Test GCD when one number is 1"""
|
||||||
|
assert gcd(1, 5) == 1
|
||||||
|
assert gcd(100, 1) == 1
|
||||||
|
|
||||||
|
def test_gcd_large_numbers(self):
|
||||||
|
"""Test GCD with large numbers"""
|
||||||
|
assert gcd(1000000, 500000) == 500000
|
||||||
|
assert gcd(123456, 789012) == 12
|
||||||
|
|
||||||
|
def test_gcd_reversed_order(self):
|
||||||
|
"""Test GCD is commutative (order doesn't matter)"""
|
||||||
|
assert gcd(12, 8) == gcd(8, 12)
|
||||||
|
assert gcd(100, 35) == gcd(35, 100)
|
||||||
|
|
||||||
|
def test_gcd_negative_numbers(self):
|
||||||
|
"""Test GCD with negative numbers"""
|
||||||
|
assert gcd(-12, 8) == 4
|
||||||
|
assert gcd(12, -8) == 4
|
||||||
|
assert gcd(-12, -8) == 4
|
||||||
|
|
||||||
|
def test_gcd_multiples(self):
|
||||||
|
"""Test GCD when one number is a multiple of the other"""
|
||||||
|
assert gcd(10, 5) == 5
|
||||||
|
assert gcd(100, 25) == 25
|
||||||
|
assert gcd(7, 21) == 7
|
||||||
|
|
||||||
|
|
||||||
|
class TestLcd:
|
||||||
|
"""Test cases for the lcd (Least Common Denominator/Multiple) function"""
|
||||||
|
|
||||||
|
def test_lcd_basic_positive_numbers(self):
|
||||||
|
"""Test LCD with basic positive numbers"""
|
||||||
|
assert lcd(4, 6) == 12
|
||||||
|
assert lcd(3, 5) == 15
|
||||||
|
assert lcd(12, 8) == 24
|
||||||
|
|
||||||
|
def test_lcd_coprime_numbers(self):
|
||||||
|
"""Test LCD with coprime numbers (should be their product)"""
|
||||||
|
assert lcd(7, 13) == 91
|
||||||
|
assert lcd(11, 13) == 143
|
||||||
|
assert lcd(5, 7) == 35
|
||||||
|
|
||||||
|
def test_lcd_same_numbers(self):
|
||||||
|
"""Test LCD with same numbers"""
|
||||||
|
assert lcd(5, 5) == 5
|
||||||
|
assert lcd(100, 100) == 100
|
||||||
|
|
||||||
|
def test_lcd_with_one(self):
|
||||||
|
"""Test LCD when one number is 1"""
|
||||||
|
assert lcd(1, 5) == 5
|
||||||
|
assert lcd(100, 1) == 100
|
||||||
|
|
||||||
|
def test_lcd_with_zero(self):
|
||||||
|
"""Test LCD when one or both numbers are zero"""
|
||||||
|
assert lcd(0, 5) == 0
|
||||||
|
assert lcd(5, 0) == 0
|
||||||
|
assert lcd(0, 0) == 0
|
||||||
|
|
||||||
|
def test_lcd_large_numbers(self):
|
||||||
|
"""Test LCD with large numbers"""
|
||||||
|
assert lcd(100, 150) == 300
|
||||||
|
assert lcd(1000, 500) == 1000
|
||||||
|
|
||||||
|
def test_lcd_reversed_order(self):
|
||||||
|
"""Test LCD is commutative (order doesn't matter)"""
|
||||||
|
assert lcd(4, 6) == lcd(6, 4)
|
||||||
|
assert lcd(12, 18) == lcd(18, 12)
|
||||||
|
|
||||||
|
def test_lcd_negative_numbers(self):
|
||||||
|
"""Test LCD with negative numbers"""
|
||||||
|
assert lcd(-4, 6) == 12
|
||||||
|
assert lcd(4, -6) == 12
|
||||||
|
assert lcd(-4, -6) == 12
|
||||||
|
|
||||||
|
def test_lcd_multiples(self):
|
||||||
|
"""Test LCD when one number is a multiple of the other"""
|
||||||
|
assert lcd(5, 10) == 10
|
||||||
|
assert lcd(3, 9) == 9
|
||||||
|
assert lcd(25, 100) == 100
|
||||||
|
|
||||||
|
def test_lcd_gcd_relationship(self):
|
||||||
|
"""Test the mathematical relationship between LCD and GCD: lcd(a,b) * gcd(a,b) = a * b"""
|
||||||
|
test_cases = [(12, 8), (15, 10), (21, 14), (100, 35)]
|
||||||
|
for a, b in test_cases:
|
||||||
|
assert lcd(a, b) * gcd(a, b) == a * b
|
||||||
|
|
||||||
|
# __END__
|
||||||
@@ -2,11 +2,10 @@
|
|||||||
PyTest: requests_handling/caller
|
PyTest: requests_handling/caller
|
||||||
"""
|
"""
|
||||||
|
|
||||||
from typing import Any
|
|
||||||
from unittest.mock import Mock, patch
|
from unittest.mock import Mock, patch
|
||||||
import pytest
|
import pytest
|
||||||
import requests
|
import requests
|
||||||
from corelibs.requests_handling.caller import Caller
|
from corelibs.requests_handling.caller import Caller, ErrorResponse, ProxyConfig
|
||||||
|
|
||||||
|
|
||||||
class TestCallerInit:
|
class TestCallerInit:
|
||||||
@@ -21,13 +20,17 @@ class TestCallerInit:
|
|||||||
assert caller.timeout == 20
|
assert caller.timeout == 20
|
||||||
assert caller.verify is True
|
assert caller.verify is True
|
||||||
assert caller.proxy is None
|
assert caller.proxy is None
|
||||||
assert caller.cafile is None
|
assert caller.ca_file is None
|
||||||
|
|
||||||
def test_init_with_all_params(self):
|
def test_init_with_all_params(self):
|
||||||
"""Test Caller initialization with all parameters"""
|
"""Test Caller initialization with all parameters"""
|
||||||
header = {"Authorization": "Bearer token", "Content-Type": "application/json"}
|
header = {"Authorization": "Bearer token", "Content-Type": "application/json"}
|
||||||
proxy = {"http": "http://proxy.example.com:8080", "https": "https://proxy.example.com:8080"}
|
proxy: ProxyConfig = {
|
||||||
caller = Caller(header=header, verify=False, timeout=30, proxy=proxy)
|
"type": "socks5",
|
||||||
|
"host": "proxy.example.com:8080",
|
||||||
|
"port": "8080"
|
||||||
|
}
|
||||||
|
caller = Caller(header=header, timeout=30, proxy=proxy, verify=False)
|
||||||
|
|
||||||
assert caller.headers == header
|
assert caller.headers == header
|
||||||
assert caller.timeout == 30
|
assert caller.timeout == 30
|
||||||
@@ -58,7 +61,7 @@ class TestCallerInit:
|
|||||||
ca_file_path = "/path/to/ca/cert.pem"
|
ca_file_path = "/path/to/ca/cert.pem"
|
||||||
caller = Caller(header={}, ca_file=ca_file_path)
|
caller = Caller(header={}, ca_file=ca_file_path)
|
||||||
|
|
||||||
assert caller.cafile == ca_file_path
|
assert caller.ca_file == ca_file_path
|
||||||
|
|
||||||
|
|
||||||
class TestCallerGet:
|
class TestCallerGet:
|
||||||
@@ -81,7 +84,8 @@ class TestCallerGet:
|
|||||||
headers={"Authorization": "Bearer token"},
|
headers={"Authorization": "Bearer token"},
|
||||||
timeout=20,
|
timeout=20,
|
||||||
verify=True,
|
verify=True,
|
||||||
proxies=None
|
proxies=None,
|
||||||
|
cert=None
|
||||||
)
|
)
|
||||||
|
|
||||||
@patch('corelibs.requests_handling.caller.requests.get')
|
@patch('corelibs.requests_handling.caller.requests.get')
|
||||||
@@ -101,7 +105,8 @@ class TestCallerGet:
|
|||||||
headers={},
|
headers={},
|
||||||
timeout=20,
|
timeout=20,
|
||||||
verify=True,
|
verify=True,
|
||||||
proxies=None
|
proxies=None,
|
||||||
|
cert=None
|
||||||
)
|
)
|
||||||
|
|
||||||
@patch('corelibs.requests_handling.caller.requests.get')
|
@patch('corelibs.requests_handling.caller.requests.get')
|
||||||
@@ -134,7 +139,11 @@ class TestCallerGet:
|
|||||||
mock_response = Mock(spec=requests.Response)
|
mock_response = Mock(spec=requests.Response)
|
||||||
mock_get.return_value = mock_response
|
mock_get.return_value = mock_response
|
||||||
|
|
||||||
proxy = {"http": "http://proxy.example.com:8080"}
|
proxy: ProxyConfig = {
|
||||||
|
"type": "socks5",
|
||||||
|
"host": "proxy.example.com:8080",
|
||||||
|
"port": "8080"
|
||||||
|
}
|
||||||
caller = Caller(header={}, proxy=proxy)
|
caller = Caller(header={}, proxy=proxy)
|
||||||
caller.get("https://api.example.com/data")
|
caller.get("https://api.example.com/data")
|
||||||
|
|
||||||
@@ -142,40 +151,46 @@ class TestCallerGet:
|
|||||||
assert mock_get.call_args[1]["proxies"] == proxy
|
assert mock_get.call_args[1]["proxies"] == proxy
|
||||||
|
|
||||||
@patch('corelibs.requests_handling.caller.requests.get')
|
@patch('corelibs.requests_handling.caller.requests.get')
|
||||||
def test_get_invalid_schema_returns_none(self, mock_get: Mock, capsys: Any):
|
def test_get_invalid_schema_returns_none(self, mock_get: Mock):
|
||||||
"""Test GET request with invalid URL schema returns None"""
|
"""Test GET request with invalid URL schema returns ErrorResponse"""
|
||||||
mock_get.side_effect = requests.exceptions.InvalidSchema("Invalid URL")
|
mock_get.side_effect = requests.exceptions.InvalidSchema("Invalid URL")
|
||||||
|
|
||||||
caller = Caller(header={})
|
caller = Caller(header={})
|
||||||
response = caller.get("invalid://example.com")
|
response = caller.get("invalid://example.com")
|
||||||
|
|
||||||
assert response is None
|
assert isinstance(response, ErrorResponse)
|
||||||
captured = capsys.readouterr()
|
assert response.code == 200
|
||||||
assert "Invalid URL during 'get'" in captured.out
|
assert "Invalid URL during 'get'" in response.message
|
||||||
|
assert response.action == "get"
|
||||||
|
assert response.url == "invalid://example.com"
|
||||||
|
|
||||||
@patch('corelibs.requests_handling.caller.requests.get')
|
@patch('corelibs.requests_handling.caller.requests.get')
|
||||||
def test_get_timeout_returns_none(self, mock_get: Mock, capsys: Any):
|
def test_get_timeout_returns_none(self, mock_get: Mock):
|
||||||
"""Test GET request timeout returns None"""
|
"""Test GET request timeout returns ErrorResponse"""
|
||||||
mock_get.side_effect = requests.exceptions.ReadTimeout("Timeout")
|
mock_get.side_effect = requests.exceptions.ReadTimeout("Timeout")
|
||||||
|
|
||||||
caller = Caller(header={})
|
caller = Caller(header={})
|
||||||
response = caller.get("https://api.example.com/data")
|
response = caller.get("https://api.example.com/data")
|
||||||
|
|
||||||
assert response is None
|
assert isinstance(response, ErrorResponse)
|
||||||
captured = capsys.readouterr()
|
assert response.code == 300
|
||||||
assert "Timeout (20s) during 'get'" in captured.out
|
assert "Timeout (20s) during 'get'" in response.message
|
||||||
|
assert response.action == "get"
|
||||||
|
assert response.url == "https://api.example.com/data"
|
||||||
|
|
||||||
@patch('corelibs.requests_handling.caller.requests.get')
|
@patch('corelibs.requests_handling.caller.requests.get')
|
||||||
def test_get_connection_error_returns_none(self, mock_get: Mock, capsys: Any):
|
def test_get_connection_error_returns_none(self, mock_get: Mock):
|
||||||
"""Test GET request connection error returns None"""
|
"""Test GET request connection error returns ErrorResponse"""
|
||||||
mock_get.side_effect = requests.exceptions.ConnectionError("Connection failed")
|
mock_get.side_effect = requests.exceptions.ConnectionError("Connection failed")
|
||||||
|
|
||||||
caller = Caller(header={})
|
caller = Caller(header={})
|
||||||
response = caller.get("https://api.example.com/data")
|
response = caller.get("https://api.example.com/data")
|
||||||
|
|
||||||
assert response is None
|
assert isinstance(response, ErrorResponse)
|
||||||
captured = capsys.readouterr()
|
assert response.code == 400
|
||||||
assert "Connection error during 'get'" in captured.out
|
assert "Connection error during 'get'" in response.message
|
||||||
|
assert response.action == "get"
|
||||||
|
assert response.url == "https://api.example.com/data"
|
||||||
|
|
||||||
|
|
||||||
class TestCallerPost:
|
class TestCallerPost:
|
||||||
@@ -200,7 +215,8 @@ class TestCallerPost:
|
|||||||
headers={"Content-Type": "application/json"},
|
headers={"Content-Type": "application/json"},
|
||||||
timeout=20,
|
timeout=20,
|
||||||
verify=True,
|
verify=True,
|
||||||
proxies=None
|
proxies=None,
|
||||||
|
cert=None
|
||||||
)
|
)
|
||||||
|
|
||||||
@patch('corelibs.requests_handling.caller.requests.post')
|
@patch('corelibs.requests_handling.caller.requests.post')
|
||||||
@@ -234,40 +250,46 @@ class TestCallerPost:
|
|||||||
assert mock_post.call_args[1]["json"] == data
|
assert mock_post.call_args[1]["json"] == data
|
||||||
|
|
||||||
@patch('corelibs.requests_handling.caller.requests.post')
|
@patch('corelibs.requests_handling.caller.requests.post')
|
||||||
def test_post_invalid_schema_returns_none(self, mock_post: Mock, capsys: Any):
|
def test_post_invalid_schema_returns_none(self, mock_post: Mock):
|
||||||
"""Test POST request with invalid URL schema returns None"""
|
"""Test POST request with invalid URL schema returns ErrorResponse"""
|
||||||
mock_post.side_effect = requests.exceptions.InvalidSchema("Invalid URL")
|
mock_post.side_effect = requests.exceptions.InvalidSchema("Invalid URL")
|
||||||
|
|
||||||
caller = Caller(header={})
|
caller = Caller(header={})
|
||||||
response = caller.post("invalid://example.com", data={"test": "data"})
|
response = caller.post("invalid://example.com", data={"test": "data"})
|
||||||
|
|
||||||
assert response is None
|
assert isinstance(response, ErrorResponse)
|
||||||
captured = capsys.readouterr()
|
assert response.code == 200
|
||||||
assert "Invalid URL during 'post'" in captured.out
|
assert "Invalid URL during 'post'" in response.message
|
||||||
|
assert response.action == "post"
|
||||||
|
assert response.url == "invalid://example.com"
|
||||||
|
|
||||||
@patch('corelibs.requests_handling.caller.requests.post')
|
@patch('corelibs.requests_handling.caller.requests.post')
|
||||||
def test_post_timeout_returns_none(self, mock_post: Mock, capsys: Any):
|
def test_post_timeout_returns_none(self, mock_post: Mock):
|
||||||
"""Test POST request timeout returns None"""
|
"""Test POST request timeout returns ErrorResponse"""
|
||||||
mock_post.side_effect = requests.exceptions.ReadTimeout("Timeout")
|
mock_post.side_effect = requests.exceptions.ReadTimeout("Timeout")
|
||||||
|
|
||||||
caller = Caller(header={})
|
caller = Caller(header={})
|
||||||
response = caller.post("https://api.example.com/data", data={"test": "data"})
|
response = caller.post("https://api.example.com/data", data={"test": "data"})
|
||||||
|
|
||||||
assert response is None
|
assert isinstance(response, ErrorResponse)
|
||||||
captured = capsys.readouterr()
|
assert response.code == 300
|
||||||
assert "Timeout (20s) during 'post'" in captured.out
|
assert "Timeout (20s) during 'post'" in response.message
|
||||||
|
assert response.action == "post"
|
||||||
|
assert response.url == "https://api.example.com/data"
|
||||||
|
|
||||||
@patch('corelibs.requests_handling.caller.requests.post')
|
@patch('corelibs.requests_handling.caller.requests.post')
|
||||||
def test_post_connection_error_returns_none(self, mock_post: Mock, capsys: Any):
|
def test_post_connection_error_returns_none(self, mock_post: Mock):
|
||||||
"""Test POST request connection error returns None"""
|
"""Test POST request connection error returns ErrorResponse"""
|
||||||
mock_post.side_effect = requests.exceptions.ConnectionError("Connection failed")
|
mock_post.side_effect = requests.exceptions.ConnectionError("Connection failed")
|
||||||
|
|
||||||
caller = Caller(header={})
|
caller = Caller(header={})
|
||||||
response = caller.post("https://api.example.com/data", data={"test": "data"})
|
response = caller.post("https://api.example.com/data", data={"test": "data"})
|
||||||
|
|
||||||
assert response is None
|
assert isinstance(response, ErrorResponse)
|
||||||
captured = capsys.readouterr()
|
assert response.code == 400
|
||||||
assert "Connection error during 'post'" in captured.out
|
assert "Connection error during 'post'" in response.message
|
||||||
|
assert response.action == "post"
|
||||||
|
assert response.url == "https://api.example.com/data"
|
||||||
|
|
||||||
|
|
||||||
class TestCallerPut:
|
class TestCallerPut:
|
||||||
@@ -292,7 +314,8 @@ class TestCallerPut:
|
|||||||
headers={"Content-Type": "application/json"},
|
headers={"Content-Type": "application/json"},
|
||||||
timeout=20,
|
timeout=20,
|
||||||
verify=True,
|
verify=True,
|
||||||
proxies=None
|
proxies=None,
|
||||||
|
cert=None
|
||||||
)
|
)
|
||||||
|
|
||||||
@patch('corelibs.requests_handling.caller.requests.put')
|
@patch('corelibs.requests_handling.caller.requests.put')
|
||||||
@@ -311,16 +334,18 @@ class TestCallerPut:
|
|||||||
assert mock_put.call_args[1]["params"] == params
|
assert mock_put.call_args[1]["params"] == params
|
||||||
|
|
||||||
@patch('corelibs.requests_handling.caller.requests.put')
|
@patch('corelibs.requests_handling.caller.requests.put')
|
||||||
def test_put_timeout_returns_none(self, mock_put: Mock, capsys: Any):
|
def test_put_timeout_returns_none(self, mock_put: Mock):
|
||||||
"""Test PUT request timeout returns None"""
|
"""Test PUT request timeout returns ErrorResponse"""
|
||||||
mock_put.side_effect = requests.exceptions.ReadTimeout("Timeout")
|
mock_put.side_effect = requests.exceptions.ReadTimeout("Timeout")
|
||||||
|
|
||||||
caller = Caller(header={})
|
caller = Caller(header={})
|
||||||
response = caller.put("https://api.example.com/data/1", data={"test": "data"})
|
response = caller.put("https://api.example.com/data/1", data={"test": "data"})
|
||||||
|
|
||||||
assert response is None
|
assert isinstance(response, ErrorResponse)
|
||||||
captured = capsys.readouterr()
|
assert response.code == 300
|
||||||
assert "Timeout (20s) during 'put'" in captured.out
|
assert "Timeout (20s) during 'put'" in response.message
|
||||||
|
assert response.action == "put"
|
||||||
|
assert response.url == "https://api.example.com/data/1"
|
||||||
|
|
||||||
|
|
||||||
class TestCallerPatch:
|
class TestCallerPatch:
|
||||||
@@ -345,7 +370,8 @@ class TestCallerPatch:
|
|||||||
headers={"Content-Type": "application/json"},
|
headers={"Content-Type": "application/json"},
|
||||||
timeout=20,
|
timeout=20,
|
||||||
verify=True,
|
verify=True,
|
||||||
proxies=None
|
proxies=None,
|
||||||
|
cert=None
|
||||||
)
|
)
|
||||||
|
|
||||||
@patch('corelibs.requests_handling.caller.requests.patch')
|
@patch('corelibs.requests_handling.caller.requests.patch')
|
||||||
@@ -364,16 +390,18 @@ class TestCallerPatch:
|
|||||||
assert mock_patch.call_args[1]["params"] == params
|
assert mock_patch.call_args[1]["params"] == params
|
||||||
|
|
||||||
@patch('corelibs.requests_handling.caller.requests.patch')
|
@patch('corelibs.requests_handling.caller.requests.patch')
|
||||||
def test_patch_connection_error_returns_none(self, mock_patch: Mock, capsys: Any):
|
def test_patch_connection_error_returns_none(self, mock_patch: Mock):
|
||||||
"""Test PATCH request connection error returns None"""
|
"""Test PATCH request connection error returns ErrorResponse"""
|
||||||
mock_patch.side_effect = requests.exceptions.ConnectionError("Connection failed")
|
mock_patch.side_effect = requests.exceptions.ConnectionError("Connection failed")
|
||||||
|
|
||||||
caller = Caller(header={})
|
caller = Caller(header={})
|
||||||
response = caller.patch("https://api.example.com/data/1", data={"test": "data"})
|
response = caller.patch("https://api.example.com/data/1", data={"test": "data"})
|
||||||
|
|
||||||
assert response is None
|
assert isinstance(response, ErrorResponse)
|
||||||
captured = capsys.readouterr()
|
assert response.code == 400
|
||||||
assert "Connection error during 'patch'" in captured.out
|
assert "Connection error during 'patch'" in response.message
|
||||||
|
assert response.action == "patch"
|
||||||
|
assert response.url == "https://api.example.com/data/1"
|
||||||
|
|
||||||
|
|
||||||
class TestCallerDelete:
|
class TestCallerDelete:
|
||||||
@@ -396,7 +424,8 @@ class TestCallerDelete:
|
|||||||
headers={"Authorization": "Bearer token"},
|
headers={"Authorization": "Bearer token"},
|
||||||
timeout=20,
|
timeout=20,
|
||||||
verify=True,
|
verify=True,
|
||||||
proxies=None
|
proxies=None,
|
||||||
|
cert=None
|
||||||
)
|
)
|
||||||
|
|
||||||
@patch('corelibs.requests_handling.caller.requests.delete')
|
@patch('corelibs.requests_handling.caller.requests.delete')
|
||||||
@@ -414,16 +443,18 @@ class TestCallerDelete:
|
|||||||
assert mock_delete.call_args[1]["params"] == params
|
assert mock_delete.call_args[1]["params"] == params
|
||||||
|
|
||||||
@patch('corelibs.requests_handling.caller.requests.delete')
|
@patch('corelibs.requests_handling.caller.requests.delete')
|
||||||
def test_delete_invalid_schema_returns_none(self, mock_delete: Mock, capsys: Any):
|
def test_delete_invalid_schema_returns_none(self, mock_delete: Mock):
|
||||||
"""Test DELETE request with invalid URL schema returns None"""
|
"""Test DELETE request with invalid URL schema returns ErrorResponse"""
|
||||||
mock_delete.side_effect = requests.exceptions.InvalidSchema("Invalid URL")
|
mock_delete.side_effect = requests.exceptions.InvalidSchema("Invalid URL")
|
||||||
|
|
||||||
caller = Caller(header={})
|
caller = Caller(header={})
|
||||||
response = caller.delete("invalid://example.com/data/1")
|
response = caller.delete("invalid://example.com/data/1")
|
||||||
|
|
||||||
assert response is None
|
assert isinstance(response, ErrorResponse)
|
||||||
captured = capsys.readouterr()
|
assert response.code == 200
|
||||||
assert "Invalid URL during 'delete'" in captured.out
|
assert "Invalid URL during 'delete'" in response.message
|
||||||
|
assert response.action == "delete"
|
||||||
|
assert response.url == "invalid://example.com/data/1"
|
||||||
|
|
||||||
|
|
||||||
class TestCallerParametrized:
|
class TestCallerParametrized:
|
||||||
@@ -492,7 +523,7 @@ class TestCallerParametrized:
|
|||||||
])
|
])
|
||||||
@patch('corelibs.requests_handling.caller.requests.get')
|
@patch('corelibs.requests_handling.caller.requests.get')
|
||||||
def test_exception_handling(
|
def test_exception_handling(
|
||||||
self, mock_get: Mock, exception_class: type, expected_message: str, capsys: Any
|
self, mock_get: Mock, exception_class: type, expected_message: str
|
||||||
):
|
):
|
||||||
"""Test exception handling for all exception types"""
|
"""Test exception handling for all exception types"""
|
||||||
mock_get.side_effect = exception_class("Test error")
|
mock_get.side_effect = exception_class("Test error")
|
||||||
@@ -500,9 +531,8 @@ class TestCallerParametrized:
|
|||||||
caller = Caller(header={})
|
caller = Caller(header={})
|
||||||
response = caller.get("https://api.example.com/data")
|
response = caller.get("https://api.example.com/data")
|
||||||
|
|
||||||
assert response is None
|
assert isinstance(response, ErrorResponse)
|
||||||
captured = capsys.readouterr()
|
assert expected_message in response.message
|
||||||
assert expected_message in captured.out
|
|
||||||
|
|
||||||
|
|
||||||
class TestCallerIntegration:
|
class TestCallerIntegration:
|
||||||
@@ -599,7 +629,8 @@ class TestCallerEdgeCases:
|
|||||||
headers={},
|
headers={},
|
||||||
timeout=20,
|
timeout=20,
|
||||||
verify=True,
|
verify=True,
|
||||||
proxies=None
|
proxies=None,
|
||||||
|
cert=None
|
||||||
)
|
)
|
||||||
|
|
||||||
@patch('corelibs.requests_handling.caller.requests.post')
|
@patch('corelibs.requests_handling.caller.requests.post')
|
||||||
@@ -659,7 +690,8 @@ class TestCallerEdgeCases:
|
|||||||
headers={},
|
headers={},
|
||||||
timeout=20,
|
timeout=20,
|
||||||
verify=True,
|
verify=True,
|
||||||
proxies=None
|
proxies=None,
|
||||||
|
cert=None
|
||||||
)
|
)
|
||||||
|
|
||||||
@patch('corelibs.requests_handling.caller.requests.get')
|
@patch('corelibs.requests_handling.caller.requests.get')
|
||||||
@@ -679,7 +711,8 @@ class TestCallerEdgeCases:
|
|||||||
headers={},
|
headers={},
|
||||||
timeout=20,
|
timeout=20,
|
||||||
verify=True,
|
verify=True,
|
||||||
proxies=None
|
proxies=None,
|
||||||
|
cert=None
|
||||||
)
|
)
|
||||||
|
|
||||||
def test_timeout_zero(self):
|
def test_timeout_zero(self):
|
||||||
@@ -730,9 +763,10 @@ class TestCallerProxyHandling:
|
|||||||
mock_response = Mock(spec=requests.Response)
|
mock_response = Mock(spec=requests.Response)
|
||||||
mock_get.return_value = mock_response
|
mock_get.return_value = mock_response
|
||||||
|
|
||||||
proxy = {
|
proxy: ProxyConfig = {
|
||||||
"http": "http://proxy.example.com:8080",
|
"type": "socks5",
|
||||||
"https": "https://proxy.example.com:8080"
|
"host": "proxy.example.com:8080",
|
||||||
|
"port": "8080"
|
||||||
}
|
}
|
||||||
caller = Caller(header={}, proxy=proxy)
|
caller = Caller(header={}, proxy=proxy)
|
||||||
caller.get("https://api.example.com/data")
|
caller.get("https://api.example.com/data")
|
||||||
@@ -746,9 +780,10 @@ class TestCallerProxyHandling:
|
|||||||
mock_response = Mock(spec=requests.Response)
|
mock_response = Mock(spec=requests.Response)
|
||||||
mock_post.return_value = mock_response
|
mock_post.return_value = mock_response
|
||||||
|
|
||||||
proxy = {
|
proxy: ProxyConfig = {
|
||||||
"http": "http://user:pass@proxy.example.com:8080",
|
"type": "socks5",
|
||||||
"https": "https://user:pass@proxy.example.com:8080"
|
"host": "proxy.example.com:8080",
|
||||||
|
"port": "8080"
|
||||||
}
|
}
|
||||||
caller = Caller(header={}, proxy=proxy)
|
caller = Caller(header={}, proxy=proxy)
|
||||||
caller.post("https://api.example.com/data", data={"test": "data"})
|
caller.post("https://api.example.com/data", data={"test": "data"})
|
||||||
@@ -789,7 +824,7 @@ class TestCallerResponseHandling:
|
|||||||
caller = Caller(header={})
|
caller = Caller(header={})
|
||||||
response = caller.get("https://api.example.com/data")
|
response = caller.get("https://api.example.com/data")
|
||||||
|
|
||||||
assert response is not None
|
assert not isinstance(response, ErrorResponse)
|
||||||
assert response.status_code == 200
|
assert response.status_code == 200
|
||||||
assert response.text == "Success"
|
assert response.text == "Success"
|
||||||
assert response.json() == {"status": "ok"}
|
assert response.json() == {"status": "ok"}
|
||||||
@@ -805,7 +840,7 @@ class TestCallerResponseHandling:
|
|||||||
caller = Caller(header={})
|
caller = Caller(header={})
|
||||||
response = caller.get("https://api.example.com/data")
|
response = caller.get("https://api.example.com/data")
|
||||||
|
|
||||||
assert response is not None
|
assert not isinstance(response, ErrorResponse)
|
||||||
assert response.status_code == status_code
|
assert response.status_code == status_code
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
@@ -1,516 +0,0 @@
|
|||||||
"""
|
|
||||||
PyTest: string_handling/text_colors
|
|
||||||
"""
|
|
||||||
|
|
||||||
import pytest
|
|
||||||
from corelibs.string_handling.text_colors import Colors
|
|
||||||
|
|
||||||
|
|
||||||
class TestColorsInitialState:
|
|
||||||
"""Tests for Colors class initial state"""
|
|
||||||
|
|
||||||
def test_bold_initial_value(self):
|
|
||||||
"""Test that bold has correct ANSI code"""
|
|
||||||
assert Colors.bold == '\033[1m'
|
|
||||||
|
|
||||||
def test_underline_initial_value(self):
|
|
||||||
"""Test that underline has correct ANSI code"""
|
|
||||||
assert Colors.underline == '\033[4m'
|
|
||||||
|
|
||||||
def test_end_initial_value(self):
|
|
||||||
"""Test that end has correct ANSI code"""
|
|
||||||
assert Colors.end == '\033[0m'
|
|
||||||
|
|
||||||
def test_reset_initial_value(self):
|
|
||||||
"""Test that reset has correct ANSI code"""
|
|
||||||
assert Colors.reset == '\033[0m'
|
|
||||||
|
|
||||||
|
|
||||||
class TestColorsNormal:
|
|
||||||
"""Tests for normal color ANSI codes"""
|
|
||||||
|
|
||||||
def test_black_normal(self):
|
|
||||||
"""Test black color code"""
|
|
||||||
assert Colors.black == "\033[30m"
|
|
||||||
|
|
||||||
def test_red_normal(self):
|
|
||||||
"""Test red color code"""
|
|
||||||
assert Colors.red == "\033[31m"
|
|
||||||
|
|
||||||
def test_green_normal(self):
|
|
||||||
"""Test green color code"""
|
|
||||||
assert Colors.green == "\033[32m"
|
|
||||||
|
|
||||||
def test_yellow_normal(self):
|
|
||||||
"""Test yellow color code"""
|
|
||||||
assert Colors.yellow == "\033[33m"
|
|
||||||
|
|
||||||
def test_blue_normal(self):
|
|
||||||
"""Test blue color code"""
|
|
||||||
assert Colors.blue == "\033[34m"
|
|
||||||
|
|
||||||
def test_magenta_normal(self):
|
|
||||||
"""Test magenta color code"""
|
|
||||||
assert Colors.magenta == "\033[35m"
|
|
||||||
|
|
||||||
def test_cyan_normal(self):
|
|
||||||
"""Test cyan color code"""
|
|
||||||
assert Colors.cyan == "\033[36m"
|
|
||||||
|
|
||||||
def test_white_normal(self):
|
|
||||||
"""Test white color code"""
|
|
||||||
assert Colors.white == "\033[37m"
|
|
||||||
|
|
||||||
|
|
||||||
class TestColorsBold:
|
|
||||||
"""Tests for bold color ANSI codes"""
|
|
||||||
|
|
||||||
def test_black_bold(self):
|
|
||||||
"""Test black bold color code"""
|
|
||||||
assert Colors.black_bold == "\033[1;30m"
|
|
||||||
|
|
||||||
def test_red_bold(self):
|
|
||||||
"""Test red bold color code"""
|
|
||||||
assert Colors.red_bold == "\033[1;31m"
|
|
||||||
|
|
||||||
def test_green_bold(self):
|
|
||||||
"""Test green bold color code"""
|
|
||||||
assert Colors.green_bold == "\033[1;32m"
|
|
||||||
|
|
||||||
def test_yellow_bold(self):
|
|
||||||
"""Test yellow bold color code"""
|
|
||||||
assert Colors.yellow_bold == "\033[1;33m"
|
|
||||||
|
|
||||||
def test_blue_bold(self):
|
|
||||||
"""Test blue bold color code"""
|
|
||||||
assert Colors.blue_bold == "\033[1;34m"
|
|
||||||
|
|
||||||
def test_magenta_bold(self):
|
|
||||||
"""Test magenta bold color code"""
|
|
||||||
assert Colors.magenta_bold == "\033[1;35m"
|
|
||||||
|
|
||||||
def test_cyan_bold(self):
|
|
||||||
"""Test cyan bold color code"""
|
|
||||||
assert Colors.cyan_bold == "\033[1;36m"
|
|
||||||
|
|
||||||
def test_white_bold(self):
|
|
||||||
"""Test white bold color code"""
|
|
||||||
assert Colors.white_bold == "\033[1;37m"
|
|
||||||
|
|
||||||
|
|
||||||
class TestColorsBright:
|
|
||||||
"""Tests for bright color ANSI codes"""
|
|
||||||
|
|
||||||
def test_black_bright(self):
|
|
||||||
"""Test black bright color code"""
|
|
||||||
assert Colors.black_bright == '\033[90m'
|
|
||||||
|
|
||||||
def test_red_bright(self):
|
|
||||||
"""Test red bright color code"""
|
|
||||||
assert Colors.red_bright == '\033[91m'
|
|
||||||
|
|
||||||
def test_green_bright(self):
|
|
||||||
"""Test green bright color code"""
|
|
||||||
assert Colors.green_bright == '\033[92m'
|
|
||||||
|
|
||||||
def test_yellow_bright(self):
|
|
||||||
"""Test yellow bright color code"""
|
|
||||||
assert Colors.yellow_bright == '\033[93m'
|
|
||||||
|
|
||||||
def test_blue_bright(self):
|
|
||||||
"""Test blue bright color code"""
|
|
||||||
assert Colors.blue_bright == '\033[94m'
|
|
||||||
|
|
||||||
def test_magenta_bright(self):
|
|
||||||
"""Test magenta bright color code"""
|
|
||||||
assert Colors.magenta_bright == '\033[95m'
|
|
||||||
|
|
||||||
def test_cyan_bright(self):
|
|
||||||
"""Test cyan bright color code"""
|
|
||||||
assert Colors.cyan_bright == '\033[96m'
|
|
||||||
|
|
||||||
def test_white_bright(self):
|
|
||||||
"""Test white bright color code"""
|
|
||||||
assert Colors.white_bright == '\033[97m'
|
|
||||||
|
|
||||||
|
|
||||||
class TestColorsDisable:
|
|
||||||
"""Tests for Colors.disable() method"""
|
|
||||||
|
|
||||||
def setup_method(self):
|
|
||||||
"""Reset colors before each test"""
|
|
||||||
Colors.reset_colors()
|
|
||||||
|
|
||||||
def teardown_method(self):
|
|
||||||
"""Reset colors after each test"""
|
|
||||||
Colors.reset_colors()
|
|
||||||
|
|
||||||
def test_disable_bold_and_underline(self):
|
|
||||||
"""Test that disable() sets bold and underline to empty strings"""
|
|
||||||
Colors.disable()
|
|
||||||
assert Colors.bold == ''
|
|
||||||
assert Colors.underline == ''
|
|
||||||
|
|
||||||
def test_disable_end_and_reset(self):
|
|
||||||
"""Test that disable() sets end and reset to empty strings"""
|
|
||||||
Colors.disable()
|
|
||||||
assert Colors.end == ''
|
|
||||||
assert Colors.reset == ''
|
|
||||||
|
|
||||||
def test_disable_normal_colors(self):
|
|
||||||
"""Test that disable() sets all normal colors to empty strings"""
|
|
||||||
Colors.disable()
|
|
||||||
assert Colors.black == ''
|
|
||||||
assert Colors.red == ''
|
|
||||||
assert Colors.green == ''
|
|
||||||
assert Colors.yellow == ''
|
|
||||||
assert Colors.blue == ''
|
|
||||||
assert Colors.magenta == ''
|
|
||||||
assert Colors.cyan == ''
|
|
||||||
assert Colors.white == ''
|
|
||||||
|
|
||||||
def test_disable_bold_colors(self):
|
|
||||||
"""Test that disable() sets all bold colors to empty strings"""
|
|
||||||
Colors.disable()
|
|
||||||
assert Colors.black_bold == ''
|
|
||||||
assert Colors.red_bold == ''
|
|
||||||
assert Colors.green_bold == ''
|
|
||||||
assert Colors.yellow_bold == ''
|
|
||||||
assert Colors.blue_bold == ''
|
|
||||||
assert Colors.magenta_bold == ''
|
|
||||||
assert Colors.cyan_bold == ''
|
|
||||||
assert Colors.white_bold == ''
|
|
||||||
|
|
||||||
def test_disable_bright_colors(self):
|
|
||||||
"""Test that disable() sets all bright colors to empty strings"""
|
|
||||||
Colors.disable()
|
|
||||||
assert Colors.black_bright == ''
|
|
||||||
assert Colors.red_bright == ''
|
|
||||||
assert Colors.green_bright == ''
|
|
||||||
assert Colors.yellow_bright == ''
|
|
||||||
assert Colors.blue_bright == ''
|
|
||||||
assert Colors.magenta_bright == ''
|
|
||||||
assert Colors.cyan_bright == ''
|
|
||||||
assert Colors.white_bright == ''
|
|
||||||
|
|
||||||
def test_disable_all_colors_at_once(self):
|
|
||||||
"""Test that all color attributes are empty after disable()"""
|
|
||||||
Colors.disable()
|
|
||||||
# Check that all public attributes are empty strings
|
|
||||||
for attr in dir(Colors):
|
|
||||||
if not attr.startswith('_') and attr not in ['disable', 'reset_colors']:
|
|
||||||
assert getattr(Colors, attr) == '', f"{attr} should be empty after disable()"
|
|
||||||
|
|
||||||
|
|
||||||
class TestColorsResetColors:
|
|
||||||
"""Tests for Colors.reset_colors() method"""
|
|
||||||
|
|
||||||
def setup_method(self):
|
|
||||||
"""Disable colors before each test"""
|
|
||||||
Colors.disable()
|
|
||||||
|
|
||||||
def teardown_method(self):
|
|
||||||
"""Reset colors after each test"""
|
|
||||||
Colors.reset_colors()
|
|
||||||
|
|
||||||
def test_reset_bold_and_underline(self):
|
|
||||||
"""Test that reset_colors() restores bold and underline"""
|
|
||||||
Colors.reset_colors()
|
|
||||||
assert Colors.bold == '\033[1m'
|
|
||||||
assert Colors.underline == '\033[4m'
|
|
||||||
|
|
||||||
def test_reset_end_and_reset(self):
|
|
||||||
"""Test that reset_colors() restores end and reset"""
|
|
||||||
Colors.reset_colors()
|
|
||||||
assert Colors.end == '\033[0m'
|
|
||||||
assert Colors.reset == '\033[0m'
|
|
||||||
|
|
||||||
def test_reset_normal_colors(self):
|
|
||||||
"""Test that reset_colors() restores all normal colors"""
|
|
||||||
Colors.reset_colors()
|
|
||||||
assert Colors.black == "\033[30m"
|
|
||||||
assert Colors.red == "\033[31m"
|
|
||||||
assert Colors.green == "\033[32m"
|
|
||||||
assert Colors.yellow == "\033[33m"
|
|
||||||
assert Colors.blue == "\033[34m"
|
|
||||||
assert Colors.magenta == "\033[35m"
|
|
||||||
assert Colors.cyan == "\033[36m"
|
|
||||||
assert Colors.white == "\033[37m"
|
|
||||||
|
|
||||||
def test_reset_bold_colors(self):
|
|
||||||
"""Test that reset_colors() restores all bold colors"""
|
|
||||||
Colors.reset_colors()
|
|
||||||
assert Colors.black_bold == "\033[1;30m"
|
|
||||||
assert Colors.red_bold == "\033[1;31m"
|
|
||||||
assert Colors.green_bold == "\033[1;32m"
|
|
||||||
assert Colors.yellow_bold == "\033[1;33m"
|
|
||||||
assert Colors.blue_bold == "\033[1;34m"
|
|
||||||
assert Colors.magenta_bold == "\033[1;35m"
|
|
||||||
assert Colors.cyan_bold == "\033[1;36m"
|
|
||||||
assert Colors.white_bold == "\033[1;37m"
|
|
||||||
|
|
||||||
def test_reset_bright_colors(self):
|
|
||||||
"""Test that reset_colors() restores all bright colors"""
|
|
||||||
Colors.reset_colors()
|
|
||||||
assert Colors.black_bright == '\033[90m'
|
|
||||||
assert Colors.red_bright == '\033[91m'
|
|
||||||
assert Colors.green_bright == '\033[92m'
|
|
||||||
assert Colors.yellow_bright == '\033[93m'
|
|
||||||
assert Colors.blue_bright == '\033[94m'
|
|
||||||
assert Colors.magenta_bright == '\033[95m'
|
|
||||||
assert Colors.cyan_bright == '\033[96m'
|
|
||||||
assert Colors.white_bright == '\033[97m'
|
|
||||||
|
|
||||||
|
|
||||||
class TestColorsDisableAndReset:
|
|
||||||
"""Tests for disable and reset cycle"""
|
|
||||||
|
|
||||||
def setup_method(self):
|
|
||||||
"""Reset colors before each test"""
|
|
||||||
Colors.reset_colors()
|
|
||||||
|
|
||||||
def teardown_method(self):
|
|
||||||
"""Reset colors after each test"""
|
|
||||||
Colors.reset_colors()
|
|
||||||
|
|
||||||
def test_disable_then_reset_cycle(self):
|
|
||||||
"""Test that colors can be disabled and then reset multiple times"""
|
|
||||||
# Initial state
|
|
||||||
original_red = Colors.red
|
|
||||||
|
|
||||||
# Disable
|
|
||||||
Colors.disable()
|
|
||||||
assert Colors.red == ''
|
|
||||||
|
|
||||||
# Reset
|
|
||||||
Colors.reset_colors()
|
|
||||||
assert Colors.red == original_red
|
|
||||||
|
|
||||||
# Disable again
|
|
||||||
Colors.disable()
|
|
||||||
assert Colors.red == ''
|
|
||||||
|
|
||||||
# Reset again
|
|
||||||
Colors.reset_colors()
|
|
||||||
assert Colors.red == original_red
|
|
||||||
|
|
||||||
def test_multiple_disables(self):
|
|
||||||
"""Test that calling disable() multiple times is safe"""
|
|
||||||
Colors.disable()
|
|
||||||
Colors.disable()
|
|
||||||
Colors.disable()
|
|
||||||
assert Colors.red == ''
|
|
||||||
assert Colors.blue == ''
|
|
||||||
|
|
||||||
def test_multiple_resets(self):
|
|
||||||
"""Test that calling reset_colors() multiple times is safe"""
|
|
||||||
Colors.reset_colors()
|
|
||||||
Colors.reset_colors()
|
|
||||||
Colors.reset_colors()
|
|
||||||
assert Colors.red == "\033[31m"
|
|
||||||
assert Colors.blue == "\033[34m"
|
|
||||||
|
|
||||||
|
|
||||||
class TestColorsUsage:
|
|
||||||
"""Tests for practical usage of Colors class"""
|
|
||||||
|
|
||||||
def setup_method(self):
|
|
||||||
"""Reset colors before each test"""
|
|
||||||
Colors.reset_colors()
|
|
||||||
|
|
||||||
def teardown_method(self):
|
|
||||||
"""Reset colors after each test"""
|
|
||||||
Colors.reset_colors()
|
|
||||||
|
|
||||||
def test_colored_string_with_reset(self):
|
|
||||||
"""Test creating a colored string with reset"""
|
|
||||||
result = f"{Colors.red}Error{Colors.end}"
|
|
||||||
assert result == "\033[31mError\033[0m"
|
|
||||||
|
|
||||||
def test_bold_colored_string(self):
|
|
||||||
"""Test creating a bold colored string"""
|
|
||||||
result = f"{Colors.bold}{Colors.yellow}Warning{Colors.end}"
|
|
||||||
assert result == "\033[1m\033[33mWarning\033[0m"
|
|
||||||
|
|
||||||
def test_underline_colored_string(self):
|
|
||||||
"""Test creating an underlined colored string"""
|
|
||||||
result = f"{Colors.underline}{Colors.blue}Info{Colors.end}"
|
|
||||||
assert result == "\033[4m\033[34mInfo\033[0m"
|
|
||||||
|
|
||||||
def test_bold_underline_colored_string(self):
|
|
||||||
"""Test creating a bold and underlined colored string"""
|
|
||||||
result = f"{Colors.bold}{Colors.underline}{Colors.green}Success{Colors.end}"
|
|
||||||
assert result == "\033[1m\033[4m\033[32mSuccess\033[0m"
|
|
||||||
|
|
||||||
def test_multiple_colors_in_string(self):
|
|
||||||
"""Test using multiple colors in one string"""
|
|
||||||
result = f"{Colors.red}Red{Colors.end} {Colors.blue}Blue{Colors.end}"
|
|
||||||
assert result == "\033[31mRed\033[0m \033[34mBlue\033[0m"
|
|
||||||
|
|
||||||
def test_bright_color_usage(self):
|
|
||||||
"""Test using bright color variants"""
|
|
||||||
result = f"{Colors.cyan_bright}Bright Cyan{Colors.end}"
|
|
||||||
assert result == "\033[96mBright Cyan\033[0m"
|
|
||||||
|
|
||||||
def test_bold_color_shortcut(self):
|
|
||||||
"""Test using bold color shortcuts"""
|
|
||||||
result = f"{Colors.red_bold}Bold Red{Colors.end}"
|
|
||||||
assert result == "\033[1;31mBold Red\033[0m"
|
|
||||||
|
|
||||||
def test_disabled_colors_produce_plain_text(self):
|
|
||||||
"""Test that disabled colors produce plain text without ANSI codes"""
|
|
||||||
Colors.disable()
|
|
||||||
result = f"{Colors.red}Error{Colors.end}"
|
|
||||||
assert result == "Error"
|
|
||||||
assert "\033[" not in result
|
|
||||||
|
|
||||||
def test_disabled_bold_underline_produce_plain_text(self):
|
|
||||||
"""Test that disabled formatting produces plain text"""
|
|
||||||
Colors.disable()
|
|
||||||
result = f"{Colors.bold}{Colors.underline}{Colors.green}Success{Colors.end}"
|
|
||||||
assert result == "Success"
|
|
||||||
assert "\033[" not in result
|
|
||||||
|
|
||||||
|
|
||||||
class TestColorsPrivateAttributes:
|
|
||||||
"""Tests to ensure private attributes are not directly accessible"""
|
|
||||||
|
|
||||||
def test_private_bold_not_accessible(self):
|
|
||||||
"""Test that __BOLD is private"""
|
|
||||||
with pytest.raises(AttributeError):
|
|
||||||
_ = Colors.__BOLD
|
|
||||||
|
|
||||||
def test_private_colors_not_accessible(self):
|
|
||||||
"""Test that private color attributes are not accessible"""
|
|
||||||
with pytest.raises(AttributeError):
|
|
||||||
_ = Colors.__RED
|
|
||||||
with pytest.raises(AttributeError):
|
|
||||||
_ = Colors.__GREEN
|
|
||||||
|
|
||||||
|
|
||||||
# Parametrized tests
|
|
||||||
@pytest.mark.parametrize(
    "color_attr,expected_code",
    [
        # Normal foreground colors occupy the contiguous ANSI range 30-37.
        (name, f"\033[{30 + offset}m")
        for offset, name in enumerate(
            ["black", "red", "green", "yellow", "blue", "magenta", "cyan", "white"]
        )
    ],
)
def test_normal_colors_parametrized(color_attr: str, expected_code: str):
    """Every normal color attribute equals its 30-range ANSI escape."""
    Colors.reset_colors()  # guard against state leaked by a previous test
    assert getattr(Colors, color_attr) == expected_code
|
|
||||||
|
|
||||||
@pytest.mark.parametrize(
    "color_attr,expected_code",
    [
        # Bold variants prefix the 30-range color with the bold attribute "1;".
        (f"{name}_bold", f"\033[1;{30 + offset}m")
        for offset, name in enumerate(
            ["black", "red", "green", "yellow", "blue", "magenta", "cyan", "white"]
        )
    ],
)
def test_bold_colors_parametrized(color_attr: str, expected_code: str):
    """Every *_bold attribute equals its combined bold+color ANSI escape."""
    Colors.reset_colors()  # guard against state leaked by a previous test
    assert getattr(Colors, color_attr) == expected_code
|
|
||||||
|
|
||||||
@pytest.mark.parametrize(
    "color_attr,expected_code",
    [
        # Bright foreground colors occupy the contiguous ANSI range 90-97.
        (f"{name}_bright", f"\033[{90 + offset}m")
        for offset, name in enumerate(
            ["black", "red", "green", "yellow", "blue", "magenta", "cyan", "white"]
        )
    ],
)
def test_bright_colors_parametrized(color_attr: str, expected_code: str):
    """Every *_bright attribute equals its 90-range ANSI escape."""
    Colors.reset_colors()  # guard against state leaked by a previous test
    assert getattr(Colors, color_attr) == expected_code
|
|
||||||
|
|
||||||
@pytest.mark.parametrize(
    "color_attr",
    # Formatting attributes first, then every color in normal/bold/bright form.
    ["bold", "underline", "end", "reset"]
    + [
        base + suffix
        for suffix in ("", "_bold", "_bright")
        for base in ("black", "red", "green", "yellow", "blue", "magenta", "cyan", "white")
    ],
)
def test_disable_all_attributes_parametrized(color_attr: str):
    """After disable(), every public attribute must be the empty string."""
    Colors.reset_colors()
    Colors.disable()
    assert getattr(Colors, color_attr) == ""
|
|
||||||
|
|
||||||
@pytest.mark.parametrize(
    "color_attr",
    # Formatting attributes first, then every color in normal/bold/bright form.
    ["bold", "underline", "end", "reset"]
    + [
        base + suffix
        for suffix in ("", "_bold", "_bright")
        for base in ("black", "red", "green", "yellow", "blue", "magenta", "cyan", "white")
    ],
)
def test_reset_all_attributes_parametrized(color_attr: str):
    """After reset_colors(), every attribute carries a real ANSI escape again."""
    Colors.disable()
    Colors.reset_colors()
    restored = getattr(Colors, color_attr)
    assert restored != ""
    assert "\033[" in restored
|
|
||||||
|
|
||||||
# Edge case tests
|
|
||||||
class TestColorsEdgeCases:
    """Edge cases and combined-usage scenarios for the Colors class."""

    def setup_method(self):
        """Start every test from the enabled (default) palette."""
        Colors.reset_colors()

    def teardown_method(self):
        """Leave the palette enabled for whatever test runs next."""
        Colors.reset_colors()

    def test_colors_class_is_not_instantiable(self):
        """Despite the test's name, Colors is a plain class and CAN be instantiated.

        The class only exposes static methods, but nothing forbids creating
        an instance.
        """
        obj = Colors()
        assert isinstance(obj, Colors)

    def test_static_methods_work_on_instance(self):
        """disable()/reset_colors() called through an instance still mutate the class."""
        obj = Colors()
        obj.disable()
        assert Colors.red == ""
        obj.reset_colors()
        assert Colors.red == "\033[31m"

    def test_concatenation_of_multiple_effects(self):
        """Bold, underline, a bright color, and reset can be chained freely."""
        combined = "".join(
            [Colors.bold, Colors.underline, Colors.red_bright, "Test", Colors.reset]
        )
        # Each individual escape must survive the concatenation.
        for expected in ("\033[1m", "\033[4m", "\033[91m", "\033[0m"):
            assert expected in combined

    def test_empty_string_with_colors(self):
        """Color on/off codes with no text in between still concatenate cleanly."""
        assert Colors.red + Colors.end == "\033[31m\033[0m"

    def test_nested_color_changes(self):
        """A mid-string color switch emits the new escape without closing the old."""
        nested = (
            Colors.red + "Red " + Colors.blue + "Blue" + Colors.end
            + " Red again" + Colors.end
        )
        assert nested == "\033[31mRed \033[34mBlue\033[0m Red again\033[0m"
|
|
||||||
|
|
||||||
# __END__
|
|
||||||
480
uv.lock
generated
480
uv.lock
generated
@@ -1,480 +0,0 @@
|
|||||||
version = 1
|
|
||||||
revision = 3
|
|
||||||
requires-python = ">=3.13"
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "certifi"
|
|
||||||
version = "2025.11.12"
|
|
||||||
source = { registry = "https://pypi.org/simple" }
|
|
||||||
sdist = { url = "https://files.pythonhosted.org/packages/a2/8c/58f469717fa48465e4a50c014a0400602d3c437d7c0c468e17ada824da3a/certifi-2025.11.12.tar.gz", hash = "sha256:d8ab5478f2ecd78af242878415affce761ca6bc54a22a27e026d7c25357c3316", size = 160538, upload-time = "2025-11-12T02:54:51.517Z" }
|
|
||||||
wheels = [
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/70/7d/9bc192684cea499815ff478dfcdc13835ddf401365057044fb721ec6bddb/certifi-2025.11.12-py3-none-any.whl", hash = "sha256:97de8790030bbd5c2d96b7ec782fc2f7820ef8dba6db909ccf95449f2d062d4b", size = 159438, upload-time = "2025-11-12T02:54:49.735Z" },
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "cffi"
|
|
||||||
version = "2.0.0"
|
|
||||||
source = { registry = "https://pypi.org/simple" }
|
|
||||||
dependencies = [
|
|
||||||
{ name = "pycparser", marker = "implementation_name != 'PyPy'" },
|
|
||||||
]
|
|
||||||
sdist = { url = "https://files.pythonhosted.org/packages/eb/56/b1ba7935a17738ae8453301356628e8147c79dbb825bcbc73dc7401f9846/cffi-2.0.0.tar.gz", hash = "sha256:44d1b5909021139fe36001ae048dbdde8214afa20200eda0f64c068cac5d5529", size = 523588, upload-time = "2025-09-08T23:24:04.541Z" }
|
|
||||||
wheels = [
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/4b/8d/a0a47a0c9e413a658623d014e91e74a50cdd2c423f7ccfd44086ef767f90/cffi-2.0.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:00bdf7acc5f795150faa6957054fbbca2439db2f775ce831222b66f192f03beb", size = 185230, upload-time = "2025-09-08T23:23:00.879Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/4a/d2/a6c0296814556c68ee32009d9c2ad4f85f2707cdecfd7727951ec228005d/cffi-2.0.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:45d5e886156860dc35862657e1494b9bae8dfa63bf56796f2fb56e1679fc0bca", size = 181043, upload-time = "2025-09-08T23:23:02.231Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/b0/1e/d22cc63332bd59b06481ceaac49d6c507598642e2230f201649058a7e704/cffi-2.0.0-cp313-cp313-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:07b271772c100085dd28b74fa0cd81c8fb1a3ba18b21e03d7c27f3436a10606b", size = 212446, upload-time = "2025-09-08T23:23:03.472Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/a9/f5/a2c23eb03b61a0b8747f211eb716446c826ad66818ddc7810cc2cc19b3f2/cffi-2.0.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d48a880098c96020b02d5a1f7d9251308510ce8858940e6fa99ece33f610838b", size = 220101, upload-time = "2025-09-08T23:23:04.792Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/f2/7f/e6647792fc5850d634695bc0e6ab4111ae88e89981d35ac269956605feba/cffi-2.0.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:f93fd8e5c8c0a4aa1f424d6173f14a892044054871c771f8566e4008eaa359d2", size = 207948, upload-time = "2025-09-08T23:23:06.127Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/cb/1e/a5a1bd6f1fb30f22573f76533de12a00bf274abcdc55c8edab639078abb6/cffi-2.0.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:dd4f05f54a52fb558f1ba9f528228066954fee3ebe629fc1660d874d040ae5a3", size = 206422, upload-time = "2025-09-08T23:23:07.753Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/98/df/0a1755e750013a2081e863e7cd37e0cdd02664372c754e5560099eb7aa44/cffi-2.0.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:c8d3b5532fc71b7a77c09192b4a5a200ea992702734a2e9279a37f2478236f26", size = 219499, upload-time = "2025-09-08T23:23:09.648Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/50/e1/a969e687fcf9ea58e6e2a928ad5e2dd88cc12f6f0ab477e9971f2309b57c/cffi-2.0.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:d9b29c1f0ae438d5ee9acb31cadee00a58c46cc9c0b2f9038c6b0b3470877a8c", size = 222928, upload-time = "2025-09-08T23:23:10.928Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/36/54/0362578dd2c9e557a28ac77698ed67323ed5b9775ca9d3fe73fe191bb5d8/cffi-2.0.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6d50360be4546678fc1b79ffe7a66265e28667840010348dd69a314145807a1b", size = 221302, upload-time = "2025-09-08T23:23:12.42Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/eb/6d/bf9bda840d5f1dfdbf0feca87fbdb64a918a69bca42cfa0ba7b137c48cb8/cffi-2.0.0-cp313-cp313-win32.whl", hash = "sha256:74a03b9698e198d47562765773b4a8309919089150a0bb17d829ad7b44b60d27", size = 172909, upload-time = "2025-09-08T23:23:14.32Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/37/18/6519e1ee6f5a1e579e04b9ddb6f1676c17368a7aba48299c3759bbc3c8b3/cffi-2.0.0-cp313-cp313-win_amd64.whl", hash = "sha256:19f705ada2530c1167abacb171925dd886168931e0a7b78f5bffcae5c6b5be75", size = 183402, upload-time = "2025-09-08T23:23:15.535Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/cb/0e/02ceeec9a7d6ee63bb596121c2c8e9b3a9e150936f4fbef6ca1943e6137c/cffi-2.0.0-cp313-cp313-win_arm64.whl", hash = "sha256:256f80b80ca3853f90c21b23ee78cd008713787b1b1e93eae9f3d6a7134abd91", size = 177780, upload-time = "2025-09-08T23:23:16.761Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/92/c4/3ce07396253a83250ee98564f8d7e9789fab8e58858f35d07a9a2c78de9f/cffi-2.0.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:fc33c5141b55ed366cfaad382df24fe7dcbc686de5be719b207bb248e3053dc5", size = 185320, upload-time = "2025-09-08T23:23:18.087Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/59/dd/27e9fa567a23931c838c6b02d0764611c62290062a6d4e8ff7863daf9730/cffi-2.0.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:c654de545946e0db659b3400168c9ad31b5d29593291482c43e3564effbcee13", size = 181487, upload-time = "2025-09-08T23:23:19.622Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/d6/43/0e822876f87ea8a4ef95442c3d766a06a51fc5298823f884ef87aaad168c/cffi-2.0.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:24b6f81f1983e6df8db3adc38562c83f7d4a0c36162885ec7f7b77c7dcbec97b", size = 220049, upload-time = "2025-09-08T23:23:20.853Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/b4/89/76799151d9c2d2d1ead63c2429da9ea9d7aac304603de0c6e8764e6e8e70/cffi-2.0.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:12873ca6cb9b0f0d3a0da705d6086fe911591737a59f28b7936bdfed27c0d47c", size = 207793, upload-time = "2025-09-08T23:23:22.08Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/bb/dd/3465b14bb9e24ee24cb88c9e3730f6de63111fffe513492bf8c808a3547e/cffi-2.0.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:d9b97165e8aed9272a6bb17c01e3cc5871a594a446ebedc996e2397a1c1ea8ef", size = 206300, upload-time = "2025-09-08T23:23:23.314Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/47/d9/d83e293854571c877a92da46fdec39158f8d7e68da75bf73581225d28e90/cffi-2.0.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:afb8db5439b81cf9c9d0c80404b60c3cc9c3add93e114dcae767f1477cb53775", size = 219244, upload-time = "2025-09-08T23:23:24.541Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/2b/0f/1f177e3683aead2bb00f7679a16451d302c436b5cbf2505f0ea8146ef59e/cffi-2.0.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:737fe7d37e1a1bffe70bd5754ea763a62a066dc5913ca57e957824b72a85e205", size = 222828, upload-time = "2025-09-08T23:23:26.143Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/c6/0f/cafacebd4b040e3119dcb32fed8bdef8dfe94da653155f9d0b9dc660166e/cffi-2.0.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:38100abb9d1b1435bc4cc340bb4489635dc2f0da7456590877030c9b3d40b0c1", size = 220926, upload-time = "2025-09-08T23:23:27.873Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/3e/aa/df335faa45b395396fcbc03de2dfcab242cd61a9900e914fe682a59170b1/cffi-2.0.0-cp314-cp314-win32.whl", hash = "sha256:087067fa8953339c723661eda6b54bc98c5625757ea62e95eb4898ad5e776e9f", size = 175328, upload-time = "2025-09-08T23:23:44.61Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/bb/92/882c2d30831744296ce713f0feb4c1cd30f346ef747b530b5318715cc367/cffi-2.0.0-cp314-cp314-win_amd64.whl", hash = "sha256:203a48d1fb583fc7d78a4c6655692963b860a417c0528492a6bc21f1aaefab25", size = 185650, upload-time = "2025-09-08T23:23:45.848Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/9f/2c/98ece204b9d35a7366b5b2c6539c350313ca13932143e79dc133ba757104/cffi-2.0.0-cp314-cp314-win_arm64.whl", hash = "sha256:dbd5c7a25a7cb98f5ca55d258b103a2054f859a46ae11aaf23134f9cc0d356ad", size = 180687, upload-time = "2025-09-08T23:23:47.105Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/3e/61/c768e4d548bfa607abcda77423448df8c471f25dbe64fb2ef6d555eae006/cffi-2.0.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:9a67fc9e8eb39039280526379fb3a70023d77caec1852002b4da7e8b270c4dd9", size = 188773, upload-time = "2025-09-08T23:23:29.347Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/2c/ea/5f76bce7cf6fcd0ab1a1058b5af899bfbef198bea4d5686da88471ea0336/cffi-2.0.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:7a66c7204d8869299919db4d5069a82f1561581af12b11b3c9f48c584eb8743d", size = 185013, upload-time = "2025-09-08T23:23:30.63Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/be/b4/c56878d0d1755cf9caa54ba71e5d049479c52f9e4afc230f06822162ab2f/cffi-2.0.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7cc09976e8b56f8cebd752f7113ad07752461f48a58cbba644139015ac24954c", size = 221593, upload-time = "2025-09-08T23:23:31.91Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/e0/0d/eb704606dfe8033e7128df5e90fee946bbcb64a04fcdaa97321309004000/cffi-2.0.0-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:92b68146a71df78564e4ef48af17551a5ddd142e5190cdf2c5624d0c3ff5b2e8", size = 209354, upload-time = "2025-09-08T23:23:33.214Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/d8/19/3c435d727b368ca475fb8742ab97c9cb13a0de600ce86f62eab7fa3eea60/cffi-2.0.0-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:b1e74d11748e7e98e2f426ab176d4ed720a64412b6a15054378afdb71e0f37dc", size = 208480, upload-time = "2025-09-08T23:23:34.495Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/d0/44/681604464ed9541673e486521497406fadcc15b5217c3e326b061696899a/cffi-2.0.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:28a3a209b96630bca57cce802da70c266eb08c6e97e5afd61a75611ee6c64592", size = 221584, upload-time = "2025-09-08T23:23:36.096Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/25/8e/342a504ff018a2825d395d44d63a767dd8ebc927ebda557fecdaca3ac33a/cffi-2.0.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:7553fb2090d71822f02c629afe6042c299edf91ba1bf94951165613553984512", size = 224443, upload-time = "2025-09-08T23:23:37.328Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/e1/5e/b666bacbbc60fbf415ba9988324a132c9a7a0448a9a8f125074671c0f2c3/cffi-2.0.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:6c6c373cfc5c83a975506110d17457138c8c63016b563cc9ed6e056a82f13ce4", size = 223437, upload-time = "2025-09-08T23:23:38.945Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/a0/1d/ec1a60bd1a10daa292d3cd6bb0b359a81607154fb8165f3ec95fe003b85c/cffi-2.0.0-cp314-cp314t-win32.whl", hash = "sha256:1fc9ea04857caf665289b7a75923f2c6ed559b8298a1b8c49e59f7dd95c8481e", size = 180487, upload-time = "2025-09-08T23:23:40.423Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/bf/41/4c1168c74fac325c0c8156f04b6749c8b6a8f405bbf91413ba088359f60d/cffi-2.0.0-cp314-cp314t-win_amd64.whl", hash = "sha256:d68b6cef7827e8641e8ef16f4494edda8b36104d79773a334beaa1e3521430f6", size = 191726, upload-time = "2025-09-08T23:23:41.742Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/ae/3a/dbeec9d1ee0844c679f6bb5d6ad4e9f198b1224f4e7a32825f47f6192b0c/cffi-2.0.0-cp314-cp314t-win_arm64.whl", hash = "sha256:0a1527a803f0a659de1af2e1fd700213caba79377e27e4693648c2923da066f9", size = 184195, upload-time = "2025-09-08T23:23:43.004Z" },
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "charset-normalizer"
|
|
||||||
version = "3.4.4"
|
|
||||||
source = { registry = "https://pypi.org/simple" }
|
|
||||||
sdist = { url = "https://files.pythonhosted.org/packages/13/69/33ddede1939fdd074bce5434295f38fae7136463422fe4fd3e0e89b98062/charset_normalizer-3.4.4.tar.gz", hash = "sha256:94537985111c35f28720e43603b8e7b43a6ecfb2ce1d3058bbe955b73404e21a", size = 129418, upload-time = "2025-10-14T04:42:32.879Z" }
|
|
||||||
wheels = [
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/97/45/4b3a1239bbacd321068ea6e7ac28875b03ab8bc0aa0966452db17cd36714/charset_normalizer-3.4.4-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:e1f185f86a6f3403aa2420e815904c67b2f9ebc443f045edd0de921108345794", size = 208091, upload-time = "2025-10-14T04:41:13.346Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/7d/62/73a6d7450829655a35bb88a88fca7d736f9882a27eacdca2c6d505b57e2e/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6b39f987ae8ccdf0d2642338faf2abb1862340facc796048b604ef14919e55ed", size = 147936, upload-time = "2025-10-14T04:41:14.461Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/89/c5/adb8c8b3d6625bef6d88b251bbb0d95f8205831b987631ab0c8bb5d937c2/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3162d5d8ce1bb98dd51af660f2121c55d0fa541b46dff7bb9b9f86ea1d87de72", size = 144180, upload-time = "2025-10-14T04:41:15.588Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/91/ed/9706e4070682d1cc219050b6048bfd293ccf67b3d4f5a4f39207453d4b99/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:81d5eb2a312700f4ecaa977a8235b634ce853200e828fbadf3a9c50bab278328", size = 161346, upload-time = "2025-10-14T04:41:16.738Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/d5/0d/031f0d95e4972901a2f6f09ef055751805ff541511dc1252ba3ca1f80cf5/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5bd2293095d766545ec1a8f612559f6b40abc0eb18bb2f5d1171872d34036ede", size = 158874, upload-time = "2025-10-14T04:41:17.923Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/f5/83/6ab5883f57c9c801ce5e5677242328aa45592be8a00644310a008d04f922/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a8a8b89589086a25749f471e6a900d3f662d1d3b6e2e59dcecf787b1cc3a1894", size = 153076, upload-time = "2025-10-14T04:41:19.106Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/75/1e/5ff781ddf5260e387d6419959ee89ef13878229732732ee73cdae01800f2/charset_normalizer-3.4.4-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:bc7637e2f80d8530ee4a78e878bce464f70087ce73cf7c1caf142416923b98f1", size = 150601, upload-time = "2025-10-14T04:41:20.245Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/d7/57/71be810965493d3510a6ca79b90c19e48696fb1ff964da319334b12677f0/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f8bf04158c6b607d747e93949aa60618b61312fe647a6369f88ce2ff16043490", size = 150376, upload-time = "2025-10-14T04:41:21.398Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/e5/d5/c3d057a78c181d007014feb7e9f2e65905a6c4ef182c0ddf0de2924edd65/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:554af85e960429cf30784dd47447d5125aaa3b99a6f0683589dbd27e2f45da44", size = 144825, upload-time = "2025-10-14T04:41:22.583Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/e6/8c/d0406294828d4976f275ffbe66f00266c4b3136b7506941d87c00cab5272/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:74018750915ee7ad843a774364e13a3db91682f26142baddf775342c3f5b1133", size = 162583, upload-time = "2025-10-14T04:41:23.754Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/d7/24/e2aa1f18c8f15c4c0e932d9287b8609dd30ad56dbe41d926bd846e22fb8d/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:c0463276121fdee9c49b98908b3a89c39be45d86d1dbaa22957e38f6321d4ce3", size = 150366, upload-time = "2025-10-14T04:41:25.27Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/e4/5b/1e6160c7739aad1e2df054300cc618b06bf784a7a164b0f238360721ab86/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:362d61fd13843997c1c446760ef36f240cf81d3ebf74ac62652aebaf7838561e", size = 160300, upload-time = "2025-10-14T04:41:26.725Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/7a/10/f882167cd207fbdd743e55534d5d9620e095089d176d55cb22d5322f2afd/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9a26f18905b8dd5d685d6d07b0cdf98a79f3c7a918906af7cc143ea2e164c8bc", size = 154465, upload-time = "2025-10-14T04:41:28.322Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/89/66/c7a9e1b7429be72123441bfdbaf2bc13faab3f90b933f664db506dea5915/charset_normalizer-3.4.4-cp313-cp313-win32.whl", hash = "sha256:9b35f4c90079ff2e2edc5b26c0c77925e5d2d255c42c74fdb70fb49b172726ac", size = 99404, upload-time = "2025-10-14T04:41:29.95Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/c4/26/b9924fa27db384bdcd97ab83b4f0a8058d96ad9626ead570674d5e737d90/charset_normalizer-3.4.4-cp313-cp313-win_amd64.whl", hash = "sha256:b435cba5f4f750aa6c0a0d92c541fb79f69a387c91e61f1795227e4ed9cece14", size = 107092, upload-time = "2025-10-14T04:41:31.188Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/af/8f/3ed4bfa0c0c72a7ca17f0380cd9e4dd842b09f664e780c13cff1dcf2ef1b/charset_normalizer-3.4.4-cp313-cp313-win_arm64.whl", hash = "sha256:542d2cee80be6f80247095cc36c418f7bddd14f4a6de45af91dfad36d817bba2", size = 100408, upload-time = "2025-10-14T04:41:32.624Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/2a/35/7051599bd493e62411d6ede36fd5af83a38f37c4767b92884df7301db25d/charset_normalizer-3.4.4-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:da3326d9e65ef63a817ecbcc0df6e94463713b754fe293eaa03da99befb9a5bd", size = 207746, upload-time = "2025-10-14T04:41:33.773Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/10/9a/97c8d48ef10d6cd4fcead2415523221624bf58bcf68a802721a6bc807c8f/charset_normalizer-3.4.4-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8af65f14dc14a79b924524b1e7fffe304517b2bff5a58bf64f30b98bbc5079eb", size = 147889, upload-time = "2025-10-14T04:41:34.897Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/10/bf/979224a919a1b606c82bd2c5fa49b5c6d5727aa47b4312bb27b1734f53cd/charset_normalizer-3.4.4-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:74664978bb272435107de04e36db5a9735e78232b85b77d45cfb38f758efd33e", size = 143641, upload-time = "2025-10-14T04:41:36.116Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/ba/33/0ad65587441fc730dc7bd90e9716b30b4702dc7b617e6ba4997dc8651495/charset_normalizer-3.4.4-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:752944c7ffbfdd10c074dc58ec2d5a8a4cd9493b314d367c14d24c17684ddd14", size = 160779, upload-time = "2025-10-14T04:41:37.229Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/67/ed/331d6b249259ee71ddea93f6f2f0a56cfebd46938bde6fcc6f7b9a3d0e09/charset_normalizer-3.4.4-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d1f13550535ad8cff21b8d757a3257963e951d96e20ec82ab44bc64aeb62a191", size = 159035, upload-time = "2025-10-14T04:41:38.368Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/67/ff/f6b948ca32e4f2a4576aa129d8bed61f2e0543bf9f5f2b7fc3758ed005c9/charset_normalizer-3.4.4-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ecaae4149d99b1c9e7b88bb03e3221956f68fd6d50be2ef061b2381b61d20838", size = 152542, upload-time = "2025-10-14T04:41:39.862Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/16/85/276033dcbcc369eb176594de22728541a925b2632f9716428c851b149e83/charset_normalizer-3.4.4-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:cb6254dc36b47a990e59e1068afacdcd02958bdcce30bb50cc1700a8b9d624a6", size = 149524, upload-time = "2025-10-14T04:41:41.319Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/9e/f2/6a2a1f722b6aba37050e626530a46a68f74e63683947a8acff92569f979a/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:c8ae8a0f02f57a6e61203a31428fa1d677cbe50c93622b4149d5c0f319c1d19e", size = 150395, upload-time = "2025-10-14T04:41:42.539Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/60/bb/2186cb2f2bbaea6338cad15ce23a67f9b0672929744381e28b0592676824/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:47cc91b2f4dd2833fddaedd2893006b0106129d4b94fdb6af1f4ce5a9965577c", size = 143680, upload-time = "2025-10-14T04:41:43.661Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/7d/a5/bf6f13b772fbb2a90360eb620d52ed8f796f3c5caee8398c3b2eb7b1c60d/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:82004af6c302b5d3ab2cfc4cc5f29db16123b1a8417f2e25f9066f91d4411090", size = 162045, upload-time = "2025-10-14T04:41:44.821Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/df/c5/d1be898bf0dc3ef9030c3825e5d3b83f2c528d207d246cbabe245966808d/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:2b7d8f6c26245217bd2ad053761201e9f9680f8ce52f0fcd8d0755aeae5b2152", size = 149687, upload-time = "2025-10-14T04:41:46.442Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/a5/42/90c1f7b9341eef50c8a1cb3f098ac43b0508413f33affd762855f67a410e/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:799a7a5e4fb2d5898c60b640fd4981d6a25f1c11790935a44ce38c54e985f828", size = 160014, upload-time = "2025-10-14T04:41:47.631Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/76/be/4d3ee471e8145d12795ab655ece37baed0929462a86e72372fd25859047c/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:99ae2cffebb06e6c22bdc25801d7b30f503cc87dbd283479e7b606f70aff57ec", size = 154044, upload-time = "2025-10-14T04:41:48.81Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/b0/6f/8f7af07237c34a1defe7defc565a9bc1807762f672c0fde711a4b22bf9c0/charset_normalizer-3.4.4-cp314-cp314-win32.whl", hash = "sha256:f9d332f8c2a2fcbffe1378594431458ddbef721c1769d78e2cbc06280d8155f9", size = 99940, upload-time = "2025-10-14T04:41:49.946Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/4b/51/8ade005e5ca5b0d80fb4aff72a3775b325bdc3d27408c8113811a7cbe640/charset_normalizer-3.4.4-cp314-cp314-win_amd64.whl", hash = "sha256:8a6562c3700cce886c5be75ade4a5db4214fda19fede41d9792d100288d8f94c", size = 107104, upload-time = "2025-10-14T04:41:51.051Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/da/5f/6b8f83a55bb8278772c5ae54a577f3099025f9ade59d0136ac24a0df4bde/charset_normalizer-3.4.4-cp314-cp314-win_arm64.whl", hash = "sha256:de00632ca48df9daf77a2c65a484531649261ec9f25489917f09e455cb09ddb2", size = 100743, upload-time = "2025-10-14T04:41:52.122Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/0a/4c/925909008ed5a988ccbb72dcc897407e5d6d3bd72410d69e051fc0c14647/charset_normalizer-3.4.4-py3-none-any.whl", hash = "sha256:7a32c560861a02ff789ad905a2fe94e3f840803362c84fecf1851cb4cf3dc37f", size = 53402, upload-time = "2025-10-14T04:42:31.76Z" },
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "colorama"
|
|
||||||
version = "0.4.6"
|
|
||||||
source = { registry = "https://pypi.org/simple" }
|
|
||||||
sdist = { url = "https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697, upload-time = "2022-10-25T02:36:22.414Z" }
|
|
||||||
wheels = [
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335, upload-time = "2022-10-25T02:36:20.889Z" },
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "corelibs"
|
|
||||||
version = "0.39.2"
|
|
||||||
source = { editable = "." }
|
|
||||||
dependencies = [
|
|
||||||
{ name = "corelibs-datetime" },
|
|
||||||
{ name = "corelibs-enum-base" },
|
|
||||||
{ name = "corelibs-var" },
|
|
||||||
{ name = "cryptography" },
|
|
||||||
{ name = "jmespath" },
|
|
||||||
{ name = "jsonpath-ng" },
|
|
||||||
{ name = "psutil" },
|
|
||||||
{ name = "requests" },
|
|
||||||
]
|
|
||||||
|
|
||||||
[package.dev-dependencies]
|
|
||||||
dev = [
|
|
||||||
{ name = "deepdiff" },
|
|
||||||
{ name = "pytest" },
|
|
||||||
{ name = "pytest-cov" },
|
|
||||||
]
|
|
||||||
|
|
||||||
[package.metadata]
|
|
||||||
requires-dist = [
|
|
||||||
{ name = "corelibs-datetime", specifier = ">=1.0.1", index = "https://git.egplusww.jp/api/packages/PyPI/pypi/simple/" },
|
|
||||||
{ name = "corelibs-enum-base", specifier = ">=1.0.0", index = "https://git.egplusww.jp/api/packages/PyPI/pypi/simple/" },
|
|
||||||
{ name = "corelibs-var", specifier = ">=1.0.0", index = "https://git.egplusww.jp/api/packages/PyPI/pypi/simple/" },
|
|
||||||
{ name = "cryptography", specifier = ">=46.0.3" },
|
|
||||||
{ name = "jmespath", specifier = ">=1.0.1" },
|
|
||||||
{ name = "jsonpath-ng", specifier = ">=1.7.0" },
|
|
||||||
{ name = "psutil", specifier = ">=7.0.0" },
|
|
||||||
{ name = "requests", specifier = ">=2.32.4" },
|
|
||||||
]
|
|
||||||
|
|
||||||
[package.metadata.requires-dev]
|
|
||||||
dev = [
|
|
||||||
{ name = "deepdiff", specifier = ">=8.6.1" },
|
|
||||||
{ name = "pytest", specifier = ">=8.4.1" },
|
|
||||||
{ name = "pytest-cov", specifier = ">=6.2.1" },
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "corelibs-datetime"
|
|
||||||
version = "1.0.1"
|
|
||||||
source = { registry = "https://git.egplusww.jp/api/packages/PyPI/pypi/simple/" }
|
|
||||||
dependencies = [
|
|
||||||
{ name = "corelibs-var" },
|
|
||||||
]
|
|
||||||
sdist = { url = "https://git.egplusww.jp/api/packages/PyPI/pypi/files/corelibs-datetime/1.0.1/corelibs_datetime-1.0.1.tar.gz", hash = "sha256:ff58c6f824f35b87b1a5c153f65fdd82b65e42bb5a649d46d9115dc5fa61042f" }
|
|
||||||
wheels = [
|
|
||||||
{ url = "https://git.egplusww.jp/api/packages/PyPI/pypi/files/corelibs-datetime/1.0.1/corelibs_datetime-1.0.1-py3-none-any.whl", hash = "sha256:f1a4d431f9f913dd39976a119ff8a2db34e966c61b1775c26b0da72a8bdb5ec1" },
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "corelibs-enum-base"
|
|
||||||
version = "1.0.0"
|
|
||||||
source = { registry = "https://git.egplusww.jp/api/packages/PyPI/pypi/simple/" }
|
|
||||||
sdist = { url = "https://git.egplusww.jp/api/packages/PyPI/pypi/files/corelibs-enum-base/1.0.0/corelibs_enum_base-1.0.0.tar.gz", hash = "sha256:c696a297d88f674d40e5d190f396909b5f663a995ac735e545ceb5bb4907121d" }
|
|
||||||
wheels = [
|
|
||||||
{ url = "https://git.egplusww.jp/api/packages/PyPI/pypi/files/corelibs-enum-base/1.0.0/corelibs_enum_base-1.0.0-py3-none-any.whl", hash = "sha256:c305d4063c69021aaf9ef75fbcce961039dae3c3de7820febeac7082c998a1f8" },
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "corelibs-var"
|
|
||||||
version = "1.0.0"
|
|
||||||
source = { registry = "https://git.egplusww.jp/api/packages/PyPI/pypi/simple/" }
|
|
||||||
sdist = { url = "https://git.egplusww.jp/api/packages/PyPI/pypi/files/corelibs-var/1.0.0/corelibs_var-1.0.0.tar.gz", hash = "sha256:b85d6fd3802a1b687290666e4b1dbb47cf9723aa72bf73eb004e9e4936776364" }
|
|
||||||
wheels = [
|
|
||||||
{ url = "https://git.egplusww.jp/api/packages/PyPI/pypi/files/corelibs-var/1.0.0/corelibs_var-1.0.0-py3-none-any.whl", hash = "sha256:a3546785bf9c94eec08b5c500b69b971e83e11d92bc0e4d3cbd9411a561fdbc2" },
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "coverage"
|
|
||||||
version = "7.12.0"
|
|
||||||
source = { registry = "https://pypi.org/simple" }
|
|
||||||
sdist = { url = "https://files.pythonhosted.org/packages/89/26/4a96807b193b011588099c3b5c89fbb05294e5b90e71018e065465f34eb6/coverage-7.12.0.tar.gz", hash = "sha256:fc11e0a4e372cb5f282f16ef90d4a585034050ccda536451901abfb19a57f40c", size = 819341, upload-time = "2025-11-18T13:34:20.766Z" }
|
|
||||||
wheels = [
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/b8/14/771700b4048774e48d2c54ed0c674273702713c9ee7acdfede40c2666747/coverage-7.12.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:47324fffca8d8eae7e185b5bb20c14645f23350f870c1649003618ea91a78941", size = 217725, upload-time = "2025-11-18T13:32:49.22Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/17/a7/3aa4144d3bcb719bf67b22d2d51c2d577bf801498c13cb08f64173e80497/coverage-7.12.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:ccf3b2ede91decd2fb53ec73c1f949c3e034129d1e0b07798ff1d02ea0c8fa4a", size = 218098, upload-time = "2025-11-18T13:32:50.78Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/fc/9c/b846bbc774ff81091a12a10203e70562c91ae71badda00c5ae5b613527b1/coverage-7.12.0-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:b365adc70a6936c6b0582dc38746b33b2454148c02349345412c6e743efb646d", size = 249093, upload-time = "2025-11-18T13:32:52.554Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/76/b6/67d7c0e1f400b32c883e9342de4a8c2ae7c1a0b57c5de87622b7262e2309/coverage-7.12.0-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:bc13baf85cd8a4cfcf4a35c7bc9d795837ad809775f782f697bf630b7e200211", size = 251686, upload-time = "2025-11-18T13:32:54.862Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/cc/75/b095bd4b39d49c3be4bffbb3135fea18a99a431c52dd7513637c0762fecb/coverage-7.12.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:099d11698385d572ceafb3288a5b80fe1fc58bf665b3f9d362389de488361d3d", size = 252930, upload-time = "2025-11-18T13:32:56.417Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/6e/f3/466f63015c7c80550bead3093aacabf5380c1220a2a93c35d374cae8f762/coverage-7.12.0-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:473dc45d69694069adb7680c405fb1e81f60b2aff42c81e2f2c3feaf544d878c", size = 249296, upload-time = "2025-11-18T13:32:58.074Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/27/86/eba2209bf2b7e28c68698fc13437519a295b2d228ba9e0ec91673e09fa92/coverage-7.12.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:583f9adbefd278e9de33c33d6846aa8f5d164fa49b47144180a0e037f0688bb9", size = 251068, upload-time = "2025-11-18T13:32:59.646Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/ec/55/ca8ae7dbba962a3351f18940b359b94c6bafdd7757945fdc79ec9e452dc7/coverage-7.12.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:b2089cc445f2dc0af6f801f0d1355c025b76c24481935303cf1af28f636688f0", size = 249034, upload-time = "2025-11-18T13:33:01.481Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/7a/d7/39136149325cad92d420b023b5fd900dabdd1c3a0d1d5f148ef4a8cedef5/coverage-7.12.0-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:950411f1eb5d579999c5f66c62a40961f126fc71e5e14419f004471957b51508", size = 248853, upload-time = "2025-11-18T13:33:02.935Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/fe/b6/76e1add8b87ef60e00643b0b7f8f7bb73d4bf5249a3be19ebefc5793dd25/coverage-7.12.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:b1aab7302a87bafebfe76b12af681b56ff446dc6f32ed178ff9c092ca776e6bc", size = 250619, upload-time = "2025-11-18T13:33:04.336Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/95/87/924c6dc64f9203f7a3c1832a6a0eee5a8335dbe5f1bdadcc278d6f1b4d74/coverage-7.12.0-cp313-cp313-win32.whl", hash = "sha256:d7e0d0303c13b54db495eb636bc2465b2fb8475d4c8bcec8fe4b5ca454dfbae8", size = 220261, upload-time = "2025-11-18T13:33:06.493Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/91/77/dd4aff9af16ff776bf355a24d87eeb48fc6acde54c907cc1ea89b14a8804/coverage-7.12.0-cp313-cp313-win_amd64.whl", hash = "sha256:ce61969812d6a98a981d147d9ac583a36ac7db7766f2e64a9d4d059c2fe29d07", size = 221072, upload-time = "2025-11-18T13:33:07.926Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/70/49/5c9dc46205fef31b1b226a6e16513193715290584317fd4df91cdaf28b22/coverage-7.12.0-cp313-cp313-win_arm64.whl", hash = "sha256:bcec6f47e4cb8a4c2dc91ce507f6eefc6a1b10f58df32cdc61dff65455031dfc", size = 219702, upload-time = "2025-11-18T13:33:09.631Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/9b/62/f87922641c7198667994dd472a91e1d9b829c95d6c29529ceb52132436ad/coverage-7.12.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:459443346509476170d553035e4a3eed7b860f4fe5242f02de1010501956ce87", size = 218420, upload-time = "2025-11-18T13:33:11.153Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/85/dd/1cc13b2395ef15dbb27d7370a2509b4aee77890a464fb35d72d428f84871/coverage-7.12.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:04a79245ab2b7a61688958f7a855275997134bc84f4a03bc240cf64ff132abf6", size = 218773, upload-time = "2025-11-18T13:33:12.569Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/74/40/35773cc4bb1e9d4658d4fb669eb4195b3151bef3bbd6f866aba5cd5dac82/coverage-7.12.0-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:09a86acaaa8455f13d6a99221d9654df249b33937b4e212b4e5a822065f12aa7", size = 260078, upload-time = "2025-11-18T13:33:14.037Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/ec/ee/231bb1a6ffc2905e396557585ebc6bdc559e7c66708376d245a1f1d330fc/coverage-7.12.0-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:907e0df1b71ba77463687a74149c6122c3f6aac56c2510a5d906b2f368208560", size = 262144, upload-time = "2025-11-18T13:33:15.601Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/28/be/32f4aa9f3bf0b56f3971001b56508352c7753915345d45fab4296a986f01/coverage-7.12.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9b57e2d0ddd5f0582bae5437c04ee71c46cd908e7bc5d4d0391f9a41e812dd12", size = 264574, upload-time = "2025-11-18T13:33:17.354Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/68/7c/00489fcbc2245d13ab12189b977e0cf06ff3351cb98bc6beba8bd68c5902/coverage-7.12.0-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:58c1c6aa677f3a1411fe6fb28ec3a942e4f665df036a3608816e0847fad23296", size = 259298, upload-time = "2025-11-18T13:33:18.958Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/96/b4/f0760d65d56c3bea95b449e02570d4abd2549dc784bf39a2d4721a2d8ceb/coverage-7.12.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:4c589361263ab2953e3c4cd2a94db94c4ad4a8e572776ecfbad2389c626e4507", size = 262150, upload-time = "2025-11-18T13:33:20.644Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/c5/71/9a9314df00f9326d78c1e5a910f520d599205907432d90d1c1b7a97aa4b1/coverage-7.12.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:91b810a163ccad2e43b1faa11d70d3cf4b6f3d83f9fd5f2df82a32d47b648e0d", size = 259763, upload-time = "2025-11-18T13:33:22.189Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/10/34/01a0aceed13fbdf925876b9a15d50862eb8845454301fe3cdd1df08b2182/coverage-7.12.0-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:40c867af715f22592e0d0fb533a33a71ec9e0f73a6945f722a0c85c8c1cbe3a2", size = 258653, upload-time = "2025-11-18T13:33:24.239Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/8d/04/81d8fd64928acf1574bbb0181f66901c6c1c6279c8ccf5f84259d2c68ae9/coverage-7.12.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:68b0d0a2d84f333de875666259dadf28cc67858bc8fd8b3f1eae84d3c2bec455", size = 260856, upload-time = "2025-11-18T13:33:26.365Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/f2/76/fa2a37bfaeaf1f766a2d2360a25a5297d4fb567098112f6517475eee120b/coverage-7.12.0-cp313-cp313t-win32.whl", hash = "sha256:73f9e7fbd51a221818fd11b7090eaa835a353ddd59c236c57b2199486b116c6d", size = 220936, upload-time = "2025-11-18T13:33:28.165Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/f9/52/60f64d932d555102611c366afb0eb434b34266b1d9266fc2fe18ab641c47/coverage-7.12.0-cp313-cp313t-win_amd64.whl", hash = "sha256:24cff9d1f5743f67db7ba46ff284018a6e9aeb649b67aa1e70c396aa1b7cb23c", size = 222001, upload-time = "2025-11-18T13:33:29.656Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/77/df/c303164154a5a3aea7472bf323b7c857fed93b26618ed9fc5c2955566bb0/coverage-7.12.0-cp313-cp313t-win_arm64.whl", hash = "sha256:c87395744f5c77c866d0f5a43d97cc39e17c7f1cb0115e54a2fe67ca75c5d14d", size = 220273, upload-time = "2025-11-18T13:33:31.415Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/bf/2e/fc12db0883478d6e12bbd62d481210f0c8daf036102aa11434a0c5755825/coverage-7.12.0-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:a1c59b7dc169809a88b21a936eccf71c3895a78f5592051b1af8f4d59c2b4f92", size = 217777, upload-time = "2025-11-18T13:33:32.86Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/1f/c1/ce3e525d223350c6ec16b9be8a057623f54226ef7f4c2fee361ebb6a02b8/coverage-7.12.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:8787b0f982e020adb732b9f051f3e49dd5054cebbc3f3432061278512a2b1360", size = 218100, upload-time = "2025-11-18T13:33:34.532Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/15/87/113757441504aee3808cb422990ed7c8bcc2d53a6779c66c5adef0942939/coverage-7.12.0-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:5ea5a9f7dc8877455b13dd1effd3202e0bca72f6f3ab09f9036b1bcf728f69ac", size = 249151, upload-time = "2025-11-18T13:33:36.135Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/d9/1d/9529d9bd44049b6b05bb319c03a3a7e4b0a8a802d28fa348ad407e10706d/coverage-7.12.0-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:fdba9f15849534594f60b47c9a30bc70409b54947319a7c4fd0e8e3d8d2f355d", size = 251667, upload-time = "2025-11-18T13:33:37.996Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/11/bb/567e751c41e9c03dc29d3ce74b8c89a1e3396313e34f255a2a2e8b9ebb56/coverage-7.12.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a00594770eb715854fb1c57e0dea08cce6720cfbc531accdb9850d7c7770396c", size = 253003, upload-time = "2025-11-18T13:33:39.553Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/e4/b3/c2cce2d8526a02fb9e9ca14a263ca6fc074449b33a6afa4892838c903528/coverage-7.12.0-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:5560c7e0d82b42eb1951e4f68f071f8017c824ebfd5a6ebe42c60ac16c6c2434", size = 249185, upload-time = "2025-11-18T13:33:42.086Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/0e/a7/967f93bb66e82c9113c66a8d0b65ecf72fc865adfba5a145f50c7af7e58d/coverage-7.12.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:d6c2e26b481c9159c2773a37947a9718cfdc58893029cdfb177531793e375cfc", size = 251025, upload-time = "2025-11-18T13:33:43.634Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/b9/b2/f2f6f56337bc1af465d5b2dc1ee7ee2141b8b9272f3bf6213fcbc309a836/coverage-7.12.0-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:6e1a8c066dabcde56d5d9fed6a66bc19a2883a3fe051f0c397a41fc42aedd4cc", size = 248979, upload-time = "2025-11-18T13:33:46.04Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/f4/7a/bf4209f45a4aec09d10a01a57313a46c0e0e8f4c55ff2965467d41a92036/coverage-7.12.0-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:f7ba9da4726e446d8dd8aae5a6cd872511184a5d861de80a86ef970b5dacce3e", size = 248800, upload-time = "2025-11-18T13:33:47.546Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/b8/b7/1e01b8696fb0521810f60c5bbebf699100d6754183e6cc0679bf2ed76531/coverage-7.12.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:e0f483ab4f749039894abaf80c2f9e7ed77bbf3c737517fb88c8e8e305896a17", size = 250460, upload-time = "2025-11-18T13:33:49.537Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/71/ae/84324fb9cb46c024760e706353d9b771a81b398d117d8c1fe010391c186f/coverage-7.12.0-cp314-cp314-win32.whl", hash = "sha256:76336c19a9ef4a94b2f8dc79f8ac2da3f193f625bb5d6f51a328cd19bfc19933", size = 220533, upload-time = "2025-11-18T13:33:51.16Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/e2/71/1033629deb8460a8f97f83e6ac4ca3b93952e2b6f826056684df8275e015/coverage-7.12.0-cp314-cp314-win_amd64.whl", hash = "sha256:7c1059b600aec6ef090721f8f633f60ed70afaffe8ecab85b59df748f24b31fe", size = 221348, upload-time = "2025-11-18T13:33:52.776Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/0a/5f/ac8107a902f623b0c251abdb749be282dc2ab61854a8a4fcf49e276fce2f/coverage-7.12.0-cp314-cp314-win_arm64.whl", hash = "sha256:172cf3a34bfef42611963e2b661302a8931f44df31629e5b1050567d6b90287d", size = 219922, upload-time = "2025-11-18T13:33:54.316Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/79/6e/f27af2d4da367f16077d21ef6fe796c874408219fa6dd3f3efe7751bd910/coverage-7.12.0-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:aa7d48520a32cb21c7a9b31f81799e8eaec7239db36c3b670be0fa2403828d1d", size = 218511, upload-time = "2025-11-18T13:33:56.343Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/67/dd/65fd874aa460c30da78f9d259400d8e6a4ef457d61ab052fd248f0050558/coverage-7.12.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:90d58ac63bc85e0fb919f14d09d6caa63f35a5512a2205284b7816cafd21bb03", size = 218771, upload-time = "2025-11-18T13:33:57.966Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/55/e0/7c6b71d327d8068cb79c05f8f45bf1b6145f7a0de23bbebe63578fe5240a/coverage-7.12.0-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:ca8ecfa283764fdda3eae1bdb6afe58bf78c2c3ec2b2edcb05a671f0bba7b3f9", size = 260151, upload-time = "2025-11-18T13:33:59.597Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/49/ce/4697457d58285b7200de6b46d606ea71066c6e674571a946a6ea908fb588/coverage-7.12.0-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:874fe69a0785d96bd066059cd4368022cebbec1a8958f224f0016979183916e6", size = 262257, upload-time = "2025-11-18T13:34:01.166Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/2f/33/acbc6e447aee4ceba88c15528dbe04a35fb4d67b59d393d2e0d6f1e242c1/coverage-7.12.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5b3c889c0b8b283a24d721a9eabc8ccafcfc3aebf167e4cd0d0e23bf8ec4e339", size = 264671, upload-time = "2025-11-18T13:34:02.795Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/87/ec/e2822a795c1ed44d569980097be839c5e734d4c0c1119ef8e0a073496a30/coverage-7.12.0-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:8bb5b894b3ec09dcd6d3743229dc7f2c42ef7787dc40596ae04c0edda487371e", size = 259231, upload-time = "2025-11-18T13:34:04.397Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/72/c5/a7ec5395bb4a49c9b7ad97e63f0c92f6bf4a9e006b1393555a02dae75f16/coverage-7.12.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:79a44421cd5fba96aa57b5e3b5a4d3274c449d4c622e8f76882d76635501fd13", size = 262137, upload-time = "2025-11-18T13:34:06.068Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/67/0c/02c08858b764129f4ecb8e316684272972e60777ae986f3865b10940bdd6/coverage-7.12.0-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:33baadc0efd5c7294f436a632566ccc1f72c867f82833eb59820ee37dc811c6f", size = 259745, upload-time = "2025-11-18T13:34:08.04Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/5a/04/4fd32b7084505f3829a8fe45c1a74a7a728cb251aaadbe3bec04abcef06d/coverage-7.12.0-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:c406a71f544800ef7e9e0000af706b88465f3573ae8b8de37e5f96c59f689ad1", size = 258570, upload-time = "2025-11-18T13:34:09.676Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/48/35/2365e37c90df4f5342c4fa202223744119fe31264ee2924f09f074ea9b6d/coverage-7.12.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:e71bba6a40883b00c6d571599b4627f50c360b3d0d02bfc658168936be74027b", size = 260899, upload-time = "2025-11-18T13:34:11.259Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/05/56/26ab0464ca733fa325e8e71455c58c1c374ce30f7c04cebb88eabb037b18/coverage-7.12.0-cp314-cp314t-win32.whl", hash = "sha256:9157a5e233c40ce6613dead4c131a006adfda70e557b6856b97aceed01b0e27a", size = 221313, upload-time = "2025-11-18T13:34:12.863Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/da/1c/017a3e1113ed34d998b27d2c6dba08a9e7cb97d362f0ec988fcd873dcf81/coverage-7.12.0-cp314-cp314t-win_amd64.whl", hash = "sha256:e84da3a0fd233aeec797b981c51af1cabac74f9bd67be42458365b30d11b5291", size = 222423, upload-time = "2025-11-18T13:34:15.14Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/4c/36/bcc504fdd5169301b52568802bb1b9cdde2e27a01d39fbb3b4b508ab7c2c/coverage-7.12.0-cp314-cp314t-win_arm64.whl", hash = "sha256:01d24af36fedda51c2b1aca56e4330a3710f83b02a5ff3743a6b015ffa7c9384", size = 220459, upload-time = "2025-11-18T13:34:17.222Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/ce/a3/43b749004e3c09452e39bb56347a008f0a0668aad37324a99b5c8ca91d9e/coverage-7.12.0-py3-none-any.whl", hash = "sha256:159d50c0b12e060b15ed3d39f87ed43d4f7f7ad40b8a534f4dd331adbb51104a", size = 209503, upload-time = "2025-11-18T13:34:18.892Z" },
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "cryptography"
|
|
||||||
version = "46.0.3"
|
|
||||||
source = { registry = "https://pypi.org/simple" }
|
|
||||||
dependencies = [
|
|
||||||
{ name = "cffi", marker = "platform_python_implementation != 'PyPy'" },
|
|
||||||
]
|
|
||||||
sdist = { url = "https://files.pythonhosted.org/packages/9f/33/c00162f49c0e2fe8064a62cb92b93e50c74a72bc370ab92f86112b33ff62/cryptography-46.0.3.tar.gz", hash = "sha256:a8b17438104fed022ce745b362294d9ce35b4c2e45c1d958ad4a4b019285f4a1", size = 749258, upload-time = "2025-10-15T23:18:31.74Z" }
|
|
||||||
wheels = [
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/1d/42/9c391dd801d6cf0d561b5890549d4b27bafcc53b39c31a817e69d87c625b/cryptography-46.0.3-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:109d4ddfadf17e8e7779c39f9b18111a09efb969a301a31e987416a0191ed93a", size = 7225004, upload-time = "2025-10-15T23:16:52.239Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/1c/67/38769ca6b65f07461eb200e85fc1639b438bdc667be02cf7f2cd6a64601c/cryptography-46.0.3-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:09859af8466b69bc3c27bdf4f5d84a665e0f7ab5088412e9e2ec49758eca5cbc", size = 4296667, upload-time = "2025-10-15T23:16:54.369Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/5c/49/498c86566a1d80e978b42f0d702795f69887005548c041636df6ae1ca64c/cryptography-46.0.3-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:01ca9ff2885f3acc98c29f1860552e37f6d7c7d013d7334ff2a9de43a449315d", size = 4450807, upload-time = "2025-10-15T23:16:56.414Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/4b/0a/863a3604112174c8624a2ac3c038662d9e59970c7f926acdcfaed8d61142/cryptography-46.0.3-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:6eae65d4c3d33da080cff9c4ab1f711b15c1d9760809dad6ea763f3812d254cb", size = 4299615, upload-time = "2025-10-15T23:16:58.442Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/64/02/b73a533f6b64a69f3cd3872acb6ebc12aef924d8d103133bb3ea750dc703/cryptography-46.0.3-cp311-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:e5bf0ed4490068a2e72ac03d786693adeb909981cc596425d09032d372bcc849", size = 4016800, upload-time = "2025-10-15T23:17:00.378Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/25/d5/16e41afbfa450cde85a3b7ec599bebefaef16b5c6ba4ec49a3532336ed72/cryptography-46.0.3-cp311-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:5ecfccd2329e37e9b7112a888e76d9feca2347f12f37918facbb893d7bb88ee8", size = 4984707, upload-time = "2025-10-15T23:17:01.98Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/c9/56/e7e69b427c3878352c2fb9b450bd0e19ed552753491d39d7d0a2f5226d41/cryptography-46.0.3-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:a2c0cd47381a3229c403062f764160d57d4d175e022c1df84e168c6251a22eec", size = 4482541, upload-time = "2025-10-15T23:17:04.078Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/78/f6/50736d40d97e8483172f1bb6e698895b92a223dba513b0ca6f06b2365339/cryptography-46.0.3-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:549e234ff32571b1f4076ac269fcce7a808d3bf98b76c8dd560e42dbc66d7d91", size = 4299464, upload-time = "2025-10-15T23:17:05.483Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/00/de/d8e26b1a855f19d9994a19c702fa2e93b0456beccbcfe437eda00e0701f2/cryptography-46.0.3-cp311-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:c0a7bb1a68a5d3471880e264621346c48665b3bf1c3759d682fc0864c540bd9e", size = 4950838, upload-time = "2025-10-15T23:17:07.425Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/8f/29/798fc4ec461a1c9e9f735f2fc58741b0daae30688f41b2497dcbc9ed1355/cryptography-46.0.3-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:10b01676fc208c3e6feeb25a8b83d81767e8059e1fe86e1dc62d10a3018fa926", size = 4481596, upload-time = "2025-10-15T23:17:09.343Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/15/8d/03cd48b20a573adfff7652b76271078e3045b9f49387920e7f1f631d125e/cryptography-46.0.3-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:0abf1ffd6e57c67e92af68330d05760b7b7efb243aab8377e583284dbab72c71", size = 4426782, upload-time = "2025-10-15T23:17:11.22Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/fa/b1/ebacbfe53317d55cf33165bda24c86523497a6881f339f9aae5c2e13e57b/cryptography-46.0.3-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:a04bee9ab6a4da801eb9b51f1b708a1b5b5c9eb48c03f74198464c66f0d344ac", size = 4698381, upload-time = "2025-10-15T23:17:12.829Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/96/92/8a6a9525893325fc057a01f654d7efc2c64b9de90413adcf605a85744ff4/cryptography-46.0.3-cp311-abi3-win32.whl", hash = "sha256:f260d0d41e9b4da1ed1e0f1ce571f97fe370b152ab18778e9e8f67d6af432018", size = 3055988, upload-time = "2025-10-15T23:17:14.65Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/7e/bf/80fbf45253ea585a1e492a6a17efcb93467701fa79e71550a430c5e60df0/cryptography-46.0.3-cp311-abi3-win_amd64.whl", hash = "sha256:a9a3008438615669153eb86b26b61e09993921ebdd75385ddd748702c5adfddb", size = 3514451, upload-time = "2025-10-15T23:17:16.142Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/2e/af/9b302da4c87b0beb9db4e756386a7c6c5b8003cd0e742277888d352ae91d/cryptography-46.0.3-cp311-abi3-win_arm64.whl", hash = "sha256:5d7f93296ee28f68447397bf5198428c9aeeab45705a55d53a6343455dcb2c3c", size = 2928007, upload-time = "2025-10-15T23:17:18.04Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/f5/e2/a510aa736755bffa9d2f75029c229111a1d02f8ecd5de03078f4c18d91a3/cryptography-46.0.3-cp314-cp314t-macosx_10_9_universal2.whl", hash = "sha256:00a5e7e87938e5ff9ff5447ab086a5706a957137e6e433841e9d24f38a065217", size = 7158012, upload-time = "2025-10-15T23:17:19.982Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/73/dc/9aa866fbdbb95b02e7f9d086f1fccfeebf8953509b87e3f28fff927ff8a0/cryptography-46.0.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:c8daeb2d2174beb4575b77482320303f3d39b8e81153da4f0fb08eb5fe86a6c5", size = 4288728, upload-time = "2025-10-15T23:17:21.527Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/c5/fd/bc1daf8230eaa075184cbbf5f8cd00ba9db4fd32d63fb83da4671b72ed8a/cryptography-46.0.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:39b6755623145ad5eff1dab323f4eae2a32a77a7abef2c5089a04a3d04366715", size = 4435078, upload-time = "2025-10-15T23:17:23.042Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/82/98/d3bd5407ce4c60017f8ff9e63ffee4200ab3e23fe05b765cab805a7db008/cryptography-46.0.3-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:db391fa7c66df6762ee3f00c95a89e6d428f4d60e7abc8328f4fe155b5ac6e54", size = 4293460, upload-time = "2025-10-15T23:17:24.885Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/26/e9/e23e7900983c2b8af7a08098db406cf989d7f09caea7897e347598d4cd5b/cryptography-46.0.3-cp314-cp314t-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:78a97cf6a8839a48c49271cdcbd5cf37ca2c1d6b7fdd86cc864f302b5e9bf459", size = 3995237, upload-time = "2025-10-15T23:17:26.449Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/91/15/af68c509d4a138cfe299d0d7ddb14afba15233223ebd933b4bbdbc7155d3/cryptography-46.0.3-cp314-cp314t-manylinux_2_28_ppc64le.whl", hash = "sha256:dfb781ff7eaa91a6f7fd41776ec37c5853c795d3b358d4896fdbb5df168af422", size = 4967344, upload-time = "2025-10-15T23:17:28.06Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/ca/e3/8643d077c53868b681af077edf6b3cb58288b5423610f21c62aadcbe99f4/cryptography-46.0.3-cp314-cp314t-manylinux_2_28_x86_64.whl", hash = "sha256:6f61efb26e76c45c4a227835ddeae96d83624fb0d29eb5df5b96e14ed1a0afb7", size = 4466564, upload-time = "2025-10-15T23:17:29.665Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/0e/43/c1e8726fa59c236ff477ff2b5dc071e54b21e5a1e51aa2cee1676f1c986f/cryptography-46.0.3-cp314-cp314t-manylinux_2_34_aarch64.whl", hash = "sha256:23b1a8f26e43f47ceb6d6a43115f33a5a37d57df4ea0ca295b780ae8546e8044", size = 4292415, upload-time = "2025-10-15T23:17:31.686Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/42/f9/2f8fefdb1aee8a8e3256a0568cffc4e6d517b256a2fe97a029b3f1b9fe7e/cryptography-46.0.3-cp314-cp314t-manylinux_2_34_ppc64le.whl", hash = "sha256:b419ae593c86b87014b9be7396b385491ad7f320bde96826d0dd174459e54665", size = 4931457, upload-time = "2025-10-15T23:17:33.478Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/79/30/9b54127a9a778ccd6d27c3da7563e9f2d341826075ceab89ae3b41bf5be2/cryptography-46.0.3-cp314-cp314t-manylinux_2_34_x86_64.whl", hash = "sha256:50fc3343ac490c6b08c0cf0d704e881d0d660be923fd3076db3e932007e726e3", size = 4466074, upload-time = "2025-10-15T23:17:35.158Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/ac/68/b4f4a10928e26c941b1b6a179143af9f4d27d88fe84a6a3c53592d2e76bf/cryptography-46.0.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:22d7e97932f511d6b0b04f2bfd818d73dcd5928db509460aaf48384778eb6d20", size = 4420569, upload-time = "2025-10-15T23:17:37.188Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/a3/49/3746dab4c0d1979888f125226357d3262a6dd40e114ac29e3d2abdf1ec55/cryptography-46.0.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:d55f3dffadd674514ad19451161118fd010988540cee43d8bc20675e775925de", size = 4681941, upload-time = "2025-10-15T23:17:39.236Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/fd/30/27654c1dbaf7e4a3531fa1fc77986d04aefa4d6d78259a62c9dc13d7ad36/cryptography-46.0.3-cp314-cp314t-win32.whl", hash = "sha256:8a6e050cb6164d3f830453754094c086ff2d0b2f3a897a1d9820f6139a1f0914", size = 3022339, upload-time = "2025-10-15T23:17:40.888Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/f6/30/640f34ccd4d2a1bc88367b54b926b781b5a018d65f404d409aba76a84b1c/cryptography-46.0.3-cp314-cp314t-win_amd64.whl", hash = "sha256:760f83faa07f8b64e9c33fc963d790a2edb24efb479e3520c14a45741cd9b2db", size = 3494315, upload-time = "2025-10-15T23:17:42.769Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/ba/8b/88cc7e3bd0a8e7b861f26981f7b820e1f46aa9d26cc482d0feba0ecb4919/cryptography-46.0.3-cp314-cp314t-win_arm64.whl", hash = "sha256:516ea134e703e9fe26bcd1277a4b59ad30586ea90c365a87781d7887a646fe21", size = 2919331, upload-time = "2025-10-15T23:17:44.468Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/fd/23/45fe7f376a7df8daf6da3556603b36f53475a99ce4faacb6ba2cf3d82021/cryptography-46.0.3-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:cb3d760a6117f621261d662bccc8ef5bc32ca673e037c83fbe565324f5c46936", size = 7218248, upload-time = "2025-10-15T23:17:46.294Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/27/32/b68d27471372737054cbd34c84981f9edbc24fe67ca225d389799614e27f/cryptography-46.0.3-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:4b7387121ac7d15e550f5cb4a43aef2559ed759c35df7336c402bb8275ac9683", size = 4294089, upload-time = "2025-10-15T23:17:48.269Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/26/42/fa8389d4478368743e24e61eea78846a0006caffaf72ea24a15159215a14/cryptography-46.0.3-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:15ab9b093e8f09daab0f2159bb7e47532596075139dd74365da52ecc9cb46c5d", size = 4440029, upload-time = "2025-10-15T23:17:49.837Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/5f/eb/f483db0ec5ac040824f269e93dd2bd8a21ecd1027e77ad7bdf6914f2fd80/cryptography-46.0.3-cp38-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:46acf53b40ea38f9c6c229599a4a13f0d46a6c3fa9ef19fc1a124d62e338dfa0", size = 4297222, upload-time = "2025-10-15T23:17:51.357Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/fd/cf/da9502c4e1912cb1da3807ea3618a6829bee8207456fbbeebc361ec38ba3/cryptography-46.0.3-cp38-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:10ca84c4668d066a9878890047f03546f3ae0a6b8b39b697457b7757aaf18dbc", size = 4012280, upload-time = "2025-10-15T23:17:52.964Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/6b/8f/9adb86b93330e0df8b3dcf03eae67c33ba89958fc2e03862ef1ac2b42465/cryptography-46.0.3-cp38-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:36e627112085bb3b81b19fed209c05ce2a52ee8b15d161b7c643a7d5a88491f3", size = 4978958, upload-time = "2025-10-15T23:17:54.965Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/d1/a0/5fa77988289c34bdb9f913f5606ecc9ada1adb5ae870bd0d1054a7021cc4/cryptography-46.0.3-cp38-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:1000713389b75c449a6e979ffc7dcc8ac90b437048766cef052d4d30b8220971", size = 4473714, upload-time = "2025-10-15T23:17:56.754Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/14/e5/fc82d72a58d41c393697aa18c9abe5ae1214ff6f2a5c18ac470f92777895/cryptography-46.0.3-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:b02cf04496f6576afffef5ddd04a0cb7d49cf6be16a9059d793a30b035f6b6ac", size = 4296970, upload-time = "2025-10-15T23:17:58.588Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/78/06/5663ed35438d0b09056973994f1aec467492b33bd31da36e468b01ec1097/cryptography-46.0.3-cp38-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:71e842ec9bc7abf543b47cf86b9a743baa95f4677d22baa4c7d5c69e49e9bc04", size = 4940236, upload-time = "2025-10-15T23:18:00.897Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/fc/59/873633f3f2dcd8a053b8dd1d38f783043b5fce589c0f6988bf55ef57e43e/cryptography-46.0.3-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:402b58fc32614f00980b66d6e56a5b4118e6cb362ae8f3fda141ba4689bd4506", size = 4472642, upload-time = "2025-10-15T23:18:02.749Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/3d/39/8e71f3930e40f6877737d6f69248cf74d4e34b886a3967d32f919cc50d3b/cryptography-46.0.3-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:ef639cb3372f69ec44915fafcd6698b6cc78fbe0c2ea41be867f6ed612811963", size = 4423126, upload-time = "2025-10-15T23:18:04.85Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/cd/c7/f65027c2810e14c3e7268353b1681932b87e5a48e65505d8cc17c99e36ae/cryptography-46.0.3-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:3b51b8ca4f1c6453d8829e1eb7299499ca7f313900dd4d89a24b8b87c0a780d4", size = 4686573, upload-time = "2025-10-15T23:18:06.908Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/0a/6e/1c8331ddf91ca4730ab3086a0f1be19c65510a33b5a441cb334e7a2d2560/cryptography-46.0.3-cp38-abi3-win32.whl", hash = "sha256:6276eb85ef938dc035d59b87c8a7dc559a232f954962520137529d77b18ff1df", size = 3036695, upload-time = "2025-10-15T23:18:08.672Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/90/45/b0d691df20633eff80955a0fc7695ff9051ffce8b69741444bd9ed7bd0db/cryptography-46.0.3-cp38-abi3-win_amd64.whl", hash = "sha256:416260257577718c05135c55958b674000baef9a1c7d9e8f306ec60d71db850f", size = 3501720, upload-time = "2025-10-15T23:18:10.632Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/e8/cb/2da4cc83f5edb9c3257d09e1e7ab7b23f049c7962cae8d842bbef0a9cec9/cryptography-46.0.3-cp38-abi3-win_arm64.whl", hash = "sha256:d89c3468de4cdc4f08a57e214384d0471911a3830fcdaf7a8cc587e42a866372", size = 2918740, upload-time = "2025-10-15T23:18:12.277Z" },
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
name = "deepdiff"
version = "8.6.1"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "orderly-set" },
]
sdist = { url = "https://files.pythonhosted.org/packages/19/76/36c9aab3d5c19a94091f7c6c6e784efca50d87b124bf026c36e94719f33c/deepdiff-8.6.1.tar.gz", hash = "sha256:ec56d7a769ca80891b5200ec7bd41eec300ced91ebcc7797b41eb2b3f3ff643a", size = 634054, upload-time = "2025-09-03T19:40:41.461Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/f7/e6/efe534ef0952b531b630780e19cabd416e2032697019d5295defc6ef9bd9/deepdiff-8.6.1-py3-none-any.whl", hash = "sha256:ee8708a7f7d37fb273a541fa24ad010ed484192cd0c4ffc0fa0ed5e2d4b9e78b", size = 91378, upload-time = "2025-09-03T19:40:39.679Z" },
]

[[package]]
name = "idna"
version = "3.11"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/6f/6d/0703ccc57f3a7233505399edb88de3cbd678da106337b9fcde432b65ed60/idna-3.11.tar.gz", hash = "sha256:795dafcc9c04ed0c1fb032c2aa73654d8e8c5023a7df64a53f39190ada629902", size = 194582, upload-time = "2025-10-12T14:55:20.501Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/0e/61/66938bbb5fc52dbdf84594873d5b51fb1f7c7794e9c0f5bd885f30bc507b/idna-3.11-py3-none-any.whl", hash = "sha256:771a87f49d9defaf64091e6e6fe9c18d4833f140bd19464795bc32d966ca37ea", size = 71008, upload-time = "2025-10-12T14:55:18.883Z" },
]

[[package]]
name = "iniconfig"
version = "2.3.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/72/34/14ca021ce8e5dfedc35312d08ba8bf51fdd999c576889fc2c24cb97f4f10/iniconfig-2.3.0.tar.gz", hash = "sha256:c76315c77db068650d49c5b56314774a7804df16fee4402c1f19d6d15d8c4730", size = 20503, upload-time = "2025-10-18T21:55:43.219Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/cb/b1/3846dd7f199d53cb17f49cba7e651e9ce294d8497c8c150530ed11865bb8/iniconfig-2.3.0-py3-none-any.whl", hash = "sha256:f631c04d2c48c52b84d0d0549c99ff3859c98df65b3101406327ecc7d53fbf12", size = 7484, upload-time = "2025-10-18T21:55:41.639Z" },
]

[[package]]
name = "jmespath"
version = "1.0.1"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/00/2a/e867e8531cf3e36b41201936b7fa7ba7b5702dbef42922193f05c8976cd6/jmespath-1.0.1.tar.gz", hash = "sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe", size = 25843, upload-time = "2022-06-17T18:00:12.224Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/31/b4/b9b800c45527aadd64d5b442f9b932b00648617eb5d63d2c7a6587b7cafc/jmespath-1.0.1-py3-none-any.whl", hash = "sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980", size = 20256, upload-time = "2022-06-17T18:00:10.251Z" },
]

[[package]]
name = "jsonpath-ng"
version = "1.7.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "ply" },
]
sdist = { url = "https://files.pythonhosted.org/packages/6d/86/08646239a313f895186ff0a4573452038eed8c86f54380b3ebac34d32fb2/jsonpath-ng-1.7.0.tar.gz", hash = "sha256:f6f5f7fd4e5ff79c785f1573b394043b39849fb2bb47bcead935d12b00beab3c", size = 37838, upload-time = "2024-10-11T15:41:42.404Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/35/5a/73ecb3d82f8615f32ccdadeb9356726d6cae3a4bbc840b437ceb95708063/jsonpath_ng-1.7.0-py3-none-any.whl", hash = "sha256:f3d7f9e848cba1b6da28c55b1c26ff915dc9e0b1ba7e752a53d6da8d5cbd00b6", size = 30105, upload-time = "2024-11-20T17:58:30.418Z" },
]

[[package]]
name = "orderly-set"
version = "5.5.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/4a/88/39c83c35d5e97cc203e9e77a4f93bf87ec89cf6a22ac4818fdcc65d66584/orderly_set-5.5.0.tar.gz", hash = "sha256:e87185c8e4d8afa64e7f8160ee2c542a475b738bc891dc3f58102e654125e6ce", size = 27414, upload-time = "2025-07-10T20:10:55.885Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/12/27/fb8d7338b4d551900fa3e580acbe7a0cf655d940e164cb5c00ec31961094/orderly_set-5.5.0-py3-none-any.whl", hash = "sha256:46f0b801948e98f427b412fcabb831677194c05c3b699b80de260374baa0b1e7", size = 13068, upload-time = "2025-07-10T20:10:54.377Z" },
]

[[package]]
name = "packaging"
version = "25.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/a1/d4/1fc4078c65507b51b96ca8f8c3ba19e6a61c8253c72794544580a7b6c24d/packaging-25.0.tar.gz", hash = "sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f", size = 165727, upload-time = "2025-04-19T11:48:59.673Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/20/12/38679034af332785aac8774540895e234f4d07f7545804097de4b666afd8/packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484", size = 66469, upload-time = "2025-04-19T11:48:57.875Z" },
]

[[package]]
name = "pluggy"
version = "1.6.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/f9/e2/3e91f31a7d2b083fe6ef3fa267035b518369d9511ffab804f839851d2779/pluggy-1.6.0.tar.gz", hash = "sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3", size = 69412, upload-time = "2025-05-15T12:30:07.975Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/54/20/4d324d65cc6d9205fabedc306948156824eb9f0ee1633355a8f7ec5c66bf/pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746", size = 20538, upload-time = "2025-05-15T12:30:06.134Z" },
]

[[package]]
name = "ply"
version = "3.11"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/e5/69/882ee5c9d017149285cab114ebeab373308ef0f874fcdac9beb90e0ac4da/ply-3.11.tar.gz", hash = "sha256:00c7c1aaa88358b9c765b6d3000c6eec0ba42abca5351b095321aef446081da3", size = 159130, upload-time = "2018-02-15T19:01:31.097Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/a3/58/35da89ee790598a0700ea49b2a66594140f44dec458c07e8e3d4979137fc/ply-3.11-py2.py3-none-any.whl", hash = "sha256:096f9b8350b65ebd2fd1346b12452efe5b9607f7482813ffca50c22722a807ce", size = 49567, upload-time = "2018-02-15T19:01:27.172Z" },
]

[[package]]
name = "psutil"
version = "7.1.3"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/e1/88/bdd0a41e5857d5d703287598cbf08dad90aed56774ea52ae071bae9071b6/psutil-7.1.3.tar.gz", hash = "sha256:6c86281738d77335af7aec228328e944b30930899ea760ecf33a4dba66be5e74", size = 489059, upload-time = "2025-11-02T12:25:54.619Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/bd/93/0c49e776b8734fef56ec9c5c57f923922f2cf0497d62e0f419465f28f3d0/psutil-7.1.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:0005da714eee687b4b8decd3d6cc7c6db36215c9e74e5ad2264b90c3df7d92dc", size = 239751, upload-time = "2025-11-02T12:25:58.161Z" },
{ url = "https://files.pythonhosted.org/packages/6f/8d/b31e39c769e70780f007969815195a55c81a63efebdd4dbe9e7a113adb2f/psutil-7.1.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:19644c85dcb987e35eeeaefdc3915d059dac7bd1167cdcdbf27e0ce2df0c08c0", size = 240368, upload-time = "2025-11-02T12:26:00.491Z" },
{ url = "https://files.pythonhosted.org/packages/62/61/23fd4acc3c9eebbf6b6c78bcd89e5d020cfde4acf0a9233e9d4e3fa698b4/psutil-7.1.3-cp313-cp313t-manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:95ef04cf2e5ba0ab9eaafc4a11eaae91b44f4ef5541acd2ee91d9108d00d59a7", size = 287134, upload-time = "2025-11-02T12:26:02.613Z" },
{ url = "https://files.pythonhosted.org/packages/30/1c/f921a009ea9ceb51aa355cb0cc118f68d354db36eae18174bab63affb3e6/psutil-7.1.3-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1068c303be3a72f8e18e412c5b2a8f6d31750fb152f9cb106b54090296c9d251", size = 289904, upload-time = "2025-11-02T12:26:05.207Z" },
{ url = "https://files.pythonhosted.org/packages/a6/82/62d68066e13e46a5116df187d319d1724b3f437ddd0f958756fc052677f4/psutil-7.1.3-cp313-cp313t-win_amd64.whl", hash = "sha256:18349c5c24b06ac5612c0428ec2a0331c26443d259e2a0144a9b24b4395b58fa", size = 249642, upload-time = "2025-11-02T12:26:07.447Z" },
{ url = "https://files.pythonhosted.org/packages/df/ad/c1cd5fe965c14a0392112f68362cfceb5230819dbb5b1888950d18a11d9f/psutil-7.1.3-cp313-cp313t-win_arm64.whl", hash = "sha256:c525ffa774fe4496282fb0b1187725793de3e7c6b29e41562733cae9ada151ee", size = 245518, upload-time = "2025-11-02T12:26:09.719Z" },
{ url = "https://files.pythonhosted.org/packages/2e/bb/6670bded3e3236eb4287c7bcdc167e9fae6e1e9286e437f7111caed2f909/psutil-7.1.3-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:b403da1df4d6d43973dc004d19cee3b848e998ae3154cc8097d139b77156c353", size = 239843, upload-time = "2025-11-02T12:26:11.968Z" },
{ url = "https://files.pythonhosted.org/packages/b8/66/853d50e75a38c9a7370ddbeefabdd3d3116b9c31ef94dc92c6729bc36bec/psutil-7.1.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:ad81425efc5e75da3f39b3e636293360ad8d0b49bed7df824c79764fb4ba9b8b", size = 240369, upload-time = "2025-11-02T12:26:14.358Z" },
{ url = "https://files.pythonhosted.org/packages/41/bd/313aba97cb5bfb26916dc29cf0646cbe4dd6a89ca69e8c6edce654876d39/psutil-7.1.3-cp314-cp314t-manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8f33a3702e167783a9213db10ad29650ebf383946e91bc77f28a5eb083496bc9", size = 288210, upload-time = "2025-11-02T12:26:16.699Z" },
{ url = "https://files.pythonhosted.org/packages/c2/fa/76e3c06e760927a0cfb5705eb38164254de34e9bd86db656d4dbaa228b04/psutil-7.1.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:fac9cd332c67f4422504297889da5ab7e05fd11e3c4392140f7370f4208ded1f", size = 291182, upload-time = "2025-11-02T12:26:18.848Z" },
{ url = "https://files.pythonhosted.org/packages/0f/1d/5774a91607035ee5078b8fd747686ebec28a962f178712de100d00b78a32/psutil-7.1.3-cp314-cp314t-win_amd64.whl", hash = "sha256:3792983e23b69843aea49c8f5b8f115572c5ab64c153bada5270086a2123c7e7", size = 250466, upload-time = "2025-11-02T12:26:21.183Z" },
{ url = "https://files.pythonhosted.org/packages/00/ca/e426584bacb43a5cb1ac91fae1937f478cd8fbe5e4ff96574e698a2c77cd/psutil-7.1.3-cp314-cp314t-win_arm64.whl", hash = "sha256:31d77fcedb7529f27bb3a0472bea9334349f9a04160e8e6e5020f22c59893264", size = 245756, upload-time = "2025-11-02T12:26:23.148Z" },
{ url = "https://files.pythonhosted.org/packages/ef/94/46b9154a800253e7ecff5aaacdf8ebf43db99de4a2dfa18575b02548654e/psutil-7.1.3-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:2bdbcd0e58ca14996a42adf3621a6244f1bb2e2e528886959c72cf1e326677ab", size = 238359, upload-time = "2025-11-02T12:26:25.284Z" },
{ url = "https://files.pythonhosted.org/packages/68/3a/9f93cff5c025029a36d9a92fef47220ab4692ee7f2be0fba9f92813d0cb8/psutil-7.1.3-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:bc31fa00f1fbc3c3802141eede66f3a2d51d89716a194bf2cd6fc68310a19880", size = 239171, upload-time = "2025-11-02T12:26:27.23Z" },
{ url = "https://files.pythonhosted.org/packages/ce/b1/5f49af514f76431ba4eea935b8ad3725cdeb397e9245ab919dbc1d1dc20f/psutil-7.1.3-cp36-abi3-manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3bb428f9f05c1225a558f53e30ccbad9930b11c3fc206836242de1091d3e7dd3", size = 263261, upload-time = "2025-11-02T12:26:29.48Z" },
{ url = "https://files.pythonhosted.org/packages/e0/95/992c8816a74016eb095e73585d747e0a8ea21a061ed3689474fabb29a395/psutil-7.1.3-cp36-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:56d974e02ca2c8eb4812c3f76c30e28836fffc311d55d979f1465c1feeb2b68b", size = 264635, upload-time = "2025-11-02T12:26:31.74Z" },
{ url = "https://files.pythonhosted.org/packages/55/4c/c3ed1a622b6ae2fd3c945a366e64eb35247a31e4db16cf5095e269e8eb3c/psutil-7.1.3-cp37-abi3-win_amd64.whl", hash = "sha256:f39c2c19fe824b47484b96f9692932248a54c43799a84282cfe58d05a6449efd", size = 247633, upload-time = "2025-11-02T12:26:33.887Z" },
{ url = "https://files.pythonhosted.org/packages/c9/ad/33b2ccec09bf96c2b2ef3f9a6f66baac8253d7565d8839e024a6b905d45d/psutil-7.1.3-cp37-abi3-win_arm64.whl", hash = "sha256:bd0d69cee829226a761e92f28140bec9a5ee9d5b4fb4b0cc589068dbfff559b1", size = 244608, upload-time = "2025-11-02T12:26:36.136Z" },
]

[[package]]
name = "pycparser"
version = "2.23"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/fe/cf/d2d3b9f5699fb1e4615c8e32ff220203e43b248e1dfcc6736ad9057731ca/pycparser-2.23.tar.gz", hash = "sha256:78816d4f24add8f10a06d6f05b4d424ad9e96cfebf68a4ddc99c65c0720d00c2", size = 173734, upload-time = "2025-09-09T13:23:47.91Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/a0/e3/59cd50310fc9b59512193629e1984c1f95e5c8ae6e5d8c69532ccc65a7fe/pycparser-2.23-py3-none-any.whl", hash = "sha256:e5c6e8d3fbad53479cab09ac03729e0a9faf2bee3db8208a550daf5af81a5934", size = 118140, upload-time = "2025-09-09T13:23:46.651Z" },
]

[[package]]
name = "pygments"
version = "2.19.2"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/b0/77/a5b8c569bf593b0140bde72ea885a803b82086995367bf2037de0159d924/pygments-2.19.2.tar.gz", hash = "sha256:636cb2477cec7f8952536970bc533bc43743542f70392ae026374600add5b887", size = 4968631, upload-time = "2025-06-21T13:39:12.283Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/c7/21/705964c7812476f378728bdf590ca4b771ec72385c533964653c68e86bdc/pygments-2.19.2-py3-none-any.whl", hash = "sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b", size = 1225217, upload-time = "2025-06-21T13:39:07.939Z" },
]

[[package]]
name = "pytest"
version = "9.0.1"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "colorama", marker = "sys_platform == 'win32'" },
{ name = "iniconfig" },
{ name = "packaging" },
{ name = "pluggy" },
{ name = "pygments" },
]
sdist = { url = "https://files.pythonhosted.org/packages/07/56/f013048ac4bc4c1d9be45afd4ab209ea62822fb1598f40687e6bf45dcea4/pytest-9.0.1.tar.gz", hash = "sha256:3e9c069ea73583e255c3b21cf46b8d3c56f6e3a1a8f6da94ccb0fcf57b9d73c8", size = 1564125, upload-time = "2025-11-12T13:05:09.333Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/0b/8b/6300fb80f858cda1c51ffa17075df5d846757081d11ab4aa35cef9e6258b/pytest-9.0.1-py3-none-any.whl", hash = "sha256:67be0030d194df2dfa7b556f2e56fb3c3315bd5c8822c6951162b92b32ce7dad", size = 373668, upload-time = "2025-11-12T13:05:07.379Z" },
]

[[package]]
name = "pytest-cov"
version = "7.0.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "coverage" },
{ name = "pluggy" },
{ name = "pytest" },
]
sdist = { url = "https://files.pythonhosted.org/packages/5e/f7/c933acc76f5208b3b00089573cf6a2bc26dc80a8aece8f52bb7d6b1855ca/pytest_cov-7.0.0.tar.gz", hash = "sha256:33c97eda2e049a0c5298e91f519302a1334c26ac65c1a483d6206fd458361af1", size = 54328, upload-time = "2025-09-09T10:57:02.113Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/ee/49/1377b49de7d0c1ce41292161ea0f721913fa8722c19fb9c1e3aa0367eecb/pytest_cov-7.0.0-py3-none-any.whl", hash = "sha256:3b8e9558b16cc1479da72058bdecf8073661c7f57f7d3c5f22a1c23507f2d861", size = 22424, upload-time = "2025-09-09T10:57:00.695Z" },
]

[[package]]
name = "requests"
version = "2.32.5"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "certifi" },
{ name = "charset-normalizer" },
{ name = "idna" },
{ name = "urllib3" },
]
sdist = { url = "https://files.pythonhosted.org/packages/c9/74/b3ff8e6c8446842c3f5c837e9c3dfcfe2018ea6ecef224c710c85ef728f4/requests-2.32.5.tar.gz", hash = "sha256:dbba0bac56e100853db0ea71b82b4dfd5fe2bf6d3754a8893c3af500cec7d7cf", size = 134517, upload-time = "2025-08-18T20:46:02.573Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/1e/db/4254e3eabe8020b458f1a747140d32277ec7a271daf1d235b70dc0b4e6e3/requests-2.32.5-py3-none-any.whl", hash = "sha256:2462f94637a34fd532264295e186976db0f5d453d1cdd31473c85a6a161affb6", size = 64738, upload-time = "2025-08-18T20:46:00.542Z" },
]

[[package]]
name = "urllib3"
version = "2.5.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/15/22/9ee70a2574a4f4599c47dd506532914ce044817c7752a79b6a51286319bc/urllib3-2.5.0.tar.gz", hash = "sha256:3fc47733c7e419d4bc3f6b3dc2b4f890bb743906a30d56ba4a5bfa4bbff92760", size = 393185, upload-time = "2025-06-18T14:07:41.644Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/a7/c2/fe1e52489ae3122415c51f387e221dd0773709bad6c6cdaa599e8a2c5185/urllib3-2.5.0-py3-none-any.whl", hash = "sha256:e6b01673c0fa6a13e374b50871808eb3bf7046c4b125b216f6bf1cc604cff0dc", size = 129795, upload-time = "2025-06-18T14:07:40.39Z" },
]