Compare commits
225 Commits
| SHA1 |
|---|
| d098eb58f3 |
| 5319a059ad |
| 163b8c4018 |
| 6322b95068 |
| 715ed1f9c2 |
| 82a759dd21 |
| fe913608c4 |
| 79f9c5d1c6 |
| 3d091129e2 |
| 1a978f786d |
| 51669d3c5f |
| d128dcb479 |
| 84286593f6 |
| 8d97f09e5e |
| 2748bc19be |
| 0b3c8fc774 |
| 7da18e0f00 |
| 49e38081ad |
| a14f993a31 |
| ae938f9909 |
| f91e0bb93a |
| d3f61005cf |
| 2923a3e88b |
| a73ced0067 |
| f89b91fe7f |
| 5950485d46 |
| f349927a63 |
| dfe8890598 |
| d224876a8e |
| 17e8c76b94 |
| 9034a31cd6 |
| 523e61c9f7 |
| cf575ded90 |
| 11a75d8532 |
| 6593e11332 |
| c310f669d6 |
| f327f47c3f |
| acd61e825e |
| 895701da59 |
| e0fb0db1f0 |
| dc7e56106e |
| 90e5179980 |
| 9db39003c4 |
| 4ffe372434 |
| a00c27c465 |
| 1f7f4b8d53 |
| baca79ce82 |
| 4265be6430 |
| c16b086467 |
| 48a98c0206 |
| f1788f057f |
| 0ad8883809 |
| 51e9b1ce7c |
| 0d3104f60a |
| d29f827fc9 |
| 282fe1f7c0 |
| afce5043e4 |
| 5996bb1fc0 |
| 06a17d7c30 |
| af7633183c |
| 1280b2f855 |
| 2e0b1f5951 |
| 548d7491b8 |
| ad99115544 |
| 52919cbc49 |
| 7f2dc13c31 |
| 592652cff1 |
| 6a1724695e |
| 037210756e |
| 4e78d83092 |
| 0e6331fa6a |
| c98c5df63c |
| 0981c74da9 |
| 31518799f6 |
| e8b4b9b48e |
| cd06272b38 |
| c5ab4352e3 |
| 0da4a6b70a |
| 11c5f3387c |
| 3ed0171e17 |
| c7b38b0d70 |
| caf0039de4 |
| 2637e1e42c |
| d0a1673965 |
| 07e5d23f72 |
| fb4fdb6857 |
| d642a13b6e |
| 8967031f91 |
| 89caada4cc |
| b3616269bc |
| 4fa22813ce |
| 3ee3a0dce0 |
| 1226721bc0 |
| a76eae0cc7 |
| 53cf2a6f48 |
| fe69530b38 |
| bf83c1c394 |
| 84ce43ab93 |
| 5e0765ee24 |
| 6edf9398b7 |
| 30bf9c1bcb |
| 0b59f3cc7a |
| 2544fad9ce |
| e579ef5834 |
| 543e9766a1 |
| 4c3611aba7 |
| dadc14563a |
| c1eda7305b |
| 2f4e236350 |
| b858936c68 |
| 78ce30283e |
| f85fbb86af |
| ed22105ec8 |
| 7c5af588c7 |
| 2690a285d9 |
| bb60a570d0 |
| ca0ab2d7d1 |
| 38bae7fb46 |
| 14466c3ff8 |
| fe824f9fb4 |
| ef5981b473 |
| 7d1ee70cf6 |
| 7c72d99619 |
| b32887a6d8 |
| 37a197e7f1 |
| 74cb3d2c54 |
| d19abcabc7 |
| f8ae6609c7 |
| cbd39ff161 |
| f8905a176c |
| 847288e91f |
| 446d9d5217 |
| 3a7a1659f0 |
| bc23006a34 |
| 6090995eba |
| 60db747d6d |
| a7a4141f58 |
| 2b04cbe239 |
| 765cc061c1 |
| 80319385f0 |
| 29dd906fe0 |
| d5dc4028c3 |
| 0df049d453 |
| 0bd7c1f685 |
| 2f08ecabbf |
| 12af1c80dc |
| a52b6e0a55 |
| a586cf65e2 |
| e2e7882bfa |
| 4f9c2b9d5f |
| 5203bcf1ea |
| f1e3bc8559 |
| b97ca6f064 |
| d1ea9874da |
| 3cd3f87d68 |
| 582937b866 |
| 2b8240c156 |
| abf4b7ac89 |
| 9c49f83c16 |
| 3a625ed0ee |
| 2cfbf4bb90 |
| 5767533668 |
| 24798f19ca |
| 26f8249187 |
| dcefa564da |
| edd35dccea |
| ea527ea60c |
| fd5e1db22b |
| 39e23faf7f |
| de285b531a |
| 0a29a592f9 |
| e045b1d3b5 |
| 280e5fa861 |
| 472d3495b5 |
| 2778ac6870 |
| 743a0a8ac9 |
| 694712ed2e |
| ea3b4f1790 |
| da68818d4f |
| db6a3b53c5 |
| 82b089498e |
| 948b0dd5e7 |
| 4acc0b51b1 |
| a626b738a9 |
| 7119844313 |
| 5763f57830 |
| 70e8ceecce |
| acbe1ac692 |
| 99bca2c467 |
| b74ed1f30e |
| 8082ab78a1 |
| c69076f517 |
| 648ab001b6 |
| 447034046e |
| 0770ac0bb4 |
| aa2fbd4f70 |
| 58c8447531 |
| bcca43d774 |
| e9ccfe7ad2 |
| 6c2637ad34 |
| 7183d05dd6 |
| b45ca85cd3 |
| 4ca45ebc73 |
| 6902768fed |
| 3f9f2ceaac |
| 2a248bd249 |
| c559a6bafb |
| 19d7e9b5ed |
| 3e5a5accf7 |
| 424c91945a |
| c657dc564e |
| 208f002284 |
| 084ecc01e0 |
| 08cb994d8d |
| 67f1a6688d |
| efb7968e93 |
| fe7c7db004 |
| 79d1ccae9a |
| 6e69af4aa8 |
| d500b7d473 |
| ef599a1aad |
| 2d197134f1 |
| 717080a009 |
| 19197c71ff |
| 051b93f2d8 |
`.gitignore` (vendored, 1 line added)

```diff
@@ -4,3 +4,4 @@
 .mypy_cache/
 **/.env
 .coverage
+uv.lock
```
`README.md` (new file, 133 lines added)

# CoreLibs for Python

> [!warning]
> This is pre-production; locations of methods and names of paths can change.
>
> This will be split up into one module per file, and this package will become just a collection holder.
> See [Deprecated](#deprecated) below.

This is a pip package that can be installed into any project and covers the following parts:

- logging update with exception logs
- requests wrapper for easier auth pass on access
- dict fingerprinting
- sending email
- jmespath search
- json helpers for content replace and output
- dump outputs for data for debugging
- progress printing
- string formatting, time creation, byte formatting
- Enum base class
- SQLite simple IO class
- Symmetric encryption

## Current list

- config_handling: simple INI config file data loader with check/convert/etc
- csv_interface: csv dict writer/reader helper
- debug_handling: various debug helpers like data dumper, timer, utilization, etc
- db_handling: SQLite interface class
- encyption_handling: symmetric encryption
- email_handling: simple email sending
- file_handling: crc handling for file content and file names, progress bar
- json_handling: jmespath support and json date support, replace content in dict with json paths
- iterator_handling: list and dictionary handling support (search, fingerprinting, etc)
- logging_handling: extend log and also error message handling
- requests_handling: requests wrapper for better calls with auth headers
- script_handling: pid lock file handling, abort timer
- string_handling: byte format, datetime format, datetime compare, hashing, string formats for numbers, double byte string format, etc
- var_handling: var type checkers, enum base class

## Unfinished

- csv_handling/csv_interface: the CSV DictWriter interface is implemented only in a very basic way
- script_handling/script_helpers: no idea if there is need for this; tests are written but not finished

## Deprecated

All content in this module will move to standalone libraries. As of now, the following entries have moved and will throw deprecation warnings if used:

- var_handling.enum_base: corelibs-enum-base
- var_handling.var_helpers: corelibs-var
- datetime_handling: corelibs-datetime
- string_handling.text_colors: corelibs-text-colors

## UV setup

uv must be [installed](https://docs.astral.sh/uv/getting-started/installation/)

## How to publish

Have the following setup in `pyproject.toml`:

```toml
[[tool.uv.index]]
name = "opj-pypi"
url = "https://git.egplusww.jp/api/packages/PyPI/pypi/simple/"
publish-url = "https://git.egplusww.jp/api/packages/PyPI/pypi"
explicit = true
```

```sh
uv build
uv publish --index opj-pypi --token <gitea token>
```

## Use package

We must set the full index URL here because we run with "--no-project":

```sh
uv run --with corelibs --index opj-pypi=https://git.egplusww.jp/api/packages/PyPI/pypi/simple/ --no-project -- python -c "import corelibs"
```

### Python tests

All Python tests are in the tests/ folder. They are structured by the source folder layout.

Run them with:

```sh
uv run pytest
```

Get a coverage report:

```sh
uv run pytest --cov=corelibs
uv run pytest --cov=corelibs --cov-report=term-missing
```

### Other tests

Usage and run tests are located in the test-run folder; run them with:

```sh
uv run test-run/<script>
```

## How to install in another project

This will also add the index entry:

```sh
uv add corelibs --index opj-pypi=https://git.egplusww.jp/api/packages/PyPI/pypi/simple/
```

## Python venv setup

After clone, run the command below to install all dependencies:

```sh
uv sync
```

## NOTE on TLS problems

> [!warning]
> TLS problems with Netskope

If the Netskope service is running, all uv runs will fail unless either `--native-tls` is set or the environment variable `SSL_CERT_FILE` is set; see below:

```sh
export SSL_CERT_FILE='/Library/Application Support/Netskope/STAgent/data/nscacert_combined.pem'
```
`ReadMe.md` (deleted, 106 lines removed)

# CoreLibs for Python

This is a pip package that can be installed into any project and covers the following parts:

- logging update with exception logs
- requests wrapper for easier auth pass on access
- dict fingerprinting
- jmespath search
- dump outputs for data
- progress printing
- string formatting, time creation, byte formatting

## Current list

- csv_handling: csv dict writer helper
- debug_handling: various debug helpers like data dumper, timer, utilization, etc
- file_handling: crc handling for file content and file names, progress bar
- json_handling: jmespath support and json date support
- list_dict_handling: list and dictionary handling support (search, fingerprinting, etc)
- logging_handling: extend log and also error message handling
- requests_handling: requests wrapper for better calls with auth headers
- script_handling: pid lock file handling, abort timer
- string_handling: byte format, datetime format, hashing, string formats for numbers, double byte string format, etc

## UV setup

uv must be [installed](https://docs.astral.sh/uv/getting-started/installation/)

## How to publish

Have the following setup in `pyproject.toml`:

```toml
[[tool.uv.index]]
name = "egra-gitea"
url = "https://git.egplusww.jp/api/packages/PyPI/pypi/simple/"
publish-url = "https://git.egplusww.jp/api/packages/PyPI/pypi"
explicit = true
```

```sh
uv build --native-tls
uv publish --index egra-gitea --token <gitea token> --native-tls
```

## Test package

We must set the full index URL here because we run with "--no-project":

```sh
uv run --with corelibs --index egra-gitea=https://git.egplusww.jp/api/packages/PyPI/pypi/simple/ --no-project --native-tls -- python -c "import corelibs"
```

### Python tests

All Python tests are in the tests/ folder. They are structured by the source folder layout.

Run them with:

```sh
uv run pytest
```

Get a coverage report:

```sh
uv run pytest --cov=corelibs
```

### Other tests

Usage and run tests are located in the test-run folder:

```sh
uv run --native-tls test-run/progress/progress_test.py
```

```sh
uv run --native-tls test-run/double_byte_string_format/double_byte_string_format.py
```

```sh
uv run --native-tls test-run/timestamp_strings/timestamp_strings.py
```

## How to install in another project

This will also add the index entry:

```sh
uv add corelibs --index egra-gitea=https://git.egplusww.jp/api/packages/PyPI/pypi/simple/ --native-tls
```

## Python venv setup

In the folder where the script will be located:

```sh
uv venv --python 3.13
```

Install all needed dependencies:

```sh
uv sync
```
`SECURITY.md` (new file, 11 lines added)

# Security Policy

This software follows the [Semver 2.0 scheme](https://semver.org/).

## Supported Versions

Only the latest version is supported.

## Reporting a Vulnerability

Open a ticket to report a security problem.
`ToDo.md` (7 lines changed)

```diff
@@ -1,4 +1,7 @@
 # ToDo list
 
-- stub files .pyi
-- fix all remaining check errors
+- [x] stub files .pyi
+- [ ] Add tests for all, we need 100% test coverage
+- [x] Log: add custom format for "stack_correct" if set, this will override the normal stack block
+- [ ] Log: add rotate for size based
+- [ ] All folders and file names need to be revisited for naming and content collection
```
`pyproject.toml`

```diff
@@ -1,34 +1,47 @@
 # MARK: Project info
 [project]
 name = "corelibs"
-version = "0.7.0"
+version = "0.48.0"
 description = "Collection of utils for Python scripts"
-readme = "ReadMe.md"
+readme = "README.md"
 requires-python = ">=3.13"
 dependencies = [
+    "corelibs-datetime>=1.0.1",
+    "corelibs-enum-base>=1.0.0",
+    "corelibs-text-colors>=1.0.0",
+    "corelibs-var>=1.0.0",
+    "cryptography>=46.0.3",
     "jmespath>=1.0.1",
+    "jsonpath-ng>=1.7.0",
     "psutil>=7.0.0",
-    "requests>=2.32.4",
+    "requests[socks]>=2.32.5",
 ]
-# set this to disable publish to pypi (pip)
-# classifiers = ["Private :: Do Not Upload"]
-
-# MARK: build target
-[[tool.uv.index]]
-name = "egra-gitea"
-url = "https://git.egplusww.jp/api/packages/PyPI/pypi/simple/"
-publish-url = "https://git.egplusww.jp/api/packages/PyPI/pypi"
-explicit = true
 
 # MARK: build system
 [build-system]
 requires = ["hatchling"]
 build-backend = "hatchling.build"
+# set this to disable publish to pypi (pip)
+# classifiers = ["Private :: Do Not Upload"]
+
+# MARK: build target
+[[tool.uv.index]]
+name = "opj-pypi"
+url = "https://git.egplusww.jp/api/packages/PyPI/pypi/simple/"
+publish-url = "https://git.egplusww.jp/api/packages/PyPI/pypi"
+
+[tool.uv.sources]
+corelibs-enum-base = { index = "opj-pypi" }
+corelibs-datetime = { index = "opj-pypi" }
+corelibs-var = { index = "opj-pypi" }
+corelibs-text-colors = { index = "opj-pypi" }
 
 [dependency-groups]
 dev = [
+    "deepdiff>=8.6.1",
     "pytest>=8.4.1",
     "pytest-cov>=6.2.1",
+    "typing-extensions>=4.15.0",
 ]
 
 # MARK: Python linting
```

```diff
@@ -53,6 +66,38 @@ notes = ["FIXME", "TODO"]
notes-rgx = '(FIXME|TODO)(\((TTD-|#)\[0-9]+\))'
[tool.flake8]
max-line-length = 120
ignore = [
    "E741",  # ignore ambiguous variable name
    "W504"   # Line break occurred after a binary operator [wrongly triggered by "or" in if]
]
[tool.pylint.MASTER]
# this is for the tests/etc folders
init-hook='import sys; sys.path.append("src/")'

# MARK: Testing
[tool.pytest.ini_options]
testpaths = [
    "tests",
]

[tool.coverage.run]
omit = [
    "*/tests/*",
    "*/test_*.py",
    "*/__init__.py"
]

[tool.coverage.report]
exclude_lines = [
    "pragma: no cover",
    "def __repr__",
    "def __str__",
    "raise AssertionError",
    "raise NotImplementedError",
    "if __name__ == .__main__.:"
]
exclude_also = [
    "def __.*__\\(",
    "def __.*\\(",
    "def _.*\\(",
]
```
`src/corelibs/check_handling/regex_constants.py` (new file, 54 lines added)

```python
"""
List of regex compiled strings that can be used
"""

import re


def compile_re(reg: str) -> re.Pattern[str]:
    """
    compile a regex with verbose flag

    Arguments:
        reg {str} -- _description_

    Returns:
        re.Pattern[str] -- _description_
    """
    return re.compile(reg, re.VERBOSE)


# email regex
SUB_EMAIL_BASIC_REGEX: str = r"""
[A-Za-z0-9!#$%&'*+\-\/=?^_`{|}~][A-Za-z0-9!#$%:\(\)&'*+\-\/=?^_`{|}~\.]{0,63}
@(?!-)[A-Za-z0-9-]{1,63}(?<!-)(?:\.[A-Za-z0-9-]{1,63}(?<!-))*\.[a-zA-Z]{2,6}
"""
EMAIL_BASIC_REGEX = rf"^{SUB_EMAIL_BASIC_REGEX}$"
# name + email regex for email sending type like "foo bar" <email@mail.com>
NAME_EMAIL_SIMPLE_REGEX = r"""
^\s*(?:"(?P<name1>[^"]+)"\s*<(?P<email1>[^>]+)>|
(?P<name2>.+?)\s*<(?P<email2>[^>]+)>|
<(?P<email3>[^>]+)>|
(?P<email4>[^\s<>]+))\s*$
"""
# name + email with the basic regex set
NAME_EMAIL_BASIC_REGEX = rf"""
^\s*(?:
"(?P<name1>[^"]+)"\s*<(?P<email1>{SUB_EMAIL_BASIC_REGEX})>|
(?P<name2>.+?)\s*<(?P<email2>{SUB_EMAIL_BASIC_REGEX})>|
<(?P<email3>{SUB_EMAIL_BASIC_REGEX})>|
(?P<email4>{SUB_EMAIL_BASIC_REGEX})
)\s*$
"""
# Domain regex with localhost
DOMAIN_WITH_LOCALHOST_REGEX: str = r"""
^(?:localhost|(?!-)[A-Za-z0-9-]{1,63}(?<!-)(?:\.[A-Za-z0-9-]{1,63}(?<!-))*\.[A-Za-z]{2,})$
"""
# domain regex with localhost and optional port
DOMAIN_WITH_LOCALHOST_PORT_REGEX: str = r"""
^(?:localhost|(?!-)[A-Za-z0-9-]{1,63}(?<!-)(?:\.[A-Za-z0-9-]{1,63}(?<!-))*\.[A-Za-z]{2,})(?::\d+)?$
"""
# Domain, no localhost
DOMAIN_REGEX: str = r"^(?!-)[A-Za-z0-9-]{1,63}(?<!-)(?:\.[A-Za-z0-9-]{1,63}(?<!-))*\.[A-Za-z]{2,}$"

# __END__
```
`src/corelibs/check_handling/regex_constants_compiled.py` (new file, 23 lines added)

```python
"""
List of regex compiled strings that can be used
"""

from corelibs.check_handling.regex_constants import (
    compile_re,
    EMAIL_BASIC_REGEX,
    NAME_EMAIL_SIMPLE_REGEX,
    NAME_EMAIL_BASIC_REGEX,
    DOMAIN_WITH_LOCALHOST_REGEX,
    DOMAIN_WITH_LOCALHOST_PORT_REGEX,
    DOMAIN_REGEX
)

# all above in compiled form
COMPILED_EMAIL_BASIC_REGEX = compile_re(EMAIL_BASIC_REGEX)
COMPILED_NAME_EMAIL_SIMPLE_REGEX = compile_re(NAME_EMAIL_SIMPLE_REGEX)
COMPILED_NAME_EMAIL_BASIC_REGEX = compile_re(NAME_EMAIL_BASIC_REGEX)
COMPILED_DOMAIN_WITH_LOCALHOST_REGEX = compile_re(DOMAIN_WITH_LOCALHOST_REGEX)
COMPILED_DOMAIN_WITH_LOCALHOST_PORT_REGEX = compile_re(DOMAIN_WITH_LOCALHOST_PORT_REGEX)
COMPILED_DOMAIN_REGEX = compile_re(DOMAIN_REGEX)

# __END__
```
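For a quick sanity check, here is a minimal usage sketch for the two modules above; the sample addresses are made up, and it assumes the corelibs package is importable:

```python
from corelibs.check_handling.regex_constants_compiled import (
    COMPILED_EMAIL_BASIC_REGEX,
    COMPILED_NAME_EMAIL_SIMPLE_REGEX,
)

# plain address check; the ^...$ anchors are part of the pattern itself
print(bool(COMPILED_EMAIL_BASIC_REGEX.search("foo.bar@example.com")))  # True
print(bool(COMPILED_EMAIL_BASIC_REGEX.search("not an email")))         # False

# '"name" <email>' style input (hypothetical value), extracted via the named groups
if (m := COMPILED_NAME_EMAIL_SIMPLE_REGEX.search('"Foo Bar" <foo@example.com>')):
    print(m.group("name1"), m.group("email1"))  # Foo Bar foo@example.com
```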
`src/corelibs/config_handling/settings_loader.py` (new file, 587 lines added)

```python
"""
Load settings file for a certain group
Check data for existing and valid
Additional check for override settings as arguments
"""

import re
import configparser
from typing import Any, Tuple, Sequence, cast
from pathlib import Path
from corelibs_var.var_helpers import is_int, is_float, str_to_bool
from corelibs.logging_handling.log import Log
from corelibs.iterator_handling.list_helpers import convert_to_list, is_list_in_list
from corelibs.config_handling.settings_loader_handling.settings_loader_check import SettingsLoaderCheck


class SettingsLoader:
    """
    Settings Loader with Argument parser
    """

    # split char
    DEFAULT_ELEMENT_SPLIT_CHAR: str = ','

    CONVERT_TO_LIST: list[str] = ['str', 'int', 'float', 'bool', 'auto']

    def __init__(
        self,
        args: dict[str, Any],
        config_file: Path,
        log: 'Log | None' = None,
        always_print: bool = False
    ) -> None:
        """
        init the Settings loader

        Args:
            args (dict): Script Arguments
            config_file (Path): config file including path
            log (Log | None): Log class, if set errors are written to this
            always_print (bool): Set to true to always print errors, even if Log is available
            element_split_char (str): Split character, default is ','

        Raises:
            ValueError: _description_
        """
        self.args = args
        self.config_file = config_file
        self.log = log
        self.always_print = always_print
        # config parser, load config file first
        self.config_parser: configparser.ConfigParser | None = self.__load_config_file()
        # for check settings, abort flag
        self.__check_settings_abort: bool = False

        # error messages for raise ValueError
        self.__error_msg: list[str] = []

    # MARK: load settings
    def load_settings(
        self,
        config_id: str,
        config_validate: dict[str, list[str]] | None = None,
        allow_not_exist: bool = False
    ) -> dict[str, str]:
        """
        neutral settings loader

        The settings values on the right side are seen as a list if they have "," inside (see ELEMENT SPLIT CHAR)
        but only if the "check:list." is set

        for the allowed entries set, each set is "key => checks", check set is "check type:settings"
        key: the key name in the settings file
        check: check set with the following allowed entries on the left side for type
            - mandatory: must be set as "mandatory:yes", if the key entry is missing or empty throws error
            - check: see __check_settings for the settings currently available
            - matching: a | list of entries where the value has to match too
            - in: the right side is another KEY value from the settings where this value must be inside
            - split: character to split entries, if set check:list+ must be set if checks are needed
            - convert: convert to int, float -> if element is number convert, else leave as is
            - empty: convert empty to, if nothing set on the right side then convert to None type

        TODO: there should be a config/options argument for general settings

        Args:
            config_id (str): what block to load
            config_validate (dict[str, list[str]]): list of allowed entry sets
            allow_not_exist (bool): If set to True, does not throw an error, but returns empty set

        Returns:
            dict[str, str]: key = value list
        """
        # reset error message list before run
        self.__error_msg = []
        # default set entries
        entry_set_empty: dict[str, str | None] = {}
        # entries that have to be split
        entry_split_char: dict[str, str] = {}
        # entries that should be converted
        entry_convert: dict[str, str] = {}
        # no args to set
        args_overrride: list[str] = []
        # all the settings for the config id given
        settings: dict[str, dict[str, Any]] = {
            config_id: {},
        }
        if config_validate is None:
            config_validate = {}
        if self.config_parser is not None:
            try:
                # load all data as is, validation is done afterwards
                settings[config_id] = dict(self.config_parser[config_id])
            except KeyError as e:
                if allow_not_exist is True:
                    return {}
                raise ValueError(self.__print(
                    f"[!] Cannot read [{config_id}] block in the file {self.config_file}: {e}",
                    'CRITICAL'
                )) from e
            try:
                for key, checks in config_validate.items():
                    skip = True
                    split_char = self.DEFAULT_ELEMENT_SPLIT_CHAR
                    # if one is set as list in check -> do not skip, but add to list
                    for check in checks:
                        if check.startswith("convert:"):
                            try:
                                [_, convert_to] = check.split(":")
                                if convert_to not in self.CONVERT_TO_LIST:
                                    raise ValueError(self.__print(
                                        f"[!] In [{config_id}] the convert type is invalid {check}: {convert_to}",
                                        'CRITICAL'
                                    ))
                                entry_convert[key] = convert_to
                            except ValueError as e:
                                raise ValueError(self.__print(
                                    f"[!] In [{config_id}] the convert type setup for entry failed: {check}: {e}",
                                    'CRITICAL'
                                )) from e
                        if check.startswith('empty:'):
                            try:
                                [_, empty_set] = check.split(":")
                                if not empty_set:
                                    empty_set = None
                                entry_set_empty[key] = empty_set
                            except ValueError as e:
                                print(f"VALUE ERROR: {key}")
                                raise ValueError(self.__print(
                                    f"[!] In [{config_id}] the empty set type for entry failed: {check}: {e}",
                                    'CRITICAL'
                                )) from e
                        # split char, also check to not set it twice, first one only
                        if check.startswith("split:") and not entry_split_char.get(key):
                            try:
                                [_, split_char] = check.split(":")
                                if len(split_char) == 0:
                                    self.__print(
                                        (
                                            f"[*] In [{config_id}] the [{key}] split char character is empty, "
                                            f"fallback to: {self.DEFAULT_ELEMENT_SPLIT_CHAR}"
                                        ),
                                        "WARNING"
                                    )
                                    split_char = self.DEFAULT_ELEMENT_SPLIT_CHAR
                                entry_split_char[key] = split_char
                                skip = False
                            except ValueError as e:
                                raise ValueError(self.__print(
                                    f"[!] In [{config_id}] the split character setup for entry failed: {check}: {e}",
                                    'CRITICAL'
                                )) from e
                        if check == "args_override:yes":
                            args_overrride.append(key)
                    if skip:
                        continue
                    if settings[config_id][key]:
                        settings[config_id][key] = [
                            __value.replace(" ", "")
                            for __value in settings[config_id][key].split(split_char)
                        ]
                    else:
                        settings[config_id][key] = []
            except KeyError as e:
                raise ValueError(self.__print(
                    f"[!] Cannot read [{config_id}] block because the entry [{e}] could not be found",
                    'CRITICAL'
                )) from e
        else:
            # ignore error if arguments are set
            if not self.__check_arguments(config_validate, True):
                raise ValueError(self.__print(f"[!] Cannot find file: {self.config_file}", 'CRITICAL'))
            # base set
            settings[config_id] = {}
        # make sure all are set
        # if we have arguments set, this overrides config settings
        error: bool = False
        for entry, validate in config_validate.items():
            # if we have a command line option set, this one overrides config
            if (args_entry := self.__get_arg(entry)) is not None:
                self.__print(f"[*] Command line option override for: {entry}", 'WARNING')
                if (
                    # only set if flagged as allowed override from args
                    entry in args_overrride and
                    (isinstance(args_entry, list) and entry_split_char.get(entry)) or
                    (not isinstance(args_entry, list) and not entry_split_char.get(entry))
                ):
                    # args is list, but entry has no split, do not set
                    settings[config_id][entry] = args_entry
            # validate checks
            for check in validate:
                # CHECKS
                # - mandatory
                # - check: regex check (see SettingsLoaderCheck class for entries)
                # - matching: entry in given list
                # - in: entry in other setting entry list
                # - length: for string length
                # - range: for int/float range check
                # mandatory check
                if check == "mandatory:yes" and (
                    not settings[config_id].get(entry) or settings[config_id].get(entry) == ['']
                ):
                    error = True
                    self.__print(f"[!] Missing content entry for: {entry}", 'ERROR')
                # skip if empty none
                if settings[config_id].get(entry) is None:
                    continue
                if check.startswith("check:"):
                    # replace the check and run normal checks
                    settings[config_id][entry] = self.__check_settings(
                        check, entry, settings[config_id][entry]
                    )
                    if self.__check_settings_abort is True:
                        error = True
                elif check.startswith("matching:"):
                    checks = check.replace("matching:", "").split("|")
                    if __result := is_list_in_list(convert_to_list(settings[config_id][entry]), list(checks)):
                        error = True
                        self.__print(f"[!] [{entry}] '{__result}' not matching {checks}", 'ERROR')
                elif check.startswith("in:"):
                    check = check.replace("in:", "")
                    # skip if check does not exist, and set error
                    if settings[config_id].get(check) is None:
                        error = True
                        self.__print(f"[!] [{entry}] '{check}' target does not exist", 'ERROR')
                        continue
                    # entry must be in check entry
                    # in for list, else equal with convert to string
                    if (
                        __result := is_list_in_list(
                            convert_to_list(settings[config_id][entry]),
                            __checks := convert_to_list(settings[config_id][check])
                        )
                    ):
                        self.__print(f"[!] [{entry}] '{__result}' must be in the '{__checks}' values list", 'ERROR')
                        error = True
                elif check.startswith('length:'):
                    check = check.replace("length:", "")
                    # length can be: n, n-, n-m, -m
                    # as: equal, >= >=< =<
                    self.__build_from_to_equal(entry, check)
                    if not self.__length_range_validate(
                        entry,
                        'length',
                        cast(list[str], convert_to_list(settings[config_id][entry])),
                        self.__build_from_to_equal(entry, check, convert_to_int=True)
                    ):
                        error = True
                elif check.startswith('range:'):
                    check = check.replace("range:", "")
                    if not self.__length_range_validate(
                        entry,
                        'range',
                        cast(list[str], convert_to_list(settings[config_id][entry])),
                        self.__build_from_to_equal(entry, check)
                    ):
                        error = True
                # after post clean up if we have empty entries and we are mandatory
                if check == "mandatory:yes" and (
                    not settings[config_id].get(entry) or settings[config_id].get(entry) == ['']
                ):
                    error = True
                    self.__print(f"[!] Missing content entry for: {entry}", 'ERROR')
        if error is True:
            self.__print("[!] Missing or incorrect settings data. Cannot proceed", 'CRITICAL')
            raise ValueError(
                "Missing or incorrect settings data. Cannot proceed: " + "; ".join(self.__error_msg)
            )
        # set empty
        for [entry, empty_set] in entry_set_empty.items():
            # if set, skip, else set to empty value
            if settings[config_id].get(entry) or isinstance(settings[config_id].get(entry), list):
                continue
            settings[config_id][entry] = empty_set
        # Convert input
        for [entry, convert_type] in entry_convert.items():
            if convert_type in ["int", "any"] and is_int(settings[config_id][entry]):
                settings[config_id][entry] = int(settings[config_id][entry])
            elif convert_type in ["float", "any"] and is_float(settings[config_id][entry]):
                settings[config_id][entry] = float(settings[config_id][entry])
            elif convert_type in ["bool", "any"] and (
                settings[config_id][entry].lower() == "true" or
                settings[config_id][entry].lower() == "false"
            ):
                try:
                    settings[config_id][entry] = str_to_bool(settings[config_id][entry])
                except ValueError:
                    self.__print(
                        f"[!] Could not convert to boolean for '{entry}': {settings[config_id][entry]}",
                        'ERROR'
                    )
            # string is always string
            # TODO: empty and int/float/bool: set to none?

        return settings[config_id]

    # MARK: build from/to/equal logic
    def __build_from_to_equal(
        self, entry: str, check: str, convert_to_int: bool = False
    ) -> Tuple[float | None, float | None, float | None]:
        """
        split out the "n-m" part to get the to/from/equal

        Arguments:
            entry {str} -- _description_
            check {str} -- _description_

        Returns:
            Tuple[float | None, float | None, float | None] -- _description_

        Throws:
            ValueError if range/length entries are not float
        """
        __from = None
        __to = None
        __equal = None
        try:
            [__from, __to] = check.split('-')
            if (__from and not is_float(__from)) or (__to and not is_float(__to)):
                raise ValueError(self.__print(
                    f"[{entry}] Check value for length is not in: {check}",
                    'CRITICAL'
                ))
            if len(__from) == 0:
                __from = None
            if len(__to) == 0:
                __to = None
        except ValueError as e:
            if not is_float(__equal := check):
                raise ValueError(self.__print(
                    f"[{entry}] Check value for length is not a valid integer: {check}",
                    'CRITICAL'
                )) from e
            if len(__equal) == 0:
                __equal = None
        # make sure this is all int or None
        if __from is not None:
            __from = int(__from) if convert_to_int else float(__from)
        if __to is not None:
            __to = int(__to) if convert_to_int else float(__to)
        if __equal is not None:
            __equal = int(__equal) if convert_to_int else float(__equal)
        return (
            __from,
            __to,
            __equal
        )

    # MARK: length/range validation
    def __length_range_validate(
        self,
        entry: str,
        check_type: str,
        values: Sequence[str | int | float],
        check: Tuple[float | None, float | None, float | None],
    ) -> bool:
        (__from, __to, __equal) = check
        valid = True
        for value_raw in convert_to_list(values):
            # skip not set values for range check
            if not value_raw:
                continue
            value = 0
            error_mark = ''
            if check_type == 'length':
                error_mark = 'length'
                value = len(str(value_raw))
            elif check_type == 'range':
                error_mark = 'range'
                value = float(str(value_raw))
            if __equal is not None and value != __equal:
                self.__print(f"[!] [{entry}] '{value_raw}' {error_mark} does not match {__equal}", 'ERROR')
                valid = False
                continue
            if __from is not None and __to is None and value < __from:
                self.__print(f"[!] [{entry}] '{value_raw}' {error_mark} smaller than minimum {__from}", 'ERROR')
                valid = False
                continue
            if __from is None and __to is not None and value > __to:
                self.__print(f"[!] [{entry}] '{value_raw}' {error_mark} larger than maximum {__to}", 'ERROR')
                valid = False
                continue
            if __from is not None and __to is not None and (
                value < __from or value > __to
            ):
                self.__print(
                    f"[!] [{entry}] '{value_raw}' {error_mark} outside valid range {__from} to {__to}",
                    'ERROR'
                )
                valid = False
                continue
        return valid

    # MARK: load config file data from file
    def __load_config_file(self) -> configparser.ConfigParser | None:
        """
        load and parse the config file
        if not loadable return None
        """
        # remove file name and get base path and check
        if not self.config_file.parent.is_dir():
            raise ValueError(f"Cannot find the config folder: {self.config_file.parent}")
        config = configparser.ConfigParser()
        if self.config_file.is_file():
            config.read(self.config_file)
            return config
        return None

    # MARK: regex clean up one
    def __clean_invalid_setting(
        self,
        entry: str,
        validate: str,
        value: str,
        regex: str,
        regex_clean: str | None,
        replace: str = "",
        print_error: bool = True,
    ) -> str:
        """
        check if a string is invalid, print optional error message and clean up string

        Args:
            entry (str): what entry key
            validate (str): validate type
            value (str): the value to check against
            regex (str): regex used for checking as r'...'
            regex_clean (str): regex used for cleaning as r'...'
            replace (str): replace with character. Defaults to ''
            print_error (bool): print the error message. Defaults to True
        """
        check = re.compile(regex, re.VERBOSE)
        clean: re.Pattern[str] | None = None
        if regex_clean is not None:
            clean = re.compile(regex_clean, re.VERBOSE)
        # value must be set if clean is None, else empty value is allowed and will fail
        if (clean is None and value or clean) and not check.search(value):
            self.__print(
                f"[!] Invalid content for '{entry}' with check '{validate}' and data: {value}",
                'ERROR', print_error
            )
            # clean up if clean up is not none, else return EMPTY string
            if clean is not None:
                return clean.sub(replace, value)
            self.__check_settings_abort = True
            return ''
        # else return as is
        return value

    # MARK: check settings, regex
    def __check_settings(
        self,
        check: str, entry: str, setting_value: list[str] | str
    ) -> list[str] | str:
        """
        check each setting valid
        The settings are defined in the SettingsLoaderCheck class

        Args:
            check (str): What check to run
            entry (str): Variable name, just for information message
            setting_value (list[str | int] | str | int): settings value data

        Returns:
            list[str | int] | str | int: cleaned up settings value data
        """
        check = check.replace("check:", "")
        # get the check settings
        __check_settings = SettingsLoaderCheck.CHECK_SETTINGS.get(check)
        if __check_settings is None:
            raise ValueError(self.__print(
                f"[{entry}] Cannot get SettingsLoaderCheck.CHECK_SETTINGS for {check}",
                'CRITICAL'
            ))
        # reset the abort check
        self.__check_settings_abort = False
        # either removes or replaces invalid characters in the list
        if isinstance(setting_value, list):
            # clean up invalid characters
            # loop over result and keep only filled (strip empty)
            setting_value = [e for e in [
                self.__clean_invalid_setting(
                    entry, check, str(__entry),
                    __check_settings['regex'], __check_settings['regex_clean'], __check_settings['replace']
                )
                for __entry in setting_value
            ] if e]
        else:
            setting_value = self.__clean_invalid_setting(
                entry, check, str(setting_value),
                __check_settings['regex'], __check_settings['regex_clean'], __check_settings['replace']
            )
        # else:
        #     self.__print("[!] Unknown type to check", 'ERROR')
        # return data
        return setting_value

    # MARK: check arguments, for config file load fail
    def __check_arguments(self, arguments: dict[str, list[str]], all_set: bool = False) -> bool:
        """
        check if at least one argument is set

        Args:
            arguments (list[str]): _description_

        Returns:
            bool: _description_
        """
        count_set = 0
        count_arguments = 0
        has_argument = False
        for argument, validate in arguments.items():
            # if argument is mandatory add to count, if not mandatory set "has" to skip error
            mandatory = any(entry == "mandatory:yes" for entry in validate)
            if not mandatory:
                has_argument = True
                continue
            count_arguments += 1
            if self.__get_arg(argument):
                has_argument = True
                count_set += 1
        # for all set, True only if all are set
        if all_set is True:
            has_argument = count_set == count_arguments

        return has_argument

    # MARK: get argument from args dict
    def __get_arg(self, entry: str) -> Any:
        """
        check if an argument entry exists, if None -> returns None else value of argument

        Arguments:
            entry {str} -- _description_

        Returns:
            Any -- _description_
        """
        if self.args.get(entry) is None:
            return None
        return self.args.get(entry)

    # MARK: error print
    def __print(self, msg: str, level: str, print_error: bool = True) -> str:
        """
        print out error, if Log class is set then print to log instead

        Arguments:
            msg {str} -- _description_
            level {str} -- _description_

        Keyword Arguments:
            print_error {bool} -- _description_ (default: {True})
        """
        if self.log is not None:
            if not Log.validate_log_level(level):
                level = 'ERROR'
            self.log.logger.log(Log.get_log_level_int(level), msg, stacklevel=2)
        if self.log is None or self.always_print:
            if print_error:
                print(f"[SettingsLoader] {msg}")
        if level == 'ERROR':
            # remove any prefix [!] for error message list
            self.__error_msg.append(msg.replace('[!] ', '').strip())
        return msg


# __END__
```
`src/corelibs/config_handling/settings_loader_handling/settings_loader_check.py` (new file, 81 lines added)

```python
"""
Class of checks that can be run on value entries
"""

from typing import TypedDict
from corelibs.check_handling.regex_constants import (
    EMAIL_BASIC_REGEX, DOMAIN_WITH_LOCALHOST_REGEX, DOMAIN_WITH_LOCALHOST_PORT_REGEX, DOMAIN_REGEX
)


class SettingsLoaderCheckValue(TypedDict):
    """Settings check entries"""

    regex: str
    # if None, then on error we exit, else we clean up data
    regex_clean: str | None
    replace: str


class SettingsLoaderCheck:
    """
    check:<NAME> or check:list+<NAME>
    """

    CHECK_SETTINGS: dict[str, SettingsLoaderCheckValue] = {
        "int": {
            "regex": r"^[0-9]+$",
            "regex_clean": r"[^0-9]",
            "replace": "",
        },
        "string.alphanumeric": {
            "regex": r"^[a-zA-Z0-9]+$",
            "regex_clean": r"[^a-zA-Z0-9]",
            "replace": "",
        },
        "string.alphanumeric.lower.dash": {
            "regex": r"^[a-z0-9-]+$",
            "regex_clean": r"[^a-z0-9-]",
            "replace": "",
        },
        # A-Z a-z 0-9 _ - . ONLY
        # This one does not remove, but replaces with _
        "string.alphanumeric.extended.replace": {
            "regex": r"^[_.a-zA-Z0-9-]+$",
            "regex_clean": r"[^_.a-zA-Z0-9-]",
            "replace": "_",
        },
        # This does a basic email check, only alphanumeric with special characters
        "string.email.basic": {
            "regex": EMAIL_BASIC_REGEX,
            "regex_clean": None,
            "replace": "",
        },
        # Domain check, including localhost, no port
        "string.domain.with-localhost": {
            "regex": DOMAIN_WITH_LOCALHOST_REGEX,
            "regex_clean": None,
            "replace": "",
        },
        # Domain check, with localhost and port
        "string.domain.with-localhost.port": {
            "regex": DOMAIN_WITH_LOCALHOST_PORT_REGEX,
            "regex_clean": None,
            "replace": "",
        },
        # Domain check, no pure localhost allowed
        "string.domain": {
            "regex": DOMAIN_REGEX,
            "regex_clean": None,
            "replace": "",
        },
        # Basic date check, does not validate date itself
        "string.date": {
            "regex": r"^\d{4}[/-]\d{1,2}[/-]\d{1,2}$",
            "regex_clean": None,
            "replace": "",
        }
    }


# __END__
```
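To make the check mini-language of `load_settings` concrete, here is a minimal sketch; the `settings.ini` file, the `[database]` block and its keys are hypothetical, only the check names come from `SettingsLoaderCheck.CHECK_SETTINGS` above:

```python
from pathlib import Path
from corelibs.config_handling.settings_loader import SettingsLoader

# hypothetical settings.ini content:
# [database]
# host = db.example.com:5432
# timeout = 30
# admins = alice@example.com, bob@example.com
loader = SettingsLoader(args={}, config_file=Path("settings.ini"))
db_settings = loader.load_settings(
    "database",
    {
        # must exist, must look like a domain (localhost and port allowed)
        "host": ["mandatory:yes", "check:string.domain.with-localhost.port"],
        # digits only, converted to int, must be between 1 and 300
        "timeout": ["check:int", "convert:int", "range:1-300"],
        # comma separated list, each element checked as a basic email
        "admins": ["split:,", "check:string.email.basic"],
    },
)
```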
`src/corelibs/csv_handling/csv_interface.py` (new file, 170 lines added)

```python
"""
Write to CSV file
- each class set is one file write with one header set
"""

from typing import Any, Sequence
from pathlib import Path
from collections import Counter
import csv
from corelibs.file_handling.file_bom_encoding import is_bom_encoded, is_bom_encoded_info
from corelibs.exceptions.csv_exceptions import (
    NoCsvReader, CompulsoryCsvHeaderCheckFailed, CsvHeaderDataMissing
)

ENCODING = 'utf-8'
ENCODING_UTF8_SIG = 'utf-8-sig'
DELIMITER = ","
QUOTECHAR = '"'
# type: _QuotingType
QUOTING = csv.QUOTE_MINIMAL


class CsvWriter:
    """
    write to a CSV file
    """

    def __init__(
        self,
        file_name: Path,
        header_mapping: dict[str, str],
        header_order: list[str] | None = None,
        encoding: str = ENCODING,
        delimiter: str = DELIMITER,
        quotechar: str = QUOTECHAR,
        quoting: Any = QUOTING,
    ):
        self.__file_name = file_name
        # Key: index for write for the line dict, Values: header entries
        self.header_mapping = header_mapping
        self.header: Sequence[str] = list(header_mapping.values())
        self.__delimiter = delimiter
        self.__quotechar = quotechar
        self.__quoting = quoting
        self.__encoding = encoding
        self.csv_file_writer = self.__open_csv(header_order)

    def __open_csv(self, header_order: list[str] | None) -> csv.DictWriter[str]:
        """
        open csv file for writing, write headers

        Note that if there is no header_order set we use the order in header dictionary

        Arguments:
            line {list[str] | None} -- optional dedicated header order

        Returns:
            csv.DictWriter[str] | None: _description_
        """
        # if header order is set, make sure all header value fields exist
        if not self.header:
            raise CsvHeaderDataMissing("No header data available to write CSV file")
        header_values = self.header
        if header_order is not None:
            if Counter(header_values) != Counter(header_order):
                raise CompulsoryCsvHeaderCheckFailed(
                    "header order does not match header values: "
                    f"{', '.join(header_values)} != {', '.join(header_order)}"
                )
            header_values = header_order
        # no duplicates
        if len(header_values) != len(set(header_values)):
            raise CompulsoryCsvHeaderCheckFailed(f"Header must have unique values only: {', '.join(header_values)}")
        try:
            fp = open(
                self.__file_name,
                "w",
                encoding=self.__encoding
            )
            csv_file_writer = csv.DictWriter(
                fp,
                fieldnames=header_values,
                delimiter=self.__delimiter,
                quotechar=self.__quotechar,
                quoting=self.__quoting,
            )
            csv_file_writer.writeheader()
            return csv_file_writer
        except OSError as err:
            raise NoCsvReader(f"Could not open CSV file for writing: {err}") from err

    def write_csv(self, line: dict[str, str]) -> None:
        """
        write member csv line

        Arguments:
            line {dict[str, str]} -- _description_

        Returns:
            bool -- _description_
        """
        csv_row: dict[str, Any] = {}
        # only write entries that are in the header list
        for key, value in self.header_mapping.items():
            csv_row[value] = line[key]
        self.csv_file_writer.writerow(csv_row)


class CsvReader:
    """
    read from a CSV file
    """

    def __init__(
        self,
        file_name: Path,
        header_check: Sequence[str] | None = None,
        encoding: str = ENCODING,
        delimiter: str = DELIMITER,
        quotechar: str = QUOTECHAR,
        quoting: Any = QUOTING,
    ):
        self.__file_name = file_name
        self.__header_check = header_check
        self.__delimiter = delimiter
        self.__quotechar = quotechar
        self.__quoting = quoting
        self.__encoding = encoding
        self.header: Sequence[str] | None = None
        self.csv_file_reader = self.__open_csv()

    def __open_csv(self) -> csv.DictReader[str]:
        """
        open csv file for reading

        Returns:
            csv.DictReader | None: _description_
        """
        try:
            # if UTF style check if this is BOM
            if self.__encoding.lower().startswith('utf-') and is_bom_encoded(self.__file_name):
                bom_info = is_bom_encoded_info(self.__file_name)
                if bom_info['encoding'] == 'utf-8':
                    self.__encoding = ENCODING_UTF8_SIG
                else:
                    self.__encoding = bom_info['encoding'] or self.__encoding
            fp = open(
                self.__file_name,
                "r", encoding=self.__encoding
            )
            csv_file_reader = csv.DictReader(
                fp,
                delimiter=self.__delimiter,
                quotechar=self.__quotechar,
                quoting=self.__quoting,
            )
            self.header = csv_file_reader.fieldnames
            if not self.header:
                raise CsvHeaderDataMissing("No header data available in CSV file")
            if self.__header_check is not None:
                header_diff = set(self.__header_check).difference(set(self.header or []))
                if header_diff:
                    raise CompulsoryCsvHeaderCheckFailed(
                        f"CSV header does not match expected header: {', '.join(header_diff)} missing"
                    )
            return csv_file_reader
        except OSError as err:
            raise NoCsvReader(f"Could not open CSV file for reading: {err}") from err


# __END__
```
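A minimal round-trip sketch for the two classes above; the file name and columns are invented for illustration:

```python
from pathlib import Path
from corelibs.csv_handling.csv_interface import CsvWriter, CsvReader

csv_file = Path("members.csv")

# header_mapping: keys index the row dicts, values become the CSV header line
writer = CsvWriter(csv_file, header_mapping={"uid": "User ID", "mail": "Email"})
writer.write_csv({"uid": "1001", "mail": "alice@example.com"})
# the class keeps the file handle open; release the writer so the
# buffer is flushed (CPython closes the file on refcount drop)
del writer

# read back, with a compulsory header check
reader = CsvReader(csv_file, header_check=["User ID", "Email"])
for row in reader.csv_file_reader:
    print(row["User ID"], row["Email"])
```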
Removed file (93 lines):

```python
"""
Write to CSV file
- each class set is one file write with one header set
"""

from typing import Any
from pathlib import Path
from collections import Counter
import csv


class CsvWriter:
    """
    write to a CSV file
    """

    def __init__(
        self,
        path: Path,
        file_name: str,
        header: dict[str, str],
        header_order: list[str] | None = None
    ):
        self.path = path
        self.file_name = file_name
        # Key: index for write for the line dict, Values: header entries
        self.header = header
        self.csv_file_writer = self.__open_csv(header_order)

    def __open_csv(self, header_order: list[str] | None) -> 'csv.DictWriter[str] | None':
        """
        open csv file for writing, write headers

        Note that if there is no header_order set we use the order in header dictionary

        Arguments:
            line {list[str] | None} -- optional dedicated header order

        Returns:
            csv.DictWriter[str] | None: _description_
        """
        # if header order is set, make sure all header value fields exist
        header_values = self.header.values()
        if header_order is not None:
            if Counter(header_values) != Counter(header_order):
                print(
                    "header order does not match header values: "
                    f"{', '.join(header_values)} != {', '.join(header_order)}"
                )
                return None
            header_values = header_order
        # no duplicates
        if len(header_values) != len(set(header_values)):
            print(f"Header must have unique values only: {', '.join(header_values)}")
            return None
        try:
            fp = open(
                self.path.joinpath(self.file_name),
                "w", encoding="utf-8"
            )
            csv_file_writer = csv.DictWriter(
                fp,
                fieldnames=header_values,
                delimiter=",",
                quotechar='"',
                quoting=csv.QUOTE_MINIMAL,
            )
            csv_file_writer.writeheader()
            return csv_file_writer
        except OSError as err:
            print("OS error:", err)
            return None

    def write_csv(self, line: dict[str, str]) -> bool:
        """
        write member csv line

        Arguments:
            line {dict[str, str]} -- _description_

        Returns:
            bool -- _description_
        """
        if self.csv_file_writer is None:
            return False
        csv_row: dict[str, Any] = {}
        # only write entries that are in the header list
        for key, value in self.header.items():
            csv_row[value] = line[key]
        self.csv_file_writer.writerow(csv_row)
        return True


# __END__
```
`src/corelibs/datetime_handling/__init__.py` (new, empty file)
`src/corelibs/datetime_handling/datetime_helpers.py` (new file, 235 lines added)

```python
"""
Various string based date/time helpers
"""

from datetime import datetime, time
from warnings import deprecated
from zoneinfo import ZoneInfo
from corelibs_datetime import datetime_helpers


@deprecated("Use corelibs_datetime.datetime_helpers.create_time instead")
def create_time(timestamp: float, timestamp_format: str = "%Y-%m-%d %H:%M:%S") -> str:
    """
    just takes a timestamp and prints out human readable format

    Arguments:
        timestamp {float} -- _description_

    Keyword Arguments:
        timestamp_format {_type_} -- _description_ (default: {"%Y-%m-%d %H:%M:%S"})

    Returns:
        str -- _description_
    """
    return datetime_helpers.create_time(timestamp, timestamp_format)


@deprecated("Use corelibs_datetime.datetime_helpers.get_system_timezone instead")
def get_system_timezone():
    """Get system timezone using datetime's automatic detection"""
    # Get current time with system timezone
    return datetime_helpers.get_system_timezone()


@deprecated("Use corelibs_datetime.datetime_helpers.parse_timezone_data instead")
def parse_timezone_data(timezone_tz: str = '') -> ZoneInfo:
    """
    parses a string to get the ZoneInfo
    If not set or not valid gets local time,
    if that is not possible get UTC

    Keyword Arguments:
        timezone_tz {str} -- _description_ (default: {''})

    Returns:
        ZoneInfo -- _description_
    """
    return datetime_helpers.parse_timezone_data(timezone_tz)


@deprecated("Use corelibs_datetime.datetime_helpers.get_datetime_iso8601 instead")
def get_datetime_iso8601(timezone_tz: str | ZoneInfo = '', sep: str = 'T', timespec: str = 'microseconds') -> str:
    """
    set a datetime in the iso8601 format with microseconds

    Returns:
        str -- _description_
    """
    try:
        return datetime_helpers.get_datetime_iso8601(timezone_tz, sep, timespec)
    except KeyError as e:
        raise ValueError(f"Deprecated ValueError, change to KeyError: {e}") from e


@deprecated("Use corelibs_datetime.datetime_helpers.validate_date instead")
def validate_date(date: str, not_before: datetime | None = None, not_after: datetime | None = None) -> bool:
    """
    check if Y-m-d or Y/m/d are parsable and valid

    Arguments:
        date {str} -- _description_

    Returns:
        bool -- _description_
    """
    return datetime_helpers.validate_date(date, not_before, not_after)


@deprecated("Use corelibs_datetime.datetime_helpers.parse_flexible_date instead")
def parse_flexible_date(
    date_str: str,
    timezone_tz: str | ZoneInfo | None = None,
    shift_time_zone: bool = True
) -> datetime | None:
    """
    Parse date string in multiple formats
    will add time zone info if not None
    on default it will change the TZ and time to the new time zone
    if no TZ info is set in date_str, then localtime is assumed

    Arguments:
        date_str {str} -- _description_

    Keyword Arguments:
        timezone_tz {str | ZoneInfo | None} -- _description_ (default: {None})
        shift_time_zone {bool} -- _description_ (default: {True})

    Returns:
        datetime | None -- _description_
    """
    return datetime_helpers.parse_flexible_date(
        date_str,
        timezone_tz,
        shift_time_zone
    )


@deprecated("Use corelibs_datetime.datetime_helpers.compare_dates instead")
def compare_dates(date1_str: str, date2_str: str) -> None | bool:
    """
    compare two dates, if the first one is newer than the second one return True
    If the dates are equal then false will be returned
    on error return None

    Arguments:
        date1_str {str} -- _description_
        date2_str {str} -- _description_

    Returns:
        None | bool -- _description_
    """
    return datetime_helpers.compare_dates(date1_str, date2_str)


@deprecated("Use corelibs_datetime.datetime_helpers.find_newest_datetime_in_list instead")
def find_newest_datetime_in_list(date_list: list[str]) -> None | str:
    """
    Find the newest date from a list of ISO 8601 formatted date strings.
    Handles potential parsing errors gracefully.

    Args:
        date_list (list): List of date strings in format '2025-08-06T16:17:39.747+09:00'

    Returns:
        str: The date string with the newest/latest date, or None if list is empty or all dates are invalid
    """
    return datetime_helpers.find_newest_datetime_in_list(date_list)


@deprecated("Use corelibs_datetime.datetime_helpers.parse_day_of_week_range instead")
def parse_day_of_week_range(dow_days: str) -> list[tuple[int, str]]:
    """
    Parse a day of week list/range string and return a list of tuples with day index and name.
    Allowed are short (eg Mon) or long names (eg Monday).

    Arguments:
        dow_days {str} -- A comma-separated list of days or ranges (e.g., "Mon,Wed-Fri")

    Raises:
        ValueError: If the input format is invalid or if duplicate days are found.

    Returns:
        list[tuple[int, str]] -- A list of tuples containing the day index and name.
    """
    # we have Sun twice because it can be 0 or 7
    # Mon is 1 and Sun is 7, which is ISO standard
    try:
        return datetime_helpers.parse_day_of_week_range(dow_days)
    except KeyError as e:
        raise ValueError(f"Deprecated ValueError, change to KeyError: {e}") from e


@deprecated("Use corelibs_datetime.datetime_helpers.parse_time_range instead")
def parse_time_range(time_str: str, time_format: str = "%H:%M") -> tuple[time, time]:
    """
    Parse a time range string in the format "HH:MM-HH:MM" and return a tuple of two time objects.

    Arguments:
        time_str {str} -- The time range string to parse.

    Raises:
        ValueError: Invalid time block set
        ValueError: Invalid time format
        ValueError: Start time must be before end time

    Returns:
        tuple[time, time] -- start time, end time: leading zeros formatted
    """
    try:
        return datetime_helpers.parse_time_range(time_str, time_format)
    except KeyError as e:
        raise ValueError(f"Deprecated ValueError, change to KeyError: {e}") from e


@deprecated("Use corelibs_datetime.datetime_helpers.times_overlap_or_connect instead")
def times_overlap_or_connect(time1: tuple[time, time], time2: tuple[time, time], allow_touching: bool = False) -> bool:
    """
    Check if two time ranges overlap or connect

    Args:
        time1 (tuple): (start_time, end_time) for first range
        time2 (tuple): (start_time, end_time) for second range
        allow_touching (bool): If True, touching ranges (e.g., 8:00-10:00 and 10:00-12:00) are allowed

    Returns:
        bool: True if ranges overlap or connect (based on allow_touching)
    """
    return datetime_helpers.times_overlap_or_connect(time1, time2, allow_touching)


@deprecated("Use corelibs_datetime.datetime_helpers.is_time_in_range instead")
```
|
||||
def is_time_in_range(current_time: str, start_time: str, end_time: str) -> bool:
|
||||
"""
|
||||
Check if current_time is within start_time and end_time (inclusive)
|
||||
Time format: "HH:MM" (24-hour format)
|
||||
|
||||
Arguments:
|
||||
current_time {str} -- _description_
|
||||
start_time {str} -- _description_
|
||||
end_time {str} -- _description_
|
||||
|
||||
Returns:
|
||||
bool -- _description_
|
||||
"""
|
||||
# Convert string times to time objects
|
||||
return datetime_helpers.is_time_in_range(current_time, start_time, end_time)
|
||||
|
||||
|
||||
@deprecated("Use corelibs_datetime.datetime_helpers.reorder_weekdays_from_today instead")
|
||||
def reorder_weekdays_from_today(base_day: str) -> dict[int, str]:
|
||||
"""
|
||||
Reorder the days of the week starting from the specified base_day.
|
||||
|
||||
Arguments:
|
||||
base_day {str} -- The day to start the week from (e.g., "Mon").
|
||||
|
||||
Returns:
|
||||
dict[int, str] -- A dictionary mapping day numbers to day names.
|
||||
"""
|
||||
try:
|
||||
return datetime_helpers.reorder_weekdays_from_today(base_day)
|
||||
except KeyError as e:
|
||||
raise ValueError(f"Deprecated ValueError, change to KeyError: {e}") from e
|
||||
|
||||
# __END__
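All wrappers above follow the same shim pattern: forward to the new corelibs_datetime implementation and, where the new API switched to KeyError, translate back to the ValueError the old API raised. A minimal usage sketch (inputs hypothetical; assumes Python 3.13+ for warnings.deprecated):

import warnings

with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")
    days = parse_day_of_week_range("Mon,Wed-Fri")  # e.g. [(1, 'Mon'), (3, 'Wed'), (4, 'Thu'), (5, 'Fri')]
    assert any(issubclass(w.category, DeprecationWarning) for w in caught)

try:
    parse_day_of_week_range("Mon,Nonday")  # invalid day name
except ValueError:
    pass  # old callers keep catching ValueError, even though the new helper raises KeyError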
88
src/corelibs/datetime_handling/timestamp_convert.py
Normal file
@@ -0,0 +1,88 @@
"""
Convert timestamp strings with time units into seconds and vice versa.
"""

from warnings import deprecated
from corelibs_datetime import timestamp_convert
from corelibs_datetime.timestamp_convert import TimeParseError as NewTimeParseError, TimeUnitError as NewTimeUnitError


@deprecated("Use corelibs_datetime.timestamp_convert.TimeParseError instead")
class TimeParseError(Exception):
    """Custom exception for time parsing errors."""


@deprecated("Use corelibs_datetime.timestamp_convert.TimeUnitError instead")
class TimeUnitError(Exception):
    """Custom exception for time unit errors."""


@deprecated("Use corelibs_datetime.timestamp_convert.convert_to_seconds instead")
def convert_to_seconds(time_string: str | int | float) -> int:
    """
    Convert a string with time units into seconds
    The following units are allowed
    Y: 365 days
    M: 30 days
    d, h, m, s

    Arguments:
        time_string {str} -- _description_

    Raises:
        ValueError: _description_

    Returns:
        int -- _description_
    """

    # skip out if this is a number of any type
    # numbers will be made float, rounded and then converted to int
    try:
        return timestamp_convert.convert_to_seconds(time_string)
    except NewTimeParseError as e:
        raise TimeParseError(f"Deprecated, use corelibs_datetime.timestamp_convert.TimeParseError: {e}") from e
    except NewTimeUnitError as e:
        raise TimeUnitError(f"Deprecated, use corelibs_datetime.timestamp_convert.TimeUnitError: {e}") from e


@deprecated("Use corelibs_datetime.timestamp_convert.seconds_to_string instead")
def seconds_to_string(seconds: str | int | float, show_microseconds: bool = False) -> str:
    """
    Convert seconds to a compact human readable format (e.g., "1d 2h 3m 4.567s")
    Zero values are omitted.
    Microseconds, if requested, are added as the fractional part of the seconds.
    Supports negative values with "-" prefix
    if not int or float, will return as is

    Args:
        seconds (float): Time in seconds (can be negative)
        show_microseconds (bool): Whether to show microseconds precision

    Returns:
        str: Compact human readable time format
    """
    return timestamp_convert.seconds_to_string(seconds, show_microseconds)


@deprecated("Use corelibs_datetime.timestamp_convert.convert_timestamp instead")
def convert_timestamp(timestamp: float | int | str, show_microseconds: bool = True) -> str:
    """
    format a timestamp into a human readable format. This function will add 0 values between set values,
    for example if we have 1d 1s it would output 1d 0h 0m 1s
    Microseconds will be shown if set, and added with ms at the end
    Negative values will be prefixed with "-"
    if not int or float, will return as is

    Arguments:
        timestamp {float} -- _description_

    Keyword Arguments:
        show_microseconds {bool} -- _description_ (default: {True})

    Returns:
        str -- _description_
    """
    return timestamp_convert.convert_timestamp(timestamp, show_microseconds)

# __END__
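A short usage sketch for the conversions above (the exact unit syntax accepted by the new corelibs_datetime implementation is an assumption based on the docstrings):

convert_to_seconds("1h 30m")   # -> 5400
convert_to_seconds(90.4)       # plain numbers are rounded and returned as int -> 90
seconds_to_string(93784)       # -> "1d 2h 3m 4s", zero parts omitted
convert_timestamp(86401)       # -> "1d 0h 0m 1s", zero parts filled in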
21
src/corelibs/datetime_handling/timestamp_strings.py
Normal file
@@ -0,0 +1,21 @@
"""
Current timestamp strings and time zones
"""

from warnings import deprecated
from zoneinfo import ZoneInfo
from corelibs_datetime import timestamp_strings


class TimestampStrings(timestamp_strings.TimestampStrings):
    """
    set default time stamps
    """

    TIME_ZONE: str = 'Asia/Tokyo'

    @deprecated("Use corelibs_datetime.timestamp_strings.TimestampStrings instead")
    def __init__(self, time_zone: str | ZoneInfo | None = None):
        super().__init__(time_zone)

# __END__
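Usage sketch (what the parent class exposes beyond the constructor is not shown here, so only construction is illustrated; whether the parent consults the TIME_ZONE class attribute is an assumption):

ts = TimestampStrings()           # emits a DeprecationWarning, class default zone is 'Asia/Tokyo'
ts_utc = TimestampStrings("UTC")  # an explicit zone overrides the class default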
0
src/corelibs/db_handling/__init__.py
Normal file
76
src/corelibs/db_handling/sql_main.py
Normal file
@@ -0,0 +1,76 @@
"""
Main SQL base for any SQL calls
This is a wrapper for SQLiteIO or other future DB interfaces
[Note: at the moment only SQLiteIO is implemented]
- on class creation a connection is made, with ValueError on failure
- connect method checks if already connected and warns
- connect method fails with ValueError if no valid target is selected (SQL wrapper type)
- connected check class method
- a process method that returns data as a list, or False on end or error

TODO: adapt more CoreLibs DB IO class flow here
"""

from typing import TYPE_CHECKING, Any, Literal
from corelibs.debug_handling.debug_helpers import call_stack
from corelibs.db_handling.sqlite_io import SQLiteIO
if TYPE_CHECKING:
    from corelibs.logging_handling.log import Logger


IDENT_SPLIT_CHARACTER: str = ':'


class SQLMain:
    """Main SQL interface class"""
    def __init__(self, log: 'Logger', db_ident: str):
        self.log = log
        self.dbh: SQLiteIO | None = None
        self.db_target: str | None = None
        self.connect(db_ident)
        if not self.connected():
            raise ValueError(f'Failed to connect to database [{call_stack()}]')

    def connect(self, db_ident: str):
        """setup basic connection"""
        if self.dbh is not None and self.dbh.conn is not None:
            self.log.warning(f"A database connection already exists for: {self.db_target} [{call_stack()}]")
            return
        # split only on the first separator so the DSN itself may contain ':'
        self.db_target, db_dsn = db_ident.split(IDENT_SPLIT_CHARACTER, 1)
        match self.db_target:
            case 'sqlite':
                # this is a Path only at the moment
                self.dbh = SQLiteIO(self.log, db_dsn, row_factory='Dict')
            case _:
                raise ValueError(f'SQL interface for {self.db_target} is not implemented [{call_stack()}]')
        if not self.dbh.db_connected():
            raise ValueError(f"DB Connection failed for: {self.db_target} [{call_stack()}]")

    def close(self):
        """close connection"""
        if self.dbh is None or not self.connected():
            return
        # self.log.info(f"Close DB Connection: {self.db_target} [{call_stack()}]")
        self.dbh.db_close()

    def connected(self) -> bool:
        """check connection"""
        if self.dbh is None or not self.dbh.db_connected():
            self.log.warning(f"No connection [{call_stack()}]")
            return False
        return True

    def process_query(
        self, query: str, params: tuple[Any, ...] | None = None
    ) -> list[tuple[Any, ...]] | list[dict[str, Any]] | Literal[False]:
        """mini wrapper for execute query"""
        if self.dbh is not None:
            result = self.dbh.execute_query(query, params)
            if result is False:
                return False
        else:
            self.log.error(f"Problem connecting to db: {self.db_target} [{call_stack()}]")
            return False
        return result

# __END__
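Usage sketch (the database path and logger instance are hypothetical):

db = SQLMain(log, "sqlite:/tmp/example.sqlite3")
rows = db.process_query("SELECT name FROM sqlite_master WHERE type = ?", ("table",))
if rows is not False:
    for row in rows:  # row_factory='Dict', so each row is a dict
        print(row["name"])
db.close()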
214
src/corelibs/db_handling/sqlite_io.py
Normal file
@@ -0,0 +1,214 @@
"""
SQLite DB::IO
Will be moved to the CoreLibs
also method names are subject to change
"""

# import gc
from pathlib import Path
from typing import Any, Literal, TYPE_CHECKING
import sqlite3
from corelibs.debug_handling.debug_helpers import call_stack
if TYPE_CHECKING:
    from corelibs.logging_handling.log import Logger


class SQLiteIO():
    """Mini SQLite interface"""

    def __init__(
        self,
        log: 'Logger',
        db_name: str | Path,
        autocommit: bool = False,
        enable_fkey: bool = True,
        row_factory: str | None = None
    ):
        self.log = log
        self.db_name = db_name
        self.autocommit = autocommit
        self.enable_fkey = enable_fkey
        self.row_factory = row_factory
        self.conn: sqlite3.Connection | None = self.db_connect()

    # def __del__(self):
    #     self.db_close()

    def db_connect(self) -> sqlite3.Connection | None:
        """
        Connect to SQLite database, create if it doesn't exist
        """
        try:
            # Connect to database (creates if doesn't exist)
            self.conn = sqlite3.connect(self.db_name, autocommit=self.autocommit)
            # honor the enable_fkey flag instead of hardcoding True
            self.conn.setconfig(sqlite3.SQLITE_DBCONFIG_ENABLE_FKEY, self.enable_fkey)
            # self.conn.execute("PRAGMA journal_mode=WAL")
            # self.log.debug(f"Connected to database: {self.db_name}")

            def dict_factory(cursor: sqlite3.Cursor, row: list[Any]):
                fields = [column[0] for column in cursor.description]
                return dict(zip(fields, row))

            match self.row_factory:
                case 'Row':
                    self.conn.row_factory = sqlite3.Row
                case 'Dict':
                    self.conn.row_factory = dict_factory
                case _:
                    self.conn.row_factory = None

            return self.conn
        except (sqlite3.Error, sqlite3.OperationalError) as e:
            self.log.error(f"Error connecting to database [{type(e).__name__}] [{self.db_name}]: {e} [{call_stack()}]")
            self.log.error(f"Error code: {e.sqlite_errorcode if hasattr(e, 'sqlite_errorcode') else 'N/A'}")
            self.log.error(f"Error name: {e.sqlite_errorname if hasattr(e, 'sqlite_errorname') else 'N/A'}")
            return None

    def db_close(self):
        """close connection"""
        if self.conn is not None:
            self.conn.close()
            self.conn = None

    def db_connected(self) -> bool:
        """
        Return True if the db connection is not None

        Returns:
            bool -- _description_
        """
        return self.conn is not None

    def __content_exists(self, content_name: str, sql_type: str) -> bool:
        """
        Check if some content name for a certain type exists

        Arguments:
            content_name {str} -- _description_
            sql_type {str} -- _description_

        Returns:
            bool -- _description_
        """
        if self.conn is None:
            return False
        try:
            cursor = self.conn.cursor()
            cursor.execute("""
                SELECT name
                FROM sqlite_master
                WHERE type = ? AND name = ?
            """, (sql_type, content_name,))
            return cursor.fetchone() is not None
        except sqlite3.Error as e:
            self.log.error(f"Error checking table [{content_name}/{sql_type}] existence: {e} [{call_stack()}]")
            return False

    def table_exists(self, table_name: str) -> bool:
        """
        Check if a table exists in the database
        """
        return self.__content_exists(table_name, 'table')

    def trigger_exists(self, trigger_name: str) -> bool:
        """
        Check if a trigger exists
        """
        return self.__content_exists(trigger_name, 'trigger')

    def index_exists(self, index_name: str) -> bool:
        """
        Check if an index exists
        """
        return self.__content_exists(index_name, 'index')

    def meta_data_detail(self, table_name: str) -> list[tuple[Any, ...]] | list[dict[str, Any]] | Literal[False]:
        """table detail"""
        query_show_table = """
            SELECT
                ti.cid, ti.name, ti.type, ti.'notnull', ti.dflt_value, ti.pk,
                il_ii.idx_name, il_ii.idx_unique, il_ii.idx_origin, il_ii.idx_partial
            FROM
                sqlite_schema AS m,
                pragma_table_info(m.name) AS ti
            LEFT JOIN (
                SELECT
                    il.name AS idx_name, il.'unique' AS idx_unique, il.origin AS idx_origin, il.partial AS idx_partial,
                    ii.cid AS tbl_cid
                FROM
                    sqlite_schema AS m,
                    pragma_index_list(m.name) AS il,
                    pragma_index_info(il.name) AS ii
                WHERE m.name = ?1
            ) AS il_ii ON (ti.cid = il_ii.tbl_cid)
            WHERE
                m.name = ?1
        """
        return self.execute_query(query_show_table, (table_name,))

    def execute_cursor(
        self, query: str, params: tuple[Any, ...] | None = None
    ) -> sqlite3.Cursor | Literal[False]:
        """execute a cursor, used in execute_query, or returned as-is for fetch_row"""
        if self.conn is None:
            self.log.warning(f"No connection [{call_stack()}]")
            return False
        try:
            cursor = self.conn.cursor()
            if params:
                cursor.execute(query, params)
            else:
                cursor.execute(query)
            return cursor
        except sqlite3.Error as e:
            self.log.error(f"Error during executing cursor [{query}:{params}]: {e} [{call_stack()}]")
            return False

    def execute_query(
        self, query: str, params: tuple[Any, ...] | None = None
    ) -> list[tuple[Any, ...]] | list[dict[str, Any]] | Literal[False]:
        """query execute with or without params, returns result"""
        if self.conn is None:
            self.log.warning(f"No connection [{call_stack()}]")
            return False
        try:
            if (cursor := self.execute_cursor(query, params)) is False:
                return False
            # fetch before commit because we need to get the RETURNING rows first
            result = cursor.fetchall()
            # this is for INSERT/UPDATE/CREATE only
            self.conn.commit()
            return result
        except sqlite3.Error as e:
            self.log.error(f"Error during executing query [{query}:{params}]: {e} [{call_stack()}]")
            return False

    def return_one(
        self, query: str, params: tuple[Any, ...] | None = None
    ) -> tuple[Any, ...] | dict[str, Any] | Literal[False] | None:
        """return one row, only for SELECT"""
        if self.conn is None:
            self.log.warning(f"No connection [{call_stack()}]")
            return False
        try:
            if (cursor := self.execute_cursor(query, params)) is False:
                return False
            return cursor.fetchone()
        except sqlite3.Error as e:
            self.log.error(f"Error during return one: {e} [{call_stack()}]")
            return False

    def fetch_row(
        self, cursor: sqlite3.Cursor | Literal[False]
    ) -> tuple[Any, ...] | dict[str, Any] | Literal[False] | None:
        """read from cursor"""
        if self.conn is None or cursor is False:
            self.log.warning(f"No connection [{call_stack()}]")
            return False
        try:
            return cursor.fetchone()
        except sqlite3.Error as e:
            self.log.error(f"Error during fetch row: {e} [{call_stack()}]")
            return False

# __END__
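Usage sketch for streaming rows through execute_cursor/fetch_row (the file name, table and logger are hypothetical):

io = SQLiteIO(log, Path("/tmp/example.sqlite3"), row_factory='Dict')
if io.table_exists("users"):
    cursor = io.execute_cursor("SELECT * FROM users")
    while (row := io.fetch_row(cursor)):  # fetch_row yields None at the end (or False on error)
        print(row)
io.db_close()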
76
src/corelibs/debug_handling/debug_helpers.py
Normal file
@@ -0,0 +1,76 @@
"""
Various debug helpers
"""

import traceback
import os
import sys
from typing import Tuple, Type
from types import TracebackType

# _typeshed.OptExcInfo
OptExcInfo = Tuple[None, None, None] | Tuple[Type[BaseException], BaseException, TracebackType]


def call_stack(
    start: int = 0,
    skip_last: int = -1,
    separator: str = ' -> ',
    reset_start_if_empty: bool = False
) -> str:
    """
    get the trace for the last entry

    Keyword Arguments:
        start {int} -- start index; if too high the output will be empty unless reset_start_if_empty is set (default: {0})
        skip_last {int} -- how many of the last entries are skipped, defaults to -1 for the current method (default: {-1})
        separator {str} -- stack separator, if empty defaults to ' -> ' (default: {' -> '})
        reset_start_if_empty {bool} -- if no stack is returned because start is too high,
                                       reset to 0 for a full read (default: {False})

    Returns:
        str -- _description_
    """
    # stack = traceback.extract_stack()[start:depth]
    # how many of the last entries we skip (so we do not get self), default is -1
    # start cannot be negative
    if skip_last > 0:
        skip_last = skip_last * -1
    stack = traceback.extract_stack()
    __stack = stack[start:skip_last]
    # start possibly too high, reset start to 0
    if not __stack and reset_start_if_empty:
        start = 0
        __stack = stack[start:skip_last]
    if not separator:
        separator = ' -> '
    # print(f"* HERE: {dump_data(stack)}")
    return f"{separator}".join(f"{os.path.basename(f.filename)}:{f.name}:{f.lineno}" for f in __stack)


def exception_stack(
    exc_stack: OptExcInfo | None = None,
    separator: str = ' -> '
) -> str:
    """
    Exception traceback; if no sys.exc_info is passed in, read it internally

    Keyword Arguments:
        exc_stack {OptExcInfo | None} -- _description_ (default: {None})
        separator {str} -- _description_ (default: {' -> '})

    Returns:
        str -- _description_
    """
    if exc_stack is not None:
        _, _, exc_traceback = exc_stack
    else:
        _, _, exc_traceback = sys.exc_info()
    stack = traceback.extract_tb(exc_traceback)
    if not separator:
        separator = ' -> '
    # print(f"* HERE: {dump_data(stack)}")
    return f"{separator}".join(f"{os.path.basename(f.filename)}:{f.name}:{f.lineno}" for f in stack)

# __END__
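Usage sketch (the printed chains are illustrative; real output depends on the call site):

def load_config():
    # embed the caller chain in a log line; skip_last drops call_stack itself
    print(f"load_config called [{call_stack(skip_last=1)}]")

load_config()  # e.g. "main.py:<module>:12 -> main.py:load_config:8"

try:
    1 / 0
except ZeroDivisionError:
    print(exception_stack())  # traceback of the currently handled exception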
@@ -6,7 +6,7 @@ import json
from typing import Any


def dump_data(data: dict[Any, Any] | list[Any] | str | None) -> str:
def dump_data(data: Any, use_indent: bool = True) -> str:
    """
    dump formatted output from dict/list

@@ -16,6 +16,7 @@ def dump_data(data: dict[Any, Any] | list[Any] | str | None) -> str:
    Returns:
        str: _description_
    """
    return json.dumps(data, indent=4, ensure_ascii=False, default=str)
    indent = 4 if use_indent else None
    return json.dumps(data, indent=indent, ensure_ascii=False, default=str)

# __END__
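The old/new pairs above widen dump_data to any JSON-serializable value and make the indent optional:

dump_data({"a": 1})                    # pretty-printed with indent=4
dump_data({"a": 1}, use_indent=False)  # compact single-line output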
@@ -4,10 +4,10 @@ Various small helpers for data writing

from typing import TYPE_CHECKING
if TYPE_CHECKING:
    from io import TextIOWrapper
    from io import TextIOWrapper, StringIO


def write_l(line: str, fpl: 'TextIOWrapper | None' = None, print_line: bool = False):
def write_l(line: str, fpl: 'TextIOWrapper | StringIO | None' = None, print_line: bool = False):
    """
    Write a line to screen and to output file

0
src/corelibs/email_handling/__init__.py
Normal file
219
src/corelibs/email_handling/send_email.py
Normal file
@@ -0,0 +1,219 @@
"""
Send email wrapper
"""

import smtplib
from email.message import EmailMessage
from email.header import Header
from email.utils import formataddr, parseaddr
from typing import TYPE_CHECKING, Any
if TYPE_CHECKING:
    from corelibs.logging_handling.log import Logger


class SendEmail:
    """
    send emails based on a template to a list of receivers
    """

    def __init__(
        self,
        log: "Logger",
        settings: dict[str, Any],
        template: dict[str, str],
        from_email: str,
        combined_send: bool = True,
        receivers: list[str] | None = None,
        data: list[dict[str, str]] | None = None,
    ):
        """
        init send email class

        Args:
            template (dict): Dictionary with body and subject
            from_email (str): from email as "Name" <email>
            combined_send (bool): True for sending as one set for all receivers
            receivers (list): list of emails to send to
            data (dict): data to replace in template
        """
        self.log = log
        self.settings = settings
        # internal settings
        self.template = template
        self.from_email = from_email
        self.combined_send = combined_send
        self.receivers = receivers
        self.data = data

    def send_email(
        self,
        data: list[dict[str, str]] | None,
        receivers: list[str] | None,
        template: dict[str, str] | None = None,
        from_email: str | None = None,
        combined_send: bool | None = None,
        test_only: bool | None = None
    ):
        """
        build email and send

        Arguments:
            data {list[dict[str, str]] | None} -- _description_
            receivers {list[str] | None} -- _description_
            combined_send {bool | None} -- _description_

        Keyword Arguments:
            template {dict[str, str] | None} -- _description_ (default: {None})
            from_email {str | None} -- _description_ (default: {None})

        Raises:
            ValueError: _description_
            ValueError: _description_
        """
        if data is None and self.data is not None:
            data = self.data
        if data is None:
            raise ValueError("No replace data set, cannot send email")
        if receivers is None and self.receivers is not None:
            receivers = self.receivers
        if receivers is None:
            raise ValueError("No receivers list set, cannot send email")
        if combined_send is None:
            combined_send = self.combined_send
        if test_only is not None:
            self.settings['test'] = test_only

        if template is None:
            template = self.template
        if from_email is None:
            from_email = self.from_email

        if not template['subject'] or not template['body']:
            raise ValueError("Both Subject and Body must be set")

        self.log.debug(
            "[EMAIL]:\n"
            f"Subject: {template['subject']}\n"
            f"Body: {template['body']}\n"
            f"From: {from_email}\n"
            f"Combined send: {combined_send}\n"
            f"Receivers: {receivers}\n"
            f"Replace data: {data}"
        )

        # send email
        self.send_email_list(
            self.prepare_email_content(
                from_email, template, data
            ),
            receivers,
            combined_send,
            test_only
        )

    def prepare_email_content(
        self,
        from_email: str,
        template: dict[str, str],
        data: list[dict[str, str]],
    ) -> list[EmailMessage]:
        """
        prepare email for sending

        Args:
            template (dict): template data for this email
            data (dict): data to replace in email

        Returns:
            list: Email Message objects as list
        """
        _subject = ""
        _body = ""
        msg: list[EmailMessage] = []
        for replace in data:
            _subject = template["subject"]
            _body = template["body"]
            for key, value in replace.items():
                placeholder = f"{{{{{key}}}}}"
                _subject = _subject.replace(placeholder, value)
                _body = _body.replace(placeholder, value)
            name, addr = parseaddr(from_email)
            if name:
                # Encode the name part with MIME encoding
                encoded_name = str(Header(name, 'utf-8'))
                from_email_encoded = formataddr((encoded_name, addr))
            else:
                from_email_encoded = from_email
            # create a simple email and add subject, from email
            msg_email = EmailMessage()
            # msg.set_content(_body, charset='utf-8', cte='quoted-printable')
            msg_email.set_content(_body, charset="utf-8")
            msg_email["Subject"] = _subject
            msg_email["From"] = from_email_encoded
            # push to array for sending
            msg.append(msg_email)
        return msg

    def send_email_list(
        self,
        emails: list[EmailMessage],
        receivers: list[str],
        combined_send: bool | None = None,
        test_only: bool | None = None
    ):
        """
        send email to receivers list

        Args:
            emails (list): Email Message objects with set body, subject, from as list
            receivers (list): email receivers list
            combined_send (bool): True for sending as one set for all receivers
        """

        if test_only is not None:
            self.settings['test'] = test_only

        # localhost (postfix does the rest)
        smtp = None
        smtp_host = self.settings.get('smtp_host', "localhost")
        try:
            smtp = smtplib.SMTP(smtp_host)
        except ConnectionRefusedError as e:
            self.log.error("Could not open SMTP connection to: %s, %s", smtp_host, e)
        # prepare receiver list
        receivers_encoded: list[str] = []
        for __receiver in receivers:
            to_name, to_addr = parseaddr(__receiver)
            if to_name:
                # Encode the name part with MIME encoding
                encoded_to_name = str(Header(to_name, 'utf-8'))
                receivers_encoded.append(formataddr((encoded_to_name, to_addr)))
            else:
                receivers_encoded.append(__receiver)
        # loop over messages and then over receivers
        for msg in emails:
            if combined_send is True:
                msg["To"] = ", ".join(receivers_encoded)
                if not self.settings.get('test'):
                    if smtp is not None:
                        smtp.send_message(msg, msg["From"], receivers_encoded)
                else:
                    self.log.info(f"[EMAIL] Test, not sending email\n{msg}")
            else:
                for receiver in receivers_encoded:
                    self.log.debug(f"===> Send to: {receiver}")
                    if "To" in msg:
                        msg.replace_header("To", receiver)
                    else:
                        msg["To"] = receiver
                    if not self.settings.get('test'):
                        if smtp is not None:
                            smtp.send_message(msg)
                    else:
                        self.log.info(f"[EMAIL] Test, not sending email\n{msg}")
        # close smtp
        if smtp is not None:
            smtp.quit()

# __END__
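Usage sketch (addresses and settings are hypothetical; template placeholders use the {{key}} syntax shown in prepare_email_content):

mailer = SendEmail(
    log,
    settings={"smtp_host": "localhost", "test": True},  # test=True logs instead of sending
    template={"subject": "Hello {{name}}", "body": "Dear {{name}},\nyour report is ready."},
    from_email='"Ops" <ops@example.com>',
)
mailer.send_email(
    data=[{"name": "Alice"}],
    receivers=['"Alice" <alice@example.com>'],
)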
0
src/corelibs/encryption_handling/__init__.py
Normal file
152
src/corelibs/encryption_handling/symmetric_encryption.py
Normal file
@@ -0,0 +1,152 @@
"""
simple symmetric encryption
Will be moved to CoreLibs
TODO: set key per encryption run
"""

import os
import json
import base64
import hashlib
from typing import TypedDict, cast
from cryptography.fernet import Fernet
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.kdf.pbkdf2 import PBKDF2HMAC


class PackageData(TypedDict):
    """encryption package"""
    encrypted_data: str
    salt: str
    key_hash: str


class SymmetricEncryption:
    """
    simple encryption

    the encrypted package has "encrypted_data" and "salt" as fields; the salt is needed to derive the
    key from the password for decryption
    """

    def __init__(self, password: str):
        if not password:
            raise ValueError("A password must be set")
        self.password = password
        self.password_hash = hashlib.sha256(password.encode('utf-8')).hexdigest()

    def __derive_key_from_password(self, password: str, salt: bytes) -> bytes:
        _password = password.encode('utf-8')
        kdf = PBKDF2HMAC(
            algorithm=hashes.SHA256(),
            length=32,
            salt=salt,
            iterations=100000,
        )
        key = base64.urlsafe_b64encode(kdf.derive(_password))
        return key

    def __encrypt_with_metadata(self, data: str | bytes) -> PackageData:
        """Encrypt data and include salt if password-based"""
        # convert to bytes (for encoding)
        if isinstance(data, str):
            data = data.encode('utf-8')

        # generate salt and key from password
        salt = os.urandom(16)
        key = self.__derive_key_from_password(self.password, salt)
        # init the cipher suite
        cipher_suite = Fernet(key)

        encrypted_data = cipher_suite.encrypt(data)

        # If using password, include salt in the result
        return {
            'encrypted_data': base64.urlsafe_b64encode(encrypted_data).decode('utf-8'),
            'salt': base64.urlsafe_b64encode(salt).decode('utf-8'),
            'key_hash': hashlib.sha256(key).hexdigest()
        }

    def encrypt_with_metadata(self, data: str | bytes, return_as: str = 'str') -> str | bytes | PackageData:
        """encrypt with metadata, return type selected via return_as"""
        match return_as:
            case 'str':
                return self.encrypt_with_metadata_return_str(data)
            case 'json':
                return self.encrypt_with_metadata_return_str(data)
            case 'bytes':
                return self.encrypt_with_metadata_return_bytes(data)
            case 'dict':
                return self.encrypt_with_metadata_return_dict(data)
            case _:
                # default is string json
                return self.encrypt_with_metadata_return_str(data)

    def encrypt_with_metadata_return_dict(self, data: str | bytes) -> PackageData:
        """encrypt with metadata, but returns data as PackageData dict"""
        return self.__encrypt_with_metadata(data)

    def encrypt_with_metadata_return_str(self, data: str | bytes) -> str:
        """encrypt with metadata, but returns data as string"""
        return json.dumps(self.__encrypt_with_metadata(data))

    def encrypt_with_metadata_return_bytes(self, data: str | bytes) -> bytes:
        """encrypt with metadata, but returns data as bytes"""
        return json.dumps(self.__encrypt_with_metadata(data)).encode('utf-8')

    def decrypt_with_metadata(self, encrypted_package: str | bytes | PackageData, password: str | None = None) -> str:
        """Decrypt data that may include metadata"""
        try:
            # Try to parse as JSON (password-based encryption)
            if isinstance(encrypted_package, bytes):
                package_data = cast(PackageData, json.loads(encrypted_package.decode('utf-8')))
            elif isinstance(encrypted_package, str):
                package_data = cast(PackageData, json.loads(str(encrypted_package)))
            else:
                package_data = encrypted_package

            encrypted_data = base64.urlsafe_b64decode(package_data['encrypted_data'])
            salt = base64.urlsafe_b64decode(package_data['salt'])
            pwd = password or self.password
            key = self.__derive_key_from_password(pwd, salt)
            if package_data['key_hash'] != hashlib.sha256(key).hexdigest():
                raise ValueError("Key hash is not matching, possible invalid password")
            cipher_suite = Fernet(key)
            decrypted_data = cipher_suite.decrypt(encrypted_data)

        except (json.JSONDecodeError, KeyError, UnicodeDecodeError) as e:
            raise ValueError(f"Invalid encrypted package format {e}") from e

        return decrypted_data.decode('utf-8')

    @staticmethod
    def encrypt_data(data: str | bytes, password: str) -> str:
        """
        Static method to encrypt some data

        Arguments:
            data {str | bytes} -- _description_
            password {str} -- _description_

        Returns:
            str -- _description_
        """
        encryptor = SymmetricEncryption(password)
        return encryptor.encrypt_with_metadata_return_str(data)

    @staticmethod
    def decrypt_data(data: str | bytes | PackageData, password: str) -> str:
        """
        Static method to decrypt some data

        Arguments:
            data {str | bytes | PackageData} -- _description_
            password {str} -- _description_

        Returns:
            str -- _description_
        """
        decryptor = SymmetricEncryption(password)
        return decryptor.decrypt_with_metadata(data, password=password)

# __END__
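Round-trip usage sketch (password and payload are hypothetical):

package = SymmetricEncryption.encrypt_data("my secret payload", "correct horse")
assert SymmetricEncryption.decrypt_data(package, "correct horse") == "my secret payload"

try:
    SymmetricEncryption.decrypt_data(package, "wrong password")
except ValueError:
    pass  # the key_hash check rejects a wrong password before Fernet runs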
23
src/corelibs/exceptions/csv_exceptions.py
Normal file
@@ -0,0 +1,23 @@
"""
Exceptions for csv file reading and processing
"""


class NoCsvReader(Exception):
    """
    CSV reader is None
    """


class CsvHeaderDataMissing(Exception):
    """
    The csv reader returned None as headers, the header row in the csv file is missing
    """


class CompulsoryCsvHeaderCheckFailed(Exception):
    """
    raised if the header does not match the expected values
    """

# __END__
75
src/corelibs/file_handling/file_bom_encoding.py
Normal file
@@ -0,0 +1,75 @@
"""
File check if BOM encoded, needed for CSV load
"""

from pathlib import Path
from typing import TypedDict


class BomEncodingInfo(TypedDict):
    """BOM encoding info"""
    has_bom: bool
    bom_type: str | None
    encoding: str | None
    bom_length: int
    bom_pattern: bytes | None


def is_bom_encoded(file_path: Path) -> bool:
    """
    Detect if a file is BOM encoded

    Args:
        file_path (Path): Path to the file to check

    Returns:
        bool: True if file has BOM, False otherwise
    """
    return is_bom_encoded_info(file_path)['has_bom']


def is_bom_encoded_info(file_path: Path) -> BomEncodingInfo:
    """
    Enhanced BOM detection with additional file analysis

    Args:
        file_path (Path): Path to the file to check

    Returns:
        dict: Comprehensive BOM and encoding information
    """
    try:
        # Read the first 4 bytes, enough for the longest BOM
        with open(file_path, 'rb') as f:
            header = f.read(4)

        # longer patterns first, so UTF-32 LE is not misdetected as UTF-16 LE
        bom_patterns = {
            b'\xef\xbb\xbf': ('UTF-8', 'utf-8', 3),
            b'\xff\xfe\x00\x00': ('UTF-32 LE', 'utf-32-le', 4),
            b'\x00\x00\xfe\xff': ('UTF-32 BE', 'utf-32-be', 4),
            b'\xff\xfe': ('UTF-16 LE', 'utf-16-le', 2),
            b'\xfe\xff': ('UTF-16 BE', 'utf-16-be', 2),
        }

        for bom_pattern, (encoding_name, encoding, length) in bom_patterns.items():
            if header.startswith(bom_pattern):
                return {
                    'has_bom': True,
                    'bom_type': encoding_name,
                    'encoding': encoding,
                    'bom_length': length,
                    'bom_pattern': bom_pattern
                }

        return {
            'has_bom': False,
            'bom_type': None,
            'encoding': None,
            'bom_length': 0,
            'bom_pattern': None
        }
    except Exception as e:
        raise ValueError(f"Error checking BOM encoding: {e}") from e


# __END__
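Usage sketch (the file path is hypothetical):

info = is_bom_encoded_info(Path("data.csv"))
if info['has_bom']:
    # e.g. "UTF-8 BOM, 3 bytes, codec: utf-8"
    print(f"{info['bom_type']} BOM, {info['bom_length']} bytes, codec: {info['encoding']}")
    # a CSV loader can then pick 'utf-8-sig' (or seek past bom_length) to skip the marker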
@@ -7,7 +7,12 @@ import shutil
from pathlib import Path


def remove_all_in_directory(directory: Path, ignore_files: list[str] | None = None, verbose: bool = False) -> bool:
def remove_all_in_directory(
    directory: Path,
    ignore_files: list[str] | None = None,
    verbose: bool = False,
    dry_run: bool = False
) -> bool:
    """
    remove all files and folders in a directory
    can exclude files or folders
@@ -24,7 +29,10 @@ def remove_all_in_directory(directory: Path, ignore_files: list[str] | None = No
    if ignore_files is None:
        ignore_files = []
    if verbose:
        print(f"Remove old files in: {directory.name} [", end="", flush=True)
        print(
            f"{'[DRY RUN] ' if dry_run else ''}Remove old files in: {directory.name} [",
            end="", flush=True
        )
    # remove all files and folders in given directory by recursive globbing
    for file in directory.rglob("*"):
        # skip if in ignore files
@@ -32,11 +40,13 @@ def remove_all_in_directory(directory: Path, ignore_files: list[str] | None = No
            continue
        # remove one file, or a whole directory
        if file.is_file():
            os.remove(file)
            if not dry_run:
                os.remove(file)
            if verbose:
                print(".", end="", flush=True)
        elif file.is_dir():
            shutil.rmtree(file)
            if not dry_run:
                shutil.rmtree(file)
            if verbose:
                print("/", end="", flush=True)
    if verbose:

0
src/corelibs/iterator_handling/__init__.py
Normal file
@@ -2,23 +2,41 @@
wrapper around search path
"""

from typing import Any
from typing import Any, TypedDict, NotRequired
from warnings import deprecated


class ArraySearchList(TypedDict):
    """find in array from list search dict"""
    key: str
    value: str | bool | int | float | list[str | None]
    case_sensitive: NotRequired[bool]


@deprecated("Use find_in_array_from_list()")
def array_search(
    search_params: list[dict[str, str | bool | list[str | None]]],
    search_params: list[ArraySearchList],
    data: list[dict[str, Any]],
    return_index: bool = False
) -> list[dict[str, Any]]:
    """deprecated, old call order"""
    return find_in_array_from_list(data, search_params, return_index)


def find_in_array_from_list(
    data: list[dict[str, Any]],
    search_params: list[ArraySearchList],
    return_index: bool = False
) -> list[dict[str, Any]]:
    """
    search in an array of dicts with an array of Key/Value set
    search in a list of dicts with a list of Key/Value sets
    all Key/Value sets must match
    Value set can be a list for OR match
    option: case_sensitive: default True

    Args:
        search_params (list): List of search params in "Key"/"Value" lists with options
        data (list): data to search in, must be a list
        search_params (list): List of search params in "key"/"value" lists with options
        return_index (bool): return index of list [default False]

    Raises:
@@ -32,18 +50,20 @@ def array_search(
    """
    if not isinstance(search_params, list):  # type: ignore
        raise ValueError("search_params must be a list")
    keys = []
    keys: list[str] = []
    # check that key and value exist and are set
    for search in search_params:
        if not search.get('Key') or not search.get('Value'):
        if not search.get('key') or not search.get('value'):
            raise KeyError(
                f"Either Key '{search.get('Key', '')}' or "
                f"Value '{search.get('Value', '')}' is missing or empty"
                f"Either Key '{search.get('key', '')}' or "
                f"Value '{search.get('value', '')}' is missing or empty"
            )
        # if double key -> abort
        if search.get("Key") in keys:
        if search.get("key") in keys:
            raise KeyError(
                f"Key {search.get('Key', '')} already exists in search_params"
                f"Key {search.get('key', '')} already exists in search_params"
            )
        keys.append(str(search['key']))

    return_items: list[dict[str, Any]] = []
    for si_idx, search_item in enumerate(data):
@@ -55,20 +75,20 @@ def array_search(
            # lower case left side
            # TODO: allow nested Keys. eg "Key: ["Key a", "key b"]" to be ["Key a"]["key b"]
            if search.get("case_sensitive", True) is False:
                search_value = search_item.get(str(search['Key']), "").lower()
                search_value = search_item.get(str(search['key']), "").lower()
            else:
                search_value = search_item.get(str(search['Key']), "")
                search_value = search_item.get(str(search['key']), "")
            # lower case right side
            if isinstance(search['Value'], list):
            if isinstance(search['value'], list):
                search_in = [
                    str(k).lower()
                    if search.get("case_sensitive", True) is False else k
                    for k in search['Value']
                ]
                    str(k).lower()
                    if search.get("case_sensitive", True) is False else k
                    for k in search['value']
                ]
            elif search.get("case_sensitive", True) is False:
                search_in = str(search['Value']).lower()
                search_in = str(search['value']).lower()
            else:
                search_in = search['Value']
                search_in = search['value']
            # compare check
            if (
                (
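Usage sketch for the renamed search helper above (data values hypothetical):

people = [
    {"name": "Alice", "role": "admin"},
    {"name": "Bob", "role": "user"},
]
hits = find_in_array_from_list(
    people,
    [{"key": "role", "value": ["admin", "owner"]}],  # a list value acts as an OR match
)
# -> [{'name': 'Alice', 'role': 'admin'}]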
@@ -60,4 +60,22 @@ def build_dict(
        delete_keys_from_set(any_dict, ignore_entries)
    )


def set_entry(dict_set: dict[str, Any], key: str, value_set: Any) -> dict[str, Any]:
    """
    set a new entry in the dict set

    Arguments:
        dict_set {dict[str, Any]} -- _description_
        key {str} -- _description_
        value_set {Any} -- _description_

    Returns:
        dict[str, Any] -- _description_
    """
    if not dict_set.get(key):
        dict_set[key] = {}
    dict_set[key] = value_set
    return dict_set

# __END__
85
src/corelibs/iterator_handling/dict_mask.py
Normal file
@@ -0,0 +1,85 @@
"""
Dict helpers
"""


from typing import TypeAlias, Union, Dict, List, Any, cast

# definitions for the mask run below
MaskableValue: TypeAlias = Union[str, int, float, bool, None]
NestedDict: TypeAlias = Dict[str, Union[MaskableValue, List[Any], 'NestedDict']]
ProcessableValue: TypeAlias = Union[MaskableValue, List[Any], NestedDict]


def mask(
    data_set: dict[str, Any],
    mask_keys: list[str] | None = None,
    mask_str: str = "***",
    mask_str_edges: str = '_',
    skip: bool = False
) -> dict[str, Any]:
    """
    mask data for output
    Checks if any entry of the mask_keys list matches a key in the data set, either at the start or at the end

    Use mask_str_edges to control how matches inside a key should work. By default the match must be
    wrapped in '_' on both sides; set it to an empty string to match anywhere in the key

    Arguments:
        data_set {dict[str, str]} -- _description_

    Keyword Arguments:
        mask_keys {list[str] | None} -- _description_ (default: {None})
        mask_str {str} -- _description_ (default: {"***"})
        mask_str_edges {str} -- _description_ (default: {"_"})
        skip {bool} -- if set to True skip masking (default: {False})

    Returns:
        dict[str, str] -- _description_
    """
    if skip is True:
        return data_set
    if mask_keys is None:
        mask_keys = ["encryption", "password", "secret"]
    else:
        # make sure it is lower case
        mask_keys = [mask_key.lower() for mask_key in mask_keys]

    def should_mask_key(key: str) -> bool:
        """Check if a key should be masked"""
        __key_lower = key.lower()
        return any(
            __key_lower.startswith(mask_key) or
            __key_lower.endswith(mask_key) or
            f"{mask_str_edges}{mask_key}{mask_str_edges}" in __key_lower
            for mask_key in mask_keys
        )

    def mask_recursive(obj: ProcessableValue) -> ProcessableValue:
        """Recursively mask values in nested structures"""
        if isinstance(obj, dict):
            return {
                key: mask_value(value) if should_mask_key(key) else mask_recursive(value)
                for key, value in obj.items()
            }
        if isinstance(obj, list):
            return [mask_recursive(item) for item in obj]
        return obj

    def mask_value(value: Any) -> Any:
        """Handle masking based on value type"""
        if isinstance(value, list):
            # Mask each individual value in the list
            return [mask_str for _ in cast('list[Any]', value)]
        if isinstance(value, dict):
            # Recursively process the dictionary instead of masking the whole thing
            return mask_recursive(cast('ProcessableValue', value))
        # Mask primitive values
        return mask_str

    return {
        key: mask_value(value) if should_mask_key(key) else mask_recursive(value)
        for key, value in data_set.items()
    }

# __END__
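Usage sketch: nested keys matching the mask list are replaced, everything else passes through (values hypothetical):

conf = {"db": {"password": "hunter2", "host": "localhost"}, "api_secret": "abc"}
mask(conf)
# -> {'db': {'password': '***', 'host': 'localhost'}, 'api_secret': '***'}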
73
src/corelibs/iterator_handling/fingerprint.py
Normal file
@@ -0,0 +1,73 @@
"""
Various dictionary, object and list hashers
"""

import json
import hashlib
from typing import Any, cast, Sequence


def hash_object(obj: Any) -> str:
    """
    RECOMMENDED for new use
    Create a hash for any dict or list with mixed key types

    Arguments:
        obj {Any} -- _description_

    Returns:
        str -- _description_
    """
    def normalize(o: Any) -> Any:
        if isinstance(o, dict):
            # Sort by repr of keys to handle mixed types (str, int, etc.)
            o = cast(dict[Any, Any], o)
            return tuple(sorted((repr(k), normalize(v)) for k, v in o.items()))
        if isinstance(o, (list, tuple)):
            o = cast(Sequence[Any], o)
            return tuple(normalize(item) for item in o)
        return repr(o)

    normalized = normalize(obj)
    return hashlib.sha256(str(normalized).encode()).hexdigest()


def dict_hash_frozen(data: dict[Any, Any]) -> int:
    """
    NOT RECOMMENDED, use dict_hash_crc or hash_object instead
    If used, DO NOT CHANGE
    hash a dict via freeze

    Args:
        data (dict): _description_

    Returns:
        str: _description_
    """
    return hash(frozenset(data.items()))


def dict_hash_crc(data: dict[Any, Any] | list[Any]) -> str:
    """
    LEGACY METHOD, must be kept for fallback, if used by other code, DO NOT CHANGE
    Create a sha256 hash over a dict or list
    alternative for
    dict_hash_frozen

    Args:
        data (dict[Any, Any] | list[Any]): _description_

    Returns:
        str: sha256 hash, prefixed with HO_ if the fallback is used
    """
    try:
        return hashlib.sha256(
            # IT IS IMPORTANT THAT THE BELOW CALL STAYS THE SAME AND DOES NOT CHANGE OR WE WILL GET DIFFERENT HASHES
            # separators=(',', ':') to get rid of spaces, but if this is used the hash will be different, DO NOT ADD
            json.dumps(data, sort_keys=True, ensure_ascii=True, default=str).encode('utf-8')
        ).hexdigest()
    except TypeError:
        # Fallback to a different hasher, will return a DIFFERENT hash than above, so only used for int/str key mixes
        return "HO_" + hash_object(data)

# __END__
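Usage sketch: key order does not change the fingerprint, and mixed key types are handled (values hypothetical):

assert hash_object({"a": 1, "b": 2}) == hash_object({"b": 2, "a": 1})
hash_object({1: "x", "1": "y"})   # 64-char sha256 hex digest, no TypeError
dict_hash_crc({1: "x", "1": "y"}) # json.dumps fails on mixed keys, so this returns the "HO_"-prefixed fallback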
75
src/corelibs/iterator_handling/list_helpers.py
Normal file
@@ -0,0 +1,75 @@
"""
List type helpers
"""

import json
from typing import Any, Sequence


def convert_to_list(
    entry: str | int | float | bool | Sequence[str | int | float | bool | Sequence[Any]]
) -> Sequence[str | int | float | bool | Sequence[Any]]:
    """
    Convert any of the non list values (except dictionary) to a list

    Arguments:
        entry {str | int | float | bool | list[str | int | float | bool]} -- _description_

    Returns:
        list[str | int | float | bool] -- _description_
    """
    if isinstance(entry, list):
        return entry
    return [entry]


def is_list_in_list(
    list_a: Sequence[str | int | float | bool | Sequence[Any]],
    list_b: Sequence[str | int | float | bool | Sequence[Any]]
) -> Sequence[str | int | float | bool | Sequence[Any]]:
    """
    Return entries from list_a that are not in list_b
    Type safe compare

    Arguments:
        list_a {list[Any]} -- _description_
        list_b {list[Any]} -- _description_

    Returns:
        list[Any] -- _description_
    """
    # Create sets of (value, type) tuples
    set_a = set((item, type(item)) for item in list_a)
    set_b = set((item, type(item)) for item in list_b)

    # Get the difference and extract just the values
    return [item for item, _ in set_a - set_b]


def make_unique_list_of_dicts(dict_list: list[Any]) -> list[Any]:
    """
    Create a list of unique dictionary entries

    Arguments:
        dict_list {list[Any]} -- _description_

    Returns:
        list[Any] -- _description_
    """
    try:
        # try json dumps, can fail with int and str index types
        return list(
            {
                json.dumps(d, sort_keys=True, ensure_ascii=True, separators=(',', ':')): d
                for d in dict_list
            }.values()
        )
    except TypeError:
        # Fallback for non-serializable entries, slow but works
        unique: list[Any] = []
        for d in dict_list:
            if d not in unique:
                unique.append(d)
        return unique

# __END__
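Usage sketch (values hypothetical; is_list_in_list is set-based, so result order is not guaranteed):

is_list_in_list([1, "1", 2], [1, 2])  # -> ["1"], the string "1" is distinct from int 1
make_unique_list_of_dicts([{"a": 1}, {"a": 1}, {"b": 2}])  # -> [{'a': 1}, {'b': 2}]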
@@ -28,8 +28,12 @@ def jmespath_search(search_data: dict[Any, Any] | list[Any], search_params: str)
        raise ValueError(f"Compile failed: {search_params}: {excp}") from excp
    except jmespath.exceptions.ParseError as excp:
        raise ValueError(f"Parse failed: {search_params}: {excp}") from excp
    except jmespath.exceptions.JMESPathTypeError as excp:
        raise ValueError(f"Search failed with JMESPathTypeError: {search_params}: {excp}") from excp
    except TypeError as excp:
        raise ValueError(f"Type error for search_params: {excp}") from excp
    return search_result

# TODO: compile jmespath setup

# __END__
@@ -3,15 +3,17 @@ json encoder for datetime
"""

from typing import Any
from json import JSONEncoder
from json import JSONEncoder, dumps
from datetime import datetime, date
import copy
from jsonpath_ng import parse  # pyright: ignore[reportMissingTypeStubs, reportUnknownVariableType]


# subclass JSONEncoder
class DateTimeEncoder(JSONEncoder):
    """
    Override the default method
    cls=DateTimeEncoder
    dumps(..., cls=DateTimeEncoder, ...)
    """
    def default(self, o: Any) -> str | None:
        if isinstance(o, (date, datetime)):
@@ -19,13 +21,44 @@ class DateTimeEncoder(JSONEncoder):
        return None


def default(obj: Any) -> str | None:
def default_isoformat(obj: Any) -> str | None:
    """
    default override
    default=default
    dumps(..., default=default, ...)
    """
    if isinstance(obj, (date, datetime)):
        return obj.isoformat()
    return None


def json_dumps(data: Any):
    """
    wrapper for json.dumps for a safe dump that does not throw exceptions

    Arguments:
        data {Any} -- _description_

    Returns:
        _type_ -- _description_
    """
    return dumps(data, ensure_ascii=False, default=str)


def modify_with_jsonpath(data: dict[Any, Any], path: str, new_value: Any):
    """
    Modify dictionary using JSONPath (more powerful than JMESPath for modifications)
    """
    result = copy.deepcopy(data)
    jsonpath_expr = parse(path)  # pyright: ignore[reportUnknownVariableType]

    # Find and update all matches
    matches = jsonpath_expr.find(result)  # pyright: ignore[reportUnknownMemberType, reportUnknownVariableType]
    for match in matches:  # pyright: ignore[reportUnknownVariableType]
        match.full_path.update(result, new_value)  # pyright: ignore[reportUnknownMemberType]

    return result

# __END__
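Usage sketch for the two dump variants (values hypothetical; dumps here is json.dumps as imported above):

from datetime import datetime
payload = {"created": datetime(2025, 8, 6, 16, 17, 39)}
json_dumps(payload)                  # '{"created": "2025-08-06 16:17:39"}' via default=str
dumps(payload, cls=DateTimeEncoder)  # '{"created": "2025-08-06T16:17:39"}' via isoformat()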
@@ -1,37 +0,0 @@
"""
Dict helpers
"""


def mask(
    data_set: dict[str, str],
    mask_keys: list[str] | None = None,
    mask_str: str = "***",
    skip: bool = False
) -> dict[str, str]:
    """
    mask data for output
    Checks if mask_keys list exist in any key in the data set either from the start or at the end

    Arguments:
        data_set {dict[str, str]} -- _description_

    Keyword Arguments:
        mask_keys {list[str] | None} -- _description_ (default: {None})
        mask_str {str} -- _description_ (default: {"***"})
        skip {bool} -- _description_ (default: {False})

    Returns:
        dict[str, str] -- _description_
    """
    if skip is True:
        return data_set
    if mask_keys is None:
        mask_keys = ["password", "secret"]
    return {
        key: mask_str
        if any(key.startswith(mask_key) or key.endswith(mask_key) for mask_key in mask_keys) else value
        for key, value in data_set.items()
    }

# __END__
|
||||
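A usage sketch for the removed `mask` helper, to document what callers relied on (editor's illustration, not part of the diff):

    # usage sketch, not part of the diff
    creds = {"user": "alice", "password_db": "hunter2", "api_secret": "xyz"}
    print(mask(creds))
    # -> {'user': 'alice', 'password_db': '***', 'api_secret': '***'}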
@@ -1,39 +0,0 @@
"""
Various dictionary, object and list hashers
"""


import json
import hashlib
from typing import Any


def dict_hash_frozen(data: dict[Any, Any]) -> int:
    """
    hash a dict via frozenset

    Args:
        data (dict): data to hash

    Returns:
        int: hash value
    """
    return hash(frozenset(data.items()))


def dict_hash_crc(data: dict[Any, Any] | list[Any]) -> str:
    """
    Create a sha256 hash over a dict or list,
    alternative for dict_hash_frozen

    Args:
        data (dict | list): data to hash

    Returns:
        str: sha256 hex digest
    """
    return hashlib.sha256(
        json.dumps(data, sort_keys=True, ensure_ascii=True).encode('utf-8')
    ).hexdigest()

# __END__
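Likewise, a sketch of how the removed hashers were typically called (editor's illustration, not part of the diff):

    # usage sketch, not part of the diff
    print(dict_hash_frozen({"a": 1}))       # int hash, varies per process (str hash randomization)
    print(dict_hash_crc({"a": 1, "b": 2}))  # stable sha256 hex digest across runs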
File diff suppressed because it is too large
@@ -0,0 +1,89 @@
"""
All logging levels
"""


import logging
from typing import Any
from enum import Enum


class LoggingLevel(Enum):
    """
    Log class levels
    """
    NOTSET = logging.NOTSET  # 0
    DEBUG = logging.DEBUG  # 10
    INFO = logging.INFO  # 20
    WARNING = logging.WARNING  # 30
    ERROR = logging.ERROR  # 40
    CRITICAL = logging.CRITICAL  # 50
    ALERT = 55  # 55 (for Sys log)
    EMERGENCY = 60  # 60 (for Sys log)
    EXCEPTION = 70  # 70 (manually set, error but with higher level)
    # Alternative names
    WARN = logging.WARN  # 30 (alias for WARNING)
    FATAL = logging.FATAL  # 50 (alias for CRITICAL)

    @classmethod
    def from_string(cls, level_str: str):
        """Convert string to LogLevel enum"""
        try:
            return cls[level_str.upper()]
        except KeyError as e:
            raise ValueError(f"Invalid log level: {level_str}") from e
        except AttributeError as e:
            raise ValueError(f"Invalid log level: {level_str}") from e

    @classmethod
    def from_int(cls, level_int: int):
        """Convert integer to LogLevel enum"""
        try:
            return cls(level_int)
        except ValueError as e:
            raise ValueError(f"Invalid log level: {level_int}") from e

    @classmethod
    def from_any(cls, level_any: Any):
        """
        Convert any value:
        if already a LoggingLevel return as is, else try to convert from int or string

        Arguments:
            level_any {Any} -- LoggingLevel, int or str level

        Returns:
            LoggingLevel -- matching enum member
        """
        if isinstance(level_any, LoggingLevel):
            return level_any
        if isinstance(level_any, int):
            return cls.from_int(level_any)
        return cls.from_string(level_any)

    def to_logging_level(self):
        """Convert to logging module level"""
        return self.value

    def to_lower_case(self):
        """return lower case name"""
        return self.name.lower()

    def __str__(self):
        return self.name

    def includes(self, level: 'LoggingLevel'):
        """
        if given level is included in set level
        eg: with INFO set, ERROR is included because INFO level would print ERROR
        """
        return self.value <= level.value

    def is_higher_than(self, level: 'LoggingLevel'):
        """if given value is higher than set"""
        return self.value > level.value

    def is_lower_than(self, level: 'LoggingLevel'):
        """if given value is lower than set"""
        return self.value < level.value

# __END__
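A short sketch of the conversion and comparison helpers defined above (editor's illustration, not part of the diff):

    # usage sketch, not part of the diff
    level = LoggingLevel.from_any("info")            # -> LoggingLevel.INFO
    print(level.includes(LoggingLevel.ERROR))        # True: INFO would also emit ERROR
    print(level.is_higher_than(LoggingLevel.DEBUG))  # True (20 > 10)
    print(level.to_logging_level())                  # 20, usable with the logging module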
0
src/corelibs/math_handling/__init__.py
Normal file
35
src/corelibs/math_handling/math_helpers.py
Normal file
@@ -0,0 +1,35 @@
"""
Various math helpers
"""


import math


def gcd(a: int, b: int):
    """
    Calculate: Greatest Common Divisor

    Arguments:
        a {int} -- first number
        b {int} -- second number

    Returns:
        int -- greatest common divisor of a and b
    """
    return math.gcd(a, b)


def lcd(a: int, b: int):
    """
    Calculate: Least Common Multiple
    (named lcd for Least Common Denominator; delegates to math.lcm)

    Arguments:
        a {int} -- first number
        b {int} -- second number

    Returns:
        int -- least common multiple of a and b
    """
    return math.lcm(a, b)

# __END__
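A worked example for the two wrappers above (editor's illustration; note that `lcd` delegates to `math.lcm`, i.e. it returns the least common multiple):

    # usage sketch, not part of the diff
    print(gcd(12, 18))  # 6
    print(lcd(4, 6))    # 12, i.e. lcm(4, 6)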
20
src/corelibs/requests_handling/auth_helpers.py
Normal file
@@ -0,0 +1,20 @@
"""
Various HTTP auth helpers
"""

from base64 import b64encode


def basic_auth(username: str, password: str) -> str:
    """
    setup basic auth header value, for debug

    Arguments:
        username {str} -- user name
        password {str} -- password

    Returns:
        str -- "Basic <base64 token>" header value
    """
    token = b64encode(f"{username}:{password}".encode('utf-8')).decode("ascii")
    return f'Basic {token}'
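A quick check of the header value it produces (editor's illustration; base64 of "user:pass"):

    # usage sketch, not part of the diff
    print(basic_auth("user", "pass"))
    # -> Basic dXNlcjpwYXNz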
@@ -3,31 +3,61 @@ requests lib interface
V2 call type
"""

from typing import Any
import warnings
from typing import Any, TypedDict, cast
import requests
# to hide the verify warnings because of the bad SSL settings from Netskope, Akamai, etc
warnings.filterwarnings('ignore', message='Unverified HTTPS request')
from requests import exceptions


class ErrorResponse:
    """
    Error response structure. This is returned if a request could not be completed
    """
    def __init__(
        self,
        code: int,
        message: str,
        action: str,
        url: str,
        exception: exceptions.InvalidSchema | exceptions.ReadTimeout | exceptions.ConnectionError | None = None
    ) -> None:
        self.code = code
        self.message = message
        self.action = action
        self.url = url
        self.exception_name = type(exception).__name__ if exception is not None else None
        self.exception_trace = exception if exception is not None else None


class ProxyConfig(TypedDict):
    """
    Socks proxy settings
    """
    type: str
    host: str
    port: str


class Caller:
    """_summary_"""
    """
    requests lib interface
    """

    def __init__(
        self,
        header: dict[str, str],
        verify: bool = True,
        timeout: int = 20,
        proxy: dict[str, str] | None = None
        proxy: ProxyConfig | None = None,
        verify: bool = True,
        ca_file: str | None = None
    ):
        self.headers = header
        self.timeout: int = timeout
        self.cafile = "/Library/Application Support/Netskope/STAgent/data/nscacert.pem"
        self.ca_file = ca_file
        self.verify = verify
        self.proxy = proxy
        self.proxy = cast(dict[str, str], proxy) if proxy is not None else None

    def __timeout(self, timeout: int | None) -> int:
        if timeout is not None:
        if timeout is not None and timeout >= 0:
            return timeout
        return self.timeout

@@ -38,7 +68,7 @@ class Caller:
        data: dict[str, Any] | None = None,
        params: dict[str, Any] | None = None,
        timeout: int | None = None
    ) -> requests.Response | None:
    ) -> requests.Response | ErrorResponse:
        """
        call wrapper, on error returns ErrorResponse

@@ -55,67 +85,96 @@ class Caller:
        if data is None:
            data = {}
        try:
            response = None
            if action == "get":
                response = requests.get(
            if action == "get":
                return requests.get(
                    url,
                    params=params,
                    headers=self.headers,
                    timeout=self.__timeout(timeout),
                    verify=self.verify,
                    proxies=self.proxy
                    proxies=self.proxy,
                    cert=self.ca_file
                )
            elif action == "post":
                response = requests.post(
            if action == "post":
                return requests.post(
                    url,
                    params=params,
                    json=data,
                    headers=self.headers,
                    timeout=self.__timeout(timeout),
                    verify=self.verify,
                    proxies=self.proxy
                    proxies=self.proxy,
                    cert=self.ca_file
                )
            elif action == "put":
                response = requests.put(
            if action == "put":
                return requests.put(
                    url,
                    params=params,
                    json=data,
                    headers=self.headers,
                    timeout=self.__timeout(timeout),
                    verify=self.verify,
                    proxies=self.proxy
                    proxies=self.proxy,
                    cert=self.ca_file
                )
            elif action == "patch":
                response = requests.patch(
            if action == "patch":
                return requests.patch(
                    url,
                    params=params,
                    json=data,
                    headers=self.headers,
                    timeout=self.__timeout(timeout),
                    verify=self.verify,
                    proxies=self.proxy
                    proxies=self.proxy,
                    cert=self.ca_file
                )
            elif action == "delete":
                response = requests.delete(
            if action == "delete":
                return requests.delete(
                    url,
                    params=params,
                    headers=self.headers,
                    timeout=self.__timeout(timeout),
                    verify=self.verify,
                    proxies=self.proxy
                    proxies=self.proxy,
                    cert=self.ca_file
                )
            return response
        except requests.exceptions.InvalidSchema as e:
            print(f"Invalid URL during '{action}' for {url}:\n\t{e}")
            return None
        except requests.exceptions.ReadTimeout as e:
            print(f"Timeout ({self.timeout}s) during '{action}' for {url}:\n\t{e}")
            return None
        except requests.exceptions.ConnectionError as e:
            print(f"Connection error during '{action}' for {url}:\n\t{e}")
            return None
            return ErrorResponse(
                100,
                f"Unsupported action '{action}'",
                action,
                url
            )
        except exceptions.InvalidSchema as e:
            return ErrorResponse(
                200,
                f"Invalid URL during '{action}' for {url}",
                action,
                url,
                e
            )
        except exceptions.ReadTimeout as e:
            return ErrorResponse(
                300,
                f"Timeout ({self.timeout}s) during '{action}' for {url}",
                action,
                url,
                e
            )
        except exceptions.ConnectionError as e:
            return ErrorResponse(
                400,
                f"Connection error during '{action}' for {url}",
                action,
                url,
                e
            )

    def get(self, url: str, params: dict[str, Any] | None = None) -> requests.Response | None:
    def get(
        self,
        url: str,
        params: dict[str, Any] | None = None,
        timeout: int | None = None
    ) -> requests.Response | ErrorResponse:
        """
        get data

@@ -126,11 +185,15 @@ class Caller:
        Returns:
            requests.Response: _description_
        """
        return self.__call('get', url, params=params)
        return self.__call('get', url, params=params, timeout=timeout)

    def post(
        self, url: str, data: dict[str, Any] | None = None, params: dict[str, Any] | None = None
    ) -> requests.Response | None:
        self,
        url: str,
        data: dict[str, Any] | None = None,
        params: dict[str, Any] | None = None,
        timeout: int | None = None
    ) -> requests.Response | ErrorResponse:
        """
        post data

@@ -142,11 +205,15 @@ class Caller:
        Returns:
            requests.Response | None: _description_
        """
        return self.__call('post', url, data, params)
        return self.__call('post', url, data, params, timeout=timeout)

    def put(
        self, url: str, data: dict[str, Any] | None = None, params: dict[str, Any] | None = None
    ) -> requests.Response | None:
        self,
        url: str,
        data: dict[str, Any] | None = None,
        params: dict[str, Any] | None = None,
        timeout: int | None = None
    ) -> requests.Response | ErrorResponse:
        """_summary_

        Args:
@@ -157,11 +224,15 @@ class Caller:
        Returns:
            requests.Response | None: _description_
        """
        return self.__call('put', url, data, params)
        return self.__call('put', url, data, params, timeout=timeout)

    def patch(
        self, url: str, data: dict[str, Any] | None = None, params: dict[str, Any] | None = None
    ) -> requests.Response | None:
        self,
        url: str,
        data: dict[str, Any] | None = None,
        params: dict[str, Any] | None = None,
        timeout: int | None = None
    ) -> requests.Response | ErrorResponse:
        """_summary_

        Args:
@@ -172,9 +243,14 @@ class Caller:
        Returns:
            requests.Response | None: _description_
        """
        return self.__call('patch', url, data, params)
        return self.__call('patch', url, data, params, timeout=timeout)

    def delete(self, url: str, params: dict[str, Any] | None = None) -> requests.Response | None:
    def delete(
        self,
        url: str,
        params: dict[str, Any] | None = None,
        timeout: int | None = None
    ) -> requests.Response | ErrorResponse:
        """
        delete

@@ -185,6 +261,6 @@ class Caller:
        Returns:
            requests.Response | None: _description_
        """
        return self.__call('delete', url, params=params)
        return self.__call('delete', url, params=params, timeout=timeout)

# __END__
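Since `__call` now returns `ErrorResponse` instead of `None` on failure, callers branch on the type; a sketch (editor's illustration, not part of the diff; URL is a placeholder and the exact constructor keywords follow the new signature above):

    # usage sketch, not part of the diff
    caller = Caller(header={"Accept": "application/json"})
    result = caller.get("https://example.com/api/items", timeout=5)
    if isinstance(result, ErrorResponse):
        print(f"[{result.code}] {result.message} ({result.exception_name})")
    else:
        print(result.status_code, result.json())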
@@ -32,7 +32,7 @@ show_position(file pos optional)
import time
from typing import Literal
from math import floor
from corelibs.string_handling.datetime_helpers import convert_timestamp
from corelibs_datetime.timestamp_convert import convert_timestamp
from corelibs.string_handling.byte_helpers import format_bytes

@@ -1,63 +0,0 @@
"""
Various string based date/time helpers
"""

from math import floor
import time


def convert_timestamp(timestamp: float | int, show_micro: bool = True) -> str:
    """
    format timestamp into human readable format

    Arguments:
        timestamp {float} -- timestamp in seconds

    Keyword Arguments:
        show_micro {bool} -- also print the millisecond part (default: {True})

    Returns:
        str -- human readable time string
    """
    # cut off the ms, but first round them to four digits
    __timestamp_ms_split = str(round(timestamp, 4)).split(".")
    timestamp = int(__timestamp_ms_split[0])
    try:
        ms = int(__timestamp_ms_split[1])
    except IndexError:
        ms = 0
    timegroups = (86400, 3600, 60, 1)
    output: list[int] = []
    for i in timegroups:
        output.append(int(floor(timestamp / i)))
        timestamp = timestamp % i
    # output has days|hours|min|sec ms
    time_string = ""
    if output[0]:
        time_string = f"{output[0]}d "
    if output[0] or output[1]:
        time_string += f"{output[1]}h "
    if output[0] or output[1] or output[2]:
        time_string += f"{output[2]}m "
    time_string += f"{output[3]}s"
    if show_micro:
        time_string += f" {ms}ms" if ms else " 0ms"
    return time_string


def create_time(timestamp: float, timestamp_format: str = "%Y-%m-%d %H:%M:%S") -> str:
    """
    just takes a timestamp and prints out human readable format

    Arguments:
        timestamp {float} -- timestamp in seconds

    Keyword Arguments:
        timestamp_format {str} -- strftime format (default: {"%Y-%m-%d %H:%M:%S"})

    Returns:
        str -- formatted time string
    """
    return time.strftime(timestamp_format, time.localtime(timestamp))

# __END__
@@ -2,6 +2,7 @@
String helpers
"""

import re
from decimal import Decimal, getcontext
from textwrap import shorten

@@ -101,4 +102,21 @@ def format_number(number: float, precision: int = 0) -> str:
        "f}"
    ).format(_number)


def prepare_url_slash(url: str) -> str:
    """
    if the URL does not start with /, add a slash;
    strip all double slashes in the URL

    Arguments:
        url {str} -- URL path to normalize

    Returns:
        str -- normalized URL path
    """
    url = re.sub(r'\/+', '/', url)
    if not url.startswith("/"):
        url = "/" + url
    return url

# __END__
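A couple of worked inputs for the new helper (editor's illustration, not part of the diff):

    # usage sketch, not part of the diff
    print(prepare_url_slash("foo//bar"))     # /foo/bar
    print(prepare_url_slash("/already/ok"))  # /already/ok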
18
src/corelibs/string_handling/text_colors.py
Normal file
@@ -0,0 +1,18 @@
"""
Basic ANSI colors

Set colors with print(f"something {Colors.yellow}colorful{Colors.end}")
bold + underline + color combinations are possible.
"""

from warnings import deprecated
from corelibs_text_colors.text_colors import Colors as ColorsNew


@deprecated("Use src.corelibs_text_colors.text_colors instead")
class Colors(ColorsNew):
    """
    ANSI colors defined
    """

# __END__
@@ -1,26 +0,0 @@
"""
Current timestamp strings and time zones
"""

from datetime import datetime
from zoneinfo import ZoneInfo, ZoneInfoNotFoundError


class TimestampStrings:
    """
    set default time stamps
    """

    time_zone: str = 'Asia/Tokyo'

    def __init__(self, time_zone: str | None = None):
        self.timestamp_now = datetime.now()
        self.time_zone = time_zone if time_zone is not None else __class__.time_zone
        try:
            self.timestamp_now_tz = datetime.now(ZoneInfo(self.time_zone))
        except ZoneInfoNotFoundError as e:
            raise ValueError(f'Zone could not be loaded [{self.time_zone}]: {e}') from e
        self.today = self.timestamp_now.strftime('%Y-%m-%d')
        self.timestamp = self.timestamp_now.strftime("%Y-%m-%d %H:%M:%S")
        self.timestamp_tz = self.timestamp_now_tz.strftime("%Y-%m-%d %H:%M:%S %Z")
        self.timestamp_file = self.timestamp_now.strftime("%Y-%m-%d_%H%M%S")
0
src/corelibs/var_handling/__init__.py
Normal file
25
src/corelibs/var_handling/enum_base.py
Normal file
@@ -0,0 +1,25 @@
"""
Enum base classes
"""

import warnings
from corelibs_enum_base.enum_base import EnumBase as CorelibsEnumBase


class EnumBase(CorelibsEnumBase):
    """
    base for enum

    .. deprecated::
        Use corelibs_enum_base.EnumBase instead
    DEPRECATED: Use corelibs_enum_base.enum_base.EnumBase instead

    lookup_any and from_any will return "EnumBase" and the sub class name
    run the return again through "from_any" to get a clean value, or cast it
    """


# At the module level, issue a deprecation warning
warnings.warn("Use corelibs_enum_base.enum_base.EnumBase instead", DeprecationWarning, stacklevel=2)

# __END__
15
src/corelibs/var_handling/enum_base.pyi
Normal file
@@ -0,0 +1,15 @@
"""
Enum base classes [STPUB]
"""

from typing_extensions import deprecated
from corelibs_enum_base.enum_base import EnumBase as CorelibsEnumBase


@deprecated("Use corelibs_enum_base.enum_base.EnumBase instead")
class EnumBase(CorelibsEnumBase):
    """
    base for enum
    lookup_any and from_any will return "EnumBase" and the sub class name
    run the return again through "from_any" to get a clean value, or cast it
    """
54
src/corelibs/var_handling/var_helpers.py
Normal file
@@ -0,0 +1,54 @@
"""
variable convert, check, etc helper
"""

from typing import Any
from warnings import deprecated
import corelibs_var.var_helpers


@deprecated("Use corelibs_var.var_helpers.is_int instead")
def is_int(string: Any) -> bool:
    """
    check if a value is int

    Arguments:
        string {Any} -- value to check

    Returns:
        bool -- True if the value parses as int
    """
    return corelibs_var.var_helpers.is_int(string)


@deprecated("Use corelibs_var.var_helpers.is_float instead")
def is_float(string: Any) -> bool:
    """
    check if a value is float

    Arguments:
        string {Any} -- value to check

    Returns:
        bool -- True if the value parses as float
    """
    return corelibs_var.var_helpers.is_float(string)


@deprecated("Use corelibs_var.var_helpers.str_to_bool instead")
def str_to_bool(string: str):
    """
    convert string to bool

    Arguments:
        string {str} -- string to convert

    Raises:
        ValueError: if the string is not a recognized bool value

    Returns:
        bool -- converted value
    """
    return corelibs_var.var_helpers.str_to_bool(string)

# __END__
109
test-run/check_handling/regex_checks.py
Normal file
@@ -0,0 +1,109 @@
"""
Test check handling for regex checks
"""

from corelibs_text_colors.text_colors import Colors
from corelibs.check_handling.regex_constants import (
    compile_re, DOMAIN_WITH_LOCALHOST_REGEX, EMAIL_BASIC_REGEX, NAME_EMAIL_BASIC_REGEX, SUB_EMAIL_BASIC_REGEX
)
from corelibs.check_handling.regex_constants_compiled import (
    COMPILED_DOMAIN_WITH_LOCALHOST_REGEX, COMPILED_EMAIL_BASIC_REGEX,
    COMPILED_NAME_EMAIL_SIMPLE_REGEX, COMPILED_NAME_EMAIL_BASIC_REGEX
)

NAME_EMAIL_SIMPLE_REGEX = r"""
^\s*(?:"(?P<name1>[^"]+)"\s*<(?P<email1>[^>]+)>|
(?P<name2>.+?)\s*<(?P<email2>[^>]+)>|
<(?P<email3>[^>]+)>|
(?P<email4>[^\s<>]+))\s*$
"""


def domain_test():
    """
    domain regex test
    """
    print("=" * 30)
    test_domains = [
        "example.com",
        "localhost",
        "subdomain.localhost",
        "test.localhost.com",
        "some-domain.org"
    ]

    regex_domain_check = COMPILED_DOMAIN_WITH_LOCALHOST_REGEX
    print(f"REGEX: {DOMAIN_WITH_LOCALHOST_REGEX}")
    print(f"Check regex: {regex_domain_check.search('localhost')}")

    for domain in test_domains:
        if regex_domain_check.search(domain):
            print(f"Matched: {domain}")
        else:
            print(f"Did not match: {domain}")


def email_test():
    """
    email regex test
    """
    print("=" * 30)
    email_list = """
e@bar.com
<f@foobar.com>
"Master" <foobar@bar.com>
"not valid" not@valid.com
also not valid not@valid.com
some header <something@bar.com>
test master <master@master.com>
日本語 <japan@jp.net>
"ひほん カケ苦" <foo@bar.com>
single@entry.com
arsch@popsch.com
test open <open@open.com>
"""

    print(f"REGEX: SUB_EMAIL_BASIC_REGEX: {SUB_EMAIL_BASIC_REGEX}")
    print(f"REGEX: EMAIL_BASIC_REGEX: {EMAIL_BASIC_REGEX}")
    print(f"REGEX: COMPILED_NAME_EMAIL_SIMPLE_REGEX: {COMPILED_NAME_EMAIL_SIMPLE_REGEX}")
    print(f"REGEX: NAME_EMAIL_BASIC_REGEX: {NAME_EMAIL_BASIC_REGEX}")

    basic_email = COMPILED_EMAIL_BASIC_REGEX
    sub_basic_email = compile_re(SUB_EMAIL_BASIC_REGEX)
    simple_name_email_regex = COMPILED_NAME_EMAIL_SIMPLE_REGEX
    full_name_email_regex = COMPILED_NAME_EMAIL_BASIC_REGEX
    for email in email_list.splitlines():
        email = email.strip()
        if not email:
            continue
        print(f">>> Testing: {email}")
        if not basic_email.match(email):
            print(f"{Colors.red}[EMAIL ] No match: {email}{Colors.reset}")
        else:
            print(f"{Colors.green}[EMAIL ] Matched : {email}{Colors.reset}")
        if not sub_basic_email.match(email):
            print(f"{Colors.red}[SUB   ] No match: {email}{Colors.reset}")
        else:
            print(f"{Colors.green}[SUB   ] Matched : {email}{Colors.reset}")
        if not simple_name_email_regex.match(email):
            print(f"{Colors.red}[SIMPLE] No match: {email}{Colors.reset}")
        else:
            print(f"{Colors.green}[SIMPLE] Matched : {email}{Colors.reset}")
        if not full_name_email_regex.match(email):
            print(f"{Colors.red}[FULL  ] No match: {email}{Colors.reset}")
        else:
            print(f"{Colors.green}[FULL  ] Matched : {email}{Colors.reset}")


def main():
    """
    Test regex checks
    """
    domain_test()
    email_test()


if __name__ == "__main__":
    main()

# __END__
44
test-run/config_handling/config/settings.ini
Normal file
@@ -0,0 +1,44 @@
[TestA]
foo=bar
overload_from_args=bar
foobar=1
bar=st
arg_overload=should_not_be_set_because_of_command_line_is_list
arg_overload_list=too,be,long
arg_overload_not_set=this should not be set because of override flag
just_values=too,be,long
some_match=foo
some_match_list=foo,bar
test_list=a,b,c,d f, g h
other_list=a|b|c|d|
third_list=xy|ab|df|fg
empty_list=
str_length=foobar
int_range=20
int_range_not_set=
int_range_not_set_empty_set=5
bool_var=True
#
match_target=foo
match_target_list=foo,bar,baz
#
match_source_a=foo
match_source_b=foo
; match_source_c=foo
match_source_list=foo,bar

[TestB]
element_a=Static energy
element_b=123.5
element_c=True
elemend_d=AB:CD;EF
email=foo@bar.com,other+bar-fee@domain-com.cp,
email_not_mandatory=
email_bad=gii@bar.com

[LoadTest]
a.b.c=foo
d:e:f=bar

[ErrorTest]
some_value=42
2
test-run/config_handling/log/.gitignore
vendored
Normal file
@@ -0,0 +1,2 @@
*
!.gitignore
148
test-run/config_handling/settings_loader.py
Normal file
@@ -0,0 +1,148 @@
"""
Settings loader test
"""

import re
from pathlib import Path
from corelibs.debug_handling.dump_data import dump_data
from corelibs.logging_handling.log import Log
from corelibs.config_handling.settings_loader import SettingsLoader
from corelibs.config_handling.settings_loader_handling.settings_loader_check import SettingsLoaderCheck

SCRIPT_PATH: Path = Path(__file__).resolve().parent
ROOT_PATH: Path = SCRIPT_PATH
CONFIG_DIR: Path = Path("config")
LOG_DIR: Path = Path("log")
CONFIG_FILE: str = "settings.ini"


def main():
    """
    Main run
    """

    # for log testing
    log = Log(
        log_path=ROOT_PATH.joinpath(LOG_DIR, 'settings_loader.log'),
        log_name="Settings Loader",
        log_settings={
            "log_level_console": 'DEBUG',
            "log_level_file": 'DEBUG',
        }
    )
    log.logger.info('Settings loader')

    value = "2025/1/1"
    regex_c = re.compile(SettingsLoaderCheck.CHECK_SETTINGS['string.date']['regex'], re.VERBOSE)
    result = regex_c.search(value)
    log.info(f"regex {regex_c} check against {value} -> {result}")

    sl = SettingsLoader(
        {
            'overload_from_args': 'OVERLOAD from ARGS',
            'arg_overload': ['should', 'not', 'be', 'set'],
            'arg_overload_list': ['overload', 'this', 'list'],
            'arg_overload_not_set': "DO_NOT_SET",
        },
        ROOT_PATH.joinpath(CONFIG_DIR, CONFIG_FILE),
        log=log
    )
    try:
        config_load = 'TestA'
        config_data = sl.load_settings(
            config_load,
            {
                # "doesnt": ["split:,"],
                "overload_from_args": ["args_override:yes", "mandatory:yes"],
                "foobar": ["check:int"],
                "bar": ["mandatory:yes"],
                "arg_overload_list": ["args_override:yes", "split:,",],
                "arg_overload_not_set": [],
                "some_match": ["matching:foo|bar"],
                "some_match_list": ["split:,", "matching:foo|bar"],
                "test_list": [
                    "check:string.alphanumeric",
                    "split:,"
                ],
                "other_list": ["split:|"],
                "third_list": [
                    "split:|",
                    "check:string.alphanumeric"
                ],
                "empty_list": [
                    "split:,",
                ],
                "str_length": [
                    "length:2-10"
                ],
                "int_range": [
                    "range:2-50"
                ],
                "int_range_not_set": [
                    "range:2-50"
                ],
                "int_range_not_set_empty_set": [
                    "empty:"
                ],
                "bool_var": ["convert:bool"],
                "match_target": ["matching:foo"],
                "match_target_list": ["split:,", "matching:foo|bar|baz",],
                "match_source_a": ["in:match_target"],
                "match_source_b": ["in:match_target_list"],
                "match_source_list": ["split:,", "in:match_target_list"],
            }
        )
        print(f"[{config_load}] Load: {config_load} -> {dump_data(config_data)}")
    except ValueError as e:
        print(f"Could not load settings: {e}")

    try:
        config_load = 'TestB'
        config_data = sl.load_settings(
            config_load,
            {
                "email": [
                    "split:,",
                    "mandatory:yes",
                    "check:string.email.basic"
                ],
                "email_not_mandatory": [
                    "split:,",
                    # "mandatory:yes",
                    "check:string.email.basic"
                ],
                "email_bad": [
                    "split:,",
                    "mandatory:yes",
                    "check:string.email.basic"
                ]
            }
        )
        print(f"[{config_load}] Load: {config_load} -> {dump_data(config_data)}")
    except ValueError as e:
        print(f"Could not load settings: {e}")

    try:
        config_load = 'LoadTest'
        config_data = sl.load_settings(config_load)
        print(f"[{config_load}] Load: {config_load} -> {dump_data(config_data)}")
    except ValueError as e:
        print(f"Could not load settings: {e}")

    try:
        config_load = 'ErrorTest'
        config_data = sl.load_settings(
            config_load,
            {
                "some_value": [
                    "check:string.email.basic",
                ],
            }
        )
        print(f"[{config_load}] Load: {config_load} -> {dump_data(config_data)}")
    except ValueError as e:
        print(f"Could not load settings: {e}")


if __name__ == "__main__":
    main()
236
test-run/datetime_handling/datetime_helpers.py
Normal file
@@ -0,0 +1,236 @@
#!/usr/bin/env python3

"""
date string helper test
"""

from datetime import datetime
from corelibs.datetime_handling.datetime_helpers import (
    get_datetime_iso8601, get_system_timezone, parse_timezone_data, validate_date,
    parse_flexible_date, compare_dates, find_newest_datetime_in_list,
    parse_day_of_week_range, parse_time_range, times_overlap_or_connect, is_time_in_range,
    reorder_weekdays_from_today
)


def __get_datetime_iso8601():
    """
    Comment
    """
    for tz in [
        '', 'Asia/Tokyo', 'UTC', 'Europe/Vienna',
        'America/New_York', 'Australia/Sydney',
        'invalid'
    ]:
        print(f"{tz} -> {get_datetime_iso8601(tz)}")


def __parse_timezone_data():
    for tz in [
        'JST', 'KST', 'UTC', 'CET', 'CEST',
    ]:
        print(f"{tz} -> {parse_timezone_data(tz)}")


def __validate_date():
    """
    Comment
    """

    test_dates = [
        "2024-01-01",
        "2024-02-29",  # Leap year
        "2023-02-29",  # Invalid date
        "2024-13-01",  # Invalid month
        "2024-00-10",  # Invalid month
        "2024-04-31",  # Invalid day
        "invalid-date"
    ]

    for date_str in test_dates:
        is_valid = validate_date(date_str)
        print(f"Date '{date_str}' is valid: {is_valid}")

    # also test not before and not after
    not_before_dates = [
        "2023-12-31",
        "2024-01-01",
        "2024-02-29",
    ]
    not_after_dates = [
        "2024-12-31",
        "2024-11-30",
        "2025-01-01",
    ]

    for date_str in not_before_dates:
        datetime.strptime(date_str, "%Y-%m-%d")  # Ensure valid date format
        is_valid = validate_date(date_str, not_before=datetime.strptime("2024-01-01", "%Y-%m-%d"))
        print(f"Date '{date_str}' is valid (not before 2024-01-01): {is_valid}")

    for date_str in not_after_dates:
        is_valid = validate_date(date_str, not_after=datetime.strptime("2024-12-31", "%Y-%m-%d"))
        print(f"Date '{date_str}' is valid (not after 2024-12-31): {is_valid}")

    for date_str in test_dates:
        is_valid = validate_date(
            date_str,
            not_before=datetime.strptime("2024-01-01", "%Y-%m-%d"),
            not_after=datetime.strptime("2024-12-31", "%Y-%m-%d")
        )
        print(f"Date '{date_str}' is valid (2024 only): {is_valid}")


def __parse_flexible_date():
    for date_str in [
        "2024-01-01",
        "01/02/2024",
        "February 29, 2024",
        "Invalid date",
        "2025-01-01 12:18:10",
        "2025-01-01 12:18:10.566",
        "2025-01-01T12:18:10.566",
        "2025-01-01T12:18:10.566+02:00",
    ]:
        print(f"{date_str} -> {parse_flexible_date(date_str)}")


def __compare_dates():

    for date1, date2 in [
        ("2024-01-01 12:00:00", "2024-01-01 15:30:00"),
        ("2024-01-02", "2024-01-01"),
        ("2024-01-01T10:00:00+02:00", "2024-01-01T08:00:00Z"),
        ("invalid-date", "2024-01-01"),
        ("2024-01-01", "invalid-date"),
        ("invalid-date", "also-invalid"),
    ]:
        result = compare_dates(date1, date2)
        print(f"Comparing '{date1}' and '{date2}': {result}")


def __find_newest_datetime_in_list():
    date_list = [
        "2024-01-01 12:00:00",
        "2024-01-02 09:30:00",
        "2023-12-31 23:59:59",
        "2024-01-02 15:45:00",
        "2024-01-02T15:45:00.001",
        "invalid-date",
    ]
    newest_date = find_newest_datetime_in_list(date_list)
    print(f"Newest date in list: {newest_date}")


def __parse_day_of_week_range():
    ranges = [
        "Mon-Fri",
        "Saturday-Sunday",
        "Wed-Mon",
        "Fri-Fri",
        "mon-tue",
        "Invalid-Range"
    ]
    for range_str in ranges:
        try:
            days = parse_day_of_week_range(range_str)
            print(f"Day range '{range_str}' -> {days}")
        except ValueError as e:
            print(f"[!] Error parsing day range '{range_str}': {e}")


def __parse_time_range():
    ranges = [
        "08:00-17:00",
        "22:00-06:00",
        "12:30-12:30",
        "invalid-range"
    ]
    for range_str in ranges:
        try:
            start_time, end_time = parse_time_range(range_str)
            print(f"Time range '{range_str}' -> Start: {start_time}, End: {end_time}")
        except ValueError as e:
            print(f"[!] Error parsing time range '{range_str}': {e}")


def __times_overlap_or_connect():
    time_format = "%H:%M"
    time_ranges = [
        (("08:00", "12:00"), ("11:00", "15:00")),  # Overlap
        (("22:00", "02:00"), ("01:00", "05:00")),  # Overlap across midnight
        (("10:00", "12:00"), ("12:00", "14:00")),  # Connect
        (("09:00", "11:00"), ("12:00", "14:00")),  # No overlap
    ]
    for (start1, end1), (start2, end2) in time_ranges:
        start1 = datetime.strptime(start1, time_format).time()
        end1 = datetime.strptime(end1, time_format).time()
        start2 = datetime.strptime(start2, time_format).time()
        end2 = datetime.strptime(end2, time_format).time()
        overlap = times_overlap_or_connect((start1, end1), (start2, end2))
        overlap_connect = times_overlap_or_connect((start1, end1), (start2, end2), True)
        print(f"Time ranges {start1}-{end1} and {start2}-{end2} overlap/connect: {overlap}/{overlap_connect}")


def __is_time_in_range():
    time_format = "%H:%M:%S"
    test_cases = [
        ("10:00:00", "09:00:00", "11:00:00"),
        ("23:30:00", "22:00:00", "01:00:00"),  # Across midnight
        ("05:00:00", "06:00:00", "10:00:00"),  # Not in range
        ("12:00:00", "12:00:00", "12:00:00"),  # Exact match
    ]
    for (check_time, start_time, end_time) in test_cases:
        start_time = datetime.strptime(start_time, time_format).time()
        end_time = datetime.strptime(end_time, time_format).time()
        in_range = is_time_in_range(
            f"{check_time}", start_time.strftime("%H:%M:%S"), end_time.strftime("%H:%M:%S")
        )
        print(f"Time {check_time} in range {start_time}-{end_time}: {in_range}")


def __reorder_weekdays_from_today():
    for base_day in [
        "Tue", "Wed", "Sunday", "Fri", "InvalidDay"
    ]:
        try:
            reordered_days = reorder_weekdays_from_today(base_day)
            print(f"Reordered weekdays from {base_day}: {reordered_days}")
        except ValueError as e:
            print(f"[!] Error reordering weekdays from '{base_day}': {e}")


def main() -> None:
    """
    Comment
    """
    print("\nDatetime ISO 8601 tests:\n")
    __get_datetime_iso8601()
    print("\nSystem time test:")
    print(f"System time: {get_system_timezone()}")
    print("\nParse timezone data tests:\n")
    __parse_timezone_data()
    print("\nValidate date tests:\n")
    __validate_date()
    print("\nParse flexible date tests:\n")
    __parse_flexible_date()
    print("\nCompare dates tests:\n")
    __compare_dates()
    print("\nFind newest datetime in list tests:\n")
    __find_newest_datetime_in_list()
    print("\nParse day of week range tests:\n")
    __parse_day_of_week_range()
    print("\nParse time range tests:\n")
    __parse_time_range()
    print("\nTimes overlap or connect tests:\n")
    __times_overlap_or_connect()
    print("\nIs time in range tests:\n")
    __is_time_in_range()
    print("\nReorder weekdays from today tests:\n")
    __reorder_weekdays_from_today()


if __name__ == "__main__":
    main()

# __END__
92
test-run/datetime_handling/timestamp_convert.py
Normal file
@@ -0,0 +1,92 @@
#!/usr/bin/env python3

"""
timestamp string checks
"""

from corelibs.datetime_handling.timestamp_convert import (
    convert_timestamp, seconds_to_string, convert_to_seconds, TimeParseError, TimeUnitError
)


def main() -> None:
    """
    Comment
    """
    print("\n--- Testing convert_to_seconds ---\n")
    test_cases = [
        "5M 6d",  # 5 months, 6 days
        "2h 30m 45s",  # 2 hours, 30 minutes, 45 seconds
        "1Y 2M 3d",  # 1 year, 2 months, 3 days
        "1h",  # 1 hour
        "30m",  # 30 minutes
        "2 hours 15 minutes",  # 2 hours, 15 minutes
        "1d 12h",  # 1 day, 12 hours
        "3M 2d 4h",  # 3 months, 2 days, 4 hours
        "45s",  # 45 seconds
        "-45s",  # -45 seconds
        "-1h",  # -1 hour
        "-30m",  # -30 minutes
        "-2h 30m 45s",  # -2 hours, 30 minutes, 45 seconds
        "-1d 12h",  # -1 day, 12 hours
        "-3M 2d 4h",  # -3 months, 2 days, 4 hours
        "-1Y 2M 3d",  # -1 year, 2 months, 3 days
        "-2 hours 15 minutes",  # -2 hours, 15 minutes
        "-1 year 2 months",  # -1 year, 2 months
        "-2Y 6M 15d 8h 30m 45s",  # Complex negative example
        "1 year 2 months",  # 1 year, 2 months
        "2Y 6M 15d 8h 30m 45s",  # Complex example
        # invalid tests
        "5M 6d 2M",  # months appears twice
        "2h 30m 45s 1h",  # hours appears twice
        "1d 2 days",  # days appears twice (short and long form)
        "30m 45 minutes",  # minutes appears twice
        "1Y 2 years",  # years appears twice
        "1x 2 yrs",  # invalid names

        123,  # int
        789.12,  # float
        456.56,  # float, high
        "4566",  # int as string
        "5551.12",  # float as string
        "5551.56",  # float, high as string
    ]

    for time_string in test_cases:
        try:
            result = convert_to_seconds(time_string)
            print(f"Human readable to seconds: {time_string} => {result}")
        except (TimeParseError, TimeUnitError) as e:
            print(f"Error encountered for {time_string}: {type(e).__name__}: {e}")

    print("\n--- Testing seconds_to_string and convert_timestamp ---\n")

    test_values = [
        'as is string',
        -172800.001234,  # -2 days, -0.001234 seconds
        -90061.789,  # -1 day, -1 hour, -1 minute, -1.789 seconds
        -3661.456,  # -1 hour, -1 minute, -1.456 seconds
        -65.123,  # -1 minute, -5.123 seconds
        -1.5,  # -1.5 seconds
        -0.001,  # -1 millisecond
        -0.000001,  # -1 microsecond
        0,  # 0 seconds
        0.000001,  # 1 microsecond
        0.001,  # 1 millisecond
        1.5,  # 1.5 seconds
        65.123,  # 1 minute, 5.123 seconds
        3661.456,  # 1 hour, 1 minute, 1.456 seconds
        90061.789,  # 1 day, 1 hour, 1 minute, 1.789 seconds
        172800.001234  # 2 days, 0.001234 seconds
    ]

    for time_value in test_values:
        result = seconds_to_string(time_value, show_microseconds=True)
        result_alt = convert_timestamp(time_value, show_microseconds=True)
        print(f"Seconds to human readable: {time_value} => {result} / {result_alt}")


if __name__ == "__main__":
    main()

# __END__
2
test-run/db_handling/database/.gitignore
vendored
Normal file
@@ -0,0 +1,2 @@
*
!.gitignore
2
test-run/db_handling/log/.gitignore
vendored
Normal file
@@ -0,0 +1,2 @@
*
!.gitignore
139
test-run/db_handling/sql_main.py
Normal file
@@ -0,0 +1,139 @@
"""
SQL Main wrapper test
"""

from pathlib import Path
from uuid import uuid4
import json
from corelibs.debug_handling.dump_data import dump_data
from corelibs.logging_handling.log import Log, Logger
from corelibs.db_handling.sql_main import SQLMain

SCRIPT_PATH: Path = Path(__file__).resolve().parent
ROOT_PATH: Path = SCRIPT_PATH
DATABASE_DIR: Path = Path("database")
LOG_DIR: Path = Path("log")


def main() -> None:
    """
    Comment
    """
    log = Log(
        log_path=ROOT_PATH.joinpath(LOG_DIR, 'sqlite_main.log'),
        log_name="SQLite Main",
        log_settings={
            "log_level_console": 'DEBUG',
            "log_level_file": 'DEBUG',
        }
    )
    sql_main = SQLMain(
        log=Logger(log.get_logger_settings()),
        db_ident=f"sqlite:{ROOT_PATH.joinpath(DATABASE_DIR, 'test_sqlite_main.db')}"
    )
    if sql_main.connected():
        log.info("SQL Main connected successfully")
    else:
        log.error('SQL Main connection failed')
    if sql_main.dbh is None:
        log.error('SQL Main DBH instance is None')
        return

    if sql_main.dbh.trigger_exists('trg_test_a_set_date_updated_on_update'):
        log.info("Trigger trg_test_a_set_date_updated_on_update exists")
    if sql_main.dbh.table_exists('test_a'):
        log.info("Table test_a exists, dropping for clean test")
        sql_main.dbh.execute_query("DROP TABLE test_a;")
    # create a dummy table
    table_sql = """
    CREATE TABLE IF NOT EXISTS test_a (
        test_a_id INTEGER PRIMARY KEY,
        date_created TEXT DEFAULT (strftime('%Y-%m-%d %H:%M:%f', 'now')),
        date_updated TEXT,
        uid TEXT NOT NULL UNIQUE,
        set_current_timestamp TEXT DEFAULT CURRENT_TIMESTAMP,
        text_a TEXT,
        content,
        int_a INTEGER,
        float_a REAL
    );
    """

    result = sql_main.dbh.execute_query(table_sql)
    log.debug(f"Create table result: {result}")
    trigger_sql = """
    CREATE TRIGGER trg_test_a_set_date_updated_on_update
    AFTER UPDATE ON test_a
    FOR EACH ROW
    WHEN OLD.date_updated IS NULL OR NEW.date_updated = OLD.date_updated
    BEGIN
        UPDATE test_a
        SET date_updated = (strftime('%Y-%m-%d %H:%M:%f', 'now'))
        WHERE test_a_id = NEW.test_a_id;
    END;
    """
    result = sql_main.dbh.execute_query(trigger_sql)
    log.debug(f"Create trigger result: {result}")
    result = sql_main.dbh.meta_data_detail('test_a')
    log.debug(f"Table meta data detail: {dump_data(result)}")
    # INSERT DATA
    sql = """
    INSERT INTO test_a (uid, text_a, content, int_a, float_a)
    VALUES (?, ?, ?, ?, ?)
    RETURNING test_a_id, uid;
    """
    result = sql_main.dbh.execute_query(
        sql,
        (
            str(uuid4()),
            'Some text A',
            json.dumps({'foo': 'bar', 'number': 42}),
            123,
            123.456,
        )
    )
    log.debug(f"[1] Insert data result: {dump_data(result)}")
    __uid: str = ''
    if result is not False:
        # first one only of interest
        result = dict(result[0])
        __uid = str(result.get('uid', ''))
    # second insert
    result = sql_main.dbh.execute_query(
        sql,
        (
            str(uuid4()),
            'Some text A',
            json.dumps({'foo': 'bar', 'number': 42}),
            123,
            123.456,
        )
    )
    log.debug(f"[2] Insert data result: {dump_data(result)}")
    result = sql_main.dbh.execute_query("SELECT * FROM test_a;")
    log.debug(f"Select data result: {dump_data(result)}")
    result = sql_main.dbh.return_one("SELECT * FROM test_a WHERE uid = ?;", (__uid,))
    log.debug(f"Fetch row result: {dump_data(result)}")
    sql = """
    UPDATE test_a
    SET text_a = ?
    WHERE uid = ?;
    """
    result = sql_main.dbh.execute_query(
        sql,
        (
            'Some updated text A',
            __uid,
        )
    )
    log.debug(f"Update data result: {dump_data(result)}")
    result = sql_main.dbh.return_one("SELECT * FROM test_a WHERE uid = ?;", (__uid,))
    log.debug(f"Fetch row after update result: {dump_data(result)}")

    sql_main.close()


if __name__ == "__main__":
    main()

# __END__
146
test-run/db_handling/sqlite_io.py
Normal file
@@ -0,0 +1,146 @@
"""
SQLite IO test
"""

from pathlib import Path
from uuid import uuid4
import json
import sqlite3
from corelibs.debug_handling.dump_data import dump_data
from corelibs.logging_handling.log import Log, Logger
from corelibs.db_handling.sqlite_io import SQLiteIO

SCRIPT_PATH: Path = Path(__file__).resolve().parent
ROOT_PATH: Path = SCRIPT_PATH
DATABASE_DIR: Path = Path("database")
LOG_DIR: Path = Path("log")


def main() -> None:
    """
    Comment
    """
    log = Log(
        log_path=ROOT_PATH.joinpath(LOG_DIR, 'sqlite_io.log'),
        log_name="SQLite IO",
        log_settings={
            "log_level_console": 'DEBUG',
            "log_level_file": 'DEBUG',
        }
    )
    db = SQLiteIO(
        log=Logger(log.get_logger_settings()),
        db_name=ROOT_PATH.joinpath(DATABASE_DIR, 'test_sqlite_io.db'),
        row_factory='Dict'
    )
    if db.db_connected():
        log.info(f"Connected to DB: {db.db_name}")
    if db.trigger_exists('trg_test_a_set_date_updated_on_update'):
        log.info("Trigger trg_test_a_set_date_updated_on_update exists")
    if db.table_exists('test_a'):
        log.info("Table test_a exists, dropping for clean test")
        db.execute_query("DROP TABLE test_a;")
    # create a dummy table
    table_sql = """
    CREATE TABLE IF NOT EXISTS test_a (
        test_a_id INTEGER PRIMARY KEY,
        date_created TEXT DEFAULT (strftime('%Y-%m-%d %H:%M:%f', 'now')),
        date_updated TEXT,
        uid TEXT NOT NULL UNIQUE,
        set_current_timestamp TEXT DEFAULT CURRENT_TIMESTAMP,
        text_a TEXT,
        content,
        int_a INTEGER,
        float_a REAL
    );
    """
    result = db.execute_query(table_sql)
    log.debug(f"Create table result: {result}")
    trigger_sql = """
    CREATE TRIGGER trg_test_a_set_date_updated_on_update
    AFTER UPDATE ON test_a
    FOR EACH ROW
    WHEN OLD.date_updated IS NULL OR NEW.date_updated = OLD.date_updated
    BEGIN
        UPDATE test_a
        SET date_updated = (strftime('%Y-%m-%d %H:%M:%f', 'now'))
        WHERE test_a_id = NEW.test_a_id;
    END;
    """
    result = db.execute_query(trigger_sql)
    log.debug(f"Create trigger result: {result}")
    result = db.meta_data_detail('test_a')
    log.debug(f"Table meta data detail: {dump_data(result)}")
    # INSERT DATA
    sql = """
    INSERT INTO test_a (uid, text_a, content, int_a, float_a)
    VALUES (?, ?, ?, ?, ?)
    RETURNING test_a_id, uid;
    """
    result = db.execute_query(
        sql,
        (
            str(uuid4()),
            'Some text A',
            json.dumps({'foo': 'bar', 'number': 42}),
            123,
            123.456,
        )
    )
    log.debug(f"[1] Insert data result: {dump_data(result)}")
    __uid: str = ''
    if result is not False:
        # first one only of interest
        result = dict(result[0])
        __uid = str(result.get('uid', ''))
    # second insert
    result = db.execute_query(
        sql,
        (
            str(uuid4()),
            'Some text A',
            json.dumps({'foo': 'bar', 'number': 42}),
            123,
            123.456,
        )
    )
    log.debug(f"[2] Insert data result: {dump_data(result)}")
    result = db.execute_query("SELECT * FROM test_a;")
    log.debug(f"Select data result: {dump_data(result)}")
    result = db.return_one("SELECT * FROM test_a WHERE uid = ?;", (__uid,))
    log.debug(f"Fetch row result: {dump_data(result)}")
    sql = """
    UPDATE test_a
    SET text_a = ?
    WHERE uid = ?;
    """
    result = db.execute_query(
        sql,
        (
            'Some updated text A',
            __uid,
        )
    )
    log.debug(f"Update data result: {dump_data(result)}")
    result = db.return_one("SELECT * FROM test_a WHERE uid = ?;", (__uid,))
    log.debug(f"Fetch row after update result: {dump_data(result)}")

    db.db_close()

    db = SQLiteIO(
        log=Logger(log.get_logger_settings()),
        db_name=ROOT_PATH.joinpath(DATABASE_DIR, 'test_sqlite_io.db'),
        row_factory='Row'
    )
    result = db.return_one("SELECT * FROM test_a WHERE uid = ?;", (__uid,))
    if result is not None and result is not False:
        log.debug(f"Fetch row result: {dump_data(result)} -> {dict(result)} -> {result.keys()}")
        log.debug(f"Access via index: {result[5]} -> {result['text_a']}")
        if isinstance(result, sqlite3.Row):
            log.debug('Result is sqlite3.Row as expected')


if __name__ == "__main__":
    main()

# __END__
34
test-run/encryption/symmetric_encryption.py
Normal file
@@ -0,0 +1,34 @@
#!/usr/bin/env python3

"""
Symmetric encryption test
"""

import json
from corelibs.debug_handling.dump_data import dump_data
from corelibs.encryption_handling.symmetric_encryption import SymmetricEncryption


def main() -> None:
    """
    Comment
    """
    password = "strongpassword"
    se = SymmetricEncryption(password)

    plaintext = "Hello, World!"
    ciphertext = se.encrypt_with_metadata_return_str(plaintext)
    decrypted = se.decrypt_with_metadata(ciphertext)
    print(f"Encrypted: {dump_data(json.loads(ciphertext))}")
    print(f"Input: {plaintext} -> {decrypted}")

    static_ciphertext = SymmetricEncryption.encrypt_data(plaintext, password)
    decrypted = SymmetricEncryption.decrypt_data(static_ciphertext, password)
    print(f"Static Encrypted: {dump_data(json.loads(static_ciphertext))}")
    print(f"Input: {plaintext} -> {decrypted}")


if __name__ == "__main__":
    main()

# __END__
31
test-run/file_handling/file_bom_check.py
Normal file
@@ -0,0 +1,31 @@
#!/usr/bin/env python3

"""
BOM check for files
"""

from pathlib import Path
from corelibs.file_handling.file_bom_encoding import is_bom_encoded, is_bom_encoded_info
from corelibs.debug_handling.dump_data import dump_data


def main() -> None:
    """
    Check files for BOM encoding
    """
    base_path = Path(__file__).resolve().parent
    for file_path in [
        'test-data/sample_with_bom.csv',
        'test-data/sample_without_bom.csv',
    ]:
        has_bom = is_bom_encoded(base_path.joinpath(file_path))
        bom_info = is_bom_encoded_info(base_path.joinpath(file_path))
        print(f'File: {file_path}')
        print(f'  Has BOM: {has_bom}')
        print(f'  BOM Info: {dump_data(bom_info)}')


if __name__ == "__main__":
    main()

# __END__
6
test-run/file_handling/test-data/sample_with_bom.csv
Normal file
@@ -0,0 +1,6 @@
Name,Age,City,Country
John Doe,25,New York,USA
Jane Smith,30,London,UK
山田太郎,28,東京,Japan
María García,35,Madrid,Spain
François Dupont,42,Paris,France
6
test-run/file_handling/test-data/sample_without_bom.csv
Normal file
@@ -0,0 +1,6 @@
Name,Age,City,Country
John Doe,25,New York,USA
Jane Smith,30,London,UK
山田太郎,28,東京,Japan
María García,35,Madrid,Spain
François Dupont,42,Paris,France
169
test-run/iterator_handling/data_search.py
Normal file
@@ -0,0 +1,169 @@
#!/usr/bin/env python3

"""
Search data tests
iterator_handling.data_search
"""

from corelibs.debug_handling.dump_data import dump_data
from corelibs.iterator_handling.data_search import find_in_array_from_list, ArraySearchList


def main() -> None:
    """
    Run single, combined, list-value and not-found searches against sample data
    """
    data = [
        {
            "lookup_value_p": "A01",
            "lookup_value_c": "B01",
            "replace_value": "R01",
        },
        {
            "lookup_value_p": "A02",
            "lookup_value_c": "B02",
            "replace_value": "R02",
        },
        {
            "lookup_value_p": "A03",
            "lookup_value_c": "B03",
            "replace_value": "R03",
        },
    ]
    test_foo = ArraySearchList(
        key="lookup_value_p",
        value="A01"
    )
    result = find_in_array_from_list(data, [test_foo])
    print(f"Search A: {dump_data(test_foo)} -> {dump_data(result)}")

    search: list[ArraySearchList] = [
        {
            "key": "lookup_value_p",
            "value": "A01"
        },
        {
            "key": "lookup_value_c",
            "value": "B01"
        },
    ]
    result = find_in_array_from_list(data, search)
    print(f"Search B: {dump_data(search)} -> {dump_data(result)}")

    search: list[ArraySearchList] = [
        {
            "key": "lookup_value_p",
            "value": "A01"
        },
        {
            "key": "lookup_value_c",
            "value": "B01"
        },
        {
            "key": "lookup_value_c",
            "value": "B02"
        },
    ]
    try:
        result = find_in_array_from_list(data, search)
        print(f"Search C: {dump_data(search)} -> {dump_data(result)}")
    except KeyError as e:
        print(f"Search C raised KeyError: {e}")

    search: list[ArraySearchList] = [
        {
            "key": "lookup_value_p",
            "value": "A01"
        },
        {
            "key": "lookup_value_c",
            "value": ["B01", "B02"]
        },
    ]
    try:
        result = find_in_array_from_list(data, search)
        print(f"Search D: {dump_data(search)} -> {dump_data(result)}")
    except KeyError as e:
        print(f"Search D raised KeyError: {e}")

    search: list[ArraySearchList] = [
        {
            "key": "lookup_value_p",
            "value": ["A01", "A03"]
        },
        {
            "key": "lookup_value_c",
            "value": ["B01", "B02"]
        },
    ]
    try:
        result = find_in_array_from_list(data, search)
        print(f"Search E: {dump_data(search)} -> {dump_data(result)}")
    except KeyError as e:
        print(f"Search E raised KeyError: {e}")

    search: list[ArraySearchList] = [
        {
            "key": "lookup_value_p",
            "value": "NOT FOUND"
        },
    ]
    try:
        result = find_in_array_from_list(data, search)
        print(f"Search F: {dump_data(search)} -> {dump_data(result)}")
    except KeyError as e:
        print(f"Search F raised KeyError: {e}")

    data = [
        {
            "sd_user_id": "1593",
            "email": "",
            "employee_id": ""
        },
        {
            "sd_user_id": "1592",
            "email": "",
            "employee_id": ""
        },
        {
            "sd_user_id": "1596",
            "email": "",
            "employee_id": ""
        },
        {
            "sd_user_id": "1594",
            "email": "",
            "employee_id": ""
        },
        {
            "sd_user_id": "1595",
            "email": "",
            "employee_id": ""
        },
        {
            "sd_user_id": "1861",
            "email": "",
            "employee_id": ""
        },
        {
            "sd_user_id": "1862",
            "email": "",
            "employee_id": ""
        },
        {
            "sd_user_id": "1860",
            "email": "",
            "employee_id": ""
        }
    ]
    result = find_in_array_from_list(data, [ArraySearchList(
        key="sd_user_id",
        value="1593"
    )])
    print(f"Search G: -> {dump_data(result)}")


if __name__ == "__main__":
    main()

# __END__
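# A small stand-in illustrating the matching semantics exercised above: every
# search entry must match its row, and a search value may be a scalar or a list
# of accepted values. The names and exact behavior of the real corelibs
# find_in_array_from_list are assumptions here.
from typing import Any, TypedDict


class SearchEntry(TypedDict):
    """Hypothetical stand-in for the ArraySearchList shape used above."""
    key: str
    value: Any


def find_all(data: list[dict[str, Any]], searches: list[SearchEntry]) -> list[dict[str, Any]]:
    def matches(row: dict[str, Any]) -> bool:
        for search in searches:
            wanted = search["value"]
            accepted = wanted if isinstance(wanted, list) else [wanted]
            if row.get(search["key"]) not in accepted:
                return False
        return True

    return [row for row in data if matches(row)]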
120
test-run/iterator_handling/dict_helpers.py
Normal file
@@ -0,0 +1,120 @@
"""
Iterator helper testing
"""

from typing import Any
from corelibs.debug_handling.dump_data import dump_data
from corelibs.iterator_handling.dict_mask import mask
from corelibs.iterator_handling.dict_helpers import set_entry


def __mask():
    data = {
        # "user": "john",
        # "encryption_key": "Secret key",
        # "ENCRYPTION.TEST": "Secret key test",
        # "inside_password_test": "Hide this",
        "password": ["secret1", "secret2"],  # List value gets masked
        # "config": {
        #     "db_password": {"primary": "secret", "backup": "secret2"},  # Dict value gets masked
        #     "api_keys": ["key1", "key2", "key3"]  # List value gets masked
        # },
        # "items": [  # List value that doesn't get masked, but gets processed recursively
        #     {"name": "item1", "secret_key": "itemsecret"},
        #     {"name": "item2", "passwords": ["pass1", "pass2"]}
        # ],
        # "normal_list": ["item1", "item2", "item3"]  # Normal list, not masked
    }
    data = {
        "config": {
            # "password": ["secret1", "secret2"],
            # "password_other": {"password": ["secret1", "secret2"]},
            # "database": {
            #     "host": "localhost",
            #     "password": "db_secret",
            #     "users": [
            #         {"name": "admin", "password": "admin123"},
            #         {"name": "user", "secret_key": "user456"}
            #     ]
            # },
            # "api": {
            #     # "endpoints": ["api1", "api2"],
            #     "encryption_settings": {
            #         "enabled": True,
            #         "secret": "api_secret"
            #     }
            # }
            "secret_key": "normal_value",
            "api_key": "normal_value",
            "my_key_value": "normal_value",
        }
    }
    data = {
        "basic": {
            "log_level_console": "DEBUG",
            "log_level_file": "DEBUG",
            "storage_interface": "sqlite",
            "content_start_date": "2023-1-1",
            "encryption_key": "ENCRYPTION_KEY"
        },
        "email": {
            "alert_email": [
                "test+z-sd@tequila.jp"
            ]
        },
        "poller": {
            "max_forks": "1",
            "interface": "Zac"
        },
        "pusher": {
            "max_forks": "3",
            "interface": "Screendragon"
        },
        "api:Zac": {
            "type": "zac",
            "client_id": "oro_zac_demo",
            "client_secret": "CLIENT_SECRET",
            "username": "zacuser",
            "password": "ZACuser3",
            "hostname": "e-gra2.zac.ai",
            "appname": "e-gra2_api_trial",
            "api_path": "b/api/v2"
        },
        "api:Screendragon": {
            "type": "screendragon",
            "client_id": "omniprostaging",
            "encryption_client": "SOME_SECRET",
            "client_encryption": "SOME_SECRET",
            "secret_client": "SOME_SECRET",
            "client_secret": "SOME_SECRET",
            "hostname": "omniprostaging.screendragon.com",
            "appname": "sdapi",
            "api_path": "api"
        }
    }
    result = mask(data)
    print(f"** In: {dump_data(data)}")
    print(f"===> Masked: {dump_data(result)}")


def __set_dict_value_entry():

    dict_empty: dict[str, Any] = {}
    new = set_entry(dict_empty, 'a.b.c', 1)
    print(f"[1] Set dict entry: {dump_data(new)}")
    new = set_entry(new, 'dict', {'key': 'value'})
    print(f"[2] Set dict entry: {dump_data(new)}")
    new = set_entry(new, 'list', [1, 2, 3])
    print(f"[3] Set dict entry: {dump_data(new)}")


def main():
    """
    Test: corelibs.iterator_handling.dict_helpers
    """
    __mask()
    __set_dict_value_entry()


if __name__ == "__main__":
    main()
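# A minimal dotted-path setter matching the set_entry('a.b.c', 1) calls above
# (assumed behavior: intermediate dicts are created on demand; the real
# corelibs set_entry may handle more cases).
from typing import Any


def set_entry_sketch(data: dict[str, Any], path: str, value: Any) -> dict[str, Any]:
    node = data
    keys = path.split(".")
    for key in keys[:-1]:
        node = node.setdefault(key, {})   # create missing intermediate dicts
    node[keys[-1]] = value
    return data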
89
test-run/iterator_handling/list_helpers.py
Normal file
@@ -0,0 +1,89 @@
"""
test list helpers
"""

from typing import Any
from corelibs.debug_handling.dump_data import dump_data
from corelibs.iterator_handling.list_helpers import is_list_in_list, convert_to_list, make_unique_list_of_dicts
from corelibs.iterator_handling.fingerprint import dict_hash_crc


def __test_is_list_in_list_a():
    list_a = [1, "hello", 3.14, True, "world"]
    list_b = ["hello", True, 42]
    result = is_list_in_list(list_a, list_b)
    print(f"RESULT: {result}")


def __convert_list():
    source = "hello"
    result = convert_to_list(source)
    print(f"IN: {source} -> {result}")


def __make_unique_list_of_dicts():
    dict_list = [
        {"a": 1, "b": 2, "nested": {"x": 10, "y": 20}},
        {"a": 1, "b": 2, "nested": {"x": 10, "y": 20}},
        {"b": 2, "a": 1, "nested": {"y": 20, "x": 10}},
        {"b": 2, "a": 1, "nested": {"y": 20, "x": 30}},
        {"a": 3, "b": 4, "nested": {"x": 30, "y": 40}}
    ]
    unique_dicts = make_unique_list_of_dicts(dict_list)
    dhf = dict_hash_crc(unique_dicts)
    print(f"Unique dicts: {dump_data(unique_dicts)} [{dhf}]")

    dict_list = [
        {"a": 1, 1: "one"},
        {1: "one", "a": 1},
        {"a": 2, 1: "one"}
    ]
    unique_dicts = make_unique_list_of_dicts(dict_list)
    dhf = dict_hash_crc(unique_dicts)
    print(f"Unique dicts: {dump_data(unique_dicts)} [{dhf}]")

    dict_list = [
        {"a": 1, "b": [1, 2, 3]},
        {"b": [1, 2, 3], "a": 1},
        {"a": 1, "b": [1, 2, 4]},
        1, 2, "String", 1, "Foobar"
    ]
    unique_dicts = make_unique_list_of_dicts(dict_list)
    dhf = dict_hash_crc(unique_dicts)
    print(f"Unique dicts: {dump_data(unique_dicts)} [{dhf}]")

    dict_list: list[Any] = [
        [],
        {},
        [],
        {},
        {"a": []},
        {"a": []},
        {"a": {}},
        {"a": {}},
    ]
    unique_dicts = make_unique_list_of_dicts(dict_list)
    dhf = dict_hash_crc(unique_dicts)
    print(f"Unique dicts: {dump_data(unique_dicts)} [{dhf}]")

    dict_list: list[Any] = [
        (1, 2),
        (1, 2),
        (2, 3),
    ]
    unique_dicts = make_unique_list_of_dicts(dict_list)
    dhf = dict_hash_crc(unique_dicts)
    print(f"Unique dicts: {dump_data(unique_dicts)} [{dhf}]")


def main():
    """List helpers test runner"""
    __test_is_list_in_list_a()
    __convert_list()
    __make_unique_list_of_dicts()


if __name__ == "__main__":
    main()

# __END__
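# One way to de-duplicate a mixed list regardless of dict key order, as the
# test data above requires (mixed str/int keys, nested dicts, plain scalars,
# tuples). This canonical-form fingerprint is an assumption, not necessarily
# the corelibs approach.
from typing import Any


def _canonical(value: Any) -> Any:
    """Reduce a value to a hashable, order-independent canonical form."""
    if isinstance(value, dict):
        items = sorted(((repr(k), _canonical(v)) for k, v in value.items()), key=lambda kv: kv[0])
        return ("dict", tuple(items))
    if isinstance(value, (list, tuple)):
        return (type(value).__name__, tuple(_canonical(v) for v in value))
    return value


def unique_items(items: list[Any]) -> list[Any]:
    seen: set[Any] = set()
    out: list[Any] = []
    for item in items:
        fingerprint = _canonical(item)
        if fingerprint not in seen:
            seen.add(fingerprint)
            out.append(item)
    return out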
54
test-run/json_handling/jmespath_helper.py
Normal file
@@ -0,0 +1,54 @@
#!/usr/bin/env python3

"""
JMESPath testing
"""

from corelibs.debug_handling.dump_data import dump_data
from corelibs.json_handling.jmespath_helper import jmespath_search


def main() -> None:
    """
    Run a set of JMESPath queries against a sample structure
    """
    __set = {
        'a': 'b',
        'foobar': [1, 2, 'a'],
        'bar': {
            'a': 1,
            'b': 'c'
        },
        'baz': [
            {
                'aa': 1,
                'ab': 'cc'
            },
            {
                'ba': 2,
                'bb': 'dd'
            },
        ],
        'foo': {
            'a': [1, 2, 3],
            'b': ['a', 'b', 'c']
        }
    }

    __get = [
        'a',
        'bar.a',
        'foo.a',
        'baz[].aa',
        "[?\"c\" && contains(\"c\", 'b')]",
        "[?contains(\"c\", 'b')]",
    ]
    for __jmespath in __get:
        result = jmespath_search(__set, __jmespath)
        print(f"GET {__jmespath}: {dump_data(result)}")


if __name__ == "__main__":
    main()

# __END__
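# The helper above presumably wraps jmespath.search(); the equivalent direct
# call with the third-party `jmespath` package looks like this:
import jmespath

sample = {"baz": [{"aa": 1, "ab": "cc"}, {"ba": 2, "bb": "dd"}]}
print(jmespath.search("baz[].aa", sample))   # projection over a list -> [1]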
52
test-run/json_handling/json_replace.py
Normal file
@@ -0,0 +1,52 @@
#!/usr/bin/env python3

"""
JSON content replace tests
"""

from deepdiff import DeepDiff
from corelibs.debug_handling.dump_data import dump_data
from corelibs.json_handling.json_helper import modify_with_jsonpath


def main() -> None:
    """
    Modify values via JSONPath and diff the result against the original
    """
    __data = {
        'a': 'b',
        'foobar': [1, 2, 'a'],
        'bar': {
            'a': 1,
            'b': 'c'
        },
        'baz': [
            {
                'aa': 1,
                'ab': 'cc'
            },
            {
                'ba': 2,
                'bb': 'dd'
            },
        ],
        'foo': {
            'a': [1, 2, 3],
            'b': ['a', 'b', 'c']
        }
    }

    # Modify some values using JSONPath
    __replace_data = modify_with_jsonpath(__data, 'bar.a', 42)
    __replace_data = modify_with_jsonpath(__replace_data, 'foo.b[1]', 'modified')
    __replace_data = modify_with_jsonpath(__replace_data, 'baz[0].ab', 'changed')

    print(f"Original Data:\n{dump_data(__data)}\n")
    print(f"Modified Data:\n{dump_data(__replace_data)}\n")
    print(f"Differences:\n{dump_data(DeepDiff(__data, __replace_data, verbose_level=2))}\n")


if __name__ == "__main__":
    main()

# __END__
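# A sketch of how modify_with_jsonpath() could be built on the third-party
# jsonpath-ng package (an assumption; corelibs may implement path updates
# itself). The deepcopy keeps the original untouched, which is what makes the
# DeepDiff comparison above meaningful.
from copy import deepcopy
from typing import Any

from jsonpath_ng import parse


def modify_sketch(data: dict[str, Any], path: str, value: Any) -> dict[str, Any]:
    updated = deepcopy(data)
    parse(path).update(updated, value)   # set the value at every path match
    return updated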
142
test-run/logging_handling/log.py
Normal file
@@ -0,0 +1,142 @@
"""
Log logging_handling.log testing
"""

# import atexit
import sys
from pathlib import Path
# this is for testing only
from corelibs.logging_handling.log import Log, Logger, ConsoleFormat, ConsoleFormatSettings
from corelibs.debug_handling.debug_helpers import exception_stack, call_stack
from corelibs.logging_handling.logging_level_handling.logging_level import LoggingLevel


def main():
    """
    Log testing
    """
    script_path: Path = Path(__file__).resolve().parent
    log = Log(
        log_path=script_path.joinpath('log', 'test.log'),
        log_name="Test Log",
        log_settings={
            "log_level_console": 'DEBUG',
            # "log_level_console": None,
            "log_level_file": 'DEBUG',
            # "console_color_output_enabled": False,
            "per_run_log": True,
            # "console_format_type": ConsoleFormatSettings.NONE,
            # "console_format_type": ConsoleFormatSettings.MINIMAL,
            # "console_format_type": ConsoleFormat.TIME_MICROSECONDS | ConsoleFormat.NAME | ConsoleFormat.LEVEL,
            "console_format_type": None,
            # "console_format_type": ConsoleFormat.NAME,
            # "console_format_type": (
            #     ConsoleFormat.TIME | ConsoleFormat.TIMEZONE | ConsoleFormat.LINENO | ConsoleFormat.LEVEL
            # ),
        }
    )
    logn = Logger(log.get_logger_settings())

    log.info("ConsoleFormatType FILE is: %s", ConsoleFormat.FILE)
    log.info("ConsoleFormatSettings ALL is: %s", ConsoleFormatSettings.ALL)
    log.info("ConsoleFormatSettings lookup is: %s", ConsoleFormatSettings.from_string('ALL'))

    log.logger.debug('[NORMAL] Debug test: %s', log.logger.name)
    log.lg.debug('[NORMAL] Debug test: %s', log.logger.name)
    log.debug('[NORMAL-] Debug test: %s', log.logger.name)
    logn.lg.debug('[NORMAL N] Debug test: %s', log.logger.name)
    logn.debug('[NORMAL N-] Debug test: %s', log.logger.name)
    log.logger.info('[NORMAL] Info test: %s', log.logger.name)
    log.info('[NORMAL-] Info test: %s', log.logger.name)
    log.logger.warning('[NORMAL] Warning test: %s', log.logger.name)
    log.warning('[NORMAL-] Warning test: %s', log.logger.name)
    log.logger.error('[NORMAL] Error test: %s', log.logger.name)
    log.error('[NORMAL-] Error test: %s', log.logger.name)
    log.logger.critical('[NORMAL] Critical test: %s', log.logger.name)
    log.critical('[NORMAL-] Critical test: %s', log.logger.name)
    log.logger.log(LoggingLevel.ALERT.value, '[NORMAL] alert test: %s', log.logger.name)
    log.alert('[NORMAL-] alert test: %s', log.logger.name)
    log.emergency('[NORMAL-] emergency test: %s', log.logger.name)
    log.logger.log(LoggingLevel.EMERGENCY.value, '[NORMAL] emergency test: %s', log.logger.name)
    log.exception('[NORMAL] Exception test: %s', log.logger.name)
    log.logger.log(LoggingLevel.EXCEPTION.value, '[NORMAL] exception test: %s', log.logger.name, exc_info=True)

    bad_level = 'WRONG'
    if not Log.validate_log_level(bad_level):
        print(f"Invalid level: {bad_level}")
    good_level = 'WARNING'
    if Log.validate_log_level(good_level):
        print(f"Valid level: {good_level}")

    print(f"ERROR is to_logging_level(): {LoggingLevel.ERROR.to_logging_level()}")
    print(f"ERROR is to_lower_case(): {LoggingLevel.ERROR.to_lower_case()}")
    print(f"ERROR is: {LoggingLevel.ERROR}")
    print(f"ERROR is value: {LoggingLevel.ERROR.value}")
    print(f"ERROR is name: {LoggingLevel.ERROR.name}")
    print(f"ERROR is from_string(lower): {LoggingLevel.from_string('error')}")
    print(f"ERROR is from_string(upper): {LoggingLevel.from_string('ERROR')}")
    print(f"ERROR is from_int: {LoggingLevel.from_int(40)}")
    print(f"ERROR is from_any(text lower): {LoggingLevel.from_any('error')}")
    print(f"ERROR is from_any(text upper): {LoggingLevel.from_any('ERROR')}")
    print(f"ERROR is from_any(int): {LoggingLevel.from_any(40)}")
    print(f"INFO <= ERROR: {LoggingLevel.INFO.includes(LoggingLevel.ERROR)}")
    print(f"INFO > ERROR: {LoggingLevel.INFO.is_higher_than(LoggingLevel.ERROR)}")
    print(f"INFO < ERROR: {LoggingLevel.INFO.is_lower_than(LoggingLevel.ERROR)}")

    try:
        print(f"INVALID is A: {LoggingLevel.from_string('INVALID')}")
    except ValueError as e:
        print(f"* ERROR: {e}")

    try:
        __test = 5 / 0
        print(f"Divided: {__test}")
    except ZeroDivisionError as e:
        print(f"** sys.exc_info(): {sys.exc_info()}")
        print(f"** sys.exc_info(): [{exception_stack()}] | [{exception_stack(sys.exc_info())}] | [{call_stack()}]")
        log.logger.critical("Division by zero: %s", e)
        log.exception("Division by zero: %s", e)

    for handler in log.logger.handlers:
        print(
            f"** Handler (logger) {handler} [{handler.name}] -> "
            f"{handler.level} -> {LoggingLevel.from_any(handler.level)}"
        )

    for key, handler in log.handlers.items():
        print(f"Handler (handlers) [{key}] {handler} -> {handler.level} -> {LoggingLevel.from_any(handler.level)}")
    log.set_log_level(Log.CONSOLE_HANDLER, LoggingLevel.ERROR)
    log.logger.warning('[NORMAL] Invisible Warning test: %s', log.logger.name)
    log.logger.error('[NORMAL] Visible Error test: %s', log.logger.name)
    log.logger.debug('[NORMAL] Visible Debug test: %s', log.logger.name)
    print(f"*** Any handler is minimum level ERROR: {log.any_handler_is_minimum_level(LoggingLevel.ERROR)}")
    print(f"*** Any handler is minimum level DEBUG: {log.any_handler_is_minimum_level(LoggingLevel.DEBUG)}")
    for handler in log.handlers.values():
        print(
            f"*** Setting handler {handler} is level {LoggingLevel.from_any(handler.level).name} -> "
            f"*** INC {LoggingLevel.from_any(handler.level).includes(LoggingLevel.DEBUG)}")

    print(f"*** WARNING includes ERROR: {LoggingLevel.WARNING.includes(LoggingLevel.ERROR)}")
    print(f"*** ERROR includes WARNING: {LoggingLevel.ERROR.includes(LoggingLevel.WARNING)}")

    log.set_log_level(Log.CONSOLE_HANDLER, LoggingLevel.DEBUG)
    log.debug('Current logging format: %s', log.log_settings['console_format_type'])
    log.debug('Current console formatter: %s', log.get_console_formatter())
    log.update_console_formatter(ConsoleFormat.TIME | ConsoleFormat.LINENO)
    log.info('Does it show less A')
    log.debug('Current console formatter after A: %s', log.get_console_formatter())
    log.update_console_formatter(ConsoleFormat.TIME | ConsoleFormat.LINENO)
    log.info('Does it show less B')
    log.debug('Current console formatter after B: %s', log.get_console_formatter())
    log.update_console_formatter(ConsoleFormatSettings.ALL)
    log.info('Does it show less C')
    log.debug('Current console formatter after C: %s', log.get_console_formatter())
    print(f"*** Any handler is minimum level ERROR: {log.any_handler_is_minimum_level(LoggingLevel.ERROR)}")
    print(f"*** Any handler is minimum level DEBUG: {log.any_handler_is_minimum_level(LoggingLevel.DEBUG)}")


if __name__ == "__main__":
    main()

# __END__
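# Custom severities such as ALERT/EMERGENCY above sit on top of the stdlib
# numeric level scale; registering one is a two-liner (the numeric value 70
# here is an assumption, not necessarily what corelibs uses):
import logging

ALERT = 70
logging.addLevelName(ALERT, "ALERT")
logging.getLogger("demo").log(ALERT, "alert-level message")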
2
test-run/logging_handling/log/.gitignore
vendored
Normal file
@@ -0,0 +1,2 @@
*
!.gitignore
91
test-run/logging_handling/log_pool.py
Normal file
@@ -0,0 +1,91 @@
"""
Pool Queue log handling
Thread Queue log handling
"""

import random
import time
from multiprocessing import Queue
import concurrent.futures
import logging
from pathlib import Path
from corelibs.logging_handling.log import Log
from corelibs.logging_handling.logging_level_handling.logging_level import LoggingLevel


def work_function(log_name: str, worker_id: int, data: list[int]) -> int:
    """
    simulate worker

    Arguments:
        log_name {str} -- base log name for the per-worker logger
        worker_id {int} -- worker number, also used as result multiplier
        data {list[int]} -- numbers to sum up

    Returns:
        int -- sum of data multiplied by worker_id
    """
    log = logging.getLogger(f'{log_name}-WorkerFn-{worker_id}')
    log.info('Starting worker: %s', worker_id)
    time.sleep(random.uniform(1, 3))
    result = sum(data) * worker_id
    return result


def main():
    """
    Queue log tester
    """
    print("[START] Queue logger test")
    log_queue: 'Queue[str]' = Queue()
    script_path: Path = Path(__file__).resolve().parent
    log = Log(
        log_path=script_path.joinpath('log', 'test.log'),
        log_name="Test Log",
        log_settings={
            "log_level_console": 'INFO',
            "log_level_file": 'INFO',
            "log_queue": log_queue,
        }
    )

    log.logger.debug('Pool Fork logging test')
    max_forks = 2
    data_sets = [[1, 2, 3], [4, 5, 6], [7, 8, 9]]
    with concurrent.futures.ProcessPoolExecutor(
        max_workers=max_forks,
        initializer=Log.init_worker_logging,
        initargs=(log_queue,)
    ) as executor:
        log.logger.info('Start workers')
        futures = [
            executor.submit(work_function, log.log_name, worker_id, data)
            for worker_id, data in enumerate(data_sets, 1)
        ]
        log.logger.info('Workers started')

        for future in concurrent.futures.as_completed(futures):
            log.logger.warning('Processing result: %s', future.result())
            print(f"Processing result: {future.result()}")

        log.set_log_level('stream_handler', LoggingLevel.ERROR)
        log.logger.error('SECOND Start workers')
        futures = [
            executor.submit(work_function, log.log_name, worker_id, data)
            for worker_id, data in enumerate(data_sets, 1)
        ]
        log.logger.info('[INVISIBLE] Workers started')
        log.logger.error('[VISIBLE] Second Workers started')

        for future in concurrent.futures.as_completed(futures):
            log.logger.error('Processing result: %s', future.result())
            print(f"Processing result: {future.result()}")

    log.set_log_level('stream_handler', LoggingLevel.DEBUG)
    log.logger.info('[END] Queue logger test')
    log.stop_listener()


if __name__ == "__main__":
    main()

# __END__
66
test-run/logging_handling/log_queue.py
Normal file
@@ -0,0 +1,66 @@
"""
Log logging_handling.log testing
"""

# import atexit
from pathlib import Path
from multiprocessing import Queue
import time
# this is for testing only
from corelibs.logging_handling.log import Log
from corelibs.logging_handling.logging_level_handling.logging_level import LoggingLevel


def main():
    """
    Log testing
    """
    script_path: Path = Path(__file__).resolve().parent

    log_queue: 'Queue[str]' = Queue()
    log_q = Log(
        log_path=script_path.joinpath('log', 'test_queue.log'),
        log_name="Test Log",
        log_settings={
            "log_level_console": 'WARNING',
            "log_level_file": 'ERROR',
            "log_queue": log_queue
            # "console_color_output_enabled": False,
        }
    )

    log_q.logger.debug('[QUEUE] Debug test: %s', log_q.logger.name)
    log_q.logger.info('[QUEUE] Info test: %s', log_q.logger.name)
    log_q.logger.warning('[QUEUE] Warning test: %s', log_q.logger.name)
    log_q.logger.error('[QUEUE] Error test: %s', log_q.logger.name)
    log_q.logger.critical('[QUEUE] Critical test: %s', log_q.logger.name)
    log_q.logger.log(LoggingLevel.EXCEPTION.value, '[QUEUE] Exception test: %s', log_q.logger.name, exc_info=True)
    time.sleep(0.1)

    for handler in log_q.logger.handlers:
        print(f"[1] Handler (logger) {handler}")
    if log_q.listener is not None:
        for handler in log_q.listener.handlers:
            print(f"[1] Handler (queue) {handler}")
    for handler in log_q.handlers.items():
        print(f"[1] Handler (handlers) {handler}")

    log_q.set_log_level('stream_handler', LoggingLevel.ERROR)
    log_q.logger.warning('[QUEUE-B] [INVISIBLE] Warning test: %s', log_q.logger.name)
    log_q.logger.error('[QUEUE-B] [VISIBLE] Error test: %s', log_q.logger.name)

    for handler in log_q.logger.handlers:
        print(f"[2] Handler (logger) {handler}")
    if log_q.listener is not None:
        for handler in log_q.listener.handlers:
            print(f"[2] Handler (queue) {handler}")
    for handler in log_q.handlers.items():
        print(f"[2] Handler (handlers) {handler}")

    log_q.stop_listener()


if __name__ == "__main__":
    main()

# __END__
31
test-run/logging_handling/log_queue_legacy.py
Normal file
@@ -0,0 +1,31 @@
"""
Log logging_handling.log testing
"""

# import atexit
from pathlib import Path
from multiprocessing import Queue
# this is for testing only
from queue_logger.log_queue import QueueLogger


def main():
    """
    Log testing
    """
    script_path: Path = Path(__file__).resolve().parent

    log_queue: 'Queue[str]' = Queue()
    log_q_legacy = QueueLogger(
        log_file=script_path.joinpath('log', 'test_queue_legacy.log'),
        log_name="Test Log Queue",
        log_queue=log_queue
    )
    log_q_legacy.mlog.info('Log test: %s', 'Queue Legacy')
    # log_q.stop_listener()


if __name__ == "__main__":
    main()

# __END__
96
test-run/logging_handling/queue_logger/log_queue.py
Normal file
@@ -0,0 +1,96 @@
"""
test queue logger interface
NOTE: this has all moved to the default log interface
"""

import logging
import logging.handlers
from pathlib import Path
from multiprocessing import Queue


class QueueLogger:
    """
    Queue logger
    """

    def __init__(self, log_file: Path, log_name: str, log_queue: 'Queue[str] | None' = None):
        self.log_file = log_file
        self.log_name = log_name
        self.handlers = self.setup_logging()
        self.log_queue: 'Queue[str]' = log_queue if log_queue is not None else Queue()
        self.listener = logging.handlers.QueueListener(self.log_queue, *self.handlers)
        self.listener.start()

        self.mlog: logging.Logger = self.main_log(log_name)

    def __del__(self):
        self.mlog.info("[%s] ================================>", "END")
        self.listener.stop()

    def setup_logging(self):
        """
        setup basic logging
        """

        # Create formatters
        file_formatter = logging.Formatter(
            '%(asctime)s - %(name)s - %(levelname)s - [PID:%(process)d] [%(filename)s:%(lineno)d] - %(message)s'
        )

        console_formatter = logging.Formatter(
            '%(asctime)s - %(name)s - %(levelname)s - %(message)s'
        )

        # Create handlers
        file_handler = logging.FileHandler(self.log_file)
        file_handler.setFormatter(file_formatter)
        file_handler.setLevel(logging.DEBUG)

        console_handler = logging.StreamHandler()
        console_handler.setFormatter(console_formatter)
        console_handler.setLevel(logging.DEBUG)

        return [file_handler, console_handler]

    def main_log(self, log_name: str) -> logging.Logger:
        """
        main logger

        Arguments:
            log_name {str} -- base name for the main process logger

        Returns:
            logging.Logger -- logger that writes through the queue
        """
        mlog_handler = logging.handlers.QueueHandler(self.log_queue)
        mlog = logging.getLogger(f'{log_name}-MainProcess')
        mlog.addHandler(mlog_handler)
        mlog.setLevel(logging.DEBUG)
        return mlog

    @staticmethod
    def init_worker_logging(log_queue: 'Queue[str]', log_name: str):
        """
        Initialize logging for worker processes
        """

        # Create QueueHandler
        queue_handler = logging.handlers.QueueHandler(log_queue)

        # Setup root logger for this process
        # NOTE: the name must be EMPTY (root logger); a named logger would be a
        # single shared one, but we need a fresh one for EACH fork
        root_logger = logging.getLogger()
        root_logger.setLevel(logging.DEBUG)
        root_logger.handlers.clear()
        root_logger.addHandler(queue_handler)

        root_logger.info('[LOGGER] Init log: %s - %s', log_queue, log_name)

        return root_logger

    def stop_listener(self):
        """
        stop the listener
        """
        self.listener.stop()
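# Typical wiring for the QueueLogger above (a usage sketch; names mirror
# log_pool.py earlier in this diff): the parent process owns the QueueListener,
# each worker only gets a QueueHandler via the pool initializer, so records
# from all processes funnel through one set of handlers.
#
#   ql = QueueLogger(Path('log/app.log'), 'App')
#   with concurrent.futures.ProcessPoolExecutor(
#       initializer=QueueLogger.init_worker_logging,
#       initargs=(ql.log_queue, 'App'),
#   ) as pool:
#       ...
#   ql.stop_listener()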
@@ -9,8 +9,9 @@ from random import randint
import sys
import io
from pathlib import Path
from corelibs.file_handling.progress import Progress
from corelibs.string_handling.datetime_helpers import convert_timestamp, create_time
from corelibs.script_handling.progress import Progress
from corelibs.datetime_handling.datetime_helpers import create_time
from corelibs.datetime_handling.timestamp_convert import convert_timestamp


def main():
@@ -5,7 +5,8 @@ Test string_handling/string_helpers
import sys
from decimal import Decimal, getcontext
from textwrap import shorten
from corelibs.string_handling.string_helpers import shorten_string, format_number
from corelibs.string_handling.string_helpers import shorten_string, format_number, prepare_url_slash
from corelibs.string_handling.text_colors import Colors


def __sh_shorten_string():
@@ -16,7 +17,7 @@ def __sh_shorten_string():
        result = shorten_string(string, length, placeholder=placeholder)
        print(f"IN: {string} -> {result}")
    except ValueError as e:
        print(f"Failed: {e}")
        print(f"{Colors.red}Failed: {Colors.bold}{e}{Colors.end}")
    try:
        result = shorten(string, width=length, placeholder=placeholder)
        print(f"IN: {string} -> {result}")
@@ -51,12 +52,47 @@ def __sh_format_number():
    print(f"Format {number} ({precision}) -> {result}")


def __sh_colors():
    for color in [
        "black",
        "red",
        "green",
        "yellow",
        "blue",
        "magenta",
        "cyan",
        "white",
    ]:
        for change in ['', '_bold', '_bright']:
            _color = f"{color}{change}"
            print(f"Color: {getattr(Colors, _color)}{_color}{Colors.end}")

    print(f"Underline: {Colors.underline}UNDERLINE{Colors.reset}")
    print(f"Bold: {Colors.bold}BOLD{Colors.reset}")
    print(f"Underline/Yellow: {Colors.underline}{Colors.yellow}UNDERLINE YELLOW{Colors.reset}")
    print(f"Underline/Yellow/Bold: {Colors.underline}{Colors.bold}{Colors.yellow}UNDERLINE YELLOW BOLD{Colors.reset}")


def __prepare_url_slash():
    urls = [
        "api/v1/resource",
        "/api/v1/resource",
        "///api//v1//resource//",
        "api//v1/resource/",
    ]
    for url in urls:
        prepared = prepare_url_slash(url)
        print(f"IN: {url} -> OUT: {prepared}")


def main():
    """
    Test: corelibs.string_handling.string_helpers
    """
    __sh_shorten_string()
    __sh_format_number()
    __sh_colors()
    __prepare_url_slash()


if __name__ == "__main__":
@@ -4,10 +4,12 @@
Test for double byte format
"""

from corelibs.string_handling.timestamp_strings import TimestampStrings
from zoneinfo import ZoneInfo
from corelibs.datetime_handling.timestamp_strings import TimestampStrings


def main():
    """test"""
    ts = TimestampStrings()
    print(f"TS: {ts.timestamp_now}")

@@ -16,6 +18,14 @@ def main():
    except ValueError as e:
        print(f"Value error: {e}")

    ts = TimestampStrings("Europe/Vienna")
    print(f"TZ: {ts.time_zone} -> TS: {ts.timestamp_now_tz}")
    ts = TimestampStrings(ZoneInfo("Europe/Vienna"))
    print(f"TZ: {ts.time_zone} -> TS: {ts.timestamp_now_tz}")
    custom_tz = 'Europe/Paris'
    ts = TimestampStrings(time_zone=custom_tz)
    print(f"TZ: {ts.time_zone} -> TS: {ts.timestamp_now_tz}")


if __name__ == "__main__":
    main()
29
test-run/var_handling/enum_base.py
Normal file
@@ -0,0 +1,29 @@
#!/usr/bin/env python3

"""
Enum handling
"""

from corelibs.var_handling.enum_base import EnumBase


class TestBlock(EnumBase):
    """Test block enum"""
    BLOCK_A = "block_a"
    HAS_NUM = 5


def main() -> None:
    """
    Look up enum members by name, value and direct access
    """

    print(f"BLOCK A: {TestBlock.from_any('BLOCK_A')}")
    print(f"HAS NUM: {TestBlock.from_any(5)}")
    print(f"DIRECT BLOCK: {TestBlock.BLOCK_A.name} -> {TestBlock.BLOCK_A.value}")


if __name__ == "__main__":
    main()

# __END__
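# A plausible from_any() as exercised above: accept an existing member, a
# member name, or a raw value (assumed semantics; the real corelibs EnumBase
# may resolve lookups differently).
from enum import Enum
from typing import Any


class EnumBaseSketch(Enum):
    @classmethod
    def from_any(cls, value: Any) -> "EnumBaseSketch":
        if isinstance(value, cls):
            return value
        if isinstance(value, str) and value in cls.__members__:
            return cls.__members__[value]   # lookup by member name
        return cls(value)                   # fall back to lookup by value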
0
tests/integration/__init__.py
Normal file
0
tests/integration/fixtures/__init__.py
Normal file
1
tests/unit/check_handling/__init__.py
Normal file
@@ -0,0 +1 @@
"""Unit tests for check_handling module."""
623
tests/unit/check_handling/test_regex_constants.py
Normal file
@@ -0,0 +1,623 @@
|
||||
"""
|
||||
Unit tests for regex_constants module.
|
||||
|
||||
Tests all regex patterns defined in the check_handling.regex_constants module.
|
||||
"""
|
||||
|
||||
import re
|
||||
import pytest
|
||||
from corelibs.check_handling.regex_constants import (
|
||||
compile_re,
|
||||
SUB_EMAIL_BASIC_REGEX,
|
||||
EMAIL_BASIC_REGEX,
|
||||
NAME_EMAIL_SIMPLE_REGEX,
|
||||
NAME_EMAIL_BASIC_REGEX,
|
||||
DOMAIN_WITH_LOCALHOST_REGEX,
|
||||
DOMAIN_WITH_LOCALHOST_PORT_REGEX,
|
||||
DOMAIN_REGEX
|
||||
)
|
||||
from corelibs.check_handling.regex_constants_compiled import (
|
||||
COMPILED_EMAIL_BASIC_REGEX,
|
||||
COMPILED_NAME_EMAIL_SIMPLE_REGEX,
|
||||
COMPILED_NAME_EMAIL_BASIC_REGEX,
|
||||
COMPILED_DOMAIN_WITH_LOCALHOST_REGEX,
|
||||
COMPILED_DOMAIN_WITH_LOCALHOST_PORT_REGEX,
|
||||
COMPILED_DOMAIN_REGEX,
|
||||
)
|
||||
|
||||
|
||||
class TestCompileRe:
|
||||
"""Test cases for the compile_re function."""
|
||||
|
||||
def test_compile_re_returns_pattern(self) -> None:
|
||||
"""Test that compile_re returns a compiled regex Pattern object."""
|
||||
pattern = compile_re(r"test")
|
||||
assert isinstance(pattern, re.Pattern)
|
||||
|
||||
def test_compile_re_with_verbose_flag(self) -> None:
|
||||
"""Test that compile_re compiles with VERBOSE flag."""
|
||||
# Verbose mode allows whitespace and comments in regex
|
||||
verbose_regex = r"""
|
||||
\d+ # digits
|
||||
\s+ # whitespace
|
||||
"""
|
||||
pattern = compile_re(verbose_regex)
|
||||
assert pattern.match("123 ")
|
||||
assert not pattern.match("abc")
|
||||
|
||||
def test_compile_re_simple_pattern(self) -> None:
|
||||
"""Test compile_re with a simple pattern."""
|
||||
pattern = compile_re(r"^\d{3}$")
|
||||
assert pattern.match("123")
|
||||
assert not pattern.match("12")
|
||||
assert not pattern.match("1234")
|
||||
|
||||
|
||||
class TestEmailBasicRegex:
|
||||
"""Test cases for EMAIL_BASIC_REGEX pattern."""
|
||||
|
||||
@pytest.fixture
|
||||
def email_pattern(self) -> re.Pattern[str]:
|
||||
"""Fixture that returns compiled email regex pattern."""
|
||||
return COMPILED_EMAIL_BASIC_REGEX
|
||||
|
||||
@pytest.mark.parametrize("valid_email", [
|
||||
"user@example.com",
|
||||
"test.user@example.com",
|
||||
"user+tag@example.co.uk",
|
||||
"first.last@subdomain.example.com",
|
||||
"user123@test-domain.com",
|
||||
"a@example.com",
|
||||
"user_name@example.com",
|
||||
"user-name@example.com",
|
||||
"user@sub.domain.example.com",
|
||||
"test!#$%&'*+-/=?^_`{|}~@example.com",
|
||||
"1234567890@example.com",
|
||||
"user@example-domain.com",
|
||||
"user@domain.co",
|
||||
# Regex allows these (even if not strictly RFC compliant):
|
||||
"user.@example.com", # ends with dot before @
|
||||
"user..name@example.com", # consecutive dots in local part
|
||||
])
|
||||
def test_valid_emails(
|
||||
self, email_pattern: re.Pattern[str], valid_email: str
|
||||
) -> None:
|
||||
"""Test that valid email addresses match the pattern."""
|
||||
assert email_pattern.match(valid_email), (
|
||||
f"Failed to match valid email: {valid_email}"
|
||||
)
|
||||
|
||||
@pytest.mark.parametrize("invalid_email", [
|
||||
"", # empty string
|
||||
"@example.com", # missing local part
|
||||
"user@", # missing domain
|
||||
"user", # no @ symbol
|
||||
"user@.com", # domain starts with dot
|
||||
"user@domain", # no TLD
|
||||
"user @example.com", # space in local part
|
||||
"user@exam ple.com", # space in domain
|
||||
".user@example.com", # starts with dot
|
||||
"user@-example.com", # domain starts with hyphen
|
||||
"user@example-.com", # domain part ends with hyphen
|
||||
"user@example.c", # TLD too short (1 char)
|
||||
"user@example.toolong", # TLD too long (>6 chars)
|
||||
"user@@example.com", # double @
|
||||
"user@example@com", # multiple @
|
||||
"user@.example.com", # domain starts with dot
|
||||
"user@example.com.", # ends with dot
|
||||
"user@123.456.789.012", # numeric TLD not allowed
|
||||
])
|
||||
def test_invalid_emails(
|
||||
self, email_pattern: re.Pattern[str], invalid_email: str
|
||||
) -> None:
|
||||
"""Test that invalid email addresses do not match the pattern."""
|
||||
assert not email_pattern.match(invalid_email), (
|
||||
f"Incorrectly matched invalid email: {invalid_email}"
|
||||
)
|
||||
|
||||
def test_email_max_local_part_length(
|
||||
self, email_pattern: re.Pattern[str]
|
||||
) -> None:
|
||||
"""Test email with maximum local part length (64 characters)."""
|
||||
# Local part can be up to 64 chars (first char + 63 more)
|
||||
local_part = "a" * 64
|
||||
email = f"{local_part}@example.com"
|
||||
assert email_pattern.match(email)
|
||||
|
||||
def test_email_exceeds_local_part_length(
|
||||
self, email_pattern: re.Pattern[str]
|
||||
) -> None:
|
||||
"""Test email exceeding maximum local part length."""
|
||||
# 65 characters should not match
|
||||
local_part = "a" * 65
|
||||
email = f"{local_part}@example.com"
|
||||
assert not email_pattern.match(email)
|
||||
|
||||
|
||||
class TestSubEmailBasicRegex:
|
||||
"""Test cases for SUB_EMAIL_BASIC_REGEX pattern (without anchors)."""
|
||||
|
||||
@pytest.fixture
|
||||
def sub_email_pattern(self) -> re.Pattern[str]:
|
||||
"""Fixture that returns compiled sub email regex pattern."""
|
||||
return compile_re(rf"^{SUB_EMAIL_BASIC_REGEX}$")
|
||||
|
||||
@pytest.mark.parametrize("valid_email", [
|
||||
"user@example.com",
|
||||
"test.user@example.com",
|
||||
"user+tag@example.co.uk",
|
||||
"first.last@subdomain.example.com",
|
||||
"user123@test-domain.com",
|
||||
"a@example.com",
|
||||
"user_name@example.com",
|
||||
"user-name@example.com",
|
||||
"user@sub.domain.example.com",
|
||||
"test!#$%&'*+-/=?^_`{|}~@example.com",
|
||||
"1234567890@example.com",
|
||||
])
|
||||
def test_valid_emails_match(self, sub_email_pattern: re.Pattern[str], valid_email: str) -> None:
|
||||
"""Test that valid email addresses match SUB_EMAIL_BASIC_REGEX."""
|
||||
assert sub_email_pattern.match(valid_email), (
|
||||
f"Failed to match valid email: {valid_email}"
|
||||
)
|
||||
|
||||
@pytest.mark.parametrize("invalid_email", [
|
||||
"",
|
||||
"@example.com",
|
||||
"user@",
|
||||
"user",
|
||||
"user@.com",
|
||||
"user@domain",
|
||||
"user @example.com",
|
||||
".user@example.com",
|
||||
"user@-example.com",
|
||||
"user@example-.com",
|
||||
"user@example.c",
|
||||
"user@example.toolong",
|
||||
])
|
||||
def test_invalid_emails_no_match(self, sub_email_pattern: re.Pattern[str], invalid_email: str) -> None:
|
||||
"""Test that invalid emails don't match SUB_EMAIL_BASIC_REGEX."""
|
||||
assert not sub_email_pattern.match(invalid_email), (
|
||||
f"Incorrectly matched invalid email: {invalid_email}"
|
||||
)
|
||||
|
||||
def test_sub_email_max_local_part_length(self, sub_email_pattern: re.Pattern[str]) -> None:
|
||||
"""Test email with maximum local part length (64 characters)."""
|
||||
local_part = "a" * 64
|
||||
email = f"{local_part}@example.com"
|
||||
assert sub_email_pattern.match(email)
|
||||
|
||||
def test_sub_email_exceeds_local_part_length(self, sub_email_pattern: re.Pattern[str]) -> None:
|
||||
"""Test email exceeding maximum local part length."""
|
||||
local_part = "a" * 65
|
||||
email = f"{local_part}@example.com"
|
||||
assert not sub_email_pattern.match(email)
|
||||
|
||||
|
||||
class TestNameEmailSimpleRegex:
|
||||
"""Test cases for NAME_EMAIL_SIMPLE_REGEX pattern."""
|
||||
|
||||
@pytest.fixture
|
||||
def name_email_simple_pattern(self) -> re.Pattern[str]:
|
||||
"""Fixture that returns compiled name+email simple regex pattern."""
|
||||
return COMPILED_NAME_EMAIL_SIMPLE_REGEX
|
||||
|
||||
@pytest.mark.parametrize("test_input,expected_groups", [
|
||||
('"John Doe" <john@example.com>', {'name1': 'John Doe', 'email1': 'john@example.com'}),
|
||||
('John Doe <john@example.com>', {'name2': 'John Doe', 'email2': 'john@example.com'}),
|
||||
('<john@example.com>', {'email3': 'john@example.com'}),
|
||||
('john@example.com', {'email4': 'john@example.com'}),
|
||||
(' "Jane Smith" <jane@test.com> ', {'name1': 'Jane Smith', 'email1': 'jane@test.com'}),
|
||||
('Bob <bob@test.org>', {'name2': 'Bob', 'email2': 'bob@test.org'}),
|
||||
])
|
||||
def test_valid_name_email_combinations(
|
||||
self, name_email_simple_pattern: re.Pattern[str], test_input: str, expected_groups: dict[str, str]
|
||||
) -> None:
|
||||
"""Test that valid name+email combinations match and extract correct groups."""
|
||||
match = name_email_simple_pattern.match(test_input)
|
||||
assert match is not None, f"Failed to match: {test_input}"
|
||||
|
||||
# Check that expected groups are present and match
|
||||
for group_name, expected_value in expected_groups.items():
|
||||
assert match.group(group_name) == expected_value, (
|
||||
f"Group {group_name} expected '{expected_value}', got '{match.group(group_name)}'"
|
||||
)
|
||||
|
||||
@pytest.mark.parametrize("invalid_input", [
|
||||
"",
|
||||
"not an email",
|
||||
"<>",
|
||||
'"Name Only"',
|
||||
'Name <',
|
||||
'<email',
|
||||
'Name <<email@test.com>>',
|
||||
'Name <email@test.com',
|
||||
'Name email@test.com>',
|
||||
])
|
||||
def test_invalid_name_email_combinations(
|
||||
self, name_email_simple_pattern: re.Pattern[str], invalid_input: str
|
||||
) -> None:
|
||||
"""Test that invalid inputs don't match NAME_EMAIL_SIMPLE_REGEX."""
|
||||
assert not name_email_simple_pattern.match(invalid_input), (
|
||||
f"Incorrectly matched invalid input: {invalid_input}"
|
||||
)
|
||||
|
||||
def test_extract_name_from_quoted(
|
||||
self, name_email_simple_pattern: re.Pattern[str]
|
||||
) -> None:
|
||||
"""Test extracting name from quoted format."""
|
||||
match = name_email_simple_pattern.match('"Alice Wonder" <alice@example.com>')
|
||||
assert match is not None
|
||||
assert match.group('name1') == 'Alice Wonder'
|
||||
assert match.group('email1') == 'alice@example.com'
|
||||
|
||||
def test_extract_name_from_unquoted(
|
||||
self, name_email_simple_pattern: re.Pattern[str]
|
||||
) -> None:
|
||||
"""Test extracting name from unquoted format."""
|
||||
match = name_email_simple_pattern.match('Bob Builder <bob@example.com>')
|
||||
assert match is not None
|
||||
assert match.group('name2') == 'Bob Builder'
|
||||
assert match.group('email2') == 'bob@example.com'
|
||||
|
||||
def test_email_only_in_brackets(
|
||||
self, name_email_simple_pattern: re.Pattern[str]
|
||||
) -> None:
|
||||
"""Test email-only format in angle brackets."""
|
||||
match = name_email_simple_pattern.match('<charlie@example.com>')
|
||||
assert match is not None
|
||||
assert match.group('email3') == 'charlie@example.com'
|
||||
|
||||
def test_email_only_plain(
|
||||
self, name_email_simple_pattern: re.Pattern[str]
|
||||
) -> None:
|
||||
"""Test plain email format without brackets."""
|
||||
match = name_email_simple_pattern.match('dave@example.com')
|
||||
assert match is not None
|
||||
assert match.group('email4') == 'dave@example.com'
|
||||
|
||||
def test_whitespace_handling(
|
||||
self, name_email_simple_pattern: re.Pattern[str]
|
||||
) -> None:
|
||||
"""Test that leading/trailing whitespace is handled correctly."""
|
||||
match = name_email_simple_pattern.match(' "User Name" <user@example.com> ')
|
||||
assert match is not None
|
||||
assert match.group('name1') == 'User Name'
|
||||
assert match.group('email1') == 'user@example.com'
|
||||
|
||||
|
||||
class TestNameEmailBasicRegex:
|
||||
"""Test cases for NAME_EMAIL_BASIC_REGEX pattern with strict email validation."""
|
||||
|
||||
@pytest.fixture
|
||||
def name_email_basic_pattern(self) -> re.Pattern[str]:
|
||||
"""Fixture that returns compiled name+email basic regex pattern."""
|
||||
return COMPILED_NAME_EMAIL_BASIC_REGEX
|
||||
|
||||
@pytest.mark.parametrize("test_input,expected_name,expected_email", [
|
||||
('"John Doe" <john@example.com>', 'John Doe', 'john@example.com'),
|
||||
('John Doe <john@example.com>', 'John Doe', 'john@example.com'),
|
||||
('<john@example.com>', None, 'john@example.com'),
|
||||
('john@example.com', None, 'john@example.com'),
|
||||
(' "Jane Smith" <jane.smith@test.co.uk> ', 'Jane Smith', 'jane.smith@test.co.uk'),
|
||||
('Alice Wonder <alice+tag@example.com>', 'Alice Wonder', 'alice+tag@example.com'),
|
||||
])
|
||||
def test_valid_name_email_with_validation(
|
||||
self,
|
||||
name_email_basic_pattern: re.Pattern[str],
|
||||
test_input: str,
|
||||
expected_name: str | None,
|
||||
expected_email: str,
|
||||
) -> None:
|
||||
"""Test valid name+email with strict email validation."""
|
||||
match = name_email_basic_pattern.match(test_input)
|
||||
assert match is not None, f"Failed to match: {test_input}"
|
||||
|
||||
# Extract name and email from whichever group matched
|
||||
name = match.group('name1') or match.group('name2')
|
||||
email = (
|
||||
match.group('email1') or match.group('email2') or
|
||||
match.group('email3') or match.group('email4')
|
||||
)
|
||||
|
||||
assert name == expected_name, f"Expected name '{expected_name}', got '{name}'"
|
||||
assert email == expected_email, f"Expected email '{expected_email}', got '{email}'"
|
||||
|
||||
@pytest.mark.parametrize("invalid_input", [
|
||||
'"John Doe" <invalid.email>', # invalid email format
|
||||
'John Doe <@example.com>', # missing local part
|
||||
'<user@>', # missing domain
|
||||
'user@domain', # no TLD
|
||||
'"Name" <user @example.com>', # space in email
|
||||
'<.user@example.com>', # starts with dot
|
||||
'user@-example.com', # domain starts with hyphen
|
||||
'Name <user@example.c>', # TLD too short
|
||||
'Name <user@example.toolongdomain>', # TLD too long
|
||||
])
|
||||
def test_invalid_email_format_rejected(
|
||||
self, name_email_basic_pattern: re.Pattern[str], invalid_input: str
|
||||
) -> None:
|
||||
"""Test that inputs with invalid email formats are rejected."""
|
||||
assert not name_email_basic_pattern.match(invalid_input), (
|
||||
f"Incorrectly matched invalid input: {invalid_input}"
|
||||
)
|
||||
|
||||
def test_quoted_name_with_valid_email(
|
||||
self, name_email_basic_pattern: re.Pattern[str]
|
||||
) -> None:
|
||||
"""Test quoted name format with valid email."""
|
||||
match = name_email_basic_pattern.match('"Alice Wonder" <alice@example.com>')
|
||||
assert match is not None
|
||||
assert match.group('name1') == 'Alice Wonder'
|
||||
assert match.group('email1') == 'alice@example.com'
|
||||
|
||||
def test_unquoted_name_with_valid_email(
|
||||
self, name_email_basic_pattern: re.Pattern[str]
|
||||
) -> None:
|
||||
"""Test unquoted name format with valid email."""
|
||||
match = name_email_basic_pattern.match('Bob Builder <bob@example.com>')
|
||||
assert match is not None
|
||||
assert match.group('name2') == 'Bob Builder'
|
||||
assert match.group('email2') == 'bob@example.com'
|
||||
|
||||
def test_email_only_formats(
|
||||
self, name_email_basic_pattern: re.Pattern[str]
|
||||
) -> None:
|
||||
"""Test email-only formats (with and without brackets)."""
|
||||
# With brackets
|
||||
match1 = name_email_basic_pattern.match('<charlie@example.com>')
|
||||
assert match1 is not None
|
||||
assert match1.group('email3') == 'charlie@example.com'
|
||||
|
||||
# Without brackets
|
||||
match2 = name_email_basic_pattern.match('dave@example.com')
|
||||
assert match2 is not None
|
||||
assert match2.group('email4') == 'dave@example.com'
|
||||
|
||||
def test_whitespace_handling(
|
||||
self, name_email_basic_pattern: re.Pattern[str]
|
||||
) -> None:
|
||||
"""Test that leading/trailing whitespace is handled correctly."""
|
||||
match = name_email_basic_pattern.match(' "User" <user@example.com> ')
|
||||
assert match is not None
|
||||
assert match.group('name1') == 'User'
|
||||
assert match.group('email1') == 'user@example.com'
|
||||
|
||||
def test_special_characters_in_local_part(
|
||||
self, name_email_basic_pattern: re.Pattern[str]
|
||||
) -> None:
|
||||
"""Test email with special characters in local part."""
|
||||
match = name_email_basic_pattern.match('Test User <test!#$%&\'*+-/=?^_`{|}~@example.com>')
|
||||
assert match is not None
|
||||
assert match.group('name2') == 'Test User'
|
||||
assert match.group('email2') == 'test!#$%&\'*+-/=?^_`{|}~@example.com'
|
||||
|
||||
|
||||
class TestDomainWithLocalhostRegex:
|
||||
"""Test cases for DOMAIN_WITH_LOCALHOST_REGEX pattern."""
|
||||
|
||||
@pytest.fixture
|
||||
def domain_localhost_pattern(self) -> re.Pattern[str]:
|
||||
"""Fixture that returns compiled domain with localhost regex pattern."""
|
||||
return COMPILED_DOMAIN_WITH_LOCALHOST_REGEX
|
||||
|
||||
@pytest.mark.parametrize("valid_domain", [
|
||||
"localhost",
|
||||
"example.com",
|
||||
"subdomain.example.com",
|
||||
"sub.domain.example.com",
|
||||
"test-domain.com",
|
||||
"example.co.uk",
|
||||
"a.com",
|
||||
"test123.example.com",
|
||||
"my-site.example.org",
|
||||
"multi.level.subdomain.example.com",
|
||||
])
|
||||
def test_valid_domains(
|
||||
self, domain_localhost_pattern: re.Pattern[str], valid_domain: str
|
||||
) -> None:
|
||||
"""Test that valid domains (including localhost) match the pattern."""
|
||||
assert domain_localhost_pattern.match(valid_domain), (
|
||||
f"Failed to match valid domain: {valid_domain}"
|
||||
)
|
||||
|
||||
@pytest.mark.parametrize("invalid_domain", [
|
||||
"", # empty string
|
||||
"example", # no TLD
|
||||
"-example.com", # starts with hyphen
|
||||
"example-.com", # ends with hyphen
|
||||
".example.com", # starts with dot
|
||||
"example.com.", # ends with dot
|
||||
"example..com", # consecutive dots
|
||||
"exam ple.com", # space in domain
|
||||
"example.c", # TLD too short
|
||||
"localhost:8080", # port not allowed in this pattern
|
||||
"example.com:8080", # port not allowed in this pattern
|
||||
"@example.com", # invalid character
|
||||
"example@com", # invalid character
|
||||
])
|
||||
def test_invalid_domains(
|
||||
self, domain_localhost_pattern: re.Pattern[str], invalid_domain: str
|
||||
) -> None:
|
||||
"""Test that invalid domains do not match the pattern."""
|
||||
assert not domain_localhost_pattern.match(invalid_domain), (
|
||||
f"Incorrectly matched invalid domain: {invalid_domain}"
|
||||
)
|
||||
|
||||
|
||||
class TestDomainWithLocalhostPortRegex:
    """Test cases for DOMAIN_WITH_LOCALHOST_PORT_REGEX pattern."""

    @pytest.fixture
    def domain_localhost_port_pattern(self) -> re.Pattern[str]:
        """Fixture that returns compiled domain and localhost with port pattern."""
        return COMPILED_DOMAIN_WITH_LOCALHOST_PORT_REGEX

    @pytest.mark.parametrize("valid_domain", [
        "localhost",
        "localhost:8080",
        "localhost:3000",
        "localhost:80",
        "localhost:443",
        "localhost:65535",
        "example.com",
        "example.com:8080",
        "subdomain.example.com:3000",
        "test-domain.com:443",
        "example.co.uk",
        "example.co.uk:8000",
        "a.com:1",
        "multi.level.subdomain.example.com:9999",
    ])
    def test_valid_domains_with_port(
        self, domain_localhost_port_pattern: re.Pattern[str], valid_domain: str
    ) -> None:
        """Test that valid domains with optional ports match the pattern."""
        assert domain_localhost_port_pattern.match(valid_domain), (
            f"Failed to match valid domain: {valid_domain}"
        )

    @pytest.mark.parametrize("invalid_domain", [
        "",  # empty string
        "example",  # no TLD
        "-example.com",  # starts with hyphen
        "example-.com",  # ends with hyphen
        ".example.com",  # starts with dot
        "example.com.",  # ends with dot
        "localhost:",  # port without number
        "example.com:",  # port without number
        "example.com:abc",  # non-numeric port
        "example.com: 8080",  # space before port
        "example.com:80 80",  # space in port
        "exam ple.com",  # space in domain
        "localhost :8080",  # space before colon
    ])
    def test_invalid_domains_with_port(
        self,
        domain_localhost_port_pattern: re.Pattern[str],
        invalid_domain: str,
    ) -> None:
        """Test that invalid domains do not match the pattern."""
        assert not domain_localhost_port_pattern.match(invalid_domain), (
            f"Incorrectly matched invalid domain: {invalid_domain}"
        )

    def test_large_port_number(
        self, domain_localhost_port_pattern: re.Pattern[str]
    ) -> None:
        """Test domain with large port numbers."""
        assert domain_localhost_port_pattern.match("example.com:65535")
        # Regex doesn't validate port range
        assert domain_localhost_port_pattern.match("example.com:99999")


class TestDomainRegex:
    """Test cases for DOMAIN_REGEX pattern (no localhost)."""

    @pytest.fixture
    def domain_pattern(self) -> re.Pattern[str]:
        """Fixture that returns compiled domain regex pattern."""
        return COMPILED_DOMAIN_REGEX

    @pytest.mark.parametrize("valid_domain", [
        "example.com",
        "subdomain.example.com",
        "sub.domain.example.com",
        "test-domain.com",
        "example.co.uk",
        "a.com",
        "test123.example.com",
        "my-site.example.org",
        "multi.level.subdomain.example.com",
        "example.co",
    ])
    def test_valid_domains_no_localhost(
        self, domain_pattern: re.Pattern[str], valid_domain: str
    ) -> None:
        """Test that valid domains match the pattern."""
        assert domain_pattern.match(valid_domain), (
            f"Failed to match valid domain: {valid_domain}"
        )

    @pytest.mark.parametrize("invalid_domain", [
        "",  # empty string
        "localhost",  # localhost not allowed
        "example",  # no TLD
        "-example.com",  # starts with hyphen
        "example-.com",  # ends with hyphen
        ".example.com",  # starts with dot
        "example.com.",  # ends with dot
        "example..com",  # consecutive dots
        "exam ple.com",  # space in domain
        "example.c",  # TLD too short
        "example.com:8080",  # port not allowed
        "@example.com",  # invalid character
        "example@com",  # invalid character
    ])
    def test_invalid_domains_no_localhost(
        self, domain_pattern: re.Pattern[str], invalid_domain: str
    ) -> None:
        """Test that invalid domains do not match the pattern."""
        assert not domain_pattern.match(invalid_domain), (
            f"Incorrectly matched invalid domain: {invalid_domain}"
        )

    def test_localhost_not_allowed(
        self, domain_pattern: re.Pattern[str]
    ) -> None:
        """Test that localhost is explicitly not allowed in DOMAIN_REGEX."""
        assert not domain_pattern.match("localhost")


class TestRegexPatternConsistency:
    """Test cases for consistency across regex patterns."""

    def test_all_patterns_compile(self) -> None:
        """Test that all regex patterns can be compiled without errors."""
        patterns = [
            EMAIL_BASIC_REGEX,
            NAME_EMAIL_SIMPLE_REGEX,
            NAME_EMAIL_BASIC_REGEX,
            DOMAIN_WITH_LOCALHOST_REGEX,
            DOMAIN_WITH_LOCALHOST_PORT_REGEX,
            DOMAIN_REGEX,
        ]
        for pattern in patterns:
            compiled = compile_re(pattern)
            assert isinstance(compiled, re.Pattern)

    def test_compiled_patterns_are_patterns(self) -> None:
        """Test that all COMPILED_ constants are Pattern objects."""
        compiled_patterns = [
            COMPILED_EMAIL_BASIC_REGEX,
            COMPILED_NAME_EMAIL_SIMPLE_REGEX,
            COMPILED_NAME_EMAIL_BASIC_REGEX,
            COMPILED_DOMAIN_WITH_LOCALHOST_REGEX,
            COMPILED_DOMAIN_WITH_LOCALHOST_PORT_REGEX,
            COMPILED_DOMAIN_REGEX,
        ]
        for pattern in compiled_patterns:
            assert isinstance(pattern, re.Pattern)

    def test_domain_patterns_are_strings(self) -> None:
        """Test that all regex constants are strings."""
        assert isinstance(EMAIL_BASIC_REGEX, str)
        assert isinstance(NAME_EMAIL_SIMPLE_REGEX, str)
        assert isinstance(NAME_EMAIL_BASIC_REGEX, str)
        assert isinstance(DOMAIN_WITH_LOCALHOST_REGEX, str)
        assert isinstance(DOMAIN_WITH_LOCALHOST_PORT_REGEX, str)
        assert isinstance(DOMAIN_REGEX, str)

    def test_domain_patterns_hierarchy(self) -> None:
        """Test that domain patterns follow expected hierarchy."""
        # DOMAIN_WITH_LOCALHOST_PORT_REGEX should accept everything that
        # DOMAIN_WITH_LOCALHOST_REGEX accepts
        domain_localhost = COMPILED_DOMAIN_WITH_LOCALHOST_REGEX
        domain_localhost_port = COMPILED_DOMAIN_WITH_LOCALHOST_PORT_REGEX

        test_cases = ["example.com", "subdomain.example.com", "localhost"]
        for test_case in test_cases:
            if domain_localhost.match(test_case):
                assert domain_localhost_port.match(test_case), (
                    f"{test_case} should match both patterns"
                )
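

# Illustrative sketch, not part of the diff: the actual pattern source lives in
# the module under test and is not shown here. A hypothetical pattern that would
# satisfy the cases above could look roughly like this (assumes `re` is imported
# at the top of this file, which the re.Pattern annotations already require):
_SKETCH_DOMAIN_WITH_PORT = re.compile(
    r"^(localhost|[a-zA-Z0-9]([a-zA-Z0-9-]*[a-zA-Z0-9])?"
    r"(\.[a-zA-Z0-9]([a-zA-Z0-9-]*[a-zA-Z0-9])?)*\.[a-zA-Z]{2,})"
    r"(:\d+)?$"
)


def _sketch_domain_with_port_examples() -> None:
    # Mirrors two of the parametrized cases above; port range is not validated.
    assert _SKETCH_DOMAIN_WITH_PORT.match("localhost:8080")
    assert not _SKETCH_DOMAIN_WITH_PORT.match("example.com:abc")
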
881 tests/unit/config_handling/test_settings_loader.py Normal file
@@ -0,0 +1,881 @@
"""
Unit tests for SettingsLoader class
"""

import configparser
from pathlib import Path
from unittest.mock import Mock

import pytest
from pytest import CaptureFixture

from corelibs.config_handling.settings_loader import SettingsLoader
from corelibs.logging_handling.log import Log


class TestSettingsLoaderInit:
    """Test cases for SettingsLoader initialization"""

    def test_init_with_valid_config_file(self, tmp_path: Path):
        """Test initialization with a valid config file"""
        config_file = tmp_path.joinpath("test.ini")
        config_file.write_text("[Section]\nkey=value\n")

        loader = SettingsLoader(
            args={},
            config_file=config_file,
            log=None,
            always_print=False
        )

        assert loader.args == {}
        assert loader.config_file == config_file
        assert loader.log is None
        assert loader.always_print is False
        assert loader.config_parser is not None
        assert isinstance(loader.config_parser, configparser.ConfigParser)

    def test_init_with_missing_config_file(self, tmp_path: Path):
        """Test initialization with missing config file"""
        config_file = tmp_path.joinpath("missing.ini")

        loader = SettingsLoader(
            args={},
            config_file=config_file,
            log=None,
            always_print=False
        )

        assert loader.config_parser is None

    def test_init_with_invalid_config_folder(self):
        """Test initialization with invalid config folder path"""
        config_file = Path("/nonexistent/path/test.ini")

        with pytest.raises(ValueError, match="Cannot find the config folder"):
            SettingsLoader(
                args={},
                config_file=config_file,
                log=None,
                always_print=False
            )

    def test_init_with_log(self, tmp_path: Path):
        """Test initialization with Log object"""
        config_file = tmp_path.joinpath("test.ini")
        config_file.write_text("[Section]\nkey=value\n")
        mock_log = Mock(spec=Log)

        loader = SettingsLoader(
            args={"test": "value"},
            config_file=config_file,
            log=mock_log,
            always_print=True
        )

        assert loader.log == mock_log
        assert loader.always_print is True
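

# Illustrative sketch, not part of the original test file: how the constructor
# exercised above is typically wired up. The file name and args dict are
# hypothetical; the keyword arguments mirror the calls in TestSettingsLoaderInit.
def _sketch_loader(tmp_path: Path) -> SettingsLoader:
    config_file = tmp_path.joinpath("app.ini")  # hypothetical file name
    config_file.write_text("[Main]\nkey=value\n")
    return SettingsLoader(
        args={},               # usually the parsed command line options
        config_file=config_file,
        log=None,              # or a corelibs Log instance
        always_print=False,
    )
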

class TestLoadSettings:
    """Test cases for load_settings method"""

    def test_load_settings_basic(self, tmp_path: Path):
        """Test loading basic settings without validation"""
        config_file = tmp_path.joinpath("test.ini")
        config_file.write_text("[TestSection]\nkey1=value1\nkey2=value2\n")

        loader = SettingsLoader(args={}, config_file=config_file)
        result = loader.load_settings("TestSection")

        assert result == {"key1": "value1", "key2": "value2"}

    def test_load_settings_with_missing_section(self, tmp_path: Path):
        """Test loading settings with missing section"""
        config_file = tmp_path.joinpath("test.ini")
        config_file.write_text("[OtherSection]\nkey=value\n")

        loader = SettingsLoader(args={}, config_file=config_file)

        with pytest.raises(ValueError, match="Cannot read \\[MissingSection\\]"):
            loader.load_settings("MissingSection")

    def test_load_settings_allow_not_exist(self, tmp_path: Path):
        """Test loading settings with allow_not_exist flag"""
        config_file = tmp_path.joinpath("test.ini")
        config_file.write_text("[OtherSection]\nkey=value\n")

        loader = SettingsLoader(args={}, config_file=config_file)
        result = loader.load_settings("MissingSection", allow_not_exist=True)

        assert result == {}

    def test_load_settings_mandatory_field_present(self, tmp_path: Path):
        """Test mandatory field validation when field is present"""
        config_file = tmp_path.joinpath("test.ini")
        config_file.write_text("[TestSection]\nrequired_field=value\n")

        loader = SettingsLoader(args={}, config_file=config_file)
        result = loader.load_settings(
            "TestSection",
            {"required_field": ["mandatory:yes"]}
        )

        assert result["required_field"] == "value"

    def test_load_settings_mandatory_field_missing(self, tmp_path: Path):
        """Test mandatory field validation when field is missing"""
        config_file = tmp_path.joinpath("test.ini")
        config_file.write_text("[TestSection]\nother_field=value\n")

        loader = SettingsLoader(args={}, config_file=config_file)

        with pytest.raises(ValueError, match="Missing or incorrect settings data"):
            loader.load_settings(
                "TestSection",
                {"required_field": ["mandatory:yes"]}
            )

    def test_load_settings_mandatory_field_empty(self, tmp_path: Path):
        """Test mandatory field validation when field is empty"""
        config_file = tmp_path.joinpath("test.ini")
        config_file.write_text("[TestSection]\nrequired_field=\n")

        loader = SettingsLoader(args={}, config_file=config_file)

        with pytest.raises(ValueError, match="Missing or incorrect settings data"):
            loader.load_settings(
                "TestSection",
                {"required_field": ["mandatory:yes"]}
            )

    def test_load_settings_with_split(self, tmp_path: Path):
        """Test splitting values into lists"""
        config_file = tmp_path.joinpath("test.ini")
        config_file.write_text("[TestSection]\nlist_field=a,b,c,d\n")

        loader = SettingsLoader(args={}, config_file=config_file)
        result = loader.load_settings(
            "TestSection",
            {"list_field": ["split:,"]}
        )

        assert result["list_field"] == ["a", "b", "c", "d"]

    def test_load_settings_with_custom_split_char(self, tmp_path: Path):
        """Test splitting with custom delimiter"""
        config_file = tmp_path.joinpath("test.ini")
        config_file.write_text("[TestSection]\nlist_field=a|b|c|d\n")

        loader = SettingsLoader(args={}, config_file=config_file)
        result = loader.load_settings(
            "TestSection",
            {"list_field": ["split:|"]}
        )

        assert result["list_field"] == ["a", "b", "c", "d"]

    def test_load_settings_split_removes_spaces(self, tmp_path: Path):
        """Test that split removes spaces from values"""
        config_file = tmp_path.joinpath("test.ini")
        config_file.write_text("[TestSection]\nlist_field=a, b , c , d\n")

        loader = SettingsLoader(args={}, config_file=config_file)
        result = loader.load_settings(
            "TestSection",
            {"list_field": ["split:,"]}
        )

        assert result["list_field"] == ["a", "b", "c", "d"]

    def test_load_settings_empty_split_char_fallback(self, tmp_path: Path, capsys: CaptureFixture[str]):
        """Test fallback to default split char when empty"""
        config_file = tmp_path.joinpath("test.ini")
        config_file.write_text("[TestSection]\nlist_field=a,b,c\n")

        loader = SettingsLoader(args={}, config_file=config_file)
        result = loader.load_settings(
            "TestSection",
            {"list_field": ["split:"]}
        )

        assert result["list_field"] == ["a", "b", "c"]
        captured = capsys.readouterr()
        assert "fallback to:" in captured.out

    def test_load_settings_split_empty_value(self, tmp_path: Path):
        """Test that split on empty value results in empty list"""
        config_file = tmp_path.joinpath("test.ini")
        config_file.write_text("[TestSection]\nlist_field=\n")

        loader = SettingsLoader(args={}, config_file=config_file)
        result = loader.load_settings(
            "TestSection",
            {"list_field": ["split:,"]}
        )

        assert result["list_field"] == []

    def test_load_settings_convert_to_int(self, tmp_path: Path):
        """Test converting values to int"""
        config_file = tmp_path.joinpath("test.ini")
        config_file.write_text("[TestSection]\nnumber=123\n")

        loader = SettingsLoader(args={}, config_file=config_file)
        result = loader.load_settings(
            "TestSection",
            {"number": ["convert:int"]}
        )

        assert result["number"] == 123
        assert isinstance(result["number"], int)

    def test_load_settings_convert_to_float(self, tmp_path: Path):
        """Test converting values to float"""
        config_file = tmp_path.joinpath("test.ini")
        config_file.write_text("[TestSection]\nnumber=123.45\n")

        loader = SettingsLoader(args={}, config_file=config_file)
        result = loader.load_settings(
            "TestSection",
            {"number": ["convert:float"]}
        )

        assert result["number"] == 123.45
        assert isinstance(result["number"], float)

    def test_load_settings_convert_to_bool_true(self, tmp_path: Path):
        """Test converting values to boolean True"""
        config_file = tmp_path.joinpath("test.ini")
        config_file.write_text("[TestSection]\nflag1=true\nflag2=True\n")

        loader = SettingsLoader(args={}, config_file=config_file)
        result = loader.load_settings(
            "TestSection",
            {"flag1": ["convert:bool"], "flag2": ["convert:bool"]}
        )

        assert result["flag1"] is True
        assert result["flag2"] is True

    def test_load_settings_convert_to_bool_false(self, tmp_path: Path):
        """Test converting values to boolean False"""
        config_file = tmp_path.joinpath("test.ini")
        config_file.write_text("[TestSection]\nflag1=false\nflag2=False\n")

        loader = SettingsLoader(args={}, config_file=config_file)
        result = loader.load_settings(
            "TestSection",
            {"flag1": ["convert:bool"], "flag2": ["convert:bool"]}
        )

        assert result["flag1"] is False
        assert result["flag2"] is False

    def test_load_settings_convert_invalid_type(self, tmp_path: Path):
        """Test converting with invalid type raises error"""
        config_file = tmp_path.joinpath("test.ini")
        config_file.write_text("[TestSection]\nvalue=test\n")

        loader = SettingsLoader(args={}, config_file=config_file)

        with pytest.raises(ValueError, match="convert type is invalid"):
            loader.load_settings(
                "TestSection",
                {"value": ["convert:invalid"]}
            )
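
    # Illustrative summary, not part of the original test file: the "convert:*"
    # rule strings exercised above, gathered into one hypothetical mapping.
    # Unknown targets raise ValueError ("convert type is invalid"), as the
    # previous test shows.
    _sketch_convert_rules = {
        "number": ["convert:int"],    # "123" -> 123
        "ratio": ["convert:float"],   # "123.45" -> 123.45
        "debug": ["convert:bool"],    # "true"/"True" -> True, "false"/"False" -> False
    }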

    def test_load_settings_empty_set_to_none(self, tmp_path: Path):
        """Test setting empty values to None"""
        config_file = tmp_path.joinpath("test.ini")
        config_file.write_text("[TestSection]\nother=value\n")

        loader = SettingsLoader(args={}, config_file=config_file)
        result = loader.load_settings(
            "TestSection",
            {"field": ["empty:"]}
        )

        assert result["field"] is None

    def test_load_settings_empty_set_to_custom_value(self, tmp_path: Path):
        """Test setting empty values to custom value"""
        config_file = tmp_path.joinpath("test.ini")
        config_file.write_text("[TestSection]\nother=value\n")

        loader = SettingsLoader(args={}, config_file=config_file)
        result = loader.load_settings(
            "TestSection",
            {"field": ["empty:default"]}
        )

        assert result["field"] == "default"

    def test_load_settings_matching_valid(self, tmp_path: Path):
        """Test matching validation with valid value"""
        config_file = tmp_path.joinpath("test.ini")
        config_file.write_text("[TestSection]\nmode=production\n")

        loader = SettingsLoader(args={}, config_file=config_file)
        result = loader.load_settings(
            "TestSection",
            {"mode": ["matching:development|staging|production"]}
        )

        assert result["mode"] == "production"

    def test_load_settings_matching_invalid(self, tmp_path: Path):
        """Test matching validation with invalid value"""
        config_file = tmp_path.joinpath("test.ini")
        config_file.write_text("[TestSection]\nmode=invalid\n")

        loader = SettingsLoader(args={}, config_file=config_file)

        with pytest.raises(ValueError, match="Missing or incorrect settings data"):
            loader.load_settings(
                "TestSection",
                {"mode": ["matching:development|staging|production"]}
            )

    def test_load_settings_in_valid(self, tmp_path: Path):
        """Test 'in' validation with valid value"""
        config_file = tmp_path.joinpath("test.ini")
        config_file.write_text("[TestSection]\nallowed=a,b,c\nvalue=b\n")

        loader = SettingsLoader(args={}, config_file=config_file)
        result = loader.load_settings(
            "TestSection",
            {
                "allowed": ["split:,"],
                "value": ["in:allowed"]
            }
        )

        assert result["value"] == "b"

    def test_load_settings_in_invalid(self, tmp_path: Path):
        """Test 'in' validation with invalid value"""
        config_file = tmp_path.joinpath("test.ini")
        config_file.write_text("[TestSection]\nallowed=a,b,c\nvalue=d\n")

        loader = SettingsLoader(args={}, config_file=config_file)

        with pytest.raises(ValueError, match="Missing or incorrect settings data"):
            loader.load_settings(
                "TestSection",
                {
                    "allowed": ["split:,"],
                    "value": ["in:allowed"]
                }
            )

    def test_load_settings_in_missing_target(self, tmp_path: Path):
        """Test 'in' validation with missing target"""
        config_file = tmp_path.joinpath("test.ini")
        config_file.write_text("[TestSection]\nvalue=a\n")

        loader = SettingsLoader(args={}, config_file=config_file)

        with pytest.raises(ValueError, match="Missing or incorrect settings data"):
            loader.load_settings(
                "TestSection",
                {"value": ["in:missing_target"]}
            )

    def test_load_settings_length_exact(self, tmp_path: Path):
        """Test length validation with exact match"""
        config_file = tmp_path.joinpath("test.ini")
        config_file.write_text("[TestSection]\nvalue=test\n")

        loader = SettingsLoader(args={}, config_file=config_file)
        result = loader.load_settings(
            "TestSection",
            {"value": ["length:4"]}
        )

        assert result["value"] == "test"

    def test_load_settings_length_exact_invalid(self, tmp_path: Path):
        """Test length validation with exact match failure"""
        config_file = tmp_path.joinpath("test.ini")
        config_file.write_text("[TestSection]\nvalue=test\n")

        loader = SettingsLoader(args={}, config_file=config_file)

        with pytest.raises(ValueError, match="Missing or incorrect settings data"):
            loader.load_settings(
                "TestSection",
                {"value": ["length:5"]}
            )

    def test_load_settings_length_range(self, tmp_path: Path):
        """Test length validation with range"""
        config_file = tmp_path.joinpath("test.ini")
        config_file.write_text("[TestSection]\nvalue=testing\n")

        loader = SettingsLoader(args={}, config_file=config_file)
        result = loader.load_settings(
            "TestSection",
            {"value": ["length:5-10"]}
        )

        assert result["value"] == "testing"

    def test_load_settings_length_min_only(self, tmp_path: Path):
        """Test length validation with minimum only"""
        config_file = tmp_path.joinpath("test.ini")
        config_file.write_text("[TestSection]\nvalue=testing\n")

        loader = SettingsLoader(args={}, config_file=config_file)
        result = loader.load_settings(
            "TestSection",
            {"value": ["length:5-"]}
        )

        assert result["value"] == "testing"

    def test_load_settings_length_max_only(self, tmp_path: Path):
        """Test length validation with maximum only"""
        config_file = tmp_path.joinpath("test.ini")
        config_file.write_text("[TestSection]\nvalue=test\n")

        loader = SettingsLoader(args={}, config_file=config_file)
        result = loader.load_settings(
            "TestSection",
            {"value": ["length:-10"]}
        )

        assert result["value"] == "test"

    def test_load_settings_range_valid(self, tmp_path: Path):
        """Test range validation with valid value"""
        config_file = tmp_path.joinpath("test.ini")
        config_file.write_text("[TestSection]\nnumber=25\n")

        loader = SettingsLoader(args={}, config_file=config_file)
        result = loader.load_settings(
            "TestSection",
            {"number": ["range:10-50"]}
        )

        assert result["number"] == "25"

    def test_load_settings_range_invalid(self, tmp_path: Path):
        """Test range validation with invalid value"""
        config_file = tmp_path.joinpath("test.ini")
        config_file.write_text("[TestSection]\nnumber=100\n")

        loader = SettingsLoader(args={}, config_file=config_file)

        with pytest.raises(ValueError, match="Missing or incorrect settings data"):
            loader.load_settings(
                "TestSection",
                {"number": ["range:10-50"]}
            )
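
    # Illustrative summary, not part of the original test file: the "length:"
    # and "range:" rule grammar exercised above. "length" bounds the string
    # length, "range" bounds the numeric value; note that "range" validates
    # only and leaves the value a string unless combined with "convert:int".
    _sketch_bounds_rules = {
        "token": ["length:4"],     # exactly 4 characters
        "name": ["length:5-10"],   # 5 to 10 characters
        "comment": ["length:5-"],  # at least 5 characters
        "code": ["length:-10"],    # at most 10 characters
        "port": ["convert:int", "range:10-50"],  # hypothetical combination
    }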

    def test_load_settings_check_int_valid(self, tmp_path: Path):
        """Test check:int with a valid integer"""
        config_file = tmp_path.joinpath("test.ini")
        config_file.write_text("[TestSection]\nnumber=12345\n")

        loader = SettingsLoader(args={}, config_file=config_file)
        result = loader.load_settings(
            "TestSection",
            {"number": ["check:int"]}
        )

        assert result["number"] == "12345"

    def test_load_settings_check_int_cleanup(self, tmp_path: Path):
        """Test check:int with cleanup of non-digit characters"""
        config_file = tmp_path.joinpath("test.ini")
        config_file.write_text("[TestSection]\nnumber=12a34b5\n")

        loader = SettingsLoader(args={}, config_file=config_file)
        result = loader.load_settings(
            "TestSection",
            {"number": ["check:int"]}
        )

        assert result["number"] == "12345"

    def test_load_settings_check_email_valid(self, tmp_path: Path):
        """Test check:string.email.basic with a valid email"""
        config_file = tmp_path.joinpath("test.ini")
        config_file.write_text("[TestSection]\nemail=test@example.com\n")

        loader = SettingsLoader(args={}, config_file=config_file)
        result = loader.load_settings(
            "TestSection",
            {"email": ["check:string.email.basic"]}
        )

        assert result["email"] == "test@example.com"

    def test_load_settings_check_email_invalid(self, tmp_path: Path):
        """Test check:string.email.basic with an invalid email"""
        config_file = tmp_path.joinpath("test.ini")
        config_file.write_text("[TestSection]\nemail=not-an-email\n")

        loader = SettingsLoader(args={}, config_file=config_file)

        with pytest.raises(ValueError, match="Missing or incorrect settings data"):
            loader.load_settings(
                "TestSection",
                {"email": ["check:string.email.basic"]}
            )

    def test_load_settings_args_override(self, tmp_path: Path, capsys: CaptureFixture[str]):
        """Test that command line arguments override config values"""
        config_file = tmp_path.joinpath("test.ini")
        config_file.write_text("[TestSection]\nvalue=config_value\n")

        loader = SettingsLoader(
            args={"value": "arg_value"},
            config_file=config_file
        )
        result = loader.load_settings(
            "TestSection",
            {"value": []}
        )

        assert result["value"] == "arg_value"
        captured = capsys.readouterr()
        assert "Command line option override" in captured.out

    def test_load_settings_args_no_flag(self, tmp_path: Path, capsys: CaptureFixture[str]):
        """Test default behavior (no args_override:yes) with a list argument that has split"""
        config_file = tmp_path.joinpath("test.ini")
        config_file.write_text("[TestSection]\nvalue=a,b,c\n")

        loader = SettingsLoader(
            args={"value": ["x", "y", "z"]},
            config_file=config_file
        )
        result = loader.load_settings(
            "TestSection",
            {"value": ["split:,"]}
        )

        # Without the args_override:yes flag, the config value is used (no override)
        assert result["value"] == ["a", "b", "c"]
        captured = capsys.readouterr()
        # The message is printed, but without args_override:yes the override doesn't happen
        assert "Command line option override" in captured.out

    def test_load_settings_args_list_no_split(self, tmp_path: Path, capsys: CaptureFixture[str]):
        """Test that list arguments without a split entry are skipped"""
        config_file = tmp_path.joinpath("test.ini")
        config_file.write_text("[TestSection]\nvalue=config_value\n")

        loader = SettingsLoader(
            args={"value": ["arg1", "arg2", "arg3"]},
            config_file=config_file
        )
        result = loader.load_settings(
            "TestSection",
            {"value": []}
        )

        # Should keep the config value since args is a list but no split is defined
        assert result["value"] == "config_value"
        captured = capsys.readouterr()
        # The message is printed, but a list without split prevents the override
        assert "Command line option override" in captured.out

    def test_load_settings_args_list_with_split(self, tmp_path: Path, capsys: CaptureFixture[str]):
        """Test that list arguments with a split entry and args_override:yes are applied"""
        config_file = tmp_path.joinpath("test.ini")
        config_file.write_text("[TestSection]\nvalue=a,b,c\n")

        loader = SettingsLoader(
            args={"value": ["arg1", "arg2", "arg3"]},
            config_file=config_file
        )
        result = loader.load_settings(
            "TestSection",
            {"value": ["split:,", "args_override:yes"]}
        )

        # Should use the args value because split is defined AND args_override:yes is set
        assert result["value"] == ["arg1", "arg2", "arg3"]
        captured = capsys.readouterr()
        assert "Command line option override" in captured.out

    def test_load_settings_args_no_with_mandatory(self, tmp_path: Path, capsys: CaptureFixture[str]):
        """Test default behavior (no args_override:yes) with a mandatory field and list args with split"""
        config_file = tmp_path.joinpath("test.ini")
        config_file.write_text("[TestSection]\nvalue=config1,config2\n")

        loader = SettingsLoader(
            args={"value": ["arg1", "arg2"]},
            config_file=config_file
        )
        result = loader.load_settings(
            "TestSection",
            {"value": ["mandatory:yes", "split:,"]}
        )

        # Should use the config value because args_override:yes is not set (default: no override)
        assert result["value"] == ["config1", "config2"]
        captured = capsys.readouterr()
        # The message is printed, but without args_override:yes the override doesn't happen
        assert "Command line option override" in captured.out

    def test_load_settings_args_no_with_mandatory_valid(self, tmp_path: Path, capsys: CaptureFixture[str]):
        """Test default behavior with string args (always overrides under the current logic)"""
        config_file = tmp_path.joinpath("test.ini")
        config_file.write_text("[TestSection]\nvalue=config_value\n")

        loader = SettingsLoader(
            args={"value": "arg_value"},
            config_file=config_file
        )
        result = loader.load_settings(
            "TestSection",
            {"value": ["mandatory:yes"]}
        )

        # Current behavior: string args without split always override (regardless of args_override:yes)
        assert result["value"] == "arg_value"
        captured = capsys.readouterr()
        assert "Command line option override" in captured.out

    def test_load_settings_args_string_no_split(self, tmp_path: Path, capsys: CaptureFixture[str]):
        """Test that string arguments with args_override:yes work normally"""
        config_file = tmp_path.joinpath("test.ini")
        config_file.write_text("[TestSection]\nvalue=config_value\n")

        loader = SettingsLoader(
            args={"value": "arg_value"},
            config_file=config_file
        )
        result = loader.load_settings(
            "TestSection",
            {"value": ["args_override:yes"]}
        )

        # Should use the args value for non-list args with args_override:yes
        assert result["value"] == "arg_value"
        captured = capsys.readouterr()
        assert "Command line option override" in captured.out
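
    # Behavior summary as a sketch, not part of the original test file,
    # distilled from the override tests above:
    #   - string args currently always override the config value, with or
    #     without "args_override:yes"
    #   - list args override only when both "split:" and "args_override:yes"
    #     are set
    #   - list args without "split:" are skipped, so the config value wins
    _sketch_override_rules = {
        "mode": ["args_override:yes"],             # string arg overrides
        "tags": ["split:,", "args_override:yes"],  # list arg overrides
        "hosts": ["split:,"],                      # list arg is ignored
    }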

    def test_load_settings_no_config_file_with_args(self, tmp_path: Path):
        """Test loading settings without config file but with mandatory args"""
        config_file = tmp_path.joinpath("missing.ini")

        loader = SettingsLoader(
            args={"required": "value"},
            config_file=config_file
        )
        result = loader.load_settings(
            "TestSection",
            {"required": ["mandatory:yes"]}
        )

        assert result["required"] == "value"

    def test_load_settings_no_config_file_missing_args(self, tmp_path: Path):
        """Test loading settings without config file and missing args"""
        config_file = tmp_path.joinpath("missing.ini")

        loader = SettingsLoader(args={}, config_file=config_file)

        with pytest.raises(ValueError, match="Cannot find file"):
            loader.load_settings(
                "TestSection",
                {"required": ["mandatory:yes"]}
            )

    def test_load_settings_check_list_with_split(self, tmp_path: Path):
        """Test check validation with list values"""
        config_file = tmp_path.joinpath("test.ini")
        config_file.write_text("[TestSection]\nlist=abc,def,ghi\n")

        loader = SettingsLoader(args={}, config_file=config_file)
        result = loader.load_settings(
            "TestSection",
            {"list": ["split:,", "check:string.alphanumeric"]}
        )

        assert result["list"] == ["abc", "def", "ghi"]

    def test_load_settings_check_list_cleanup(self, tmp_path: Path):
        """Test check validation cleans up list values"""
        config_file = tmp_path.joinpath("test.ini")
        config_file.write_text("[TestSection]\nlist=ab-c,de_f,gh!i\n")

        loader = SettingsLoader(args={}, config_file=config_file)
        result = loader.load_settings(
            "TestSection",
            {"list": ["split:,", "check:string.alphanumeric"]}
        )

        assert result["list"] == ["abc", "def", "ghi"]

    def test_load_settings_invalid_check_type(self, tmp_path: Path):
        """Test with invalid check type"""
        config_file = tmp_path.joinpath("test.ini")
        config_file.write_text("[TestSection]\nvalue=test\n")

        loader = SettingsLoader(args={}, config_file=config_file)

        with pytest.raises(ValueError, match="Cannot get SettingsLoaderCheck.CHECK_SETTINGS"):
            loader.load_settings(
                "TestSection",
                {"value": ["check:invalid.check.type"]}
            )


class TestComplexScenarios:
    """Test cases for complex real-world scenarios"""

    def test_complex_validation_scenario(self, tmp_path: Path):
        """Test complex scenario with multiple validations"""
        config_file = tmp_path.joinpath("test.ini")
        config_file.write_text(
            "[Production]\n"
            "environment=production\n"
            "allowed_envs=development,staging,production\n"
            "port=8080\n"
            "host=example.com\n"
            "timeout=30\n"
            "debug=false\n"
            "features=auth,logging,monitoring\n"
        )

        loader = SettingsLoader(args={}, config_file=config_file)
        result = loader.load_settings(
            "Production",
            {
                "environment": [
                    "mandatory:yes",
                    "matching:development|staging|production",
                    "in:allowed_envs"
                ],
                "allowed_envs": ["split:,"],
                "port": ["mandatory:yes", "convert:int", "range:1-65535"],
                "host": ["mandatory:yes"],
                "timeout": ["convert:int", "range:1-"],
                "debug": ["convert:bool"],
                "features": ["split:,", "check:string.alphanumeric"],
            }
        )

        assert result["environment"] == "production"
        assert result["allowed_envs"] == ["development", "staging", "production"]
        assert result["port"] == 8080
        assert isinstance(result["port"], int)
        assert result["host"] == "example.com"
        assert result["timeout"] == 30
        assert result["debug"] is False
        assert result["features"] == ["auth", "logging", "monitoring"]

    def test_email_list_validation(self, tmp_path: Path):
        """Test email list with validation"""
        config_file = tmp_path.joinpath("test.ini")
        config_file.write_text(
            "[EmailConfig]\n"
            "emails=test@example.com,admin@domain.org,user+tag@site.co.uk\n"
        )

        loader = SettingsLoader(args={}, config_file=config_file)
        result = loader.load_settings(
            "EmailConfig",
            {"emails": ["split:,", "mandatory:yes", "check:string.email.basic"]}
        )

        assert len(result["emails"]) == 3
        assert "test@example.com" in result["emails"]

    def test_mixed_args_and_config(self, tmp_path: Path):
        """Test mixing command line args and config file"""
        config_file = tmp_path.joinpath("test.ini")
        config_file.write_text(
            "[Settings]\n"
            "value1=config_value1\n"
            "value2=config_value2\n"
        )

        loader = SettingsLoader(
            args={"value1": "arg_value1"},
            config_file=config_file
        )
        result = loader.load_settings(
            "Settings",
            {"value1": [], "value2": []}
        )

        assert result["value1"] == "arg_value1"  # Overridden by arg
        assert result["value2"] == "config_value2"  # From config

    def test_multiple_check_types(self, tmp_path: Path):
        """Test multiple different check types"""
        config_file = tmp_path.joinpath("test.ini")
        config_file.write_text(
            "[Checks]\n"
            "numbers=123,456,789\n"
            "alphas=abc,def,ghi\n"
            "emails=test@example.com\n"
            "date=2025-01-15\n"
        )

        loader = SettingsLoader(args={}, config_file=config_file)
        result = loader.load_settings(
            "Checks",
            {
                "numbers": ["split:,", "check:int"],
                "alphas": ["split:,", "check:string.alphanumeric"],
                "emails": ["check:string.email.basic"],
                "date": ["check:string.date"],
            }
        )

        assert result["numbers"] == ["123", "456", "789"]
        assert result["alphas"] == ["abc", "def", "ghi"]
        assert result["emails"] == "test@example.com"
        assert result["date"] == "2025-01-15"

    def test_args_no_and_list_skip_combination(self, tmp_path: Path, capsys: CaptureFixture[str]):
        """Test combination of args_override:yes flag and list argument skip behavior"""
        config_file = tmp_path.joinpath("test.ini")
        config_file.write_text(
            "[Settings]\n"
            "no_override=a,b,c\n"
            "list_no_split=config_list\n"
            "list_with_split=x,y,z\n"
            "normal=config_normal\n"
        )

        loader = SettingsLoader(
            args={
                "no_override": ["arg1", "arg2"],
                "list_no_split": ["arg1", "arg2"],
                "list_with_split": ["p", "q", "r"],
                "normal": "arg_normal"
            },
            config_file=config_file
        )
        result = loader.load_settings(
            "Settings",
            {
                "no_override": ["split:,"],
                "list_no_split": [],
                "list_with_split": ["split:,", "args_override:yes"],
                "normal": ["args_override:yes"]
            }
        )

        # Should use config value (no args_override:yes flag for list with split)
        assert result["no_override"] == ["a", "b", "c"]
        # Should use config value because args is list without split
        assert result["list_no_split"] == "config_list"
        # Should use args value because split is defined AND args_override:yes is set
        assert result["list_with_split"] == ["p", "q", "r"]
        # Should use args value (args_override:yes set for string arg)
        assert result["normal"] == "arg_normal"

        captured = capsys.readouterr()
        # Should see override messages (even though list_no_split prints, it doesn't apply)
        assert "Command line option override" in captured.out


# __END__
3 tests/unit/db_handling/__init__.py Normal file
@@ -0,0 +1,3 @@
"""
db_handling tests
"""
461 tests/unit/db_handling/test_sql_main.py Normal file
@@ -0,0 +1,461 @@
"""
PyTest: db_handling/sql_main
Tests for SQLMain class - Main SQL interface wrapper

Note: Pylance warnings about "Redefining name from outer scope" in fixtures are expected.
This is standard pytest fixture behavior where fixture parameters shadow fixture definitions.
"""
# pylint: disable=redefined-outer-name,too-many-public-methods,protected-access
# pyright: reportUnknownParameterType=false, reportUnknownArgumentType=false
# pyright: reportMissingParameterType=false, reportUnknownVariableType=false
# pyright: reportArgumentType=false, reportGeneralTypeIssues=false

from pathlib import Path
from typing import Generator
from unittest.mock import MagicMock, patch

import pytest

from corelibs.db_handling.sql_main import SQLMain, IDENT_SPLIT_CHARACTER
from corelibs.db_handling.sqlite_io import SQLiteIO


# Test fixtures
@pytest.fixture
def mock_logger() -> MagicMock:
    """Create a mock logger for testing"""
    logger = MagicMock()
    logger.debug = MagicMock()
    logger.info = MagicMock()
    logger.warning = MagicMock()
    logger.error = MagicMock()
    return logger


@pytest.fixture
def temp_db_path(tmp_path: Path) -> Path:
    """Create a temporary database file path"""
    return tmp_path / "test_database.db"


@pytest.fixture
def mock_sqlite_io() -> Generator[MagicMock, None, None]:
    """Create a mock SQLiteIO instance"""
    mock_io = MagicMock(spec=SQLiteIO)
    mock_io.conn = MagicMock()
    mock_io.db_connected = MagicMock(return_value=True)
    mock_io.db_close = MagicMock()
    mock_io.execute_query = MagicMock(return_value=[])
    yield mock_io


# Test constant
class TestConstants:
    """Tests for module-level constants"""

    def test_ident_split_character(self):
        """Test that IDENT_SPLIT_CHARACTER is defined correctly"""
        assert IDENT_SPLIT_CHARACTER == ':'
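

# Illustrative sketch, not part of the original test file: the db_ident
# strings used in the tests below have the form "<target>:<path>", e.g.
# "sqlite:/path/to/db.db". Splitting on the first IDENT_SPLIT_CHARACTER is an
# assumption about the parsing; only the ':' constant itself comes from the
# module under test.
def _sketch_split_ident(db_ident: str) -> tuple[str, str]:
    target, _, dsn = db_ident.partition(IDENT_SPLIT_CHARACTER)
    return target, dsn  # e.g. ("sqlite", "/path/to/db.db")
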

# Test SQLMain class initialization
class TestSQLMainInit:
    """Tests for SQLMain.__init__"""

    @patch('corelibs.db_handling.sql_main.SQLiteIO')
    def test_successful_initialization_sqlite(
        self, mock_sqlite_class: MagicMock, mock_logger: MagicMock, temp_db_path: Path
    ):
        """Test successful initialization with SQLite"""
        mock_sqlite_instance = MagicMock()
        mock_sqlite_instance.conn = MagicMock()
        mock_sqlite_instance.db_connected = MagicMock(return_value=True)
        mock_sqlite_class.return_value = mock_sqlite_instance

        db_ident = f'sqlite:{temp_db_path}'
        sql_main = SQLMain(mock_logger, db_ident)

        assert sql_main.log == mock_logger
        assert sql_main.dbh == mock_sqlite_instance
        assert sql_main.db_target == 'sqlite'
        mock_sqlite_class.assert_called_once_with(mock_logger, str(temp_db_path), row_factory='Dict')

    @patch('corelibs.db_handling.sql_main.SQLiteIO')
    def test_initialization_connection_failure(self, mock_sqlite_class: MagicMock, mock_logger: MagicMock):
        """Test initialization fails when connection cannot be established"""
        mock_sqlite_instance = MagicMock()
        mock_sqlite_instance.conn = None
        mock_sqlite_instance.db_connected = MagicMock(return_value=False)
        mock_sqlite_class.return_value = mock_sqlite_instance

        db_ident = 'sqlite:/path/to/db.db'
        with pytest.raises(ValueError, match='DB Connection failed for: sqlite'):
            SQLMain(mock_logger, db_ident)

    def test_initialization_invalid_db_target(self, mock_logger: MagicMock):
        """Test initialization with unsupported database target"""
        db_ident = 'postgresql:/path/to/db'
        with pytest.raises(ValueError, match='SQL interface for postgresql is not implemented'):
            SQLMain(mock_logger, db_ident)

    def test_initialization_malformed_db_ident(self, mock_logger: MagicMock):
        """Test initialization with malformed db_ident string"""
        db_ident = 'sqlite_no_colon'
        with pytest.raises(ValueError):
            SQLMain(mock_logger, db_ident)
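

# Design note with a sketch, not part of the original test file: the
# mock_sqlite_instance setup below repeats in most tests and could be factored
# into a small helper like this (hypothetical):
def _make_connected_sqlite_mock() -> MagicMock:
    instance = MagicMock()
    instance.conn = MagicMock()
    instance.db_connected = MagicMock(return_value=True)
    return instance
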

# Test SQLMain.connect method
class TestSQLMainConnect:
    """Tests for SQLMain.connect"""

    @patch('corelibs.db_handling.sql_main.SQLiteIO')
    def test_connect_when_already_connected(
        self, mock_sqlite_class: MagicMock, mock_logger: MagicMock, temp_db_path: Path
    ):
        """Test connect warns when already connected"""
        mock_sqlite_instance = MagicMock()
        mock_sqlite_instance.conn = MagicMock()
        mock_sqlite_instance.db_connected = MagicMock(return_value=True)
        mock_sqlite_class.return_value = mock_sqlite_instance

        db_ident = f'sqlite:{temp_db_path}'
        sql_main = SQLMain(mock_logger, db_ident)

        # Reset mock to check second call
        mock_logger.warning.reset_mock()

        # Try to connect again
        sql_main.connect(f'sqlite:{temp_db_path}')

        # Should have warned about existing connection
        mock_logger.warning.assert_called_once()
        assert 'already exists' in str(mock_logger.warning.call_args)

    @patch('corelibs.db_handling.sql_main.SQLiteIO')
    def test_connect_sqlite_success(
        self, mock_sqlite_class: MagicMock, mock_logger: MagicMock, temp_db_path: Path
    ):
        """Test successful SQLite connection"""
        mock_sqlite_instance = MagicMock()
        mock_sqlite_instance.conn = MagicMock()
        mock_sqlite_instance.db_connected = MagicMock(return_value=True)
        mock_sqlite_class.return_value = mock_sqlite_instance

        sql_main = SQLMain.__new__(SQLMain)
        sql_main.log = mock_logger
        sql_main.dbh = None
        sql_main.db_target = None

        db_ident = f'sqlite:{temp_db_path}'
        sql_main.connect(db_ident)

        assert sql_main.db_target == 'sqlite'
        assert sql_main.dbh == mock_sqlite_instance
        mock_sqlite_class.assert_called_once_with(mock_logger, str(temp_db_path), row_factory='Dict')

    def test_connect_unsupported_database(self, mock_logger: MagicMock):
        """Test connect with unsupported database type"""
        sql_main = SQLMain.__new__(SQLMain)
        sql_main.log = mock_logger
        sql_main.dbh = None
        sql_main.db_target = None

        db_ident = 'mysql:/path/to/db'
        with pytest.raises(ValueError, match='SQL interface for mysql is not implemented'):
            sql_main.connect(db_ident)

    @patch('corelibs.db_handling.sql_main.SQLiteIO')
    def test_connect_db_connection_failed(
        self, mock_sqlite_class: MagicMock, mock_logger: MagicMock, temp_db_path: Path
    ):
        """Test connect raises error when DB connection fails"""
        mock_sqlite_instance = MagicMock()
        mock_sqlite_instance.db_connected = MagicMock(return_value=False)
        mock_sqlite_class.return_value = mock_sqlite_instance

        sql_main = SQLMain.__new__(SQLMain)
        sql_main.log = mock_logger
        sql_main.dbh = None
        sql_main.db_target = None

        db_ident = f'sqlite:{temp_db_path}'
        with pytest.raises(ValueError, match='DB Connection failed for: sqlite'):
            sql_main.connect(db_ident)


# Test SQLMain.close method
class TestSQLMainClose:
    """Tests for SQLMain.close"""

    @patch('corelibs.db_handling.sql_main.SQLiteIO')
    def test_close_successful(
        self, mock_sqlite_class: MagicMock, mock_logger: MagicMock, temp_db_path: Path
    ):
        """Test successful database close"""
        mock_sqlite_instance = MagicMock()
        mock_sqlite_instance.conn = MagicMock()
        mock_sqlite_instance.db_connected = MagicMock(return_value=True)
        mock_sqlite_instance.db_close = MagicMock()
        mock_sqlite_class.return_value = mock_sqlite_instance

        db_ident = f'sqlite:{temp_db_path}'
        sql_main = SQLMain(mock_logger, db_ident)

        sql_main.close()

        mock_sqlite_instance.db_close.assert_called_once()

    @patch('corelibs.db_handling.sql_main.SQLiteIO')
    def test_close_when_not_connected(
        self, mock_sqlite_class: MagicMock, mock_logger: MagicMock, temp_db_path: Path
    ):
        """Test close when not connected does nothing"""
        mock_sqlite_instance = MagicMock()
        mock_sqlite_instance.conn = MagicMock()
        mock_sqlite_instance.db_connected = MagicMock(return_value=True)
        mock_sqlite_instance.db_close = MagicMock()
        mock_sqlite_class.return_value = mock_sqlite_instance

        db_ident = f'sqlite:{temp_db_path}'
        sql_main = SQLMain(mock_logger, db_ident)

        # Change db_connected to return False to simulate disconnection
        mock_sqlite_instance.db_connected = MagicMock(return_value=False)

        sql_main.close()

        # Should not raise error and should exit early
        assert mock_sqlite_instance.db_close.call_count == 0

    def test_close_when_dbh_is_none(self, mock_logger: MagicMock):
        """Test close when dbh is None"""
        sql_main = SQLMain.__new__(SQLMain)
        sql_main.log = mock_logger
        sql_main.dbh = None
        sql_main.db_target = 'sqlite'

        # Should not raise error
        sql_main.close()


# Test SQLMain.connected method
class TestSQLMainConnected:
    """Tests for SQLMain.connected"""

    @patch('corelibs.db_handling.sql_main.SQLiteIO')
    def test_connected_returns_true(
        self, mock_sqlite_class: MagicMock, mock_logger: MagicMock, temp_db_path: Path
    ):
        """Test connected returns True when connected"""
        mock_sqlite_instance = MagicMock()
        mock_sqlite_instance.conn = MagicMock()
        mock_sqlite_instance.db_connected = MagicMock(return_value=True)
        mock_sqlite_class.return_value = mock_sqlite_instance

        db_ident = f'sqlite:{temp_db_path}'
        sql_main = SQLMain(mock_logger, db_ident)

        assert sql_main.connected() is True
        mock_logger.warning.assert_not_called()

    @patch('corelibs.db_handling.sql_main.SQLiteIO')
    def test_connected_returns_false_when_not_connected(
        self, mock_sqlite_class: MagicMock, mock_logger: MagicMock, temp_db_path: Path
    ):
        """Test connected returns False and warns when not connected"""
        mock_sqlite_instance = MagicMock()
        mock_sqlite_instance.conn = MagicMock()
        mock_sqlite_instance.db_connected = MagicMock(return_value=True)
        mock_sqlite_class.return_value = mock_sqlite_instance

        db_ident = f'sqlite:{temp_db_path}'
        sql_main = SQLMain(mock_logger, db_ident)

        # Reset warning calls from init
        mock_logger.warning.reset_mock()

        # Change db_connected to return False to simulate disconnection
        mock_sqlite_instance.db_connected = MagicMock(return_value=False)

        assert sql_main.connected() is False
        mock_logger.warning.assert_called_once()
        assert 'No connection' in str(mock_logger.warning.call_args)

    def test_connected_returns_false_when_dbh_is_none(self, mock_logger: MagicMock):
        """Test connected returns False when dbh is None"""
        sql_main = SQLMain.__new__(SQLMain)
        sql_main.log = mock_logger
        sql_main.dbh = None
        sql_main.db_target = 'sqlite'

        assert sql_main.connected() is False
        mock_logger.warning.assert_called_once()


# Test SQLMain.process_query method
class TestSQLMainProcessQuery:
    """Tests for SQLMain.process_query"""

    @patch('corelibs.db_handling.sql_main.SQLiteIO')
    def test_process_query_success_no_params(
        self, mock_sqlite_class: MagicMock, mock_logger: MagicMock, temp_db_path: Path
    ):
        """Test successful query execution without parameters"""
        mock_sqlite_instance = MagicMock()
        mock_sqlite_instance.conn = MagicMock()
        mock_sqlite_instance.db_connected = MagicMock(return_value=True)
        expected_result = [{'id': 1, 'name': 'test'}]
        mock_sqlite_instance.execute_query = MagicMock(return_value=expected_result)
        mock_sqlite_class.return_value = mock_sqlite_instance

        db_ident = f'sqlite:{temp_db_path}'
        sql_main = SQLMain(mock_logger, db_ident)

        query = "SELECT * FROM test"
        result = sql_main.process_query(query)

        assert result == expected_result
        mock_sqlite_instance.execute_query.assert_called_once_with(query, None)

    @patch('corelibs.db_handling.sql_main.SQLiteIO')
    def test_process_query_success_with_params(
        self, mock_sqlite_class: MagicMock, mock_logger: MagicMock, temp_db_path: Path
    ):
        """Test successful query execution with parameters"""
        mock_sqlite_instance = MagicMock()
        mock_sqlite_instance.conn = MagicMock()
        mock_sqlite_instance.db_connected = MagicMock(return_value=True)
        expected_result = [{'id': 1, 'name': 'test'}]
        mock_sqlite_instance.execute_query = MagicMock(return_value=expected_result)
        mock_sqlite_class.return_value = mock_sqlite_instance

        db_ident = f'sqlite:{temp_db_path}'
        sql_main = SQLMain(mock_logger, db_ident)

        query = "SELECT * FROM test WHERE id = ?"
        params = (1,)
        result = sql_main.process_query(query, params)

        assert result == expected_result
        mock_sqlite_instance.execute_query.assert_called_once_with(query, params)

    @patch('corelibs.db_handling.sql_main.SQLiteIO')
    def test_process_query_returns_false_on_error(
        self, mock_sqlite_class: MagicMock, mock_logger: MagicMock, temp_db_path: Path
    ):
        """Test query returns False when execute_query fails"""
        mock_sqlite_instance = MagicMock()
        mock_sqlite_instance.conn = MagicMock()
        mock_sqlite_instance.db_connected = MagicMock(return_value=True)
        mock_sqlite_instance.execute_query = MagicMock(return_value=False)
        mock_sqlite_class.return_value = mock_sqlite_instance

        db_ident = f'sqlite:{temp_db_path}'
        sql_main = SQLMain(mock_logger, db_ident)

        query = "SELECT * FROM nonexistent"
        result = sql_main.process_query(query)

        assert result is False

    @patch('corelibs.db_handling.sql_main.SQLiteIO')
    def test_process_query_dbh_is_none(
        self, mock_sqlite_class: MagicMock, mock_logger: MagicMock, temp_db_path: Path
    ):
        """Test query returns False when dbh is None"""
        mock_sqlite_instance = MagicMock()
        mock_sqlite_instance.conn = MagicMock()
        mock_sqlite_instance.db_connected = MagicMock(return_value=True)
        mock_sqlite_class.return_value = mock_sqlite_instance

        db_ident = f'sqlite:{temp_db_path}'
        sql_main = SQLMain(mock_logger, db_ident)

        # Manually set dbh to None
        sql_main.dbh = None

        query = "SELECT * FROM test"
        result = sql_main.process_query(query)

        assert result is False
        mock_logger.error.assert_called_once()
        assert 'Problem connecting to db' in str(mock_logger.error.call_args)

    @patch('corelibs.db_handling.sql_main.SQLiteIO')
    def test_process_query_returns_empty_list(
        self, mock_sqlite_class: MagicMock, mock_logger: MagicMock, temp_db_path: Path
    ):
        """Test query returns empty list when no results"""
        mock_sqlite_instance = MagicMock()
        mock_sqlite_instance.conn = MagicMock()
        mock_sqlite_instance.db_connected = MagicMock(return_value=True)
        mock_sqlite_instance.execute_query = MagicMock(return_value=[])
        mock_sqlite_class.return_value = mock_sqlite_instance

        db_ident = f'sqlite:{temp_db_path}'
        sql_main = SQLMain(mock_logger, db_ident)

        query = "SELECT * FROM test WHERE 1=0"
        result = sql_main.process_query(query)

        assert result == []
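

# Illustrative usage sketch, not part of the original test file: the unmocked
# equivalent of the workflow exercised by the integration-like tests below.
# The logging setup is an assumption; SQLMain only needs an object exposing
# debug/info/warning/error.
def _sketch_workflow(db_path: Path) -> None:
    import logging
    log = logging.getLogger("sketch")
    sql = SQLMain(log, f"sqlite:{db_path}")
    rows = sql.process_query("SELECT 1 AS one")  # rows, [] or False on error
    if sql.connected():
        sql.close()
    assert rows is not False
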

# Integration-like tests
class TestSQLMainIntegration:
    """Integration-like tests for complete workflows"""

    @patch('corelibs.db_handling.sql_main.SQLiteIO')
    def test_full_workflow_connect_query_close(
        self, mock_sqlite_class: MagicMock, mock_logger: MagicMock, temp_db_path: Path
    ):
        """Test complete workflow: connect, query, close"""
        mock_sqlite_instance = MagicMock()
        mock_sqlite_instance.conn = MagicMock()
        mock_sqlite_instance.db_connected = MagicMock(return_value=True)
        mock_sqlite_instance.execute_query = MagicMock(return_value=[{'count': 5}])
        mock_sqlite_instance.db_close = MagicMock()
        mock_sqlite_class.return_value = mock_sqlite_instance

        db_ident = f'sqlite:{temp_db_path}'
        sql_main = SQLMain(mock_logger, db_ident)

        # Execute query
        result = sql_main.process_query("SELECT COUNT(*) as count FROM test")
        assert result == [{'count': 5}]

        # Check connected
        assert sql_main.connected() is True

        # Close connection
        sql_main.close()
        mock_sqlite_instance.db_close.assert_called_once()

    @patch('corelibs.db_handling.sql_main.SQLiteIO')
    def test_multiple_queries_same_connection(
        self, mock_sqlite_class: MagicMock, mock_logger: MagicMock, temp_db_path: Path
    ):
        """Test multiple queries on the same connection"""
        mock_sqlite_instance = MagicMock()
        mock_sqlite_instance.conn = MagicMock()
        mock_sqlite_instance.db_connected = MagicMock(return_value=True)
        mock_sqlite_instance.execute_query = MagicMock(side_effect=[
            [{'id': 1}],
            [{'id': 2}],
            [{'id': 3}]
        ])
        mock_sqlite_class.return_value = mock_sqlite_instance

        db_ident = f'sqlite:{temp_db_path}'
        sql_main = SQLMain(mock_logger, db_ident)

        result1 = sql_main.process_query("SELECT * FROM test WHERE id = 1")
        result2 = sql_main.process_query("SELECT * FROM test WHERE id = 2")
        result3 = sql_main.process_query("SELECT * FROM test WHERE id = 3")

        assert result1 == [{'id': 1}]
        assert result2 == [{'id': 2}]
        assert result3 == [{'id': 3}]
        assert mock_sqlite_instance.execute_query.call_count == 3


# __END__
1133 tests/unit/db_handling/test_sqlite_io.py Normal file
File diff suppressed because it is too large
639 tests/unit/debug_handling/test_debug_helpers.py Normal file
@@ -0,0 +1,639 @@
"""
Unit tests for debug_handling.debug_helpers module
"""

import sys

import pytest

from corelibs.debug_handling.debug_helpers import (
    call_stack,
    exception_stack,
    OptExcInfo
)
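

# Orientation sketch, not part of the original test file: per the format test
# below, call_stack() renders each frame as "filename:function:lineno" and
# joins frames with the separator. The frame values here are invented:
def _sketch_frame_format() -> str:
    return " -> ".join([
        "test_debug_helpers.py:level_one:42",
        "test_debug_helpers.py:level_two:45",
    ])
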

class TestCallStack:
    """Test cases for call_stack function"""

    def test_call_stack_basic(self):
        """Test basic call_stack functionality"""
        result = call_stack()
        assert isinstance(result, str)
        assert "test_debug_helpers.py" in result
        assert "test_call_stack_basic" in result

    def test_call_stack_with_default_separator(self):
        """Test call_stack with default separator"""
        result = call_stack()
        assert " -> " in result

    def test_call_stack_with_custom_separator(self):
        """Test call_stack with custom separator"""
        result = call_stack(separator=" | ")
        assert " | " in result
        assert " -> " not in result

    def test_call_stack_with_empty_separator(self):
        """Test call_stack with empty separator (should default to ' -> ')"""
        result = call_stack(separator="")
        assert " -> " in result

    def test_call_stack_format(self):
        """Test call_stack output format (filename:function:lineno)"""
        result = call_stack()
        parts = result.split(" -> ")
        for part in parts:
            # Each part should have format: filename:function:lineno
            assert part.count(":") >= 2
        # Most parts should contain .py but some system frames might not
        # Just check that we have some .py files in the trace
        assert ".py" in result or "test_debug_helpers" in result

    def test_call_stack_with_start_offset(self):
        """Test call_stack with start offset"""
        result_no_offset = call_stack(start=0)
        result_with_offset = call_stack(start=2)

        # With offset, we should get fewer frames
        parts_no_offset = result_no_offset.split(" -> ")
        parts_with_offset = result_with_offset.split(" -> ")

        assert len(parts_with_offset) <= len(parts_no_offset)

    def test_call_stack_with_skip_last(self):
        """Test call_stack with skip_last parameter"""
        result_skip_default = call_stack(skip_last=-1)
        result_skip_more = call_stack(skip_last=-3)

        # Skipping more should result in fewer frames
        parts_default = result_skip_default.split(" -> ")
        parts_more = result_skip_more.split(" -> ")

        assert len(parts_more) <= len(parts_default)

    def test_call_stack_skip_last_positive_converts_to_negative(self):
        """Test that positive skip_last is converted to negative"""
        # Both should produce same result
        result_negative = call_stack(skip_last=-2)
        result_positive = call_stack(skip_last=2)

        assert result_negative == result_positive

    def test_call_stack_nested_calls(self):
        """Test call_stack in nested function calls"""
        def level_one():
            return level_two()

        def level_two():
            return level_three()

        def level_three():
            return call_stack()

        result = level_one()
        assert "level_one" in result
        assert "level_two" in result
        assert "level_three" in result
|
||||
def test_call_stack_reset_start_if_empty_false(self):
|
||||
"""Test call_stack with high start value and reset_start_if_empty=False"""
|
||||
# Using a very high start value should result in empty stack
|
||||
result = call_stack(start=1000, reset_start_if_empty=False)
|
||||
assert result == ""
|
||||
|
||||
def test_call_stack_reset_start_if_empty_true(self):
|
||||
"""Test call_stack with high start value and reset_start_if_empty=True"""
|
||||
# Using a very high start value with reset should give non-empty result
|
||||
result = call_stack(start=1000, reset_start_if_empty=True)
|
||||
assert result != ""
|
||||
assert "test_debug_helpers.py" in result
|
||||
|
||||
def test_call_stack_contains_line_numbers(self):
|
||||
"""Test that call_stack includes line numbers"""
|
||||
result = call_stack()
|
||||
# Extract parts and check for numbers
|
||||
parts = result.split(" -> ")
|
||||
for part in parts:
|
||||
# Line numbers should be present (digits at the end)
|
||||
assert any(char.isdigit() for char in part)
|
||||
|
||||
def test_call_stack_separator_none(self):
|
||||
"""Test call_stack with None separator"""
|
||||
result = call_stack(separator="") # Use empty string instead of None
|
||||
# Empty string should be converted to default ' -> '
|
||||
assert " -> " in result
|
||||
|
||||
def test_call_stack_multiple_separators(self):
|
||||
"""Test call_stack with various custom separators"""
|
||||
separators = [" | ", " >> ", " => ", " / ", "\n"]
|
||||
|
||||
for sep in separators:
|
||||
result = call_stack(separator=sep)
|
||||
assert sep in result or result == "" # May be empty based on stack depth
|
||||
|
||||
|
||||
class TestExceptionStack:
|
||||
"""Test cases for exception_stack function"""
|
||||
|
||||
def test_exception_stack_with_active_exception(self):
|
||||
"""Test exception_stack when an exception is active"""
|
||||
try:
|
||||
raise ValueError("Test exception")
|
||||
except ValueError:
|
||||
result = exception_stack()
|
||||
assert isinstance(result, str)
|
||||
assert "test_debug_helpers.py" in result
|
||||
assert "test_exception_stack_with_active_exception" in result
|
||||
|
||||
def test_exception_stack_format(self):
|
||||
"""Test exception_stack output format"""
|
||||
try:
|
||||
raise RuntimeError("Test error")
|
||||
except RuntimeError:
|
||||
result = exception_stack()
|
||||
parts = result.split(" -> ")
|
||||
for part in parts:
|
||||
# Each part should have format: filename:function:lineno
|
||||
assert part.count(":") >= 2
|
||||
|
||||
def test_exception_stack_with_custom_separator(self):
|
||||
"""Test exception_stack with custom separator"""
|
||||
def nested_call():
|
||||
def inner_call():
|
||||
raise TypeError("Test type error")
|
||||
inner_call()
|
||||
|
||||
try:
|
||||
nested_call()
|
||||
except TypeError:
|
||||
result = exception_stack(separator=" | ")
|
||||
# Only check separator if there are multiple frames
|
||||
if " | " in result or result.count(":") == 2:
|
||||
# Single frame or has separator
|
||||
assert isinstance(result, str)
|
||||
assert " -> " not in result
|
||||
|
||||
def test_exception_stack_with_empty_separator(self):
|
||||
"""Test exception_stack with empty separator (should default to ' -> ')"""
|
||||
def nested_call():
|
||||
def inner_call():
|
||||
raise KeyError("Test key error")
|
||||
inner_call()
|
||||
|
||||
try:
|
||||
nested_call()
|
||||
except KeyError:
|
||||
result = exception_stack(separator="")
|
||||
# Should use default separator if multiple frames exist
|
||||
assert isinstance(result, str)
|
||||
|
||||
def test_exception_stack_separator_none(self):
|
||||
"""Test exception_stack with empty separator"""
|
||||
def nested_call():
|
||||
def inner_call():
|
||||
raise IndexError("Test index error")
|
||||
inner_call()
|
||||
|
||||
try:
|
||||
nested_call()
|
||||
except IndexError:
|
||||
result = exception_stack(separator="") # Use empty string instead of None
|
||||
assert isinstance(result, str)
|
||||
|
||||
def test_exception_stack_nested_exceptions(self):
|
||||
"""Test exception_stack with nested function calls"""
|
||||
def level_one():
|
||||
level_two()
|
||||
|
||||
def level_two():
|
||||
level_three()
|
||||
|
||||
def level_three():
|
||||
raise ValueError("Nested exception")
|
||||
|
||||
try:
|
||||
level_one()
|
||||
except ValueError:
|
||||
result = exception_stack()
|
||||
# Should contain all levels in the stack
|
||||
assert "level_one" in result or "level_two" in result or "level_three" in result
|
||||
|
||||
def test_exception_stack_with_provided_exc_info(self):
|
||||
"""Test exception_stack with explicitly provided exc_info"""
|
||||
try:
|
||||
raise AttributeError("Test attribute error")
|
||||
except AttributeError:
|
||||
exc_info = sys.exc_info()
|
||||
result = exception_stack(exc_stack=exc_info)
|
||||
assert isinstance(result, str)
|
||||
assert len(result) > 0
|
||||
|
||||
def test_exception_stack_no_active_exception(self):
|
||||
"""Test exception_stack when no exception is active"""
|
||||
# This should handle the case gracefully
|
||||
# When no exception is active, sys.exc_info() returns (None, None, None)
|
||||
result = exception_stack()
|
||||
# With no traceback, should return empty string or handle gracefully
|
||||
assert isinstance(result, str)
|
||||
|
||||
def test_exception_stack_contains_line_numbers(self):
|
||||
"""Test that exception_stack includes line numbers"""
|
||||
try:
|
||||
raise OSError("Test OS error")
|
||||
except OSError:
|
||||
result = exception_stack()
|
||||
if result: # May be empty
|
||||
parts = result.split(" -> ")
|
||||
for part in parts:
|
||||
# Line numbers should be present
|
||||
assert any(char.isdigit() for char in part)
|
||||
|
||||
def test_exception_stack_multiple_exceptions(self):
|
||||
"""Test exception_stack captures the current exception only"""
|
||||
first_result = None
|
||||
second_result = None
|
||||
|
||||
try:
|
||||
raise ValueError("First exception")
|
||||
except ValueError:
|
||||
first_result = exception_stack()
|
||||
|
||||
try:
|
||||
raise TypeError("Second exception")
|
||||
except TypeError:
|
||||
second_result = exception_stack()
|
||||
|
||||
# Both should be valid but may differ
|
||||
assert isinstance(first_result, str)
|
||||
assert isinstance(second_result, str)
|
||||
|
||||
def test_exception_stack_with_multiple_separators(self):
|
||||
"""Test exception_stack with various custom separators"""
|
||||
separators = [" | ", " >> ", " => ", " / ", "\n"]
|
||||
|
||||
def nested_call():
|
||||
def inner_call():
|
||||
raise ValueError("Test exception")
|
||||
inner_call()
|
||||
|
||||
for sep in separators:
|
||||
try:
|
||||
nested_call()
|
||||
except ValueError:
|
||||
result = exception_stack(separator=sep)
|
||||
assert isinstance(result, str)
|
||||
# Separator only appears if there are multiple frames
|
||||
|
||||
|
||||
class TestOptExcInfo:
|
||||
"""Test cases for OptExcInfo type definition"""
|
||||
|
||||
def test_opt_exc_info_type_none_tuple(self):
|
||||
"""Test OptExcInfo can be None tuple"""
|
||||
exc_info: OptExcInfo = (None, None, None)
|
||||
assert exc_info == (None, None, None)
|
||||
|
||||
def test_opt_exc_info_type_exception_tuple(self):
|
||||
"""Test OptExcInfo can be exception tuple"""
|
||||
try:
|
||||
raise ValueError("Test")
|
||||
except ValueError:
|
||||
exc_info: OptExcInfo = sys.exc_info()
|
||||
assert exc_info[0] is not None
|
||||
assert exc_info[1] is not None
|
||||
assert exc_info[2] is not None
|
||||
|
||||
def test_opt_exc_info_with_exception_stack(self):
|
||||
"""Test that OptExcInfo works with exception_stack function"""
|
||||
try:
|
||||
raise RuntimeError("Test runtime error")
|
||||
except RuntimeError:
|
||||
exc_info = sys.exc_info()
|
||||
result = exception_stack(exc_stack=exc_info)
|
||||
assert isinstance(result, str)
|
||||
|
||||
|
||||
class TestIntegration:
|
||||
"""Integration tests combining multiple scenarios"""
|
||||
|
||||
def test_call_stack_and_exception_stack_together(self):
|
||||
"""Test using both call_stack and exception_stack in error handling"""
|
||||
def faulty_function():
|
||||
_ = call_stack() # Get call stack before exception
|
||||
raise ValueError("Intentional error")
|
||||
|
||||
try:
|
||||
faulty_function()
|
||||
except ValueError:
|
||||
exception_trace = exception_stack()
|
||||
|
||||
assert isinstance(exception_trace, str)
|
||||
assert "faulty_function" in exception_trace or "test_debug_helpers.py" in exception_trace
|
||||
|
||||
def test_nested_exception_with_call_stack(self):
|
||||
"""Test call_stack within exception handling"""
|
||||
def outer():
|
||||
return inner()
|
||||
|
||||
def inner():
|
||||
try:
|
||||
raise RuntimeError("Inner error")
|
||||
except RuntimeError:
|
||||
return {
|
||||
'call_stack': call_stack(),
|
||||
'exception_stack': exception_stack()
|
||||
}
|
||||
|
||||
result = outer()
|
||||
assert 'call_stack' in result
|
||||
assert 'exception_stack' in result
|
||||
assert isinstance(result['call_stack'], str)
|
||||
assert isinstance(result['exception_stack'], str)
|
||||
|
||||
def test_multiple_nested_levels(self):
|
||||
"""Test with multiple nested function levels"""
|
||||
def level_a():
|
||||
return level_b()
|
||||
|
||||
def level_b():
|
||||
return level_c()
|
||||
|
||||
def level_c():
|
||||
return level_d()
|
||||
|
||||
def level_d():
|
||||
try:
|
||||
raise ValueError("Deep error")
|
||||
except ValueError:
|
||||
return {
|
||||
'call': call_stack(),
|
||||
'exception': exception_stack()
|
||||
}
|
||||
|
||||
result = level_a()
|
||||
# Should contain information about the call chain
|
||||
assert result['call']
|
||||
assert result['exception']
|
||||
|
||||
def test_different_separators_consistency(self):
|
||||
"""Test that different separators work consistently"""
|
||||
separators = [" -> ", " | ", " / ", " >> "]
|
||||
|
||||
def nested_call():
|
||||
def inner_call():
|
||||
raise ValueError("Test")
|
||||
inner_call()
|
||||
|
||||
for sep in separators:
|
||||
try:
|
||||
nested_call()
|
||||
except ValueError:
|
||||
exc_result = exception_stack(separator=sep)
|
||||
call_result = call_stack(separator=sep)
|
||||
|
||||
assert isinstance(exc_result, str)
|
||||
assert isinstance(call_result, str)
|
||||
# Both should be valid strings (separator check only if multiple frames)
|
||||
|
||||
|
||||
class TestEdgeCases:
|
||||
"""Test edge cases and boundary conditions"""
|
||||
|
||||
def test_call_stack_with_zero_start(self):
|
||||
"""Test call_stack with start=0 (should include all frames)"""
|
||||
result = call_stack(start=0)
|
||||
assert isinstance(result, str)
|
||||
assert len(result) > 0
|
||||
|
||||
def test_call_stack_with_large_skip_last(self):
|
||||
"""Test call_stack with very large skip_last value"""
|
||||
result = call_stack(skip_last=-100)
|
||||
# Should handle gracefully, may be empty
|
||||
assert isinstance(result, str)
|
||||
|
||||
def test_exception_stack_none_exc_info(self):
|
||||
"""Test exception_stack with None as exc_stack"""
|
||||
result = exception_stack(exc_stack=None)
|
||||
assert isinstance(result, str)
|
||||
|
||||
def test_exception_stack_empty_tuple(self):
|
||||
"""Test exception_stack with empty exception info"""
|
||||
exc_info: OptExcInfo = (None, None, None)
|
||||
result = exception_stack(exc_stack=exc_info)
|
||||
assert isinstance(result, str)
|
||||
|
||||
def test_call_stack_special_characters_in_separator(self):
|
||||
"""Test call_stack with special characters in separator"""
|
||||
special_separators = ["\n", "\t", "->", "||", "//"]
|
||||
|
||||
for sep in special_separators:
|
||||
result = call_stack(separator=sep)
|
||||
assert isinstance(result, str)
|
||||
|
||||
def test_very_deep_call_stack(self):
|
||||
"""Test call_stack with very deep recursion (up to a limit)"""
|
||||
def recursive_call(depth: int, max_depth: int = 5) -> str:
|
||||
if depth >= max_depth:
|
||||
return call_stack()
|
||||
return recursive_call(depth + 1, max_depth)
|
||||
|
||||
result = recursive_call(0)
|
||||
assert isinstance(result, str)
|
||||
# Should contain multiple recursive_call entries
|
||||
assert result.count("recursive_call") > 0
|
||||
|
||||
def test_exception_stack_different_exception_types(self):
|
||||
"""Test exception_stack with various exception types"""
|
||||
exception_types = [
|
||||
ValueError("value"),
|
||||
TypeError("type"),
|
||||
KeyError("key"),
|
||||
IndexError("index"),
|
||||
AttributeError("attr"),
|
||||
RuntimeError("runtime"),
|
||||
]
|
||||
|
||||
for exc in exception_types:
|
||||
try:
|
||||
raise exc
|
||||
except (ValueError, TypeError, KeyError, IndexError, AttributeError, RuntimeError):
|
||||
result = exception_stack()
|
||||
assert isinstance(result, str)
|
||||
|
||||
|
||||
class TestRealWorldScenarios:
|
||||
"""Test real-world debugging scenarios"""
|
||||
|
||||
def test_debugging_workflow(self):
|
||||
"""Test typical debugging workflow with both functions"""
|
||||
def process_data(data: str) -> str:
|
||||
_ = call_stack() # Capture call stack for debugging
|
||||
if not data:
|
||||
raise ValueError("No data provided")
|
||||
return data.upper()
|
||||
|
||||
# Success case
|
||||
result = process_data("test")
|
||||
assert result == "TEST"
|
||||
|
||||
# Error case
|
||||
try:
|
||||
process_data("")
|
||||
except ValueError:
|
||||
exc_trace = exception_stack()
|
||||
assert isinstance(exc_trace, str)
|
||||
|
||||
def test_logging_context(self):
|
||||
"""Test using call_stack for logging context"""
|
||||
def get_logging_context():
|
||||
return {
|
||||
'timestamp': 'now',
|
||||
'stack': call_stack(start=1, separator=" > "),
|
||||
'function': 'get_logging_context'
|
||||
}
|
||||
|
||||
context = get_logging_context()
|
||||
assert 'stack' in context
|
||||
assert 'timestamp' in context
|
||||
assert isinstance(context['stack'], str)
|
||||
|
||||
def test_error_reporting(self):
|
||||
"""Test comprehensive error reporting"""
|
||||
def dangerous_operation() -> dict[str, str]:
|
||||
try:
|
||||
# Simulate some operation
|
||||
_ = 1 / 0
|
||||
except ZeroDivisionError:
|
||||
return {
|
||||
'error': 'Division by zero',
|
||||
'call_stack': call_stack(),
|
||||
'exception_stack': exception_stack(),
|
||||
}
|
||||
return {} # Fallback return
|
||||
|
||||
error_report = dangerous_operation()
|
||||
assert error_report is not None
|
||||
assert 'error' in error_report
|
||||
assert 'call_stack' in error_report
|
||||
assert 'exception_stack' in error_report
|
||||
assert error_report['error'] == 'Division by zero'
|
||||
|
||||
def test_function_tracing(self):
|
||||
"""Test function call tracing"""
|
||||
traces: list[str] = []
|
||||
|
||||
def traced_function_a() -> str:
|
||||
traces.append(call_stack())
|
||||
return traced_function_b()
|
||||
|
||||
def traced_function_b() -> str:
|
||||
traces.append(call_stack())
|
||||
return traced_function_c()
|
||||
|
||||
def traced_function_c() -> str:
|
||||
traces.append(call_stack())
|
||||
return "done"
|
||||
|
||||
result = traced_function_a()
|
||||
assert result == "done"
|
||||
assert len(traces) == 3
|
||||
# Each trace should be different (different call depths)
|
||||
assert all(isinstance(t, str) for t in traces)
|
||||
|
||||
def test_exception_chain_tracking(self):
|
||||
"""Test tracking exception chains"""
|
||||
exception_traces: list[str] = []
|
||||
|
||||
def operation_one() -> None:
|
||||
try:
|
||||
operation_two()
|
||||
except ValueError:
|
||||
exception_traces.append(exception_stack())
|
||||
raise
|
||||
|
||||
def operation_two() -> None:
|
||||
try:
|
||||
operation_three()
|
||||
except TypeError as exc:
|
||||
exception_traces.append(exception_stack())
|
||||
raise ValueError("Wrapped error") from exc
|
||||
|
||||
def operation_three() -> None:
|
||||
raise TypeError("Original error")
|
||||
|
||||
try:
|
||||
operation_one()
|
||||
except ValueError:
|
||||
exception_traces.append(exception_stack())
|
||||
|
||||
# Should have captured multiple exception stacks
|
||||
assert len(exception_traces) > 0
|
||||
assert all(isinstance(t, str) for t in exception_traces)
|
||||
|
||||
|
||||
class TestParametrized:
|
||||
"""Parametrized tests for comprehensive coverage"""
|
||||
|
||||
@pytest.mark.parametrize("start", [0, 1, 2, 5, 10])
|
||||
def test_call_stack_various_starts(self, start: int) -> None:
|
||||
"""Test call_stack with various start values"""
|
||||
result = call_stack(start=start)
|
||||
assert isinstance(result, str)
|
||||
|
||||
@pytest.mark.parametrize("skip_last", [-1, -2, -3, -5, 1, 2, 3, 5])
|
||||
def test_call_stack_various_skip_lasts(self, skip_last: int) -> None:
|
||||
"""Test call_stack with various skip_last values"""
|
||||
result = call_stack(skip_last=skip_last)
|
||||
assert isinstance(result, str)
|
||||
|
||||
@pytest.mark.parametrize("separator", [" -> ", " | ", " / ", " >> ", " => ", "\n", "\t"])
|
||||
def test_call_stack_various_separators(self, separator: str) -> None:
|
||||
"""Test call_stack with various separators"""
|
||||
result = call_stack(separator=separator)
|
||||
assert isinstance(result, str)
|
||||
if result:
|
||||
assert separator in result
|
||||
|
||||
@pytest.mark.parametrize("reset_start", [True, False])
|
||||
def test_call_stack_reset_start_variations(self, reset_start: bool) -> None:
|
||||
"""Test call_stack with reset_start_if_empty variations"""
|
||||
result = call_stack(start=100, reset_start_if_empty=reset_start)
|
||||
assert isinstance(result, str)
|
||||
if reset_start:
|
||||
assert len(result) > 0 # Should have content after reset
|
||||
else:
|
||||
assert len(result) == 0 # Should be empty
|
||||
|
||||
@pytest.mark.parametrize("separator", [" -> ", " | ", " / ", " >> ", "\n"])
|
||||
def test_exception_stack_various_separators(self, separator: str) -> None:
|
||||
"""Test exception_stack with various separators"""
|
||||
def nested_call():
|
||||
def inner_call():
|
||||
raise ValueError("Test")
|
||||
inner_call()
|
||||
|
||||
try:
|
||||
nested_call()
|
||||
except ValueError:
|
||||
result = exception_stack(separator=separator)
|
||||
assert isinstance(result, str)
|
||||
# Check that result is valid (separator only if multiple frames exist)
|
||||
|
||||
@pytest.mark.parametrize("exception_type", [
|
||||
ValueError,
|
||||
TypeError,
|
||||
KeyError,
|
||||
IndexError,
|
||||
AttributeError,
|
||||
RuntimeError,
|
||||
OSError,
|
||||
])
|
||||
def test_exception_stack_various_exception_types(self, exception_type: type[Exception]) -> None:
|
||||
"""Test exception_stack with various exception types"""
|
||||
try:
|
||||
raise exception_type("Test exception")
|
||||
except (ValueError, TypeError, KeyError, IndexError, AttributeError, RuntimeError, OSError):
|
||||
result = exception_stack()
|
||||
assert isinstance(result, str)
|
||||
|
||||
# __END__
|
||||
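For reference, a short sketch of how the two helpers above might be combined in application code. It is an inference from the test assertions, not documented usage: parameter names (start, separator, exc_stack) are taken from the test calls, and it assumes corelibs is importable.

from corelibs.debug_handling.debug_helpers import call_stack, exception_stack

def risky() -> None:
    raise ValueError("boom")

try:
    risky()
except ValueError:
    # "file:function:lineno" entries joined by the separator
    print("caught at:", exception_stack(separator=" | "))

# start=1 skips the innermost frame, per the start-offset tests
print("called from:", call_stack(start=1, separator=" -> "))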
288  tests/unit/debug_handling/test_dump_data.py  Normal file
@@ -0,0 +1,288 @@
"""
|
||||
Unit tests for debug_handling.dump_data module
|
||||
"""
|
||||
|
||||
import json
|
||||
from datetime import datetime, date
|
||||
from decimal import Decimal
|
||||
from typing import Any
|
||||
|
||||
import pytest
|
||||
|
||||
from corelibs.debug_handling.dump_data import dump_data
|
||||
|
||||
|
||||
class TestDumpData:
|
||||
"""Test cases for dump_data function"""
|
||||
|
||||
def test_dump_simple_dict(self):
|
||||
"""Test dumping a simple dictionary"""
|
||||
data = {"name": "John", "age": 30}
|
||||
result = dump_data(data)
|
||||
|
||||
assert isinstance(result, str)
|
||||
parsed = json.loads(result)
|
||||
assert parsed == data
|
||||
|
||||
def test_dump_simple_list(self):
|
||||
"""Test dumping a simple list"""
|
||||
data = [1, 2, 3, 4, 5]
|
||||
result = dump_data(data)
|
||||
|
||||
assert isinstance(result, str)
|
||||
parsed = json.loads(result)
|
||||
assert parsed == data
|
||||
|
||||
def test_dump_nested_dict(self):
|
||||
"""Test dumping a nested dictionary"""
|
||||
data = {
|
||||
"user": {
|
||||
"name": "Alice",
|
||||
"address": {
|
||||
"city": "Tokyo",
|
||||
"country": "Japan"
|
||||
}
|
||||
}
|
||||
}
|
||||
result = dump_data(data)
|
||||
|
||||
assert isinstance(result, str)
|
||||
parsed = json.loads(result)
|
||||
assert parsed == data
|
||||
|
||||
def test_dump_mixed_types(self):
|
||||
"""Test dumping data with mixed types"""
|
||||
data = {
|
||||
"string": "test",
|
||||
"number": 42,
|
||||
"float": 3.14,
|
||||
"boolean": True,
|
||||
"null": None,
|
||||
"list": [1, 2, 3]
|
||||
}
|
||||
result = dump_data(data)
|
||||
|
||||
assert isinstance(result, str)
|
||||
parsed = json.loads(result)
|
||||
assert parsed == data
|
||||
|
||||
def test_dump_with_indent_default(self):
|
||||
"""Test that indent is applied by default"""
|
||||
data = {"a": 1, "b": 2}
|
||||
result = dump_data(data)
|
||||
|
||||
# With indent, result should contain newlines
|
||||
assert "\n" in result
|
||||
assert " " in result # 4 spaces for indent
|
||||
|
||||
def test_dump_with_indent_true(self):
|
||||
"""Test explicit indent=True"""
|
||||
data = {"a": 1, "b": 2}
|
||||
result = dump_data(data, use_indent=True)
|
||||
|
||||
# With indent, result should contain newlines
|
||||
assert "\n" in result
|
||||
assert " " in result # 4 spaces for indent
|
||||
|
||||
def test_dump_without_indent(self):
|
||||
"""Test dumping without indentation"""
|
||||
data = {"a": 1, "b": 2}
|
||||
result = dump_data(data, use_indent=False)
|
||||
|
||||
# Without indent, result should be compact
|
||||
assert "\n" not in result
|
||||
assert result == '{"a": 1, "b": 2}'
|
||||
|
||||
def test_dump_unicode_characters(self):
|
||||
"""Test that unicode characters are preserved (ensure_ascii=False)"""
|
||||
data = {"message": "こんにちは", "emoji": "😀", "german": "Müller"}
|
||||
result = dump_data(data)
|
||||
|
||||
# Unicode characters should be preserved, not escaped
|
||||
assert "こんにちは" in result
|
||||
assert "😀" in result
|
||||
assert "Müller" in result
|
||||
|
||||
parsed = json.loads(result)
|
||||
assert parsed == data
|
||||
|
||||
def test_dump_datetime_object(self):
|
||||
"""Test dumping data with datetime objects (using default=str)"""
|
||||
now = datetime(2023, 10, 15, 14, 30, 0)
|
||||
data = {"timestamp": now}
|
||||
result = dump_data(data)
|
||||
|
||||
assert isinstance(result, str)
|
||||
# datetime should be converted to string
|
||||
assert "2023-10-15" in result
|
||||
|
||||
def test_dump_date_object(self):
|
||||
"""Test dumping data with date objects"""
|
||||
today = date(2023, 10, 15)
|
||||
data = {"date": today}
|
||||
result = dump_data(data)
|
||||
|
||||
assert isinstance(result, str)
|
||||
assert "2023-10-15" in result
|
||||
|
||||
def test_dump_decimal_object(self):
|
||||
"""Test dumping data with Decimal objects"""
|
||||
data = {"amount": Decimal("123.45")}
|
||||
result = dump_data(data)
|
||||
|
||||
assert isinstance(result, str)
|
||||
assert "123.45" in result
|
||||
|
||||
def test_dump_empty_dict(self):
|
||||
"""Test dumping an empty dictionary"""
|
||||
data = {}
|
||||
result = dump_data(data)
|
||||
|
||||
assert isinstance(result, str)
|
||||
parsed = json.loads(result)
|
||||
assert parsed == {}
|
||||
|
||||
def test_dump_empty_list(self):
|
||||
"""Test dumping an empty list"""
|
||||
data = []
|
||||
result = dump_data(data)
|
||||
|
||||
assert isinstance(result, str)
|
||||
parsed = json.loads(result)
|
||||
assert parsed == []
|
||||
|
||||
def test_dump_string_directly(self):
|
||||
"""Test dumping a string directly"""
|
||||
data = "Hello, World!"
|
||||
result = dump_data(data)
|
||||
|
||||
assert isinstance(result, str)
|
||||
parsed = json.loads(result)
|
||||
assert parsed == data
|
||||
|
||||
def test_dump_number_directly(self):
|
||||
"""Test dumping a number directly"""
|
||||
data = 42
|
||||
result = dump_data(data)
|
||||
|
||||
assert isinstance(result, str)
|
||||
parsed = json.loads(result)
|
||||
assert parsed == data
|
||||
|
||||
def test_dump_boolean_directly(self):
|
||||
"""Test dumping a boolean directly"""
|
||||
data = True
|
||||
result = dump_data(data)
|
||||
|
||||
assert isinstance(result, str)
|
||||
parsed = json.loads(result)
|
||||
assert parsed is True
|
||||
|
||||
def test_dump_none_directly(self):
|
||||
"""Test dumping None directly"""
|
||||
data = None
|
||||
result = dump_data(data)
|
||||
|
||||
assert isinstance(result, str)
|
||||
assert result == "null"
|
||||
parsed = json.loads(result)
|
||||
assert parsed is None
|
||||
|
||||
def test_dump_complex_nested_structure(self):
|
||||
"""Test dumping a complex nested structure"""
|
||||
data = {
|
||||
"users": [
|
||||
{
|
||||
"id": 1,
|
||||
"name": "Alice",
|
||||
"tags": ["admin", "user"],
|
||||
"metadata": {
|
||||
"created": datetime(2023, 1, 1),
|
||||
"active": True
|
||||
}
|
||||
},
|
||||
{
|
||||
"id": 2,
|
||||
"name": "Bob",
|
||||
"tags": ["user"],
|
||||
"metadata": {
|
||||
"created": datetime(2023, 6, 15),
|
||||
"active": False
|
||||
}
|
||||
}
|
||||
],
|
||||
"total": 2
|
||||
}
|
||||
result = dump_data(data)
|
||||
|
||||
assert isinstance(result, str)
|
||||
# Check that it's valid JSON
|
||||
parsed = json.loads(result)
|
||||
assert len(parsed["users"]) == 2
|
||||
assert parsed["total"] == 2
|
||||
|
||||
def test_dump_special_characters(self):
|
||||
"""Test dumping data with special characters"""
|
||||
data = {
|
||||
"quote": 'He said "Hello"',
|
||||
"backslash": "path\\to\\file",
|
||||
"newline": "line1\nline2",
|
||||
"tab": "col1\tcol2"
|
||||
}
|
||||
result = dump_data(data)
|
||||
|
||||
assert isinstance(result, str)
|
||||
parsed = json.loads(result)
|
||||
assert parsed == data
|
||||
|
||||
def test_dump_large_numbers(self):
|
||||
"""Test dumping large numbers"""
|
||||
data = {
|
||||
"big_int": 123456789012345678901234567890,
|
||||
"big_float": 1.23456789e100
|
||||
}
|
||||
result = dump_data(data)
|
||||
|
||||
assert isinstance(result, str)
|
||||
parsed = json.loads(result)
|
||||
assert parsed["big_int"] == data["big_int"]
|
||||
|
||||
def test_dump_list_of_dicts(self):
|
||||
"""Test dumping a list of dictionaries"""
|
||||
data = [
|
||||
{"id": 1, "name": "Item 1"},
|
||||
{"id": 2, "name": "Item 2"},
|
||||
{"id": 3, "name": "Item 3"}
|
||||
]
|
||||
result = dump_data(data)
|
||||
|
||||
assert isinstance(result, str)
|
||||
parsed = json.loads(result)
|
||||
assert parsed == data
|
||||
assert len(parsed) == 3
|
||||
|
||||
|
||||
class CustomObject:
|
||||
"""Custom class for testing default=str conversion"""
|
||||
def __init__(self, value: Any):
|
||||
self.value = value
|
||||
|
||||
def __str__(self):
|
||||
return f"CustomObject({self.value})"
|
||||
|
||||
|
||||
class TestDumpDataWithCustomObjects:
|
||||
"""Test cases for dump_data with custom objects"""
|
||||
|
||||
def test_dump_custom_object(self):
|
||||
"""Test that custom objects are converted using str()"""
|
||||
obj = CustomObject("test")
|
||||
data = {"custom": obj}
|
||||
result = dump_data(data)
|
||||
|
||||
assert isinstance(result, str)
|
||||
assert "CustomObject(test)" in result
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
pytest.main([__file__, "-v"])
|
||||
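A minimal usage sketch of dump_data as these tests exercise it: JSON output, 4-space indent by default, unicode preserved, and non-JSON types rendered via str(). The use_indent keyword is the one the tests pass; everything else here is illustrative and assumes corelibs is importable.

from datetime import datetime
from corelibs.debug_handling.dump_data import dump_data

payload = {"user": "Müller", "when": datetime(2023, 10, 15, 14, 30)}
print(dump_data(payload))                    # pretty-printed, datetime via str()
print(dump_data(payload, use_indent=False))  # compact single line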
560  tests/unit/debug_handling/test_profiling.py  Normal file
@@ -0,0 +1,560 @@
"""
|
||||
Unit tests for corelibs.debug_handling.profiling module
|
||||
"""
|
||||
|
||||
import time
|
||||
import tracemalloc
|
||||
|
||||
from corelibs.debug_handling.profiling import display_top, Profiling
|
||||
|
||||
|
||||
class TestDisplayTop:
|
||||
"""Test display_top function"""
|
||||
|
||||
def test_display_top_basic(self):
|
||||
"""Test that display_top returns a string with basic stats"""
|
||||
tracemalloc.start()
|
||||
|
||||
# Allocate some memory
|
||||
data = [0] * 10000
|
||||
|
||||
snapshot = tracemalloc.take_snapshot()
|
||||
tracemalloc.stop()
|
||||
|
||||
result = display_top(snapshot)
|
||||
|
||||
assert isinstance(result, str)
|
||||
assert "Top 10 lines" in result
|
||||
assert "KiB" in result
|
||||
assert "Total allocated size:" in result
|
||||
|
||||
# Clean up
|
||||
del data
|
||||
|
||||
def test_display_top_with_custom_limit(self):
|
||||
"""Test display_top with custom limit parameter"""
|
||||
tracemalloc.start()
|
||||
|
||||
# Allocate some memory
|
||||
data = [0] * 10000
|
||||
|
||||
snapshot = tracemalloc.take_snapshot()
|
||||
tracemalloc.stop()
|
||||
|
||||
result = display_top(snapshot, limit=5)
|
||||
|
||||
assert isinstance(result, str)
|
||||
assert "Top 5 lines" in result
|
||||
|
||||
# Clean up
|
||||
del data
|
||||
|
||||
def test_display_top_with_different_key_type(self):
|
||||
"""Test display_top with different key_type parameter"""
|
||||
tracemalloc.start()
|
||||
|
||||
# Allocate some memory
|
||||
data = [0] * 10000
|
||||
|
||||
snapshot = tracemalloc.take_snapshot()
|
||||
tracemalloc.stop()
|
||||
|
||||
result = display_top(snapshot, key_type='filename')
|
||||
|
||||
assert isinstance(result, str)
|
||||
assert "Top 10 lines" in result
|
||||
|
||||
# Clean up
|
||||
del data
|
||||
|
||||
def test_display_top_filters_traces(self):
|
||||
"""Test that display_top filters out bootstrap and unknown traces"""
|
||||
tracemalloc.start()
|
||||
|
||||
# Allocate some memory
|
||||
data = [0] * 10000
|
||||
|
||||
snapshot = tracemalloc.take_snapshot()
|
||||
tracemalloc.stop()
|
||||
|
||||
result = display_top(snapshot)
|
||||
|
||||
# Should not contain filtered traces
|
||||
assert "<frozen importlib._bootstrap>" not in result
|
||||
assert "<unknown>" not in result
|
||||
|
||||
# Clean up
|
||||
del data
|
||||
|
||||
def test_display_top_with_limit_larger_than_stats(self):
|
||||
"""Test display_top when limit is larger than available stats"""
|
||||
tracemalloc.start()
|
||||
|
||||
# Allocate some memory
|
||||
data = [0] * 100
|
||||
|
||||
snapshot = tracemalloc.take_snapshot()
|
||||
tracemalloc.stop()
|
||||
|
||||
result = display_top(snapshot, limit=1000)
|
||||
|
||||
assert isinstance(result, str)
|
||||
assert "Top 1000 lines" in result
|
||||
assert "Total allocated size:" in result
|
||||
|
||||
# Clean up
|
||||
del data
|
||||
|
||||
def test_display_top_empty_snapshot(self):
|
||||
"""Test display_top with a snapshot that has minimal traces"""
|
||||
tracemalloc.start()
|
||||
snapshot = tracemalloc.take_snapshot()
|
||||
tracemalloc.stop()
|
||||
|
||||
result = display_top(snapshot, limit=1)
|
||||
|
||||
assert isinstance(result, str)
|
||||
assert "Top 1 lines" in result
|
||||
|
||||
|
||||
class TestProfilingInitialization:
|
||||
"""Test Profiling class initialization"""
|
||||
|
||||
def test_profiling_initialization(self):
|
||||
"""Test that Profiling initializes correctly"""
|
||||
profiler = Profiling()
|
||||
|
||||
# Should be able to create instance
|
||||
assert isinstance(profiler, Profiling)
|
||||
|
||||
def test_profiling_initial_state(self):
|
||||
"""Test that Profiling starts in a clean state"""
|
||||
profiler = Profiling()
|
||||
|
||||
# Should not raise an error when calling end_profiling
|
||||
# even though start_profiling wasn't called
|
||||
profiler.end_profiling()
|
||||
|
||||
result = profiler.print_profiling()
|
||||
assert isinstance(result, str)
|
||||
|
||||
|
||||
class TestProfilingStartEnd:
|
||||
"""Test start_profiling and end_profiling functionality"""
|
||||
|
||||
def test_start_profiling(self):
|
||||
"""Test that start_profiling can be called"""
|
||||
profiler = Profiling()
|
||||
|
||||
# Should not raise an error
|
||||
profiler.start_profiling("test_operation")
|
||||
|
||||
def test_end_profiling(self):
|
||||
"""Test that end_profiling can be called"""
|
||||
profiler = Profiling()
|
||||
profiler.start_profiling("test_operation")
|
||||
|
||||
# Should not raise an error
|
||||
profiler.end_profiling()
|
||||
|
||||
def test_start_profiling_with_different_idents(self):
|
||||
"""Test start_profiling with different identifier strings"""
|
||||
profiler = Profiling()
|
||||
|
||||
identifiers = ["short", "longer_identifier", "very_long_identifier_with_many_chars"]
|
||||
|
||||
for ident in identifiers:
|
||||
profiler.start_profiling(ident)
|
||||
profiler.end_profiling()
|
||||
result = profiler.print_profiling()
|
||||
|
||||
assert ident in result
|
||||
|
||||
def test_end_profiling_without_start(self):
|
||||
"""Test that end_profiling can be called without start_profiling"""
|
||||
profiler = Profiling()
|
||||
|
||||
# Should not raise an error but internal state should indicate warning
|
||||
profiler.end_profiling()
|
||||
|
||||
result = profiler.print_profiling()
|
||||
assert isinstance(result, str)
|
||||
|
||||
def test_profiling_measures_time(self):
|
||||
"""Test that profiling measures elapsed time"""
|
||||
profiler = Profiling()
|
||||
profiler.start_profiling("time_test")
|
||||
|
||||
sleep_duration = 0.05 # 50ms
|
||||
time.sleep(sleep_duration)
|
||||
|
||||
profiler.end_profiling()
|
||||
result = profiler.print_profiling()
|
||||
|
||||
assert isinstance(result, str)
|
||||
assert "time:" in result
|
||||
# Should have some time measurement
|
||||
assert "ms" in result or "s" in result
|
||||
|
||||
def test_profiling_measures_memory(self):
|
||||
"""Test that profiling measures memory usage"""
|
||||
profiler = Profiling()
|
||||
profiler.start_profiling("memory_test")
|
||||
|
||||
# Allocate some memory
|
||||
data = [0] * 100000
|
||||
|
||||
profiler.end_profiling()
|
||||
result = profiler.print_profiling()
|
||||
|
||||
assert isinstance(result, str)
|
||||
assert "RSS:" in result
|
||||
assert "VMS:" in result
|
||||
assert "time:" in result
|
||||
|
||||
# Clean up
|
||||
del data
|
||||
|
||||
|
||||
class TestProfilingPrintProfiling:
|
||||
"""Test print_profiling functionality"""
|
||||
|
||||
def test_print_profiling_returns_string(self):
|
||||
"""Test that print_profiling returns a string"""
|
||||
profiler = Profiling()
|
||||
profiler.start_profiling("test")
|
||||
profiler.end_profiling()
|
||||
|
||||
result = profiler.print_profiling()
|
||||
|
||||
assert isinstance(result, str)
|
||||
|
||||
def test_print_profiling_contains_identifier(self):
|
||||
"""Test that print_profiling includes the identifier"""
|
||||
profiler = Profiling()
|
||||
identifier = "my_test_operation"
|
||||
|
||||
profiler.start_profiling(identifier)
|
||||
profiler.end_profiling()
|
||||
|
||||
result = profiler.print_profiling()
|
||||
|
||||
assert identifier in result
|
||||
|
||||
def test_print_profiling_format(self):
|
||||
"""Test that print_profiling has expected format"""
|
||||
profiler = Profiling()
|
||||
profiler.start_profiling("test")
|
||||
profiler.end_profiling()
|
||||
|
||||
result = profiler.print_profiling()
|
||||
|
||||
# Check for expected components
|
||||
assert "Profiling:" in result
|
||||
assert "RSS:" in result
|
||||
assert "VMS:" in result
|
||||
assert "time:" in result
|
||||
|
||||
def test_print_profiling_multiple_calls(self):
|
||||
"""Test that print_profiling can be called multiple times"""
|
||||
profiler = Profiling()
|
||||
profiler.start_profiling("test")
|
||||
profiler.end_profiling()
|
||||
|
||||
result1 = profiler.print_profiling()
|
||||
result2 = profiler.print_profiling()
|
||||
|
||||
# Should return the same result
|
||||
assert result1 == result2
|
||||
|
||||
def test_print_profiling_time_formats(self):
|
||||
"""Test different time format outputs"""
|
||||
profiler = Profiling()
|
||||
|
||||
# Very short duration (milliseconds)
|
||||
profiler.start_profiling("ms_test")
|
||||
time.sleep(0.001)
|
||||
profiler.end_profiling()
|
||||
result = profiler.print_profiling()
|
||||
assert "ms" in result
|
||||
|
||||
# Slightly longer duration (seconds)
|
||||
profiler.start_profiling("s_test")
|
||||
time.sleep(0.1)
|
||||
profiler.end_profiling()
|
||||
result = profiler.print_profiling()
|
||||
# Could be ms or s depending on timing
|
||||
assert ("ms" in result or "s" in result)
|
||||
|
||||
def test_print_profiling_memory_formats(self):
|
||||
"""Test different memory format outputs"""
|
||||
profiler = Profiling()
|
||||
profiler.start_profiling("memory_format_test")
|
||||
|
||||
# Allocate some memory
|
||||
data = [0] * 50000
|
||||
|
||||
profiler.end_profiling()
|
||||
result = profiler.print_profiling()
|
||||
|
||||
# Should have some memory unit (B, kB, MB, GB)
|
||||
assert any(unit in result for unit in ["B", "kB", "MB", "GB"])
|
||||
|
||||
# Clean up
|
||||
del data
|
||||
|
||||
|
||||
class TestProfilingIntegration:
|
||||
"""Integration tests for Profiling class"""
|
||||
|
||||
def test_complete_profiling_cycle(self):
|
||||
"""Test a complete profiling cycle from start to print"""
|
||||
profiler = Profiling()
|
||||
|
||||
profiler.start_profiling("complete_cycle")
|
||||
|
||||
# Do some work
|
||||
data = [i for i in range(10000)]
|
||||
time.sleep(0.01)
|
||||
|
||||
profiler.end_profiling()
|
||||
result = profiler.print_profiling()
|
||||
|
||||
assert isinstance(result, str)
|
||||
assert "complete_cycle" in result
|
||||
assert "RSS:" in result
|
||||
assert "VMS:" in result
|
||||
assert "time:" in result
|
||||
|
||||
# Clean up
|
||||
del data
|
||||
|
||||
def test_multiple_profiling_sessions(self):
|
||||
"""Test running multiple profiling sessions"""
|
||||
profiler = Profiling()
|
||||
|
||||
# First session
|
||||
profiler.start_profiling("session_1")
|
||||
time.sleep(0.01)
|
||||
profiler.end_profiling()
|
||||
result1 = profiler.print_profiling()
|
||||
|
||||
# Second session (same profiler instance)
|
||||
profiler.start_profiling("session_2")
|
||||
data = [0] * 100000
|
||||
time.sleep(0.01)
|
||||
profiler.end_profiling()
|
||||
result2 = profiler.print_profiling()
|
||||
|
||||
# Results should be different
|
||||
assert "session_1" in result1
|
||||
assert "session_2" in result2
|
||||
assert result1 != result2
|
||||
|
||||
# Clean up
|
||||
del data
|
||||
|
||||
def test_profiling_with_zero_work(self):
|
||||
"""Test profiling with minimal work"""
|
||||
profiler = Profiling()
|
||||
|
||||
profiler.start_profiling("zero_work")
|
||||
profiler.end_profiling()
|
||||
|
||||
result = profiler.print_profiling()
|
||||
|
||||
assert isinstance(result, str)
|
||||
assert "zero_work" in result
|
||||
|
||||
def test_profiling_with_heavy_computation(self):
|
||||
"""Test profiling with heavier computation"""
|
||||
profiler = Profiling()
|
||||
|
||||
profiler.start_profiling("heavy_computation")
|
||||
|
||||
# Do some computation
|
||||
result_data: list[list[int]] = []
|
||||
for _ in range(1000):
|
||||
result_data.append([j * 2 for j in range(100)])
|
||||
|
||||
time.sleep(0.05)
|
||||
|
||||
profiler.end_profiling()
|
||||
result = profiler.print_profiling()
|
||||
|
||||
assert isinstance(result, str)
|
||||
assert "heavy_computation" in result
|
||||
# Should show measurable time and memory
|
||||
assert "time:" in result
|
||||
|
||||
# Clean up
|
||||
del result_data
|
||||
|
||||
def test_independent_profilers(self):
|
||||
"""Test that multiple Profiling instances are independent"""
|
||||
profiler1 = Profiling()
|
||||
profiler2 = Profiling()
|
||||
|
||||
profiler1.start_profiling("profiler_1")
|
||||
time.sleep(0.01)
|
||||
|
||||
profiler2.start_profiling("profiler_2")
|
||||
data = [0] * 100000
|
||||
time.sleep(0.01)
|
||||
|
||||
profiler1.end_profiling()
|
||||
profiler2.end_profiling()
|
||||
|
||||
result1 = profiler1.print_profiling()
|
||||
result2 = profiler2.print_profiling()
|
||||
|
||||
# Should have different identifiers
|
||||
assert "profiler_1" in result1
|
||||
assert "profiler_2" in result2
|
||||
|
||||
# Results should be different
|
||||
assert result1 != result2
|
||||
|
||||
# Clean up
|
||||
del data
|
||||
|
||||
|
||||
class TestProfilingEdgeCases:
|
||||
"""Test edge cases and boundary conditions"""
|
||||
|
||||
def test_empty_identifier(self):
|
||||
"""Test profiling with empty identifier"""
|
||||
profiler = Profiling()
|
||||
|
||||
profiler.start_profiling("")
|
||||
profiler.end_profiling()
|
||||
|
||||
result = profiler.print_profiling()
|
||||
|
||||
assert isinstance(result, str)
|
||||
assert "Profiling:" in result
|
||||
|
||||
def test_very_long_identifier(self):
|
||||
"""Test profiling with very long identifier"""
|
||||
profiler = Profiling()
|
||||
|
||||
long_ident = "a" * 100
|
||||
|
||||
profiler.start_profiling(long_ident)
|
||||
profiler.end_profiling()
|
||||
|
||||
result = profiler.print_profiling()
|
||||
|
||||
assert isinstance(result, str)
|
||||
assert long_ident in result
|
||||
|
||||
def test_special_characters_in_identifier(self):
|
||||
"""Test profiling with special characters in identifier"""
|
||||
profiler = Profiling()
|
||||
|
||||
special_ident = "test_@#$%_operation"
|
||||
|
||||
profiler.start_profiling(special_ident)
|
||||
profiler.end_profiling()
|
||||
|
||||
result = profiler.print_profiling()
|
||||
|
||||
assert isinstance(result, str)
|
||||
assert special_ident in result
|
||||
|
||||
def test_rapid_consecutive_profiling(self):
|
||||
"""Test rapid consecutive profiling cycles"""
|
||||
profiler = Profiling()
|
||||
|
||||
for i in range(5):
|
||||
profiler.start_profiling(f"rapid_{i}")
|
||||
profiler.end_profiling()
|
||||
result = profiler.print_profiling()
|
||||
|
||||
assert isinstance(result, str)
|
||||
assert f"rapid_{i}" in result
|
||||
|
||||
def test_profiling_negative_memory_change(self):
|
||||
"""Test profiling when memory usage decreases"""
|
||||
profiler = Profiling()
|
||||
|
||||
# Allocate some memory before profiling
|
||||
pre_data = [0] * 1000000
|
||||
|
||||
profiler.start_profiling("memory_decrease")
|
||||
|
||||
# Free the memory
|
||||
del pre_data
|
||||
|
||||
profiler.end_profiling()
|
||||
result = profiler.print_profiling()
|
||||
|
||||
assert isinstance(result, str)
|
||||
assert "memory_decrease" in result
|
||||
# Should handle negative memory change gracefully
|
||||
|
||||
def test_very_short_duration(self):
|
||||
"""Test profiling with extremely short duration"""
|
||||
profiler = Profiling()
|
||||
|
||||
profiler.start_profiling("instant")
|
||||
profiler.end_profiling()
|
||||
|
||||
result = profiler.print_profiling()
|
||||
|
||||
assert isinstance(result, str)
|
||||
assert "instant" in result
|
||||
assert "ms" in result # Should show milliseconds for very short duration
|
||||
|
||||
|
||||
class TestProfilingContextManager:
|
||||
"""Test profiling usage patterns similar to context managers"""
|
||||
|
||||
def test_typical_usage_pattern(self):
|
||||
"""Test typical usage pattern for profiling"""
|
||||
profiler = Profiling()
|
||||
|
||||
# Typical pattern
|
||||
profiler.start_profiling("typical_operation")
|
||||
|
||||
# Perform operation
|
||||
result_list: list[int] = []
|
||||
for _ in range(1000):
|
||||
result_list.append(_ * 2)
|
||||
|
||||
profiler.end_profiling()
|
||||
|
||||
# Get results
|
||||
output = profiler.print_profiling()
|
||||
|
||||
assert isinstance(output, str)
|
||||
assert "typical_operation" in output
|
||||
|
||||
# Clean up
|
||||
del result_list
|
||||
|
||||
def test_profiling_without_end(self):
|
||||
"""Test what happens when end_profiling is not called"""
|
||||
profiler = Profiling()
|
||||
|
||||
profiler.start_profiling("no_end")
|
||||
|
||||
# Don't call end_profiling
|
||||
|
||||
result = profiler.print_profiling()
|
||||
|
||||
# Should still return a string (though data might be incomplete)
|
||||
assert isinstance(result, str)
|
||||
|
||||
def test_profiling_end_without_start(self):
|
||||
"""Test calling end_profiling multiple times without start"""
|
||||
profiler = Profiling()
|
||||
|
||||
profiler.end_profiling()
|
||||
profiler.end_profiling()
|
||||
|
||||
result = profiler.print_profiling()
|
||||
|
||||
assert isinstance(result, str)
|
||||
|
||||
# __END__
|
||||
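A usage sketch of the Profiling start/end/print cycle the tests above exercise. The method names come straight from the tests; the report content ("Profiling:", "RSS:", "VMS:", "time:") is inferred from the assertions, and the sketch assumes corelibs is importable.

from corelibs.debug_handling.profiling import Profiling

profiler = Profiling()
profiler.start_profiling("load_data")
rows = [i * 2 for i in range(100_000)]  # the work being measured
profiler.end_profiling()
# Expected to contain the identifier plus RSS/VMS/time fields, per the tests
print(profiler.print_profiling())
del rows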
405  tests/unit/debug_handling/test_timer.py  Normal file
@@ -0,0 +1,405 @@
"""
|
||||
Unit tests for corelibs.debug_handling.timer module
|
||||
"""
|
||||
|
||||
import time
|
||||
from datetime import datetime, timedelta
|
||||
|
||||
from corelibs.debug_handling.timer import Timer
|
||||
|
||||
|
||||
class TestTimerInitialization:
|
||||
"""Test Timer class initialization"""
|
||||
|
||||
def test_timer_initialization(self):
|
||||
"""Test that Timer initializes with correct default values"""
|
||||
timer = Timer()
|
||||
|
||||
# Check that start times are set
|
||||
assert isinstance(timer.get_overall_start_time(), datetime)
|
||||
assert isinstance(timer.get_start_time(), datetime)
|
||||
|
||||
# Check that end times are None
|
||||
assert timer.get_overall_end_time() is None
|
||||
assert timer.get_end_time() is None
|
||||
|
||||
# Check that run times are None
|
||||
assert timer.get_overall_run_time() is None
|
||||
assert timer.get_run_time() is None
|
||||
|
||||
def test_timer_start_times_are_recent(self):
|
||||
"""Test that start times are set to current time on initialization"""
|
||||
before_init = datetime.now()
|
||||
timer = Timer()
|
||||
after_init = datetime.now()
|
||||
|
||||
overall_start = timer.get_overall_start_time()
|
||||
start = timer.get_start_time()
|
||||
|
||||
assert before_init <= overall_start <= after_init
|
||||
assert before_init <= start <= after_init
|
||||
|
||||
def test_timer_start_times_are_same(self):
|
||||
"""Test that overall_start_time and start_time are initialized to the same time"""
|
||||
timer = Timer()
|
||||
|
||||
overall_start = timer.get_overall_start_time()
|
||||
start = timer.get_start_time()
|
||||
|
||||
# They should be very close (within a few microseconds)
|
||||
time_diff = abs((overall_start - start).total_seconds())
|
||||
assert time_diff < 0.001 # Less than 1 millisecond
|
||||
|
||||
|
||||
class TestOverallRunTime:
|
||||
"""Test overall run time functionality"""
|
||||
|
||||
def test_overall_run_time_returns_timedelta(self):
|
||||
"""Test that overall_run_time returns a timedelta object"""
|
||||
timer = Timer()
|
||||
time.sleep(0.01) # Sleep for 10ms
|
||||
|
||||
result = timer.overall_run_time()
|
||||
|
||||
assert isinstance(result, timedelta)
|
||||
|
||||
def test_overall_run_time_sets_end_time(self):
|
||||
"""Test that calling overall_run_time sets the end time"""
|
||||
timer = Timer()
|
||||
|
||||
assert timer.get_overall_end_time() is None
|
||||
|
||||
timer.overall_run_time()
|
||||
|
||||
assert isinstance(timer.get_overall_end_time(), datetime)
|
||||
|
||||
def test_overall_run_time_sets_run_time(self):
|
||||
"""Test that calling overall_run_time sets the run time"""
|
||||
timer = Timer()
|
||||
|
||||
assert timer.get_overall_run_time() is None
|
||||
|
||||
timer.overall_run_time()
|
||||
|
||||
assert isinstance(timer.get_overall_run_time(), timedelta)
|
||||
|
||||
def test_overall_run_time_accuracy(self):
|
||||
"""Test that overall_run_time calculates time difference accurately"""
|
||||
timer = Timer()
|
||||
sleep_duration = 0.05 # 50ms
|
||||
time.sleep(sleep_duration)
|
||||
|
||||
result = timer.overall_run_time()
|
||||
|
||||
# Allow for some variance (10ms tolerance)
|
||||
assert sleep_duration - 0.01 <= result.total_seconds() <= sleep_duration + 0.01
|
||||
|
||||
def test_overall_run_time_multiple_calls(self):
|
||||
"""Test that calling overall_run_time multiple times updates the values"""
|
||||
timer = Timer()
|
||||
time.sleep(0.01)
|
||||
|
||||
first_result = timer.overall_run_time()
|
||||
first_end_time = timer.get_overall_end_time()
|
||||
|
||||
time.sleep(0.01)
|
||||
|
||||
second_result = timer.overall_run_time()
|
||||
second_end_time = timer.get_overall_end_time()
|
||||
|
||||
# Second call should have longer runtime
|
||||
assert second_result > first_result
|
||||
assert second_end_time is not None
|
||||
assert first_end_time is not None
|
||||
# End time should be updated
|
||||
assert second_end_time > first_end_time
|
||||
|
||||
def test_overall_run_time_consistency(self):
|
||||
"""Test that get_overall_run_time returns the same value as overall_run_time"""
|
||||
timer = Timer()
|
||||
time.sleep(0.01)
|
||||
|
||||
calculated_time = timer.overall_run_time()
|
||||
retrieved_time = timer.get_overall_run_time()
|
||||
|
||||
assert calculated_time == retrieved_time
|
||||
|
||||
|
||||
class TestRunTime:
|
||||
"""Test run time functionality"""
|
||||
|
||||
def test_run_time_returns_timedelta(self):
|
||||
"""Test that run_time returns a timedelta object"""
|
||||
timer = Timer()
|
||||
time.sleep(0.01)
|
||||
|
||||
result = timer.run_time()
|
||||
|
||||
assert isinstance(result, timedelta)
|
||||
|
||||
def test_run_time_sets_end_time(self):
|
||||
"""Test that calling run_time sets the end time"""
|
||||
timer = Timer()
|
||||
|
||||
assert timer.get_end_time() is None
|
||||
|
||||
timer.run_time()
|
||||
|
||||
assert isinstance(timer.get_end_time(), datetime)
|
||||
|
||||
def test_run_time_sets_run_time(self):
|
||||
"""Test that calling run_time sets the run time"""
|
||||
timer = Timer()
|
||||
|
||||
assert timer.get_run_time() is None
|
||||
|
||||
timer.run_time()
|
||||
|
||||
assert isinstance(timer.get_run_time(), timedelta)
|
||||
|
||||
def test_run_time_accuracy(self):
|
||||
"""Test that run_time calculates time difference accurately"""
|
||||
timer = Timer()
|
||||
sleep_duration = 0.05 # 50ms
|
||||
time.sleep(sleep_duration)
|
||||
|
||||
result = timer.run_time()
|
||||
|
||||
# Allow for some variance (10ms tolerance)
|
||||
assert sleep_duration - 0.01 <= result.total_seconds() <= sleep_duration + 0.01
|
||||
|
||||
def test_run_time_multiple_calls(self):
|
||||
"""Test that calling run_time multiple times updates the values"""
|
||||
timer = Timer()
|
||||
time.sleep(0.01)
|
||||
|
||||
first_result = timer.run_time()
|
||||
first_end_time = timer.get_end_time()
|
||||
|
||||
time.sleep(0.01)
|
||||
|
||||
second_result = timer.run_time()
|
||||
second_end_time = timer.get_end_time()
|
||||
|
||||
# Second call should have longer runtime
|
||||
assert second_result > first_result
|
||||
assert second_end_time is not None
|
||||
assert first_end_time is not None
|
||||
# End time should be updated
|
||||
assert second_end_time > first_end_time
|
||||
|
||||
def test_run_time_consistency(self):
|
||||
"""Test that get_run_time returns the same value as run_time"""
|
||||
timer = Timer()
|
||||
time.sleep(0.01)
|
||||
|
||||
calculated_time = timer.run_time()
|
||||
retrieved_time = timer.get_run_time()
|
||||
|
||||
assert calculated_time == retrieved_time
|
||||
|
||||
|
||||
class TestResetRunTime:
|
||||
"""Test reset_run_time functionality"""
|
||||
|
||||
def test_reset_run_time_resets_start_time(self):
|
||||
"""Test that reset_run_time updates the start time"""
|
||||
timer = Timer()
|
||||
original_start = timer.get_start_time()
|
||||
|
||||
time.sleep(0.02)
|
||||
timer.reset_run_time()
|
||||
|
||||
new_start = timer.get_start_time()
|
||||
|
||||
assert new_start > original_start
|
||||
|
||||
def test_reset_run_time_clears_end_time(self):
|
||||
"""Test that reset_run_time clears the end time"""
|
||||
timer = Timer()
|
||||
timer.run_time()
|
||||
|
||||
        assert timer.get_end_time() is not None

        timer.reset_run_time()

        assert timer.get_end_time() is None

    def test_reset_run_time_clears_run_time(self):
        """Test that reset_run_time clears the run time"""
        timer = Timer()
        timer.run_time()

        assert timer.get_run_time() is not None

        timer.reset_run_time()

        assert timer.get_run_time() is None

    def test_reset_run_time_does_not_affect_overall_times(self):
        """Test that reset_run_time does not affect overall times"""
        timer = Timer()

        overall_start = timer.get_overall_start_time()
        timer.overall_run_time()
        overall_end = timer.get_overall_end_time()
        overall_run = timer.get_overall_run_time()

        timer.reset_run_time()

        # Overall times should remain unchanged
        assert timer.get_overall_start_time() == overall_start
        assert timer.get_overall_end_time() == overall_end
        assert timer.get_overall_run_time() == overall_run

    def test_reset_run_time_allows_new_measurement(self):
        """Test that reset_run_time allows for new time measurements"""
        timer = Timer()
        time.sleep(0.02)
        timer.run_time()

        first_run_time = timer.get_run_time()

        timer.reset_run_time()
        time.sleep(0.01)
        timer.run_time()

        second_run_time = timer.get_run_time()

        assert second_run_time is not None
        assert first_run_time is not None
        # Second measurement should be shorter since we reset
        assert second_run_time < first_run_time


class TestTimerIntegration:
    """Integration tests for Timer class"""

    def test_independent_timers(self):
        """Test that multiple Timer instances are independent"""
        timer1 = Timer()
        time.sleep(0.01)
        timer2 = Timer()

        # timer1 should have earlier start time
        assert timer1.get_start_time() < timer2.get_start_time()
        assert timer1.get_overall_start_time() < timer2.get_overall_start_time()

    def test_overall_and_run_time_independence(self):
        """Test that overall time and run time are independent"""
        timer = Timer()
        time.sleep(0.02)

        # Reset run time but not overall
        timer.reset_run_time()
        time.sleep(0.01)

        run_time = timer.run_time()
        overall_time = timer.overall_run_time()

        # Overall time should be longer than run time
        assert overall_time > run_time

    def test_typical_usage_pattern(self):
        """Test a typical usage pattern of the Timer class"""
        timer = Timer()

        # Measure first operation
        time.sleep(0.01)
        first_operation = timer.run_time()
        assert first_operation.total_seconds() > 0

        # Reset and measure second operation
        timer.reset_run_time()
        time.sleep(0.01)
        second_operation = timer.run_time()
        assert second_operation.total_seconds() > 0

        # Get overall time
        overall = timer.overall_run_time()

        # Overall should be greater than individual operations
        assert overall > first_operation
        assert overall > second_operation

    def test_zero_sleep_timer(self):
        """Test timer with minimal sleep (edge case)"""
        timer = Timer()

        # Call run_time immediately
        result = timer.run_time()

        # Should still return a valid timedelta (very small)
        assert isinstance(result, timedelta)
        assert result.total_seconds() >= 0

    def test_getter_methods_before_calculation(self):
        """Test that getter methods return None before calculation methods are called"""
        timer = Timer()

        # Before calling run_time()
        assert timer.get_end_time() is None
        assert timer.get_run_time() is None

        # Before calling overall_run_time()
        assert timer.get_overall_end_time() is None
        assert timer.get_overall_run_time() is None

        # But start times should always be set
        assert timer.get_start_time() is not None
        assert timer.get_overall_start_time() is not None


class TestTimerEdgeCases:
    """Test edge cases and boundary conditions"""

    def test_rapid_consecutive_calls(self):
        """Test rapid consecutive calls to run_time"""
        timer = Timer()

        results: list[timedelta] = []
        for _ in range(5):
            results.append(timer.run_time())

        # Each result should be greater than or equal to the previous
        for i in range(1, len(results)):
            assert results[i] >= results[i - 1]

    def test_very_short_duration(self):
        """Test timer with very short duration"""
        timer = Timer()
        result = timer.run_time()

        # Should be a very small positive timedelta
        assert isinstance(result, timedelta)
        assert result.total_seconds() >= 0
        assert result.total_seconds() < 0.1  # Less than 100ms

    def test_reset_multiple_times(self):
        """Test resetting the timer multiple times"""
        timer = Timer()

        for _ in range(3):
            timer.reset_run_time()
            time.sleep(0.01)
            result = timer.run_time()

            assert isinstance(result, timedelta)
            assert result.total_seconds() > 0

    def test_overall_time_persists_through_resets(self):
        """Test that overall time continues even when run_time is reset"""
        timer = Timer()

        time.sleep(0.01)
        timer.reset_run_time()

        time.sleep(0.01)
        timer.reset_run_time()

        overall = timer.overall_run_time()

        # Overall time should reflect total elapsed time
        assert overall.total_seconds() >= 0.02
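
# Hedged aside for readers of this diff: the real Timer class ships elsewhere
# in corelibs and is not shown here. The reset semantics asserted above pin
# down its contract, which a minimal implementation could satisfy like this
# (attribute names are assumptions, not the shipped code):
#
#     def reset_run_time(self) -> None:
#         self._start = datetime.now()  # per-run clock restarts, so the next
#         self._end = None              # measurement is shorter after a reset
#         self._run_time = None
#         # the overall_* fields are deliberately left untouched, which is
#         # why overall_run_time() keeps accumulating across resets
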
# __END__
975  tests/unit/debug_handling/test_writeline.py  Normal file
@@ -0,0 +1,975 @@
"""
|
||||
Unit tests for debug_handling.writeline module
|
||||
"""
|
||||
|
||||
import io
|
||||
import pytest
|
||||
from pytest import CaptureFixture
|
||||
|
||||
from corelibs.debug_handling.writeline import (
|
||||
write_l,
|
||||
pr_header,
|
||||
pr_title,
|
||||
pr_open,
|
||||
pr_close,
|
||||
pr_act
|
||||
)
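
# Hedged reference for this diff: the implementation of write_l is not shown
# here, but the tests below fix its observable contract. A minimal sketch
# consistent with them (parameter names match the call sites; the body is an
# assumption) would be:
#
#     def write_l(line: str, fpl: io.TextIOBase | None = None,
#                 print_line: bool = False) -> None:
#         if print_line:
#             print(line)
#         if fpl is not None:
#             fpl.write(line + "\n")  # a closed handle raises ValueError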


class TestWriteL:
    """Test cases for write_l function"""

    def test_write_l_print_only(self, capsys: CaptureFixture[str]):
        """Test write_l with print_line=True and no file"""
        write_l("Test line", print_line=True)
        captured = capsys.readouterr()
        assert captured.out == "Test line\n"

    def test_write_l_no_print_no_file(self, capsys: CaptureFixture[str]):
        """Test write_l with print_line=False and no file (should do nothing)"""
        write_l("Test line", print_line=False)
        captured = capsys.readouterr()
        assert captured.out == ""

    def test_write_l_file_only(self, capsys: CaptureFixture[str]):
        """Test write_l with file handler only (no print)"""
        fpl = io.StringIO()
        write_l("Test line", fpl=fpl, print_line=False)
        captured = capsys.readouterr()
        assert captured.out == ""
        assert fpl.getvalue() == "Test line\n"
        fpl.close()

    def test_write_l_both_print_and_file(self, capsys: CaptureFixture[str]):
        """Test write_l with both print and file output"""
        fpl = io.StringIO()
        write_l("Test line", fpl=fpl, print_line=True)
        captured = capsys.readouterr()
        assert captured.out == "Test line\n"
        assert fpl.getvalue() == "Test line\n"
        fpl.close()

    def test_write_l_multiple_lines_to_file(self):
        """Test write_l writing multiple lines to file"""
        fpl = io.StringIO()
        write_l("Line 1", fpl=fpl, print_line=False)
        write_l("Line 2", fpl=fpl, print_line=False)
        write_l("Line 3", fpl=fpl, print_line=False)
        assert fpl.getvalue() == "Line 1\nLine 2\nLine 3\n"
        fpl.close()

    def test_write_l_empty_string(self, capsys: CaptureFixture[str]):
        """Test write_l with empty string"""
        fpl = io.StringIO()
        write_l("", fpl=fpl, print_line=True)
        captured = capsys.readouterr()
        assert captured.out == "\n"
        assert fpl.getvalue() == "\n"
        fpl.close()

    def test_write_l_special_characters(self):
        """Test write_l with special characters"""
        fpl = io.StringIO()
        special_line = "Special: \t\n\r\\ 特殊文字 €"
        write_l(special_line, fpl=fpl, print_line=False)
        assert special_line + "\n" in fpl.getvalue()
        fpl.close()

    def test_write_l_long_string(self):
        """Test write_l with long string"""
        fpl = io.StringIO()
        long_line = "A" * 1000
        write_l(long_line, fpl=fpl, print_line=False)
        assert fpl.getvalue() == long_line + "\n"
        fpl.close()

    def test_write_l_unicode_content(self):
        """Test write_l with unicode content"""
        fpl = io.StringIO()
        unicode_line = "Hello 世界 🌍 Привет"
        write_l(unicode_line, fpl=fpl, print_line=False)
        assert fpl.getvalue() == unicode_line + "\n"
        fpl.close()

    def test_write_l_default_parameters(self, capsys: CaptureFixture[str]):
        """Test write_l with default parameters"""
        write_l("Test")
        captured = capsys.readouterr()
        # Default print_line is False
        assert captured.out == ""

    def test_write_l_with_newline_in_string(self):
        """Test write_l with newline characters in the string"""
        fpl = io.StringIO()
        write_l("Line with\nnewline", fpl=fpl, print_line=False)
        assert fpl.getvalue() == "Line with\nnewline\n"
        fpl.close()


class TestPrHeader:
    """Test cases for pr_header function"""

    def test_pr_header_default(self, capsys: CaptureFixture[str]):
        """Test pr_header with default parameters"""
        pr_header("TEST")
        captured = capsys.readouterr()
        assert "#" in captured.out
        assert "TEST" in captured.out

    def test_pr_header_custom_marker(self, capsys: CaptureFixture[str]):
        """Test pr_header with custom marker string"""
        pr_header("TEST", marker_string="*")
        captured = capsys.readouterr()
        assert "*" in captured.out
        assert "TEST" in captured.out
        assert "#" not in captured.out

    def test_pr_header_custom_width(self, capsys: CaptureFixture[str]):
        """Test pr_header with custom width"""
        pr_header("TEST", width=50)
        captured = capsys.readouterr()
        # Check that output is formatted
        assert "TEST" in captured.out

    def test_pr_header_short_tag(self, capsys: CaptureFixture[str]):
        """Test pr_header with short tag"""
        pr_header("X")
        captured = capsys.readouterr()
        assert "X" in captured.out
        assert "#" in captured.out

    def test_pr_header_long_tag(self, capsys: CaptureFixture[str]):
        """Test pr_header with long tag"""
        pr_header("This is a very long header tag")
        captured = capsys.readouterr()
        assert "This is a very long header tag" in captured.out

    def test_pr_header_empty_tag(self, capsys: CaptureFixture[str]):
        """Test pr_header with empty tag"""
        pr_header("")
        captured = capsys.readouterr()
        assert "#" in captured.out

    def test_pr_header_special_characters(self, capsys: CaptureFixture[str]):
        """Test pr_header with special characters in tag"""
        pr_header("TEST: 123! @#$")
        captured = capsys.readouterr()
        assert "TEST: 123! @#$" in captured.out

    def test_pr_header_unicode(self, capsys: CaptureFixture[str]):
        """Test pr_header with unicode characters"""
        pr_header("テスト 🎉")
        captured = capsys.readouterr()
        assert "テスト 🎉" in captured.out

    def test_pr_header_various_markers(self, capsys: CaptureFixture[str]):
        """Test pr_header with various marker strings"""
        markers = ["*", "=", "-", "+", "~", "@"]
        for marker in markers:
            pr_header("TEST", marker_string=marker)
            captured = capsys.readouterr()
            assert marker in captured.out
            assert "TEST" in captured.out

    def test_pr_header_zero_width(self, capsys: CaptureFixture[str]):
        """Test pr_header with width of 0"""
        pr_header("TEST", width=0)
        captured = capsys.readouterr()
        assert "TEST" in captured.out

    def test_pr_header_large_width(self, capsys: CaptureFixture[str]):
        """Test pr_header with large width"""
        pr_header("TEST", width=100)
        captured = capsys.readouterr()
        assert "TEST" in captured.out
        assert "#" in captured.out

    def test_pr_header_format(self, capsys: CaptureFixture[str]):
        """Test pr_header output format"""
        pr_header("CENTER", marker_string="#", width=20)
        captured = capsys.readouterr()
        # Should have spaces around centered text
        assert " CENTER " in captured.out or "CENTER" in captured.out
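
# Hedged illustration: the assertions above only pin down that pr_header
# prints the tag surrounded by marker characters. The intended shape is
# presumably a centred banner line, roughly (spacing is a guess, not the
# shipped format):
#
#     pr_header("CENTER", marker_string="#", width=20)
#     # -> "##### CENTER ######"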


class TestPrTitle:
    """Test cases for pr_title function"""

    def test_pr_title_default(self, capsys: CaptureFixture[str]):
        """Test pr_title with default parameters"""
        pr_title("Test Title")
        captured = capsys.readouterr()
        assert "Test Title" in captured.out
        assert "|" in captured.out
        assert "." in captured.out
        assert ":" in captured.out

    def test_pr_title_custom_prefix(self, capsys: CaptureFixture[str]):
        """Test pr_title with custom prefix string"""
        pr_title("Test", prefix_string=">")
        captured = capsys.readouterr()
        assert ">" in captured.out
        assert "Test" in captured.out
        assert "|" not in captured.out

    def test_pr_title_custom_space_filler(self, capsys: CaptureFixture[str]):
        """Test pr_title with custom space filler"""
        pr_title("Test", space_filler="-")
        captured = capsys.readouterr()
        assert "Test" in captured.out
        assert "-" in captured.out
        assert "." not in captured.out

    def test_pr_title_custom_width(self, capsys: CaptureFixture[str]):
        """Test pr_title with custom width"""
        pr_title("Test", width=50)
        captured = capsys.readouterr()
        assert "Test" in captured.out

    def test_pr_title_short_tag(self, capsys: CaptureFixture[str]):
        """Test pr_title with short tag"""
        pr_title("X")
        captured = capsys.readouterr()
        assert "X" in captured.out
        assert "." in captured.out

    def test_pr_title_long_tag(self, capsys: CaptureFixture[str]):
        """Test pr_title with long tag"""
        pr_title("This is a very long title tag")
        captured = capsys.readouterr()
        assert "This is a very long title tag" in captured.out

    def test_pr_title_empty_tag(self, capsys: CaptureFixture[str]):
        """Test pr_title with empty tag"""
        pr_title("")
        captured = capsys.readouterr()
        assert "|" in captured.out
        assert ":" in captured.out

    def test_pr_title_special_characters(self, capsys: CaptureFixture[str]):
        """Test pr_title with special characters"""
        pr_title("Task #123!")
        captured = capsys.readouterr()
        assert "Task #123!" in captured.out

    def test_pr_title_unicode(self, capsys: CaptureFixture[str]):
        """Test pr_title with unicode characters"""
        pr_title("タイトル 📝")
        captured = capsys.readouterr()
        assert "タイトル 📝" in captured.out

    def test_pr_title_various_fillers(self, capsys: CaptureFixture[str]):
        """Test pr_title with various space fillers"""
        fillers = [".", "-", "_", "*", " ", "~"]
        for filler in fillers:
            pr_title("Test", space_filler=filler)
            captured = capsys.readouterr()
            assert "Test" in captured.out

    def test_pr_title_zero_width(self, capsys: CaptureFixture[str]):
        """Test pr_title with width of 0"""
        pr_title("Test", width=0)
        captured = capsys.readouterr()
        assert "Test" in captured.out

    def test_pr_title_large_width(self, capsys: CaptureFixture[str]):
        """Test pr_title with large width"""
        pr_title("Test", width=100)
        captured = capsys.readouterr()
        assert "Test" in captured.out

    def test_pr_title_format_left_align(self, capsys: CaptureFixture[str]):
        """Test pr_title output format (should be left-aligned with filler)"""
        pr_title("Start", space_filler=".", width=10)
        captured = capsys.readouterr()
        # Should have the tag followed by dots
        assert "Start" in captured.out
        assert ":" in captured.out


class TestPrOpen:
    """Test cases for pr_open function"""

    def test_pr_open_default(self, capsys: CaptureFixture[str]):
        """Test pr_open with default parameters"""
        pr_open("Processing")
        captured = capsys.readouterr()
        assert "Processing" in captured.out
        assert "|" in captured.out
        assert "." in captured.out
        assert "[" in captured.out
        # Should not have newline at the end
        assert not captured.out.endswith("\n")

    def test_pr_open_custom_prefix(self, capsys: CaptureFixture[str]):
        """Test pr_open with custom prefix string"""
        pr_open("Task", prefix_string=">")
        captured = capsys.readouterr()
        assert ">" in captured.out
        assert "Task" in captured.out
        assert "|" not in captured.out

    def test_pr_open_custom_space_filler(self, capsys: CaptureFixture[str]):
        """Test pr_open with custom space filler"""
        pr_open("Task", space_filler="-")
        captured = capsys.readouterr()
        assert "Task" in captured.out
        assert "-" in captured.out
        assert "." not in captured.out

    def test_pr_open_custom_width(self, capsys: CaptureFixture[str]):
        """Test pr_open with custom width"""
        pr_open("Task", width=50)
        captured = capsys.readouterr()
        assert "Task" in captured.out
        assert "[" in captured.out

    def test_pr_open_short_tag(self, capsys: CaptureFixture[str]):
        """Test pr_open with short tag"""
        pr_open("X")
        captured = capsys.readouterr()
        assert "X" in captured.out
        assert "[" in captured.out

    def test_pr_open_long_tag(self, capsys: CaptureFixture[str]):
        """Test pr_open with long tag"""
        pr_open("This is a very long task tag")
        captured = capsys.readouterr()
        assert "This is a very long task tag" in captured.out

    def test_pr_open_empty_tag(self, capsys: CaptureFixture[str]):
        """Test pr_open with empty tag"""
        pr_open("")
        captured = capsys.readouterr()
        assert "[" in captured.out
        assert "|" in captured.out

    def test_pr_open_no_newline(self, capsys: CaptureFixture[str]):
        """Test pr_open doesn't end with newline"""
        pr_open("Test")
        captured = capsys.readouterr()
        # Output should not end with newline (uses end="")
        assert not captured.out.endswith("\n")

    def test_pr_open_special_characters(self, capsys: CaptureFixture[str]):
        """Test pr_open with special characters"""
        pr_open("Loading: 50%")
        captured = capsys.readouterr()
        assert "Loading: 50%" in captured.out

    def test_pr_open_unicode(self, capsys: CaptureFixture[str]):
        """Test pr_open with unicode characters"""
        pr_open("処理中 ⏳")
        captured = capsys.readouterr()
        assert "処理中 ⏳" in captured.out

    def test_pr_open_format(self, capsys: CaptureFixture[str]):
        """Test pr_open output format"""
        pr_open("Task", prefix_string="|", space_filler=".", width=20)
        captured = capsys.readouterr()
        assert "|" in captured.out
        assert "Task" in captured.out
        assert "[" in captured.out


class TestPrClose:
    """Test cases for pr_close function"""

    def test_pr_close_default(self, capsys: CaptureFixture[str]):
        """Test pr_close with default (empty) tag"""
        pr_close()
        captured = capsys.readouterr()
        assert captured.out == "]\n"

    def test_pr_close_with_tag(self, capsys: CaptureFixture[str]):
        """Test pr_close with custom tag"""
        pr_close("DONE")
        captured = capsys.readouterr()
        assert "DONE" in captured.out
        assert "]" in captured.out
        assert captured.out.endswith("\n")

    def test_pr_close_with_space(self, capsys: CaptureFixture[str]):
        """Test pr_close with space in tag"""
        pr_close(" OK ")
        captured = capsys.readouterr()
        assert " OK " in captured.out
        assert "]" in captured.out

    def test_pr_close_empty_string(self, capsys: CaptureFixture[str]):
        """Test pr_close with empty string (same as default)"""
        pr_close("")
        captured = capsys.readouterr()
        assert captured.out == "]\n"

    def test_pr_close_special_characters(self, capsys: CaptureFixture[str]):
        """Test pr_close with special characters"""
        pr_close("✓")
        captured = capsys.readouterr()
        assert "✓" in captured.out
        assert "]" in captured.out

    def test_pr_close_unicode(self, capsys: CaptureFixture[str]):
        """Test pr_close with unicode characters"""
        pr_close("完了")
        captured = capsys.readouterr()
        assert "完了" in captured.out
        assert "]" in captured.out

    def test_pr_close_newline(self, capsys: CaptureFixture[str]):
        """Test pr_close ends with newline"""
        pr_close("OK")
        captured = capsys.readouterr()
        assert captured.out.endswith("\n")

    def test_pr_close_various_tags(self, capsys: CaptureFixture[str]):
        """Test pr_close with various tags"""
        tags = ["OK", "DONE", "✓", "✗", "SKIP", "PASS", "FAIL"]
        for tag in tags:
            pr_close(tag)
            captured = capsys.readouterr()
            assert tag in captured.out
            assert "]" in captured.out


class TestPrAct:
    """Test cases for pr_act function"""

    def test_pr_act_default(self, capsys: CaptureFixture[str]):
        """Test pr_act with default dot"""
        pr_act()
        captured = capsys.readouterr()
        assert captured.out == "."
        assert not captured.out.endswith("\n")

    def test_pr_act_custom_character(self, capsys: CaptureFixture[str]):
        """Test pr_act with custom character"""
        pr_act("#")
        captured = capsys.readouterr()
        assert captured.out == "#"

    def test_pr_act_multiple_calls(self, capsys: CaptureFixture[str]):
        """Test pr_act with multiple calls"""
        pr_act(".")
        pr_act(".")
        pr_act(".")
        captured = capsys.readouterr()
        assert captured.out == "..."

    def test_pr_act_various_characters(self, capsys: CaptureFixture[str]):
        """Test pr_act with various characters"""
        characters = [".", "#", "*", "+", "-", "=", ">", "~"]
        for char in characters:
            pr_act(char)
        captured = capsys.readouterr()
        assert "".join(characters) in captured.out

    def test_pr_act_empty_string(self, capsys: CaptureFixture[str]):
        """Test pr_act with empty string"""
        pr_act("")
        captured = capsys.readouterr()
        assert captured.out == ""

    def test_pr_act_special_character(self, capsys: CaptureFixture[str]):
        """Test pr_act with special characters"""
        pr_act("✓")
        captured = capsys.readouterr()
        assert captured.out == "✓"

    def test_pr_act_unicode(self, capsys: CaptureFixture[str]):
        """Test pr_act with unicode character"""
        pr_act("●")
        captured = capsys.readouterr()
        assert captured.out == "●"

    def test_pr_act_no_newline(self, capsys: CaptureFixture[str]):
        """Test pr_act doesn't add newline"""
        pr_act("x")
        captured = capsys.readouterr()
        assert not captured.out.endswith("\n")

    def test_pr_act_multiple_characters(self, capsys: CaptureFixture[str]):
        """Test pr_act with multiple characters in string"""
        pr_act("...")
        captured = capsys.readouterr()
        assert captured.out == "..."

    def test_pr_act_whitespace(self, capsys: CaptureFixture[str]):
        """Test pr_act with whitespace"""
        pr_act(" ")
        captured = capsys.readouterr()
        assert captured.out == " "
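
# The combination tests below rely on the three helpers composing into one
# progress line on the console. As a hedged illustration (fill widths are up
# to the corelibs implementation, which this diff does not show):
#
#     pr_open("Subtask")   # prints "| Subtask ....... [" with no newline
#     pr_act(".")          # appends one "." per unit of work, still no newline
#     pr_close(" OK")      # appends " OK]" and finally ends the line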


class TestProgressCombinations:
    """Test combinations of progress printer functions"""

    def test_complete_progress_flow(self, capsys: CaptureFixture[str]):
        """Test complete progress output flow"""
        pr_header("PROCESS")
        pr_title("Task 1")
        pr_open("Subtask")
        pr_act(".")
        pr_act(".")
        pr_act(".")
        pr_close(" OK")
        captured = capsys.readouterr()

        assert "PROCESS" in captured.out
        assert "Task 1" in captured.out
        assert "Subtask" in captured.out
        assert "..." in captured.out
        assert " OK]" in captured.out

    def test_multiple_tasks_progress(self, capsys: CaptureFixture[str]):
        """Test multiple tasks with progress"""
        pr_header("BATCH PROCESS")
        for i in range(3):
            pr_open(f"Task {i + 1}")
            for _ in range(5):
                pr_act(".")
            pr_close(" DONE")
        captured = capsys.readouterr()

        assert "BATCH PROCESS" in captured.out
        assert "Task 1" in captured.out
        assert "Task 2" in captured.out
        assert "Task 3" in captured.out
        assert " DONE]" in captured.out

    def test_nested_progress(self, capsys: CaptureFixture[str]):
        """Test nested progress indicators"""
        pr_header("MAIN TASK", marker_string="=")
        pr_title("Subtask A", prefix_string=">")
        pr_open("Processing")
        pr_act("#")
        pr_act("#")
        pr_close()
        pr_title("Subtask B", prefix_string=">")
        pr_open("Processing")
        pr_act("*")
        pr_act("*")
        pr_close(" OK")
        captured = capsys.readouterr()

        assert "MAIN TASK" in captured.out
        assert "Subtask A" in captured.out
        assert "Subtask B" in captured.out
        assert "##" in captured.out
        assert "**" in captured.out

    def test_progress_with_different_markers(self, capsys: CaptureFixture[str]):
        """Test progress with different marker styles"""
        pr_header("Process", marker_string="*")
        pr_title("Step 1", prefix_string=">>", space_filler="-")
        pr_open("Work", prefix_string=">>", space_filler="-")
        pr_act("+")
        pr_close(" ✓")
        captured = capsys.readouterr()

        assert "*" in captured.out
        assert ">>" in captured.out
        assert "-" in captured.out
        assert "+" in captured.out
        assert "✓" in captured.out

    def test_empty_progress_sequence(self, capsys: CaptureFixture[str]):
        """Test progress sequence with no actual progress"""
        pr_open("Quick task")
        pr_close(" SKIP")
        captured = capsys.readouterr()

        assert "Quick task" in captured.out
        assert " SKIP]" in captured.out


class TestIntegration:
    """Integration tests combining multiple scenarios"""

    def test_file_and_console_logging(self, capsys: CaptureFixture[str]):
        """Test logging to both file and console"""
        fpl = io.StringIO()

        write_l("Starting process", fpl=fpl, print_line=True)
        write_l("Processing item 1", fpl=fpl, print_line=True)
        write_l("Processing item 2", fpl=fpl, print_line=True)
        write_l("Complete", fpl=fpl, print_line=True)

        captured = capsys.readouterr()
        file_content = fpl.getvalue()

        # Check console output
        assert "Starting process\n" in captured.out
        assert "Processing item 1\n" in captured.out
        assert "Processing item 2\n" in captured.out
        assert "Complete\n" in captured.out

        # Check file output
        assert "Starting process\n" in file_content
        assert "Processing item 1\n" in file_content
        assert "Processing item 2\n" in file_content
        assert "Complete\n" in file_content

        fpl.close()

    def test_progress_with_logging(self, capsys: CaptureFixture[str]):
        """Test combining progress output with file logging"""
        fpl = io.StringIO()

        write_l("=== Process Start ===", fpl=fpl, print_line=True)
        pr_header("MAIN PROCESS")
        write_l("Header shown", fpl=fpl, print_line=False)

        pr_open("Task 1")
        pr_act(".")
        pr_act(".")
        pr_close(" OK")
        write_l("Task 1 completed", fpl=fpl, print_line=False)

        write_l("=== Process End ===", fpl=fpl, print_line=True)

        captured = capsys.readouterr()
        file_content = fpl.getvalue()

        assert "=== Process Start ===" in captured.out
        assert "MAIN PROCESS" in captured.out
        assert "Task 1" in captured.out
        assert "=== Process End ===" in captured.out

        assert "=== Process Start ===\n" in file_content
        assert "Header shown\n" in file_content
        assert "Task 1 completed\n" in file_content
        assert "=== Process End ===\n" in file_content

        fpl.close()

    def test_complex_workflow(self, capsys: CaptureFixture[str]):
        """Test complex workflow with all functions"""
        fpl = io.StringIO()

        write_l("Log: Starting batch process", fpl=fpl, print_line=False)
        pr_header("BATCH PROCESSOR", marker_string="=", width=40)

        for i in range(2):
            write_l(f"Log: Processing batch {i + 1}", fpl=fpl, print_line=False)
            pr_title(f"Batch {i + 1}", prefix_string="|", space_filler=".")

            pr_open(f"Item {i + 1}", prefix_string="|", space_filler=".")
            for j in range(3):
                pr_act("*")
                write_l(f"Log: Progress {j + 1}/3", fpl=fpl, print_line=False)
            pr_close(" ✓")

            write_l(f"Log: Batch {i + 1} complete", fpl=fpl, print_line=False)

        write_l("Log: All batches complete", fpl=fpl, print_line=False)

        captured = capsys.readouterr()
        file_content = fpl.getvalue()

        # Check console has progress indicators
        assert "BATCH PROCESSOR" in captured.out
        assert "Batch 1" in captured.out
        assert "Batch 2" in captured.out
        assert "***" in captured.out
        assert "✓" in captured.out

        # Check file has all log entries
        assert "Log: Starting batch process\n" in file_content
        assert "Log: Processing batch 1\n" in file_content
        assert "Log: Processing batch 2\n" in file_content
        assert "Log: Progress 1/3\n" in file_content
        assert "Log: Batch 1 complete\n" in file_content
        assert "Log: All batches complete\n" in file_content

        fpl.close()


class TestEdgeCases:
    """Test edge cases and boundary conditions"""

    def test_write_l_none_file_handler(self, capsys: CaptureFixture[str]):
        """Test write_l explicitly with None file handler"""
        write_l("Test", fpl=None, print_line=True)
        captured = capsys.readouterr()
        assert captured.out == "Test\n"

    def test_pr_header_negative_width(self):
        """Test pr_header with negative width raises ValueError"""
        with pytest.raises(ValueError):
            pr_header("Test", width=-10)

    def test_pr_title_negative_width(self):
        """Test pr_title with negative width raises ValueError"""
        with pytest.raises(ValueError):
            pr_title("Test", width=-10)

    def test_pr_open_negative_width(self):
        """Test pr_open with negative width raises ValueError"""
        with pytest.raises(ValueError):
            pr_open("Test", width=-10)
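
    # Hedged note: the three ValueError cases above imply that the pr_*
    # helpers validate width before formatting, along the lines of
    # (sketch only, not the shipped code):
    #
    #     if width < 0:
    #         raise ValueError("width must be >= 0")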

    def test_multiple_pr_act_no_close(self, capsys: CaptureFixture[str]):
        """Test multiple pr_act calls without pr_close"""
        pr_act(".")
        pr_act(".")
        pr_act(".")
        captured = capsys.readouterr()
        assert captured.out == "..."

    def test_pr_close_without_pr_open(self, capsys: CaptureFixture[str]):
        """Test pr_close without prior pr_open (should still work)"""
        pr_close(" OK")
        captured = capsys.readouterr()
        assert " OK]" in captured.out

    def test_very_long_strings(self):
        """Test with very long strings"""
        fpl = io.StringIO()
        long_str = "A" * 10000
        write_l(long_str, fpl=fpl, print_line=False)
        assert len(fpl.getvalue()) == 10001  # string + newline
        fpl.close()

    def test_pr_header_very_long_tag(self, capsys: CaptureFixture[str]):
        """Test pr_header with tag longer than width"""
        pr_header("This is a very long tag that exceeds the width", width=10)
        captured = capsys.readouterr()
        assert "This is a very long tag that exceeds the width" in captured.out

    def test_pr_title_very_long_tag(self, capsys: CaptureFixture[str]):
        """Test pr_title with tag longer than width"""
        pr_title("This is a very long tag that exceeds the width", width=10)
        captured = capsys.readouterr()
        assert "This is a very long tag that exceeds the width" in captured.out

    def test_write_l_closed_file(self):
        """Test write_l with closed file should raise error"""
        fpl = io.StringIO()
        fpl.close()

        with pytest.raises(ValueError):
            write_l("Test", fpl=fpl, print_line=False)


class TestParametrized:
    """Parametrized tests for comprehensive coverage"""

    @pytest.mark.parametrize("print_line", [True, False])
    def test_write_l_print_line_variations(self, print_line: bool, capsys: CaptureFixture[str]):
        """Test write_l with different print_line values"""
        write_l("Test", print_line=print_line)
        captured = capsys.readouterr()
        if print_line:
            assert captured.out == "Test\n"
        else:
            assert captured.out == ""

    @pytest.mark.parametrize("marker", ["#", "*", "=", "-", "+", "~", "@", "^"])
    def test_pr_header_various_markers_param(self, marker: str, capsys: CaptureFixture[str]):
        """Test pr_header with various markers"""
        pr_header("TEST", marker_string=marker)
        captured = capsys.readouterr()
        assert marker in captured.out
        assert "TEST" in captured.out

    @pytest.mark.parametrize("width", [0, 5, 10, 20, 35, 50, 100])
    def test_pr_header_various_widths(self, width: int, capsys: CaptureFixture[str]):
        """Test pr_header with various widths"""
        pr_header("TEST", width=width)
        captured = capsys.readouterr()
        assert "TEST" in captured.out

    @pytest.mark.parametrize("filler", [".", "-", "_", "*", " ", "~", "="])
    def test_pr_title_various_fillers_param(self, filler: str, capsys: CaptureFixture[str]):
        """Test pr_title with various space fillers"""
        pr_title("Test", space_filler=filler)
        captured = capsys.readouterr()
        assert "Test" in captured.out

    @pytest.mark.parametrize("prefix", ["|", ">", ">>", "*", "-", "+"])
    def test_pr_title_various_prefixes(self, prefix: str, capsys: CaptureFixture[str]):
        """Test pr_title with various prefix strings"""
        pr_title("Test", prefix_string=prefix)
        captured = capsys.readouterr()
        assert prefix in captured.out
        assert "Test" in captured.out

    @pytest.mark.parametrize("act_char", [".", "#", "*", "+", "-", "=", ">", "~", "✓", "●"])
    def test_pr_act_various_characters_param(self, act_char: str, capsys: CaptureFixture[str]):
        """Test pr_act with various characters"""
        pr_act(act_char)
        captured = capsys.readouterr()
        assert captured.out == act_char

    @pytest.mark.parametrize("close_tag", ["", " OK", " DONE", " ✓", " ✗", " SKIP", " PASS"])
    def test_pr_close_various_tags_param(self, close_tag: str, capsys: CaptureFixture[str]):
        """Test pr_close with various tags"""
        pr_close(close_tag)
        captured = capsys.readouterr()
        assert f"{close_tag}]" in captured.out

    @pytest.mark.parametrize("content", [
        "Simple text",
        "Text with 特殊文字",
        "Text with emoji 🎉",
        "Text\twith\ttabs",
        "Multiple\n\nNewlines",
        "",
        "A" * 100,
    ])
    def test_write_l_various_content(self, content: str, capsys: CaptureFixture[str]):
        """Test write_l with various content types"""
        fpl = io.StringIO()
        write_l(content, fpl=fpl, print_line=True)
        captured = capsys.readouterr()
        assert content in captured.out
        assert content + "\n" in fpl.getvalue()
        fpl.close()


class TestRealWorldScenarios:
    """Test real-world usage scenarios"""

    def test_batch_processing_output(self, capsys: CaptureFixture[str]):
        """Test typical batch processing output"""
        pr_header("BATCH PROCESSOR", marker_string="=", width=50)

        items = ["file1.txt", "file2.txt", "file3.txt"]
        for item in items:
            pr_open(f"Processing {item}")
            for _ in range(10):
                pr_act(".")
            pr_close(" ✓")

        captured = capsys.readouterr()
        assert "BATCH PROCESSOR" in captured.out
        for item in items:
            assert item in captured.out
        assert "✓" in captured.out

    def test_logging_workflow(self, capsys: CaptureFixture[str]):
        """Test typical logging workflow"""
        log_file = io.StringIO()

        # Simulate a workflow with logging
        write_l("[INFO] Starting process", fpl=log_file, print_line=True)
        write_l("[INFO] Initializing components", fpl=log_file, print_line=True)
        write_l("[DEBUG] Component A loaded", fpl=log_file, print_line=False)
        write_l("[DEBUG] Component B loaded", fpl=log_file, print_line=False)
        write_l("[INFO] Processing data", fpl=log_file, print_line=True)
        write_l("[INFO] Process complete", fpl=log_file, print_line=True)

        captured = capsys.readouterr()
        log_content = log_file.getvalue()

        # Console should only have INFO messages
        assert "[INFO] Starting process" in captured.out
        assert "[DEBUG] Component A loaded" not in captured.out

        # Log file should have all messages
        assert "[INFO] Starting process\n" in log_content
        assert "[DEBUG] Component A loaded\n" in log_content
        assert "[DEBUG] Component B loaded\n" in log_content

        log_file.close()

    def test_progress_indicator_for_long_task(self, capsys: CaptureFixture[str]):
        """Test progress indicator for a long-running task"""
        pr_header("DATA PROCESSING")
        pr_open("Loading data", width=50)

        # Simulate progress
        for i in range(20):
            if i % 5 == 0:
                pr_act(str(i // 5))
            else:
                pr_act(".")

        pr_close(" COMPLETE")

        captured = capsys.readouterr()
        assert "DATA PROCESSING" in captured.out
        assert "Loading data" in captured.out
        assert "COMPLETE" in captured.out

    def test_multi_stage_process(self, capsys: CaptureFixture[str]):
        """Test multi-stage process with titles and progress"""
        pr_header("DEPLOYMENT PIPELINE", marker_string="=")

        stages = ["Build", "Test", "Deploy"]
        for stage in stages:
            pr_title(stage)
            pr_open(f"Running {stage.lower()}")
            pr_act("#")
            pr_act("#")
            pr_act("#")
            pr_close(" OK")

        captured = capsys.readouterr()
        assert "DEPLOYMENT PIPELINE" in captured.out
        for stage in stages:
            assert stage in captured.out
        assert "###" in captured.out

    def test_error_reporting_with_logging(self, capsys: CaptureFixture[str]):
        """Test error reporting workflow"""
        error_log = io.StringIO()

        pr_header("VALIDATION", marker_string="!")
        pr_open("Checking files")

        write_l("[ERROR] File not found: data.csv", fpl=error_log, print_line=False)
        pr_act("✗")

        write_l("[ERROR] Permission denied: output.txt", fpl=error_log, print_line=False)
        pr_act("✗")

        pr_close(" FAILED")

        captured = capsys.readouterr()
        log_content = error_log.getvalue()

        assert "VALIDATION" in captured.out
        assert "Checking files" in captured.out
        assert "✗✗" in captured.out
        assert "FAILED" in captured.out

        assert "[ERROR] File not found: data.csv\n" in log_content
        assert "[ERROR] Permission denied: output.txt\n" in log_content

        error_log.close()

    def test_detailed_reporting(self, capsys: CaptureFixture[str]):
        """Test detailed reporting with mixed output"""
        report_file = io.StringIO()

        pr_header("SYSTEM REPORT", marker_string="#", width=60)
        write_l("=== System Report Generated ===", fpl=report_file, print_line=False)

        pr_title("Database Status", prefix_string=">>")
        write_l("Database: Connected", fpl=report_file, print_line=False)
        write_l("Tables: 15", fpl=report_file, print_line=False)
        write_l("Records: 1,234,567", fpl=report_file, print_line=False)

        pr_title("API Status", prefix_string=">>")
        write_l("API: Online", fpl=report_file, print_line=False)
        write_l("Requests/min: 1,500", fpl=report_file, print_line=False)

        write_l("=== Report Complete ===", fpl=report_file, print_line=False)

        captured = capsys.readouterr()
        report_content = report_file.getvalue()

        assert "SYSTEM REPORT" in captured.out
        assert "Database Status" in captured.out
        assert "API Status" in captured.out

        assert "=== System Report Generated ===\n" in report_content
        assert "Database: Connected\n" in report_content
        assert "API: Online\n" in report_content
        assert "=== Report Complete ===\n" in report_content

        report_file.close()

# __END__
1249  tests/unit/email_handling/test_send_email.py  Normal file
File diff suppressed because it is too large
Some files were not shown because too many files have changed in this diff.