Compare commits

245 commits: `v0.5.0` ... `refactor/T`

```text
28527990e9 b58a26f79a 8bb4a202cd f265b55ef8 85063ea5df
31086fea53 fd956095de a046d9f84c 2e0d5aeb51 28ab7c6f0c
d098eb58f3 5319a059ad 163b8c4018 6322b95068 715ed1f9c2
82a759dd21 fe913608c4 79f9c5d1c6 3d091129e2 1a978f786d
51669d3c5f d128dcb479 84286593f6 8d97f09e5e 2748bc19be
0b3c8fc774 7da18e0f00 49e38081ad a14f993a31 ae938f9909
f91e0bb93a d3f61005cf 2923a3e88b a73ced0067 f89b91fe7f
5950485d46 f349927a63 dfe8890598 d224876a8e 17e8c76b94
9034a31cd6 523e61c9f7 cf575ded90 11a75d8532 6593e11332
c310f669d6 f327f47c3f acd61e825e 895701da59 e0fb0db1f0
dc7e56106e 90e5179980 9db39003c4 4ffe372434 a00c27c465
1f7f4b8d53 baca79ce82 4265be6430 c16b086467 48a98c0206
f1788f057f 0ad8883809 51e9b1ce7c 0d3104f60a d29f827fc9
282fe1f7c0 afce5043e4 5996bb1fc0 06a17d7c30 af7633183c
1280b2f855 2e0b1f5951 548d7491b8 ad99115544 52919cbc49
7f2dc13c31 592652cff1 6a1724695e 037210756e 4e78d83092
0e6331fa6a c98c5df63c 0981c74da9 31518799f6 e8b4b9b48e
cd06272b38 c5ab4352e3 0da4a6b70a 11c5f3387c 3ed0171e17
c7b38b0d70 caf0039de4 2637e1e42c d0a1673965 07e5d23f72
fb4fdb6857 d642a13b6e 8967031f91 89caada4cc b3616269bc
4fa22813ce 3ee3a0dce0 1226721bc0 a76eae0cc7 53cf2a6f48
fe69530b38 bf83c1c394 84ce43ab93 5e0765ee24 6edf9398b7
30bf9c1bcb 0b59f3cc7a 2544fad9ce e579ef5834 543e9766a1
4c3611aba7 dadc14563a c1eda7305b 2f4e236350 b858936c68
78ce30283e f85fbb86af ed22105ec8 7c5af588c7 2690a285d9
bb60a570d0 ca0ab2d7d1 38bae7fb46 14466c3ff8 fe824f9fb4
ef5981b473 7d1ee70cf6 7c72d99619 b32887a6d8 37a197e7f1
74cb3d2c54 d19abcabc7 f8ae6609c7 cbd39ff161 f8905a176c
847288e91f 446d9d5217 3a7a1659f0 bc23006a34 6090995eba
60db747d6d a7a4141f58 2b04cbe239 765cc061c1 80319385f0
29dd906fe0 d5dc4028c3 0df049d453 0bd7c1f685 2f08ecabbf
12af1c80dc a52b6e0a55 a586cf65e2 e2e7882bfa 4f9c2b9d5f
5203bcf1ea f1e3bc8559 b97ca6f064 d1ea9874da 3cd3f87d68
582937b866 2b8240c156 abf4b7ac89 9c49f83c16 3a625ed0ee
2cfbf4bb90 5767533668 24798f19ca 26f8249187 dcefa564da
edd35dccea ea527ea60c fd5e1db22b 39e23faf7f de285b531a
0a29a592f9 e045b1d3b5 280e5fa861 472d3495b5 2778ac6870
743a0a8ac9 694712ed2e ea3b4f1790 da68818d4f db6a3b53c5
82b089498e 948b0dd5e7 4acc0b51b1 a626b738a9 7119844313
5763f57830 70e8ceecce acbe1ac692 99bca2c467 b74ed1f30e
8082ab78a1 c69076f517 648ab001b6 447034046e 0770ac0bb4
aa2fbd4f70 58c8447531 bcca43d774 e9ccfe7ad2 6c2637ad34
7183d05dd6 b45ca85cd3 4ca45ebc73 6902768fed 3f9f2ceaac
2a248bd249 c559a6bafb 19d7e9b5ed 3e5a5accf7 424c91945a
c657dc564e 208f002284 084ecc01e0 08cb994d8d 67f1a6688d
efb7968e93 fe7c7db004 79d1ccae9a 6e69af4aa8 d500b7d473
ef599a1aad 2d197134f1 717080a009 19197c71ff 051b93f2d8
e04b3598b8 b88e0fe564 060e3b4afe cd07267475 2fa031f6ee
f38cce1c1d 52dd1e7b73 661a182655 d803de312d 57a36d64f1
```
.gitignore (vendored, 2 changes)

```diff
@@ -3,3 +3,5 @@
 **/*.egg-info
 .mypy_cache/
 **/.env
+.coverage
+uv.lock
```
README.md (new file, 164 lines)

@@ -0,0 +1,164 @@

# CoreLibs for Python

> [!warning]
> This is pre-production; locations of methods and names of paths can change.
>
> This will be split up into modules per file and this will be just a collection holder.
> See [Deprecated](#deprecated) below.

This is a pip package that can be installed into any project and covers the following parts:

- logging update with exception logs
- requests wrapper for easier auth pass-on for access
- dict fingerprinting
- sending email
- jmespath search
- json helpers for content replace and output
- dump outputs of data for debugging
- progress printing
- string formatting, time creation, byte formatting
- Enum base class
- SQLite simple IO class
- symmetric encryption

## Current list

- config_handling: simple INI config file data loader with check/convert/etc.
- csv_interface: CSV dict writer/reader helper
- debug_handling: various debug helpers like data dumper, timer, utilization, etc.
- db_handling: SQLite interface class
- encryption_handling: symmetric encryption
- email_handling: simple email sending
- file_handling: CRC handling for file content and file names, progress bar
- json_handling: jmespath support and JSON date support, replace content in dict with JSON paths
- iterator_handling: list and dictionary handling support (search, fingerprinting, etc.)
- logging_handling: extended logging and error message handling
- requests_handling: requests wrapper for better calls with auth headers
- script_handling: PID lock file handling, abort timer
- string_handling: byte format, datetime format, datetime compare, hashing, string formats for numbers, double-byte string format, etc.
- var_handling: var type checkers, enum base class

## Unfinished

- csv_handling/csv_interface: the CSV DictWriter interface is implemented only in a very basic way
- script_handling/script_helpers: no idea if there is a need for this; tests are written but not finished

## Deprecated

All content in this module will move to stand-alone libraries. As of now, the following entries have moved and will throw deprecation warnings if used (a sketch of such a shim follows this list):

- check_handling.regex_constants_compiled: corelibs-regex-checks
- check_handling.regex_constants: corelibs-regex-checks
- csv_handling.csv_interface: corelibs-csv
- datetime_handling.datetime_helpers: corelibs-datetime
- datetime_handling.timestamp_convert: corelibs-datetime
- datetime_handling.timestamp_strings: corelibs-datetime
- debug_handling.debug_helpers: corelibs-stack-trace
- debug_handling.dump_data: corelibs-dump-data
- debug_handling.profiling: corelibs-debug
- debug_handling.timer: corelibs-debug
- debug_handling.writeline: corelibs-debug
- encryption_handling.symmetrix_encryption: corelibs-encryption
- exceptions.csv_exceptions: corelibs-csv
- file_handling.file_bom_encoding: corelibs-file
- file_handling.file_crc: corelibs-file
- file_handling.file_handling: corelibs-file
- iterator_handling.data_search: corelibs-search
- iterator_handling.dict_helpers: corelibs-iterator
- iterator_handling.dict_mask: corelibs-dump-data
- iterator_handling.fingerprint: corelibs-hash
- iterator_handling.list_helpers: corelibs-iterator
- json_handling.jmespath_helper: corelibs-search
- json_handling.json_helper: corelibs-json
- math_handling.math_helpers: python.math
- requests_handling.auth_helpers: corelibs-requests
- requests_handling.caller: corelibs-requests
- script_handling.progress: corelibs-progress
- script_handling.script_helpers: corelibs-script
- string_handling.byte_helpers: corelibs-strings
- string_handling.double_byte_string_format: corelibs-double-byte-format
- string_handling.hash_helpers: corelibs-hash
- string_handling.string_helpers: corelibs-strings
- string_handling.text_colors: corelibs-text-colors
- var_handling.enum_base: corelibs-enum-base
- var_handling.var_helpers: corelibs-var
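For illustration, a minimal sketch of how such a deprecation shim can look. This is hypothetical: the actual shim bodies and the import name of the stand-alone package (`corelibs_debug` is assumed here) may differ.

```python
"""Hypothetical shim body, e.g. corelibs/debug_handling/timer.py after the move."""
import warnings

# assumed new home of the code in the stand-alone corelibs-debug package
from corelibs_debug.timer import Timer

# emit the deprecation warning once the old module is imported
warnings.warn(
    "corelibs.debug_handling.timer moved to corelibs-debug, import it from there",
    DeprecationWarning,
    stacklevel=2,
)

__all__ = ["Timer"]
```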

## UV setup

uv must be [installed](https://docs.astral.sh/uv/getting-started/installation/).

## How to publish

Have the following setup in `pyproject.toml`:

```toml
[[tool.uv.index]]
name = "opj-pypi"
url = "https://git.egplusww.jp/api/packages/PyPI/pypi/simple/"
publish-url = "https://git.egplusww.jp/api/packages/PyPI/pypi"
explicit = true
```

```sh
uv build
uv publish --index opj-pypi --token <gitea token>
```

## Use package

We must set the full index URL here because we run with `--no-project`:

```sh
uv run --with corelibs --index opj-pypi=https://git.egplusww.jp/api/packages/PyPI/pypi/simple/ --no-project -- python -c "import corelibs"
```

### Python tests

All Python tests are in the tests/ folder. They are structured by the source folder layout.

Run them with:

```sh
uv run pytest
```

Get a coverage report:

```sh
uv run pytest --cov=corelibs
uv run pytest --cov=corelibs --cov-report=term-missing
```

### Other tests

Usage and run tests are located in the test-run folder; run them as below:

```sh
uv run test-run/<script>
```

## How to install in another project

This will also add the index entry:

```sh
uv add corelibs --index opj-pypi=https://git.egplusww.jp/api/packages/PyPI/pypi/simple/
```

## Python venv setup

After cloning, run the command below to install all dependencies:

```sh
uv sync
```

## NOTE on TLS problems

> [!warning] TLS problems with Netskope

If the Netskope service is running, all uv runs will fail unless either `--native-tls` is set or the environment variable `SSL_CERT_FILE` is set; see below:

```sh
export SSL_CERT_FILE='/Library/Application Support/Netskope/STAgent/data/nscacert_combined.pem'
```
ReadMe.md (deleted, 84 lines)

@@ -1,84 +0,0 @@

# CoreLibs for Python

This is a pip package that can be installed into any project and covers the following parts:

- logging update with exception logs
- requests wrapper for easier auth pass-on for access
- dict fingerprinting
- jmespath search
- dump outputs for data
- progress printing
- string formatting, time creation, byte formatting

## Current list

- csv_handling: csv dict writer helper
- debug_handling: various debug helpers like data dumper, timer, utilization, etc.
- file_handling: CRC handling for file content and file names, progress bar
- json_handling: jmespath support and JSON date support
- list_dict_handling: list and dictionary handling support (search, fingerprinting, etc.)
- logging_handling: extended logging and error message handling
- requests_handling: requests wrapper for better calls with auth headers
- script_handling: PID lock file handling, abort timer
- string_handling: byte format, datetime format, hashing, string formats for numbers, double-byte string format, etc.

## How to publish

Have the following setup in `pyproject.toml`:

```toml
[[tool.uv.index]]
name = "egra-gitea"
url = "https://git.egplusww.jp/api/packages/PyPI/pypi/simple/"
publish-url = "https://git.egplusww.jp/api/packages/PyPI/pypi"
explicit = true
```

```sh
uv build --native-tls
uv publish --index egra-gitea --token <gitea token> --native-tls
```

## Test package

We must set the full index URL here because we run with `--no-project`:

```sh
uv run --with corelibs --index egra-gitea=https://git.egplusww.jp/api/packages/PyPI/pypi/simple/ --no-project --native-tls -- python -c "import corelibs"
```

### Other tests

Other tests are located in the test folder.

At the moment only a small test for the "progress" and the "double byte string format" module is set:

```sh
uv run --native-tls tests/progress/progress_test.py
```

```sh
uv run --native-tls tests/double_byte_string_format/double_byte_string_format.py
```

## How to install in another project

This will also add the index entry:

```sh
uv add corelibs --index egra-gitea=https://git.egplusww.jp/api/packages/PyPI/pypi/simple/ --native-tls
```

## Python venv setup

In the folder where the script will be located:

```sh
uv venv --python 3.13
```

Install all needed dependencies:

```sh
uv sync
```
SECURITY.md (new file, 11 lines)

@@ -0,0 +1,11 @@

# Security Policy

This software follows the [SemVer 2.0 scheme](https://semver.org/).

## Supported Versions

Only the latest version is supported.

## Reporting a Vulnerability

Open a ticket to report a security problem.
ToDo.md (7 changes)

```diff
@@ -1,4 +1,7 @@
 # ToDo list
 
-- stub files .pyi
-- fix all remaining check errors
+- [x] stub files .pyi
+- [ ] Add tests for all, we need 100% test coverage
+- [x] Log: add custom format for "stack_correct" if set, this will override the normal stack block
+- [ ] Log: add rotate for size based
+- [ ] All folders and file names need to be revisited for naming and content collection
```
pyproject.toml

```diff
@@ -1,29 +1,64 @@
# MARK: Project info
[project]
name = "corelibs"
version = "0.5.0"
version = "0.48.0"
description = "Collection of utils for Python scripts"
readme = "ReadMe.md"
readme = "README.md"
requires-python = ">=3.13"
dependencies = [
    "corelibs-csv>=1.0.0",
    "corelibs-datetime>=1.0.1",
    "corelibs-debug>=1.0.0",
    "corelibs-double-byte-format>=1.0.0",
    "corelibs-dump-data>=1.0.0",
    "corelibs-encryption>=1.0.0",
    "corelibs-enum-base>=1.0.0",
    "corelibs-file>=1.0.0",
    "corelibs-hash>=1.0.0",
    "corelibs-iterator>=1.0.0",
    "corelibs-json>=1.0.0",
    "corelibs-progress>=1.0.0",
    "corelibs-regex-checks>=1.0.0",
    "corelibs-requests>=1.0.0",
    "corelibs-script>=1.0.0",
    "corelibs-search>=1.0.0",
    "corelibs-stack-trace>=1.0.0",
    "corelibs-strings>=1.0.0",
    "corelibs-text-colors>=1.0.0",
    "corelibs-var>=1.0.0",
    "cryptography>=46.0.3",
    "jmespath>=1.0.1",
    "jsonpath-ng>=1.7.0",
    "psutil>=7.0.0",
    "requests>=2.32.4",
    "requests[socks]>=2.32.5",
]
# set this to disable publish to pypi (pip)
# classifiers = ["Private :: Do Not Upload"]

# MARK: build target
[[tool.uv.index]]
name = "egra-gitea"
url = "https://git.egplusww.jp/api/packages/PyPI/pypi/simple/"
publish-url = "https://git.egplusww.jp/api/packages/PyPI/pypi"
explicit = true

# MARK: build system
[build-system]
requires = ["hatchling"]
build-backend = "hatchling.build"
# set this to disable publish to pypi (pip)
# classifiers = ["Private :: Do Not Upload"]

# MARK: build target
[[tool.uv.index]]
name = "opj-pypi"
url = "https://git.egplusww.jp/api/packages/PyPI/pypi/simple/"
publish-url = "https://git.egplusww.jp/api/packages/PyPI/pypi"

[tool.uv.sources]
corelibs-enum-base = { index = "opj-pypi" }
corelibs-datetime = { index = "opj-pypi" }
corelibs-var = { index = "opj-pypi" }
corelibs-text-colors = { index = "opj-pypi" }

[dependency-groups]
dev = [
    "deepdiff>=8.6.1",
    "pytest>=8.4.1",
    "pytest-cov>=6.2.1",
    "typing-extensions>=4.15.0",
]

# MARK: Python linting
[tool.pyright]
```

```diff
@@ -47,6 +82,38 @@ notes = ["FIXME", "TODO"]
notes-rgx = '(FIXME|TODO)(\((TTD-|#)\[0-9]+\))'
[tool.flake8]
max-line-length = 120
ignore = [
    "E741",  # ignore ambiguous variable name
    "W504"   # line break occurred after a binary operator [wrongly triggered by "or" in if]
]
[tool.pylint.MASTER]
# this is for the tests/etc folders
init-hook='import sys; sys.path.append("src/")'

# MARK: Testing
[tool.pytest.ini_options]
testpaths = [
    "tests",
]

[tool.coverage.run]
omit = [
    "*/tests/*",
    "*/test_*.py",
    "*/__init__.py"
]

[tool.coverage.report]
exclude_lines = [
    "pragma: no cover",
    "def __repr__",
    "def __str__",
    "raise AssertionError",
    "raise NotImplementedError",
    "if __name__ == .__main__.:"
]
exclude_also = [
    "def __.*__\\(",
    "def __.*\\(",
    "def _.*\\(",
]
```
@@ -1,93 +0,0 @@

```python
"""
Write to CSV file
- each class set is one file write with one header set
"""

from typing import Any
from pathlib import Path
from collections import Counter
import csv


class CsvWriter:
    """
    write to a CSV file
    """

    def __init__(
        self,
        path: Path,
        file_name: str,
        header: dict[str, str],
        header_order: list[str] | None = None
    ):
        self.path = path
        self.file_name = file_name
        # Key: index for write for the line dict, Values: header entries
        self.header = header
        self.csv_file_writer = self.__open_csv(header_order)

    def __open_csv(self, header_order: list[str] | None) -> 'csv.DictWriter[str] | None':
        """
        open csv file for writing, write headers

        Note that if there is no header_order set we use the order in the header dictionary

        Arguments:
            header_order {list[str] | None} -- optional dedicated header order

        Returns:
            csv.DictWriter[str] | None: the writer, or None if the header check or the open failed
        """
        # if header order is set, make sure all header value fields exist
        header_values = self.header.values()
        if header_order is not None:
            if Counter(header_values) != Counter(header_order):
                print(
                    "header order does not match header values: "
                    f"{', '.join(header_values)} != {', '.join(header_order)}"
                )
                return None
            header_values = header_order
        # no duplicates
        if len(header_values) != len(set(header_values)):
            print(f"Header must have unique values only: {', '.join(header_values)}")
            return None
        try:
            fp = open(
                self.path.joinpath(self.file_name),
                "w", encoding="utf-8"
            )
            csv_file_writer = csv.DictWriter(
                fp,
                fieldnames=header_values,
                delimiter=",",
                quotechar='"',
                quoting=csv.QUOTE_MINIMAL,
            )
            csv_file_writer.writeheader()
            return csv_file_writer
        except OSError as err:
            print("OS error:", err)
            return None

    def write_csv(self, line: dict[str, str]) -> bool:
        """
        write member csv line

        Arguments:
            line {dict[str, str]} -- row data, keyed like the header dict

        Returns:
            bool -- False if the writer could not be opened, True after write
        """
        if self.csv_file_writer is None:
            return False
        csv_row: dict[str, Any] = {}
        # only write entries that are in the header list
        for key, value in self.header.items():
            csv_row[value] = line[key]
        self.csv_file_writer.writerow(csv_row)
        return True

# __END__
```
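For reference, a minimal usage sketch of the deleted `CsvWriter` (import path taken from the Deprecated list in the README; the file name and row data are made up):

```python
from pathlib import Path

from corelibs.csv_handling.csv_interface import CsvWriter  # old, now deprecated location

# map internal dict keys to CSV column headers
writer = CsvWriter(
    path=Path("."),
    file_name="members.csv",  # hypothetical output file
    header={"uid": "User ID", "name": "Name"},
    header_order=["User ID", "Name"],
)
# each write_csv() call appends one row; returns False if the file could not be opened
ok = writer.write_csv({"uid": "1001", "name": "Alice"})
print("row written:", ok)
```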
@@ -1,126 +0,0 @@

```python
"""
Profile memory usage in Python
"""

# https://docs.python.org/3/library/tracemalloc.html

import os
import time
import tracemalloc
import linecache
from typing import Tuple
from tracemalloc import Snapshot
import psutil


def display_top(snapshot: Snapshot, key_type: str = 'lineno', limit: int = 10) -> str:
    """
    Build a tracemalloc top statistics report
    https://docs.python.org/3/library/tracemalloc.html#pretty-top

    Args:
        snapshot (Snapshot): tracemalloc snapshot to report on
        key_type (str, optional): statistics grouping key. Defaults to 'lineno'.
        limit (int, optional): number of top lines to show. Defaults to 10.

    Returns:
        str: formatted report, one entry per line
    """
    snapshot = snapshot.filter_traces((
        tracemalloc.Filter(False, "<frozen importlib._bootstrap>"),
        tracemalloc.Filter(False, "<unknown>"),
    ))
    top_stats = snapshot.statistics(key_type)

    profiler_msg = f"Top {limit} lines"
    for index, stat in enumerate(top_stats[:limit], 1):
        frame = stat.traceback[0]
        # replace "/path/to/module/file.py" with "module/file.py"
        filename = os.sep.join(frame.filename.split(os.sep)[-2:])
        profiler_msg += f"\n#{index}: {filename}:{frame.lineno}: {(stat.size / 1024):.1f} KiB"
        line = linecache.getline(frame.filename, frame.lineno).strip()
        if line:
            profiler_msg += f" {line}"

    other = top_stats[limit:]
    if other:
        size = sum(stat.size for stat in other)
        profiler_msg += f"\n{len(other)} other: {(size / 1024):.1f} KiB"
    total = sum(stat.size for stat in top_stats)
    profiler_msg += f"\nTotal allocated size: {(total / 1024):.1f} KiB"
    return profiler_msg


class Profiling:
    """
    Profile memory usage and elapsed time for some block
    Based on: https://stackoverflow.com/a/53301648
    """

    def __init__(self):
        # profiling id
        self.__ident: str = ''
        # memory
        self.__rss_before: int = 0
        self.__vms_before: int = 0
        # self.shared_before: int = 0
        self.__rss_used: int = 0
        self.__vms_used: int = 0
        # self.shared_used: int = 0
        # time
        self.__call_start: float = 0
        self.__elapsed: str = ''

    def __get_process_memory(self) -> Tuple[int, int]:
        process = psutil.Process(os.getpid())
        mi = process.memory_info()
        # macos does not have mi.shared
        return mi.rss, mi.vms

    def __elapsed_since(self) -> str:
        elapsed = time.time() - self.__call_start
        if elapsed < 1:
            return str(round(elapsed * 1000, 2)) + "ms"
        if elapsed < 60:
            return str(round(elapsed, 2)) + "s"
        if elapsed < 3600:
            return str(round(elapsed / 60, 2)) + "min"
        return str(round(elapsed / 3600, 2)) + "hrs"

    def __format_bytes(self, bytes_data: int) -> str:
        if abs(bytes_data) < 1000:
            return str(bytes_data) + "B"
        if abs(bytes_data) < 1e6:
            return str(round(bytes_data / 1e3, 2)) + "kB"
        if abs(bytes_data) < 1e9:
            return str(round(bytes_data / 1e6, 2)) + "MB"
        return str(round(bytes_data / 1e9, 2)) + "GB"

    def start_profiling(self, ident: str) -> None:
        """
        start the profiling
        """
        self.__ident = ident
        self.__rss_before, self.__vms_before = self.__get_process_memory()
        self.__call_start = time.time()

    def end_profiling(self) -> None:
        """
        end the profiling
        """
        if self.__rss_before == 0 and self.__vms_before == 0:
            print("start_profiling() was not called, output will be negative")
        self.__elapsed = self.__elapsed_since()
        __rss_after, __vms_after = self.__get_process_memory()
        self.__rss_used = __rss_after - self.__rss_before
        self.__vms_used = __vms_after - self.__vms_before

    def print_profiling(self) -> str:
        """
        print the profiling time
        """
        return (
            f"Profiling: {self.__ident:>20} "
            f"RSS: {self.__format_bytes(self.__rss_used):>8} | "
            f"VMS: {self.__format_bytes(self.__vms_used):>8} | "
            f"time: {self.__elapsed:>8}"
        )

# __END__
```
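A minimal usage sketch for the deleted `Profiling` class (the allocation workload here is made up):

```python
from corelibs.debug_handling.profiling import Profiling  # old, now deprecated location

prof = Profiling()
prof.start_profiling("list build")
data = [str(i) * 10 for i in range(100_000)]  # some memory-hungry work
prof.end_profiling()
# prints one line like: "Profiling:           list build RSS:  12.3MB | VMS: ... | time: ..."
print(prof.print_profiling())
```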
@@ -1,113 +0,0 @@

```python
"""
An interval timer class

Returns:
    Timer: class timer for basic time run calculations
"""

from datetime import datetime, timedelta


class Timer:
    """
    get difference between start and end date/time
    """

    def __init__(self):
        """
        init new start time and set end time to None
        """
        self._overall_start_time = datetime.now()
        self._overall_end_time = None
        self._overall_run_time = None
        self._start_time = datetime.now()
        self._end_time = None
        self._run_time = None

    # MARK: overall run time
    def overall_run_time(self) -> timedelta:
        """
        overall run time difference from class launch to call of this function

        Returns:
            timedelta: time from class init until now
        """
        self._overall_end_time = datetime.now()
        self._overall_run_time = self._overall_end_time - self._overall_start_time
        return self._overall_run_time

    def get_overall_start_time(self) -> datetime:
        """
        get set start time

        Returns:
            datetime: overall start time
        """
        return self._overall_start_time

    def get_overall_end_time(self) -> datetime | None:
        """
        get set end time or None for not set

        Returns:
            datetime | None: overall end time
        """
        return self._overall_end_time

    def get_overall_run_time(self) -> timedelta | None:
        """
        get run time or None if run time was not called

        Returns:
            timedelta | None: overall run time
        """
        return self._overall_run_time

    # MARK: set run time
    def run_time(self) -> timedelta:
        """
        difference between start time and current time

        Returns:
            timedelta: time since start or last reset
        """
        self._end_time = datetime.now()
        self._run_time = self._end_time - self._start_time
        return self._run_time

    def reset_run_time(self):
        """
        reset start/end and run time
        """
        self._start_time = datetime.now()
        self._end_time = None
        self._run_time = None

    def get_start_time(self) -> datetime:
        """
        get set start time

        Returns:
            datetime: start time
        """
        return self._start_time

    def get_end_time(self) -> datetime | None:
        """
        get set end time or None for not set

        Returns:
            datetime | None: end time
        """
        return self._end_time

    def get_run_time(self) -> timedelta | None:
        """
        get run time or None if run time was not called

        Returns:
            timedelta | None: run time
        """
        return self._run_time

# __END__
```
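A minimal usage sketch for the deleted `Timer` class:

```python
import time

from corelibs.debug_handling.timer import Timer  # old, now deprecated location

timer = Timer()
time.sleep(0.1)
print("interval:", timer.run_time())         # time since init / last reset
timer.reset_run_time()                       # restart the interval clock
time.sleep(0.1)
print("interval:", timer.run_time())
print("overall:", timer.overall_run_time())  # time since the Timer was created
```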
@@ -1,75 +0,0 @@

```python
"""
Various small helpers for data writing
"""

from typing import TYPE_CHECKING
if TYPE_CHECKING:
    from io import TextIOWrapper


def write_l(line: str, fpl: 'TextIOWrapper | None' = None, print_line: bool = False):
    """
    Write a line to screen and to output file

    Args:
        line (str): line to write
        fpl (TextIOWrapper | None): file handler resource, if None write only to console
        print_line (bool): also print the line to the console
    """
    if print_line is True:
        print(line)
    if fpl is not None:
        fpl.write(line + "\n")


# progress printers


def pr_header(tag: str, marker_string: str = '#', width: int = 35):
    """
    print a header line with the tag centered between marker strings

    Args:
        tag (str): header text
    """
    print(f" {marker_string} {tag:^{width}} {marker_string}")


def pr_title(tag: str, prefix_string: str = '|', space_filler: str = '.', width: int = 35):
    """
    print a title line, the tag padded with the space filler

    Args:
        tag (str): title text
        prefix_string (str, optional): prefix string. Defaults to '|'.
    """
    print(f" {prefix_string} {tag:{space_filler}<{width}}:", flush=True)


def pr_open(tag: str, prefix_string: str = '|', space_filler: str = '.', width: int = 35):
    """
    write progress open line with tag

    Args:
        tag (str): progress label
        prefix_string (str): prefix string. Default: '|'
    """
    print(f" {prefix_string} {tag:{space_filler}<{width}} [", end="", flush=True)


def pr_close(tag: str = ''):
    """
    write the close tag with new line

    Args:
        tag (str, optional): text to print before the closing bracket. Defaults to ''.
    """
    print(f"{tag}]", flush=True)


def pr_act(act: str = "."):
    """
    write progress character

    Args:
        act (str, optional): progress character. Defaults to ".".
    """
    print(f"{act}", end="", flush=True)

# __END__
```
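A short sketch of how the deleted `pr_*` progress printers combine (the loop is hypothetical):

```python
from corelibs.debug_handling.writeline import pr_act, pr_close, pr_open  # old location

pr_open("processing records")  # prints the dot-padded label and an opening "["
for _ in range(20):
    pr_act(".")                # one character per processed item, no newline
pr_close(" done")              # prints " done]" and ends the line
```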
@@ -1,46 +0,0 @@

```python
"""
File handling utilities
"""

import os
import shutil
from pathlib import Path


def remove_all_in_directory(directory: Path, ignore_files: list[str] | None = None, verbose: bool = False) -> bool:
    """
    remove all files and folders in a directory
    can exclude files or folders

    Args:
        directory (Path): directory to clean out
        ignore_files (list[str], optional): file/folder names to keep. Defaults to None.

    Returns:
        bool: False if directory is not a directory, else True
    """
    if not directory.is_dir():
        return False
    if ignore_files is None:
        ignore_files = []
    if verbose:
        print(f"Remove old files in: {directory.name} [", end="", flush=True)
    # remove all files and folders in given directory by recursive globbing
    for file in directory.rglob("*"):
        # skip entries already removed together with a parent directory via rmtree
        if not file.exists():
            continue
        # skip if in ignore files
        if file.name in ignore_files:
            continue
        # remove one file, or a whole directory
        if file.is_file():
            os.remove(file)
            if verbose:
                print(".", end="", flush=True)
        elif file.is_dir():
            shutil.rmtree(file)
            if verbose:
                print("/", end="", flush=True)
    if verbose:
        print("]", flush=True)
    return True

# __END__
```
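A minimal usage sketch for the deleted `remove_all_in_directory` (the paths are made up):

```python
from pathlib import Path

from corelibs.file_handling.file_handling import remove_all_in_directory  # old location

# clear a scratch folder but keep .gitkeep placeholders
removed = remove_all_in_directory(
    Path("tmp/cache"),   # hypothetical scratch directory
    ignore_files=[".gitkeep"],
    verbose=True,
)
print("cleaned:", removed)
```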
@@ -1,477 +0,0 @@

```python
"""
AUTHOR: Clemens Schwaighofer
DATE CREATED: 2009/7/24 (2025/7/2)
DESCRIPTION: progress percent class (perl -> python)

HOW TO USE
* load
    from progress import Progress
* init
    prg = Progress()
    allowed parameters to pass are (in order)
    - verbose (0/1/...) : show output
    - precision (-2~10) : -2 (5%), -1 (10%), 0 (normal 0-100%), 1~10 (100.m~%)
    - microtime (1/0/-1) : show microtime in eta/run time
    - wide time (bool) : pad time so the time column doesn't change the width of the line
    - prefix line break (bool): add line break before string and not only after
    prg = Progress(verbose = 1, precision = 2)
* settings methods
    set_wide_time(bool)
    set_micro_time(int -1/0/1)
    set_prefix_lb(bool)
    set_verbose(0/1 int)
    set_precision(-2~10 int)
    set_linecount(int)
    set_filesize(int)
    set_start_time(time optional)
    set_eta_start_time(time optional)
    set_end_time(time optional)
    show_position(file pos optional)
"""

import time
from typing import Literal
from math import floor
from corelibs.string_handling.datetime_helpers import convert_timestamp
from corelibs.string_handling.byte_helpers import format_bytes


class Progress():
    """
    file progress output information
    """
    def __init__(
        self,
        verbose: int = 0,
        precision: int = 1,
        microtime: Literal[-1, 0, 1] = 0,
        wide_time: bool = False,
        prefix_lb: bool = False
    ):
        # set default var stuff
        # max lines in input
        self.linecount: int = 0
        # max file size
        self.filesize: int = 0
        # * comma after percent
        self.precision: int = 0
        # * if flagged 1, then the wide 15 char left bound format is used
        self.wide_time: bool = False
        # * verbose status from outside
        self.verbose: bool = False
        # * microtime output for last run time (1 for enable, 0 for auto, -1 for disable)
        self.microtime: Literal[-1, 0, 1] = 0
        # micro time flag for last group
        self.lg_microtime: bool = False
        # = flag if output was given
        self.change = 0
        # = global start for the full script running time
        self.start: float | None = None
        # = for the eta time, can be set after a query or long read in, to not create a wrong ETA time
        self.start_run: float | None = None
        # loop start
        self.start_time: float | None = None
        # global end
        self.end: float | None = None
        # loop end
        self.end_time: float | None = None
        # run time in seconds, set when end time method is called
        self.run_time: float | None = None
        # = filesize current
        self.count_size: int | None = None
        # position current
        self.count: int = 0
        # last count (position)
        self.current_count: int = 0
        # the current file pos
        self.file_pos: int | None = None
        # lines processed in the last run
        self.lines_processed: int = 0
        # time in the seconds for the last group run (until percent change)
        self.last_group: float = 0
        # float value, lines processed per second in the last group run
        self.lines_in_last_group: float = 0
        # float value, lines processed per second in the complete run
        self.lines_in_global: float = 0
        # float value, bytes processed per second in the last group run
        self.bytes_in_last_group: float = 0
        # float value, bytes processed per second in the complete run
        self.bytes_in_global: float = 0
        # bytes processed in last run (in bytes)
        self.size_in_last_group: int = 0
        # current file position (size)
        self.current_size: int = 0
        # last percent position
        self.last_percent: int | float = 0
        # if we have normal % or in steps of 10
        self.precision_ten_step: int = 0
        # the default size, this is precision + 4
        self.percent_print: int = 5
        # this is 1 if it is 1 or 0 for precision, else precision size
        self.percent_precision: int = 1
        # prefix line with a line break
        self.prefix_lb: bool = False
        # estimated time to finish
        self.eta: float | None = None
        # run time since start
        self.full_time_needed: float | None = None
        # the actual output
        self.string: str = ''

        # initialize the class
        self.set_precision(precision)
        self.set_verbose(verbose)
        self.set_micro_time(microtime)
        self.set_wide_time(wide_time)
        self.set_prefix_lb(prefix_lb)
        self.set_start_time()

    def reset(self):
        """
        resets the current progress to 0, but keeps the overall start variables set
        """
        # reset what always gets reset
        self.count = 0
        self.count_size = None
        self.current_count = 0
        self.linecount = 0
        self.lines_processed = 0
        self.last_group = 0
        self.lines_in_last_group = 0
        self.lines_in_global = 0
        self.bytes_in_last_group = 0
        self.bytes_in_global = 0
        self.size_in_last_group = 0
        self.filesize = 0
        self.current_size = 0
        self.last_percent = 0
        self.eta = 0
        self.full_time_needed = 0
        self.start_run = None
        self.start_time = None
        self.end_time = None

    def set_wide_time(self, wide_time: bool) -> bool:
        """
        sets the show wide time flag

        Arguments:
            wide_time {bool} -- pad the time columns to a fixed width

        Returns:
            bool -- the flag as set
        """
        self.wide_time = wide_time
        return self.wide_time

    def set_micro_time(self, microtime: Literal[-1, 0, 1]) -> Literal[-1, 0, 1]:
        """sets the show microtime flag: -1 OFF, 0 AUTO, 1 ON

        Returns:
            Literal[-1, 0, 1] -- the flag as set
        """
        self.microtime = microtime
        return self.microtime

    def set_prefix_lb(self, prefix_lb: bool) -> bool:
        """
        set prefix line break flag

        Arguments:
            prefix_lb {bool} -- prefix output with a line break

        Returns:
            bool -- the flag as set
        """
        self.prefix_lb = prefix_lb
        return self.prefix_lb

    def set_verbose(self, verbose: int) -> bool:
        """
        set the internal verbose flag to True if any value higher than 0 is given, else False

        Arguments:
            verbose {int} -- verbosity level

        Returns:
            bool -- the flag as set
        """
        if verbose > 0:
            self.verbose = True
        else:
            self.verbose = False
        return self.verbose

    def set_precision(self, precision: int) -> int:
        """
        sets the output precision size: -2 for five step, -1 for ten step,
        else sets the precision normally; for 0 no precision is set, maximum precision is 10

        Arguments:
            precision {int} -- precision in the range -2~10

        Returns:
            int -- the precision as set
        """
        # if not a valid number, we set it to 0
        if precision < -2 or precision > 10:
            precision = 0
        if precision < 0:
            if precision < -1:
                self.precision_ten_step = 5
            else:
                self.precision_ten_step = 10
            self.precision = 0  # no comma
            self.percent_precision = 0  # no print precision
            self.percent_print = 3  # max 3 length
        else:
            # comma values visible
            self.precision = 10 if precision < 0 or precision > 10 else precision
            # for calculation of precision
            self.percent_precision = 10 if precision < 0 or precision > 10 else precision
            # for the format output base is 4, plus the percent precision length
            self.percent_print = (3 if precision == 0 else 4) + self.percent_precision
        # return the set precision
        return self.precision

    def set_linecount(self, linecount: int) -> int:
        """
        set the maximum lines in this file, if value is 0 or smaller then it is set to 1

        Arguments:
            linecount {int} -- line count

        Returns:
            int -- the line count as set
        """
        if linecount > 0:
            self.linecount = linecount
        else:
            self.linecount = 1
        return self.linecount

    def set_filesize(self, filesize: int) -> int:
        """
        set the maximum filesize for this file, if value is 0 or smaller then it is set to 1

        Arguments:
            filesize {int} -- file size

        Returns:
            int -- the file size as set
        """
        if filesize > 0:
            self.filesize = filesize
        else:
            self.filesize = 1
        return self.filesize

    def set_start_time(self, time_value: float | None = None) -> None:
        """
        initial set of the start times, auto set

        Keyword Arguments:
            time_value {float | None} -- start time, current time if not set (default: {None})
        """
        # note: resolve the default per call, a "time.time()" default argument
        # would be evaluated only once at definition time
        if time_value is None:
            time_value = time.time()
        # avoid possible double set of the original start time
        if not self.start:
            self.start = time_value
        self.start_time = time_value
        self.start_run = time_value

    def set_eta_start_time(self, time_value: float | None = None) -> None:
        """
        sets the loop % run time, for correct ETA calculation
        calls set start time, as the main start time is only set once

        Keyword Arguments:
            time_value {float | None} -- start time, current time if not set (default: {None})
        """
        self.set_start_time(time_value)

    def set_end_time(self, time_value: float | None = None) -> None:
        """
        set the end time

        Keyword Arguments:
            time_value {float | None} -- end time, current time if not set (default: {None})
        """
        if time_value is None:
            time_value = time.time()
        self.end = time_value
        self.end_time = time_value
        if self.start is None:
            self.start = 0
        # the overall run time in micro seconds
        self.run_time = self.end - self.start

    def show_position(self, filepos: int = 0) -> str:
        """
        processes the current position, either based on the read file size pos, or the line count

        Keyword Arguments:
            filepos {int} -- current file position in bytes (default: {0})

        Returns:
            str -- the formatted progress string, empty if nothing was built yet
        """
        show_filesize = True  # if we print from file size or line count
        # microtime flags
        eta_microtime = False
        ftn_microtime = False
        lg_microtime = False
        # percent precision calc
        # _p_spf = "{:." + str(self.precision) + "f}"
        # output format for percent
        _pr_p_spf = "{:>" + str(self.percent_print) + "." + str(self.percent_precision) + "f}"
        # set the linecount precision based on the final linecount, if not, leave it empty
        _pr_lc = "{}"
        if self.linecount:
            _pr_lc = "{:>" + str(len(str(f"{self.linecount:,}"))) + ",}"
        # time format, if flag is set, the wide format is used
        _pr_tf = "{}"
        if self.wide_time:
            _pr_tf = "{:>15}"

        # count up
        self.count += 1
        # if we have file pos from parameter
        if filepos != 0:
            self.file_pos = filepos
        else:
            # we did not, so we set internal value
            self.file_pos = self.count
            # we also check if the filesize was set now
            if self.filesize == 0:
                self.filesize = self.linecount
            # set ignore filesize output (no data)
            show_filesize = False
        # set the count size based on the file pos, is only used if we have filesize
        self.count_size = self.file_pos
        # do normal or down to 10 (0, 10, ...) %
        if self.precision_ten_step:
            _percent = int((self.file_pos / float(self.filesize)) * 100)
            mod = _percent % self.precision_ten_step
            percent = _percent if mod == 0 else self.last_percent
        else:
            # calc percent
            percent = round(((self.file_pos / float(self.filesize)) * 100), self.precision)

        # output
        if percent != self.last_percent:
            self.end_time = time.time()  # current time (for loop time)
            if self.start is None:
                self.start = 0
            if self.start_time is None:
                self.start_time = 0
            # for from the beginning
            full_time_needed = self.end_time - self.start  # how long from the start
            self.last_group = self.end_time - self.start_time  # how long for last loop
            self.lines_processed = self.count - self.current_count  # how many lines processed
            # lines in last group
            self.lines_in_last_group = (self.lines_processed / self.last_group) if self.last_group else 0
            # lines in global
            self.lines_in_global = (self.count / full_time_needed) if full_time_needed else 0
            # if we have linecount or not
            if self.linecount == 0:
                full_time_per_line = (full_time_needed if full_time_needed else 1) / self.count_size  # how long for all
                # estimate for the rest
                eta = full_time_per_line * (self.filesize - self.count_size)
            else:
                # how long for all
                full_time_per_line = (full_time_needed if full_time_needed else 1) / self.count
                # estimate for the rest
                eta = full_time_per_line * (self.linecount - self.count)
            # just in case ...
            if eta < 0:
                eta = 0
            # check if to show microtime
            # ON
            if self.microtime == 1:
                eta_microtime = ftn_microtime = lg_microtime = True
            # AUTO
            if self.microtime == 0:
                if eta > 0 and eta < 1:
                    eta_microtime = True
                if full_time_needed > 0 and full_time_needed < 1:
                    ftn_microtime = True
                # pre check last group: if pre comma part is same add microtime anyway
                if self.last_group > 0 and self.last_group < 1:
                    lg_microtime = True
                if self.last_group == floor(self.last_group):
                    lg_microtime = True
                    self.last_group = floor(self.last_group)
            # if with filesize or without
            if show_filesize:
                # last group size
                self.size_in_last_group = self.count_size - self.current_size
                # calc kb/s if there is any filesize data
                # last group
                self.bytes_in_last_group = (self.size_in_last_group / self.last_group) if self.last_group else 0
                # global
                self.bytes_in_global = (self.count_size / full_time_needed) if full_time_needed else 0
                # only used if we run with file size for the next check
                self.current_size = self.count_size

                if self.verbose:
                    self.string = (
                        f"Processed {_pr_p_spf}% "
                        "[{} / {}] | "
                        f"{_pr_lc} / {_pr_lc} Lines | ETA: {_pr_tf} / TR: {_pr_tf} / "
                        "LR: {:,} "
                        "lines ({:,}) in {}, {:,.2f} ({:,.2f}) lines/s, {} ({}) b/s"
                    ).format(
                        float(percent),
                        format_bytes(self.count_size),
                        format_bytes(self.filesize),
                        self.count,
                        self.linecount,
                        convert_timestamp(eta, eta_microtime),
                        convert_timestamp(full_time_needed, ftn_microtime),
                        self.lines_processed,
                        self.size_in_last_group,
                        convert_timestamp(self.last_group, lg_microtime),
                        self.lines_in_global,
                        self.lines_in_last_group,
                        format_bytes(self.bytes_in_global),
                        format_bytes(self.bytes_in_last_group)
                    )
            else:
                if self.verbose:
                    self.string = (
                        f"Processed {_pr_p_spf}% | {_pr_lc} / {_pr_lc} Lines "
                        f"| ETA: {_pr_tf} / TR: {_pr_tf} / "
                        "LR: {:,} lines in {}, {:,.2f} ({:,.2f}) lines/s"
                    ).format(
                        float(percent),
                        self.count,
                        self.linecount,
                        convert_timestamp(eta, eta_microtime),
                        convert_timestamp(full_time_needed, ftn_microtime),
                        self.lines_processed,
                        convert_timestamp(self.last_group, lg_microtime),
                        self.lines_in_global,
                        self.lines_in_last_group
                    )
            # prefix return string with line break if flagged
            self.string = ("\n" if self.prefix_lb else '') + self.string
            # print the string if verbose is turned on
            if self.verbose:
                print(self.string)

            # write back vars
            self.last_percent = percent
            self.eta = eta
            self.full_time_needed = full_time_needed
            self.lg_microtime = lg_microtime
            # for the next run, check data
            self.start_time = time.time()
            self.current_count = self.count
            # trigger if this is a change
            self.change = 1
        else:
            # trigger if this is a change
            self.change = 0
        # return string
        return self.string
        # } END OF ShowPosition

# __END__
```
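A minimal usage sketch of the deleted `Progress` class driven by line count, following the "HOW TO USE" block in its docstring (the loop size is made up):

```python
from corelibs.script_handling.progress import Progress  # old, now deprecated location

prg = Progress(verbose=1, precision=0)
prg.set_linecount(1000)
prg.set_eta_start_time()  # start the ETA clock right before the loop
for _ in range(1000):
    prg.show_position()   # prints one status line whenever the percent changes
prg.set_end_time()
```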
@@ -1,35 +0,0 @@

```python
"""
helper functions for jmespath interfaces
"""

from typing import Any
import jmespath
import jmespath.exceptions


def jmespath_search(search_data: dict[Any, Any] | list[Any], search_params: str) -> Any:
    """
    jmespath search wrapper

    Args:
        search_data (dict | list): data to search in
        search_params (str): jmespath search expression

    Raises:
        ValueError: jmespath.exceptions.LexerError
        ValueError: jmespath.exceptions.ParseError
        ValueError: TypeError for invalid search_params

    Returns:
        Any: dict/list/etc, None if nothing found
    """
    try:
        search_result = jmespath.search(search_params, search_data)
    except jmespath.exceptions.LexerError as excp:
        raise ValueError(f"Compile failed: {search_params}: {excp}") from excp
    except jmespath.exceptions.ParseError as excp:
        raise ValueError(f"Parse failed: {search_params}: {excp}") from excp
    except TypeError as excp:
        raise ValueError(f"Type error for search_params: {excp}") from excp
    return search_result

# __END__
```
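A minimal usage sketch for the deleted `jmespath_search` wrapper (the data is made up):

```python
from corelibs.json_handling.jmespath_helper import jmespath_search  # old location

data = {"users": [{"name": "Alice", "active": True}, {"name": "Bob", "active": False}]}
# jmespath expression: names of all active users
print(jmespath_search(data, "users[?active].name"))  # ['Alice']
```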
@@ -1,31 +0,0 @@

```python
"""
json encoder for datetime
"""

from typing import Any
from json import JSONEncoder
from datetime import datetime, date


# subclass JSONEncoder
class DateTimeEncoder(JSONEncoder):
    """
    Override the default method
    cls=DateTimeEncoder
    """
    def default(self, o: Any) -> str | None:
        if isinstance(o, (date, datetime)):
            return o.isoformat()
        return None


def default(obj: Any) -> str | None:
    """
    default override
    default=default
    """
    if isinstance(obj, (date, datetime)):
        return obj.isoformat()
    return None

# __END__
```
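A minimal usage sketch for the deleted encoder (the import path is assumed from the `json_handling` module listed in the README):

```python
import json
from datetime import datetime

from corelibs.json_handling.json_helper import DateTimeEncoder  # assumed old location

payload = {"created": datetime(2025, 7, 2, 12, 0)}
# datetime values are serialized as ISO-8601 strings
print(json.dumps(payload, cls=DateTimeEncoder))  # {"created": "2025-07-02T12:00:00"}
```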
@@ -1,130 +0,0 @@

```python
"""
wrapper around search path
"""

from typing import Any


def array_search(
    search_params: list[dict[str, str | bool | list[str | None]]],
    data: list[dict[str, Any]],
    return_index: bool = False
) -> list[dict[str, Any]]:
    """
    search in an array of dicts with an array of Key/Value sets
    all Key/Value sets must match
    Value set can be a list for OR match
    option: case_sensitive: default True

    Args:
        search_params (list): list of search params in "Key"/"Value" lists with options
        data (list): data to search in, must be a list
        return_index (bool): return index of list [default False]

    Raises:
        ValueError: if search params is not a list
        KeyError: if Key or Value are missing in search params
        KeyError: more than one Key with the same name set

    Returns:
        list: list of found elements, or if return index is set a
        list of dicts with "index" and "data", where "data" holds the result
    """
    if not isinstance(search_params, list):  # type: ignore
        raise ValueError("search_params must be a list")
    keys = []
    for search in search_params:
        if not search.get('Key') or not search.get('Value'):
            raise KeyError(
                f"Either Key '{search.get('Key', '')}' or "
                f"Value '{search.get('Value', '')}' is missing or empty"
            )
        # if double key -> abort
        if search.get("Key") in keys:
            raise KeyError(
                f"Key {search.get('Key', '')} already exists in search_params"
            )
        # remember the key for the duplicate check above
        keys.append(search.get("Key"))

    return_items: list[dict[str, Any]] = []
    for si_idx, search_item in enumerate(data):
        # for each search entry, all must match
        matching = 0
        for search in search_params:
            # either Value direct or if Value is list then any of those items can match
            # values are compared in lower case if case sensitive is off
            # lower case left side
            # TODO: allow nested Keys. eg "Key: ["Key a", "key b"]" to be ["Key a"]["key b"]
            if search.get("case_sensitive", True) is False:
                search_value = search_item.get(str(search['Key']), "").lower()
            else:
                search_value = search_item.get(str(search['Key']), "")
            # lower case right side
            if isinstance(search['Value'], list):
                search_in = [
                    str(k).lower()
                    if search.get("case_sensitive", True) is False else k
                    for k in search['Value']
                ]
            elif search.get("case_sensitive", True) is False:
                search_in = str(search['Value']).lower()
            else:
                search_in = search['Value']
            # compare check
            if (
                (
                    isinstance(search_in, list) and
                    search_value in search_in
                ) or
                search_value == search_in
            ):
                matching += 1
        if len(search_params) == matching:
            if return_index is True:
                # the data is now in "data sub set"
                return_items.append({
                    "index": si_idx,
                    "data": search_item
                })
            else:
                return_items.append(search_item)
    # return all found or empty list
    return return_items


def key_lookup(haystack: dict[str, str], key: str) -> str:
    """
    simple key lookup in haystack, returns empty string if not found

    Args:
        haystack (dict[str, str]): dict to look in
        key (str): key to look up

    Returns:
        str: the value, or empty string if not found
    """
    return haystack.get(key, "")


def value_lookup(haystack: dict[str, str], value: str, raise_on_many: bool = False) -> str:
    """
    find by value, if not found returns empty, if not raise on many returns the first one

    Args:
        haystack (dict[str, str]): dict to look in
        value (str): value to find the key for
        raise_on_many (bool, optional): raise if more than one key matches. Defaults to False.

    Raises:
        ValueError: more than one element found with the same value

    Returns:
        str: the key, or empty string if not found
    """
    keys = [__key for __key, __value in haystack.items() if __value == value]
    if not keys:
        return ""
    if raise_on_many is True and len(keys) > 1:
        raise ValueError("More than one element found with the same name")
    return keys[0]

# __END__
```
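A minimal usage sketch for the deleted `array_search` (the records are made up):

```python
from corelibs.iterator_handling.data_search import array_search  # old location

records = [
    {"name": "Alice", "dept": "IT"},
    {"name": "bob", "dept": "HR"},
]
# all Key/Value pairs must match; case_sensitive off matches "bob" against "Bob"
hits = array_search(
    [{"Key": "name", "Value": "Bob", "case_sensitive": False}],
    records,
    return_index=True,
)
print(hits)  # [{'index': 1, 'data': {'name': 'bob', 'dept': 'HR'}}]
```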
@@ -1,37 +0,0 @@

```python
"""
Dict helpers
"""


def mask(
    data_set: dict[str, str],
    mask_keys: list[str] | None = None,
    mask_str: str = "***",
    skip: bool = False
) -> dict[str, str]:
    """
    mask data for output
    checks if any entry of the mask_keys list matches a key in the data set,
    either from the start or at the end

    Arguments:
        data_set {dict[str, str]} -- data to mask

    Keyword Arguments:
        mask_keys {list[str] | None} -- key prefixes/suffixes to mask (default: {None}, meaning ["password", "secret"])
        mask_str {str} -- replacement string (default: {"***"})
        skip {bool} -- return the data unmasked (default: {False})

    Returns:
        dict[str, str] -- the data set with matching values masked
    """
    if skip is True:
        return data_set
    if mask_keys is None:
        mask_keys = ["password", "secret"]
    return {
        key: mask_str
        if any(key.startswith(mask_key) or key.endswith(mask_key) for mask_key in mask_keys) else value
        for key, value in data_set.items()
    }

# __END__
```
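A minimal usage sketch for the deleted `mask` helper (the config dict is made up):

```python
from corelibs.iterator_handling.dict_mask import mask  # old location

config = {"user": "alice", "db_password": "hunter2", "secret_token": "abc"}
# keys matching a mask entry at the start or end are replaced with the mask string
print(mask(config))  # {'user': 'alice', 'db_password': '***', 'secret_token': '***'}
```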
@@ -1,21 +0,0 @@
|
||||
"""
|
||||
dict dump as JSON formatted
|
||||
"""
|
||||
|
||||
import json
|
||||
from typing import Any
|
||||
|
||||
|
||||
def dump_data(data: dict[Any, Any] | list[Any] | str | None) -> str:
|
||||
"""
|
||||
dump formated output from dict/list
|
||||
|
||||
Args:
|
||||
data (dict | list | str): _description_
|
||||
|
||||
Returns:
|
||||
str: _description_
|
||||
"""
|
||||
return json.dumps(data, indent=4, ensure_ascii=False, default=str)


# __END__

@@ -1,39 +0,0 @@
"""
|
||||
Various dictionary, object and list hashers
|
||||
"""
|
||||
|
||||
import json
|
||||
import hashlib
|
||||
from typing import Any
|
||||
|
||||
|
||||
def dict_hash_frozen(data: dict[Any, Any]) -> int:
|
||||
"""
|
||||
hash a dict via freeze
|
||||
|
||||
Args:
|
||||
data (dict): _description_
|
||||
|
||||
Returns:
|
||||
str: _description_
|
||||
"""
|
||||
return hash(frozenset(data.items()))
|
||||
|
||||
|
||||
def dict_hash_crc(data: dict[Any, Any] | list[Any]) -> str:
    """
    Create a sha256 hash over a dict or list,
    an alternative for dict_hash_frozen

    Args:
        data (dict | list): data to hash, must be JSON serializable

    Returns:
        str: sha256 hex digest
    """
    return hashlib.sha256(
        json.dumps(data, sort_keys=True, ensure_ascii=True).encode('utf-8')
    ).hexdigest()
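

# usage sketch (illustrative values): dict_hash_crc is stable across runs
# because it hashes the sorted JSON dump, while dict_hash_frozen depends on
# the per-process hash seed
if __name__ == "__main__":
    entry = {"name": "foo", "count": 3}
    print(dict_hash_frozen(entry))  # int, varies between interpreter runs
    print(dict_hash_crc(entry))     # same hex digest on every run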


# __END__

@@ -1,63 +0,0 @@
"""
|
||||
Various helper functions for type data clean up
|
||||
"""
|
||||
|
||||
from typing import Any, cast
|
||||
|
||||
|
||||
def delete_keys_from_set(
|
||||
set_data: dict[str, Any] | list[Any] | str, keys: list[str]
|
||||
) -> dict[str, Any] | list[Any] | Any:
|
||||
"""
|
||||
remove all keys from set_data
|
||||
|
||||
Args:
|
||||
set_data (dict[str, Any] | list[Any] | None): _description_
|
||||
keys (list[str]): _description_
|
||||
|
||||
Returns:
|
||||
dict[str, Any] | list[Any] | None: _description_
|
||||
"""
|
||||
# skip everything if there is no keys list
|
||||
if not keys:
|
||||
return set_data
|
||||
if isinstance(set_data, dict):
|
||||
for key, value in set_data.copy().items():
|
||||
if key in keys:
|
||||
del set_data[key]
|
||||
if isinstance(value, (dict, list)):
|
||||
delete_keys_from_set(value, keys) # type: ignore Partly unknown
|
||||
elif isinstance(set_data, list):
|
||||
for value in set_data:
|
||||
if isinstance(value, (dict, list)):
|
||||
delete_keys_from_set(value, keys) # type: ignore Partly unknown
|
||||
else:
|
||||
set_data = [set_data]
|
||||
|
||||
return set_data
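

# usage sketch (illustrative values): keys are removed on every nesting level
if __name__ == "__main__":
    record = {"id": 1, "token": "x", "child": {"token": "y", "name": "foo"}}
    print(delete_keys_from_set(record, ["token"]))
    # {'id': 1, 'child': {'name': 'foo'}}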


def build_dict(
    any_dict: Any, ignore_entries: list[str] | None = None
) -> dict[str, Any | list[Any] | dict[Any, Any]]:
    """
    rewrite any AWS *TypeDef to a new dict so we can add/change entries

    Args:
        any_dict (Any): TypeDef dict to rewrite
        ignore_entries (list[str] | None): keys to drop, on any nesting level

    Returns:
        dict[str, Any | list[Any] | dict[Any, Any]]: rewritten dict
    """
    if ignore_entries is None:
        return cast(dict[str, Any | list[Any] | dict[Any, Any]], any_dict)
    # ignore entries can be one key or key nested
    # return {
    #     key: value for key, value in any_dict.items() if key not in ignore_entries
    # }
    return cast(
        dict[str, Any | list[Any] | dict[Any, Any]],
        delete_keys_from_set(any_dict, ignore_entries)
    )


# __END__

@@ -1,120 +0,0 @@
"""
|
||||
A log handler wrapper
|
||||
"""
|
||||
|
||||
import logging.handlers
|
||||
import logging
|
||||
from pathlib import Path
|
||||
from typing import Mapping
|
||||
|
||||
|
||||
class Log:
|
||||
"""
|
||||
logger setup
|
||||
"""
|
||||
|
||||
EXCEPTION: int = 60
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
log_path: Path,
|
||||
log_name: str,
|
||||
log_level_console: str = 'WARNING',
|
||||
log_level_file: str = 'DEBUG',
|
||||
add_start_info: bool = True
|
||||
):
|
||||
logging.addLevelName(Log.EXCEPTION, 'EXCEPTION')
|
||||
if not log_name.endswith('.log'):
|
||||
log_path = log_path.with_suffix('.log')
|
||||
# overall logger settings
|
||||
self.logger = logging.getLogger(log_name)
|
||||
# set maximum logging level for all logging output
|
||||
self.logger.setLevel(logging.DEBUG)
|
||||
# console logger
|
||||
self.__console_handler(log_level_console)
|
||||
# file logger
|
||||
self.__file_handler(log_level_file, log_path)
|
||||
# if requests set a start log
|
||||
if add_start_info is True:
|
||||
self.break_line('START')

    def __filter_exceptions(self, record: logging.LogRecord) -> bool:
        return record.levelname != "EXCEPTION"

    def __console_handler(self, log_level_console: str = 'WARNING'):
        # console logger
        if not isinstance(getattr(logging, log_level_console.upper(), None), int):
            log_level_console = 'WARNING'
        console_handler = logging.StreamHandler()
        formatter_console = logging.Formatter(
            (
                '[%(asctime)s.%(msecs)03d] '
                '[%(filename)s:%(funcName)s:%(lineno)d] '
                '<%(levelname)s> '
                '%(message)s'
            ),
            datefmt="%Y-%m-%d %H:%M:%S",
        )
        console_handler.setLevel(log_level_console)
        # do not show exception logs on the console
        console_handler.addFilter(self.__filter_exceptions)
        console_handler.setFormatter(formatter_console)
        self.logger.addHandler(console_handler)

    def __file_handler(self, log_level_file: str, log_path: Path) -> None:
        # file logger
        if not isinstance(getattr(logging, log_level_file.upper(), None), int):
            log_level_file = 'DEBUG'
        file_handler = logging.handlers.TimedRotatingFileHandler(
            filename=log_path,
            encoding="utf-8",
            when="D",
            interval=1
        )
        formatter_file_handler = logging.Formatter(
            (
                '[%(asctime)s.%(msecs)03d] '
                '[%(pathname)s:%(funcName)s:%(lineno)d] '
                '[%(name)s:%(process)d] '
                '<%(levelname)s> '
                '%(message)s'
            ),
            datefmt="%Y-%m-%dT%H:%M:%S",
        )
        file_handler.setLevel(log_level_file)
        file_handler.setFormatter(formatter_file_handler)
        self.logger.addHandler(file_handler)

    def break_line(self, info: str = "BREAK"):
        """
        add a break line as info level

        Keyword Arguments:
            info {str} -- marker text inside the break line (default: {"BREAK"})
        """
        self.logger.info("[%s] ================================>", info)

    def exception(self, msg: object, *args: object, extra: Mapping[str, object] | None = None) -> None:
        """
        log on exception level

        Args:
            msg (object): message to log
            *args (object): arguments for msg
            extra: Mapping[str, object] | None: extra arguments for the formatting if needed
        """
        self.logger.log(Log.EXCEPTION, msg, *args, exc_info=True, extra=extra)

    def validate_log_level(self, log_level: str) -> bool:
        """
        if the log level is invalid, will return False

        Args:
            log_level (str): log level name to validate

        Returns:
            bool: True if the log level name is valid
        """
        return isinstance(getattr(logging, log_level.upper(), None), int)
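

# usage sketch (illustrative path): console shows WARNING and up, the file
# gets everything from DEBUG, and exception logs are kept off the console
if __name__ == "__main__":
    log = Log(Path("/tmp/demo.log"), "demo")
    log.logger.warning("shown on console and in file")
    log.logger.debug("file only")
    try:
        1 / 0
    except ZeroDivisionError:
        log.exception("division failed")  # file only, with traceback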


# __END__

@@ -1,190 +0,0 @@
"""
|
||||
requests lib interface
|
||||
V2 call type
|
||||
"""
|
||||
|
||||
from typing import Any
|
||||
import warnings
|
||||
import requests
|
||||
# to hide the verfiy warnings because of the bad SSL settings from Netskope, Akamai, etc
|
||||
warnings.filterwarnings('ignore', message='Unverified HTTPS request')
|
||||
|
||||
|
||||
class Caller:
|
||||
"""_summary_"""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
header: dict[str, str],
|
||||
verify: bool = True,
|
||||
timeout: int = 20,
|
||||
proxy: dict[str, str] | None = None
|
||||
):
|
||||
self.headers = header
|
||||
self.timeout: int = timeout
|
||||
self.cafile = "/Library/Application Support/Netskope/STAgent/data/nscacert.pem"
|
||||
self.verify = verify
|
||||
self.proxy = proxy
|
||||
|
||||
def __timeout(self, timeout: int | None) -> int:
|
||||
if timeout is not None:
|
||||
return timeout
|
||||
return self.timeout

    def __call(
        self,
        action: str,
        url: str,
        data: dict[str, Any] | None = None,
        params: dict[str, Any] | None = None,
        timeout: int | None = None
    ) -> requests.Response | None:
        """
        call wrapper, on error returns None

        Args:
            action (str): HTTP action: get, post, put, patch, delete
            url (str): URL to call
            data (dict | None): JSON body data. Defaults to None.
            params (dict | None): URL query parameters. Defaults to None.

        Returns:
            requests.Response | None: response, or None on error
        """
        if data is None:
            data = {}
        try:
            response = None
            if action == "get":
                response = requests.get(
                    url,
                    params=params,
                    headers=self.headers,
                    timeout=self.__timeout(timeout),
                    verify=self.verify,
                    proxies=self.proxy
                )
            elif action == "post":
                response = requests.post(
                    url,
                    params=params,
                    json=data,
                    headers=self.headers,
                    timeout=self.__timeout(timeout),
                    verify=self.verify,
                    proxies=self.proxy
                )
            elif action == "put":
                response = requests.put(
                    url,
                    params=params,
                    json=data,
                    headers=self.headers,
                    timeout=self.__timeout(timeout),
                    verify=self.verify,
                    proxies=self.proxy
                )
            elif action == "patch":
                response = requests.patch(
                    url,
                    params=params,
                    json=data,
                    headers=self.headers,
                    timeout=self.__timeout(timeout),
                    verify=self.verify,
                    proxies=self.proxy
                )
            elif action == "delete":
                response = requests.delete(
                    url,
                    params=params,
                    headers=self.headers,
                    timeout=self.__timeout(timeout),
                    verify=self.verify,
                    proxies=self.proxy
                )
            return response
        except requests.exceptions.InvalidSchema as e:
            print(f"Invalid URL during '{action}' for {url}:\n\t{e}")
            return None
        except requests.exceptions.ReadTimeout as e:
            print(f"Timeout ({self.timeout}s) during '{action}' for {url}:\n\t{e}")
            return None
        except requests.exceptions.ConnectionError as e:
            print(f"Connection error during '{action}' for {url}:\n\t{e}")
            return None

    def get(self, url: str, params: dict[str, Any] | None = None) -> requests.Response | None:
        """
        get data

        Args:
            url (str): URL to call
            params (dict | None): URL query parameters

        Returns:
            requests.Response | None: response, or None on error
        """
        return self.__call('get', url, params=params)

    def post(
        self, url: str, data: dict[str, Any] | None = None, params: dict[str, Any] | None = None
    ) -> requests.Response | None:
        """
        post data

        Args:
            url (str): URL to call
            data (dict | None): JSON body data
            params (dict | None): URL query parameters

        Returns:
            requests.Response | None: response, or None on error
        """
        return self.__call('post', url, data, params)

    def put(
        self, url: str, data: dict[str, Any] | None = None, params: dict[str, Any] | None = None
    ) -> requests.Response | None:
        """
        put data

        Args:
            url (str): URL to call
            data (dict | None): JSON body data
            params (dict | None): URL query parameters

        Returns:
            requests.Response | None: response, or None on error
        """
        return self.__call('put', url, data, params)

    def patch(
        self, url: str, data: dict[str, Any] | None = None, params: dict[str, Any] | None = None
    ) -> requests.Response | None:
        """
        patch data

        Args:
            url (str): URL to call
            data (dict | None): JSON body data
            params (dict | None): URL query parameters

        Returns:
            requests.Response | None: response, or None on error
        """
        return self.__call('patch', url, data, params)

    def delete(self, url: str, params: dict[str, Any] | None = None) -> requests.Response | None:
        """
        delete

        Args:
            url (str): URL to call
            params (dict | None): URL query parameters

        Returns:
            requests.Response | None: response, or None on error
        """
        return self.__call('delete', url, params=params)
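

# usage sketch (hypothetical endpoint): all verbs share headers, timeout,
# verify and proxy settings set once on the Caller instance
if __name__ == "__main__":
    api = Caller({"Authorization": "Bearer TOKEN"}, timeout=10)
    resp = api.get("https://example.com/api/items", params={"page": 1})
    if resp is not None and resp.ok:
        print(resp.json())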


# __END__

@@ -1,97 +0,0 @@
"""
|
||||
Helper methods for scripts
|
||||
"""
|
||||
|
||||
import time
|
||||
import os
|
||||
import sys
|
||||
from pathlib import Path
|
||||
import psutil
|
||||
|
||||
|
||||
def wait_abort(sleep: int = 5) -> None:
|
||||
"""
|
||||
wait a certain time for an abort command
|
||||
|
||||
Keyword Arguments:
|
||||
sleep {int} -- _description_ (default: {5})
|
||||
"""
|
||||
try:
|
||||
print(f"Waiting {sleep} seconds (Press CTRL +C to abort) [", end="", flush=True)
|
||||
for _ in range(1, sleep):
|
||||
print(".", end="", flush=True)
|
||||
time.sleep(1)
|
||||
print("]", flush=True)
|
||||
except KeyboardInterrupt:
|
||||
print("\nInterrupted by user")
|
||||
sys.exit(0)
|
||||
print("\n\n")


def lock_run(lock_file: Path) -> None:
    """
    lock a script run
    needed is the lock file name

    Arguments:
        lock_file {Path} -- lock file including path

    Raises:
        IOError: if the script is already running
        IOError: if a stale lock file cannot be removed
        IOError: if the lock file cannot be written
    """
    no_file = False
    run_pid = os.getpid()
    # or os.path.isfile()
    try:
        with open(lock_file, "r", encoding="UTF-8") as fp:
            exists = False
            pid = fp.read().strip()
            if pid:
                # check if this pid exists
                for proc in psutil.process_iter(['pid', 'name', 'cmdline']):
                    try:
                        # the pid was read as string, compare to the process id as string
                        if pid == str(proc.info['pid']):
                            exists = True
                            break
                    except (psutil.NoSuchProcess, psutil.AccessDenied, psutil.ZombieProcess):
                        # in case we cannot access
                        continue
            if not exists:
                # no pid but lock file, unlink
                try:
                    lock_file.unlink()
                    no_file = True
                except IOError as e:
                    raise IOError(f"Cannot remove lock_file: {lock_file}: {e}") from e
            else:
                raise IOError(f"Script is already running with PID {pid}")
    except FileNotFoundError:
        # catch only the missing file here, so the "already running" IOError propagates
        no_file = True
    if no_file:
        try:
            with open(lock_file, "w", encoding="UTF-8") as fp:
                fp.write(str(run_pid))
        except IOError as e:
            raise IOError(f"Cannot open run lock file '{lock_file}' for writing: {e}") from e


def unlock_run(lock_file: Path) -> None:
    """
    removes the lock file

    Arguments:
        lock_file {Path} -- lock file including path

    Raises:
        IOError: if the lock file cannot be removed
    """
    try:
        lock_file.unlink()
    except IOError as e:
        raise IOError(f"Cannot remove lock_file: {lock_file}: {e}") from e


# __END__

@@ -1,37 +0,0 @@
"""
|
||||
Format bytes
|
||||
"""
|
||||
|
||||
|
||||
def format_bytes(byte_value: float | int | str) -> str:
|
||||
"""
|
||||
Format a byte value to a human readable string
|
||||
|
||||
Arguments:
|
||||
byte_value {float | int | str} -- _description_
|
||||
|
||||
Returns:
|
||||
str -- _description_
|
||||
"""
|
||||
# if string exit
|
||||
if isinstance(byte_value, str):
|
||||
return byte_value
|
||||
# empty byte value is set to 0
|
||||
if not byte_value:
|
||||
byte_value = float(0)
|
||||
# if not float, convert to flaot
|
||||
if isinstance(byte_value, int):
|
||||
byte_value = float(byte_value)
|
||||
# loop through valid extensions
|
||||
for unit in ["B", "KB", "MB", "GB", "TB", "PB", "EB", "ZB"]:
|
||||
# never go into the negativ and check if it is smaller than next set
|
||||
# if it is, print out return string
|
||||
if abs(byte_value) < 1024.0:
|
||||
return f"{byte_value:,.2f} {unit}"
|
||||
# divided for the next loop check
|
||||
byte_value /= 1024.0
|
||||
# if it is too big, return YB
|
||||
return f"{byte_value:,.2f} YB"


# __END__

@@ -1,63 +0,0 @@
"""
|
||||
Various string based date/time helpers
|
||||
"""
|
||||
|
||||
from math import floor
|
||||
import time
|
||||
|
||||
|
||||
def convert_timestamp(timestamp: float | int, show_micro: bool = True) -> str:
|
||||
"""
|
||||
format timestamp into human readable format
|
||||
|
||||
Arguments:
|
||||
timestamp {float} -- _description_
|
||||
|
||||
Keyword Arguments:
|
||||
show_micro {bool} -- _description_ (default: {True})
|
||||
|
||||
Returns:
|
||||
str -- _description_
|
||||
"""
|
||||
# cut of the ms, but first round them up to four
|
||||
__timestamp_ms_split = str(round(timestamp, 4)).split(".")
|
||||
timestamp = int(__timestamp_ms_split[0])
|
||||
try:
|
||||
ms = int(__timestamp_ms_split[1])
|
||||
except IndexError:
|
||||
ms = 0
|
||||
timegroups = (86400, 3600, 60, 1)
|
||||
output: list[int] = []
|
||||
for i in timegroups:
|
||||
output.append(int(floor(timestamp / i)))
|
||||
timestamp = timestamp % i
|
||||
# output has days|hours|min|sec ms
|
||||
time_string = ""
|
||||
if output[0]:
|
||||
time_string = f"{output[0]}d"
|
||||
if output[0] or output[1]:
|
||||
time_string += f"{output[1]}h "
|
||||
if output[0] or output[1] or output[2]:
|
||||
time_string += f"{output[2]}m "
|
||||
time_string += f"{output[3]}s"
|
||||
if show_micro:
|
||||
time_string += f" {ms}ms" if ms else " 0ms"
|
||||
return time_string
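

# usage sketch: the value is treated as a duration in seconds
if __name__ == "__main__":
    print(convert_timestamp(93784.5))               # "1d 2h 3m 4s 5ms"
    print(convert_timestamp(59, show_micro=False))  # "59s"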


def create_time(timestamp: float, timestamp_format: str = "%Y-%m-%d %H:%M:%S") -> str:
    """
    just takes a timestamp and prints out a human readable format

    Arguments:
        timestamp {float} -- unix timestamp

    Keyword Arguments:
        timestamp_format {str} -- strftime format (default: {"%Y-%m-%d %H:%M:%S"})

    Returns:
        str -- formatted local time
    """
    return time.strftime(timestamp_format, time.localtime(timestamp))


# __END__

@@ -1,226 +0,0 @@
"""
|
||||
Format double byte strings to exact length
|
||||
"""
|
||||
|
||||
import unicodedata
|
||||
|
||||
|
||||
class DoubleByteFormatString:
|
||||
"""
|
||||
Format a string to exact length
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
string: str,
|
||||
cut_length: int,
|
||||
format_length: int | None = None,
|
||||
placeholder: str = '..',
|
||||
format_string: str = '{{:<{len}}}'
|
||||
):
|
||||
"""
|
||||
shorts a string to exact cut length and sets it to format length
|
||||
|
||||
after "cut_length" cut the "placeholder" will be added, so that the new cut_length is never
|
||||
larget than the cut_length given (".." is counted to cut_length)
|
||||
if format_length if set and outside format_length will be set
|
||||
the cut_length is adjusted to format_length if the format_length is shorter
|
||||
|
||||
Example
|
||||
|
||||
"Foo bar baz" 10 charcters -> 5 cut_length -> 10 format_length
|
||||
"Foo.. "
|
||||
|
||||
use class.get_string_short() for cut length shortend string
|
||||
use class.get_string_short_formated() to get the shorted string to format length padding
|
||||
|
||||
creates a class that shortens and sets the format length
|
||||
to use with a print format run the format needs to be pre set in
|
||||
the style of {{:<{len}}} style
|
||||
self.get_string_short_formated() for the "len" parameter
|
||||
|
||||
Args:
|
||||
string (str): string to work with
|
||||
cut_length (int): width to shorten to
|
||||
format_length (int | None): format length. Defaults to None
|
||||
placeholder (str, optional): placeholder to put after shortened string. Defaults to '..'.
|
||||
format_string (str, optional): format string. Defaults to '{{:<{len}}}'
|
||||
"""
|
||||
# output variables
|
||||
self.string_short: str = ''
|
||||
self.string_width_value: int = 0
|
||||
self.string_short_width: int = 0
|
||||
self.format_length_value: int = 0
|
||||
# internal varaibles
|
||||
self.placeholder: str = placeholder
|
||||
# original string
|
||||
self.string: str = ''
|
||||
# width to cut string to
|
||||
self.cut_length: int = 0
|
||||
# format length to set to
|
||||
self.format_length: int = 0
|
||||
# main string
|
||||
self.string = str(string)
|
||||
|
||||
self.format_string: str = format_string
|
||||
|
||||
# if width is > 0 set, else set width of string (fallback)
|
||||
if cut_length > 0:
|
||||
self.cut_length = cut_length
|
||||
elif cut_length <= 0:
|
||||
self.cut_length = self.__string_width_calc(self.string)
|
||||
# format length set, if not set or smaller than 0, set to width of string
|
||||
self.format_length = self.cut_length
|
||||
if format_length is not None and format_length > 0:
|
||||
self.format_length = format_length
|
||||
# check that width is not larger then length if yes, set width to length
|
||||
self.cut_length = min(self.cut_length, self.format_length)
|
||||
|
||||
# process the string shorten and format length calculation
|
||||
self.process()

    def process(self):
        """
        runs all the class methods to set the string length, the shortened string
        and the format length
        """
        # call the internal ones to set the data
        if self.string:
            self.__string_width()
            self.__shorten_string()
        if self.format_length:
            self.__format_length()

    def get_string_short(self) -> str:
        """
        get the shortened string

        Returns:
            str -- shortened string
        """
        return self.string_short

    def get_string_short_formated(self, format_string: str = '{{:<{len}}}') -> str:
        """
        get the formatted string

        Keyword Arguments:
            format_string {str} -- format string template (default: {'{{:<{len}}}'})

        Returns:
            str -- shortened string padded to the format length
        """
        if not format_string:
            format_string = self.format_string
        return format_string.format(
            len=self.get_format_length()
        ).format(
            self.get_string_short()
        )

    def get_format_length(self) -> int:
        """
        get the format length for outside length set

        Returns:
            int -- calculated format length
        """
        return self.format_length_value

    def get_cut_length(self) -> int:
        """
        get the actual cut length

        Returns:
            int -- cut length
        """
        return self.cut_length

    def get_requested_cut_length(self) -> int:
        """
        get the requested cut length

        Returns:
            int -- cut length
        """
        return self.cut_length

    def get_requested_format_length(self) -> int:
        """
        get the requested format length

        Returns:
            int -- format length
        """
        return self.format_length

    def __string_width_calc(self, string: str) -> int:
        """
        does the actual string width calculation

        Args:
            string (str): string to calculate from

        Returns:
            int: string width, double byte characters count as two
        """
        return sum(1 + (unicodedata.east_asian_width(c) in "WF") for c in string)

    def __string_width(self):
        """
        calculates the string width based on the characters
        this is an internal method and should not be called on its own
        """
        # only run if string is set and is a valid string
        if self.string:
            # calculate width, add +1 for each double byte character
            self.string_width_value = self.__string_width_calc(self.string)

    def __format_length(self):
        """
        set the format length based on the length for the format
        and the shortened string
        this is an internal method and should not be called on its own
        """
        if not self.string_short:
            self.__shorten_string()
        # get correct format length based on string
        if (
            self.string_short and
            self.format_length > 0 and
            self.string_short_width > 0
        ):
            # length: format length wanted
            # subtract the width of the shortened string minus the length of the shortened string
            self.format_length_value = self.format_length - (self.string_short_width - len(self.string_short))
        else:
            # if we have nothing to shorten the length, keep the old one
            self.format_length_value = self.format_length

    def __shorten_string(self):
        """
        shorten string down to the set width
        this is an internal method and should not be called on its own
        """
        # set string width if not set
        if not self.string_width_value:
            self.__string_width()
        # if the double byte string width is larger than the wanted width
        if self.string_width_value > self.cut_length:
            cur_len = 0
            self.string_short = ''
            for char in str(self.string):
                # set the current length if we add the character
                cur_len += 2 if unicodedata.east_asian_width(char) in "WF" else 1
                # if the new length is smaller than the output length to shorten to, add the char
                if cur_len <= (self.cut_length - len(self.placeholder)):
                    self.string_short += char
                    self.string_short_width = cur_len
            # return string with new width and placeholder
            self.string_short = f"{self.string_short}{self.placeholder}"
            self.string_short_width += len(self.placeholder)
        else:
            # if the string is the same size, just copy
            self.string_short = self.string
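

# usage sketch: double byte characters count as two columns, so padding with
# plain str.format would misalign; this class corrects the pad width
if __name__ == "__main__":
    for name in ["alpha", "日本語テキスト", "latin text here"]:
        dbfs = DoubleByteFormatString(name, cut_length=10, format_length=12)
        print(f"|{dbfs.get_string_short_formated()}|")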


# __END__

@@ -1,86 +0,0 @@
"""
|
||||
String helpers
|
||||
"""
|
||||
|
||||
from textwrap import shorten
|
||||
|
||||
|
||||
def shorten_string(string: str, length: int, hard_shorten: bool = False, placeholder: str = " [~]") -> str:
|
||||
"""
|
||||
check if entry is too long and cut it, but only for console output
|
||||
Note that if there are no spaces in the string, it will automatically use the hard split mode
|
||||
|
||||
Args:
|
||||
string (str): _description_
|
||||
length (int): _description_
|
||||
hard_shorten (bool): if shorte should be done on fixed string lenght. Default: False
|
||||
placeholder (str): placeholder string. Default: " [~]"
|
||||
|
||||
Returns:
|
||||
str: _description_
|
||||
"""
|
||||
length = int(length)
|
||||
string = str(string)
|
||||
if len(string) > length:
|
||||
if hard_shorten is True or " " not in string:
|
||||
short_string = f"{string[:(length - len(placeholder))]}{placeholder}"
|
||||
else:
|
||||
short_string = shorten(string, width=length, placeholder=placeholder)
|
||||
else:
|
||||
short_string = string
|
||||
|
||||
return short_string
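

# usage sketch: soft mode cuts on word boundaries, hard mode on a fixed index
if __name__ == "__main__":
    text = "the quick brown fox jumps over the lazy dog"
    print(shorten_string(text, 20))                     # "the quick brown [~]"
    print(shorten_string(text, 20, hard_shorten=True))  # "the quick brown  [~]"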


def left_fill(string: str, width: int, char: str = " ") -> str:
    """
    left fill for a certain length to fill a max size
    string is the original string to left pad, width is the maximum width
    that needs to be filled, char is the filler character

    Arguments:
        string {str} -- string to pad
        width {int} -- width to fill to

    Keyword Arguments:
        char {str} -- filler character, must be exactly one character (default: {" "})

    Returns:
        str -- left padded string
    """
    # if the width is negative, use the string length
    if width < 0:
        width = len(string)
    # char can only be one character long
    if len(char) != 1:
        char = " "
    return (
        "{:"
        f"{char}>{width}"
        "}"
    ).format(string)


def format_number(number: float, precision: int = 0) -> str:
    """
    format numbers, currently trailing zeros do not work
    use {:,} or {:,.f} or {:,.<N>f} <N> = number instead of this

    Arguments:
        number {float} -- number to format

    Keyword Arguments:
        precision {int} -- decimal places, 0 to 100 (default: {0})

    Returns:
        str -- formatted number with thousands separators
    """
    if precision < 0 or precision > 100:
        precision = 0
    return (
        "{:,."
        f"{str(precision)}"
        "f}"
    ).format(number)


# __END__

51 src/corelibs/check_handling/regex_constants.py Normal file
@@ -0,0 +1,51 @@
"""
|
||||
List of regex compiled strings that can be used
|
||||
"""
|
||||
|
||||
import re
|
||||
from warnings import warn, deprecated
|
||||
from corelibs_regex_checks.regex_constants import (
|
||||
compile_re as compile_re_ng,
|
||||
SUB_EMAIL_BASIC_REGEX as SUB_EMAIL_BASIC_REGEX_NG,
|
||||
EMAIL_BASIC_REGEX as EMAIL_BASIC_REGEX_NG,
|
||||
NAME_EMAIL_SIMPLE_REGEX as NAME_EMAIL_SIMPLE_REGEX_NG,
|
||||
NAME_EMAIL_BASIC_REGEX as NAME_EMAIL_BASIC_REGEX_NG,
|
||||
DOMAIN_WITH_LOCALHOST_REGEX as DOMAIN_WITH_LOCALHOST_REGEX_NG,
|
||||
DOMAIN_WITH_LOCALHOST_PORT_REGEX as DOMAIN_WITH_LOCALHOST_PORT_REGEX_NG,
|
||||
DOMAIN_REGEX as DOMAIN_REGEX_NG
|
||||
)
|
||||
|
||||
|
||||
@deprecated("Use corelibs_regex_checks.regex_constants.compile_re instead")
|
||||
def compile_re(reg: str) -> re.Pattern[str]:
|
||||
"""
|
||||
compile a regex with verbose flag
|
||||
|
||||
Arguments:
|
||||
reg {str} -- _description_
|
||||
|
||||
Returns:
|
||||
re.Pattern[str] -- _description_
|
||||
"""
|
||||
return compile_re_ng(reg)
|
||||
|
||||
|
||||
# email regex
|
||||
SUB_EMAIL_BASIC_REGEX = SUB_EMAIL_BASIC_REGEX_NG
|
||||
|
||||
EMAIL_BASIC_REGEX = EMAIL_BASIC_REGEX_NG
|
||||
# name + email regex for email sending type like "foo bar" <email@mail.com>
|
||||
NAME_EMAIL_SIMPLE_REGEX = NAME_EMAIL_SIMPLE_REGEX_NG
|
||||
# name + email with the basic regex set
|
||||
NAME_EMAIL_BASIC_REGEX = NAME_EMAIL_BASIC_REGEX_NG
|
||||
# Domain regex with localhost
|
||||
DOMAIN_WITH_LOCALHOST_REGEX = DOMAIN_WITH_LOCALHOST_REGEX_NG
|
||||
# domain regex with loclhost and optional port
|
||||
DOMAIN_WITH_LOCALHOST_PORT_REGEX = DOMAIN_WITH_LOCALHOST_PORT_REGEX_NG
|
||||
# Domain, no localhost
|
||||
DOMAIN_REGEX = DOMAIN_REGEX_NG
|
||||
|
||||
# At the module level, issue a deprecation warning
|
||||
warn("Use corelibs_regex_checks.regex_constants instead", DeprecationWarning, stacklevel=2)


# __END__

27 src/corelibs/check_handling/regex_constants_compiled.py Normal file
@@ -0,0 +1,27 @@
"""
|
||||
List of regex compiled strings that can be used
|
||||
"""
|
||||
|
||||
import warnings
|
||||
|
||||
from corelibs_regex_checks.regex_constants_compiled import (
|
||||
COMPILED_EMAIL_BASIC_REGEX as COMPILED_EMAIL_BASIC_REGEX_NG,
|
||||
COMPILED_NAME_EMAIL_SIMPLE_REGEX as COMPILED_NAME_EMAIL_SIMPLE_REGEX_NG,
|
||||
COMPILED_NAME_EMAIL_BASIC_REGEX as COMPILED_NAME_EMAIL_BASIC_REGEX_NG,
|
||||
COMPILED_DOMAIN_WITH_LOCALHOST_REGEX as COMPILED_DOMAIN_WITH_LOCALHOST_REGEX_NG,
|
||||
COMPILED_DOMAIN_WITH_LOCALHOST_PORT_REGEX as COMPILED_DOMAIN_WITH_LOCALHOST_PORT_REGEX_NG,
|
||||
COMPILED_DOMAIN_REGEX as COMPILED_DOMAIN_REGEX_NG
|
||||
)
|
||||
|
||||
# all above in compiled form
|
||||
COMPILED_EMAIL_BASIC_REGEX = COMPILED_EMAIL_BASIC_REGEX_NG
|
||||
COMPILED_NAME_EMAIL_SIMPLE_REGEX = COMPILED_NAME_EMAIL_SIMPLE_REGEX_NG
|
||||
COMPILED_NAME_EMAIL_BASIC_REGEX = COMPILED_NAME_EMAIL_BASIC_REGEX_NG
|
||||
COMPILED_DOMAIN_WITH_LOCALHOST_REGEX = COMPILED_DOMAIN_WITH_LOCALHOST_REGEX_NG
|
||||
COMPILED_DOMAIN_WITH_LOCALHOST_PORT_REGEX = COMPILED_DOMAIN_WITH_LOCALHOST_PORT_REGEX_NG
|
||||
COMPILED_DOMAIN_REGEX = COMPILED_DOMAIN_REGEX_NG
|
||||
|
||||
# At the module level, issue a deprecation warning
|
||||
warnings.warn("Use corelibs_regex_checks.regex_constants_compiled instead", DeprecationWarning, stacklevel=2)
|
||||
|
||||
# __END__

587 src/corelibs/config_handling/settings_loader.py Normal file
@@ -0,0 +1,587 @@
"""
|
||||
Load settings file for a certain group
|
||||
Check data for existing and valid
|
||||
Additional check for override settings as arguments
|
||||
"""
|
||||
|
||||
import re
|
||||
import configparser
|
||||
from typing import Any, Tuple, Sequence, cast
|
||||
from pathlib import Path
|
||||
from corelibs_var.var_helpers import is_int, is_float, str_to_bool
|
||||
from corelibs.logging_handling.log import Log
|
||||
from corelibs.iterator_handling.list_helpers import convert_to_list, is_list_in_list
|
||||
from corelibs.config_handling.settings_loader_handling.settings_loader_check import SettingsLoaderCheck
|
||||
|
||||
|
||||
class SettingsLoader:
|
||||
"""
|
||||
Settings Loader with Argument parser
|
||||
"""
|
||||
|
||||
# split char
|
||||
DEFAULT_ELEMENT_SPLIT_CHAR: str = ','
|
||||
|
||||
CONVERT_TO_LIST: list[str] = ['str', 'int', 'float', 'bool', 'auto']
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
args: dict[str, Any],
|
||||
config_file: Path,
|
||||
log: 'Log | None' = None,
|
||||
always_print: bool = False
|
||||
) -> None:
|
||||
"""
|
||||
init the Settings loader
|
||||
|
||||
Args:
|
||||
args (dict): Script Arguments
|
||||
config_file (Path): config file including path
|
||||
log (Log | None): Lop class, if set errors are written to this
|
||||
always_print (bool): Set to true to always print errors, even if Log is available
|
||||
element_split_char (str): Split character, default is ','
|
||||
|
||||
Raises:
|
||||
ValueError: _description_
|
||||
"""
|
||||
self.args = args
|
||||
self.config_file = config_file
|
||||
self.log = log
|
||||
self.always_print = always_print
|
||||
# config parser, load config file first
|
||||
self.config_parser: configparser.ConfigParser | None = self.__load_config_file()
|
||||
# for check settings, abort flag
|
||||
self.__check_settings_abort: bool = False
|
||||
|
||||
# error messages for raise ValueError
|
||||
self.__error_msg: list[str] = []

    # MARK: load settings
    def load_settings(
        self,
        config_id: str,
        config_validate: dict[str, list[str]] | None = None,
        allow_not_exist: bool = False
    ) -> dict[str, str]:
        """
        neutral settings loader

        The settings values on the right side are seen as a list if they have "," inside (see ELEMENT SPLIT CHAR)
        but only if the "check:list." is set

        for the allowed entries set, each set is "key => checks", a check set is "check type:settings"
        key: the key name in the settings file
        check: check set with the following allowed entries on the left side for type
        - mandatory: must be set as "mandatory:yes", if the key entry is missing or empty throws error
        - check: see __check_settings for the settings currently available
        - matching: a | list of entries where the value has to match too
        - in: the right side is another KEY value from the settings where this value must be inside
        - split: character to split entries, if set check:list+ must be set if checks are needed
        - convert: convert to int, float -> if element is number convert, else leave as is
        - empty: convert empty to, if nothing set on the right side then convert to None type

        TODO: there should be a config/options argument for general settings

        Args:
            config_id (str): what block to load
            config_validate (dict[str, list[str]]): list of allowed entries sets
            allow_not_exist (bool): If set to True, does not throw an error, but returns empty set

        Returns:
            dict[str, str]: key = value list
        """
        # reset error message list before run
        self.__error_msg = []
        # default set entries
        entry_set_empty: dict[str, str | None] = {}
        # entries that have to be split
        entry_split_char: dict[str, str] = {}
        # entries that should be converted
        entry_convert: dict[str, str] = {}
        # entries that may be overridden from args
        args_override: list[str] = []
        # all the settings for the config id given
        settings: dict[str, dict[str, Any]] = {
            config_id: {},
        }
        if config_validate is None:
            config_validate = {}
        if self.config_parser is not None:
            try:
                # load all data as is, validation is done afterwards
                settings[config_id] = dict(self.config_parser[config_id])
            except KeyError as e:
                if allow_not_exist is True:
                    return {}
                raise ValueError(self.__print(
                    f"[!] Cannot read [{config_id}] block in the file {self.config_file}: {e}",
                    'CRITICAL'
                )) from e
            try:
                for key, checks in config_validate.items():
                    skip = True
                    split_char = self.DEFAULT_ELEMENT_SPLIT_CHAR
                    # if one is set as list in check -> do not skip, but add to list
                    for check in checks:
                        if check.startswith("convert:"):
                            try:
                                [_, convert_to] = check.split(":")
                                if convert_to not in self.CONVERT_TO_LIST:
                                    raise ValueError(self.__print(
                                        f"[!] In [{config_id}] the convert type is invalid {check}: {convert_to}",
                                        'CRITICAL'
                                    ))
                                entry_convert[key] = convert_to
                            except ValueError as e:
                                raise ValueError(self.__print(
                                    f"[!] In [{config_id}] the convert type setup for entry failed: {check}: {e}",
                                    'CRITICAL'
                                )) from e
                        if check.startswith('empty:'):
                            try:
                                [_, empty_set] = check.split(":")
                                if not empty_set:
                                    empty_set = None
                                entry_set_empty[key] = empty_set
                            except ValueError as e:
                                print(f"VALUE ERROR: {key}")
                                raise ValueError(self.__print(
                                    f"[!] In [{config_id}] the empty set type for entry failed: {check}: {e}",
                                    'CRITICAL'
                                )) from e
                        # split char, also check to not set it twice, first one only
                        if check.startswith("split:") and not entry_split_char.get(key):
                            try:
                                [_, split_char] = check.split(":")
                                if len(split_char) == 0:
                                    self.__print(
                                        (
                                            f"[*] In [{config_id}] the [{key}] split char character is empty, "
                                            f"fallback to: {self.DEFAULT_ELEMENT_SPLIT_CHAR}"
                                        ),
                                        "WARNING"
                                    )
                                    split_char = self.DEFAULT_ELEMENT_SPLIT_CHAR
                                entry_split_char[key] = split_char
                                skip = False
                            except ValueError as e:
                                raise ValueError(self.__print(
                                    f"[!] In [{config_id}] the split character setup for entry failed: {check}: {e}",
                                    'CRITICAL'
                                )) from e
                        if check == "args_override:yes":
                            args_override.append(key)
                    if skip:
                        continue
                    if settings[config_id][key]:
                        settings[config_id][key] = [
                            __value.replace(" ", "")
                            for __value in settings[config_id][key].split(split_char)
                        ]
                    else:
                        settings[config_id][key] = []
            except KeyError as e:
                raise ValueError(self.__print(
                    f"[!] Cannot read [{config_id}] block because the entry [{e}] could not be found",
                    'CRITICAL'
                )) from e
        else:
            # ignore error if arguments are set
            if not self.__check_arguments(config_validate, True):
                raise ValueError(self.__print(f"[!] Cannot find file: {self.config_file}", 'CRITICAL'))
            # base set
            settings[config_id] = {}
        # make sure all are set
        # if we have arguments set, they override config settings
        error: bool = False
        for entry, validate in config_validate.items():
            # if we have a command line option set, this one overrides config
            if (args_entry := self.__get_arg(entry)) is not None:
                self.__print(f"[*] Command line option override for: {entry}", 'WARNING')
                if (
                    # only set if flagged as allowed override from args
                    entry in args_override and (
                        (isinstance(args_entry, list) and entry_split_char.get(entry)) or
                        (not isinstance(args_entry, list) and not entry_split_char.get(entry))
                    )
                ):
                    # if args is a list but the entry has no split set, do not set
                    settings[config_id][entry] = args_entry
            # validate checks
            for check in validate:
                # CHECKS
                # - mandatory
                # - check: regex check (see SettingsLoaderCheck class for entries)
                # - matching: entry in given list
                # - in: entry in other setting entry list
                # - length: for string length
                # - range: for int/float range check
                # mandatory check
                if check == "mandatory:yes" and (
                    not settings[config_id].get(entry) or settings[config_id].get(entry) == ['']
                ):
                    error = True
                    self.__print(f"[!] Missing content entry for: {entry}", 'ERROR')
                # skip if empty none
                if settings[config_id].get(entry) is None:
                    continue
                if check.startswith("check:"):
                    # replace the check and run normal checks
                    settings[config_id][entry] = self.__check_settings(
                        check, entry, settings[config_id][entry]
                    )
                    if self.__check_settings_abort is True:
                        error = True
                elif check.startswith("matching:"):
                    checks = check.replace("matching:", "").split("|")
                    if __result := is_list_in_list(convert_to_list(settings[config_id][entry]), list(checks)):
                        error = True
                        self.__print(f"[!] [{entry}] '{__result}' not matching {checks}", 'ERROR')
                elif check.startswith("in:"):
                    check = check.replace("in:", "")
                    # skip if check does not exist, and set error
                    if settings[config_id].get(check) is None:
                        error = True
                        self.__print(f"[!] [{entry}] '{check}' target does not exist", 'ERROR')
                        continue
                    # entry must be in check entry
                    # in for list, else equal with convert to string
                    if (
                        __result := is_list_in_list(
                            convert_to_list(settings[config_id][entry]),
                            __checks := convert_to_list(settings[config_id][check])
                        )
                    ):
                        self.__print(f"[!] [{entry}] '{__result}' must be in the '{__checks}' values list", 'ERROR')
                        error = True
                elif check.startswith('length:'):
                    check = check.replace("length:", "")
                    # length can be: n, n-, n-m, -m
                    # as: equal, >=, >=<, =<
                    self.__build_from_to_equal(entry, check)
                    if not self.__length_range_validate(
                        entry,
                        'length',
                        cast(list[str], convert_to_list(settings[config_id][entry])),
                        self.__build_from_to_equal(entry, check, convert_to_int=True)
                    ):
                        error = True
                elif check.startswith('range:'):
                    check = check.replace("range:", "")
                    if not self.__length_range_validate(
                        entry,
                        'range',
                        cast(list[str], convert_to_list(settings[config_id][entry])),
                        self.__build_from_to_equal(entry, check)
                    ):
                        error = True
                # after post clean up: if we have empty entries and we are mandatory
                if check == "mandatory:yes" and (
                    not settings[config_id].get(entry) or settings[config_id].get(entry) == ['']
                ):
                    error = True
                    self.__print(f"[!] Missing content entry for: {entry}", 'ERROR')
        if error is True:
            self.__print("[!] Missing or incorrect settings data. Cannot proceed", 'CRITICAL')
            raise ValueError(
                "Missing or incorrect settings data. Cannot proceed: " + "; ".join(self.__error_msg)
            )
        # set empty
        for [entry, empty_set] in entry_set_empty.items():
            # if set, skip, else set to empty value
            if settings[config_id].get(entry) or isinstance(settings[config_id].get(entry), list):
                continue
            settings[config_id][entry] = empty_set
        # Convert input
        for [entry, convert_type] in entry_convert.items():
            if convert_type in ["int", "any"] and is_int(settings[config_id][entry]):
                settings[config_id][entry] = int(settings[config_id][entry])
            elif convert_type in ["float", "any"] and is_float(settings[config_id][entry]):
                settings[config_id][entry] = float(settings[config_id][entry])
            elif convert_type in ["bool", "any"] and (
                settings[config_id][entry].lower() == "true" or
                settings[config_id][entry].lower() == "false"
            ):
                try:
                    settings[config_id][entry] = str_to_bool(settings[config_id][entry])
                except ValueError:
                    self.__print(
                        f"[!] Could not convert to boolean for '{entry}': {settings[config_id][entry]}",
                        'ERROR'
                    )
            # string is always string
            # TODO: empty and int/float/bool: set to none?

        return settings[config_id]

    # MARK: build from/to/equal logic
    def __build_from_to_equal(
        self, entry: str, check: str, convert_to_int: bool = False
    ) -> Tuple[float | None, float | None, float | None]:
        """
        split out the "n-m" part to get the from/to/equal values

        Arguments:
            entry {str} -- entry name, for error messages
            check {str} -- check string in the form "n", "n-", "n-m" or "-m"

        Returns:
            Tuple[float | None, float | None, float | None] -- (from, to, equal)

        Raises:
            ValueError: if range/length entries are not float
        """
        __from = None
        __to = None
        __equal = None
        try:
            [__from, __to] = check.split('-')
            if (__from and not is_float(__from)) or (__to and not is_float(__to)):
                raise ValueError(self.__print(
                    f"[{entry}] Check value for length is not a number: {check}",
                    'CRITICAL'
                ))
            if len(__from) == 0:
                __from = None
            if len(__to) == 0:
                __to = None
        except ValueError as e:
            if not is_float(__equal := check):
                raise ValueError(self.__print(
                    f"[{entry}] Check value for length is not a valid number: {check}",
                    'CRITICAL'
                )) from e
            if len(__equal) == 0:
                __equal = None
        # make sure this is all int or None
        if __from is not None:
            __from = int(__from) if convert_to_int else float(__from)
        if __to is not None:
            __to = int(__to) if convert_to_int else float(__to)
        if __equal is not None:
            __equal = int(__equal) if convert_to_int else float(__equal)
        return (
            __from,
            __to,
            __equal
        )

    # MARK: length/range validation
    def __length_range_validate(
        self,
        entry: str,
        check_type: str,
        values: Sequence[str | int | float],
        check: Tuple[float | None, float | None, float | None],
    ) -> bool:
        (__from, __to, __equal) = check
        valid = True
        for value_raw in convert_to_list(values):
            # skip not set values for the range check
            if not value_raw:
                continue
            value = 0
            error_mark = ''
            if check_type == 'length':
                error_mark = 'length'
                value = len(str(value_raw))
            elif check_type == 'range':
                error_mark = 'range'
                value = float(str(value_raw))
            if __equal is not None and value != __equal:
                self.__print(f"[!] [{entry}] '{value_raw}' {error_mark} does not match {__equal}", 'ERROR')
                valid = False
                continue
            if __from is not None and __to is None and value < __from:
                self.__print(f"[!] [{entry}] '{value_raw}' {error_mark} smaller than minimum {__from}", 'ERROR')
                valid = False
                continue
            if __from is None and __to is not None and value > __to:
                self.__print(f"[!] [{entry}] '{value_raw}' {error_mark} larger than maximum {__to}", 'ERROR')
                valid = False
                continue
            if __from is not None and __to is not None and (
                value < __from or value > __to
            ):
                self.__print(
                    f"[!] [{entry}] '{value_raw}' {error_mark} outside valid range {__from} to {__to}",
                    'ERROR'
                )
                valid = False
                continue
        return valid

    # MARK: load config file data from file
    def __load_config_file(self) -> configparser.ConfigParser | None:
        """
        load and parse the config file
        if not loadable return None
        """
        # remove file name and get base path and check
        if not self.config_file.parent.is_dir():
            raise ValueError(f"Cannot find the config folder: {self.config_file.parent}")
        config = configparser.ConfigParser()
        if self.config_file.is_file():
            config.read(self.config_file)
            return config
        return None

    # MARK: regex clean up one entry
    def __clean_invalid_setting(
        self,
        entry: str,
        validate: str,
        value: str,
        regex: str,
        regex_clean: str | None,
        replace: str = "",
        print_error: bool = True,
    ) -> str:
        """
        check if a string is invalid, print an optional error message and clean up the string

        Args:
            entry (str): what entry key
            validate (str): validate type
            value (str): the value to check against
            regex (str): regex used for checking as r'...'
            regex_clean (str): regex used for cleaning as r'...'
            replace (str): replace with character. Defaults to ''
            print_error (bool): print the error message. Defaults to True
        """
        check = re.compile(regex, re.VERBOSE)
        clean: re.Pattern[str] | None = None
        if regex_clean is not None:
            clean = re.compile(regex_clean, re.VERBOSE)
        # value must be set if clean is None, else an empty value is allowed and will fail
        if ((clean is None and value) or clean) and not check.search(value):
            self.__print(
                f"[!] Invalid content for '{entry}' with check '{validate}' and data: {value}",
                'ERROR', print_error
            )
            # clean up if clean up is not None, else return EMPTY string
            if clean is not None:
                return clean.sub(replace, value)
            self.__check_settings_abort = True
            return ''
        # else return as is
        return value

    # MARK: check settings, regex
    def __check_settings(
        self,
        check: str, entry: str, setting_value: list[str] | str
    ) -> list[str] | str:
        """
        check each setting is valid
        The settings are defined in the SettingsLoaderCheck class

        Args:
            check (str): What check to run
            entry (str): Variable name, just for information message
            setting_value (list[str] | str): settings value data

        Returns:
            list[str] | str: cleaned up settings value data
        """
        check = check.replace("check:", "")
        # get the check settings
        __check_settings = SettingsLoaderCheck.CHECK_SETTINGS.get(check)
        if __check_settings is None:
            raise ValueError(self.__print(
                f"[{entry}] Cannot get SettingsLoaderCheck.CHECK_SETTINGS for {check}",
                'CRITICAL'
            ))
        # reset the abort check
        self.__check_settings_abort = False
        # either removes or replaces invalid characters in the list
        if isinstance(setting_value, list):
            # clean up invalid characters
            # loop over result and keep only filled (strip empty)
            setting_value = [e for e in [
                self.__clean_invalid_setting(
                    entry, check, str(__entry),
                    __check_settings['regex'], __check_settings['regex_clean'], __check_settings['replace']
                )
                for __entry in setting_value
            ] if e]
        else:
            setting_value = self.__clean_invalid_setting(
                entry, check, str(setting_value),
                __check_settings['regex'], __check_settings['regex_clean'], __check_settings['replace']
            )
        # else:
        #     self.__print("[!] Unknown type to check", 'ERROR')
        # return data
        return setting_value

    # MARK: check arguments, for config file load fail
    def __check_arguments(self, arguments: dict[str, list[str]], all_set: bool = False) -> bool:
        """
        check if at least one argument is set

        Args:
            arguments (dict[str, list[str]]): argument name to validate list
            all_set (bool): if True, all mandatory arguments must be set

        Returns:
            bool: True if the needed arguments are set
        """
        count_set = 0
        count_arguments = 0
        has_argument = False
        for argument, validate in arguments.items():
            # if the argument is mandatory add to count, if not mandatory set "has" to skip the error
            mandatory = any(entry == "mandatory:yes" for entry in validate)
            if not mandatory:
                has_argument = True
                continue
            count_arguments += 1
            if self.__get_arg(argument):
                has_argument = True
                count_set += 1
        # for all set, True only if all are set
        if all_set is True:
            has_argument = count_set == count_arguments

        return has_argument

    # MARK: get argument from args dict
    def __get_arg(self, entry: str) -> Any:
        """
        check if an argument entry exists, if None -> returns None, else the value of the argument

        Arguments:
            entry {str} -- argument name

        Returns:
            Any -- argument value, or None if not set
        """
        if self.args.get(entry) is None:
            return None
        return self.args.get(entry)

    # MARK: error print
    def __print(self, msg: str, level: str, print_error: bool = True) -> str:
        """
        print out an error, if the Log class is set then print to log instead

        Arguments:
            msg {str} -- message to print or log
            level {str} -- log level name

        Keyword Arguments:
            print_error {bool} -- print the error message (default: {True})
        """
        if self.log is not None:
            if not Log.validate_log_level(level):
                level = 'ERROR'
            self.log.logger.log(Log.get_log_level_int(level), msg, stacklevel=2)
        if self.log is None or self.always_print:
            if print_error:
                print(f"[SettingsLoader] {msg}")
        if level == 'ERROR':
            # remove any prefix [!] for error message list
            self.__error_msg.append(msg.replace('[!] ', '').strip())
        return msg
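

# usage sketch (hypothetical config file and keys): load a [mail] block and
# validate it with the check-string DSL described in load_settings
if __name__ == "__main__":
    loader = SettingsLoader({}, Path("config/settings.ini"))
    mail_cfg = loader.load_settings("mail", {
        "sender": ["mandatory:yes", "check:string.email.basic"],
        "recipients": ["mandatory:yes", "split:,", "check:string.email.basic"],
        "retries": ["convert:int", "range:0-10", "empty:0"],
    })
    print(mail_cfg)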


# __END__

@@ -0,0 +1,81 @@
"""
|
||||
Class of checks that can be run on value entries
|
||||
"""
|
||||
|
||||
from typing import TypedDict
|
||||
from corelibs.check_handling.regex_constants import (
|
||||
EMAIL_BASIC_REGEX, DOMAIN_WITH_LOCALHOST_REGEX, DOMAIN_WITH_LOCALHOST_PORT_REGEX, DOMAIN_REGEX
|
||||
)
|
||||
|
||||
|
||||
class SettingsLoaderCheckValue(TypedDict):
|
||||
"""Settings check entries"""
|
||||
|
||||
regex: str
|
||||
# if None, then on error we exit, eles we clean up data
|
||||
regex_clean: str | None
|
||||
replace: str
|
||||
|
||||
|
||||
class SettingsLoaderCheck:
|
||||
"""
|
||||
check:<NAME> or check:list+<NAME>
|
||||
"""
|
||||
|
||||
CHECK_SETTINGS: dict[str, SettingsLoaderCheckValue] = {
|
||||
"int": {
|
||||
"regex": r"^[0-9]+$",
|
||||
"regex_clean": r"[^0-9]",
|
||||
"replace": "",
|
||||
},
|
||||
"string.alphanumeric": {
|
||||
"regex": r"^[a-zA-Z0-9]+$",
|
||||
"regex_clean": r"[^a-zA-Z0-9]",
|
||||
"replace": "",
|
||||
},
|
||||
"string.alphanumeric.lower.dash": {
|
||||
"regex": r"^[a-z0-9-]+$",
|
||||
"regex_clean": r"[^a-z0-9-]",
|
||||
"replace": "",
|
||||
},
|
||||
# A-Z a-z 0-9 _ - . ONLY
|
||||
# This one does not remove, but replaces with _
|
||||
"string.alphanumeric.extended.replace": {
|
||||
"regex": r"^[_.a-zA-Z0-9-]+$",
|
||||
"regex_clean": r"[^_.a-zA-Z0-9-]",
|
||||
"replace": "_",
|
||||
},
|
||||
# This does a baisc email check, only alphanumeric with special characters
|
||||
"string.email.basic": {
|
||||
"regex": EMAIL_BASIC_REGEX,
|
||||
"regex_clean": None,
|
||||
"replace": "",
|
||||
},
|
||||
# Domain check, including localhost no port
|
||||
"string.domain.with-localhost": {
|
||||
"regex": DOMAIN_WITH_LOCALHOST_REGEX,
|
||||
"regex_clean": None,
|
||||
"replace": "",
|
||||
},
|
||||
# Domain check, with localhost and port
|
||||
"string.domain.with-localhost.port": {
|
||||
"regex": DOMAIN_WITH_LOCALHOST_PORT_REGEX,
|
||||
"regex_clean": None,
|
||||
"replace": "",
|
||||
},
|
||||
# Domain check, no pure localhost allowed
|
||||
"string.domain": {
|
||||
"regex": DOMAIN_REGEX,
|
||||
"regex_clean": None,
|
||||
"replace": "",
|
||||
},
|
||||
# Basic date check, does not validate date itself
|
||||
"string.date": {
|
||||
"regex": r"^\d{4}[/-]\d{1,2}[/-]\d{1,2}$",
|
||||
"regex_clean": None,
|
||||
"replace": "",
|
||||
}
|
||||
}


# __END__

39 src/corelibs/csv_handling/csv_interface.py Normal file
@@ -0,0 +1,39 @@
"""
|
||||
Write to CSV file
|
||||
- each class set is one file write with one header set
|
||||
"""
|
||||
|
||||
from warnings import warn
|
||||
from corelibs_csv.csv_interface import (
|
||||
CsvReader as CoreLibsCsvReader, CsvWriter as CoreLibsCsvWriter,
|
||||
ENCODING as CoreLibsEncoding,
|
||||
ENCODING_UTF8_SIG as CoreLibsEncodingUtf8Sig,
|
||||
DELIMITER as CoreLibsDelimiter,
|
||||
QUOTECHAR as CoreLibsQuotechar,
|
||||
QUOTING as CoreLibsQuoting
|
||||
)
|
||||
|
||||
|
||||
ENCODING = CoreLibsEncoding
|
||||
ENCODING_UTF8_SIG = CoreLibsEncodingUtf8Sig
|
||||
DELIMITER = CoreLibsDelimiter
|
||||
QUOTECHAR = CoreLibsQuotechar
|
||||
# type: _QuotingType
|
||||
QUOTING = CoreLibsQuoting
|
||||
|
||||
|
||||
class CsvWriter(CoreLibsCsvWriter):
|
||||
"""
|
||||
write to a CSV file
|
||||
"""
|
||||
|
||||
|
||||
class CsvReader(CoreLibsCsvReader):
|
||||
"""
|
||||
read from a CSV file
|
||||
"""
|
||||
|
||||
|
||||
warn("Use corelibs_csv.csv_interface instead", DeprecationWarning, stacklevel=2)
|
||||
|
||||
# __END__
|
||||
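
This file establishes the re-export shim pattern used throughout this refactor: the old module keeps its names as thin subclasses of the new corelibs_csv classes and emits a DeprecationWarning once at import time. A hedged sketch of what a caller would observe (warning capture only; behavior depends on import caching):

import warnings

with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")
    from corelibs.csv_handling import csv_interface  # triggers the module-level warn()

assert any(issubclass(w.category, DeprecationWarning) for w in caught)
writer_cls = csv_interface.CsvWriter  # still usable; subclass of the corelibs_csv class
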
235  src/corelibs/datetime_handling/datetime_helpers.py  Normal file
@@ -0,0 +1,235 @@
"""
Various string based date/time helpers
"""

from datetime import datetime, time
from warnings import deprecated
from zoneinfo import ZoneInfo
from corelibs_datetime import datetime_helpers


@deprecated("Use corelibs_datetime.datetime_helpers.create_time instead")
def create_time(timestamp: float, timestamp_format: str = "%Y-%m-%d %H:%M:%S") -> str:
    """
    just takes a timestamp and prints out a human readable format

    Arguments:
        timestamp {float} -- _description_

    Keyword Arguments:
        timestamp_format {_type_} -- _description_ (default: {"%Y-%m-%d %H:%M:%S"})

    Returns:
        str -- _description_
    """
    return datetime_helpers.create_time(timestamp, timestamp_format)


@deprecated("Use corelibs_datetime.datetime_helpers.get_system_timezone instead")
def get_system_timezone():
    """Get system timezone using datetime's automatic detection"""
    # Get current time with system timezone
    return datetime_helpers.get_system_timezone()


@deprecated("Use corelibs_datetime.datetime_helpers.parse_timezone_data instead")
def parse_timezone_data(timezone_tz: str = '') -> ZoneInfo:
    """
    parses a string to get the ZoneInfo
    If not set or not valid gets local time,
    if that is not possible get UTC

    Keyword Arguments:
        timezone_tz {str} -- _description_ (default: {''})

    Returns:
        ZoneInfo -- _description_
    """
    return datetime_helpers.parse_timezone_data(timezone_tz)


@deprecated("Use corelibs_datetime.datetime_helpers.get_datetime_iso8601 instead")
def get_datetime_iso8601(timezone_tz: str | ZoneInfo = '', sep: str = 'T', timespec: str = 'microseconds') -> str:
    """
    set a datetime in the iso8601 format with microseconds

    Returns:
        str -- _description_
    """
    try:
        return datetime_helpers.get_datetime_iso8601(timezone_tz, sep, timespec)
    except KeyError as e:
        raise ValueError(f"Deprecated ValueError, change to KeyError: {e}") from e


@deprecated("Use corelibs_datetime.datetime_helpers.validate_date instead")
def validate_date(date: str, not_before: datetime | None = None, not_after: datetime | None = None) -> bool:
    """
    check if Y-m-d or Y/m/d are parsable and valid

    Arguments:
        date {str} -- _description_

    Returns:
        bool -- _description_
    """
    return datetime_helpers.validate_date(date, not_before, not_after)


@deprecated("Use corelibs_datetime.datetime_helpers.parse_flexible_date instead")
def parse_flexible_date(
    date_str: str,
    timezone_tz: str | ZoneInfo | None = None,
    shift_time_zone: bool = True
) -> datetime | None:
    """
    Parse a date string in multiple formats
    will add time zone info if not None
    by default it will change the TZ and time to the new time zone
    if no TZ info is set in date_str, then localtime is assumed

    Arguments:
        date_str {str} -- _description_

    Keyword Arguments:
        timezone_tz {str | ZoneInfo | None} -- _description_ (default: {None})
        shift_time_zone {bool} -- _description_ (default: {True})

    Returns:
        datetime | None -- _description_
    """
    return datetime_helpers.parse_flexible_date(
        date_str,
        timezone_tz,
        shift_time_zone
    )


@deprecated("Use corelibs_datetime.datetime_helpers.compare_dates instead")
def compare_dates(date1_str: str, date2_str: str) -> None | bool:
    """
    compare two dates, if the first one is newer than the second one return True
    If the dates are equal then False will be returned
    on error return None

    Arguments:
        date1_str {str} -- _description_
        date2_str {str} -- _description_

    Returns:
        None | bool -- _description_
    """
    return datetime_helpers.compare_dates(date1_str, date2_str)


@deprecated("Use corelibs_datetime.datetime_helpers.find_newest_datetime_in_list instead")
def find_newest_datetime_in_list(date_list: list[str]) -> None | str:
    """
    Find the newest date from a list of ISO 8601 formatted date strings.
    Handles potential parsing errors gracefully.

    Args:
        date_list (list): List of date strings in format '2025-08-06T16:17:39.747+09:00'

    Returns:
        str: The date string with the newest/latest date, or None if the list is empty or all dates are invalid
    """
    return datetime_helpers.find_newest_datetime_in_list(date_list)


@deprecated("Use corelibs_datetime.datetime_helpers.parse_day_of_week_range instead")
def parse_day_of_week_range(dow_days: str) -> list[tuple[int, str]]:
    """
    Parse a day of week list/range string and return a list of tuples with day index and name.
    Allowed are short (e.g. Mon) or long names (e.g. Monday).

    Arguments:
        dow_days {str} -- A comma-separated list of days or ranges (e.g., "Mon,Wed-Fri")

    Raises:
        ValueError: If the input format is invalid or if duplicate days are found.

    Returns:
        list[tuple[int, str]] -- A list of tuples containing the day index and name.
    """
    # we have Sun twice because it can be 0 or 7
    # Mon is 1 and Sun is 7, which is the ISO standard
    try:
        return datetime_helpers.parse_day_of_week_range(dow_days)
    except KeyError as e:
        raise ValueError(f"Deprecated ValueError, change to KeyError: {e}") from e


@deprecated("Use corelibs_datetime.datetime_helpers.parse_time_range instead")
def parse_time_range(time_str: str, time_format: str = "%H:%M") -> tuple[time, time]:
    """
    Parse a time range string in the format "HH:MM-HH:MM" and return a tuple of two time objects.

    Arguments:
        time_str {str} -- The time range string to parse.

    Raises:
        ValueError: Invalid time block set
        ValueError: Invalid time format
        ValueError: Start time must be before end time

    Returns:
        tuple[time, time] -- start time, end time: formatted with leading zeros
    """
    try:
        return datetime_helpers.parse_time_range(time_str, time_format)
    except KeyError as e:
        raise ValueError(f"Deprecated ValueError, change to KeyError: {e}") from e


@deprecated("Use corelibs_datetime.datetime_helpers.times_overlap_or_connect instead")
def times_overlap_or_connect(time1: tuple[time, time], time2: tuple[time, time], allow_touching: bool = False) -> bool:
    """
    Check if two time ranges overlap or connect

    Args:
        time1 (tuple): (start_time, end_time) for first range
        time2 (tuple): (start_time, end_time) for second range
        allow_touching (bool): If True, touching ranges (e.g., 8:00-10:00 and 10:00-12:00) are allowed

    Returns:
        bool: True if ranges overlap or connect (based on allow_touching)
    """
    return datetime_helpers.times_overlap_or_connect(time1, time2, allow_touching)


@deprecated("Use corelibs_datetime.datetime_helpers.is_time_in_range instead")
def is_time_in_range(current_time: str, start_time: str, end_time: str) -> bool:
    """
    Check if current_time is within start_time and end_time (inclusive)
    Time format: "HH:MM" (24-hour format)

    Arguments:
        current_time {str} -- _description_
        start_time {str} -- _description_
        end_time {str} -- _description_

    Returns:
        bool -- _description_
    """
    # Convert string times to time objects
    return datetime_helpers.is_time_in_range(current_time, start_time, end_time)


@deprecated("Use corelibs_datetime.datetime_helpers.reorder_weekdays_from_today instead")
def reorder_weekdays_from_today(base_day: str) -> dict[int, str]:
    """
    Reorder the days of the week starting from the specified base_day.

    Arguments:
        base_day {str} -- The day to start the week from (e.g., "Mon").

    Returns:
        dict[int, str] -- A dictionary mapping day numbers to day names.
    """
    try:
        return datetime_helpers.reorder_weekdays_from_today(base_day)
    except KeyError as e:
        raise ValueError(f"Deprecated ValueError, change to KeyError: {e}") from e

# __END__
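
Several of these shims translate the new library's KeyError back into the ValueError the old API contract promised. A hedged usage sketch (the day tuples are illustrative; the actual parsing lives in corelibs_datetime, which this diff only wraps):

try:
    days = parse_day_of_week_range("Mon,Wed-Fri")
    # plausibly [(1, 'Mon'), (3, 'Wed'), (4, 'Thu'), (5, 'Fri')]
except ValueError as e:
    # old callers keep catching ValueError even though the new
    # corelibs_datetime implementation raises KeyError internally
    print(f"invalid day-of-week range: {e}")
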
88  src/corelibs/datetime_handling/timestamp_convert.py  Normal file
@@ -0,0 +1,88 @@
"""
Convert timestamp strings with time units into seconds and vice versa.
"""

from warnings import deprecated
from corelibs_datetime import timestamp_convert
from corelibs_datetime.timestamp_convert import TimeParseError as NewTimeParseError, TimeUnitError as NewTimeUnitError


@deprecated("Use corelibs_datetime.timestamp_convert.TimeParseError instead")
class TimeParseError(Exception):
    """Custom exception for time parsing errors."""


@deprecated("Use corelibs_datetime.timestamp_convert.TimeUnitError instead")
class TimeUnitError(Exception):
    """Custom exception for time unit errors."""


@deprecated("Use corelibs_datetime.timestamp_convert.convert_to_seconds instead")
def convert_to_seconds(time_string: str | int | float) -> int:
    """
    Convert a string with time units into seconds
    The following units are allowed
    Y: 365 days
    M: 30 days
    d, h, m, s

    Arguments:
        time_string {str} -- _description_

    Raises:
        ValueError: _description_

    Returns:
        int -- _description_
    """

    # skip out if this is a number of any type
    # numbers will be made float, rounded and then converted to int
    try:
        return timestamp_convert.convert_to_seconds(time_string)
    except NewTimeParseError as e:
        raise TimeParseError(f"Deprecated, use corelibs_datetime.timestamp_convert.TimeParseError: {e}") from e
    except NewTimeUnitError as e:
        raise TimeUnitError(f"Deprecated, use corelibs_datetime.timestamp_convert.TimeUnitError: {e}") from e


@deprecated("Use corelibs_datetime.timestamp_convert.seconds_to_string instead")
def seconds_to_string(seconds: str | int | float, show_microseconds: bool = False) -> str:
    """
    Convert seconds to a compact human readable format (e.g., "1d 2h 3m 4.567s")
    Zero values are omitted.
    Milliseconds, if requested, are added as the fractional part of the seconds.
    Supports negative values with "-" prefix
    if not int or float, will return as is

    Args:
        seconds (float): Time in seconds (can be negative)
        show_microseconds (bool): Whether to show microseconds precision

    Returns:
        str: Compact human readable time format
    """
    return timestamp_convert.seconds_to_string(seconds, show_microseconds)


@deprecated("Use corelibs_datetime.timestamp_convert.convert_timestamp instead")
def convert_timestamp(timestamp: float | int | str, show_microseconds: bool = True) -> str:
    """
    format a timestamp into a human readable format. This function will add 0 values between set values,
    for example if we have 1d 1s it would output 1d 0h 0m 1s
    Milliseconds will be shown if set, and added with ms at the end
    Negative values will be prefixed with "-"
    if not int or float, will return as is

    Arguments:
        timestamp {float} -- _description_

    Keyword Arguments:
        show_microseconds {bool} -- _description_ (default: {True})

    Returns:
        str -- _description_
    """
    return timestamp_convert.convert_timestamp(timestamp, show_microseconds)

# __END__
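
A hedged round-trip sketch of the unit grammar the docstrings describe (Y = 365 days, M = 30 days; exact parsing and output formatting live in corelibs_datetime):

assert convert_to_seconds(90) == 90            # plain numbers pass through
assert convert_to_seconds("1m 30s") == 90      # minutes + seconds
assert convert_to_seconds("1d") == 86400       # 1 day

# and back to a compact string, zero units omitted:
print(seconds_to_string(90061))                # plausibly "1d 1h 1m 1s"
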
21  src/corelibs/datetime_handling/timestamp_strings.py  Normal file
@@ -0,0 +1,21 @@
"""
Current timestamp strings and time zones
"""

from warnings import deprecated
from zoneinfo import ZoneInfo
from corelibs_datetime import timestamp_strings


class TimestampStrings(timestamp_strings.TimestampStrings):
    """
    set default time stamps
    """

    TIME_ZONE: str = 'Asia/Tokyo'

    @deprecated("Use corelibs_datetime.timestamp_strings.TimestampStrings instead")
    def __init__(self, time_zone: str | ZoneInfo | None = None):
        super().__init__(time_zone)

# __END__
76  src/corelibs/db_handling/sql_main.py  Normal file
@@ -0,0 +1,76 @@
"""
Main SQL base for any SQL calls
This is a wrapper for SQLiteIO or other future DB Interfaces
[Note: at the moment only SQLiteIO is implemented]
- on class creation, connects and raises ValueError on failure
- connect method checks if already connected and warns
- connect method fails with ValueError if no valid target is selected (SQL wrapper type)
- connected check method
- a process method that returns data as a list, or False on end or error

TODO: adapt more CoreLibs DB IO class flow here
"""

from typing import TYPE_CHECKING, Any, Literal
from corelibs_stack_trace.stack import call_stack
from corelibs.db_handling.sqlite_io import SQLiteIO
if TYPE_CHECKING:
    from corelibs.logging_handling.log import Logger


IDENT_SPLIT_CHARACTER: str = ':'


class SQLMain:
    """Main SQL interface class"""
    def __init__(self, log: 'Logger', db_ident: str):
        self.log = log
        self.dbh: SQLiteIO | None = None
        self.db_target: str | None = None
        self.connect(db_ident)
        if not self.connected():
            raise ValueError(f'Failed to connect to database [{call_stack()}]')

    def connect(self, db_ident: str):
        """setup basic connection"""
        if self.dbh is not None and self.dbh.conn is not None:
            self.log.warning(f"A database connection already exists for: {self.db_target} [{call_stack()}]")
            return
        # split only on the first separator so the DSN itself may contain ':'
        self.db_target, db_dsn = db_ident.split(IDENT_SPLIT_CHARACTER, 1)
        match self.db_target:
            case 'sqlite':
                # this is a Path only at the moment
                self.dbh = SQLiteIO(self.log, db_dsn, row_factory='Dict')
            case _:
                raise ValueError(f'SQL interface for {self.db_target} is not implemented [{call_stack()}]')
        if not self.dbh.db_connected():
            raise ValueError(f"DB Connection failed for: {self.db_target} [{call_stack()}]")

    def close(self):
        """close connection"""
        if self.dbh is None or not self.connected():
            return
        # self.log.info(f"Close DB Connection: {self.db_target} [{call_stack()}]")
        self.dbh.db_close()

    def connected(self) -> bool:
        """check connection"""
        if self.dbh is None or not self.dbh.db_connected():
            self.log.warning(f"No connection [{call_stack()}]")
            return False
        return True

    def process_query(
        self, query: str, params: tuple[Any, ...] | None = None
    ) -> list[tuple[Any, ...]] | list[dict[str, Any]] | Literal[False]:
        """mini wrapper for execute query"""
        if self.dbh is not None:
            result = self.dbh.execute_query(query, params)
            if result is False:
                return False
        else:
            self.log.error(f"Problem connecting to db: {self.db_target} [{call_stack()}]")
            return False
        return result

# __END__
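
A hedged usage sketch of the '<target>:<dsn>' ident format; the database file name and the Logger construction are illustrative, not from this diff:

log = Logger(...)                      # assumed existing corelibs Logger
db = SQLMain(log, "sqlite:app.db")     # ident format: '<target>:<dsn>'
rows = db.process_query("SELECT name FROM sqlite_master WHERE type = ?", ("table",))
if rows is not False:
    for row in rows:                   # dict rows, since SQLMain passes row_factory='Dict'
        print(row["name"])
db.close()
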
214  src/corelibs/db_handling/sqlite_io.py  Normal file
@@ -0,0 +1,214 @@
"""
SQLite DB::IO
Will be moved to the CoreLibs
also method names are subject to change
"""

# import gc
from pathlib import Path
from typing import Any, Literal, TYPE_CHECKING
import sqlite3
from corelibs_stack_trace.stack import call_stack
if TYPE_CHECKING:
    from corelibs.logging_handling.log import Logger


class SQLiteIO:
    """Mini SQLite interface"""

    def __init__(
        self,
        log: 'Logger',
        db_name: str | Path,
        autocommit: bool = False,
        enable_fkey: bool = True,
        row_factory: str | None = None
    ):
        self.log = log
        self.db_name = db_name
        self.autocommit = autocommit
        self.enable_fkey = enable_fkey
        self.row_factory = row_factory
        self.conn: sqlite3.Connection | None = self.db_connect()

    # def __del__(self):
    #     self.db_close()

    def db_connect(self) -> sqlite3.Connection | None:
        """
        Connect to SQLite database, create if it doesn't exist
        """
        try:
            # Connect to database (creates if doesn't exist)
            self.conn = sqlite3.connect(self.db_name, autocommit=self.autocommit)
            # honor the enable_fkey flag instead of hard-coding True
            self.conn.setconfig(sqlite3.SQLITE_DBCONFIG_ENABLE_FKEY, self.enable_fkey)
            # self.conn.execute("PRAGMA journal_mode=WAL")
            # self.log.debug(f"Connected to database: {self.db_name}")

            def dict_factory(cursor: sqlite3.Cursor, row: list[Any]):
                fields = [column[0] for column in cursor.description]
                return dict(zip(fields, row))

            match self.row_factory:
                case 'Row':
                    self.conn.row_factory = sqlite3.Row
                case 'Dict':
                    self.conn.row_factory = dict_factory
                case _:
                    self.conn.row_factory = None

            return self.conn
        except (sqlite3.Error, sqlite3.OperationalError) as e:
            self.log.error(f"Error connecting to database [{type(e).__name__}] [{self.db_name}]: {e} [{call_stack()}]")
            self.log.error(f"Error code: {e.sqlite_errorcode if hasattr(e, 'sqlite_errorcode') else 'N/A'}")
            self.log.error(f"Error name: {e.sqlite_errorname if hasattr(e, 'sqlite_errorname') else 'N/A'}")
            return None

    def db_close(self):
        """close connection"""
        if self.conn is not None:
            self.conn.close()
            self.conn = None

    def db_connected(self) -> bool:
        """
        Return True if the db connection is not None

        Returns:
            bool -- _description_
        """
        return self.conn is not None

    def __content_exists(self, content_name: str, sql_type: str) -> bool:
        """
        Check if some content name for a certain type exists

        Arguments:
            content_name {str} -- _description_
            sql_type {str} -- _description_

        Returns:
            bool -- _description_
        """
        if self.conn is None:
            return False
        try:
            cursor = self.conn.cursor()
            cursor.execute("""
                SELECT name
                FROM sqlite_master
                WHERE type = ? AND name = ?
            """, (sql_type, content_name,))
            return cursor.fetchone() is not None
        except sqlite3.Error as e:
            self.log.error(f"Error checking table [{content_name}/{sql_type}] existence: {e} [{call_stack()}]")
            return False

    def table_exists(self, table_name: str) -> bool:
        """
        Check if a table exists in the database
        """
        return self.__content_exists(table_name, 'table')

    def trigger_exists(self, trigger_name: str) -> bool:
        """
        Check if a trigger exists
        """
        return self.__content_exists(trigger_name, 'trigger')

    def index_exists(self, index_name: str) -> bool:
        """
        Check if an index exists
        """
        return self.__content_exists(index_name, 'index')

    def meta_data_detail(self, table_name: str) -> list[tuple[Any, ...]] | list[dict[str, Any]] | Literal[False]:
        """table detail"""
        query_show_table = """
            SELECT
                ti.cid, ti.name, ti.type, ti.'notnull', ti.dflt_value, ti.pk,
                il_ii.idx_name, il_ii.idx_unique, il_ii.idx_origin, il_ii.idx_partial
            FROM
                sqlite_schema AS m,
                pragma_table_info(m.name) AS ti
            LEFT JOIN (
                SELECT
                    il.name AS idx_name, il.'unique' AS idx_unique, il.origin AS idx_origin, il.partial AS idx_partial,
                    ii.cid AS tbl_cid
                FROM
                    sqlite_schema AS m,
                    pragma_index_list(m.name) AS il,
                    pragma_index_info(il.name) AS ii
                WHERE m.name = ?1
            ) AS il_ii ON (ti.cid = il_ii.tbl_cid)
            WHERE
                m.name = ?1
        """
        return self.execute_query(query_show_table, (table_name,))

    def execute_cursor(
        self, query: str, params: tuple[Any, ...] | None = None
    ) -> sqlite3.Cursor | Literal[False]:
        """execute a cursor, used in execute query or returned for fetch_row"""
        if self.conn is None:
            self.log.warning(f"No connection [{call_stack()}]")
            return False
        try:
            cursor = self.conn.cursor()
            if params:
                cursor.execute(query, params)
            else:
                cursor.execute(query)
            return cursor
        except sqlite3.Error as e:
            self.log.error(f"Error during executing cursor [{query}:{params}]: {e} [{call_stack()}]")
            return False

    def execute_query(
        self, query: str, params: tuple[Any, ...] | None = None
    ) -> list[tuple[Any, ...]] | list[dict[str, Any]] | Literal[False]:
        """query execute with or without params, returns result"""
        if self.conn is None:
            self.log.warning(f"No connection [{call_stack()}]")
            return False
        try:
            if (cursor := self.execute_cursor(query, params)) is False:
                return False
            # fetch before commit because we need to get the RETURNING rows first
            result = cursor.fetchall()
            # this is for INSERT/UPDATE/CREATE only
            self.conn.commit()
            return result
        except sqlite3.Error as e:
            self.log.error(f"Error during executing query [{query}:{params}]: {e} [{call_stack()}]")
            return False

    def return_one(
        self, query: str, params: tuple[Any, ...] | None = None
    ) -> tuple[Any, ...] | dict[str, Any] | Literal[False] | None:
        """return one row, only for SELECT"""
        if self.conn is None:
            self.log.warning(f"No connection [{call_stack()}]")
            return False
        try:
            if (cursor := self.execute_cursor(query, params)) is False:
                return False
            return cursor.fetchone()
        except sqlite3.Error as e:
            self.log.error(f"Error during return one: {e} [{call_stack()}]")
            return False

    def fetch_row(
        self, cursor: sqlite3.Cursor | Literal[False]
    ) -> tuple[Any, ...] | dict[str, Any] | Literal[False] | None:
        """read from cursor"""
        if self.conn is None or cursor is False:
            self.log.warning(f"No connection [{call_stack()}]")
            return False
        try:
            return cursor.fetchone()
        except sqlite3.Error as e:
            self.log.error(f"Error during fetch row: {e} [{call_stack()}]")
            return False

# __END__
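
A hedged sketch of the execute_cursor/fetch_row streaming pattern the class supports; the table name, file name, and Logger are illustrative:

db = SQLiteIO(log, Path("app.db"), row_factory='Dict')
cursor = db.execute_cursor("SELECT * FROM items WHERE qty > ?", (0,))
# fetch_row returns None at end of data and False on error, so stop on either
while (row := db.fetch_row(cursor)) not in (None, False):
    print(row)          # one dict per row via the 'Dict' row factory
db.db_close()
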
62  src/corelibs/debug_handling/debug_helpers.py  Normal file
@@ -0,0 +1,62 @@
"""
Various debug helpers
"""

from warnings import deprecated
from typing import Tuple, Type
from types import TracebackType
from corelibs_stack_trace.stack import call_stack as call_stack_ng, exception_stack as exception_stack_ng

# _typeshed.OptExcInfo
OptExcInfo = Tuple[None, None, None] | Tuple[Type[BaseException], BaseException, TracebackType]


@deprecated("Use corelibs_stack_trace.stack.call_stack instead")
def call_stack(
    start: int = 0,
    skip_last: int = -1,
    separator: str = ' -> ',
    reset_start_if_empty: bool = False
) -> str:
    """
    get the trace for the last entry

    Keyword Arguments:
        start {int} -- start offset; if set too high the output will be empty unless reset_start_if_empty is set (default: {0})
        skip_last {int} -- how many of the last entries are skipped, defaults to -1 for the current method (default: {-1})
        separator {str} -- stack separator, if empty defaults to ' -> ' (default: {' -> '})
        reset_start_if_empty {bool} -- if no stack is returned because start is too high,
            reset to 0 for a full read (default: {False})

    Returns:
        str -- _description_
    """
    return call_stack_ng(
        start=start,
        skip_last=skip_last,
        separator=separator,
        reset_start_if_empty=reset_start_if_empty
    )


@deprecated("Use corelibs_stack_trace.stack.exception_stack instead")
def exception_stack(
    exc_stack: OptExcInfo | None = None,
    separator: str = ' -> '
) -> str:
    """
    Exception traceback, if no sys.exc_info is set, run internal

    Keyword Arguments:
        exc_stack {OptExcInfo | None} -- _description_ (default: {None})
        separator {str} -- _description_ (default: {' -> '})

    Returns:
        str -- _description_
    """
    return exception_stack_ng(
        exc_stack=exc_stack,
        separator=separator
    )

# __END__
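
A hedged sketch of feeding exception_stack an explicit sys.exc_info tuple; the exact trace text depends on corelibs_stack_trace:

import sys

try:
    1 / 0
except ZeroDivisionError:
    # plausible output: "module -> caller -> offending_function"
    print(exception_stack(sys.exc_info()))
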
23  src/corelibs/debug_handling/dump_data.py  Normal file
@@ -0,0 +1,23 @@
"""
dict dump as JSON formatted
"""

from warnings import deprecated
from typing import Any
from corelibs_dump_data.dump_data import dump_data as dump_data_ng


@deprecated("Use corelibs_dump_data.dump_data.dump_data instead")
def dump_data(data: Any, use_indent: bool = True) -> str:
    """
    dump formatted output from dict/list

    Args:
        data (dict | list | str): _description_

    Returns:
        str: _description_
    """
    return dump_data_ng(data=data, use_indent=use_indent)

# __END__
43  src/corelibs/debug_handling/profiling.py  Normal file
@@ -0,0 +1,43 @@
"""
Profile memory usage in Python
"""

# https://docs.python.org/3/library/tracemalloc.html

from warnings import warn, deprecated
from typing import TYPE_CHECKING
from corelibs_debug.profiling import display_top as display_top_ng, display_top_str, Profiling as CoreLibsProfiling
if TYPE_CHECKING:
    from tracemalloc import Snapshot


@deprecated("Use corelibs_debug.profiling.display_top_str with data from display_top instead")
def display_top(snapshot: 'Snapshot', key_type: str = 'lineno', limit: int = 10) -> str:
    """
    Print tracemalloc stats
    https://docs.python.org/3/library/tracemalloc.html#pretty-top

    Args:
        snapshot ('Snapshot'): _description_
        key_type (str, optional): _description_. Defaults to 'lineno'.
        limit (int, optional): _description_. Defaults to 10.
    """
    return display_top_str(
        display_top_ng(
            snapshot=snapshot,
            key_type=key_type,
            limit=limit
        )
    )


class Profiling(CoreLibsProfiling):
    """
    Profile memory usage and elapsed time for some block
    Based on: https://stackoverflow.com/a/53301648
    """


warn("Use corelibs_debug.profiling.Profiling instead", DeprecationWarning, stacklevel=2)

# __END__
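
A hedged sketch of feeding display_top a stdlib tracemalloc snapshot (only stdlib calls are assumed here; the rendered stats format comes from corelibs_debug):

import tracemalloc

tracemalloc.start()
data = [str(i) for i in range(100_000)]     # something that allocates
snapshot = tracemalloc.take_snapshot()
print(display_top(snapshot, key_type='lineno', limit=5))
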
20  src/corelibs/debug_handling/timer.py  Normal file
@@ -0,0 +1,20 @@
"""
an interval timer class

Returns:
    Timer: class timer for basic time run calculations
"""

from warnings import warn
from corelibs_debug.timer import Timer as CorelibsTimer


class Timer(CorelibsTimer):
    """
    get difference between start and end date/time
    """


warn("Use corelibs_debug.timer.Timer instead", DeprecationWarning, stacklevel=2)

# __END__
102  src/corelibs/debug_handling/writeline.py  Normal file
@@ -0,0 +1,102 @@
"""
Various small helpers for data writing
"""

from warnings import deprecated
from typing import TYPE_CHECKING
from corelibs_debug.writeline import (
    write_l as write_l_ng, pr_header as pr_header_ng,
    pr_title as pr_title_ng, pr_open as pr_open_ng,
    pr_close as pr_close_ng, pr_act as pr_act_ng
)
if TYPE_CHECKING:
    from io import TextIOWrapper, StringIO


@deprecated("Use corelibs_debug.writeline.write_l instead")
def write_l(line: str, fpl: 'TextIOWrapper | StringIO | None' = None, print_line: bool = False):
    """
    Write a line to screen and to output file

    Args:
        line (String): Line to write
        fpl (Resource): file handler resource, if None write only to console
    """
    return write_l_ng(
        line=line,
        fpl=fpl,
        print_line=print_line
    )


# progress printers

@deprecated("Use corelibs_debug.writeline.pr_header instead")
def pr_header(tag: str, marker_string: str = '#', width: int = 35):
    """_summary_

    Args:
        tag (str): _description_
    """
    return pr_header_ng(
        tag=tag,
        marker_string=marker_string,
        width=width
    )


@deprecated("Use corelibs_debug.writeline.pr_title instead")
def pr_title(tag: str, prefix_string: str = '|', space_filler: str = '.', width: int = 35):
    """_summary_

    Args:
        tag (str): _description_
        prefix_string (str, optional): _description_. Defaults to '|'.
    """
    return pr_title_ng(
        tag=tag,
        prefix_string=prefix_string,
        space_filler=space_filler,
        width=width
    )


@deprecated("Use corelibs_debug.writeline.pr_open instead")
def pr_open(tag: str, prefix_string: str = '|', space_filler: str = '.', width: int = 35):
    """
    write a progress open line with tag

    Args:
        tag (str): _description_
        prefix_string (str): prefix string. Default: '|'
    """
    return pr_open_ng(
        tag=tag,
        prefix_string=prefix_string,
        space_filler=space_filler,
        width=width
    )


@deprecated("Use corelibs_debug.writeline.pr_close instead")
def pr_close(tag: str = ''):
    """
    write the close tag with new line

    Args:
        tag (str, optional): _description_. Defaults to ''.
    """
    return pr_close_ng(tag=tag)


@deprecated("Use corelibs_debug.writeline.pr_act instead")
def pr_act(act: str = "."):
    """
    write progress character

    Args:
        act (str, optional): _description_. Defaults to ".".
    """
    return pr_act_ng(act=act)

# __END__
219  src/corelibs/email_handling/send_email.py  Normal file
@@ -0,0 +1,219 @@
"""
Send email wrapper
"""

import smtplib
from email.message import EmailMessage
from email.header import Header
from email.utils import formataddr, parseaddr
from typing import TYPE_CHECKING, Any
if TYPE_CHECKING:
    from corelibs.logging_handling.log import Logger


class SendEmail:
    """
    send emails based on a template to a list of receivers
    """

    def __init__(
        self,
        log: "Logger",
        settings: dict[str, Any],
        template: dict[str, str],
        from_email: str,
        combined_send: bool = True,
        receivers: list[str] | None = None,
        data: list[dict[str, str]] | None = None,
    ):
        """
        init send email class

        Args:
            template (dict): Dictionary with body and subject
            from_email (str): from email as "Name" <email>
            combined_send (bool): True for sending as one set for all receivers
            receivers (list): list of emails to send to
            data (dict): data to replace in template
        """
        self.log = log
        self.settings = settings
        # internal settings
        self.template = template
        self.from_email = from_email
        self.combined_send = combined_send
        self.receivers = receivers
        self.data = data

    def send_email(
        self,
        data: list[dict[str, str]] | None,
        receivers: list[str] | None,
        template: dict[str, str] | None = None,
        from_email: str | None = None,
        combined_send: bool | None = None,
        test_only: bool | None = None
    ):
        """
        build email and send

        Arguments:
            data {list[dict[str, str]] | None} -- _description_
            receivers {list[str] | None} -- _description_
            combined_send {bool | None} -- _description_

        Keyword Arguments:
            template {dict[str, str] | None} -- _description_ (default: {None})
            from_email {str | None} -- _description_ (default: {None})

        Raises:
            ValueError: _description_
            ValueError: _description_
        """
        if data is None and self.data is not None:
            data = self.data
        if data is None:
            raise ValueError("No replace data set, cannot send email")
        if receivers is None and self.receivers is not None:
            receivers = self.receivers
        if receivers is None:
            raise ValueError("No receivers list set, cannot send email")
        if combined_send is None:
            combined_send = self.combined_send
        if test_only is not None:
            self.settings['test'] = test_only

        if template is None:
            template = self.template
        if from_email is None:
            from_email = self.from_email

        if not template['subject'] or not template['body']:
            raise ValueError("Both Subject and Body must be set")

        self.log.debug(
            "[EMAIL]:\n"
            f"Subject: {template['subject']}\n"
            f"Body: {template['body']}\n"
            f"From: {from_email}\n"
            f"Combined send: {combined_send}\n"
            f"Receivers: {receivers}\n"
            f"Replace data: {data}"
        )

        # send email
        self.send_email_list(
            self.prepare_email_content(
                from_email, template, data
            ),
            receivers,
            combined_send,
            test_only
        )

    def prepare_email_content(
        self,
        from_email: str,
        template: dict[str, str],
        data: list[dict[str, str]],
    ) -> list[EmailMessage]:
        """
        prepare email for sending

        Args:
            template (dict): template data for this email
            data (dict): data to replace in email

        Returns:
            list: Email Message Objects as list
        """
        _subject = ""
        _body = ""
        msg: list[EmailMessage] = []
        for replace in data:
            _subject = template["subject"]
            _body = template["body"]
            for key, value in replace.items():
                placeholder = f"{{{{{key}}}}}"
                _subject = _subject.replace(placeholder, value)
                _body = _body.replace(placeholder, value)
            name, addr = parseaddr(from_email)
            if name:
                # Encode the name part with MIME encoding
                encoded_name = str(Header(name, 'utf-8'))
                from_email_encoded = formataddr((encoded_name, addr))
            else:
                from_email_encoded = from_email
            # create a simple email and add subject, from email
            msg_email = EmailMessage()
            # msg.set_content(_body, charset='utf-8', cte='quoted-printable')
            msg_email.set_content(_body, charset="utf-8")
            msg_email["Subject"] = _subject
            msg_email["From"] = from_email_encoded
            # push to array for sending
            msg.append(msg_email)
        return msg

    def send_email_list(
        self,
        emails: list[EmailMessage],
        receivers: list[str],
        combined_send: bool | None = None,
        test_only: bool | None = None
    ):
        """
        send email to receivers list

        Args:
            emails (list): Email Message objects with body, subject, from set, as list
            receivers (array): email receivers list as array
            combined_send (bool): True for sending as one set for all receivers
        """

        if test_only is not None:
            self.settings['test'] = test_only

        # localhost (postfix does the rest)
        smtp = None
        smtp_host = self.settings.get('smtp_host', "localhost")
        try:
            smtp = smtplib.SMTP(smtp_host)
        except ConnectionRefusedError as e:
            self.log.error("Could not open SMTP connection to: %s, %s", smtp_host, e)
        # prepare receiver list
        receivers_encoded: list[str] = []
        for __receiver in receivers:
            to_name, to_addr = parseaddr(__receiver)
            if to_name:
                # Encode the name part with MIME encoding
                encoded_to_name = str(Header(to_name, 'utf-8'))
                receivers_encoded.append(formataddr((encoded_to_name, to_addr)))
            else:
                receivers_encoded.append(__receiver)
        # loop over messages and then over receivers
        for msg in emails:
            if combined_send is True:
                msg["To"] = ", ".join(receivers_encoded)
                if not self.settings.get('test'):
                    if smtp is not None:
                        smtp.send_message(msg, msg["From"], receivers_encoded)
                else:
                    self.log.info(f"[EMAIL] Test, not sending email\n{msg}")
            else:
                for receiver in receivers_encoded:
                    self.log.debug(f"===> Send to: {receiver}")
                    if "To" in msg:
                        msg.replace_header("To", receiver)
                    else:
                        msg["To"] = receiver
                    if not self.settings.get('test'):
                        if smtp is not None:
                            smtp.send_message(msg)
                    else:
                        self.log.info(f"[EMAIL] Test, not sending email\n{msg}")
        # close smtp
        if smtp is not None:
            smtp.quit()

# __END__
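
Worth noting: the f-string f"{{{{{key}}}}}" in prepare_email_content renders as the literal placeholder "{{key}}". A hedged sketch with illustrative template data (names are not from this diff):

template = {
    "subject": "Report for {{name}}",
    "body": "Hello {{name}},\nyour total is {{total}}.",
}
data = [{"name": "Alice", "total": "42"}]
# prepare_email_content() would yield one EmailMessage with
# Subject "Report for Alice" and body "Hello Alice,\nyour total is 42."
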
22  src/corelibs/encryption_handling/symmetric_encryption.py  Normal file
@@ -0,0 +1,22 @@
"""
simple symmetric encryption
Will be moved to CoreLibs
TODO: set key per encryption run
"""

import warnings
from corelibs_encryption.symmetric import SymmetricEncryption as CorelibsSymmetricEncryption


class SymmetricEncryption(CorelibsSymmetricEncryption):
    """
    simple encryption

    the encrypted package has "encrypted_data" and "salt" as fields; the salt is needed to derive the
    key from the password to decrypt
    """


warnings.warn("Use corelibs_encryption.symmetric.SymmetricEncryption instead", DeprecationWarning, stacklevel=2)

# __END__
40  src/corelibs/exceptions/csv_exceptions.py  Normal file
@@ -0,0 +1,40 @@
"""
Exceptions for csv file reading and processing
"""

from warnings import warn
from corelibs_csv.csv_exceptions import (
    NoCsvReader as CoreLibsNoCsvReader,
    CompulsoryCsvHeaderCheckFailed as CoreLibsCompulsoryCsvHeaderCheckFailed,
    CsvHeaderDataMissing as CoreLibsCsvHeaderDataMissing,
    CsvRowDataKeysNotMatching as CoreLibsCsvRowDataKeysNotMatching
)


class NoCsvReader(CoreLibsNoCsvReader):
    """
    CSV reader is None
    """


class CsvHeaderDataMissing(CoreLibsCsvHeaderDataMissing):
    """
    The csv reader returned None as headers, the header row in the csv file is missing
    """


class CompulsoryCsvHeaderCheckFailed(CoreLibsCompulsoryCsvHeaderCheckFailed):
    """
    raise if the header does not match the expected values
    """


class CsvRowDataKeysNotMatching(CoreLibsCsvRowDataKeysNotMatching):
    """
    raise if the row data keys do not match the expected header keys
    """


warn("Use corelibs_csv.csv_exceptions instead", DeprecationWarning, stacklevel=2)

# __END__
0  src/corelibs/file_handling/__init__.py  Normal file
42  src/corelibs/file_handling/file_bom_encoding.py  Normal file
@@ -0,0 +1,42 @@
"""
File check if BOM encoded, needed for CSV load
"""

from warnings import deprecated
from pathlib import Path
from corelibs_file.file_bom_encoding import (
    is_bom_encoded as is_bom_encoding_ng,
    get_bom_encoding_info,
    BomEncodingInfo
)


@deprecated("Use corelibs_file.file_bom_encoding.is_bom_encoded instead")
def is_bom_encoded(file_path: Path) -> bool:
    """
    Detect if a file is BOM encoded

    Args:
        file_path (str): Path to the file to check

    Returns:
        bool: True if file has BOM, False otherwise
    """
    return is_bom_encoding_ng(file_path)


@deprecated("Use corelibs_file.file_bom_encoding.get_bom_encoding_info instead")
def is_bom_encoded_info(file_path: Path) -> BomEncodingInfo:
    """
    Enhanced BOM detection with additional file analysis

    Args:
        file_path (str): Path to the file to check

    Returns:
        dict: Comprehensive BOM and encoding information
    """
    return get_bom_encoding_info(file_path)


# __END__
src/corelibs/file_handling/file_crc.py
@@ -2,10 +2,13 @@
 crc handlers for file CRC
 """

-import zlib
+from warnings import deprecated
 from pathlib import Path
+from corelibs_file.file_crc import file_crc as file_crc_ng
+from corelibs_file.file_handling import get_file_name


+@deprecated("Use corelibs_file.file_crc.file_crc instead")
 def file_crc(file_path: Path) -> str:
     """
     With for loop and buffer, create file crc32
@@ -16,13 +19,10 @@ def file_crc(file_path: Path) -> str:
     Returns:
         str: file crc32
     """
-    crc = 0
-    with open(file_path, 'rb', 65536) as ins:
-        for _ in range(int((file_path.stat().st_size / 65536)) + 1):
-            crc = zlib.crc32(ins.read(65536), crc)
-    return f"{crc & 0xFFFFFFFF:08X}"
+    return file_crc_ng(file_path)


+@deprecated("Use corelibs_file.file_handling.get_file_name instead")
 def file_name_crc(file_path: Path, add_parent_folder: bool = False) -> str:
     """
     either returns file name only from path
@@ -38,9 +38,6 @@ def file_name_crc(file_path: Path, add_parent_folder: bool = False) -> str:
     Returns:
         str: file name as string
     """
-    if add_parent_folder:
-        return str(Path(file_path.parent.name).joinpath(file_path.name))
-    else:
-        return file_path.name
+    return get_file_name(file_path, add_parent_folder=add_parent_folder)

 # __END__
38  src/corelibs/file_handling/file_handling.py  Normal file
@@ -0,0 +1,38 @@
"""
File handling utilities
"""

from warnings import deprecated
from pathlib import Path
from corelibs_file.file_handling import remove_all_in_directory as remove_all_in_directory_ng


@deprecated("Use corelibs_file.file_handling.remove_all_in_directory instead")
def remove_all_in_directory(
    directory: Path,
    ignore_files: list[str] | None = None,
    verbose: bool = False,
    dry_run: bool = False
) -> bool:
    """
    deprecated

    Arguments:
        directory {Path} -- _description_

    Keyword Arguments:
        ignore_files {list[str] | None} -- _description_ (default: {None})
        verbose {bool} -- _description_ (default: {False})
        dry_run {bool} -- _description_ (default: {False})

    Returns:
        bool -- _description_
    """
    return remove_all_in_directory_ng(
        directory,
        ignore_files=ignore_files,
        verbose=verbose,
        dry_run=dry_run
    )

# __END__
0  src/corelibs/iterator_handling/__init__.py  Normal file
95  src/corelibs/iterator_handling/data_search.py  Normal file
@@ -0,0 +1,95 @@
"""
wrapper around the data search functions
"""

from typing import Any
from warnings import deprecated
from corelibs_search.data_search import (
    ArraySearchList as CorelibsArraySearchList,
    find_in_array_from_list as corelibs_find_in_array_from_list,
    key_lookup as corelibs_key_lookup,
    value_lookup as corelibs_value_lookup
)


class ArraySearchList(CorelibsArraySearchList):
    """find in array from list search dict"""


@deprecated("Use corelibs_search.data_search.find_in_array_from_list instead")
def array_search(
    search_params: list[ArraySearchList],
    data: list[dict[str, Any]],
    return_index: bool = False
) -> list[dict[str, Any]]:
    """deprecated, old call order"""
    return corelibs_find_in_array_from_list(data, search_params, return_index)


@deprecated("Use corelibs_search.data_search.find_in_array_from_list instead")
def find_in_array_from_list(
    data: list[dict[str, Any]],
    search_params: list[ArraySearchList],
    return_index: bool = False
) -> list[dict[str, Any]]:
    """
    search in a list of dicts with a list of Key/Value sets
    all Key/Value sets must match
    a Value set can be a list for OR match
    option: case_sensitive: default True

    Args:
        data (list): data to search in, must be a list
        search_params (list): List of search params in "key"/"value" lists with options
        return_index (bool): return index of list [default False]

    Raises:
        ValueError: if search params is not a list
        KeyError: if Key or Value are missing in search params
        KeyError: More than one Key with the same name set

    Returns:
        list: list of found elements, or if return_index is set,
            a list of dicts with "index" and "data", where "data" holds the result list
    """
    return corelibs_find_in_array_from_list(
        data,
        search_params,
        return_index
    )


@deprecated("Use corelibs_search.data_search.key_lookup instead")
def key_lookup(haystack: dict[str, str], key: str) -> str:
    """
    simple key lookup in haystack, returns empty string if not found

    Args:
        haystack (dict[str, str]): _description_
        key (str): _description_

    Returns:
        str: _description_
    """
    return corelibs_key_lookup(haystack, key)


@deprecated("Use corelibs_search.data_search.value_lookup instead")
def value_lookup(haystack: dict[str, str], value: str, raise_on_many: bool = False) -> str:
    """
    find by value, if not found returns empty; if raise_on_many is not set, returns the first one

    Args:
        haystack (dict[str, str]): _description_
        value (str): _description_
        raise_on_many (bool, optional): _description_. Defaults to False.

    Raises:
        ValueError: _description_

    Returns:
        str: _description_
    """
    return corelibs_value_lookup(haystack, value, raise_on_many)

# __END__
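
A hedged sketch of a search; the exact search-param shape is defined in corelibs_search, so the dict below only follows the docstring's "key"/"value" description and is illustrative:

records = [
    {"name": "alpha", "env": "prod"},
    {"name": "beta", "env": "dev"},
]
hits = find_in_array_from_list(
    records,
    [{"key": "env", "value": ["prod", "staging"]}],   # list value = OR match
)
# plausible result: [{"name": "alpha", "env": "prod"}]
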
63  src/corelibs/iterator_handling/dict_helpers.py  Normal file
@@ -0,0 +1,63 @@
"""
Various helper functions for type data clean up
"""

from warnings import deprecated
from typing import Any
from corelibs_iterator.dict_support import (
    delete_keys_from_set as corelibs_delete_keys_from_set,
    convert_to_dict_type,
    set_entry as corelibs_set_entry
)


@deprecated("Use corelibs_iterator.dict_support.delete_keys_from_set instead")
def delete_keys_from_set(
    set_data: dict[str, Any] | list[Any] | str, keys: list[str]
) -> dict[str, Any] | list[Any] | Any:
    """
    remove all keys from set_data

    Args:
        set_data (dict[str, Any] | list[Any] | None): _description_
        keys (list[str]): _description_

    Returns:
        dict[str, Any] | list[Any] | None: _description_
    """
    # skip everything if there is no keys list
    return corelibs_delete_keys_from_set(set_data, keys)


@deprecated("Use corelibs_iterator.dict_support.convert_to_dict_type instead")
def build_dict(
    any_dict: Any, ignore_entries: list[str] | None = None
) -> dict[str, Any | list[Any] | dict[Any, Any]]:
    """
    rewrite any AWS *TypeDef to a new dict so we can add/change entries

    Args:
        any_dict (Any): _description_

    Returns:
        dict[str, Any | list[Any]]: _description_
    """
    return convert_to_dict_type(any_dict, ignore_entries)


@deprecated("Use corelibs_iterator.dict_support.set_entry instead")
def set_entry(dict_set: dict[str, Any], key: str, value_set: Any) -> dict[str, Any]:
    """
    set a new entry in the dict set

    Arguments:
        key {str} -- _description_
        dict_set {dict[str, Any]} -- _description_
        value_set {Any} -- _description_

    Returns:
        dict[str, Any] -- _description_
    """
    return corelibs_set_entry(dict_set, key, value_set)

# __END__
52  src/corelibs/iterator_handling/dict_mask.py  Normal file
@@ -0,0 +1,52 @@
"""
Dict helpers
"""

from warnings import deprecated
from typing import TypeAlias, Union, Dict, List, Any
from corelibs_dump_data.dict_mask import (
    mask as corelibs_mask
)

# definitions for the mask run below
MaskableValue: TypeAlias = Union[str, int, float, bool, None]
NestedDict: TypeAlias = Dict[str, Union[MaskableValue, List[Any], 'NestedDict']]
ProcessableValue: TypeAlias = Union[MaskableValue, List[Any], NestedDict]


@deprecated("use corelibs_dump_data.dict_mask.mask instead")
def mask(
    data_set: dict[str, Any],
    mask_keys: list[str] | None = None,
    mask_str: str = "***",
    mask_str_edges: str = '_',
    skip: bool = False
) -> dict[str, Any] | list[Any]:
    """
    mask data for output
    Checks if any entry in the mask_keys list matches a key in the data set, either from the start or at the end

    Use mask_str_edges to define how searches inside a string should work. By default the match must start
    and end with '_'; remove it to search string-in-string

    Arguments:
        data_set {dict[str, Any]} -- _description_

    Keyword Arguments:
        mask_keys {list[str] | None} -- _description_ (default: {None})
        mask_str {str} -- _description_ (default: {"***"})
        mask_str_edges {str} -- _description_ (default: {"_"})
        skip {bool} -- if set to True, skip masking (default: {False})

    Returns:
        dict[str, str] -- _description_
    """
    return corelibs_mask(
        data_set,
        mask_keys,
        mask_str,
        mask_str_edges,
        skip
    )

# __END__
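
A hedged sketch; the exact matching rules live in corelibs_dump_data, and the keys below are illustrative:

settings = {"user": "alice", "db_password": "s3cret", "api_key": "abc123"}
print(mask(settings, mask_keys=["password", "key"]))
# plausible output: {'user': 'alice', 'db_password': '***', 'api_key': '***'}
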
61  src/corelibs/iterator_handling/fingerprint.py  Normal file
@@ -0,0 +1,61 @@
"""
Various dictionary, object and list hashers
"""

from warnings import deprecated
from typing import Any
from corelibs_hash.fingerprint import (
    hash_object as corelibs_hash_object,
    dict_hash_frozen as corelibs_dict_hash_frozen,
    dict_hash_crc as corelibs_dict_hash_crc
)


@deprecated("use corelibs_hash.fingerprint.hash_object instead")
def hash_object(obj: Any) -> str:
    """
    RECOMMENDED for new use
    Create a hash for any dict or list with mixed key types

    Arguments:
        obj {Any} -- _description_

    Returns:
        str -- _description_
    """
    return corelibs_hash_object(obj)


@deprecated("use corelibs_hash.fingerprint.hash_object instead")
def dict_hash_frozen(data: dict[Any, Any]) -> int:
    """
    NOT RECOMMENDED, use dict_hash_crc or hash_object instead
    If used, DO NOT CHANGE
    hash a dict via freeze

    Args:
        data (dict): _description_

    Returns:
        str: _description_
    """
    return corelibs_dict_hash_frozen(data)


@deprecated("use corelibs_hash.fingerprint.dict_hash_crc and for new use hash_object instead")
def dict_hash_crc(data: dict[Any, Any] | list[Any]) -> str:
    """
    LEGACY METHOD, must be kept for fallback; if used by other code, DO NOT CHANGE
    Create a sha256 hash over a dict or list
    alternative for
    dict_hash_frozen

    Args:
        data (dict[Any, Any] | list[Any]): _description_

    Returns:
        str: sha256 hash, prefixed with HO_ if the fallback is used
    """
    return corelibs_dict_hash_crc(data)

# __END__
62  src/corelibs/iterator_handling/list_helpers.py  Normal file
@@ -0,0 +1,62 @@
"""
List type helpers
"""

from warnings import deprecated
from typing import Any, Sequence
from corelibs_iterator.list_support import (
    convert_to_list as corelibs_convert_to_list,
    is_list_in_list as corelibs_is_list_in_list,
    make_unique_list_of_dicts as corelibs_make_unique_list_of_dicts
)


@deprecated("use corelibs_iterator.list_support.convert_to_list instead")
def convert_to_list(
    entry: str | int | float | bool | Sequence[str | int | float | bool | Sequence[Any]]
) -> Sequence[str | int | float | bool | Sequence[Any]]:
    """
    Convert any of the non-list values (except dictionary) to a list

    Arguments:
        entry {str | int | float | bool | list[str | int | float | bool]} -- _description_

    Returns:
        list[str | int | float | bool] -- _description_
    """
    return corelibs_convert_to_list(entry)


@deprecated("use corelibs_iterator.list_support.is_list_in_list instead")
def is_list_in_list(
    list_a: Sequence[str | int | float | bool | Sequence[Any]],
    list_b: Sequence[str | int | float | bool | Sequence[Any]]
) -> Sequence[str | int | float | bool | Sequence[Any]]:
    """
    Return entries from list_a that are not in list_b
    Type safe compare

    Arguments:
        list_a {list[Any]} -- _description_
        list_b {list[Any]} -- _description_

    Returns:
        list[Any] -- _description_
    """
    return corelibs_is_list_in_list(list_a, list_b)


@deprecated("use corelibs_iterator.list_support.make_unique_list_of_dicts instead")
def make_unique_list_of_dicts(dict_list: list[Any]) -> list[Any]:
    """
    Create a list of unique dictionary entries

    Arguments:
        dict_list {list[Any]} -- _description_

    Returns:
        list[Any] -- _description_
    """
    return corelibs_make_unique_list_of_dicts(dict_list)

# __END__

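A short sketch of the list helpers; the expected results follow the docstrings above (is_list_in_list returns the entries of list_a missing from list_b, with a type safe compare):

from corelibs.iterator_handling.list_helpers import (
    convert_to_list, is_list_in_list, make_unique_list_of_dicts
)

print(convert_to_list("single"))              # scalar wrapped into a list
print(is_list_in_list([1, "1", 2], [1, 2]))   # ["1"]: the string "1" is not the int 1
print(make_unique_list_of_dicts([{"a": 1}, {"a": 1}, {"b": 2}]))  # duplicate dict dropped
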
0  src/corelibs/json_handling/__init__.py  Normal file
28  src/corelibs/json_handling/jmespath_helper.py  Normal file
@@ -0,0 +1,28 @@
"""
helper functions for jmespath interfaces
"""

from warnings import deprecated
from typing import Any
from corelibs_search.jmespath_search import jmespath_search as jmespath_search_ng


@deprecated("Use corelibs_search.jmespath_search.jmespath_search instead")
def jmespath_search(search_data: dict[Any, Any] | list[Any], search_params: str) -> Any:
    """
    jmespath search wrapper

    Args:
        search_data (dict | list): _description_
        search_params (str): _description_

    Raises:
        ValueError: jmespath.exceptions.LexerError
        ValueError: jmespath.exceptions.ParseError

    Returns:
        Any: dict/list/etc, None if nothing found
    """
    return jmespath_search_ng(search_data, search_params)

# __END__

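A usage sketch for the wrapper, with a standard JMESPath expression; broken expressions surface as ValueError per the docstring:

from corelibs.json_handling.jmespath_helper import jmespath_search

data = {"users": [{"name": "a", "age": 30}, {"name": "b", "age": 25}]}
print(jmespath_search(data, "users[?age > `26`].name"))  # ['a']
try:
    jmespath_search(data, "users[?")  # unterminated filter expression
except ValueError as err:
    print(f"rejected: {err}")
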
59  src/corelibs/json_handling/json_helper.py  Normal file
@@ -0,0 +1,59 @@
"""
json encoder for datetime
"""

from warnings import warn, deprecated
from typing import Any
from corelibs_json.json_support import (
    default_isoformat as default_isoformat_ng,
    DateTimeEncoder as DateTimeEncoderCoreLibs,
    json_dumps as json_dumps_ng,
    modify_with_jsonpath as modify_with_jsonpath_ng,
)


# subclass JSONEncoder
class DateTimeEncoder(DateTimeEncoderCoreLibs):
    """
    Override the default method
    dumps(..., cls=DateTimeEncoder, ...)
    """


warn("Use corelibs_json.json_support.DateTimeEncoder instead", DeprecationWarning, stacklevel=2)


@deprecated("Use corelibs_json.json_support.default_isoformat instead")
def default_isoformat(obj: Any) -> str | None:
    """
    default override
    dumps(..., default=default, ...)
    """
    return default_isoformat_ng(obj)


@deprecated("Use corelibs_json.json_support.json_dumps instead")
def json_dumps(data: Any):
    """
    wrapper for json.dumps for a safe dump that does not throw exceptions

    Arguments:
        data {Any} -- _description_

    Returns:
        _type_ -- _description_
    """
    return json_dumps_ng(data)


@deprecated("Use corelibs_json.json_support.modify_with_jsonpath instead")
def modify_with_jsonpath(data: dict[Any, Any], path: str, new_value: Any):
    """
    Modify dictionary using JSONPath (more powerful than JMESPath for modifications)
    """
    return modify_with_jsonpath_ng(data, path, new_value)

# __END__

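A sketch of the datetime-safe dump path, assuming DateTimeEncoder serializes datetime values the way the docstrings suggest:

import json
from datetime import datetime
from corelibs.json_handling.json_helper import DateTimeEncoder, json_dumps

payload = {"ts": datetime(2025, 7, 2, 12, 0, 0), "count": 1}
print(json.dumps(payload, cls=DateTimeEncoder))  # datetime encoded instead of raising TypeError
print(json_dumps(payload))  # safe wrapper, does not throw on unencodable input
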
0  src/corelibs/logging_handling/__init__.py  Normal file
963  src/corelibs/logging_handling/log.py  Normal file
@@ -0,0 +1,963 @@
"""
A log handler wrapper
if log_settings['log_queue'] is set to a multiprocessing.Queue it will launch with listeners
attach "init_worker_logging" with the set log_queue
"""

import re
import logging
import logging.handlers
from datetime import datetime
import time
from pathlib import Path
import atexit
from enum import Flag, auto
from typing import MutableMapping, TextIO, TypedDict, Any, TYPE_CHECKING, cast
from corelibs_stack_trace.stack import call_stack, exception_stack
from corelibs_text_colors.text_colors import Colors
from corelibs.logging_handling.logging_level_handling.logging_level import LoggingLevel

if TYPE_CHECKING:
    from multiprocessing import Queue


class ConsoleFormat(Flag):
    """console format type bitmap flags"""
    TIME = auto()
    TIME_SECONDS = auto()
    TIME_MILLISECONDS = auto()
    TIME_MICROSECONDS = auto()
    TIMEZONE = auto()
    NAME = auto()
    FILE = auto()
    FUNCTION = auto()
    LINENO = auto()
    LEVEL = auto()


class ConsoleFormatSettings:
    """Console format quick settings groups"""
    # shows everything: time with milliseconds and time zone, log name, file, function, line number
    ALL = (
        ConsoleFormat.TIME |
        ConsoleFormat.TIMEZONE |
        ConsoleFormat.NAME |
        ConsoleFormat.FILE |
        ConsoleFormat.FUNCTION |
        ConsoleFormat.LINENO |
        ConsoleFormat.LEVEL
    )
    # show time with no time zone, file, line and level
    CONDENSED = ConsoleFormat.TIME | ConsoleFormat.FILE | ConsoleFormat.LINENO | ConsoleFormat.LEVEL
    # only time and level
    MINIMAL = ConsoleFormat.TIME | ConsoleFormat.LEVEL
    # only level
    BARE = ConsoleFormat.LEVEL
    # only message
    NONE = ConsoleFormat(0)

    @staticmethod
    def from_string(setting_str: str, default: ConsoleFormat | None = None) -> ConsoleFormat | None:
        """
        Get a console format setting; if it does not exist return the default

        Arguments:
            setting_str {str} -- what to search for
            default {ConsoleFormat | None} -- if not found return this (default: {None})

        Returns:
            ConsoleFormat | None -- found ConsoleFormat or the default
        """
        if hasattr(ConsoleFormatSettings, setting_str):
            return getattr(ConsoleFormatSettings, setting_str)
        return default


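Because ConsoleFormat is a Flag enum, ad-hoc combinations beyond the quick-setting groups are possible; a small sketch:

from corelibs.logging_handling.log import ConsoleFormat, ConsoleFormatSettings

custom = ConsoleFormat.TIME_SECONDS | ConsoleFormat.LEVEL | ConsoleFormat.LINENO
print(ConsoleFormat.LEVEL in custom)     # True: bitmap membership test
print(ConsoleFormat.TIMEZONE in custom)  # False
# quick settings can be resolved from strings, e.g. from a config file
print(ConsoleFormatSettings.from_string("MINIMAL", default=ConsoleFormatSettings.BARE))
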
# MARK: Log settings TypedDict
class LogSettings(TypedDict):
    """log settings, for Log setup"""
    log_level_console: LoggingLevel
    log_level_file: LoggingLevel
    per_run_log: bool
    console_enabled: bool
    console_color_output_enabled: bool
    console_format_type: ConsoleFormat
    add_start_info: bool
    add_end_info: bool
    log_queue: 'Queue[str] | None'


class LoggerInit(TypedDict):
    """for Logger init"""
    logger: logging.Logger
    log_queue: 'Queue[str] | None'


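All keys on the TypedDict are required when building a full literal; a settings value that mirrors the defaults further below might look like this (values are illustrative):

from corelibs.logging_handling.log import ConsoleFormatSettings, LogSettings
from corelibs.logging_handling.logging_level_handling.logging_level import LoggingLevel

settings: LogSettings = {
    "log_level_console": LoggingLevel.INFO,
    "log_level_file": LoggingLevel.DEBUG,
    "per_run_log": False,
    "console_enabled": True,
    "console_color_output_enabled": True,
    "console_format_type": ConsoleFormatSettings.CONDENSED,
    "add_start_info": True,
    "add_end_info": False,
    "log_queue": None,  # or a multiprocessing.Queue to run through a QueueListener
}
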
# MARK: Custom color formatter
class CustomConsoleFormatter(logging.Formatter):
    """
    Custom formatter with colors for console output
    """

    COLORS = {
        LoggingLevel.DEBUG.name: Colors.cyan,
        LoggingLevel.INFO.name: Colors.green,
        LoggingLevel.WARNING.name: Colors.yellow,
        LoggingLevel.ERROR.name: Colors.red,
        LoggingLevel.CRITICAL.name: Colors.red_bold,
        LoggingLevel.ALERT.name: Colors.yellow_bold,
        LoggingLevel.EMERGENCY.name: Colors.magenta_bold,
        LoggingLevel.EXCEPTION.name: Colors.magenta_bright,  # will never be written to console
    }

    def format(self, record: logging.LogRecord) -> str:
        """
        set the color highlight

        Arguments:
            record {logging.LogRecord} -- _description_

        Returns:
            str -- _description_
        """
        # Add color to levelname for console output
        reset = Colors.reset
        color = self.COLORS.get(record.levelname, reset)
        # only highlight the level name for DEBUG and INFO
        if record.levelname in [LoggingLevel.DEBUG.name, LoggingLevel.INFO.name]:
            record.levelname = f"{color}{record.levelname}{reset}"
            return super().format(record)
        # highlight the whole line
        message = super().format(record)
        return f"{color}{message}{reset}"

    # TODO: add custom handlers for stack_trace, if not set fill with %(filename)s:%(funcName)s:%(lineno)d
    # hasattr(record, 'stack_trace')
    # also for something like "context" where we add an array of anything to a message


class CustomHandlerFilter(logging.Filter):
    """
    Add a custom handler for filtering
    """
    HANDLER_NAME_FILTER_EXCEPTION: str = 'console'

    def __init__(self, handler_name: str, filter_exceptions: bool = False):
        super().__init__(name=handler_name)
        self.handler_name = handler_name
        self.filter_exceptions = filter_exceptions

    def filter(self, record: logging.LogRecord) -> bool:
        # if console and exception, do not show
        if self.handler_name == self.HANDLER_NAME_FILTER_EXCEPTION and self.filter_exceptions:
            return record.levelname != "EXCEPTION"
        # if the console flag on the record is true and the target is the file handler, filter it out
        if hasattr(record, 'console') and getattr(record, 'console') is True and self.handler_name == 'file':
            return False
        return True

    # def __filter_exceptions(self, record: logging.LogRecord) -> bool:
    #     return record.levelname != "EXCEPTION"


# MARK: Parent class
class LogParent:
    """
    Parent class with general methods
    used by Log and Logger
    """

    # spacer length in characters and the spacer character
    SPACER_CHAR: str = '='
    SPACER_LENGTH: int = 32

    def __init__(self):
        self.logger: logging.Logger
        self.log_queue: 'Queue[str] | None' = None
        self.handlers: dict[str, Any] = {}

    # FIXME: we need to add a custom formatter to add stack level listing if we want to
    # Important note: although they exist, it is recommended to use self.logger.NAME directly
    # so that the correct filename, method and row number is set
    # for > 50 use logger.log(LoggingLevel.<LEVEL>.value, ...)
    # for exception logger.log(LoggingLevel.EXCEPTION.value, ..., exc_info=True)
    # MARK: log message
    def log(self, level: int, msg: object, *args: object, extra: MutableMapping[str, object] | None = None):
        """log general"""
        if not hasattr(self, 'logger'):
            raise ValueError('Logger is not yet initialized')
        if extra is None:
            extra = {}
        extra['stack_trace'] = call_stack(skip_last=2)
        self.logger.log(level, msg, *args, extra=extra, stacklevel=2)

    # MARK: DEBUG 10
    def debug(self, msg: object, *args: object, extra: MutableMapping[str, object] | None = None) -> None:
        """debug"""
        if not hasattr(self, 'logger'):
            raise ValueError('Logger is not yet initialized')
        if extra is None:
            extra = {}
        extra['stack_trace'] = call_stack(skip_last=2)
        self.logger.debug(msg, *args, extra=extra, stacklevel=2)

    # MARK: INFO 20
    def info(self, msg: object, *args: object, extra: MutableMapping[str, object] | None = None) -> None:
        """info"""
        if not hasattr(self, 'logger'):
            raise ValueError('Logger is not yet initialized')
        if extra is None:
            extra = {}
        extra['stack_trace'] = call_stack(skip_last=2)
        self.logger.info(msg, *args, extra=extra, stacklevel=2)

    # MARK: WARNING 30
    def warning(self, msg: object, *args: object, extra: MutableMapping[str, object] | None = None) -> None:
        """warning"""
        if not hasattr(self, 'logger'):
            raise ValueError('Logger is not yet initialized')
        if extra is None:
            extra = {}
        extra['stack_trace'] = call_stack(skip_last=2)
        self.logger.warning(msg, *args, extra=extra, stacklevel=2)

    # MARK: ERROR 40
    def error(self, msg: object, *args: object, extra: MutableMapping[str, object] | None = None) -> None:
        """error"""
        if not hasattr(self, 'logger'):
            raise ValueError('Logger is not yet initialized')
        if extra is None:
            extra = {}
        extra['stack_trace'] = call_stack(skip_last=2)
        self.logger.error(msg, *args, extra=extra, stacklevel=2)

    # MARK: CRITICAL 50
    def critical(self, msg: object, *args: object, extra: MutableMapping[str, object] | None = None) -> None:
        """critical"""
        if not hasattr(self, 'logger'):
            raise ValueError('Logger is not yet initialized')
        if extra is None:
            extra = {}
        extra['stack_trace'] = call_stack(skip_last=2)
        self.logger.critical(msg, *args, extra=extra, stacklevel=2)

    # MARK: ALERT 55
    def alert(self, msg: object, *args: object, extra: MutableMapping[str, object] | None = None) -> None:
        """alert"""
        if not hasattr(self, 'logger'):
            raise ValueError('Logger is not yet initialized')
        # extra_dict = dict(extra)
        if extra is None:
            extra = {}
        extra['stack_trace'] = call_stack(skip_last=2)
        self.logger.log(LoggingLevel.ALERT.value, msg, *args, extra=extra, stacklevel=2)

    # MARK: EMERGENCY 60
    def emergency(self, msg: object, *args: object, extra: MutableMapping[str, object] | None = None) -> None:
        """emergency"""
        if not hasattr(self, 'logger'):
            raise ValueError('Logger is not yet initialized')
        if extra is None:
            extra = {}
        extra['stack_trace'] = call_stack(skip_last=2)
        self.logger.log(LoggingLevel.EMERGENCY.value, msg, *args, extra=extra, stacklevel=2)

    # MARK: EXCEPTION 70
    def exception(
        self,
        msg: object, *args: object, extra: MutableMapping[str, object] | None = None,
        log_error: bool = True
    ) -> None:
        """
        log on exception level; this is log.exception, but logs with a new level

        Args:
            msg (object): _description_
            *args (object): arguments for msg
            extra (MutableMapping[str, object] | None): extra arguments for the formatting if needed
            log_error (bool): if set to False, do not write the additional error message for the console (default: True)
        """
        if not hasattr(self, 'logger'):
            raise ValueError('Logger is not yet initialized')
        if extra is None:
            extra = {}
        extra['stack_trace'] = call_stack(skip_last=2)
        extra['exception_trace'] = exception_stack()
        # write to console first with the extra flag so it can be filtered out in the file handler
        if log_error:
            self.logger.log(
                LoggingLevel.ERROR.value,
                f"<=EXCEPTION={extra['exception_trace']}> {msg} [{extra['stack_trace']}]",
                *args, extra=dict(extra) | {'console': True}, stacklevel=2
            )
        self.logger.log(LoggingLevel.EXCEPTION.value, msg, *args, exc_info=True, extra=extra, stacklevel=2)

    def break_line(self, info: str = "BREAK"):
        """
        add a break line as info level

        Keyword Arguments:
            info {str} -- _description_ (default: {"BREAK"})
        """
        if not hasattr(self, 'logger'):
            raise ValueError('Logger is not yet initialized')
        self.logger.info("[%s] %s>", info, self.SPACER_CHAR * self.SPACER_LENGTH)

    # MARK: queue handling
    def flush(self, handler_name: str | None = None, timeout: float = 2.0) -> bool:
        """
        Flush all pending messages

        Keyword Arguments:
            handler_name {str | None} -- _description_ (default: {None})
            timeout {float} -- _description_ (default: {2.0})

        Returns:
            bool -- _description_
        """
        if not self.log_queue:
            return False

        try:
            # Wait for queue to be processed
            start_time = time.time()
            while not self.log_queue.empty() and (time.time() - start_time) < timeout:
                time.sleep(0.01)

            # Flush all handlers or only the handler given
            if handler_name:
                try:
                    self.handlers[handler_name].flush()
                except KeyError:
                    # handlers is a dict, so a missing name raises KeyError
                    pass
            else:
                for handler in self.handlers.values():
                    handler.flush()
        except OSError:
            return False
        return True

    def cleanup(self):
        """
        cleanup for any open queues in case we have an abort
        """
        if not self.log_queue:
            return
        self.flush()
        # Close the queue properly
        self.log_queue.close()
        self.log_queue.join_thread()

    # MARK: log level handling
    def set_log_level(self, handler_name: str, log_level: LoggingLevel) -> bool:
        """
        set the logging level for a handler

        Arguments:
            handler_name {str} -- _description_
            log_level {LoggingLevel} -- _description_

        Returns:
            bool -- _description_
        """
        try:
            # flush the queue before changing the logging level
            self.flush(handler_name)
            self.handlers[handler_name].setLevel(log_level.name)
            return True
        except KeyError:
            if self.logger:
                self.logger.error('Handler %s not found, cannot change log level', handler_name)
            return False
        except AttributeError:
            if self.logger:
                self.logger.error(
                    'Cannot change to log level %s for handler %s, log level invalid',
                    log_level.name, handler_name
                )
            return False

    def get_log_level(self, handler_name: str) -> LoggingLevel:
        """
        get the logging level for a handler

        Arguments:
            handler_name {str} -- _description_

        Returns:
            LoggingLevel -- _description_
        """
        try:
            return LoggingLevel.from_any(self.handlers[handler_name].level)
        except KeyError:
            return LoggingLevel.NOTSET

    def any_handler_is_minimum_level(self, log_level: LoggingLevel) -> bool:
        """
        if any handler is set to the given minimum level

        Arguments:
            log_level {LoggingLevel} -- _description_

        Returns:
            bool -- _description_
        """
        for handler in self.handlers.values():
            try:
                if LoggingLevel.from_any(handler.level).includes(log_level):
                    return True
            except (ValueError, AttributeError):
                continue
        return False

    @staticmethod
    def validate_log_level(log_level: Any) -> bool:
        """
        returns False if the log level is invalid, else True

        Args:
            log_level (Any): _description_

        Returns:
            bool: _description_
        """
        try:
            _ = LoggingLevel.from_any(log_level).value
            return True
        except ValueError:
            return False

    @staticmethod
    def get_log_level_int(log_level: Any) -> int:
        """
        Return log level as INT
        If invalid, returns the default log level

        Arguments:
            log_level {Any} -- _description_

        Returns:
            int -- _description_
        """
        try:
            return LoggingLevel.from_any(log_level).value
        except ValueError:
            return LoggingLevel.from_string(Log.DEFAULT_LOG_LEVEL.name).value


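The level helpers can guard expensive log message construction; a sketch assuming `log` is a configured Log or Logger instance and build_debug_report is a hypothetical costly helper:

if log.any_handler_is_minimum_level(LoggingLevel.DEBUG):
    log.debug("state dump: %s", build_debug_report())  # only built when some handler shows DEBUG

# validate before applying a user supplied level
if LogParent.validate_log_level("warning"):
    log.set_log_level("file_handler", LoggingLevel.WARNING)
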
# MARK: Logger
class Logger(LogParent):
    """
    The class we can pass on to other classes without re-initializing the class itself
    NOTE: if no queue object is handed over, a logging level change might not take immediate effect
    """

    def __init__(self, logger_settings: LoggerInit):
        LogParent.__init__(self)
        self.logger = logger_settings['logger']
        self.lg = self.logger
        self.l = self.logger
        self.handlers = {str(_handler.name): _handler for _handler in self.logger.handlers}
        self.log_queue = logger_settings['log_queue']


# MARK: LogSetup class
class Log(LogParent):
    """
    logger setup
    """

    CONSOLE_HANDLER: str = 'stream_handler'
    FILE_HANDLER: str = 'file_handler'

    # spacer length in characters and the spacer character
    SPACER_CHAR: str = '='
    SPACER_LENGTH: int = 32
    # default logging level
    DEFAULT_LOG_LEVEL: LoggingLevel = LoggingLevel.WARNING
    DEFAULT_LOG_LEVEL_FILE: LoggingLevel = LoggingLevel.DEBUG
    DEFAULT_LOG_LEVEL_CONSOLE: LoggingLevel = LoggingLevel.WARNING
    # default settings
    DEFAULT_LOG_SETTINGS: LogSettings = {
        "log_level_console": DEFAULT_LOG_LEVEL_CONSOLE,
        "log_level_file": DEFAULT_LOG_LEVEL_FILE,
        "per_run_log": False,
        "console_enabled": True,
        "console_color_output_enabled": True,
        # default console format: print time, log name, file, function and line number
        "console_format_type": ConsoleFormatSettings.ALL,
        "add_start_info": True,
        "add_end_info": False,
        "log_queue": None,
    }

    # MARK: constructor
    def __init__(
        self,
        log_path: Path,
        log_name: str,
        log_settings: (
            dict[str, 'LoggingLevel | str | bool | None | Queue[str] | ConsoleFormat'] |
            LogSettings | None
        ) = None,
        other_handlers: dict[str, Any] | None = None
    ):
        LogParent.__init__(self)
        # add new levels for alert, emergency and exception
        logging.addLevelName(LoggingLevel.ALERT.value, LoggingLevel.ALERT.name)
        logging.addLevelName(LoggingLevel.EMERGENCY.value, LoggingLevel.EMERGENCY.name)
        logging.addLevelName(LoggingLevel.EXCEPTION.value, LoggingLevel.EXCEPTION.name)
        # parse the logging settings
        self.log_settings = self.__parse_log_settings(log_settings)
        # if path, set log name with .log
        # if log name with .log, strip .log for naming
        if log_path.is_dir():
            __log_file_name = re.sub(r'[^a-zA-Z0-9]', '', log_name)
            if not log_name.endswith('.log'):
                log_path = log_path.joinpath(Path(__log_file_name).with_suffix('.log'))
            else:
                log_path = log_path.joinpath(__log_file_name)
        elif not log_path.suffix == '.log':
            # add .log if the path is a file but without .log
            log_path = log_path.with_suffix('.log')
        # strip .log from the log name if set
        if log_name.endswith('.log'):
            log_name = Path(log_name).stem
        # general log name
        self.log_name = log_name

        self.log_queue: 'Queue[str] | None' = None
        self.listener: logging.handlers.QueueListener | None = None
        self.logger: logging.Logger

        # setup handlers
        # NOTE if console with color is set first, some of the color formatting is set
        # in the file writer too, for the ones where color is set BEFORE the format
        # Any is logging.StreamHandler, logging.FileHandler and all logging.handlers.*
        self.handlers: dict[str, Any] = {}
        self.add_handler(self.FILE_HANDLER, self.__create_file_handler(
            self.FILE_HANDLER, self.log_settings['log_level_file'], log_path)
        )
        if self.log_settings['console_enabled']:
            # console
            self.add_handler(self.CONSOLE_HANDLER, self.__create_console_handler(
                self.CONSOLE_HANDLER,
                self.log_settings['log_level_console'],
                console_format_type=self.log_settings['console_format_type'],
            ))
        # add other handlers
        if other_handlers is not None:
            for handler_key, handler in other_handlers.items():
                self.add_handler(handler_key, handler)
        # init listener if we have a log_queue set
        self.__init_listener(self.log_settings['log_queue'])

        # overall logger start
        self.__init_log(log_name)
        # if requested, write a start log entry
        if self.log_settings['add_start_info'] is True:
            self.break_line('START')

    # MARK: destructor
    def __del__(self):
        """
        Called when the class is destroyed; make sure the listener is closed or else we throw a thread error
        """
        if hasattr(self, 'log_settings') and self.log_settings.get('add_end_info'):
            self.break_line('END')
        self.stop_listener()

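Typical construction, assuming a writable log directory; the settings dict may be partial, missing keys fall back to the defaults parsed below:

from pathlib import Path
from corelibs.logging_handling.log import Log
from corelibs.logging_handling.logging_level_handling.logging_level import LoggingLevel

log = Log(
    log_path=Path("/var/log/myapp"),  # a directory: the file name is derived from log_name
    log_name="myapp",                 # ".log" suffix handling is automatic
    log_settings={
        "log_level_console": LoggingLevel.INFO,
        "per_run_log": True,          # one time-stamped file per run
    },
)
log.info("started")  # a "[START] ===...>" break line was already written by default
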
    # MARK: parse log settings
    def __parse_log_settings(
        self,
        log_settings: (
            dict[str, 'LoggingLevel | str | bool | None | Queue[str] | ConsoleFormat'] |
            LogSettings | None
        )
    ) -> LogSettings:
        # skip with defaults if not set
        if log_settings is None:
            return self.DEFAULT_LOG_SETTINGS
        # check entries, work on a copy so the class defaults stay untouched
        default_log_settings = self.DEFAULT_LOG_SETTINGS.copy()
        # check log levels
        for __log_entry in ['log_level_console', 'log_level_file']:
            if log_settings.get(__log_entry) is None:
                continue
            # if not valid reset to default, if not in default set to WARNING
            if not self.validate_log_level(__log_level := log_settings.get(__log_entry, '')):
                __log_level = self.DEFAULT_LOG_SETTINGS.get(
                    __log_entry, self.DEFAULT_LOG_LEVEL
                )
            default_log_settings[__log_entry] = LoggingLevel.from_any(__log_level)
        # check bool
        for __log_entry in [
            "per_run_log",
            "console_enabled",
            "console_color_output_enabled",
            "add_start_info",
            "add_end_info",
        ]:
            if log_settings.get(__log_entry) is None:
                continue
            if not isinstance(__setting := log_settings.get(__log_entry, ''), bool):
                __setting = self.DEFAULT_LOG_SETTINGS.get(__log_entry, True)
            default_log_settings[__log_entry] = __setting
        # check console log type
        if (console_format_type := log_settings.get('console_format_type')) is None:
            console_format_type = self.DEFAULT_LOG_SETTINGS['console_format_type']
        default_log_settings['console_format_type'] = cast('ConsoleFormat', console_format_type)
        # check log queue
        __setting = log_settings.get('log_queue', self.DEFAULT_LOG_SETTINGS['log_queue'])
        if __setting is not None:
            __setting = cast('Queue[str]', __setting)
        default_log_settings['log_queue'] = __setting
        return default_log_settings

    # def __filter_exceptions(self, record: logging.LogRecord) -> bool:
    #     return record.levelname != "EXCEPTION"

    # MARK: add a handler
    def add_handler(
        self,
        handler_name: str,
        handler: Any
    ) -> bool:
        """
        Add a log handler to the handlers dict

        Arguments:
            handler_name {str} -- _description_
            handler {Any} -- _description_

        Returns:
            bool -- False if the handler name is already registered
        """
        if self.handlers.get(handler_name):
            return False
        if self.listener is not None or hasattr(self, 'logger'):
            raise ValueError(
                f"Cannot add handler {handler_name}: {handler.get_name()} because logger is already running"
            )
        # TODO: handler must be some handler type, how to check?
        self.handlers[handler_name] = handler
        return True

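Since add_handler refuses to register once the logger is live, extra handlers have to arrive through the constructor; a sketch with a syslog handler (the handler choice is only an example, any logging handler fits):

import logging
import logging.handlers
from pathlib import Path
from corelibs.logging_handling.log import Log

syslog = logging.handlers.SysLogHandler(address="/dev/log")
syslog.set_name("syslog_handler")
syslog.setLevel(logging.WARNING)

log = Log(
    log_path=Path("/tmp/demo.log"),
    log_name="demo",
    other_handlers={"syslog_handler": syslog},  # registered before the logger starts
)
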
    # MARK: console logger format
    def __build_console_format_from_string(self, console_format_type: ConsoleFormat) -> str:
        """
        Build the console format string from the given console format type

        Arguments:
            console_format_type {ConsoleFormat} -- _description_

        Returns:
            str -- _description_
        """
        format_string = ''
        # time part if any of the times are requested
        if (
            ConsoleFormat.TIME in console_format_type or
            ConsoleFormat.TIME_SECONDS in console_format_type or
            ConsoleFormat.TIME_MILLISECONDS in console_format_type or
            ConsoleFormat.TIME_MICROSECONDS in console_format_type
        ):
            format_string += '[%(asctime)s] '
        # set log name
        if ConsoleFormat.NAME in console_format_type:
            format_string += '[%(name)s] '
        # for any file/function/line number call
        if (
            ConsoleFormat.FILE in console_format_type or
            ConsoleFormat.FUNCTION in console_format_type or
            ConsoleFormat.LINENO in console_format_type
        ):
            format_string += '['
            set_group: list[str] = []
            if ConsoleFormat.FILE in console_format_type:
                set_group.append('%(filename)s')
            if ConsoleFormat.FUNCTION in console_format_type:
                set_group.append('%(funcName)s')
            if ConsoleFormat.LINENO in console_format_type:
                set_group.append('%(lineno)d')
            format_string += ':'.join(set_group)
            format_string += '] '
        # level if wanted
        if ConsoleFormat.LEVEL in console_format_type:
            format_string += '<%(levelname)s> '
        # always message
        format_string += '%(message)s'
        return format_string

    def __set_time_format_for_console_formatter(
        self, formatter_console: CustomConsoleFormatter | logging.Formatter, console_format_type: ConsoleFormat
    ) -> None:
        """
        Format time for a given format handler, this is for console format only

        Arguments:
            formatter_console {CustomConsoleFormatter | logging.Formatter} -- _description_
            console_format_type {ConsoleFormat} -- _description_
        """
        # default for TIME is milliseconds
        # if we have multiple set, the smallest precision wins
        if ConsoleFormat.TIME_MICROSECONDS in console_format_type:
            iso_precision = 'microseconds'
        elif (
            ConsoleFormat.TIME_MILLISECONDS in console_format_type or
            ConsoleFormat.TIME in console_format_type
        ):
            iso_precision = 'milliseconds'
        elif ConsoleFormat.TIME_SECONDS in console_format_type:
            iso_precision = 'seconds'
        else:
            iso_precision = 'milliseconds'
        # do timestamp modification only if we have time requested
        if (
            ConsoleFormat.TIME in console_format_type or
            ConsoleFormat.TIME_SECONDS in console_format_type or
            ConsoleFormat.TIME_MILLISECONDS in console_format_type or
            ConsoleFormat.TIME_MICROSECONDS in console_format_type
        ):
            # if TZ is requested we add the astimezone call
            if ConsoleFormat.TIMEZONE in console_format_type:
                formatter_console.formatTime = (
                    lambda record, datefmt=None:
                    datetime
                    .fromtimestamp(record.created)
                    .astimezone()
                    .isoformat(sep=" ", timespec=iso_precision)
                )
            else:
                formatter_console.formatTime = (
                    lambda record, datefmt=None:
                    datetime
                    .fromtimestamp(record.created)
                    .isoformat(sep=" ", timespec=iso_precision)
                )

    def __set_console_formatter(self, console_format_type: ConsoleFormat) -> CustomConsoleFormatter | logging.Formatter:
        """
        Build the full formatter and return it

        Arguments:
            console_format_type {ConsoleFormat} -- _description_

        Returns:
            CustomConsoleFormatter | logging.Formatter -- _description_
        """
        format_string = self.__build_console_format_from_string(console_format_type)
        if self.log_settings['console_color_output_enabled']:
            # formatter_console = CustomConsoleFormatter(format_string, datefmt=format_date)
            formatter_console = CustomConsoleFormatter(format_string)
        else:
            # formatter_console = logging.Formatter(format_string, datefmt=format_date)
            formatter_console = logging.Formatter(format_string)
        self.__set_time_format_for_console_formatter(formatter_console, console_format_type)
        self.log_settings['console_format_type'] = console_format_type
        return formatter_console

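For reference, the builder above yields plain logging format strings; the quick-setting groups resolve to:

# ConsoleFormatSettings.CONDENSED (TIME | FILE | LINENO | LEVEL)
'[%(asctime)s] [%(filename)s:%(lineno)d] <%(levelname)s> %(message)s'
# ConsoleFormatSettings.MINIMAL (TIME | LEVEL)
'[%(asctime)s] <%(levelname)s> %(message)s'
# ConsoleFormatSettings.NONE
'%(message)s'
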
    # MARK: console handler update
    def update_console_formatter(
        self,
        console_format_type: ConsoleFormat,
    ):
        """
        Update the console formatter for format layout and time stamp format

        Arguments:
            console_format_type {ConsoleFormat} -- _description_
        """
        # skip if console not enabled
        if not self.log_settings['console_enabled']:
            return
        # skip if format has not changed
        if self.log_settings['console_format_type'] == console_format_type:
            return
        # update the formatter
        self.handlers[self.CONSOLE_HANDLER].setFormatter(
            self.__set_console_formatter(console_format_type)
        )

    def get_console_formatter(self) -> ConsoleFormat:
        """
        Get the current console formatter, that is the settings type
        Note that if e.g. "ALL" is set it will return the combined information but not the ALL flag name itself

        Returns:
            ConsoleFormat -- _description_
        """
        return self.log_settings['console_format_type']

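Switching the layout at runtime is a formatter swap, not a handler rebuild; a sketch, assuming `log` is a running Log instance with the console enabled:

log.update_console_formatter(ConsoleFormatSettings.MINIMAL)  # drop file/function/name noise
current = log.get_console_formatter()
print(ConsoleFormat.LEVEL in current)  # True: the combined flags, not the group name
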
    # MARK: console handler
    def __create_console_handler(
        self, handler_name: str,
        log_level_console: LoggingLevel = LoggingLevel.WARNING,
        filter_exceptions: bool = True,
        console_format_type: ConsoleFormat = ConsoleFormatSettings.ALL,
    ) -> logging.StreamHandler[TextIO]:
        # console logger
        if not self.validate_log_level(log_level_console):
            log_level_console = self.DEFAULT_LOG_LEVEL_CONSOLE
        console_handler = logging.StreamHandler()
        # print(f"Console format type: {console_format_type}")
        # build the format string based on what flags are set
        # format_string = self.__build_console_format_from_string(console_format_type)
        # # basic date, but this will be overridden to ISO in formatTime
        # # format_date = "%Y-%m-%d %H:%M:%S"
        # # color or not
        # if self.log_settings['console_color_output_enabled']:
        #     # formatter_console = CustomConsoleFormatter(format_string, datefmt=format_date)
        #     formatter_console = CustomConsoleFormatter(format_string)
        # else:
        #     # formatter_console = logging.Formatter(format_string, datefmt=format_date)
        #     formatter_console = logging.Formatter(format_string)
        # # set the time format
        # self.__set_time_format_for_console_formatter(formatter_console, console_format_type)
        console_handler.set_name(handler_name)
        console_handler.setLevel(log_level_console.name)
        # do not show exception logs on the console
        console_handler.addFilter(CustomHandlerFilter('console', filter_exceptions))
        console_handler.setFormatter(self.__set_console_formatter(console_format_type))
        return console_handler

    # MARK: file handler
    def __create_file_handler(
        self, handler_name: str,
        log_level_file: LoggingLevel, log_path: Path,
        # for TimedRotating, if per_run_log is off
        when: str = "D", interval: int = 1, backup_count: int = 0
    ) -> logging.handlers.TimedRotatingFileHandler | logging.FileHandler:
        # file logger
        # when: S/M/H/D/W0-W6/midnight
        # interval: how many, 1D = every day
        # backup_count: how many old files to keep, 0 = all
        if not self.validate_log_level(log_level_file):
            log_level_file = self.DEFAULT_LOG_LEVEL_FILE
        if self.log_settings['per_run_log']:
            # log path: remove the stem (".log"), then add the datetime and add .log again
            now = datetime.now()
            # zero-padded milliseconds derived from the microseconds part
            new_stem = f"{log_path.stem}.{now.strftime('%Y-%m-%d_%H-%M-%S')}.{now.microsecond // 1000:03d}"
            file_handler = logging.FileHandler(
                filename=log_path.with_name(f"{new_stem}{log_path.suffix}"),
                encoding="utf-8",
            )
        else:
            file_handler = logging.handlers.TimedRotatingFileHandler(
                filename=log_path,
                encoding="utf-8",
                when=when,
                interval=interval,
                backupCount=backup_count
            )
        formatter_file_handler = logging.Formatter(
            (
                # time stamp
                # '[%(asctime)s.%(msecs)03d] '
                '[%(asctime)s] '
                # log name
                '[%(name)s] '
                # filename + pid
                # '[%(filename)s:%(process)d] '
                # pid + path/filename + func + line number
                '[%(process)d:%(pathname)s:%(funcName)s:%(lineno)d] '
                # error level
                '<%(levelname)s> '
                # message
                '%(message)s'
            ),
            datefmt="%Y-%m-%dT%H:%M:%S",
        )
        formatter_file_handler.formatTime = (
            lambda record, datefmt=None:
            datetime
            .fromtimestamp(record.created)
            .astimezone()
            .isoformat(sep="T", timespec="microseconds")
        )
        file_handler.set_name(handler_name)
        file_handler.setLevel(log_level_file.name)
        # do not show errors flagged with console (they are from exceptions)
        file_handler.addFilter(CustomHandlerFilter('file'))
        file_handler.setFormatter(formatter_file_handler)
        return file_handler

    # MARK: init listener
    def __init_listener(self, log_queue: 'Queue[str] | None' = None):
        """
        If we have a Queue option start the logging queue

        Keyword Arguments:
            log_queue {Queue[str] | None} -- _description_ (default: {None})
        """
        if log_queue is None:
            return
        self.log_queue = log_queue
        atexit.register(self.stop_listener)
        self.listener = logging.handlers.QueueListener(
            self.log_queue,
            *self.handlers.values(),
            respect_handler_level=True
        )
        self.listener.start()

    def stop_listener(self):
        """
        stop the listener
        """
        if self.listener is not None:
            self.flush()
            self.listener.stop()
            # clear so repeated calls (atexit plus __del__) are safe
            self.listener = None

    # MARK: init main log
    def __init_log(self, log_name: str) -> None:
        """
        Initialize the main logger
        """
        queue_handler: logging.handlers.QueueHandler | None = None
        if self.log_queue is not None:
            queue_handler = logging.handlers.QueueHandler(self.log_queue)
        # overall logger settings
        self.logger = logging.getLogger(log_name)
        # add all the handlers
        if queue_handler is None:
            for handler in self.handlers.values():
                self.logger.addHandler(handler)
        else:
            self.logger.addHandler(queue_handler)
        # set maximum logging level for all logging output
        # log level filtering is done per handler
        self.logger.setLevel(logging.DEBUG)
        # short names
        self.lg = self.logger
        self.l = self.logger

    # MARK: init logger for Fork/Thread
    @staticmethod
    def init_worker_logging(log_queue: 'Queue[str]') -> logging.Logger:
        """
        This initializes a logger that can be used in pool/thread queue calls
        call in the worker initializer as "Log.init_worker_logging(log_queue)"
        """
        queue_handler = logging.handlers.QueueHandler(log_queue)
        # the getLogger call MUST be WITHOUT a logger name
        root_logger = logging.getLogger()
        # base logging level, filtering is done in the handlers
        root_logger.setLevel(logging.DEBUG)
        root_logger.handlers.clear()
        root_logger.addHandler(queue_handler)

        # for debug only
        root_logger.debug('[LOGGER] Init log: %s - %s', log_queue, root_logger.handlers)

        return root_logger

    def get_logger_settings(self) -> LoggerInit:
        """
        get the logger settings we need to init the Logger class

        Returns:
            LoggerInit -- _description_
        """
        return {
            "logger": self.logger,
            "log_queue": self.log_queue
        }

# __END__

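Putting the queue pieces together for multiprocessing, as the module docstring describes (a sketch; the worker body is illustrative):

import logging
from multiprocessing import Pool, Queue
from pathlib import Path
from corelibs.logging_handling.log import Log

def work(item: int) -> int:
    logging.getLogger().info("processing %s", item)  # routed through the queue handler
    return item * 2

if __name__ == "__main__":
    log_queue: "Queue[str]" = Queue()
    log = Log(Path("/tmp/pool.log"), "pool", log_settings={"log_queue": log_queue})
    with Pool(processes=2, initializer=Log.init_worker_logging, initargs=(log_queue,)) as pool:
        results = pool.map(work, range(4))
    log.info("results: %s", results)
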
89  src/corelibs/logging_handling/logging_level_handling/logging_level.py  Normal file
@@ -0,0 +1,89 @@
"""
All logging levels
"""

import logging
from typing import Any
from enum import Enum


class LoggingLevel(Enum):
    """
    Log class levels
    """
    NOTSET = logging.NOTSET  # 0
    DEBUG = logging.DEBUG  # 10
    INFO = logging.INFO  # 20
    WARNING = logging.WARNING  # 30
    ERROR = logging.ERROR  # 40
    CRITICAL = logging.CRITICAL  # 50
    ALERT = 55  # 55 (for Sys log)
    EMERGENCY = 60  # 60 (for Sys log)
    EXCEPTION = 70  # 70 (manually set, error but with higher level)
    # Alternative names
    WARN = logging.WARN  # 30 (alias for WARNING)
    FATAL = logging.FATAL  # 50 (alias for CRITICAL)

    @classmethod
    def from_string(cls, level_str: str):
        """Convert string to LogLevel enum"""
        try:
            return cls[level_str.upper()]
        except KeyError as e:
            raise ValueError(f"Invalid log level: {level_str}") from e
        except AttributeError as e:
            raise ValueError(f"Invalid log level: {level_str}") from e

    @classmethod
    def from_int(cls, level_int: int):
        """Convert integer to LogLevel enum"""
        try:
            return cls(level_int)
        except ValueError as e:
            raise ValueError(f"Invalid log level: {level_int}") from e

    @classmethod
    def from_any(cls, level_any: Any):
        """
        Convert any value
        if it is already a LoggingLevel return as is, else try to convert from int or string

        Arguments:
            level_any {Any} -- _description_

        Returns:
            _type_ -- _description_
        """
        if isinstance(level_any, LoggingLevel):
            return level_any
        if isinstance(level_any, int):
            return cls.from_int(level_any)
        return cls.from_string(level_any)

    def to_logging_level(self):
        """Convert to logging module level"""
        return self.value

    def to_lower_case(self):
        """return the lower case name"""
        return self.name.lower()

    def __str__(self):
        return self.name

    def includes(self, level: 'LoggingLevel'):
        """
        if the given level is included in the set level
        eg: INFO set, ERROR is included in INFO because INFO level would print ERROR
        """
        return self.value <= level.value

    def is_higher_than(self, level: 'LoggingLevel'):
        """if this level is higher than the given one"""
        return self.value > level.value

    def is_lower_than(self, level: 'LoggingLevel'):
        """if this level is lower than the given one"""
        return self.value < level.value

# __END__

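The conversion and comparison helpers in action; from_any accepts the enum itself, ints, or names:

from corelibs.logging_handling.logging_level_handling.logging_level import LoggingLevel

print(LoggingLevel.from_any("warning"))                 # WARNING (name, case-insensitive)
print(LoggingLevel.from_any(10))                        # DEBUG
print(LoggingLevel.INFO.includes(LoggingLevel.ERROR))   # True: INFO output would print ERROR
print(LoggingLevel.ALERT.is_higher_than(LoggingLevel.CRITICAL))  # True (55 > 50)
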
0  src/corelibs/math_handling/__init__.py  Normal file
38  src/corelibs/math_handling/math_helpers.py  Normal file
@@ -0,0 +1,38 @@
"""
Various math helpers
"""

from warnings import deprecated
import math


@deprecated("Use math.gcd instead")
def gcd(a: int, b: int):
    """
    Calculate: Greatest Common Divisor

    Arguments:
        a {int} -- _description_
        b {int} -- _description_

    Returns:
        _type_ -- _description_
    """
    return math.gcd(a, b)


@deprecated("Use math.lcm instead")
def lcd(a: int, b: int):
    """
    Calculate: Least Common Denominator (computed as the least common multiple)

    Arguments:
        a {int} -- _description_
        b {int} -- _description_

    Returns:
        _type_ -- _description_
    """
    return math.lcm(a, b)

# __END__

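The drop-in replacements are the stdlib calls themselves; a migration sketch:

import math

assert math.gcd(12, 18) == 6
assert math.lcm(4, 6) == 12  # lcm of the denominators gives the least common denominator
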
0  src/corelibs/py.typed  Normal file
0  src/corelibs/requests_handling/__init__.py  Normal file
23  src/corelibs/requests_handling/auth_helpers.py  Normal file
@@ -0,0 +1,23 @@
"""
Various HTTP auth helpers
"""

from warnings import deprecated
from corelibs_requests.auth_helpers import basic_auth as corelibs_basic_auth


@deprecated("use corelibs_requests.auth_helpers.basic_auth instead")
def basic_auth(username: str, password: str) -> str:
    """
    setup basic auth, for debug

    Arguments:
        username {str} -- _description_
        password {str} -- _description_

    Returns:
        str -- _description_
    """
    return corelibs_basic_auth(username, password)

# __END__

37  src/corelibs/requests_handling/caller.py  Normal file
@@ -0,0 +1,37 @@
"""
requests lib interface
V2 call type
"""

from warnings import warn
from corelibs_requests.caller import (
    Caller as CoreLibsCaller,
    ProxyConfig as CoreLibsProxyConfig,
    ErrorResponse as CoreLibsErrorResponse
)


class ErrorResponse(CoreLibsErrorResponse):
    """
    Error response structure. This is returned if a request could not be completed
    """


class ProxyConfig(CoreLibsProxyConfig):
    """
    Socks proxy settings
    """


class Caller(CoreLibsCaller):
    """
    requests lib interface
    """


warn(
    "corelibs.requests_handling.caller is deprecated, use corelibs_requests.caller instead",
    DeprecationWarning, stacklevel=2
)

# __END__

44  src/corelibs/script_handling/progress.py  Normal file
@@ -0,0 +1,44 @@
"""
AUTHOR: Clemens Schwaighofer
DATE CREATED: 2009/7/24 (2025/7/2)
DESCRIPTION: progress percent class (perl -> python)

HOW TO USE
* load
  from progress import Progress
* init
  prg = Progress()
  allowed parameters to pass are (in order)
  - verbose (0/1/...): show output
  - precision (-2~10): -2 (5%), -1 (10%), 0 (normal 0-100%), 1~10 (100.m~%)
  - microtime (1/0/-1): show microtime in eta/run time
  - wide time (bool): pad time so the time column doesn't change the width of the line
  - prefix line break (bool): add a line break before the string and not only after
  prg = Progress(verbose = 1, precision = 2)
* settings methods
  set_wide_time(bool)
  set_microtime(int -1/0/1)
  set_prefix_lb(bool)
  set_verbose(0/1 int)
  set_precision(-2~10 int)
  set_linecount(int)
  set_filesize(int)
  set_start_time(time optional)
  set_eta_start_time(time optional)
  set_end_time(time optional)
  show_position(file pos optional)
"""

from warnings import warn
from corelibs_progress.progress import Progress as CoreProgress  # for type checking only


class Progress(CoreProgress):
    """
    file progress output information
    """


warn("Use 'corelibs_progress.progress.Progress'", DeprecationWarning, stacklevel=2)

# __END__

55  src/corelibs/script_handling/script_helpers.py  Normal file
@@ -0,0 +1,55 @@
"""
Helper methods for scripts
"""

from warnings import deprecated
from pathlib import Path
from corelibs_script.script_support import (
    wait_abort as corelibs_wait_abort,
    lock_run as corelibs_lock_run,
    unlock_run as corelibs_unlock_run,
)


@deprecated("use corelibs_script.script_support.wait_abort instead")
def wait_abort(sleep: int = 5) -> None:
    """
    wait a certain time for an abort command

    Keyword Arguments:
        sleep {int} -- _description_ (default: {5})
    """
    corelibs_wait_abort(sleep)


@deprecated("use corelibs_script.script_support.lock_run instead")
def lock_run(lock_file: Path) -> None:
    """
    lock a script run
    needed is the lock file name

    Arguments:
        lock_file {Path} -- _description_

    Raises:
        IOError: _description_
        Exception: _description_
        IOError: _description_
    """
    corelibs_lock_run(lock_file)


@deprecated("use corelibs_script.script_support.unlock_run instead")
def unlock_run(lock_file: Path) -> None:
    """
    removes the lock file

    Arguments:
        lock_file {Path} -- _description_

    Raises:
        Exception: _description_
    """
    corelibs_unlock_run(lock_file)

# __END__

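The lock helpers form a guard pattern around a script body; a sketch assuming a writable lock path, with run_script_body as a stand-in for the actual work:

from pathlib import Path
from corelibs.script_handling.script_helpers import lock_run, unlock_run, wait_abort

lock_file = Path("/tmp/myscript.lock")
lock_run(lock_file)      # raises if another run holds the lock
try:
    wait_abort(sleep=5)  # five second window to abort before real work starts
    run_script_body()    # hypothetical work function
finally:
    unlock_run(lock_file)  # always release, even on error
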
0  src/corelibs/string_handling/__init__.py  Normal file
26  src/corelibs/string_handling/byte_helpers.py  Normal file
@@ -0,0 +1,26 @@
"""
Format bytes
"""

from warnings import deprecated
from corelibs_strings.string_format import format_bytes as corelibs_format_bytes


@deprecated("Use corelibs_strings.string_format.format_bytes instead")
def format_bytes(byte_value: float | int | str) -> str:
    """
    Format a byte value to a human-readable string

    Arguments:
        byte_value {float | int | str} -- _description_

    Returns:
        str -- _description_
    """
    return corelibs_format_bytes(
        byte_value=byte_value,
    )

# __END__

19  src/corelibs/string_handling/double_byte_string_format.py  Normal file
@@ -0,0 +1,19 @@
"""
Format double byte strings to exact length
"""

from warnings import warn
from corelibs_double_byte_format.double_byte_string_format import (
    DoubleByteFormatString as CorelibsDoubleByteFormatString
)


class DoubleByteFormatString(CorelibsDoubleByteFormatString):
    """
    Format a string to exact length
    """


warn("Use 'corelibs_double_byte_format.double_byte_string_format' instead", DeprecationWarning, stacklevel=2)

# __END__

src/corelibs/string_handling/string_hash.py
@@ -2,10 +2,11 @@
 Various hash helpers for strings and things
 """
 
-import re
-import hashlib
+from warnings import deprecated
+from corelibs_hash.string_hash import crc32b_fix as corelibs_crc32b_fix, sha1_short as corelibs_sha1_short
 
 
+@deprecated("Use corelibs_hash.string_hash.crc32b_fix instead")
 def crc32b_fix(crc: str) -> str:
     """
     fix a CRC32B with wrong order (from old PHP)
@@ -16,15 +17,10 @@ def crc32b_fix(crc: str) -> str:
     Returns:
         str -- _description_
     """
-    # left pad with 0 to 8 chars
-    crc = ("0" * (8 - len(crc))) + crc
-    # flip two chars (byte hex)
-    crc = re.sub(
-        r"^([a-z0-9]{2})([a-z0-9]{2})([a-z0-9]{2})([a-z0-9]{2})$", r"\4\3\2\1", crc
-    )
-    return crc
+    return corelibs_crc32b_fix(crc)
 
 
+@deprecated("Use corelibs_hash.string_hash.sha1_short instead")
 def sha1_short(string: str) -> str:
     """
     Return a 9 character long SHA1 part
@@ -35,6 +31,6 @@ def sha1_short(string: str) -> str:
     Returns:
         str -- _description_
     """
-    return hashlib.sha1(string.encode('utf-8')).hexdigest()[:9]
+    return corelibs_sha1_short(string)
 
 # __END__

103  src/corelibs/string_handling/string_helpers.py  Normal file
@@ -0,0 +1,103 @@
"""
String helpers
"""

from warnings import deprecated
from corelibs_strings.string_support import (
    shorten_string as corelibs_shorten_string,
    left_fill as corelibs_left_fill,
    prepare_url_slash as corelibs_prepare_url_slash,
)
from corelibs_strings.string_format import format_number as corelibs_format_number


@deprecated("Use corelibs_strings.string_support.shorten_string instead")
def shorten_string(
    string: str | int | float, length: int, hard_shorten: bool = False, placeholder: str = " [~]"
) -> str:
    """
    check if an entry is too long and cut it, but only for console output
    Note that if there are no spaces in the string, it will automatically use the hard split mode

    Args:
        string (str | int | float): _description_
        length (int): _description_
        hard_shorten (bool): if shortening should be done on fixed string length. Default: False
        placeholder (str): placeholder string. Default: " [~]"

    Returns:
        str: _description_
    """
    return corelibs_shorten_string(
        string=string,
        length=length,
        hard_shorten=hard_shorten,
        placeholder=placeholder,
    )


@deprecated("Use corelibs_strings.string_support.left_fill instead")
def left_fill(string: str, width: int, char: str = " ") -> str:
    """
    left fill for a certain length to fill a max size
    string is the original string to left pad, width is the maximum width
    that needs to be filled, char is the filler character

    Arguments:
        string {str} -- _description_
        width {int} -- _description_

    Keyword Arguments:
        char {str} -- _description_ (default: {" "})

    Returns:
        str -- _description_
    """
    return corelibs_left_fill(
        string=string,
        width=width,
        char=char,
    )


@deprecated("Use corelibs_strings.string_format.format_number instead")
def format_number(number: float, precision: int = 0) -> str:
    """
    format numbers; trailing zeros currently do not work,
    use {:,} or {:,.f} or {:,.<N>f} (<N> = number) instead of this

    The upper limit of the precision depends on the value of the number itself;
    very large numbers will have no precision at all any more

    Arguments:
        number {float} -- _description_

    Keyword Arguments:
        precision {int} -- _description_ (default: {0})

    Returns:
        str -- _description_
    """
    return corelibs_format_number(
        number=number,
        precision=precision,
    )


@deprecated("Use corelibs_strings.string_support.prepare_url_slash instead")
def prepare_url_slash(url: str) -> str:
    """
    if the URL does not start with /, add a slash
    strip all double slashes in the URL

    Arguments:
        url {str} -- _description_

    Returns:
        str -- _description_
    """
    return corelibs_prepare_url_slash(
        url=url,
    )

# __END__

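A sketch of the four helpers; the commented results are inferred from the docstrings, not verified output:

from corelibs.string_handling.string_helpers import (
    shorten_string, left_fill, format_number, prepare_url_slash
)

print(shorten_string("a fairly long console line", 15))  # cut at a space, placeholder " [~]" appended
print(left_fill("42", 6, char="0"))                      # expected "000042"
print(format_number(1234567.891, precision=2))           # expected "1,234,567.89"
print(prepare_url_slash("path//to///x"))                 # leading slash added, doubles stripped
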
18  src/corelibs/string_handling/text_colors.py  Normal file
@@ -0,0 +1,18 @@
"""
Basic ANSI colors

Set colors with print(f"something {Colors.yellow}colorful{Colors.end}")
bold + underline + color combinations are possible.
"""

from warnings import deprecated
from corelibs_text_colors.text_colors import Colors as ColorsNew


@deprecated("Use corelibs_text_colors.text_colors instead")
class Colors(ColorsNew):
    """
    ANSI colors defined
    """

# __END__

0  src/corelibs/var_handling/__init__.py  Normal file
25  src/corelibs/var_handling/enum_base.py  Normal file
@@ -0,0 +1,25 @@
"""
Enum base classes
"""

import warnings
from corelibs_enum_base.enum_base import EnumBase as CorelibsEnumBase


class EnumBase(CorelibsEnumBase):
    """
    base for enum

    .. deprecated::
        Use corelibs_enum_base.enum_base.EnumBase instead
    DEPRECATED: Use corelibs_enum_base.enum_base.EnumBase instead

    lookup_any and from_any will return "EnumBase" and the sub class name
    run the return again through "from_any" to get a clean value, or cast it
    """


# At the module level, issue a deprecation warning
warnings.warn("Use corelibs_enum_base.enum_base.EnumBase instead", DeprecationWarning, stacklevel=2)

# __END__

15  src/corelibs/var_handling/enum_base.pyi  Normal file
@@ -0,0 +1,15 @@
"""
Enum base classes [STPUB]
"""

from typing_extensions import deprecated
from corelibs_enum_base.enum_base import EnumBase as CorelibsEnumBase


@deprecated("Use corelibs_enum_base.enum_base.EnumBase instead")
class EnumBase(CorelibsEnumBase):
    """
    base for enum
    lookup_any and from_any will return "EnumBase" and the sub class name
    run the return again through "from_any" to get a clean value, or cast it
    """
54
src/corelibs/var_handling/var_helpers.py
Normal file
@@ -0,0 +1,54 @@
"""
variable convert, check, etc. helpers
"""

from typing import Any
from warnings import deprecated
import corelibs_var.var_helpers


@deprecated("Use corelibs_var.var_helpers.is_int instead")
def is_int(string: Any) -> bool:
    """
    check if a value is int

    Arguments:
        string {Any} -- _description_

    Returns:
        bool -- _description_
    """
    return corelibs_var.var_helpers.is_int(string)


@deprecated("Use corelibs_var.var_helpers.is_float instead")
def is_float(string: Any) -> bool:
    """
    check if a value is float

    Arguments:
        string {Any} -- _description_

    Returns:
        bool -- _description_
    """
    return corelibs_var.var_helpers.is_float(string)


@deprecated("Use corelibs_var.var_helpers.str_to_bool instead")
def str_to_bool(string: str):
    """
    convert string to bool

    Arguments:
        string {str} -- _description_

    Raises:
        ValueError: _description_

    Returns:
        _type_ -- _description_
    """
    return corelibs_var.var_helpers.str_to_bool(string)

# __END__
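For orientation, a quick interactive sketch of the three wrappers above; the expected return values are assumptions read off the names and docstrings, the underlying corelibs_var implementations define the exact semantics:

    from corelibs.var_handling.var_helpers import is_int, is_float, str_to_bool

    print(is_int("42"))        # expected True
    print(is_int("42.5"))      # expected False
    print(is_float("42.5"))    # expected True
    print(str_to_bool("yes"))  # expected True; per the docstring, unparseable input raises ValueError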
109
test-run/check_handling/regex_checks.py
Normal file
@@ -0,0 +1,109 @@
"""
Test check handling for regex checks
"""

from corelibs_text_colors.text_colors import Colors
from corelibs.check_handling.regex_constants import (
    compile_re, DOMAIN_WITH_LOCALHOST_REGEX, EMAIL_BASIC_REGEX, NAME_EMAIL_BASIC_REGEX, SUB_EMAIL_BASIC_REGEX
)
from corelibs.check_handling.regex_constants_compiled import (
    COMPILED_DOMAIN_WITH_LOCALHOST_REGEX, COMPILED_EMAIL_BASIC_REGEX,
    COMPILED_NAME_EMAIL_SIMPLE_REGEX, COMPILED_NAME_EMAIL_BASIC_REGEX
)

NAME_EMAIL_SIMPLE_REGEX = r"""
^\s*(?:"(?P<name1>[^"]+)"\s*<(?P<email1>[^>]+)>|
(?P<name2>.+?)\s*<(?P<email2>[^>]+)>|
<(?P<email3>[^>]+)>|
(?P<email4>[^\s<>]+))\s*$
"""


def domain_test():
    """
    domain regex test
    """
    print("=" * 30)
    test_domains = [
        "example.com",
        "localhost",
        "subdomain.localhost",
        "test.localhost.com",
        "some-domain.org"
    ]

    regex_domain_check = COMPILED_DOMAIN_WITH_LOCALHOST_REGEX
    print(f"REGEX: {DOMAIN_WITH_LOCALHOST_REGEX}")
    print(f"Check regex: {regex_domain_check.search('localhost')}")

    for domain in test_domains:
        if regex_domain_check.search(domain):
            print(f"Matched: {domain}")
        else:
            print(f"Did not match: {domain}")


def email_test():
    """
    email regex test
    """
    print("=" * 30)
    email_list = """
    e@bar.com
    <f@foobar.com>
    "Master" <foobar@bar.com>
    "not valid" not@valid.com
    also not valid not@valid.com
    some header <something@bar.com>
    test master <master@master.com>
    日本語 <japan@jp.net>
    "ひほん カケ苦" <foo@bar.com>
    single@entry.com
    arsch@popsch.com
    test open <open@open.com>
    """

    print(f"REGEX: SUB_EMAIL_BASIC_REGEX: {SUB_EMAIL_BASIC_REGEX}")
    print(f"REGEX: EMAIL_BASIC_REGEX: {EMAIL_BASIC_REGEX}")
    print(f"REGEX: COMPILED_NAME_EMAIL_SIMPLE_REGEX: {COMPILED_NAME_EMAIL_SIMPLE_REGEX}")
    print(f"REGEX: NAME_EMAIL_BASIC_REGEX: {NAME_EMAIL_BASIC_REGEX}")

    basic_email = COMPILED_EMAIL_BASIC_REGEX
    sub_basic_email = compile_re(SUB_EMAIL_BASIC_REGEX)
    simple_name_email_regex = COMPILED_NAME_EMAIL_SIMPLE_REGEX
    full_name_email_regex = COMPILED_NAME_EMAIL_BASIC_REGEX
    for email in email_list.splitlines():
        email = email.strip()
        if not email:
            continue
        print(f">>> Testing: {email}")
        if not basic_email.match(email):
            print(f"{Colors.red}[EMAIL ] No match: {email}{Colors.reset}")
        else:
            print(f"{Colors.green}[EMAIL ] Matched : {email}{Colors.reset}")
        if not sub_basic_email.match(email):
            print(f"{Colors.red}[SUB   ] No match: {email}{Colors.reset}")
        else:
            print(f"{Colors.green}[SUB   ] Matched : {email}{Colors.reset}")
        if not simple_name_email_regex.match(email):
            print(f"{Colors.red}[SIMPLE] No match: {email}{Colors.reset}")
        else:
            print(f"{Colors.green}[SIMPLE] Matched : {email}{Colors.reset}")
        if not full_name_email_regex.match(email):
            print(f"{Colors.red}[FULL  ] No match: {email}{Colors.reset}")
        else:
            print(f"{Colors.green}[FULL  ] Matched : {email}{Colors.reset}")


def main():
    """
    Test regex checks
    """
    domain_test()
    email_test()


if __name__ == "__main__":
    main()

# __END__
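The module-level NAME_EMAIL_SIMPLE_REGEX above uses alternation, so a matching name/email pair can land in any of the numbered named groups; a caller has to coalesce them. A small sketch using that exact constant (re.VERBOSE is needed because the pattern spans multiple lines):

    import re

    simple = re.compile(NAME_EMAIL_SIMPLE_REGEX, re.VERBOSE)
    m = simple.match('"Master" <foobar@bar.com>')
    if m:
        # unmatched groups are None, so "or" picks the populated alternative
        name = m.group("name1") or m.group("name2")
        email = m.group("email1") or m.group("email2") or m.group("email3") or m.group("email4")
        print(name, email)  # Master foobar@bar.com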
44
test-run/config_handling/config/settings.ini
Normal file
@@ -0,0 +1,44 @@
[TestA]
foo=bar
overload_from_args=bar
foobar=1
bar=st
arg_overload=should_not_be_set_because_of_command_line_is_list
arg_overload_list=too,be,long
arg_overload_not_set=this should not be set because of override flag
just_values=too,be,long
some_match=foo
some_match_list=foo,bar
test_list=a,b,c,d f, g h
other_list=a|b|c|d|
third_list=xy|ab|df|fg
empty_list=
str_length=foobar
int_range=20
int_range_not_set=
int_range_not_set_empty_set=5
bool_var=True
#
match_target=foo
match_target_list=foo,bar,baz
#
match_source_a=foo
match_source_b=foo
; match_source_c=foo
match_source_list=foo,bar

[TestB]
element_a=Static energy
element_b=123.5
element_c=True
elemend_d=AB:CD;EF
email=foo@bar.com,other+bar-fee@domain-com.cp,
email_not_mandatory=
email_bad=gii@bar.com

[LoadTest]
a.b.c=foo
d:e:f=bar

[ErrorTest]
some_value=42
2
test-run/config_handling/log/.gitignore
vendored
Normal file
@@ -0,0 +1,2 @@
*
!.gitignore
148
test-run/config_handling/settings_loader.py
Normal file
@@ -0,0 +1,148 @@
"""
Settings loader test
"""

import re
from pathlib import Path
from corelibs_dump_data.dump_data import dump_data
from corelibs.logging_handling.log import Log
from corelibs.config_handling.settings_loader import SettingsLoader
from corelibs.config_handling.settings_loader_handling.settings_loader_check import SettingsLoaderCheck

SCRIPT_PATH: Path = Path(__file__).resolve().parent
ROOT_PATH: Path = SCRIPT_PATH
CONFIG_DIR: Path = Path("config")
LOG_DIR: Path = Path("log")
CONFIG_FILE: str = "settings.ini"


def main():
    """
    Main run
    """

    # for log testing
    log = Log(
        log_path=ROOT_PATH.joinpath(LOG_DIR, 'settings_loader.log'),
        log_name="Settings Loader",
        log_settings={
            "log_level_console": 'DEBUG',
            "log_level_file": 'DEBUG',
        }
    )
    log.logger.info('Settings loader')

    value = "2025/1/1"
    regex_c = re.compile(SettingsLoaderCheck.CHECK_SETTINGS['string.date']['regex'], re.VERBOSE)
    result = regex_c.search(value)
    log.info(f"regex {regex_c} check against {value} -> {result}")

    sl = SettingsLoader(
        {
            'overload_from_args': 'OVERLOAD from ARGS',
            'arg_overload': ['should', 'not', 'be', 'set'],
            'arg_overload_list': ['overload', 'this', 'list'],
            'arg_overload_not_set': "DO_NOT_SET",
        },
        ROOT_PATH.joinpath(CONFIG_DIR, CONFIG_FILE),
        log=log
    )
    try:
        config_load = 'TestA'
        config_data = sl.load_settings(
            config_load,
            {
                # "doesnt": ["split:,"],
                "overload_from_args": ["args_override:yes", "mandatory:yes"],
                "foobar": ["check:int"],
                "bar": ["mandatory:yes"],
                "arg_overload_list": ["args_override:yes", "split:,",],
                "arg_overload_not_set": [],
                "some_match": ["matching:foo|bar"],
                "some_match_list": ["split:,", "matching:foo|bar"],
                "test_list": [
                    "check:string.alphanumeric",
                    "split:,"
                ],
                "other_list": ["split:|"],
                "third_list": [
                    "split:|",
                    "check:string.alphanumeric"
                ],
                "empty_list": [
                    "split:,",
                ],
                "str_length": [
                    "length:2-10"
                ],
                "int_range": [
                    "range:2-50"
                ],
                "int_range_not_set": [
                    "range:2-50"
                ],
                "int_range_not_set_empty_set": [
                    "empty:"
                ],
                "bool_var": ["convert:bool"],
                "match_target": ["matching:foo"],
                "match_target_list": ["split:,", "matching:foo|bar|baz",],
                "match_source_a": ["in:match_target"],
                "match_source_b": ["in:match_target_list"],
                "match_source_list": ["split:,", "in:match_target_list"],
            }
        )
        print(f"[{config_load}] Load: {config_load} -> {dump_data(config_data)}")
    except ValueError as e:
        print(f"Could not load settings: {e}")

    try:
        config_load = 'TestB'
        config_data = sl.load_settings(
            config_load,
            {
                "email": [
                    "split:,",
                    "mandatory:yes",
                    "check:string.email.basic"
                ],
                "email_not_mandatory": [
                    "split:,",
                    # "mandatory:yes",
                    "check:string.email.basic"
                ],
                "email_bad": [
                    "split:,",
                    "mandatory:yes",
                    "check:string.email.basic"
                ]
            }
        )
        print(f"[{config_load}] Load: {config_load} -> {dump_data(config_data)}")
    except ValueError as e:
        print(f"Could not load settings: {e}")

    try:
        config_load = 'LoadTest'
        config_data = sl.load_settings(config_load)
        print(f"[{config_load}] Load: {config_load} -> {dump_data(config_data)}")
    except ValueError as e:
        print(f"Could not load settings: {e}")

    try:
        config_load = 'ErrorTest'
        config_data = sl.load_settings(
            config_load,
            {
                "some_value": [
                    "check:string.email.basic",
                ],
            }
        )
        print(f"[{config_load}] Load: {config_load} -> {dump_data(config_data)}")
    except ValueError as e:
        print(f"Could not load settings: {e}")


if __name__ == "__main__":
    main()
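The rule strings passed to load_settings above follow a "verb:argument" convention (split, check, matching, range, length, convert, in, empty, mandatory, args_override). A distilled sketch of the smallest useful call, reusing the names defined in the test above and only rules that appear in it:

    sl = SettingsLoader({}, ROOT_PATH.joinpath(CONFIG_DIR, CONFIG_FILE), log=log)
    data = sl.load_settings('TestA', {
        "foobar": ["check:int"],       # validate the value as an integer
        "test_list": ["split:,"],      # split a comma separated value into a list
        "bool_var": ["convert:bool"],  # convert "True"/"False" strings to bool
    })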
236
test-run/datetime_handling/datetime_helpers.py
Normal file
@@ -0,0 +1,236 @@
#!/usr/bin/env python3

"""
date string helper test
"""

from datetime import datetime
from corelibs.datetime_handling.datetime_helpers import (
    get_datetime_iso8601, get_system_timezone, parse_timezone_data, validate_date,
    parse_flexible_date, compare_dates, find_newest_datetime_in_list,
    parse_day_of_week_range, parse_time_range, times_overlap_or_connect, is_time_in_range,
    reorder_weekdays_from_today
)


def __get_datetime_iso8601():
    """
    ISO 8601 datetime test
    """
    for tz in [
        '', 'Asia/Tokyo', 'UTC', 'Europe/Vienna',
        'America/New_York', 'Australia/Sydney',
        'invalid'
    ]:
        print(f"{tz} -> {get_datetime_iso8601(tz)}")


def __parse_timezone_data():
    """
    timezone abbreviation parse test
    """
    for tz in [
        'JST', 'KST', 'UTC', 'CET', 'CEST',
    ]:
        print(f"{tz} -> {parse_timezone_data(tz)}")


def __validate_date():
    """
    date validation test
    """

    test_dates = [
        "2024-01-01",
        "2024-02-29",  # Leap year
        "2023-02-29",  # Invalid date
        "2024-13-01",  # Invalid month
        "2024-00-10",  # Invalid month
        "2024-04-31",  # Invalid day
        "invalid-date"
    ]

    for date_str in test_dates:
        is_valid = validate_date(date_str)
        print(f"Date '{date_str}' is valid: {is_valid}")

    # also test not before and not after
    not_before_dates = [
        "2023-12-31",
        "2024-01-01",
        "2024-02-29",
    ]
    not_after_dates = [
        "2024-12-31",
        "2024-11-30",
        "2025-01-01",
    ]

    for date_str in not_before_dates:
        datetime.strptime(date_str, "%Y-%m-%d")  # Ensure valid date format
        is_valid = validate_date(date_str, not_before=datetime.strptime("2024-01-01", "%Y-%m-%d"))
        print(f"Date '{date_str}' is valid (not before 2024-01-01): {is_valid}")

    for date_str in not_after_dates:
        is_valid = validate_date(date_str, not_after=datetime.strptime("2024-12-31", "%Y-%m-%d"))
        print(f"Date '{date_str}' is valid (not after 2024-12-31): {is_valid}")

    for date_str in test_dates:
        is_valid = validate_date(
            date_str,
            not_before=datetime.strptime("2024-01-01", "%Y-%m-%d"),
            not_after=datetime.strptime("2024-12-31", "%Y-%m-%d")
        )
        print(f"Date '{date_str}' is valid (2024 only): {is_valid}")


def __parse_flexible_date():
    """
    flexible date parse test
    """
    for date_str in [
        "2024-01-01",
        "01/02/2024",
        "February 29, 2024",
        "Invalid date",
        "2025-01-01 12:18:10",
        "2025-01-01 12:18:10.566",
        "2025-01-01T12:18:10.566",
        "2025-01-01T12:18:10.566+02:00",
    ]:
        print(f"{date_str} -> {parse_flexible_date(date_str)}")


def __compare_dates():
    """
    date compare test
    """
    for date1, date2 in [
        ("2024-01-01 12:00:00", "2024-01-01 15:30:00"),
        ("2024-01-02", "2024-01-01"),
        ("2024-01-01T10:00:00+02:00", "2024-01-01T08:00:00Z"),
        ("invalid-date", "2024-01-01"),
        ("2024-01-01", "invalid-date"),
        ("invalid-date", "also-invalid"),
    ]:
        result = compare_dates(date1, date2)
        print(f"Comparing '{date1}' and '{date2}': {result}")


def __find_newest_datetime_in_list():
    """
    newest datetime in list test
    """
    date_list = [
        "2024-01-01 12:00:00",
        "2024-01-02 09:30:00",
        "2023-12-31 23:59:59",
        "2024-01-02 15:45:00",
        "2024-01-02T15:45:00.001",
        "invalid-date",
    ]
    newest_date = find_newest_datetime_in_list(date_list)
    print(f"Newest date in list: {newest_date}")


def __parse_day_of_week_range():
    """
    day of week range parse test
    """
    ranges = [
        "Mon-Fri",
        "Saturday-Sunday",
        "Wed-Mon",
        "Fri-Fri",
        "mon-tue",
        "Invalid-Range"
    ]
    for range_str in ranges:
        try:
            days = parse_day_of_week_range(range_str)
            print(f"Day range '{range_str}' -> {days}")
        except ValueError as e:
            print(f"[!] Error parsing day range '{range_str}': {e}")


def __parse_time_range():
    """
    time range parse test
    """
    ranges = [
        "08:00-17:00",
        "22:00-06:00",
        "12:30-12:30",
        "invalid-range"
    ]
    for range_str in ranges:
        try:
            start_time, end_time = parse_time_range(range_str)
            print(f"Time range '{range_str}' -> Start: {start_time}, End: {end_time}")
        except ValueError as e:
            print(f"[!] Error parsing time range '{range_str}': {e}")


def __times_overlap_or_connect():
    """
    time range overlap/connect test
    """
    time_format = "%H:%M"
    time_ranges = [
        (("08:00", "12:00"), ("11:00", "15:00")),  # Overlap
        (("22:00", "02:00"), ("01:00", "05:00")),  # Overlap across midnight
        (("10:00", "12:00"), ("12:00", "14:00")),  # Connect
        (("09:00", "11:00"), ("12:00", "14:00")),  # No overlap
    ]
    for (start1, end1), (start2, end2) in time_ranges:
        start1 = datetime.strptime(start1, time_format).time()
        end1 = datetime.strptime(end1, time_format).time()
        start2 = datetime.strptime(start2, time_format).time()
        end2 = datetime.strptime(end2, time_format).time()
        overlap = times_overlap_or_connect((start1, end1), (start2, end2))
        overlap_connect = times_overlap_or_connect((start1, end1), (start2, end2), True)
        print(f"Time ranges {start1}-{end1} and {start2}-{end2} overlap/connect: {overlap}/{overlap_connect}")


def __is_time_in_range():
    """
    time in range test
    """
    time_format = "%H:%M:%S"
    test_cases = [
        ("10:00:00", "09:00:00", "11:00:00"),
        ("23:30:00", "22:00:00", "01:00:00"),  # Across midnight
        ("05:00:00", "06:00:00", "10:00:00"),  # Not in range
        ("12:00:00", "12:00:00", "12:00:00"),  # Exact match
    ]
    for (check_time, start_time, end_time) in test_cases:
        start_time = datetime.strptime(start_time, time_format).time()
        end_time = datetime.strptime(end_time, time_format).time()
        in_range = is_time_in_range(
            f"{check_time}", start_time.strftime("%H:%M:%S"), end_time.strftime("%H:%M:%S")
        )
        print(f"Time {check_time} in range {start_time}-{end_time}: {in_range}")


def __reorder_weekdays_from_today():
    """
    weekday reorder test
    """
    for base_day in [
        "Tue", "Wed", "Sunday", "Fri", "InvalidDay"
    ]:
        try:
            reordered_days = reorder_weekdays_from_today(base_day)
            print(f"Reordered weekdays from {base_day}: {reordered_days}")
        except ValueError as e:
            print(f"[!] Error reordering weekdays from '{base_day}': {e}")


def main() -> None:
    """
    run all datetime helper tests
    """
    print("\nDatetime ISO 8601 tests:\n")
    __get_datetime_iso8601()
    print("\nSystem time test:")
    print(f"System time: {get_system_timezone()}")
    print("\nParse timezone data tests:\n")
    __parse_timezone_data()
    print("\nValidate date tests:\n")
    __validate_date()
    print("\nParse flexible date tests:\n")
    __parse_flexible_date()
    print("\nCompare dates tests:\n")
    __compare_dates()
    print("\nFind newest datetime in list tests:\n")
    __find_newest_datetime_in_list()
    print("\nParse day of week range tests:\n")
    __parse_day_of_week_range()
    print("\nParse time range tests:\n")
    __parse_time_range()
    print("\nTimes overlap or connect tests:\n")
    __times_overlap_or_connect()
    print("\nIs time in range tests:\n")
    __is_time_in_range()
    print("\nReorder weekdays from today tests:\n")
    __reorder_weekdays_from_today()


if __name__ == "__main__":
    main()

# __END__
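The inline comments in __validate_date encode the expected truth values; assuming validate_date returns a truthy/falsy result as the test output implies, the calendar cases reduce to:

    assert validate_date("2024-02-29")      # 2024 is a leap year
    assert not validate_date("2023-02-29")  # 2023 is not a leap year
    assert not validate_date("2024-04-31")  # April has only 30 days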
92
test-run/datetime_handling/timestamp_convert.py
Normal file
@@ -0,0 +1,92 @@
#!/usr/bin/env python3

"""
timestamp string checks
"""

from corelibs.datetime_handling.timestamp_convert import (
    convert_timestamp, seconds_to_string, convert_to_seconds, TimeParseError, TimeUnitError
)


def main() -> None:
    """
    run timestamp conversion tests
    """
    print("\n--- Testing convert_to_seconds ---\n")
    test_cases = [
        "5M 6d",                  # 5 months, 6 days
        "2h 30m 45s",             # 2 hours, 30 minutes, 45 seconds
        "1Y 2M 3d",               # 1 year, 2 months, 3 days
        "1h",                     # 1 hour
        "30m",                    # 30 minutes
        "2 hours 15 minutes",     # 2 hours, 15 minutes
        "1d 12h",                 # 1 day, 12 hours
        "3M 2d 4h",               # 3 months, 2 days, 4 hours
        "45s",                    # 45 seconds
        "-45s",                   # -45 seconds
        "-1h",                    # -1 hour
        "-30m",                   # -30 minutes
        "-2h 30m 45s",            # -2 hours, 30 minutes, 45 seconds
        "-1d 12h",                # -1 day, 12 hours
        "-3M 2d 4h",              # -3 months, 2 days, 4 hours
        "-1Y 2M 3d",              # -1 year, 2 months, 3 days
        "-2 hours 15 minutes",    # -2 hours, 15 minutes
        "-1 year 2 months",       # -1 year, 2 months
        "-2Y 6M 15d 8h 30m 45s",  # Complex negative example
        "1 year 2 months",        # 1 year, 2 months
        "2Y 6M 15d 8h 30m 45s",   # Complex example
        # invalid tests
        "5M 6d 2M",               # months appears twice
        "2h 30m 45s 1h",          # hours appears twice
        "1d 2 days",              # days appears twice (short and long form)
        "30m 45 minutes",         # minutes appears twice
        "1Y 2 years",             # years appears twice
        "1x 2 yrs",               # invalid names

        123,                      # int
        789.12,                   # float
        456.56,                   # float, high
        "4566",                   # int as string
        "5551.12",                # float as string
        "5551.56",                # float, high as string
    ]

    for time_string in test_cases:
        try:
            result = convert_to_seconds(time_string)
            print(f"Human readable to seconds: {time_string} => {result}")
        except (TimeParseError, TimeUnitError) as e:
            print(f"Error encountered for {time_string}: {type(e).__name__}: {e}")

    print("\n--- Testing seconds_to_string and convert_timestamp ---\n")

    test_values = [
        'as is string',
        -172800.001234,  # -2 days, -0.001234 seconds
        -90061.789,      # -1 day, -1 hour, -1 minute, -1.789 seconds
        -3661.456,       # -1 hour, -1 minute, -1.456 seconds
        -65.123,         # -1 minute, -5.123 seconds
        -1.5,            # -1.5 seconds
        -0.001,          # -1 millisecond
        -0.000001,       # -1 microsecond
        0,               # 0 seconds
        0.000001,        # 1 microsecond
        0.001,           # 1 millisecond
        1.5,             # 1.5 seconds
        65.123,          # 1 minute, 5.123 seconds
        3661.456,        # 1 hour, 1 minute, 1.456 seconds
        90061.789,       # 1 day, 1 hour, 1 minute, 1.789 seconds
        172800.001234    # 2 days, 0.001234 seconds
    ]

    for time_value in test_values:
        result = seconds_to_string(time_value, show_microseconds=True)
        result_alt = convert_timestamp(time_value, show_microseconds=True)
        print(f"Seconds to human readable: {time_value} => {result} / {result_alt}")


if __name__ == "__main__":
    main()

# __END__
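For the plain hour/minute/second cases the expected conversion is simple arithmetic, e.g. "2h 30m 45s" should be 2*3600 + 30*60 + 45 = 9045 seconds. A sketch of what the test cases above imply, assuming exact integer totals and sign-preserving negatives (month and year units depend on the conventions the implementation chooses, so they are left out):

    assert convert_to_seconds("2h 30m 45s") == 9045
    assert convert_to_seconds("-1h") == -3600
    assert convert_to_seconds("45s") == 45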
2
test-run/db_handling/database/.gitignore
vendored
Normal file
@@ -0,0 +1,2 @@
*
!.gitignore
2
test-run/db_handling/log/.gitignore
vendored
Normal file
@@ -0,0 +1,2 @@
*
!.gitignore
139
test-run/db_handling/sql_main.py
Normal file
@@ -0,0 +1,139 @@
"""
SQL Main wrapper test
"""

from pathlib import Path
from uuid import uuid4
import json
from corelibs_dump_data.dump_data import dump_data
from corelibs.logging_handling.log import Log, Logger
from corelibs.db_handling.sql_main import SQLMain

SCRIPT_PATH: Path = Path(__file__).resolve().parent
ROOT_PATH: Path = SCRIPT_PATH
DATABASE_DIR: Path = Path("database")
LOG_DIR: Path = Path("log")


def main() -> None:
    """
    run SQLMain wrapper tests
    """
    log = Log(
        log_path=ROOT_PATH.joinpath(LOG_DIR, 'sqlite_main.log'),
        log_name="SQLite Main",
        log_settings={
            "log_level_console": 'DEBUG',
            "log_level_file": 'DEBUG',
        }
    )
    sql_main = SQLMain(
        log=Logger(log.get_logger_settings()),
        db_ident=f"sqlite:{ROOT_PATH.joinpath(DATABASE_DIR, 'test_sqlite_main.db')}"
    )
    if sql_main.connected():
        log.info("SQL Main connected successfully")
    else:
        log.error('SQL Main connection failed')
    if sql_main.dbh is None:
        log.error('SQL Main DBH instance is None')
        return

    if sql_main.dbh.trigger_exists('trg_test_a_set_date_updated_on_update'):
        log.info("Trigger trg_test_a_set_date_updated_on_update exists")
    if sql_main.dbh.table_exists('test_a'):
        log.info("Table test_a exists, dropping for clean test")
        sql_main.dbh.execute_query("DROP TABLE test_a;")
    # create a dummy table
    table_sql = """
    CREATE TABLE IF NOT EXISTS test_a (
        test_a_id INTEGER PRIMARY KEY,
        date_created TEXT DEFAULT (strftime('%Y-%m-%d %H:%M:%f', 'now')),
        date_updated TEXT,
        uid TEXT NOT NULL UNIQUE,
        set_current_timestamp TEXT DEFAULT CURRENT_TIMESTAMP,
        text_a TEXT,
        content,
        int_a INTEGER,
        float_a REAL
    );
    """

    result = sql_main.dbh.execute_query(table_sql)
    log.debug(f"Create table result: {result}")
    trigger_sql = """
    CREATE TRIGGER trg_test_a_set_date_updated_on_update
    AFTER UPDATE ON test_a
    FOR EACH ROW
    WHEN OLD.date_updated IS NULL OR NEW.date_updated = OLD.date_updated
    BEGIN
        UPDATE test_a
        SET date_updated = (strftime('%Y-%m-%d %H:%M:%f', 'now'))
        WHERE test_a_id = NEW.test_a_id;
    END;
    """
    result = sql_main.dbh.execute_query(trigger_sql)
    log.debug(f"Create trigger result: {result}")
    result = sql_main.dbh.meta_data_detail('test_a')
    log.debug(f"Table meta data detail: {dump_data(result)}")
    # INSERT DATA
    sql = """
    INSERT INTO test_a (uid, text_a, content, int_a, float_a)
    VALUES (?, ?, ?, ?, ?)
    RETURNING test_a_id, uid;
    """
    result = sql_main.dbh.execute_query(
        sql,
        (
            str(uuid4()),
            'Some text A',
            json.dumps({'foo': 'bar', 'number': 42}),
            123,
            123.456,
        )
    )
    log.debug(f"[1] Insert data result: {dump_data(result)}")
    __uid: str = ''
    if result is not False:
        # first one only of interest
        result = dict(result[0])
        __uid = str(result.get('uid', ''))
    # second insert
    result = sql_main.dbh.execute_query(
        sql,
        (
            str(uuid4()),
            'Some text A',
            json.dumps({'foo': 'bar', 'number': 42}),
            123,
            123.456,
        )
    )
    log.debug(f"[2] Insert data result: {dump_data(result)}")
    result = sql_main.dbh.execute_query("SELECT * FROM test_a;")
    log.debug(f"Select data result: {dump_data(result)}")
    result = sql_main.dbh.return_one("SELECT * FROM test_a WHERE uid = ?;", (__uid,))
    log.debug(f"Fetch row result: {dump_data(result)}")
    sql = """
    UPDATE test_a
    SET text_a = ?
    WHERE uid = ?;
    """
    result = sql_main.dbh.execute_query(
        sql,
        (
            'Some updated text A',
            __uid,
        )
    )
    log.debug(f"Update data result: {dump_data(result)}")
    result = sql_main.dbh.return_one("SELECT * FROM test_a WHERE uid = ?;", (__uid,))
    log.debug(f"Fetch row after update result: {dump_data(result)}")

    sql_main.close()


if __name__ == "__main__":
    main()

# __END__
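One portability note on the INSERT ... RETURNING statements used above: SQLite only supports RETURNING since version 3.35.0 (released 2021-03), so this test needs a reasonably recent SQLite build. The library version behind the stdlib binding can be checked up front:

    import sqlite3
    print(sqlite3.sqlite_version)  # needs >= 3.35.0 for RETURNING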
146
test-run/db_handling/sqlite_io.py
Normal file
@@ -0,0 +1,146 @@
"""
SQLite IO test
"""

from pathlib import Path
from uuid import uuid4
import json
import sqlite3
from corelibs_dump_data.dump_data import dump_data
from corelibs.logging_handling.log import Log, Logger
from corelibs.db_handling.sqlite_io import SQLiteIO

SCRIPT_PATH: Path = Path(__file__).resolve().parent
ROOT_PATH: Path = SCRIPT_PATH
DATABASE_DIR: Path = Path("database")
LOG_DIR: Path = Path("log")


def main() -> None:
    """
    run SQLiteIO tests
    """
    log = Log(
        log_path=ROOT_PATH.joinpath(LOG_DIR, 'sqlite_io.log'),
        log_name="SQLite IO",
        log_settings={
            "log_level_console": 'DEBUG',
            "log_level_file": 'DEBUG',
        }
    )
    db = SQLiteIO(
        log=Logger(log.get_logger_settings()),
        db_name=ROOT_PATH.joinpath(DATABASE_DIR, 'test_sqlite_io.db'),
        row_factory='Dict'
    )
    if db.db_connected():
        log.info(f"Connected to DB: {db.db_name}")
    if db.trigger_exists('trg_test_a_set_date_updated_on_update'):
        log.info("Trigger trg_test_a_set_date_updated_on_update exists")
    if db.table_exists('test_a'):
        log.info("Table test_a exists, dropping for clean test")
        db.execute_query("DROP TABLE test_a;")
    # create a dummy table
    table_sql = """
    CREATE TABLE IF NOT EXISTS test_a (
        test_a_id INTEGER PRIMARY KEY,
        date_created TEXT DEFAULT (strftime('%Y-%m-%d %H:%M:%f', 'now')),
        date_updated TEXT,
        uid TEXT NOT NULL UNIQUE,
        set_current_timestamp TEXT DEFAULT CURRENT_TIMESTAMP,
        text_a TEXT,
        content,
        int_a INTEGER,
        float_a REAL
    );
    """
    result = db.execute_query(table_sql)
    log.debug(f"Create table result: {result}")
    trigger_sql = """
    CREATE TRIGGER trg_test_a_set_date_updated_on_update
    AFTER UPDATE ON test_a
    FOR EACH ROW
    WHEN OLD.date_updated IS NULL OR NEW.date_updated = OLD.date_updated
    BEGIN
        UPDATE test_a
        SET date_updated = (strftime('%Y-%m-%d %H:%M:%f', 'now'))
        WHERE test_a_id = NEW.test_a_id;
    END;
    """
    result = db.execute_query(trigger_sql)
    log.debug(f"Create trigger result: {result}")
    result = db.meta_data_detail('test_a')
    log.debug(f"Table meta data detail: {dump_data(result)}")
    # INSERT DATA
    sql = """
    INSERT INTO test_a (uid, text_a, content, int_a, float_a)
    VALUES (?, ?, ?, ?, ?)
    RETURNING test_a_id, uid;
    """
    result = db.execute_query(
        sql,
        (
            str(uuid4()),
            'Some text A',
            json.dumps({'foo': 'bar', 'number': 42}),
            123,
            123.456,
        )
    )
    log.debug(f"[1] Insert data result: {dump_data(result)}")
    __uid: str = ''
    if result is not False:
        # first one only of interest
        result = dict(result[0])
        __uid = str(result.get('uid', ''))
    # second insert
    result = db.execute_query(
        sql,
        (
            str(uuid4()),
            'Some text A',
            json.dumps({'foo': 'bar', 'number': 42}),
            123,
            123.456,
        )
    )
    log.debug(f"[2] Insert data result: {dump_data(result)}")
    result = db.execute_query("SELECT * FROM test_a;")
    log.debug(f"Select data result: {dump_data(result)}")
    result = db.return_one("SELECT * FROM test_a WHERE uid = ?;", (__uid,))
    log.debug(f"Fetch row result: {dump_data(result)}")
    sql = """
    UPDATE test_a
    SET text_a = ?
    WHERE uid = ?;
    """
    result = db.execute_query(
        sql,
        (
            'Some updated text A',
            __uid,
        )
    )
    log.debug(f"Update data result: {dump_data(result)}")
    result = db.return_one("SELECT * FROM test_a WHERE uid = ?;", (__uid,))
    log.debug(f"Fetch row after update result: {dump_data(result)}")

    db.db_close()

    db = SQLiteIO(
        log=Logger(log.get_logger_settings()),
        db_name=ROOT_PATH.joinpath(DATABASE_DIR, 'test_sqlite_io.db'),
        row_factory='Row'
    )
    result = db.return_one("SELECT * FROM test_a WHERE uid = ?;", (__uid,))
    if result is not None and result is not False:
        log.debug(f"Fetch row result: {dump_data(result)} -> {dict(result)} -> {result.keys()}")
        log.debug(f"Access via index: {result[5]} -> {result['text_a']}")
        if isinstance(result, sqlite3.Row):
            log.debug('Result is sqlite3.Row as expected')


if __name__ == "__main__":
    main()

# __END__
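The row_factory='Row' branch at the end relies on standard sqlite3.Row behaviour, which supports index access, key access, and keys(). A standalone sketch of just that part, using only the stdlib and no corelibs:

    import sqlite3

    con = sqlite3.connect(":memory:")
    con.row_factory = sqlite3.Row
    con.execute("CREATE TABLE t (a, b)")
    con.execute("INSERT INTO t VALUES (1, 'x')")
    row = con.execute("SELECT * FROM t").fetchone()
    print(row[1], row["b"], row.keys())  # x x ['a', 'b']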
Some files were not shown because too many files have changed in this diff.