Compare commits

210 Commits

| SHA1 |
|---|
| 5950485d46 |
| f349927a63 |
| dfe8890598 |
| d224876a8e |
| 17e8c76b94 |
| 9034a31cd6 |
| 523e61c9f7 |
| cf575ded90 |
| 11a75d8532 |
| 6593e11332 |
| c310f669d6 |
| f327f47c3f |
| acd61e825e |
| 895701da59 |
| e0fb0db1f0 |
| dc7e56106e |
| 90e5179980 |
| 9db39003c4 |
| 4ffe372434 |
| a00c27c465 |
| 1f7f4b8d53 |
| baca79ce82 |
| 4265be6430 |
| c16b086467 |
| 48a98c0206 |
| f1788f057f |
| 0ad8883809 |
| 51e9b1ce7c |
| 0d3104f60a |
| d29f827fc9 |
| 282fe1f7c0 |
| afce5043e4 |
| 5996bb1fc0 |
| 06a17d7c30 |
| af7633183c |
| 1280b2f855 |
| 2e0b1f5951 |
| 548d7491b8 |
| ad99115544 |
| 52919cbc49 |
| 7f2dc13c31 |
| 592652cff1 |
| 6a1724695e |
| 037210756e |
| 4e78d83092 |
| 0e6331fa6a |
| c98c5df63c |
| 0981c74da9 |
| 31518799f6 |
| e8b4b9b48e |
| cd06272b38 |
| c5ab4352e3 |
| 0da4a6b70a |
| 11c5f3387c |
| 3ed0171e17 |
| c7b38b0d70 |
| caf0039de4 |
| 2637e1e42c |
| d0a1673965 |
| 07e5d23f72 |
| fb4fdb6857 |
| d642a13b6e |
| 8967031f91 |
| 89caada4cc |
| b3616269bc |
| 4fa22813ce |
| 3ee3a0dce0 |
| 1226721bc0 |
| a76eae0cc7 |
| 53cf2a6f48 |
| fe69530b38 |
| bf83c1c394 |
| 84ce43ab93 |
| 5e0765ee24 |
| 6edf9398b7 |
| 30bf9c1bcb |
| 0b59f3cc7a |
| 2544fad9ce |
| e579ef5834 |
| 543e9766a1 |
| 4c3611aba7 |
| dadc14563a |
| c1eda7305b |
| 2f4e236350 |
| b858936c68 |
| 78ce30283e |
| f85fbb86af |
| ed22105ec8 |
| 7c5af588c7 |
| 2690a285d9 |
| bb60a570d0 |
| ca0ab2d7d1 |
| 38bae7fb46 |
| 14466c3ff8 |
| fe824f9fb4 |
| ef5981b473 |
| 7d1ee70cf6 |
| 7c72d99619 |
| b32887a6d8 |
| 37a197e7f1 |
| 74cb3d2c54 |
| d19abcabc7 |
| f8ae6609c7 |
| cbd39ff161 |
| f8905a176c |
| 847288e91f |
| 446d9d5217 |
| 3a7a1659f0 |
| bc23006a34 |
| 6090995eba |
| 60db747d6d |
| a7a4141f58 |
| 2b04cbe239 |
| 765cc061c1 |
| 80319385f0 |
| 29dd906fe0 |
| d5dc4028c3 |
| 0df049d453 |
| 0bd7c1f685 |
| 2f08ecabbf |
| 12af1c80dc |
| a52b6e0a55 |
| a586cf65e2 |
| e2e7882bfa |
| 4f9c2b9d5f |
| 5203bcf1ea |
| f1e3bc8559 |
| b97ca6f064 |
| d1ea9874da |
| 3cd3f87d68 |
| 582937b866 |
| 2b8240c156 |
| abf4b7ac89 |
| 9c49f83c16 |
| 3a625ed0ee |
| 2cfbf4bb90 |
| 5767533668 |
| 24798f19ca |
| 26f8249187 |
| dcefa564da |
| edd35dccea |
| ea527ea60c |
| fd5e1db22b |
| 39e23faf7f |
| de285b531a |
| 0a29a592f9 |
| e045b1d3b5 |
| 280e5fa861 |
| 472d3495b5 |
| 2778ac6870 |
| 743a0a8ac9 |
| 694712ed2e |
| ea3b4f1790 |
| da68818d4f |
| db6a3b53c5 |
| 82b089498e |
| 948b0dd5e7 |
| 4acc0b51b1 |
| a626b738a9 |
| 7119844313 |
| 5763f57830 |
| 70e8ceecce |
| acbe1ac692 |
| 99bca2c467 |
| b74ed1f30e |
| 8082ab78a1 |
| c69076f517 |
| 648ab001b6 |
| 447034046e |
| 0770ac0bb4 |
| aa2fbd4f70 |
| 58c8447531 |
| bcca43d774 |
| e9ccfe7ad2 |
| 6c2637ad34 |
| 7183d05dd6 |
| b45ca85cd3 |
| 4ca45ebc73 |
| 6902768fed |
| 3f9f2ceaac |
| 2a248bd249 |
| c559a6bafb |
| 19d7e9b5ed |
| 3e5a5accf7 |
| 424c91945a |
| c657dc564e |
| 208f002284 |
| 084ecc01e0 |
| 08cb994d8d |
| 67f1a6688d |
| efb7968e93 |
| fe7c7db004 |
| 79d1ccae9a |
| 6e69af4aa8 |
| d500b7d473 |
| ef599a1aad |
| 2d197134f1 |
| 717080a009 |
| 19197c71ff |
| 051b93f2d8 |
| e04b3598b8 |
| b88e0fe564 |
| 060e3b4afe |
| cd07267475 |
| 2fa031f6ee |
| f38cce1c1d |
| 52dd1e7b73 |
| 661a182655 |
| d803de312d |
| 57a36d64f1 |
.gitignore (vendored, 2 changes)

```diff
@@ -3,3 +3,5 @@
 **/*.egg-info
 .mypy_cache/
 **/.env
+.coverage
+uv.lock
```
README.md (new file, 133 lines)

````markdown
# CoreLibs for Python

> [!warning]
> This is pre-production, locations of methods and names of paths can change
>
> This will be split up into modules per file and this will be just a collection holder
> See [Deprecated](#deprecated) below

This is a pip package that can be installed into any project and covers the following parts:

- logging update with exception logs
- requests wrapper for easier auth pass on access
- dict fingerprinting
- sending email
- jmespath search
- json helpers for content replace and output
- dump outputs for data for debugging
- progress printing
- string formatting, time creation, byte formatting
- Enum base class
- SQLite simple IO class
- Symmetric encryption

## Current list

- config_handling: simple INI config file data loader with check/convert/etc
- csv_interface: csv dict writer/reader helper
- debug_handling: various debug helpers like data dumper, timer, utilization, etc
- db_handling: SQLite interface class
- encyption_handling: symmetric encryption
- email_handling: simple email sending
- file_handling: crc handling for file content and file names, progress bar
- json_handling: jmespath support and json date support, replace content in dict with json paths
- iterator_handling: list and dictionary handling support (search, fingerprinting, etc)
- logging_handling: extend log and also error message handling
- requests_handling: requests wrapper for better calls with auth headers
- script_handling: pid lock file handling, abort timer
- string_handling: byte format, datetime format, datetime compare, hashing, string formats for numbers, double byte string format, etc
- var_handling: var type checkers, enum base class

## Unfinished

- csv_handling/csv_interface: the CSV DictWriter interface is only implemented in a very basic way
- script_handling/script_helpers: no idea if there is a need for this; tests are written but not finished

## Deprecated

All content in this module will move to stand-alone libraries. As of now the following entries have moved and will throw deprecation warnings if used:

- var_handling.enum_base: corelibs-enum-base
- var_handling.var_helpers: corelibs-var
- datetime_handling: corelibs-datetime
- string_handling.text_colors: corelibs-text-colors

## UV setup

uv must be [installed](https://docs.astral.sh/uv/getting-started/installation/)

## How to publish

Have the following setup in `pyproject.toml`

```toml
[[tool.uv.index]]
name = "opj-pypi"
url = "https://git.egplusww.jp/api/packages/PyPI/pypi/simple/"
publish-url = "https://git.egplusww.jp/api/packages/PyPI/pypi"
explicit = true
```

```sh
uv build
uv publish --index opj-pypi --token <gitea token>
```

## Use package

We must set the full index URL here because we run with "--no-project"

```sh
uv run --with corelibs --index opj-pypi=https://git.egplusww.jp/api/packages/PyPI/pypi/simple/ --no-project -- python -c "import corelibs"
```

### Python tests

All python tests are in the tests/ folder. They are structured by the source folder layout.

Run them with

```sh
uv run pytest
```

Get a coverage report

```sh
uv run pytest --cov=corelibs
uv run pytest --cov=corelibs --cov-report=term-missing
```

### Other tests

Usage and run tests are located in the test-run folder; run them as below

```sh
uv run test-run/<script>
```

## How to install in another project

This will also add the index entry

```sh
uv add corelibs --index opj-pypi=https://git.egplusww.jp/api/packages/PyPI/pypi/simple/
```

## Python venv setup

After clone, run the command below to install all dependencies

```sh
uv sync
```

## NOTE on TLS problems

> [!warning] TLS problems with Netskope

If the Netskope service is running, all uv runs will fail unless either --native-tls is set or the environment variable SSL_CERT_FILE is set, see below

```sh
export SSL_CERT_FILE='/Library/Application Support/Netskope/STAgent/data/nscacert_combined.pem'
```
````
ReadMe.md (deleted, 84 lines)

````markdown
# CoreLibs for Python

This is a pip package that can be installed into any project and covers the following parts:

- logging update with exception logs
- requests wrapper for easier auth pass on access
- dict fingerprinting
- jmespath search
- dump outputs for data
- progress printing
- string formatting, time creation, byte formatting

## Current list

- csv_handling: csv dict writer helper
- debug_handling: various debug helpers like data dumper, timer, utilization, etc
- file_handling: crc handling for file content and file names, progress bar
- json_handling: jmespath support and json date support
- list_dict_handling: list and dictionary handling support (search, fingerprinting, etc)
- logging_handling: extend log and also error message handling
- requests_handling: requests wrapper for better calls with auth headers
- script_handling: pid lock file handling, abort timer
- string_handling: byte format, datetime format, hashing, string formats for numbers, double byte string format, etc

## How to publish

Have the following setup in `pyproject.toml`

```toml
[[tool.uv.index]]
name = "egra-gitea"
url = "https://git.egplusww.jp/api/packages/PyPI/pypi/simple/"
publish-url = "https://git.egplusww.jp/api/packages/PyPI/pypi"
explicit = true
```

```sh
uv build --native-tls
uv publish --index egra-gitea --token <gitea token> --native-tls
```

## Test package

We must set the full index URL here because we run with "--no-project"

```sh
uv run --with corelibs --index egra-gitea=https://git.egplusww.jp/api/packages/PyPI/pypi/simple/ --no-project --native-tls -- python -c "import corelibs"
```

### Other tests

Other tests are located in the test folder.

At the moment only a small test for the "progress" and the "double byte string format" modules is set up

```sh
uv run --native-tls tests/progress/progress_test.py
```

```sh
uv run --native-tls tests/double_byte_string_format/double_byte_string_format.py
```

## How to install in another project

This will also add the index entry

```sh
uv add corelibs --index egra-gitea=https://git.egplusww.jp/api/packages/PyPI/pypi/simple/ --native-tls
```

## Python venv setup

In the folder where the script will be located

```sh
uv venv --python 3.13
```

Install all needed dependencies

```sh
uv sync
```
````
SECURITY.md (new file, 11 lines)

```markdown
# Security Policy

This software follows the [Semver 2.0 scheme](https://semver.org/).

## Supported Versions

Only the latest version is supported

## Reporting a Vulnerability

Open a ticket to report a security problem
```
ToDo.md (7 changes)

```diff
@@ -1,4 +1,7 @@
 # ToDo list
 
-- stub files .pyi
-- fix all remaining check errors
+- [x] stub files .pyi
+- [ ] Add tests for all, we need 100% test coverage
+- [x] Log: add custom format for "stack_correct" if set, this will override the normal stack block
+- [ ] Log: add rotate for size based
+- [ ] All folders and file names need to be revisited for naming and content collection
```
pyproject.toml

```diff
@@ -1,29 +1,48 @@
 # MARK: Project info
 [project]
 name = "corelibs"
-version = "0.5.0"
+version = "0.43.2"
 description = "Collection of utils for Python scripts"
-readme = "ReadMe.md"
+readme = "README.md"
 requires-python = ">=3.13"
 dependencies = [
+    "corelibs-datetime>=1.0.1",
+    "corelibs-enum-base>=1.0.0",
+    "corelibs-text-colors>=1.0.0",
+    "corelibs-var>=1.0.0",
+    "cryptography>=46.0.3",
     "jmespath>=1.0.1",
+    "jsonpath-ng>=1.7.0",
     "psutil>=7.0.0",
-    "requests>=2.32.4",
+    "requests[socks]>=2.32.5",
 ]
-# set this to disable publish to pypi (pip)
-# classifiers = ["Private :: Do Not Upload"]
-
-# MARK: build target
-[[tool.uv.index]]
-name = "egra-gitea"
-url = "https://git.egplusww.jp/api/packages/PyPI/pypi/simple/"
-publish-url = "https://git.egplusww.jp/api/packages/PyPI/pypi"
-explicit = true
-
 # MARK: build system
 [build-system]
 requires = ["hatchling"]
 build-backend = "hatchling.build"
+# set this to disable publish to pypi (pip)
+# classifiers = ["Private :: Do Not Upload"]
+
+# MARK: build target
+[[tool.uv.index]]
+name = "opj-pypi"
+url = "https://git.egplusww.jp/api/packages/PyPI/pypi/simple/"
+publish-url = "https://git.egplusww.jp/api/packages/PyPI/pypi"
+
+[tool.uv.sources]
+corelibs-enum-base = { index = "opj-pypi" }
+corelibs-datetime = { index = "opj-pypi" }
+corelibs-var = { index = "opj-pypi" }
+corelibs-text-colors = { index = "opj-pypi" }
+
+[dependency-groups]
+dev = [
+    "deepdiff>=8.6.1",
+    "pytest>=8.4.1",
+    "pytest-cov>=6.2.1",
+    "typing-extensions>=4.15.0",
+]
 
 # MARK: Python linting
 [tool.pyright]
@@ -47,6 +66,38 @@ notes = ["FIXME", "TODO"]
 notes-rgx = '(FIXME|TODO)(\((TTD-|#)\[0-9]+\))'
 [tool.flake8]
 max-line-length = 120
 ignore = [
     "E741", # ignore ambiguous variable name
     "W504"  # Line break occurred after a binary operator [wrongly triggered by "or" in if]
 ]
+[tool.pylint.MASTER]
+# this is for the tests/etc folders
+init-hook='import sys; sys.path.append("src/")'
+
+# MARK: Testing
+[tool.pytest.ini_options]
+testpaths = [
+    "tests",
+]
+
+[tool.coverage.run]
+omit = [
+    "*/tests/*",
+    "*/test_*.py",
+    "*/__init__.py"
+]
+
+[tool.coverage.report]
+exclude_lines = [
+    "pragma: no cover",
+    "def __repr__",
+    "def __str__",
+    "raise AssertionError",
+    "raise NotImplementedError",
+    "if __name__ == .__main__.:"
+]
+exclude_also = [
+    "def __.*__\\(",
+    "def __.*\\(",
+    "def _.*\\(",
+]
```
Deleted file (93 lines)

```python
"""
Write to CSV file
- each class set is one file write with one header set
"""

from typing import Any
from pathlib import Path
from collections import Counter
import csv


class CsvWriter:
    """
    write to a CSV file
    """

    def __init__(
        self,
        path: Path,
        file_name: str,
        header: dict[str, str],
        header_order: list[str] | None = None
    ):
        self.path = path
        self.file_name = file_name
        # Key: index for write for the line dict, Values: header entries
        self.header = header
        self.csv_file_writer = self.__open_csv(header_order)

    def __open_csv(self, header_order: list[str] | None) -> 'csv.DictWriter[str] | None':
        """
        open csv file for writing, write headers

        Note that if there is no header_order set we use the order in the header dictionary

        Arguments:
            header_order {list[str] | None} -- optional dedicated header order

        Returns:
            csv.DictWriter[str] | None -- the writer, or None if the headers or file are not usable
        """
        # if header order is set, make sure all header value fields exist
        header_values = self.header.values()
        if header_order is not None:
            if Counter(header_values) != Counter(header_order):
                print(
                    "header order does not match header values: "
                    f"{', '.join(header_values)} != {', '.join(header_order)}"
                )
                return None
            header_values = header_order
        # no duplicates
        if len(header_values) != len(set(header_values)):
            print(f"Header must have unique values only: {', '.join(header_values)}")
            return None
        try:
            fp = open(
                self.path.joinpath(self.file_name),
                "w", encoding="utf-8"
            )
            csv_file_writer = csv.DictWriter(
                fp,
                fieldnames=header_values,
                delimiter=",",
                quotechar='"',
                quoting=csv.QUOTE_MINIMAL,
            )
            csv_file_writer.writeheader()
            return csv_file_writer
        except OSError as err:
            print("OS error:", err)
            return None

    def write_csv(self, line: dict[str, str]) -> bool:
        """
        write member csv line

        Arguments:
            line {dict[str, str]} -- one row keyed by the header mapping keys

        Returns:
            bool -- False if no writer is available, True after a successful write
        """
        if self.csv_file_writer is None:
            return False
        csv_row: dict[str, Any] = {}
        # only write entries that are in the header list
        for key, value in self.header.items():
            csv_row[value] = line[key]
        self.csv_file_writer.writerow(csv_row)
        return True

# __END__
```
Deleted file (31 lines)

```python
"""
json encoder for datetime
"""

from typing import Any
from json import JSONEncoder
from datetime import datetime, date


# subclass JSONEncoder
class DateTimeEncoder(JSONEncoder):
    """
    Override the default method
    cls=DateTimeEncoder
    """
    def default(self, o: Any) -> str | None:
        if isinstance(o, (date, datetime)):
            return o.isoformat()
        return None


def default(obj: Any) -> str | None:
    """
    default override
    default=default
    """
    if isinstance(obj, (date, datetime)):
        return obj.isoformat()
    return None

# __END__
```
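For reference, a minimal usage sketch for the removed encoder, assuming the `DateTimeEncoder` class above is in scope; the payload is made up:

```python
import json
from datetime import datetime

# any date/datetime values in the payload are serialized via isoformat()
payload = {"name": "job-1", "started": datetime(2024, 5, 1, 12, 30)}

print(json.dumps(payload, cls=DateTimeEncoder))
# {"name": "job-1", "started": "2024-05-01T12:30:00"}
```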
Deleted file (37 lines)

```python
"""
Dict helpers
"""


def mask(
    data_set: dict[str, str],
    mask_keys: list[str] | None = None,
    mask_str: str = "***",
    skip: bool = False
) -> dict[str, str]:
    """
    mask data for output
    Checks if any entry of mask_keys matches a key in the data set, either at the start or at the end

    Arguments:
        data_set {dict[str, str]} -- _description_

    Keyword Arguments:
        mask_keys {list[str] | None} -- _description_ (default: {None})
        mask_str {str} -- _description_ (default: {"***"})
        skip {bool} -- _description_ (default: {False})

    Returns:
        dict[str, str] -- _description_
    """
    if skip is True:
        return data_set
    if mask_keys is None:
        mask_keys = ["password", "secret"]
    return {
        key: mask_str
        if any(key.startswith(mask_key) or key.endswith(mask_key) for mask_key in mask_keys) else value
        for key, value in data_set.items()
    }

# __END__
```
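A quick sketch of what the removed `mask()` helper did, assuming the function above is in scope; the data is made up:

```python
credentials = {"user": "alice", "password_db": "hunter2", "api_secret": "xyz"}

# "password_db" starts with "password", "api_secret" ends with "secret"
print(mask(credentials))
# {'user': 'alice', 'password_db': '***', 'api_secret': '***'}
```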
Deleted file (120 lines)

```python
"""
A log handler wrapper
"""

import logging.handlers
import logging
from pathlib import Path
from typing import Mapping


class Log:
    """
    logger setup
    """

    EXCEPTION: int = 60

    def __init__(
        self,
        log_path: Path,
        log_name: str,
        log_level_console: str = 'WARNING',
        log_level_file: str = 'DEBUG',
        add_start_info: bool = True
    ):
        logging.addLevelName(Log.EXCEPTION, 'EXCEPTION')
        if not log_name.endswith('.log'):
            log_path = log_path.with_suffix('.log')
        # overall logger settings
        self.logger = logging.getLogger(log_name)
        # set maximum logging level for all logging output
        self.logger.setLevel(logging.DEBUG)
        # console logger
        self.__console_handler(log_level_console)
        # file logger
        self.__file_handler(log_level_file, log_path)
        # if requested, add a start log entry
        if add_start_info is True:
            self.break_line('START')

    def __filter_exceptions(self, record: logging.LogRecord) -> bool:
        return record.levelname != "EXCEPTION"

    def __console_handler(self, log_level_console: str = 'WARNING'):
        # console logger
        if not isinstance(getattr(logging, log_level_console.upper(), None), int):
            log_level_console = 'WARNING'
        console_handler = logging.StreamHandler()
        formatter_console = logging.Formatter(
            (
                '[%(asctime)s.%(msecs)03d] '
                '[%(filename)s:%(funcName)s:%(lineno)d] '
                '<%(levelname)s> '
                '%(message)s'
            ),
            datefmt="%Y-%m-%d %H:%M:%S",
        )
        console_handler.setLevel(log_level_console)
        # do not show exception logs on console
        console_handler.addFilter(self.__filter_exceptions)
        console_handler.setFormatter(formatter_console)
        self.logger.addHandler(console_handler)

    def __file_handler(self, log_level_file: str, log_path: Path) -> None:
        # file logger
        if not isinstance(getattr(logging, log_level_file.upper(), None), int):
            log_level_file = 'DEBUG'
        file_handler = logging.handlers.TimedRotatingFileHandler(
            filename=log_path,
            encoding="utf-8",
            when="D",
            interval=1
        )
        formatter_file_handler = logging.Formatter(
            (
                '[%(asctime)s.%(msecs)03d] '
                '[%(pathname)s:%(funcName)s:%(lineno)d] '
                '[%(name)s:%(process)d] '
                '<%(levelname)s> '
                '%(message)s'
            ),
            datefmt="%Y-%m-%dT%H:%M:%S",
        )
        file_handler.setLevel(log_level_file)
        file_handler.setFormatter(formatter_file_handler)
        self.logger.addHandler(file_handler)

    def break_line(self, info: str = "BREAK"):
        """
        add a break line as info level

        Keyword Arguments:
            info {str} -- _description_ (default: {"BREAK"})
        """
        self.logger.info("[%s] ================================>", info)

    def exception(self, msg: object, *args: object, extra: Mapping[str, object] | None = None) -> None:
        """
        log on exception level

        Args:
            msg (object): _description_
            *args (object): arguments for msg
            extra (Mapping[str, object] | None): extra arguments for the formatting if needed
        """
        self.logger.log(Log.EXCEPTION, msg, *args, exc_info=True, extra=extra)

    def validate_log_level(self, log_level: str) -> bool:
        """
        returns False if the log level is invalid

        Args:
            log_level (str): _description_

        Returns:
            bool: _description_
        """
        return isinstance(getattr(logging, log_level.upper(), None), int)

# __END__
```
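A usage sketch for the removed wrapper as defined above; path and messages are made up:

```python
from pathlib import Path

log = Log(Path("logs/app.log"), "app.log", log_level_console="INFO")
log.logger.info("normal message")    # goes to console and file
try:
    1 / 0
except ZeroDivisionError:
    # EXCEPTION level entries are filtered from the console handler,
    # so the traceback ends up in the log file only
    log.exception("division failed")
```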
Deleted file (63 lines)

```python
"""
Various string based date/time helpers
"""

from math import floor
import time


def convert_timestamp(timestamp: float | int, show_micro: bool = True) -> str:
    """
    format a duration in seconds into a human readable format

    Arguments:
        timestamp {float} -- _description_

    Keyword Arguments:
        show_micro {bool} -- _description_ (default: {True})

    Returns:
        str -- _description_
    """
    # cut off the ms, but first round to four digits
    __timestamp_ms_split = str(round(timestamp, 4)).split(".")
    timestamp = int(__timestamp_ms_split[0])
    try:
        ms = int(__timestamp_ms_split[1])
    except IndexError:
        ms = 0
    timegroups = (86400, 3600, 60, 1)
    output: list[int] = []
    for i in timegroups:
        output.append(int(floor(timestamp / i)))
        timestamp = timestamp % i
    # output has days|hours|min|sec ms
    time_string = ""
    if output[0]:
        time_string = f"{output[0]}d"
    if output[0] or output[1]:
        time_string += f"{output[1]}h "
    if output[0] or output[1] or output[2]:
        time_string += f"{output[2]}m "
    time_string += f"{output[3]}s"
    if show_micro:
        time_string += f" {ms}ms" if ms else " 0ms"
    return time_string


def create_time(timestamp: float, timestamp_format: str = "%Y-%m-%d %H:%M:%S") -> str:
    """
    just takes a timestamp and prints out a human readable format

    Arguments:
        timestamp {float} -- _description_

    Keyword Arguments:
        timestamp_format {_type_} -- _description_ (default: {"%Y-%m-%d %H:%M:%S"})

    Returns:
        str -- _description_
    """
    return time.strftime(timestamp_format, time.localtime(timestamp))

# __END__
```
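A worked example of the removed duration formatting, assuming `convert_timestamp` above is in scope (note the missing space after the day part, which is how the f-strings above behave):

```python
print(convert_timestamp(93784.5))               # -> "1d2h 3m 4s 5ms"
print(convert_timestamp(42))                    # -> "42s 0ms"
print(convert_timestamp(42, show_micro=False))  # -> "42s"
```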
src/corelibs/check_handling/regex_constants.py (new file, 37 lines)

```python
"""
List of regex compiled strings that can be used
"""

import re


def compile_re(reg: str) -> re.Pattern[str]:
    """
    compile a regex with verbose flag

    Arguments:
        reg {str} -- _description_

    Returns:
        re.Pattern[str] -- _description_
    """
    return re.compile(reg, re.VERBOSE)


# email regex
EMAIL_BASIC_REGEX: str = r"""
^[A-Za-z0-9!#$%&'*+\-\/=?^_`{|}~][A-Za-z0-9!#$%:\(\)&'*+\-\/=?^_`{|}~\.]{0,63}
@(?!-)[A-Za-z0-9-]{1,63}(?<!-)(?:\.[A-Za-z0-9-]{1,63}(?<!-))*\.[a-zA-Z]{2,6}$
"""
# Domain regex with localhost
DOMAIN_WITH_LOCALHOST_REGEX: str = r"""
^(?:localhost|(?!-)[A-Za-z0-9-]{1,63}(?<!-)(?:\.[A-Za-z0-9-]{1,63}(?<!-))*\.[A-Za-z]{2,})$
"""
# domain regex with localhost and optional port
DOMAIN_WITH_LOCALHOST_PORT_REGEX: str = r"""
^(?:localhost|(?!-)[A-Za-z0-9-]{1,63}(?<!-)(?:\.[A-Za-z0-9-]{1,63}(?<!-))*\.[A-Za-z]{2,})(?::\d+)?$
"""
# Domain, no localhost
DOMAIN_REGEX: str = r"^(?!-)[A-Za-z0-9-]{1,63}(?<!-)(?:\.[A-Za-z0-9-]{1,63}(?<!-))*\.[A-Za-z]{2,}$"

# __END__
```
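A minimal usage sketch for the new constants; the sample strings are made up:

```python
from corelibs.check_handling.regex_constants import compile_re, EMAIL_BASIC_REGEX

email_re = compile_re(EMAIL_BASIC_REGEX)
print(bool(email_re.search("user@example.com")))  # True
print(bool(email_re.search("not-an-email")))      # False
```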
src/corelibs/config_handling/settings_loader.py (new file, 586 lines)

```python
"""
Load settings file for a certain group
Check data for existing and valid
Additional check for override settings as arguments
"""

import re
import configparser
from typing import Any, Tuple, Sequence, cast
from pathlib import Path
from corelibs_var.var_helpers import is_int, is_float, str_to_bool
from corelibs.logging_handling.log import Log
from corelibs.iterator_handling.list_helpers import convert_to_list, is_list_in_list
from corelibs.config_handling.settings_loader_handling.settings_loader_check import SettingsLoaderCheck


class SettingsLoader:
    """
    Settings Loader with Argument parser
    """

    # split char
    DEFAULT_ELEMENT_SPLIT_CHAR: str = ','

    CONVERT_TO_LIST: list[str] = ['str', 'int', 'float', 'bool', 'auto']

    def __init__(
        self,
        args: dict[str, Any],
        config_file: Path,
        log: 'Log | None' = None,
        always_print: bool = False
    ) -> None:
        """
        init the Settings loader

        Args:
            args (dict): Script Arguments
            config_file (Path): config file including path
            log (Log | None): Log class, if set errors are written to this
            always_print (bool): Set to true to always print errors, even if Log is available

        Raises:
            ValueError: _description_
        """
        self.args = args
        self.config_file = config_file
        self.log = log
        self.always_print = always_print
        # config parser, load config file first
        self.config_parser: configparser.ConfigParser | None = self.__load_config_file()
        # for check settings, abort flag
        self.__check_settings_abort: bool = False

        # error messages for raise ValueError
        self.__error_msg: list[str] = []

    # MARK: load settings
    def load_settings(
        self,
        config_id: str,
        config_validate: dict[str, list[str]] | None = None,
        allow_not_exist: bool = False
    ) -> dict[str, str]:
        """
        neutral settings loader

        The settings values on the right side are seen as a list if they have "," inside (see ELEMENT SPLIT CHAR),
        but only if "check:list." is set

        For the allowed entries set, each set is "key => checks"; a check set is "check type:settings"
        key: the key name in the settings file
        check: check set with the following allowed entries on the left side for type
            - mandatory: must be set as "mandatory:yes"; if the key entry is missing or empty, throws an error
            - check: see __check_settings for the settings currently available
            - matching: a | list of entries where the value has to match too
            - in: the right side is another KEY value from the settings where this value must be inside
            - split: character to split entries; if set, check:list+ must be set if checks are needed
            - convert: convert to int, float -> if the element is a number convert, else leave as is
            - empty: convert empty to; if nothing is set on the right side then convert to None type

        TODO: there should be a config/options argument for general settings

        Args:
            config_id (str): what block to load
            config_validate (dict[str, list[str]]): list of allowed entry sets
            allow_not_exist (bool): If set to True, does not throw an error, but returns an empty set

        Returns:
            dict[str, str]: key = value list
        """
        # reset error message list before run
        self.__error_msg = []
        # default set entries
        entry_set_empty: dict[str, str | None] = {}
        # entries that have to be split
        entry_split_char: dict[str, str] = {}
        # entries that should be converted
        entry_convert: dict[str, str] = {}
        # no args to set
        args_override: list[str] = []
        # all the settings for the config id given
        settings: dict[str, dict[str, Any]] = {
            config_id: {},
        }
        if config_validate is None:
            config_validate = {}
        if self.config_parser is not None:
            try:
                # load all data as is, validation is done afterwards
                settings[config_id] = dict(self.config_parser[config_id])
            except KeyError as e:
                if allow_not_exist is True:
                    return {}
                raise ValueError(self.__print(
                    f"[!] Cannot read [{config_id}] block in the file {self.config_file}: {e}",
                    'CRITICAL'
                )) from e
            try:
                for key, checks in config_validate.items():
                    skip = True
                    split_char = self.DEFAULT_ELEMENT_SPLIT_CHAR
                    # if one is set as list in check -> do not skip, but add to list
                    for check in checks:
                        if check.startswith("convert:"):
                            try:
                                [_, convert_to] = check.split(":")
                                if convert_to not in self.CONVERT_TO_LIST:
                                    raise ValueError(self.__print(
                                        f"[!] In [{config_id}] the convert type is invalid {check}: {convert_to}",
                                        'CRITICAL'
                                    ))
                                entry_convert[key] = convert_to
                            except ValueError as e:
                                raise ValueError(self.__print(
                                    f"[!] In [{config_id}] the convert type setup for entry failed: {check}: {e}",
                                    'CRITICAL'
                                )) from e
                        if check.startswith('empty:'):
                            try:
                                [_, empty_set] = check.split(":")
                                if not empty_set:
                                    empty_set = None
                                entry_set_empty[key] = empty_set
                            except ValueError as e:
                                print(f"VALUE ERROR: {key}")
                                raise ValueError(self.__print(
                                    f"[!] In [{config_id}] the empty set type for entry failed: {check}: {e}",
                                    'CRITICAL'
                                )) from e
                        # split char, also check to not set it twice, first one only
                        if check.startswith("split:") and not entry_split_char.get(key):
                            try:
                                [_, split_char] = check.split(":")
                                if len(split_char) == 0:
                                    self.__print(
                                        (
                                            f"[*] In [{config_id}] the [{key}] split char character is empty, "
                                            f"fallback to: {self.DEFAULT_ELEMENT_SPLIT_CHAR}"
                                        ),
                                        "WARNING"
                                    )
                                    split_char = self.DEFAULT_ELEMENT_SPLIT_CHAR
                                entry_split_char[key] = split_char
                                skip = False
                            except ValueError as e:
                                raise ValueError(self.__print(
                                    f"[!] In [{config_id}] the split character setup for entry failed: {check}: {e}",
                                    'CRITICAL'
                                )) from e
                        if check == "args_override:yes":
                            args_override.append(key)
                    if skip:
                        continue
                    settings[config_id][key] = [
                        __value.replace(" ", "")
                        for __value in settings[config_id][key].split(split_char)
                    ]
            except KeyError as e:
                raise ValueError(self.__print(
                    f"[!] Cannot read [{config_id}] block because the entry [{e}] could not be found",
                    'CRITICAL'
                )) from e
        else:
            # ignore error if arguments are set
            if not self.__check_arguments(config_validate, True):
                raise ValueError(self.__print(f"[!] Cannot find file: {self.config_file}", 'CRITICAL'))
            # base set
            settings[config_id] = {}
        # make sure all are set
        # if we have arguments set, these override config settings
        error: bool = False
        for entry, validate in config_validate.items():
            # if we have a command line option set, this one overrides config
            if (args_entry := self.__get_arg(entry)) is not None:
                self.__print(f"[*] Command line option override for: {entry}", 'WARNING')
                if (
                    # only set if flagged as allowed override from args
                    entry in args_override and
                    (isinstance(args_entry, list) and entry_split_char.get(entry)) or
                    (not isinstance(args_entry, list) and not entry_split_char.get(entry))
                ):
                    # args is a list, but entry has no split, do not set
                    settings[config_id][entry] = args_entry
            # validate checks
            for check in validate:
                # CHECKS
                # - mandatory
                # - check: regex check (see SettingsLoaderCheck class for entries)
                # - matching: entry in given list
                # - in: entry in other setting entry list
                # - length: for string length
                # - range: for int/float range check
                # mandatory check
                if check == "mandatory:yes" and (
                    not settings[config_id].get(entry) or settings[config_id].get(entry) == ['']
                ):
                    error = True
                    self.__print(f"[!] Missing content entry for: {entry}", 'ERROR')
                # skip if empty none
                if settings[config_id].get(entry) is None:
                    continue
                if check.startswith("check:"):
                    # replace the check and run normal checks
                    settings[config_id][entry] = self.__check_settings(
                        check, entry, settings[config_id][entry]
                    )
                    if self.__check_settings_abort is True:
                        error = True
                elif check.startswith("matching:"):
                    checks = check.replace("matching:", "").split("|")
                    if __result := is_list_in_list(convert_to_list(settings[config_id][entry]), list(checks)):
                        error = True
                        self.__print(f"[!] [{entry}] '{__result}' not matching {checks}", 'ERROR')
                elif check.startswith("in:"):
                    check = check.replace("in:", "")
                    # skip if check does not exist, and set error
                    if settings[config_id].get(check) is None:
                        error = True
                        self.__print(f"[!] [{entry}] '{check}' target does not exist", 'ERROR')
                        continue
                    # entry must be in check entry
                    # in for list, else equal with convert to string
                    if (
                        __result := is_list_in_list(
                            convert_to_list(settings[config_id][entry]),
                            __checks := convert_to_list(settings[config_id][check])
                        )
                    ):
                        self.__print(f"[!] [{entry}] '{__result}' must be in the '{__checks}' values list", 'ERROR')
                        error = True
                elif check.startswith('length:'):
                    check = check.replace("length:", "")
                    # length can be: n, n-, n-m, -m
                    # as: equal, >= >=< =<
                    self.__build_from_to_equal(entry, check)
                    if not self.__length_range_validate(
                        entry,
                        'length',
                        cast(list[str], convert_to_list(settings[config_id][entry])),
                        self.__build_from_to_equal(entry, check, convert_to_int=True)
                    ):
                        error = True
                elif check.startswith('range:'):
                    check = check.replace("range:", "")
                    if not self.__length_range_validate(
                        entry,
                        'range',
                        cast(list[str], convert_to_list(settings[config_id][entry])),
                        self.__build_from_to_equal(entry, check)
                    ):
                        error = True
                # after post clean up if we have empty entries and we are mandatory
                if check == "mandatory:yes" and (
                    not settings[config_id].get(entry) or settings[config_id].get(entry) == ['']
                ):
                    error = True
                    self.__print(f"[!] Missing content entry for: {entry}", 'ERROR')
        if error is True:
            raise ValueError(
                self.__print(
                    "[!] Missing or incorrect settings data. Cannot proceed: " + "; ".join(self.__error_msg),
                    'CRITICAL'
                )
            )
        # set empty
        for [entry, empty_set] in entry_set_empty.items():
            # if set, skip, else set to empty value
            if settings[config_id].get(entry) or isinstance(settings[config_id].get(entry), list):
                continue
            settings[config_id][entry] = empty_set
        # Convert input
        for [entry, convert_type] in entry_convert.items():
            if convert_type in ["int", "any"] and is_int(settings[config_id][entry]):
                settings[config_id][entry] = int(settings[config_id][entry])
            elif convert_type in ["float", "any"] and is_float(settings[config_id][entry]):
                settings[config_id][entry] = float(settings[config_id][entry])
            elif convert_type in ["bool", "any"] and (
                settings[config_id][entry].lower() == "true" or
                settings[config_id][entry].lower() == "false"
            ):
                try:
                    settings[config_id][entry] = str_to_bool(settings[config_id][entry])
                except ValueError:
                    self.__print(
                        f"[!] Could not convert to boolean for '{entry}': {settings[config_id][entry]}",
                        'ERROR'
                    )
            # string is always string
            # TODO: empty and int/float/bool: set to none?

        return settings[config_id]

    # MARK: build from/to/equal logic
    def __build_from_to_equal(
        self, entry: str, check: str, convert_to_int: bool = False
    ) -> Tuple[float | None, float | None, float | None]:
        """
        split out the "n-m" part to get the to/from/equal

        Arguments:
            entry {str} -- _description_
            check {str} -- _description_

        Returns:
            Tuple[float | None, float | None, float | None] -- _description_

        Throws:
            ValueError if range/length entries are not float
        """
        __from = None
        __to = None
        __equal = None
        try:
            [__from, __to] = check.split('-')
            if (__from and not is_float(__from)) or (__to and not is_float(__to)):
                raise ValueError(self.__print(
                    f"[{entry}] Check value for length is not in: {check}",
                    'CRITICAL'
                ))
            if len(__from) == 0:
                __from = None
            if len(__to) == 0:
                __to = None
        except ValueError as e:
            if not is_float(__equal := check):
                raise ValueError(self.__print(
                    f"[{entry}] Check value for length is not a valid integer: {check}",
                    'CRITICAL'
                )) from e
            if len(__equal) == 0:
                __equal = None
        # make sure this is all int or None
        if __from is not None:
            __from = int(__from) if convert_to_int else float(__from)
        if __to is not None:
            __to = int(__to) if convert_to_int else float(__to)
        if __equal is not None:
            __equal = int(__equal) if convert_to_int else float(__equal)
        return (
            __from,
            __to,
            __equal
        )

    # MARK: length/range validation
    def __length_range_validate(
        self,
        entry: str,
        check_type: str,
        values: Sequence[str | int | float],
        check: Tuple[float | None, float | None, float | None],
    ) -> bool:
        (__from, __to, __equal) = check
        valid = True
        for value_raw in convert_to_list(values):
            # skip not set values for range check
            if not value_raw:
                continue
            value = 0
            error_mark = ''
            if check_type == 'length':
                error_mark = 'length'
                value = len(str(value_raw))
            elif check_type == 'range':
                error_mark = 'range'
                value = float(str(value_raw))
            if __equal is not None and value != __equal:
                self.__print(f"[!] [{entry}] '{value_raw}' {error_mark} does not match {__equal}", 'ERROR')
                valid = False
                continue
            if __from is not None and __to is None and value < __from:
                self.__print(f"[!] [{entry}] '{value_raw}' {error_mark} smaller than minimum {__from}", 'ERROR')
                valid = False
                continue
            if __from is None and __to is not None and value > __to:
                self.__print(f"[!] [{entry}] '{value_raw}' {error_mark} larger than maximum {__to}", 'ERROR')
                valid = False
                continue
            if __from is not None and __to is not None and (
                value < __from or value > __to
            ):
                self.__print(
                    f"[!] [{entry}] '{value_raw}' {error_mark} outside valid range {__from} to {__to}",
                    'ERROR'
                )
                valid = False
                continue
        return valid

    # MARK: load config file data from file
    def __load_config_file(self) -> configparser.ConfigParser | None:
        """
        load and parse the config file
        if not loadable return None
        """
        # remove file name and get base path and check
        if not self.config_file.parent.is_dir():
            raise ValueError(f"Cannot find the config folder: {self.config_file.parent}")
        config = configparser.ConfigParser()
        if self.config_file.is_file():
            config.read(self.config_file)
            return config
        return None

    # MARK: regex clean up one
    def __clean_invalid_setting(
        self,
        entry: str,
        validate: str,
        value: str,
        regex: str,
        regex_clean: str | None,
        replace: str = "",
        print_error: bool = True,
    ) -> str:
        """
        check if a string is invalid, print an optional error message and clean up the string

        Args:
            entry (str): what entry key
            validate (str): validate type
            value (str): the value to check against
            regex (str): regex used for checking as r'...'
            regex_clean (str): regex used for cleaning as r'...'
            replace (str): replace with character. Defaults to ''
            print_error (bool): print the error message. Defaults to True
        """
        check = re.compile(regex, re.VERBOSE)
        clean: re.Pattern[str] | None = None
        if regex_clean is not None:
            clean = re.compile(regex_clean, re.VERBOSE)
        # value must be set if clean is None, else empty value is allowed and will fail
        if (clean is None and value or clean) and not check.search(value):
            self.__print(
                f"[!] Invalid content for '{entry}' with check '{validate}' and data: {value}",
                'ERROR', print_error
            )
            # clean up if clean up is not none, else return EMPTY string
            if clean is not None:
                return clean.sub(replace, value)
            self.__check_settings_abort = True
            return ''
        # else return as is
        return value

    # MARK: check settings, regex
    def __check_settings(
        self,
        check: str, entry: str, setting_value: list[str] | str
    ) -> list[str] | str:
        """
        check each setting is valid
        The settings are defined in the SettingsLoaderCheck class

        Args:
            check (str): What check to run
            entry (str): Variable name, just for information message
            setting_value (list[str | int] | str | int): settings value data

        Returns:
            list[str | int] | str | int: cleaned up settings value data
        """
        check = check.replace("check:", "")
        # get the check settings
        __check_settings = SettingsLoaderCheck.CHECK_SETTINGS.get(check)
        if __check_settings is None:
            raise ValueError(self.__print(
                f"[{entry}] Cannot get SettingsLoaderCheck.CHECK_SETTINGS for {check}",
                'CRITICAL'
            ))
        # reset the abort check
        self.__check_settings_abort = False
        # either removes or replaces invalid characters in the list
        if isinstance(setting_value, list):
            # clean up invalid characters
            # loop over result and keep only filled (strip empty)
            setting_value = [e for e in [
                self.__clean_invalid_setting(
                    entry, check, str(__entry),
                    __check_settings['regex'], __check_settings['regex_clean'], __check_settings['replace']
                )
                for __entry in setting_value
            ] if e]
        else:
            setting_value = self.__clean_invalid_setting(
                entry, check, str(setting_value),
                __check_settings['regex'], __check_settings['regex_clean'], __check_settings['replace']
            )
        # else:
        #     self.__print("[!] Unknown type to check", 'ERROR')
        # return data
        return setting_value

    # MARK: check arguments, for config file load fail
    def __check_arguments(self, arguments: dict[str, list[str]], all_set: bool = False) -> bool:
        """
        check if at least one argument is set

        Args:
            arguments (dict[str, list[str]]): _description_

        Returns:
            bool: _description_
        """
        count_set = 0
        count_arguments = 0
        has_argument = False
        for argument, validate in arguments.items():
            # if the argument is mandatory add to count, if not mandatory set "has" to skip the error
            mandatory = any(entry == "mandatory:yes" for entry in validate)
            if not mandatory:
                has_argument = True
                continue
            count_arguments += 1
            if self.__get_arg(argument):
                has_argument = True
                count_set += 1
        # for all set, True only if all are set
        if all_set is True:
            has_argument = count_set == count_arguments

        return has_argument

    # MARK: get argument from args dict
    def __get_arg(self, entry: str) -> Any:
        """
        check if an argument entry exists; if None -> returns None, else the value of the argument

        Arguments:
            entry {str} -- _description_

        Returns:
            Any -- _description_
        """
        if self.args.get(entry) is None:
            return None
        return self.args.get(entry)

    # MARK: error print
    def __print(self, msg: str, level: str, print_error: bool = True) -> str:
        """
        print out error, if Log class is set then print to log instead

        Arguments:
            msg {str} -- _description_
            level {str} -- _description_

        Keyword Arguments:
            print_error {bool} -- _description_ (default: {True})
        """
        if self.log is not None:
            if not Log.validate_log_level(level):
                level = 'ERROR'
            self.log.logger.log(Log.get_log_level_int(level), msg, stacklevel=2)
        if self.log is None or self.always_print:
            if print_error:
                print(msg)
        if level == 'ERROR':
            # remove any prefix [!] for error message list
            self.__error_msg.append(msg.replace('[!] ', '').strip())
        return msg


# __END__
```
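A minimal usage sketch for the new loader; the file name, section, and keys below are made up to show the check syntax from the docstring:

```python
# settings.ini (illustrative):
#
#   [database]
#   host = db.example.com
#   port = 5432
#   tags = alpha, beta

from pathlib import Path
from corelibs.config_handling.settings_loader import SettingsLoader

loader = SettingsLoader(
    args={},  # no command line overrides
    config_file=Path("settings.ini"),
)
settings = loader.load_settings(
    "database",
    config_validate={
        "host": ["mandatory:yes", "check:string.domain"],
        "port": ["mandatory:yes", "check:int", "convert:int", "range:1-65535"],
        "tags": ["split:,"],  # parsed into a list on ","
    },
)
# -> {'host': 'db.example.com', 'port': 5432, 'tags': ['alpha', 'beta']}
```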
src/corelibs/config_handling/settings_loader_handling/settings_loader_check.py (new file, 81 lines)

```python
"""
Class of checks that can be run on value entries
"""

from typing import TypedDict
from corelibs.check_handling.regex_constants import (
    EMAIL_BASIC_REGEX, DOMAIN_WITH_LOCALHOST_REGEX, DOMAIN_WITH_LOCALHOST_PORT_REGEX, DOMAIN_REGEX
)


class SettingsLoaderCheckValue(TypedDict):
    """Settings check entries"""

    regex: str
    # if None, then on error we exit, else we clean up the data
    regex_clean: str | None
    replace: str


class SettingsLoaderCheck:
    """
    check:<NAME> or check:list+<NAME>
    """

    CHECK_SETTINGS: dict[str, SettingsLoaderCheckValue] = {
        "int": {
            "regex": r"^[0-9]+$",
            "regex_clean": r"[^0-9]",
            "replace": "",
        },
        "string.alphanumeric": {
            "regex": r"^[a-zA-Z0-9]+$",
            "regex_clean": r"[^a-zA-Z0-9]",
            "replace": "",
        },
        "string.alphanumeric.lower.dash": {
            "regex": r"^[a-z0-9-]+$",
            "regex_clean": r"[^a-z0-9-]",
            "replace": "",
        },
        # A-Z a-z 0-9 _ - . ONLY
        # This one does not remove, but replaces with _
        "string.alphanumeric.extended.replace": {
            "regex": r"^[_.a-zA-Z0-9-]+$",
            "regex_clean": r"[^_.a-zA-Z0-9-]",
            "replace": "_",
        },
        # This does a basic email check, only alphanumeric with special characters
        "string.email.basic": {
            "regex": EMAIL_BASIC_REGEX,
            "regex_clean": None,
            "replace": "",
        },
        # Domain check, including localhost, no port
        "string.domain.with-localhost": {
            "regex": DOMAIN_WITH_LOCALHOST_REGEX,
            "regex_clean": None,
            "replace": "",
        },
        # Domain check, with localhost and port
        "string.domain.with-localhost.port": {
            "regex": DOMAIN_WITH_LOCALHOST_PORT_REGEX,
            "regex_clean": None,
            "replace": "",
        },
        # Domain check, no pure localhost allowed
        "string.domain": {
            "regex": DOMAIN_REGEX,
            "regex_clean": None,
            "replace": "",
        },
        # Basic date check, does not validate date itself
        "string.date": {
            "regex": r"^\d{4}[/-]\d{1,2}[/-]\d{1,2}$",
            "regex_clean": None,
            "replace": "",
        }
    }


# __END__
```
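The loader normally drives these checks; a stand-alone sketch of how one check set is applied:

```python
import re

from corelibs.config_handling.settings_loader_handling.settings_loader_check import SettingsLoaderCheck

check = SettingsLoaderCheck.CHECK_SETTINGS["string.alphanumeric"]
value = "abc-123!"
if not re.search(check["regex"], value, re.VERBOSE):
    # regex_clean is set for this check, so invalid characters are stripped
    value = re.sub(check["regex_clean"], check["replace"], value, flags=re.VERBOSE)
print(value)  # -> "abc123"
```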
170
src/corelibs/csv_handling/csv_interface.py
Normal file
170
src/corelibs/csv_handling/csv_interface.py
Normal file
@@ -0,0 +1,170 @@
|
||||
"""
|
||||
Write to CSV file
|
||||
- each class set is one file write with one header set
|
||||
"""
|
||||
|
||||
from typing import Any, Sequence
|
||||
from pathlib import Path
|
||||
from collections import Counter
|
||||
import csv
|
||||
from corelibs.file_handling.file_bom_encoding import is_bom_encoded, is_bom_encoded_info
|
||||
from corelibs.exceptions.csv_exceptions import (
|
||||
NoCsvReader, CompulsoryCsvHeaderCheckFailed, CsvHeaderDataMissing
|
||||
)
|
||||
|
||||
ENCODING = 'utf-8'
|
||||
ENCODING_UTF8_SIG = 'utf-8-sig'
|
||||
DELIMITER = ","
|
||||
QUOTECHAR = '"'
|
||||
# type: _QuotingType
|
||||
QUOTING = csv.QUOTE_MINIMAL
|
||||
|
||||
|
||||
class CsvWriter:
|
||||
"""
|
||||
write to a CSV file
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
file_name: Path,
|
||||
header_mapping: dict[str, str],
|
||||
header_order: list[str] | None = None,
|
||||
encoding: str = ENCODING,
|
||||
delimiter: str = DELIMITER,
|
||||
quotechar: str = QUOTECHAR,
|
||||
quoting: Any = QUOTING,
|
||||
):
|
||||
self.__file_name = file_name
|
||||
# Key: index for write for the line dict, Values: header entries
|
||||
self.header_mapping = header_mapping
|
||||
self.header: Sequence[str] = list(header_mapping.values())
|
||||
self.__delimiter = delimiter
|
||||
self.__quotechar = quotechar
|
||||
self.__quoting = quoting
|
||||
self.__encoding = encoding
|
||||
self.csv_file_writer = self.__open_csv(header_order)
|
||||
|
||||
def __open_csv(self, header_order: list[str] | None) -> csv.DictWriter[str]:
|
||||
"""
|
||||
open csv file for writing, write headers
|
||||
|
||||
Note that if there is no header_order set we use the order in header dictionary
|
||||
|
||||
Arguments:
|
||||
line {list[str] | None} -- optional dedicated header order
|
||||
|
||||
Returns:
|
||||
csv.DictWriter[str] | None: _description_
|
||||
"""
|
||||
# if header order is set, make sure all header value fields exist
|
||||
if not self.header:
|
||||
raise CsvHeaderDataMissing("No header data available to write CSV file")
|
||||
header_values = self.header
|
||||
if header_order is not None:
|
||||
if Counter(header_values) != Counter(header_order):
|
||||
raise CompulsoryCsvHeaderCheckFailed(
|
||||
"header order does not match header values: "
|
||||
f"{', '.join(header_values)} != {', '.join(header_order)}"
|
||||
)
|
||||
header_values = header_order
|
||||
# no duplicates
|
||||
if len(header_values) != len(set(header_values)):
|
||||
raise CompulsoryCsvHeaderCheckFailed(f"Header must have unique values only: {', '.join(header_values)}")
|
||||
try:
|
||||
fp = open(
|
||||
self.__file_name,
|
||||
"w",
|
||||
encoding=self.__encoding
|
||||
)
|
||||
csv_file_writer = csv.DictWriter(
|
||||
fp,
|
||||
fieldnames=header_values,
|
||||
delimiter=self.__delimiter,
|
||||
quotechar=self.__quotechar,
|
||||
quoting=self.__quoting,
|
||||
)
|
||||
csv_file_writer.writeheader()
|
||||
return csv_file_writer
|
||||
except OSError as err:
|
||||
raise NoCsvReader(f"Could not open CSV file for writing: {err}") from err
|
||||
|
||||
def write_csv(self, line: dict[str, str]) -> None:
|
||||
"""
|
||||
write member csv line
|
||||
|
||||
Arguments:
|
||||
line {dict[str, str]} -- _description_
|
||||
|
||||
Returns:
|
||||
bool -- _description_
|
||||
"""
|
||||
csv_row: dict[str, Any] = {}
|
||||
# only write entries that are in the header list
|
||||
for key, value in self.header_mapping.items():
|
||||
csv_row[value] = line[key]
|
||||
self.csv_file_writer.writerow(csv_row)

class CsvReader:
    """
    read from a CSV file
    """

    def __init__(
        self,
        file_name: Path,
        header_check: Sequence[str] | None = None,
        encoding: str = ENCODING,
        delimiter: str = DELIMITER,
        quotechar: str = QUOTECHAR,
        quoting: Any = QUOTING,
    ):
        self.__file_name = file_name
        self.__header_check = header_check
        self.__delimiter = delimiter
        self.__quotechar = quotechar
        self.__quoting = quoting
        self.__encoding = encoding
        self.header: Sequence[str] | None = None
        self.csv_file_reader = self.__open_csv()

    def __open_csv(self) -> csv.DictReader[str]:
        """
        open csv file for reading

        Returns:
            csv.DictReader[str] -- reader positioned after the header row
        """
        try:
            # if a UTF style encoding is requested, check whether the file is BOM encoded
            if self.__encoding.lower().startswith('utf-') and is_bom_encoded(self.__file_name):
                bom_info = is_bom_encoded_info(self.__file_name)
                if bom_info['encoding'] == 'utf-8':
                    self.__encoding = ENCODING_UTF8_SIG
                else:
                    self.__encoding = bom_info['encoding'] or self.__encoding
            fp = open(
                self.__file_name,
                "r", encoding=self.__encoding
            )
            csv_file_reader = csv.DictReader(
                fp,
                delimiter=self.__delimiter,
                quotechar=self.__quotechar,
                quoting=self.__quoting,
            )
            self.header = csv_file_reader.fieldnames
            if not self.header:
                raise CsvHeaderDataMissing("No header data available in CSV file")
            if self.__header_check is not None:
                header_diff = set(self.__header_check).difference(set(self.header or []))
                if header_diff:
                    raise CompulsoryCsvHeaderCheckFailed(
                        f"CSV header does not match expected header: {', '.join(header_diff)} missing"
                    )
            return csv_file_reader
        except OSError as err:
            raise NoCsvReader(f"Could not open CSV file for reading: {err}") from err

# __END__
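
A minimal usage sketch for the writer/reader pair above. The CsvWriter constructor arguments shown are assumptions inferred from the attributes it sets (its full signature is outside this excerpt); file and column names are illustrative only.

# hypothetical usage sketch, assuming CsvWriter(file_name, header_mapping, header_order=...)
from pathlib import Path

writer = CsvWriter(
    Path("members.csv"),
    header_mapping={"first": "First Name", "last": "Last Name"},  # internal key -> CSV column
    header_order=["Last Name", "First Name"],  # must be a permutation of the mapping values
)
writer.write_csv({"first": "Ada", "last": "Lovelace"})

reader = CsvReader(Path("members.csv"), header_check=["Last Name"])
for row in reader.csv_file_reader:
    print(row["Last Name"])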
235 src/corelibs/datetime_handling/datetime_helpers.py Normal file
@@ -0,0 +1,235 @@
"""
Various string based date/time helpers
"""

from datetime import datetime, time
from warnings import deprecated
from zoneinfo import ZoneInfo
from corelibs_datetime import datetime_helpers


@deprecated("Use corelibs_datetime.datetime_helpers.create_time instead")
def create_time(timestamp: float, timestamp_format: str = "%Y-%m-%d %H:%M:%S") -> str:
    """
    just takes a timestamp and prints out a human readable format

    Arguments:
        timestamp {float} -- _description_

    Keyword Arguments:
        timestamp_format {_type_} -- _description_ (default: {"%Y-%m-%d %H:%M:%S"})

    Returns:
        str -- _description_
    """
    return datetime_helpers.create_time(timestamp, timestamp_format)


@deprecated("Use corelibs_datetime.datetime_helpers.get_system_timezone instead")
def get_system_timezone():
    """Get system timezone using datetime's automatic detection"""
    # Get current time with system timezone
    return datetime_helpers.get_system_timezone()


@deprecated("Use corelibs_datetime.datetime_helpers.parse_timezone_data instead")
def parse_timezone_data(timezone_tz: str = '') -> ZoneInfo:
    """
    parses a string to get the ZoneInfo
    If not set or not valid gets local time,
    if that is not possible get UTC

    Keyword Arguments:
        timezone_tz {str} -- _description_ (default: {''})

    Returns:
        ZoneInfo -- _description_
    """
    return datetime_helpers.parse_timezone_data(timezone_tz)


@deprecated("Use corelibs_datetime.datetime_helpers.get_datetime_iso8601 instead")
def get_datetime_iso8601(timezone_tz: str | ZoneInfo = '', sep: str = 'T', timespec: str = 'microseconds') -> str:
    """
    set a datetime in the iso8601 format with microseconds

    Returns:
        str -- _description_
    """
    try:
        return datetime_helpers.get_datetime_iso8601(timezone_tz, sep, timespec)
    except KeyError as e:
        raise ValueError(f"Deprecated ValueError, change to KeyError: {e}") from e


@deprecated("Use corelibs_datetime.datetime_helpers.validate_date instead")
def validate_date(date: str, not_before: datetime | None = None, not_after: datetime | None = None) -> bool:
    """
    check if Y-m-d or Y/m/d dates are parsable and valid

    Arguments:
        date {str} -- _description_

    Returns:
        bool -- _description_
    """
    return datetime_helpers.validate_date(date, not_before, not_after)


@deprecated("Use corelibs_datetime.datetime_helpers.parse_flexible_date instead")
def parse_flexible_date(
    date_str: str,
    timezone_tz: str | ZoneInfo | None = None,
    shift_time_zone: bool = True
) -> datetime | None:
    """
    Parse date string in multiple formats
    will add time zone info if not None
    by default it will shift the TZ and time to the new time zone
    if no TZ info is set in date_str, then localtime is assumed

    Arguments:
        date_str {str} -- _description_

    Keyword Arguments:
        timezone_tz {str | ZoneInfo | None} -- _description_ (default: {None})
        shift_time_zone {bool} -- _description_ (default: {True})

    Returns:
        datetime | None -- _description_
    """
    return datetime_helpers.parse_flexible_date(
        date_str,
        timezone_tz,
        shift_time_zone
    )


@deprecated("Use corelibs_datetime.datetime_helpers.compare_dates instead")
def compare_dates(date1_str: str, date2_str: str) -> None | bool:
    """
    compare two dates, if the first one is newer than the second one return True
    If the dates are equal then False will be returned
    on error return None

    Arguments:
        date1_str {str} -- _description_
        date2_str {str} -- _description_

    Returns:
        None | bool -- _description_
    """
    return datetime_helpers.compare_dates(date1_str, date2_str)


@deprecated("Use corelibs_datetime.datetime_helpers.find_newest_datetime_in_list instead")
def find_newest_datetime_in_list(date_list: list[str]) -> None | str:
    """
    Find the newest date from a list of ISO 8601 formatted date strings.
    Handles potential parsing errors gracefully.

    Args:
        date_list (list): List of date strings in format '2025-08-06T16:17:39.747+09:00'

    Returns:
        str: The date string with the newest/latest date, or None if the list is empty or all dates are invalid
    """
    return datetime_helpers.find_newest_datetime_in_list(date_list)


@deprecated("Use corelibs_datetime.datetime_helpers.parse_day_of_week_range instead")
def parse_day_of_week_range(dow_days: str) -> list[tuple[int, str]]:
    """
    Parse a day of week list/range string and return a list of tuples with day index and name.
    Allowed are short (eg Mon) or long names (eg Monday).

    Arguments:
        dow_days {str} -- A comma-separated list of days or ranges (e.g., "Mon,Wed-Fri")

    Raises:
        ValueError: If the input format is invalid or if duplicate days are found.

    Returns:
        list[tuple[int, str]] -- A list of tuples containing the day index and name.
    """
    # we have Sun twice because it can be 0 or 7
    # Mon is 1 and Sun is 7, which is ISO standard
    try:
        return datetime_helpers.parse_day_of_week_range(dow_days)
    except KeyError as e:
        raise ValueError(f"Deprecated ValueError, change to KeyError: {e}") from e


@deprecated("Use corelibs_datetime.datetime_helpers.parse_time_range instead")
def parse_time_range(time_str: str, time_format: str = "%H:%M") -> tuple[time, time]:
    """
    Parse a time range string in the format "HH:MM-HH:MM" and return a tuple of two time objects.

    Arguments:
        time_str {str} -- The time range string to parse.

    Raises:
        ValueError: Invalid time block set
        ValueError: Invalid time format
        ValueError: Start time must be before end time

    Returns:
        tuple[time, time] -- start time, end time: formatted with leading zeros
    """
    try:
        return datetime_helpers.parse_time_range(time_str, time_format)
    except KeyError as e:
        raise ValueError(f"Deprecated ValueError, change to KeyError: {e}") from e


@deprecated("Use corelibs_datetime.datetime_helpers.times_overlap_or_connect instead")
def times_overlap_or_connect(time1: tuple[time, time], time2: tuple[time, time], allow_touching: bool = False) -> bool:
    """
    Check if two time ranges overlap or connect

    Args:
        time1 (tuple): (start_time, end_time) for first range
        time2 (tuple): (start_time, end_time) for second range
        allow_touching (bool): If True, touching ranges (e.g., 8:00-10:00 and 10:00-12:00) are allowed

    Returns:
        bool: True if ranges overlap or connect (based on allow_touching)
    """
    return datetime_helpers.times_overlap_or_connect(time1, time2, allow_touching)


@deprecated("Use corelibs_datetime.datetime_helpers.is_time_in_range instead")
def is_time_in_range(current_time: str, start_time: str, end_time: str) -> bool:
    """
    Check if current_time is within start_time and end_time (inclusive)
    Time format: "HH:MM" (24-hour format)

    Arguments:
        current_time {str} -- _description_
        start_time {str} -- _description_
        end_time {str} -- _description_

    Returns:
        bool -- _description_
    """
    # Convert string times to time objects
    return datetime_helpers.is_time_in_range(current_time, start_time, end_time)


@deprecated("Use corelibs_datetime.datetime_helpers.reorder_weekdays_from_today instead")
def reorder_weekdays_from_today(base_day: str) -> dict[int, str]:
    """
    Reorder the days of the week starting from the specified base_day.

    Arguments:
        base_day {str} -- The day to start the week from (e.g., "Mon").

    Returns:
        dict[int, str] -- A dictionary mapping day numbers to day names.
    """
    try:
        return datetime_helpers.reorder_weekdays_from_today(base_day)
    except KeyError as e:
        raise ValueError(f"Deprecated ValueError, change to KeyError: {e}") from e

# __END__
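
The file above is a thin deprecation shim: every function forwards to corelibs_datetime, and where the new library switched from raising ValueError to KeyError, the shim wraps the KeyError back into a ValueError so old callers keep working. A hedged sketch of that contract from the caller side (the day range and output are illustrative):

# illustrative only; the exact return values are defined by corelibs_datetime
try:
    days = parse_day_of_week_range("Mon,Wed-Fri")  # e.g. [(1, 'Mon'), (3, 'Wed'), (4, 'Thu'), (5, 'Fri')]
except ValueError as e:
    # legacy callers still catch ValueError even though the new library raises KeyError
    print(f"bad day range: {e}")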
88 src/corelibs/datetime_handling/timestamp_convert.py Normal file
@@ -0,0 +1,88 @@
"""
Convert timestamp strings with time units into seconds and vice versa.
"""

from warnings import deprecated
from corelibs_datetime import timestamp_convert
from corelibs_datetime.timestamp_convert import TimeParseError as NewTimeParseError, TimeUnitError as NewTimeUnitError


@deprecated("Use corelibs_datetime.timestamp_convert.TimeParseError instead")
class TimeParseError(Exception):
    """Custom exception for time parsing errors."""


@deprecated("Use corelibs_datetime.timestamp_convert.TimeUnitError instead")
class TimeUnitError(Exception):
    """Custom exception for time unit errors."""


@deprecated("Use corelibs_datetime.timestamp_convert.convert_to_seconds instead")
def convert_to_seconds(time_string: str | int | float) -> int:
    """
    Convert a string with time units into seconds
    The following units are allowed:
    Y: 365 days
    M: 30 days
    d, h, m, s

    Arguments:
        time_string {str} -- _description_

    Raises:
        ValueError: _description_

    Returns:
        int -- _description_
    """

    # skip out if this is a number of any type
    # numbers will be made float, rounded and then converted to int
    try:
        return timestamp_convert.convert_to_seconds(time_string)
    except NewTimeParseError as e:
        raise TimeParseError(f"Deprecated, use corelibs_datetime.timestamp_convert.TimeParseError: {e}") from e
    except NewTimeUnitError as e:
        raise TimeUnitError(f"Deprecated, use corelibs_datetime.timestamp_convert.TimeUnitError: {e}") from e


@deprecated("Use corelibs_datetime.timestamp_convert.seconds_to_string instead")
def seconds_to_string(seconds: str | int | float, show_microseconds: bool = False) -> str:
    """
    Convert seconds to a compact human readable format (e.g., "1d 2h 3m 4.567s")
    Zero values are omitted.
    Milliseconds, if requested, are added as the fractional part of the seconds.
    Supports negative values with "-" prefix
    if not int or float, will return as is

    Args:
        seconds (float): Time in seconds (can be negative)
        show_microseconds (bool): Whether to show microseconds precision

    Returns:
        str: Compact human readable time format
    """
    return timestamp_convert.seconds_to_string(seconds, show_microseconds)


@deprecated("Use corelibs_datetime.timestamp_convert.convert_timestamp instead")
def convert_timestamp(timestamp: float | int | str, show_microseconds: bool = True) -> str:
    """
    format a timestamp into a human readable format. This function will add 0 values between set values,
    for example if we have 1d 1s it would output 1d 0h 0m 1s
    Milliseconds will be shown if set, and added with ms at the end
    Negative values will be prefixed with "-"
    if not int or float, will return as is

    Arguments:
        timestamp {float} -- _description_

    Keyword Arguments:
        show_microseconds {bool} -- _description_ (default: {True})

    Returns:
        str -- _description_
    """
    return timestamp_convert.convert_timestamp(timestamp, show_microseconds)

# __END__
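
A hedged round-trip sketch of the three converters above. The input string format (space-separated unit blocks) and the exact output strings are assumptions; the real formatting is defined in corelibs_datetime:

secs = convert_to_seconds("1h 30m")   # e.g. 5400, assuming "1h 30m" is an accepted unit string
print(seconds_to_string(5400))        # e.g. "1h 30m" (zero parts omitted)
print(convert_timestamp(90061))       # e.g. "1d 1h 1m 1s ..." (zero parts padded in)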
21 src/corelibs/datetime_handling/timestamp_strings.py Normal file
@@ -0,0 +1,21 @@
"""
Current timestamp strings and time zones
"""

from warnings import deprecated
from zoneinfo import ZoneInfo
from corelibs_datetime import timestamp_strings


class TimestampStrings(timestamp_strings.TimestampStrings):
    """
    set default time stamps
    """

    TIME_ZONE: str = 'Asia/Tokyo'

    @deprecated("Use corelibs_datetime.timestamp_strings.TimestampStrings instead")
    def __init__(self, time_zone: str | ZoneInfo | None = None):
        super().__init__(time_zone)

# __END__
76 src/corelibs/db_handling/sql_main.py Normal file
@@ -0,0 +1,76 @@
"""
Main SQL base for any SQL calls
This is a wrapper for SQLiteIO or other future DB interfaces
[Note: at the moment only SQLiteIO is implemented]
- connects on class creation, raises ValueError on failure
- the connect method checks if already connected and warns
- connect raises ValueError if no valid target (SQL wrapper type) is selected
- connected check method
- a process_query method that returns data as a list, or False on end or error

TODO: adapt more CoreLibs DB IO class flow here
"""

from typing import TYPE_CHECKING, Any, Literal
from corelibs.debug_handling.debug_helpers import call_stack
from corelibs.db_handling.sqlite_io import SQLiteIO
if TYPE_CHECKING:
    from corelibs.logging_handling.log import Logger


IDENT_SPLIT_CHARACTER: str = ':'


class SQLMain:
    """Main SQL interface class"""
    def __init__(self, log: 'Logger', db_ident: str):
        self.log = log
        self.dbh: SQLiteIO | None = None
        self.db_target: str | None = None
        self.connect(db_ident)
        if not self.connected():
            raise ValueError(f'Failed to connect to database [{call_stack()}]')

    def connect(self, db_ident: str):
        """setup basic connection"""
        if self.dbh is not None and self.dbh.conn is not None:
            self.log.warning(f"A database connection already exists for: {self.db_target} [{call_stack()}]")
            return
        # split only on the first ':' so DSNs that contain ':' keep working
        self.db_target, db_dsn = db_ident.split(IDENT_SPLIT_CHARACTER, 1)
        match self.db_target:
            case 'sqlite':
                # the DSN is a Path only at the moment
                self.dbh = SQLiteIO(self.log, db_dsn, row_factory='Dict')
            case _:
                raise ValueError(f'SQL interface for {self.db_target} is not implemented [{call_stack()}]')
        if not self.dbh.db_connected():
            raise ValueError(f"DB Connection failed for: {self.db_target} [{call_stack()}]")

    def close(self):
        """close connection"""
        if self.dbh is None or not self.connected():
            return
        # self.log.info(f"Close DB Connection: {self.db_target} [{call_stack()}]")
        self.dbh.db_close()

    def connected(self) -> bool:
        """check connection"""
        if self.dbh is None or not self.dbh.db_connected():
            self.log.warning(f"No connection [{call_stack()}]")
            return False
        return True

    def process_query(
        self, query: str, params: tuple[Any, ...] | None = None
    ) -> list[tuple[Any, ...]] | list[dict[str, Any]] | Literal[False]:
        """mini wrapper for execute query"""
        if self.dbh is not None:
            result = self.dbh.execute_query(query, params)
            if result is False:
                return False
        else:
            self.log.error(f"Problem connecting to db: {self.db_target} [{call_stack()}]")
            return False
        return result

# __END__
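
A hedged usage sketch for SQLMain. The ident string format "<target>:<dsn>" follows from IDENT_SPLIT_CHARACTER above; the logger instance and database path are illustrative:

# log is assumed to be a corelibs Logger instance; construction is not shown in this excerpt
sql = SQLMain(log, "sqlite:/tmp/example.sqlite3")
rows = sql.process_query("SELECT name FROM sqlite_master WHERE type = ?", ("table",))
if rows is not False:
    for row in rows:
        print(row["name"])  # dict rows, because SQLMain opens SQLiteIO with row_factory='Dict'
sql.close()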
214 src/corelibs/db_handling/sqlite_io.py Normal file
@@ -0,0 +1,214 @@
"""
SQLite DB::IO
Will be moved to the CoreLibs
also method names are subject to change
"""

# import gc
from pathlib import Path
from typing import Any, Literal, TYPE_CHECKING
import sqlite3
from corelibs.debug_handling.debug_helpers import call_stack
if TYPE_CHECKING:
    from corelibs.logging_handling.log import Logger


class SQLiteIO():
    """Mini SQLite interface"""

    def __init__(
        self,
        log: 'Logger',
        db_name: str | Path,
        autocommit: bool = False,
        enable_fkey: bool = True,
        row_factory: str | None = None
    ):
        self.log = log
        self.db_name = db_name
        self.autocommit = autocommit
        self.enable_fkey = enable_fkey
        self.row_factory = row_factory
        self.conn: sqlite3.Connection | None = self.db_connect()

    # def __del__(self):
    #     self.db_close()

    def db_connect(self) -> sqlite3.Connection | None:
        """
        Connect to SQLite database, create if it doesn't exist
        """
        try:
            # Connect to database (creates it if it doesn't exist)
            # note: the autocommit keyword and Connection.setconfig require Python 3.12+
            self.conn = sqlite3.connect(self.db_name, autocommit=self.autocommit)
            # honor the enable_fkey flag instead of hard coding True
            self.conn.setconfig(sqlite3.SQLITE_DBCONFIG_ENABLE_FKEY, self.enable_fkey)
            # self.conn.execute("PRAGMA journal_mode=WAL")
            # self.log.debug(f"Connected to database: {self.db_name}")

            def dict_factory(cursor: sqlite3.Cursor, row: list[Any]):
                fields = [column[0] for column in cursor.description]
                return dict(zip(fields, row))

            match self.row_factory:
                case 'Row':
                    self.conn.row_factory = sqlite3.Row
                case 'Dict':
                    self.conn.row_factory = dict_factory
                case _:
                    self.conn.row_factory = None

            return self.conn
        except (sqlite3.Error, sqlite3.OperationalError) as e:
            self.log.error(f"Error connecting to database [{type(e).__name__}] [{self.db_name}]: {e} [{call_stack()}]")
            self.log.error(f"Error code: {e.sqlite_errorcode if hasattr(e, 'sqlite_errorcode') else 'N/A'}")
            self.log.error(f"Error name: {e.sqlite_errorname if hasattr(e, 'sqlite_errorname') else 'N/A'}")
            return None

    def db_close(self):
        """close connection"""
        if self.conn is not None:
            self.conn.close()
            self.conn = None

    def db_connected(self) -> bool:
        """
        Return True if the db connection is not None

        Returns:
            bool -- _description_
        """
        return self.conn is not None

    def __content_exists(self, content_name: str, sql_type: str) -> bool:
        """
        Check if some content name for a certain type exists

        Arguments:
            content_name {str} -- _description_
            sql_type {str} -- _description_

        Returns:
            bool -- _description_
        """
        if self.conn is None:
            return False
        try:
            cursor = self.conn.cursor()
            cursor.execute("""
                SELECT name
                FROM sqlite_master
                WHERE type = ? AND name = ?
            """, (sql_type, content_name,))
            return cursor.fetchone() is not None
        except sqlite3.Error as e:
            self.log.error(f"Error checking table [{content_name}/{sql_type}] existence: {e} [{call_stack()}]")
            return False

    def table_exists(self, table_name: str) -> bool:
        """
        Check if a table exists in the database
        """
        return self.__content_exists(table_name, 'table')

    def trigger_exists(self, trigger_name: str) -> bool:
        """
        Check if a trigger exists
        """
        return self.__content_exists(trigger_name, 'trigger')

    def index_exists(self, index_name: str) -> bool:
        """
        Check if an index exists
        """
        return self.__content_exists(index_name, 'index')

    def meta_data_detail(self, table_name: str) -> list[tuple[Any, ...]] | list[dict[str, Any]] | Literal[False]:
        """table detail"""
        query_show_table = """
            SELECT
                ti.cid, ti.name, ti.type, ti.'notnull', ti.dflt_value, ti.pk,
                il_ii.idx_name, il_ii.idx_unique, il_ii.idx_origin, il_ii.idx_partial
            FROM
                sqlite_schema AS m,
                pragma_table_info(m.name) AS ti
            LEFT JOIN (
                SELECT
                    il.name AS idx_name, il.'unique' AS idx_unique, il.origin AS idx_origin, il.partial AS idx_partial,
                    ii.cid AS tbl_cid
                FROM
                    sqlite_schema AS m,
                    pragma_index_list(m.name) AS il,
                    pragma_index_info(il.name) AS ii
                WHERE m.name = ?1
            ) AS il_ii ON (ti.cid = il_ii.tbl_cid)
            WHERE
                m.name = ?1
        """
        return self.execute_query(query_show_table, (table_name,))

    def execute_cursor(
        self, query: str, params: tuple[Any, ...] | None = None
    ) -> sqlite3.Cursor | Literal[False]:
        """execute a cursor; used by execute_query / return_one and for fetch_row"""
        if self.conn is None:
            self.log.warning(f"No connection [{call_stack()}]")
            return False
        try:
            cursor = self.conn.cursor()
            if params:
                cursor.execute(query, params)
            else:
                cursor.execute(query)
            return cursor
        except sqlite3.Error as e:
            self.log.error(f"Error during executing cursor [{query}:{params}]: {e} [{call_stack()}]")
            return False

    def execute_query(
        self, query: str, params: tuple[Any, ...] | None = None
    ) -> list[tuple[Any, ...]] | list[dict[str, Any]] | Literal[False]:
        """query execute with or without params, returns result"""
        if self.conn is None:
            self.log.warning(f"No connection [{call_stack()}]")
            return False
        try:
            if (cursor := self.execute_cursor(query, params)) is False:
                return False
            # fetch before commit because we need to read the RETURNING rows first
            result = cursor.fetchall()
            # this is for INSERT/UPDATE/CREATE only
            self.conn.commit()
            return result
        except sqlite3.Error as e:
            self.log.error(f"Error during executing query [{query}:{params}]: {e} [{call_stack()}]")
            return False

    def return_one(
        self, query: str, params: tuple[Any, ...] | None = None
    ) -> tuple[Any, ...] | dict[str, Any] | Literal[False] | None:
        """return one row, only for SELECT"""
        if self.conn is None:
            self.log.warning(f"No connection [{call_stack()}]")
            return False
        try:
            if (cursor := self.execute_cursor(query, params)) is False:
                return False
            return cursor.fetchone()
        except sqlite3.Error as e:
            self.log.error(f"Error during return one: {e} [{call_stack()}]")
            return False

    def fetch_row(
        self, cursor: sqlite3.Cursor | Literal[False]
    ) -> tuple[Any, ...] | dict[str, Any] | Literal[False] | None:
        """read from cursor"""
        if self.conn is None or cursor is False:
            self.log.warning(f"No connection [{call_stack()}]")
            return False
        try:
            return cursor.fetchone()
        except sqlite3.Error as e:
            self.log.error(f"Error during fetch row: {e} [{call_stack()}]")
            return False

# __END__
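
A hedged sketch of the row-streaming pattern the class supports via execute_cursor plus fetch_row, useful for result sets too large to fetchall(); the logger, table and per-row handler are illustrative:

db = SQLiteIO(log, "/tmp/example.sqlite3", row_factory='Dict')  # log assumed to be a corelibs Logger
cursor = db.execute_cursor("SELECT * FROM big_table")
# fetch_row returns None at end of data and False on error
while (row := db.fetch_row(cursor)) not in (None, False):
    process(row)  # hypothetical per-row handler
db.db_close()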
76 src/corelibs/debug_handling/debug_helpers.py Normal file
@@ -0,0 +1,76 @@
"""
Various debug helpers
"""

import traceback
import os
import sys
from typing import Tuple, Type
from types import TracebackType

# _typeshed.OptExcInfo
OptExcInfo = Tuple[None, None, None] | Tuple[Type[BaseException], BaseException, TracebackType]


def call_stack(
    start: int = 0,
    skip_last: int = -1,
    separator: str = ' -> ',
    reset_start_if_empty: bool = False
) -> str:
    """
    get the trace for the last entry

    Keyword Arguments:
        start {int} -- start index; if too high the output will be empty unless reset_start_if_empty is set (default: {0})
        skip_last {int} -- how many of the last entries are skipped, defaults to -1 for the current method (default: {-1})
        separator {str} -- stack separator; if empty, defaults to ' -> ' (default: {' -> '})
        reset_start_if_empty {bool} -- if no stack is returned because start is too high,
            reset start to 0 for a full read (default: {False})

    Returns:
        str -- _description_
    """
    # stack = traceback.extract_stack()[start:depth]
    # how many of the last entries we skip (so we do not get self), default is -1
    # start cannot be negative
    if skip_last > 0:
        skip_last = skip_last * -1
    stack = traceback.extract_stack()
    __stack = stack[start:skip_last]
    # start possibly too high, reset start to 0
    if not __stack and reset_start_if_empty:
        start = 0
        __stack = stack[start:skip_last]
    if not separator:
        separator = ' -> '
    # print(f"* HERE: {dump_data(stack)}")
    return f"{separator}".join(f"{os.path.basename(f.filename)}:{f.name}:{f.lineno}" for f in __stack)


def exception_stack(
    exc_stack: OptExcInfo | None = None,
    separator: str = ' -> '
) -> str:
    """
    Exception traceback; if no sys.exc_info tuple is passed, read it internally

    Keyword Arguments:
        exc_stack {OptExcInfo | None} -- _description_ (default: {None})
        separator {str} -- _description_ (default: {' -> '})

    Returns:
        str -- _description_
    """
    if exc_stack is not None:
        _, _, exc_traceback = exc_stack
    else:
        exc_traceback = None
        _, _, exc_traceback = sys.exc_info()
    stack = traceback.extract_tb(exc_traceback)
    if not separator:
        separator = ' -> '
    # print(f"* HERE: {dump_data(stack)}")
    return f"{separator}".join(f"{os.path.basename(f.filename)}:{f.name}:{f.lineno}" for f in stack)

# __END__
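
A brief sketch of the intended use of the two helpers above; each stack frame is rendered as file:function:lineno and joined with the separator (exact frames depend on the call site):

def inner():
    print(call_stack())  # e.g. "script.py:<module>:9 -> script.py:inner:2"

try:
    raise RuntimeError("boom")
except RuntimeError:
    print(exception_stack())  # walks the exception traceback instead of the live stack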
@@ -6,7 +6,7 @@ import json
 from typing import Any


-def dump_data(data: dict[Any, Any] | list[Any] | str | None) -> str:
+def dump_data(data: Any, use_indent: bool = True) -> str:
     """
     dump formatted output from dict/list

@@ -16,6 +16,7 @@ def dump_data(data: dict[Any, Any] | list[Any] | str | None) -> str:
     Returns:
         str: _description_
     """
-    return json.dumps(data, indent=4, ensure_ascii=False, default=str)
+    indent = 4 if use_indent else None
+    return json.dumps(data, indent=indent, ensure_ascii=False, default=str)

 # __END__
@@ -4,10 +4,10 @@ Various small helpers for data writing

 from typing import TYPE_CHECKING
 if TYPE_CHECKING:
-    from io import TextIOWrapper
+    from io import TextIOWrapper, StringIO


-def write_l(line: str, fpl: 'TextIOWrapper | None' = None, print_line: bool = False):
+def write_l(line: str, fpl: 'TextIOWrapper | StringIO | None' = None, print_line: bool = False):
     """
     Write a line to screen and to output file

199 src/corelibs/email_handling/send_email.py Normal file
@@ -0,0 +1,199 @@
"""
Send email wrapper
"""

import smtplib
from email.message import EmailMessage
from typing import TYPE_CHECKING, Any
if TYPE_CHECKING:
    from corelibs.logging_handling.log import Logger


class SendEmail:
    """
    send emails based on a template to a list of receivers
    """

    def __init__(
        self,
        log: "Logger",
        settings: dict[str, Any],
        template: dict[str, str],
        from_email: str,
        combined_send: bool = True,
        receivers: list[str] | None = None,
        data: list[dict[str, str]] | None = None,
    ):
        """
        init send email class

        Args:
            template (dict): Dictionary with body and subject
            from_email (str): from email as "Name" <email>
            combined_send (bool): True for sending as one set for all receivers
            receivers (list): list of emails to send to
            data (list): data to replace in template
        """
        self.log = log
        self.settings = settings
        # internal settings
        self.template = template
        self.from_email = from_email
        self.combined_send = combined_send
        self.receivers = receivers
        self.data = data

    def send_email(
        self,
        data: list[dict[str, str]] | None,
        receivers: list[str] | None,
        template: dict[str, str] | None = None,
        from_email: str | None = None,
        combined_send: bool | None = None,
        test_only: bool | None = None
    ):
        """
        build email and send

        Arguments:
            data {list[dict[str, str]] | None} -- _description_
            receivers {list[str] | None} -- _description_
            combined_send {bool | None} -- _description_

        Keyword Arguments:
            template {dict[str, str] | None} -- _description_ (default: {None})
            from_email {str | None} -- _description_ (default: {None})

        Raises:
            ValueError: _description_
            ValueError: _description_
        """
        if data is None and self.data is not None:
            data = self.data
        if data is None:
            raise ValueError("No replace data set, cannot send email")
        if receivers is None and self.receivers is not None:
            receivers = self.receivers
        if receivers is None:
            raise ValueError("No receivers list set, cannot send email")
        if combined_send is None:
            combined_send = self.combined_send
        if test_only is not None:
            self.settings['test'] = test_only

        if template is None:
            template = self.template
        if from_email is None:
            from_email = self.from_email

        if not template['subject'] or not template['body']:
            raise ValueError("Both Subject and Body must be set")

        self.log.debug(
            "[EMAIL]:\n"
            f"Subject: {template['subject']}\n"
            f"Body: {template['body']}\n"
            f"From: {from_email}\n"
            f"Combined send: {combined_send}\n"
            f"Receivers: {receivers}\n"
            f"Replace data: {data}"
        )

        # send email
        self.send_email_list(
            self.prepare_email_content(
                from_email, template, data
            ),
            receivers,
            combined_send,
            test_only
        )

    def prepare_email_content(
        self,
        from_email: str,
        template: dict[str, str],
        data: list[dict[str, str]],
    ) -> list[EmailMessage]:
        """
        prepare email for sending

        Args:
            template (dict): template data for this email
            data (dict): data to replace in email

        Returns:
            list: Email Message Objects as list
        """
        _subject = ""
        _body = ""
        msg: list[EmailMessage] = []
        for replace in data:
            _subject = template["subject"]
            _body = template["body"]
            for key, value in replace.items():
                _subject = _subject.replace(f"{{{{{key}}}}}", value)
                _body = _body.replace(f"{{{{{key}}}}}", value)
            # create a simple email and add subject and from email
            msg_email = EmailMessage()
            # msg.set_content(_body, charset='utf-8', cte='quoted-printable')
            msg_email.set_content(_body, charset="utf-8")
            msg_email["Subject"] = _subject
            msg_email["From"] = from_email
            # push to the list for sending
            msg.append(msg_email)
        return msg

    def send_email_list(
        self,
        email: list[EmailMessage], receivers: list[str],
        combined_send: bool | None = None,
        test_only: bool | None = None
    ):
        """
        send email to receivers list

        Args:
            email (list): Email Message objects with body, subject and from set, as list
            receivers (list): email receivers list
            combined_send (bool): True for sending as one set for all receivers
        """

        if test_only is not None:
            self.settings['test'] = test_only

        # localhost (postfix does the rest)
        smtp = None
        smtp_host = self.settings.get('smtp_host', "localhost")
        try:
            smtp = smtplib.SMTP(smtp_host)
        except ConnectionRefusedError as e:
            self.log.error("Could not open SMTP connection to: %s, %s", smtp_host, e)
        # loop over messages and then over receivers
        for msg in email:
            if combined_send is True:
                msg["To"] = ", ".join(receivers)
                if not self.settings.get('test'):
                    if smtp is not None:
                        smtp.send_message(msg, msg["From"], receivers)
                else:
                    self.log.info(f"[EMAIL] Test, not sending email\n{msg}")
            else:
                for receiver in receivers:
                    # send to
                    self.log.debug(f"===> Send to: {receiver}")
                    if "To" in msg:
                        msg.replace_header("To", receiver)
                    else:
                        msg["To"] = receiver
                    if not self.settings.get('test'):
                        if smtp is not None:
                            smtp.send_message(msg)
                    else:
                        self.log.info(f"[EMAIL] Test, not sending email\n{msg}")
        # close smtp
        if smtp is not None:
            smtp.quit()

# __END__
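
A hedged usage sketch. Template placeholders use double braces ({{key}}), as implied by the replace calls in prepare_email_content; the logger, settings values and addresses are illustrative:

mailer = SendEmail(
    log,  # assumed corelibs Logger
    settings={"smtp_host": "localhost", "test": True},  # test=True logs instead of sending
    template={"subject": "Hello {{name}}", "body": "Hi {{name}},\nyour report is ready."},
    from_email='"Reports" <reports@example.com>',
)
mailer.send_email(
    data=[{"name": "Ada"}],          # one EmailMessage is built per dict in this list
    receivers=["ada@example.com"],
)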
152 src/corelibs/encryption_handling/symmetric_encryption.py Normal file
@@ -0,0 +1,152 @@
"""
simple symmetric encryption
Will be moved to CoreLibs
TODO: set key per encryption run
"""

import os
import json
import base64
import hashlib
from typing import TypedDict, cast
from cryptography.fernet import Fernet
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.kdf.pbkdf2 import PBKDF2HMAC


class PackageData(TypedDict):
    """encryption package"""
    encrypted_data: str
    salt: str
    key_hash: str


class SymmetricEncryption:
    """
    simple encryption

    the encrypted package has "encrypted_data" and "salt" as fields; the salt is needed to derive
    the key from the password for decryption
    """

    def __init__(self, password: str):
        if not password:
            raise ValueError("A password must be set")
        self.password = password
        self.password_hash = hashlib.sha256(password.encode('utf-8')).hexdigest()

    def __derive_key_from_password(self, password: str, salt: bytes) -> bytes:
        _password = password.encode('utf-8')
        kdf = PBKDF2HMAC(
            algorithm=hashes.SHA256(),
            length=32,
            salt=salt,
            iterations=100000,
        )
        key = base64.urlsafe_b64encode(kdf.derive(_password))
        return key

    def __encrypt_with_metadata(self, data: str | bytes) -> PackageData:
        """Encrypt data and include salt if password-based"""
        # convert to bytes (for encoding)
        if isinstance(data, str):
            data = data.encode('utf-8')

        # generate salt and key from password
        salt = os.urandom(16)
        key = self.__derive_key_from_password(self.password, salt)
        # init the cipher suite
        cipher_suite = Fernet(key)

        encrypted_data = cipher_suite.encrypt(data)

        # If using password, include salt in the result
        return {
            'encrypted_data': base64.urlsafe_b64encode(encrypted_data).decode('utf-8'),
            'salt': base64.urlsafe_b64encode(salt).decode('utf-8'),
            'key_hash': hashlib.sha256(key).hexdigest()
        }

    def encrypt_with_metadata(self, data: str | bytes, return_as: str = 'str') -> str | bytes | PackageData:
        """encrypt with metadata; the return type is selected via return_as"""
        match return_as:
            case 'str':
                return self.encrypt_with_metadata_return_str(data)
            case 'json':
                return self.encrypt_with_metadata_return_str(data)
            case 'bytes':
                return self.encrypt_with_metadata_return_bytes(data)
            case 'dict':
                return self.encrypt_with_metadata_return_dict(data)
            case _:
                # default is string json
                return self.encrypt_with_metadata_return_str(data)

    def encrypt_with_metadata_return_dict(self, data: str | bytes) -> PackageData:
        """encrypt with metadata, but returns data as PackageData dict"""
        return self.__encrypt_with_metadata(data)

    def encrypt_with_metadata_return_str(self, data: str | bytes) -> str:
        """encrypt with metadata, but returns data as string"""
        return json.dumps(self.__encrypt_with_metadata(data))

    def encrypt_with_metadata_return_bytes(self, data: str | bytes) -> bytes:
        """encrypt with metadata, but returns data as bytes"""
        return json.dumps(self.__encrypt_with_metadata(data)).encode('utf-8')

    def decrypt_with_metadata(self, encrypted_package: str | bytes | PackageData, password: str | None = None) -> str:
        """Decrypt data that may include metadata"""
        try:
            # Try to parse as JSON (password-based encryption)
            if isinstance(encrypted_package, bytes):
                package_data = cast(PackageData, json.loads(encrypted_package.decode('utf-8')))
            elif isinstance(encrypted_package, str):
                package_data = cast(PackageData, json.loads(str(encrypted_package)))
            else:
                package_data = encrypted_package

            encrypted_data = base64.urlsafe_b64decode(package_data['encrypted_data'])
            salt = base64.urlsafe_b64decode(package_data['salt'])
            pwd = password or self.password
            key = self.__derive_key_from_password(pwd, salt)
            if package_data['key_hash'] != hashlib.sha256(key).hexdigest():
                raise ValueError("Key hash does not match, possibly an invalid password")
            cipher_suite = Fernet(key)
            decrypted_data = cipher_suite.decrypt(encrypted_data)

        except (json.JSONDecodeError, KeyError, UnicodeDecodeError) as e:
            raise ValueError(f"Invalid encrypted package format {e}") from e

        return decrypted_data.decode('utf-8')

    @staticmethod
    def encrypt_data(data: str | bytes, password: str) -> str:
        """
        Static method to encrypt some data

        Arguments:
            data {str | bytes} -- _description_
            password {str} -- _description_

        Returns:
            str -- _description_
        """
        encryptor = SymmetricEncryption(password)
        return encryptor.encrypt_with_metadata_return_str(data)

    @staticmethod
    def decrypt_data(data: str | bytes | PackageData, password: str) -> str:
        """
        Static method to decrypt some data

        Arguments:
            data {str | bytes | PackageData} -- _description_
            password {str} -- _description_

        Returns:
            str -- _description_
        """
        decryptor = SymmetricEncryption(password)
        return decryptor.decrypt_with_metadata(data, password=password)

# __END__
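
A short round-trip sketch of the static helpers above; the password and plaintext are illustrative:

package = SymmetricEncryption.encrypt_data("secret payload", "correct horse battery staple")
# package is a JSON string with encrypted_data, salt and key_hash fields
plain = SymmetricEncryption.decrypt_data(package, "correct horse battery staple")
assert plain == "secret payload"
# a wrong password derives a different key, fails the key_hash check and raises ValueError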
23 src/corelibs/exceptions/csv_exceptions.py Normal file
@@ -0,0 +1,23 @@
"""
Exceptions for csv file reading and processing
"""


class NoCsvReader(Exception):
    """
    CSV reader is None
    """


class CsvHeaderDataMissing(Exception):
    """
    The csv reader returned None as headers, the header row in the csv file is missing
    """


class CompulsoryCsvHeaderCheckFailed(Exception):
    """
    raised if the header does not match the expected values
    """

# __END__
75 src/corelibs/file_handling/file_bom_encoding.py Normal file
@@ -0,0 +1,75 @@
"""
File check if BOM encoded, needed for CSV load
"""

from pathlib import Path
from typing import TypedDict


class BomEncodingInfo(TypedDict):
    """BOM encoding info"""
    has_bom: bool
    bom_type: str | None
    encoding: str | None
    bom_length: int
    bom_pattern: bytes | None


def is_bom_encoded(file_path: Path) -> bool:
    """
    Detect if a file is BOM encoded

    Args:
        file_path (Path): Path to the file to check

    Returns:
        bool: True if file has BOM, False otherwise
    """
    return is_bom_encoded_info(file_path)['has_bom']


def is_bom_encoded_info(file_path: Path) -> BomEncodingInfo:
    """
    Enhanced BOM detection with additional file analysis

    Args:
        file_path (Path): Path to the file to check

    Returns:
        dict: Comprehensive BOM and encoding information
    """
    try:
        # Read the first 4 bytes for analysis
        with open(file_path, 'rb') as f:
            header = f.read(4)

        # order matters: the longer UTF-32 patterns must be checked before their UTF-16 prefixes
        bom_patterns = {
            b'\xef\xbb\xbf': ('UTF-8', 'utf-8', 3),
            b'\xff\xfe\x00\x00': ('UTF-32 LE', 'utf-32-le', 4),
            b'\x00\x00\xfe\xff': ('UTF-32 BE', 'utf-32-be', 4),
            b'\xff\xfe': ('UTF-16 LE', 'utf-16-le', 2),
            b'\xfe\xff': ('UTF-16 BE', 'utf-16-be', 2),
        }

        for bom_pattern, (encoding_name, encoding, length) in bom_patterns.items():
            if header.startswith(bom_pattern):
                return {
                    'has_bom': True,
                    'bom_type': encoding_name,
                    'encoding': encoding,
                    'bom_length': length,
                    'bom_pattern': bom_pattern
                }

        return {
            'has_bom': False,
            'bom_type': None,
            'encoding': None,
            'bom_length': 0,
            'bom_pattern': None
        }
    except Exception as e:
        raise ValueError(f"Error checking BOM encoding: {e}") from e


# __END__
@@ -7,7 +7,12 @@ import shutil
 from pathlib import Path


-def remove_all_in_directory(directory: Path, ignore_files: list[str] | None = None, verbose: bool = False) -> bool:
+def remove_all_in_directory(
+    directory: Path,
+    ignore_files: list[str] | None = None,
+    verbose: bool = False,
+    dry_run: bool = False
+) -> bool:
     """
     remove all files and folders in a directory
     can exclude files or folders
@@ -24,7 +29,10 @@ def remove_all_in_directory(directory: Path, ignore_files: list[str] | None = No
     if ignore_files is None:
         ignore_files = []
     if verbose:
-        print(f"Remove old files in: {directory.name} [", end="", flush=True)
+        print(
+            f"{'[DRY RUN] ' if dry_run else ''}Remove old files in: {directory.name} [",
+            end="", flush=True
+        )
     # remove all files and folders in given directory by recursive globbing
     for file in directory.rglob("*"):
         # skip if in ignore files
@@ -32,11 +40,13 @@ def remove_all_in_directory(directory: Path, ignore_files: list[str] | None = No
             continue
         # remove one file, or a whole directory
         if file.is_file():
-            os.remove(file)
+            if not dry_run:
+                os.remove(file)
             if verbose:
                 print(".", end="", flush=True)
         elif file.is_dir():
-            shutil.rmtree(file)
+            if not dry_run:
+                shutil.rmtree(file)
             if verbose:
                 print("/", end="", flush=True)
     if verbose:
0 src/corelibs/iterator_handling/__init__.py Normal file
@@ -2,23 +2,41 @@
 wrapper around search path
 """

-from typing import Any
+from typing import Any, TypedDict, NotRequired
+from warnings import deprecated
+
+
+class ArraySearchList(TypedDict):
+    """find in array from list search dict"""
+    key: str
+    value: str | bool | int | float | list[str | None]
+    case_sensitive: NotRequired[bool]


+@deprecated("Use find_in_array_from_list()")
 def array_search(
-    search_params: list[dict[str, str | bool | list[str | None]]],
+    search_params: list[ArraySearchList],
     data: list[dict[str, Any]],
     return_index: bool = False
 ) -> list[dict[str, Any]]:
+    """deprecated, old call order"""
+    return find_in_array_from_list(data, search_params, return_index)
+
+
+def find_in_array_from_list(
+    data: list[dict[str, Any]],
+    search_params: list[ArraySearchList],
+    return_index: bool = False
+) -> list[dict[str, Any]]:
     """
-    search in an array of dicts with an array of Key/Value set
+    search in a list of dicts with a list of Key/Value sets
     all Key/Value sets must match
     Value set can be list for OR match
+    option: case_sensitive: default True

     Args:
-        search_params (list): List of search params in "Key"/"Value" lists with options
+        data (list): data to search in, must be a list
+        search_params (list): List of search params in "key"/"value" lists with options
+        return_index (bool): return index of list [default False]

     Raises:
@@ -32,18 +50,20 @@
     """
     if not isinstance(search_params, list):  # type: ignore
         raise ValueError("search_params must be a list")
-    keys = []
+    keys: list[str] = []
     # check that key and value exist and are set
     for search in search_params:
-        if not search.get('Key') or not search.get('Value'):
+        if not search.get('key') or not search.get('value'):
             raise KeyError(
-                f"Either Key '{search.get('Key', '')}' or "
-                f"Value '{search.get('Value', '')}' is missing or empty"
+                f"Either Key '{search.get('key', '')}' or "
+                f"Value '{search.get('value', '')}' is missing or empty"
             )
         # if double key -> abort
-        if search.get("Key") in keys:
+        if search.get("key") in keys:
             raise KeyError(
-                f"Key {search.get('Key', '')} already exists in search_params"
+                f"Key {search.get('key', '')} already exists in search_params"
            )
+        keys.append(str(search['key']))

     return_items: list[dict[str, Any]] = []
     for si_idx, search_item in enumerate(data):
@@ -55,20 +75,20 @@
         # lower case left side
         # TODO: allow nested Keys. eg "Key: ["Key a", "key b"]" to be ["Key a"]["key b"]
         if search.get("case_sensitive", True) is False:
-            search_value = search_item.get(str(search['Key']), "").lower()
+            search_value = search_item.get(str(search['key']), "").lower()
         else:
-            search_value = search_item.get(str(search['Key']), "")
+            search_value = search_item.get(str(search['key']), "")
         # lower case right side
-        if isinstance(search['Value'], list):
+        if isinstance(search['value'], list):
             search_in = [
                 str(k).lower()
                 if search.get("case_sensitive", True) is False else k
-                for k in search['Value']
-            ]
+                for k in search['value']
+            ]
         elif search.get("case_sensitive", True) is False:
-            search_in = str(search['Value']).lower()
+            search_in = str(search['value']).lower()
         else:
-            search_in = search['Value']
+            search_in = search['value']
         # compare check
         if (
             (
@@ -60,4 +60,22 @@ def build_dict(
         delete_keys_from_set(any_dict, ignore_entries)
     )


+def set_entry(dict_set: dict[str, Any], key: str, value_set: Any) -> dict[str, Any]:
+    """
+    set a new entry in the dict set
+
+    Arguments:
+        dict_set {dict[str, Any]} -- _description_
+        key {str} -- _description_
+        value_set {Any} -- _description_
+
+    Returns:
+        dict[str, Any] -- _description_
+    """
+    if not dict_set.get(key):
+        dict_set[key] = {}
+    dict_set[key] = value_set
+    return dict_set
+
 # __END__
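
A hedged sketch of the new call order introduced above (data first, then search params); the data and search values are illustrative:

people = [
    {"name": "Ada", "role": "Engineer"},
    {"name": "Linus", "role": "maintainer"},
]
hits = find_in_array_from_list(
    people,
    [{"key": "role", "value": ["engineer", "maintainer"], "case_sensitive": False}],  # list value = OR match
)
# both entries match because the compare is case insensitive here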
85 src/corelibs/iterator_handling/dict_mask.py Normal file
@@ -0,0 +1,85 @@
"""
Dict helpers
"""


from typing import TypeAlias, Union, Dict, List, Any, cast

# definitions for the mask run below
MaskableValue: TypeAlias = Union[str, int, float, bool, None]
NestedDict: TypeAlias = Dict[str, Union[MaskableValue, List[Any], 'NestedDict']]
ProcessableValue: TypeAlias = Union[MaskableValue, List[Any], NestedDict]


def mask(
    data_set: dict[str, Any],
    mask_keys: list[str] | None = None,
    mask_str: str = "***",
    mask_str_edges: str = '_',
    skip: bool = False
) -> dict[str, Any]:
    """
    mask data for output
    Checks if any entry of the mask_keys list matches a key in the data set, either at the start or at the end

    Use mask_str_edges to define how matches inside a key should work. By default the match must be
    wrapped in '_'; set it to an empty string to match anywhere inside the key

    Arguments:
        data_set {dict[str, Any]} -- _description_

    Keyword Arguments:
        mask_keys {list[str] | None} -- _description_ (default: {None})
        mask_str {str} -- _description_ (default: {"***"})
        mask_str_edges {str} -- _description_ (default: {"_"})
        skip {bool} -- if set to True skip masking (default: {False})

    Returns:
        dict[str, Any] -- _description_
    """
    if skip is True:
        return data_set
    if mask_keys is None:
        mask_keys = ["encryption", "password", "secret"]
    else:
        # make sure it is lower case
        mask_keys = [mask_key.lower() for mask_key in mask_keys]

    def should_mask_key(key: str) -> bool:
        """Check if a key should be masked"""
        __key_lower = key.lower()
        return any(
            __key_lower.startswith(mask_key) or
            __key_lower.endswith(mask_key) or
            f"{mask_str_edges}{mask_key}{mask_str_edges}" in __key_lower
            for mask_key in mask_keys
        )

    def mask_recursive(obj: ProcessableValue) -> ProcessableValue:
        """Recursively mask values in nested structures"""
        if isinstance(obj, dict):
            return {
                key: mask_value(value) if should_mask_key(key) else mask_recursive(value)
                for key, value in obj.items()
            }
        if isinstance(obj, list):
            return [mask_recursive(item) for item in obj]
        return obj

    def mask_value(value: Any) -> Any:
        """Handle masking based on value type"""
        if isinstance(value, list):
            # Mask each individual value in the list
            return [mask_str for _ in cast('list[Any]', value)]
        if isinstance(value, dict):
            # Recursively process the dictionary instead of masking the whole thing
            return mask_recursive(cast('ProcessableValue', value))
        # Mask primitive values
        return mask_str

    return {
        key: mask_value(value) if should_mask_key(key) else mask_recursive(value)
        for key, value in data_set.items()
    }

# __END__
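
A brief sketch of mask in action: keys that start or end with a mask word (or contain it wrapped in the edge character) are replaced with the mask string, including inside nested dicts; values shown are illustrative:

settings = {
    "db_password": "hunter2",
    "nested": {"api_secret_key": "abc", "host": "db.local"},
    "user": "admin",
}
print(mask(settings))
# e.g. {'db_password': '***', 'nested': {'api_secret_key': '***', 'host': 'db.local'}, 'user': 'admin'}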
47 src/corelibs/iterator_handling/list_helpers.py Normal file
@@ -0,0 +1,47 @@
"""
List type helpers
"""

from typing import Any, Sequence


def convert_to_list(
    entry: str | int | float | bool | Sequence[str | int | float | bool | Sequence[Any]]
) -> Sequence[str | int | float | bool | Sequence[Any]]:
    """
    Convert any of the non list values (except dictionary) to a list

    Arguments:
        entry {str | int | float | bool | list[str | int | float | bool]} -- _description_

    Returns:
        list[str | int | float | bool] -- _description_
    """
    if isinstance(entry, list):
        return entry
    return [entry]


def is_list_in_list(
    list_a: Sequence[str | int | float | bool | Sequence[Any]],
    list_b: Sequence[str | int | float | bool | Sequence[Any]]
) -> Sequence[str | int | float | bool | Sequence[Any]]:
    """
    Return entries from list_a that are not in list_b
    Type safe compare

    Arguments:
        list_a {list[Any]} -- _description_
        list_b {list[Any]} -- _description_

    Returns:
        list[Any] -- _description_
    """
    # Create sets of (value, type) tuples
    set_a = set((item, type(item)) for item in list_a)
    set_b = set((item, type(item)) for item in list_b)

    # Get the difference and extract just the values
    return [item for item, _ in set_a - set_b]

# __END__
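
A short example of why the compare above pairs each value with its type: 1 (int) and True (bool) are equal and hash alike, so a plain set difference would conflate them (result order is not guaranteed, since sets are unordered):

print(is_list_in_list([1, True, "a"], [True]))
# e.g. [1, 'a'] -- the int 1 survives even though 1 == True, because (1, int) != (True, bool)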
0 src/corelibs/json_handling/__init__.py Normal file
@@ -28,8 +28,12 @@ def jmespath_search(search_data: dict[Any, Any] | list[Any], search_params: str)
         raise ValueError(f"Compile failed: {search_params}: {excp}") from excp
     except jmespath.exceptions.ParseError as excp:
         raise ValueError(f"Parse failed: {search_params}: {excp}") from excp
+    except jmespath.exceptions.JMESPathTypeError as excp:
+        raise ValueError(f"Search failed with JMESPathTypeError: {search_params}: {excp}") from excp
+    except TypeError as excp:
+        raise ValueError(f"Type error for search_params: {excp}") from excp
     return search_result

 # TODO: compile jmespath setup

 # __END__
64 src/corelibs/json_handling/json_helper.py Normal file
@@ -0,0 +1,64 @@
|
||||
"""
|
||||
json encoder for datetime
|
||||
"""
|
||||
|
||||
from typing import Any
|
||||
from json import JSONEncoder, dumps
|
||||
from datetime import datetime, date
|
||||
import copy
|
||||
from jsonpath_ng import parse # pyright: ignore[reportMissingTypeStubs, reportUnknownVariableType]
|
||||
|
||||
|
||||
# subclass JSONEncoder
|
||||
class DateTimeEncoder(JSONEncoder):
|
||||
"""
|
||||
Override the default method
|
||||
dumps(..., cls=DateTimeEncoder, ...)
|
||||
"""
|
||||
def default(self, o: Any) -> str | None:
|
||||
if isinstance(o, (date, datetime)):
|
||||
return o.isoformat()
|
||||
return None
|
||||
|
||||
|
||||
def default_isoformat(obj: Any) -> str | None:
|
||||
"""
|
||||
default override
|
||||
dumps(..., default=default, ...)
|
||||
"""
|
||||
if isinstance(obj, (date, datetime)):
|
||||
return obj.isoformat()
|
||||
return None
|
||||
|
||||
|
||||
def json_dumps(data: Any):
|
||||
"""
|
||||
wrapper for json.dumps with sure dump without throwing Exceptions
|
||||
|
||||
Arguments:
|
||||
data {Any} -- _description_
|
||||
|
||||
Returns:
|
||||
_type_ -- _description_
|
||||
"""
|
||||
return dumps(data, ensure_ascii=False, default=str)
|
||||
|
||||
|
||||
def modify_with_jsonpath(data: dict[Any, Any], path: str, new_value: Any):
|
||||
"""
|
||||
Modify dictionary using JSONPath (more powerful than JMESPath for modifications)
|
||||
"""
|
||||
result = copy.deepcopy(data)
|
||||
jsonpath_expr = parse(path) # pyright: ignore[reportUnknownVariableType]
|
||||
|
||||
# Find and update all matches
|
||||
matches = jsonpath_expr.find(result) # pyright: ignore[reportUnknownMemberType, reportUnknownVariableType]
|
||||
for match in matches: # pyright: ignore[reportUnknownVariableType]
|
||||
match.full_path.update(result, new_value) # pyright: ignore[reportUnknownMemberType]
|
||||
|
||||
return result
|
||||
|
||||
# __END__
|
||||
|
||||
|
||||
# __END__
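Usage sketch for the three helpers above (assuming jsonpath-ng is installed; dict key order in printed output may differ):

    from datetime import datetime
    from json import dumps
    from corelibs.json_handling.json_helper import DateTimeEncoder, json_dumps, modify_with_jsonpath

    event = {"name": "run", "at": datetime(2024, 1, 1, 12, 0)}
    print(dumps(event, cls=DateTimeEncoder))  # datetimes become ISO strings
    print(json_dumps(event))                  # never raises; unknown types go through str()

    cfg = {"servers": [{"port": 80}, {"port": 81}]}
    print(modify_with_jsonpath(cfg, "servers[*].port", 8080))
    # -> {'servers': [{'port': 8080}, {'port': 8080}]}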

0 src/corelibs/logging_handling/__init__.py Normal file
953 src/corelibs/logging_handling/log.py Normal file
@@ -0,0 +1,953 @@
"""
A log handler wrapper.
If log_settings['log_queue'] is set to a multiprocessing.Queue it will launch with listeners;
attach "init_worker_logging" with the same log_queue in the worker processes.
"""

import re
import logging.handlers
import logging
from datetime import datetime
import time
from pathlib import Path
import atexit
from enum import Flag, auto
from typing import MutableMapping, TextIO, TypedDict, Any, TYPE_CHECKING, cast
from corelibs_text_colors.text_colors import Colors
from corelibs.logging_handling.logging_level_handling.logging_level import LoggingLevel
from corelibs.debug_handling.debug_helpers import call_stack, exception_stack

if TYPE_CHECKING:
    from multiprocessing import Queue

class ConsoleFormat(Flag):
    """console format type bitmap flags"""
    TIME = auto()
    TIME_SECONDS = auto()
    TIME_MILLISECONDS = auto()
    TIME_MICROSECONDS = auto()
    TIMEZONE = auto()
    NAME = auto()
    FILE = auto()
    FUNCTION = auto()
    LINENO = auto()
    LEVEL = auto()


class ConsoleFormatSettings:
    """Console format quick settings groups"""
    # shows everything: time with milliseconds and time zone, log name, file, function, line number
    ALL = (
        ConsoleFormat.TIME |
        ConsoleFormat.TIMEZONE |
        ConsoleFormat.NAME |
        ConsoleFormat.FILE |
        ConsoleFormat.FUNCTION |
        ConsoleFormat.LINENO |
        ConsoleFormat.LEVEL
    )
    # show time with no time zone, file, line and level
    CONDENSED = ConsoleFormat.TIME | ConsoleFormat.FILE | ConsoleFormat.LINENO | ConsoleFormat.LEVEL
    # only time and level
    MINIMAL = ConsoleFormat.TIME | ConsoleFormat.LEVEL
    # only level
    BARE = ConsoleFormat.LEVEL
    # only message
    NONE = ConsoleFormat(0)

    @staticmethod
    def from_string(setting_str: str, default: ConsoleFormat | None = None) -> ConsoleFormat | None:
        """
        Get a console format setting; if it does not exist return the default

        Arguments:
            setting_str {str} -- what to search for
            default {ConsoleFormat | None} -- if not found return this (default: {None})

        Returns:
            ConsoleFormat | None -- found ConsoleFormat or the default
        """
        if hasattr(ConsoleFormatSettings, setting_str):
            return getattr(ConsoleFormatSettings, setting_str)
        return default
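A short sketch of how the Flag bitmap combines (values are illustrative):

    fmt = ConsoleFormat.TIME | ConsoleFormat.LEVEL  # custom combination
    assert ConsoleFormat.LEVEL in fmt
    assert ConsoleFormat.FILE not in fmt

    # quick settings resolve by attribute name, with a fallback default
    fmt = ConsoleFormatSettings.from_string("MINIMAL", default=ConsoleFormatSettings.BARE)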

# MARK: Log settings TypedDict
class LogSettings(TypedDict):
    """log settings, for Log setup"""
    log_level_console: LoggingLevel
    log_level_file: LoggingLevel
    per_run_log: bool
    console_enabled: bool
    console_color_output_enabled: bool
    console_format_type: ConsoleFormat
    add_start_info: bool
    add_end_info: bool
    log_queue: 'Queue[str] | None'


class LoggerInit(TypedDict):
    """for Logger init"""
    logger: logging.Logger
    log_queue: 'Queue[str] | None'

# MARK: Custom color filter
class CustomConsoleFormatter(logging.Formatter):
    """
    Custom formatter with colors for console output
    """

    COLORS = {
        LoggingLevel.DEBUG.name: Colors.cyan,
        LoggingLevel.INFO.name: Colors.green,
        LoggingLevel.WARNING.name: Colors.yellow,
        LoggingLevel.ERROR.name: Colors.red,
        LoggingLevel.CRITICAL.name: Colors.red_bold,
        LoggingLevel.ALERT.name: Colors.yellow_bold,
        LoggingLevel.EMERGENCY.name: Colors.magenta_bold,
        LoggingLevel.EXCEPTION.name: Colors.magenta_bright,  # will never be written to console
    }

    def format(self, record: logging.LogRecord) -> str:
        """
        set the color highlight

        Arguments:
            record {logging.LogRecord} -- record to format

        Returns:
            str -- the formatted, color-highlighted message
        """
        # Add color to levelname for console output
        reset = Colors.reset
        color = self.COLORS.get(record.levelname, reset)
        # only highlight the level name for DEBUG and INFO
        if record.levelname in [LoggingLevel.DEBUG.name, LoggingLevel.INFO.name]:
            record.levelname = f"{color}{record.levelname}{reset}"
            return super().format(record)
        # highlight the whole line
        message = super().format(record)
        return f"{color}{message}{reset}"

    # TODO: add custom handlers for stack_trace, if not set fill with %(filename)s:%(funcName)s:%(lineno)d
    # hasattr(record, 'stack_trace')
    # also for something like "context" where we add an array of anything to a message

class CustomHandlerFilter(logging.Filter):
    """
    Add a custom handler for filtering
    """
    HANDLER_NAME_FILTER_EXCEPTION: str = 'console'

    def __init__(self, handler_name: str, filter_exceptions: bool = False):
        super().__init__(name=handler_name)
        self.handler_name = handler_name
        self.filter_exceptions = filter_exceptions

    def filter(self, record: logging.LogRecord) -> bool:
        # if console and exception, do not show
        if self.handler_name == self.HANDLER_NAME_FILTER_EXCEPTION and self.filter_exceptions:
            return record.levelname != "EXCEPTION"
        # if the console flag is set and the target is the file handler, filter it out
        if hasattr(record, 'console') and getattr(record, 'console') is True and self.handler_name == 'file':
            return False
        return True

    # def __filter_exceptions(self, record: logging.LogRecord) -> bool:
    #     return record.levelname != "EXCEPTION"

# MARK: Parent class
class LogParent:
    """
    Parent class with general methods,
    used by Log and Logger
    """

    # spacer length in characters and the spacer character
    SPACER_CHAR: str = '='
    SPACER_LENGTH: int = 32

    def __init__(self):
        self.logger: logging.Logger
        self.log_queue: 'Queue[str] | None' = None
        self.handlers: dict[str, Any] = {}

    # FIXME: we need to add a custom formatter to add stack level listing if we want to
    # Important note: although these wrappers exist, it is recommended to use self.logger.NAME directly
    # so that the correct filename, method and row number is set
    # for > 50 use logger.log(LoggingLevel.<LEVEL>.value, ...)
    # for exception use logger.log(LoggingLevel.EXCEPTION.value, ..., exc_info=True)
    # MARK: log message
    def log(self, level: int, msg: object, *args: object, extra: MutableMapping[str, object] | None = None):
        """log general"""
        if not hasattr(self, 'logger'):
            raise ValueError('Logger is not yet initialized')
        if extra is None:
            extra = {}
        extra['stack_trace'] = call_stack(skip_last=2)
        self.logger.log(level, msg, *args, extra=extra, stacklevel=2)

    # MARK: DEBUG 10
    def debug(self, msg: object, *args: object, extra: MutableMapping[str, object] | None = None) -> None:
        """debug"""
        if not hasattr(self, 'logger'):
            raise ValueError('Logger is not yet initialized')
        if extra is None:
            extra = {}
        extra['stack_trace'] = call_stack(skip_last=2)
        self.logger.debug(msg, *args, extra=extra, stacklevel=2)

    # MARK: INFO 20
    def info(self, msg: object, *args: object, extra: MutableMapping[str, object] | None = None) -> None:
        """info"""
        if not hasattr(self, 'logger'):
            raise ValueError('Logger is not yet initialized')
        if extra is None:
            extra = {}
        extra['stack_trace'] = call_stack(skip_last=2)
        self.logger.info(msg, *args, extra=extra, stacklevel=2)

    # MARK: WARNING 30
    def warning(self, msg: object, *args: object, extra: MutableMapping[str, object] | None = None) -> None:
        """warning"""
        if not hasattr(self, 'logger'):
            raise ValueError('Logger is not yet initialized')
        if extra is None:
            extra = {}
        extra['stack_trace'] = call_stack(skip_last=2)
        self.logger.warning(msg, *args, extra=extra, stacklevel=2)

    # MARK: ERROR 40
    def error(self, msg: object, *args: object, extra: MutableMapping[str, object] | None = None) -> None:
        """error"""
        if not hasattr(self, 'logger'):
            raise ValueError('Logger is not yet initialized')
        if extra is None:
            extra = {}
        extra['stack_trace'] = call_stack(skip_last=2)
        self.logger.error(msg, *args, extra=extra, stacklevel=2)

    # MARK: CRITICAL 50
    def critical(self, msg: object, *args: object, extra: MutableMapping[str, object] | None = None) -> None:
        """critical"""
        if not hasattr(self, 'logger'):
            raise ValueError('Logger is not yet initialized')
        if extra is None:
            extra = {}
        extra['stack_trace'] = call_stack(skip_last=2)
        self.logger.critical(msg, *args, extra=extra, stacklevel=2)

    # MARK: ALERT 55
    def alert(self, msg: object, *args: object, extra: MutableMapping[str, object] | None = None) -> None:
        """alert"""
        if not hasattr(self, 'logger'):
            raise ValueError('Logger is not yet initialized')
        # extra_dict = dict(extra)
        if extra is None:
            extra = {}
        extra['stack_trace'] = call_stack(skip_last=2)
        self.logger.log(LoggingLevel.ALERT.value, msg, *args, extra=extra, stacklevel=2)

    # MARK: EMERGENCY: 60
    def emergency(self, msg: object, *args: object, extra: MutableMapping[str, object] | None = None) -> None:
        """emergency"""
        if not hasattr(self, 'logger'):
            raise ValueError('Logger is not yet initialized')
        if extra is None:
            extra = {}
        extra['stack_trace'] = call_stack(skip_last=2)
        self.logger.log(LoggingLevel.EMERGENCY.value, msg, *args, extra=extra, stacklevel=2)

    # MARK: EXCEPTION: 70
    def exception(
        self,
        msg: object, *args: object, extra: MutableMapping[str, object] | None = None,
        log_error: bool = True
    ) -> None:
        """
        log on exception level; like log.exception, but logs with a new level

        Args:
            msg (object): message to log
            *args (object): arguments for msg
            extra: Mapping[str, object] | None: extra arguments for the formatting if needed
            log_error: (bool): If set to False will not write the additional error message for console (Default True)
        """
        if not hasattr(self, 'logger'):
            raise ValueError('Logger is not yet initialized')
        if extra is None:
            extra = {}
        extra['stack_trace'] = call_stack(skip_last=2)
        extra['exception_trace'] = exception_stack()
        # write to console first, with an extra flag so the file handler filters it out
        if log_error:
            self.logger.log(
                LoggingLevel.ERROR.value,
                f"<=EXCEPTION={extra['exception_trace']}> {msg} [{extra['stack_trace']}]",
                *args, extra=dict(extra) | {'console': True}, stacklevel=2
            )
        self.logger.log(LoggingLevel.EXCEPTION.value, msg, *args, exc_info=True, extra=extra, stacklevel=2)

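A sketch of how the EXCEPTION level is meant to be used (assuming a configured Log instance named log):

    try:
        1 / 0
    except ZeroDivisionError:
        # writes an ERROR line flagged for console only (the file handler drops it)
        # plus a full EXCEPTION entry with traceback that the console handler filters out
        log.exception("division failed")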
    def break_line(self, info: str = "BREAK"):
        """
        add a break line as info level

        Keyword Arguments:
            info {str} -- label to print in the break line (default: {"BREAK"})
        """
        if not hasattr(self, 'logger'):
            raise ValueError('Logger is not yet initialized')
        self.logger.info("[%s] %s>", info, self.SPACER_CHAR * self.SPACER_LENGTH)

    # MARK: queue handling
    def flush(self, handler_name: str | None = None, timeout: float = 2.0) -> bool:
        """
        Flush all pending messages

        Keyword Arguments:
            handler_name {str | None} -- flush only this handler, or all if None (default: {None})
            timeout {float} -- maximum seconds to wait for the queue to drain (default: {2.0})

        Returns:
            bool -- False if there is no queue or flushing failed, True otherwise
        """
        if not self.log_queue:
            return False

        try:
            # Wait for queue to be processed
            start_time = time.time()
            while not self.log_queue.empty() and (time.time() - start_time) < timeout:
                time.sleep(0.01)

            # Flush all handlers, or only the given handler
            if handler_name:
                try:
                    self.handlers[handler_name].flush()
                except KeyError:
                    pass
            else:
                for handler in self.handlers.values():
                    handler.flush()
        except OSError:
            return False
        return True

    def cleanup(self):
        """
        cleanup for any open queues in case we have an abort
        """
        if not self.log_queue:
            return
        self.flush()
        # Close the queue properly
        self.log_queue.close()
        self.log_queue.join_thread()

    # MARK: log level handling
    def set_log_level(self, handler_name: str, log_level: LoggingLevel) -> bool:
        """
        set the logging level for a handler

        Arguments:
            handler_name {str} -- handler to change
            log_level {LoggingLevel} -- new logging level

        Returns:
            bool -- True if the level was changed
        """
        try:
            # flush queue before changing the logging level
            self.flush(handler_name)
            self.handlers[handler_name].setLevel(log_level.name)
            return True
        except KeyError:
            if self.logger:
                self.logger.error('Handler %s not found, cannot change log level', handler_name)
            return False
        except AttributeError:
            if self.logger:
                self.logger.error(
                    'Cannot change to log level %s for handler %s, log level invalid',
                    log_level.name, handler_name
                )
            return False

    def get_log_level(self, handler_name: str) -> LoggingLevel:
        """
        get the logging level for a handler

        Arguments:
            handler_name {str} -- handler to look up

        Returns:
            LoggingLevel -- the handler's level, or NOTSET if the handler does not exist
        """
        try:
            return LoggingLevel.from_any(self.handlers[handler_name].level)
        except KeyError:
            return LoggingLevel.NOTSET

    def any_handler_is_minimum_level(self, log_level: LoggingLevel) -> bool:
        """
        if any handler is set to the given minimum level

        Arguments:
            log_level {LoggingLevel} -- level to check against

        Returns:
            bool -- True if at least one handler would emit this level
        """
        for handler in self.handlers.values():
            try:
                if LoggingLevel.from_any(handler.level).includes(log_level):
                    return True
            except (ValueError, AttributeError):
                continue
        return False

    @staticmethod
    def validate_log_level(log_level: Any) -> bool:
        """
        if the log level is invalid return False, else return True

        Args:
            log_level (Any): level name, number or LoggingLevel

        Returns:
            bool: True if the level is valid
        """
        try:
            _ = LoggingLevel.from_any(log_level).value
            return True
        except ValueError:
            return False

    @staticmethod
    def get_log_level_int(log_level: Any) -> int:
        """
        Return the log level as int.
        If invalid, returns the default log level

        Arguments:
            log_level {Any} -- level name, number or LoggingLevel

        Returns:
            int -- numeric logging level
        """
        try:
            return LoggingLevel.from_any(log_level).value
        except ValueError:
            return LoggingLevel.from_string(Log.DEFAULT_LOG_LEVEL.name).value

# MARK: Logger
class Logger(LogParent):
    """
    The class we can pass on to other classes without re-initializing the Log class itself.
    NOTE: if no queue object is handed over, a logging level change might not take immediate effect
    """

    def __init__(self, logger_settings: LoggerInit):
        LogParent.__init__(self)
        self.logger = logger_settings['logger']
        self.lg = self.logger
        self.l = self.logger
        self.handlers = {str(_handler.name): _handler for _handler in self.logger.handlers}
        self.log_queue = logger_settings['log_queue']

# MARK: LogSetup class
class Log(LogParent):
    """
    logger setup
    """

    CONSOLE_HANDLER: str = 'stream_handler'
    FILE_HANDLER: str = 'file_handler'

    # spacer length in characters and the spacer character
    SPACER_CHAR: str = '='
    SPACER_LENGTH: int = 32
    # default logging level
    DEFAULT_LOG_LEVEL: LoggingLevel = LoggingLevel.WARNING
    DEFAULT_LOG_LEVEL_FILE: LoggingLevel = LoggingLevel.DEBUG
    DEFAULT_LOG_LEVEL_CONSOLE: LoggingLevel = LoggingLevel.WARNING
    # default settings
    DEFAULT_LOG_SETTINGS: LogSettings = {
        "log_level_console": DEFAULT_LOG_LEVEL_CONSOLE,
        "log_level_file": DEFAULT_LOG_LEVEL_FILE,
        "per_run_log": False,
        "console_enabled": True,
        "console_color_output_enabled": True,
        # print time, log title, file, function and line number
        "console_format_type": ConsoleFormatSettings.ALL,
        "add_start_info": True,
        "add_end_info": False,
        "log_queue": None,
    }

    # MARK: constructor
    def __init__(
        self,
        log_path: Path,
        log_name: str,
        log_settings: (
            dict[str, 'LoggingLevel | str | bool | None | Queue[str] | ConsoleFormat'] |  # noqa: E501 # pylint: disable=line-too-long
            LogSettings | None
        ) = None,
        other_handlers: dict[str, Any] | None = None
    ):
        LogParent.__init__(self)
        # add new levels for alert, emergency and exception
        logging.addLevelName(LoggingLevel.ALERT.value, LoggingLevel.ALERT.name)
        logging.addLevelName(LoggingLevel.EMERGENCY.value, LoggingLevel.EMERGENCY.name)
        logging.addLevelName(LoggingLevel.EXCEPTION.value, LoggingLevel.EXCEPTION.name)
        # parse the logging settings
        self.log_settings = self.__parse_log_settings(log_settings)
        # if path: set log name with .log
        # if log name with .log: strip .log for naming
        if log_path.is_dir():
            __log_file_name = re.sub(r'[^a-zA-Z0-9]', '', log_name)
            if not log_name.endswith('.log'):
                log_path = log_path.joinpath(Path(__log_file_name).with_suffix('.log'))
            else:
                log_path = log_path.joinpath(__log_file_name)
        elif not log_path.suffix == '.log':
            # add .log if the path is a file but without .log
            log_path = log_path.with_suffix('.log')
        # strip .log from the log name if set
        if not log_name.endswith('.log'):
            log_name = Path(log_name).stem
        # general log name
        self.log_name = log_name

        self.log_queue: 'Queue[str] | None' = None
        self.listener: logging.handlers.QueueListener | None = None
        self.logger: logging.Logger

        # setup handlers
        # NOTE: if console with color is set first, some of the color formatting is set
        # in the file writer too, for the ones where color is set BEFORE the format
        # Any is logging.StreamHandler, logging.FileHandler and all logging.handlers.*
        self.handlers: dict[str, Any] = {}
        self.add_handler(self.FILE_HANDLER, self.__create_file_handler(
            self.FILE_HANDLER, self.log_settings['log_level_file'], log_path)
        )
        if self.log_settings['console_enabled']:
            # console
            self.add_handler(self.CONSOLE_HANDLER, self.__create_console_handler(
                self.CONSOLE_HANDLER,
                self.log_settings['log_level_console'],
                console_format_type=self.log_settings['console_format_type'],
            ))
        # add other handlers
        if other_handlers is not None:
            for handler_key, handler in other_handlers.items():
                self.add_handler(handler_key, handler)
        # init listener if we have a log_queue set
        self.__init_listener(self.log_settings['log_queue'])

        # overall logger start
        self.__init_log(log_name)
        # if requested, write a start log line
        if self.log_settings['add_start_info'] is True:
            self.break_line('START')

    # MARK: destructor
    def __del__(self):
        """
        Called when the class is destroyed; make sure the listener is closed or else we throw a thread error
        """
        if hasattr(self, 'log_settings') and self.log_settings.get('add_end_info'):
            self.break_line('END')
        self.stop_listener()

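A minimal setup sketch (paths and names are hypothetical; everything else mirrors the defaults above):

    from pathlib import Path

    log = Log(
        log_path=Path("log"),  # a directory: the file becomes log/MyApp.log
        log_name="My App",
        log_settings={
            "log_level_console": LoggingLevel.INFO,
            "log_level_file": LoggingLevel.DEBUG,
            "console_format_type": ConsoleFormatSettings.CONDENSED,
        },
    )
    log.logger.info("hello")  # preferred: correct file/function/line attribution
    log.set_log_level(Log.CONSOLE_HANDLER, LoggingLevel.DEBUG)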
    # MARK: parse log settings
    def __parse_log_settings(
        self,
        log_settings: dict[str, 'LoggingLevel | str | bool | None | Queue[str] | ConsoleFormat'] |  # noqa: E501 # pylint: disable=line-too-long
        LogSettings | None
    ) -> LogSettings:
        # skip with default if not set
        if log_settings is None:
            return self.DEFAULT_LOG_SETTINGS
        # check entries; copy so the class-level default dict is not mutated
        default_log_settings = self.DEFAULT_LOG_SETTINGS.copy()
        # check log levels
        for __log_entry in ['log_level_console', 'log_level_file']:
            if log_settings.get(__log_entry) is None:
                continue
            # if not valid reset to default; if not in default set to WARNING
            if not self.validate_log_level(__log_level := log_settings.get(__log_entry, '')):
                __log_level = self.DEFAULT_LOG_SETTINGS.get(
                    __log_entry, self.DEFAULT_LOG_LEVEL
                )
            default_log_settings[__log_entry] = LoggingLevel.from_any(__log_level)
        # check bools
        for __log_entry in [
            "per_run_log",
            "console_enabled",
            "console_color_output_enabled",
            "add_start_info",
            "add_end_info",
        ]:
            if log_settings.get(__log_entry) is None:
                continue
            if not isinstance(__setting := log_settings.get(__log_entry, ''), bool):
                __setting = self.DEFAULT_LOG_SETTINGS.get(__log_entry, True)
            default_log_settings[__log_entry] = __setting
        # check console log type
        default_log_settings['console_format_type'] = cast('ConsoleFormat', log_settings.get(
            'console_format_type', self.DEFAULT_LOG_SETTINGS['console_format_type']
        ))
        # check log queue
        __setting = log_settings.get('log_queue', self.DEFAULT_LOG_SETTINGS['log_queue'])
        if __setting is not None:
            __setting = cast('Queue[str]', __setting)
            default_log_settings['log_queue'] = __setting
        return default_log_settings

    # def __filter_exceptions(self, record: logging.LogRecord) -> bool:
    #     return record.levelname != "EXCEPTION"

    # MARK: add a handler
    def add_handler(
        self,
        handler_name: str,
        handler: Any
    ) -> bool:
        """
        Add a log handler to the handlers dict

        Arguments:
            handler_name {str} -- name to register the handler under
            handler {Any} -- the handler instance
        """
        if self.handlers.get(handler_name):
            return False
        if self.listener is not None or hasattr(self, 'logger'):
            raise ValueError(
                f"Cannot add handler {handler_name}: {handler.get_name()} because logger is already running"
            )
        # TODO: handler must be some handler type, how to check?
        self.handlers[handler_name] = handler
        return True

    # MARK: console logger format
    def __build_console_format_from_string(self, console_format_type: ConsoleFormat) -> str:
        """
        Build the console format string from the given console format type

        Arguments:
            console_format_type {ConsoleFormat} -- format flags to apply

        Returns:
            str -- logging format string
        """
        format_string = ''
        # time part, if any of the times are requested
        if (
            ConsoleFormat.TIME in console_format_type or
            ConsoleFormat.TIME_SECONDS in console_format_type or
            ConsoleFormat.TIME_MILLISECONDS in console_format_type or
            ConsoleFormat.TIME_MICROSECONDS in console_format_type
        ):
            format_string += '[%(asctime)s] '
        # set log name
        if ConsoleFormat.NAME in console_format_type:
            format_string += '[%(name)s] '
        # for any file/function/line number call
        if (
            ConsoleFormat.FILE in console_format_type or
            ConsoleFormat.FUNCTION in console_format_type or
            ConsoleFormat.LINENO in console_format_type
        ):
            format_string += '['
            set_group: list[str] = []
            if ConsoleFormat.FILE in console_format_type:
                set_group.append('%(filename)s')
            if ConsoleFormat.FUNCTION in console_format_type:
                set_group.append('%(funcName)s')
            if ConsoleFormat.LINENO in console_format_type:
                set_group.append('%(lineno)d')
            format_string += ':'.join(set_group)
            format_string += '] '
        # level if wanted
        if ConsoleFormat.LEVEL in console_format_type:
            format_string += '<%(levelname)s> '
        # always the message
        format_string += '%(message)s'
        return format_string

    def __set_time_format_for_console_formatter(
        self, formatter_console: CustomConsoleFormatter | logging.Formatter, console_format_type: ConsoleFormat
    ) -> None:
        """
        Format time for a given format handler; this is for the console format only

        Arguments:
            formatter_console {CustomConsoleFormatter | logging.Formatter} -- formatter to modify
            console_format_type {ConsoleFormat} -- format flags to apply
        """
        # default for TIME is milliseconds
        # if we have multiple set, the smallest precision wins
        if ConsoleFormat.TIME_MICROSECONDS in console_format_type:
            iso_precision = 'microseconds'
        elif (
            ConsoleFormat.TIME_MILLISECONDS in console_format_type or
            ConsoleFormat.TIME in console_format_type
        ):
            iso_precision = 'milliseconds'
        elif ConsoleFormat.TIME_SECONDS in console_format_type:
            iso_precision = 'seconds'
        else:
            iso_precision = 'milliseconds'
        # do the timestamp modification only if time is requested
        if (
            ConsoleFormat.TIME in console_format_type or
            ConsoleFormat.TIME_SECONDS in console_format_type or
            ConsoleFormat.TIME_MILLISECONDS in console_format_type or
            ConsoleFormat.TIME_MICROSECONDS in console_format_type
        ):
            # if TIMEZONE is set we add the astimezone call
            if ConsoleFormat.TIMEZONE in console_format_type:
                formatter_console.formatTime = (
                    lambda record, datefmt=None:
                    datetime
                    .fromtimestamp(record.created)
                    .astimezone()
                    .isoformat(sep=" ", timespec=iso_precision)
                )
            else:
                formatter_console.formatTime = (
                    lambda record, datefmt=None:
                    datetime
                    .fromtimestamp(record.created)
                    .isoformat(sep=" ", timespec=iso_precision)
                )

    def __set_console_formatter(self, console_format_type: ConsoleFormat) -> CustomConsoleFormatter | logging.Formatter:
        """
        Build the full formatter and return it

        Arguments:
            console_format_type {ConsoleFormat} -- format flags to apply

        Returns:
            CustomConsoleFormatter | logging.Formatter -- colored or plain formatter
        """
        format_string = self.__build_console_format_from_string(console_format_type)
        if self.log_settings['console_color_output_enabled']:
            # formatter_console = CustomConsoleFormatter(format_string, datefmt=format_date)
            formatter_console = CustomConsoleFormatter(format_string)
        else:
            # formatter_console = logging.Formatter(format_string, datefmt=format_date)
            formatter_console = logging.Formatter(format_string)
        self.__set_time_format_for_console_formatter(formatter_console, console_format_type)
        self.log_settings['console_format_type'] = console_format_type
        return formatter_console

    # MARK: console handler update
    def update_console_formatter(
        self,
        console_format_type: ConsoleFormat,
    ):
        """
        Update the console formatter for format layout and time stamp format

        Arguments:
            console_format_type {ConsoleFormat} -- new format flags
        """
        # skip if console is not enabled
        if not self.log_settings['console_enabled']:
            return
        # skip if the format has not changed
        if self.log_settings['console_format_type'] == console_format_type:
            return
        # update the formatter
        self.handlers[self.CONSOLE_HANDLER].setFormatter(
            self.__set_console_formatter(console_format_type)
        )

    # MARK: console handler
    def __create_console_handler(
        self, handler_name: str,
        log_level_console: LoggingLevel = LoggingLevel.WARNING,
        filter_exceptions: bool = True,
        console_format_type: ConsoleFormat = ConsoleFormatSettings.ALL,
    ) -> logging.StreamHandler[TextIO]:
        # console logger
        if not self.validate_log_level(log_level_console):
            log_level_console = self.DEFAULT_LOG_LEVEL_CONSOLE
        console_handler = logging.StreamHandler()
        # print(f"Console format type: {console_format_type}")
        # build the format string based on what flags are set
        # format_string = self.__build_console_format_from_string(console_format_type)
        # # basic date, but this will be overridden to ISO in formatTime
        # # format_date = "%Y-%m-%d %H:%M:%S"
        # # color or not
        # if self.log_settings['console_color_output_enabled']:
        #     # formatter_console = CustomConsoleFormatter(format_string, datefmt=format_date)
        #     formatter_console = CustomConsoleFormatter(format_string)
        # else:
        #     # formatter_console = logging.Formatter(format_string, datefmt=format_date)
        #     formatter_console = logging.Formatter(format_string)
        # # set the time format
        # self.__set_time_format_for_console_formatter(formatter_console, console_format_type)
        console_handler.set_name(handler_name)
        console_handler.setLevel(log_level_console.name)
        # do not show exception logs on console
        console_handler.addFilter(CustomHandlerFilter('console', filter_exceptions))
        console_handler.setFormatter(self.__set_console_formatter(console_format_type))
        return console_handler

    # MARK: file handler
    def __create_file_handler(
        self, handler_name: str,
        log_level_file: LoggingLevel, log_path: Path,
        # for TimedRotating, if per_run_log is off
        when: str = "D", interval: int = 1, backup_count: int = 0
    ) -> logging.handlers.TimedRotatingFileHandler | logging.FileHandler:
        # file logger
        # when: S/M/H/D/W0-W6/midnight
        # interval: how many, 1D = every day
        # backup_count: how many old files to keep, 0 = all
        if not self.validate_log_level(log_level_file):
            log_level_file = self.DEFAULT_LOG_LEVEL_FILE
        if self.log_settings['per_run_log']:
            # take the log path, remove the stem (".log"), add the datetime and then .log again
            now = datetime.now()
            # use the first three digits of the microseconds to get milliseconds
            new_stem = f"{log_path.stem}.{now.strftime('%Y-%m-%d_%H-%M-%S')}.{str(now.microsecond)[:3]}"
            file_handler = logging.FileHandler(
                filename=log_path.with_name(f"{new_stem}{log_path.suffix}"),
                encoding="utf-8",
            )
        else:
            file_handler = logging.handlers.TimedRotatingFileHandler(
                filename=log_path,
                encoding="utf-8",
                when=when,
                interval=interval,
                backupCount=backup_count
            )
        formatter_file_handler = logging.Formatter(
            (
                # time stamp
                # '[%(asctime)s.%(msecs)03d] '
                '[%(asctime)s] '
                # log name
                '[%(name)s] '
                # filename + pid
                # '[%(filename)s:%(process)d] '
                # pid + path/filename + func + line number
                '[%(process)d:%(pathname)s:%(funcName)s:%(lineno)d] '
                # error level
                '<%(levelname)s> '
                # message
                '%(message)s'
            ),
            datefmt="%Y-%m-%dT%H:%M:%S",
        )
        formatter_file_handler.formatTime = (
            lambda record, datefmt=None:
            datetime
            .fromtimestamp(record.created)
            .astimezone()
            .isoformat(sep="T", timespec="microseconds")
        )
        file_handler.set_name(handler_name)
        file_handler.setLevel(log_level_file.name)
        # do not show errors flagged with console (they come from exceptions)
        file_handler.addFilter(CustomHandlerFilter('file'))
        file_handler.setFormatter(formatter_file_handler)
        return file_handler

    # MARK: init listener
    def __init_listener(self, log_queue: 'Queue[str] | None' = None):
        """
        If we have a Queue option, start the logging queue listener

        Keyword Arguments:
            log_queue {Queue[str] | None} -- queue to listen on (default: {None})
        """
        if log_queue is None:
            return
        self.log_queue = log_queue
        atexit.register(self.stop_listener)
        self.listener = logging.handlers.QueueListener(
            self.log_queue,
            *self.handlers.values(),
            respect_handler_level=True
        )
        self.listener.start()

    def stop_listener(self):
        """
        stop the listener
        """
        if self.listener is not None:
            self.flush()
            self.listener.stop()
            # guard against a double stop (atexit and __del__ may both call this)
            self.listener = None

    # MARK: init main log
    def __init_log(self, log_name: str) -> None:
        """
        Initialize the main logger
        """
        queue_handler: logging.handlers.QueueHandler | None = None
        if self.log_queue is not None:
            queue_handler = logging.handlers.QueueHandler(self.log_queue)
        # overall logger settings
        self.logger = logging.getLogger(log_name)
        # add all the handlers
        if queue_handler is None:
            for handler in self.handlers.values():
                self.logger.addHandler(handler)
        else:
            self.logger.addHandler(queue_handler)
        # set the maximum logging level for all logging output;
        # log level filtering is done per handler
        self.logger.setLevel(logging.DEBUG)
        # short names
        self.lg = self.logger
        self.l = self.logger

    # MARK: init logger for Fork/Thread
    @staticmethod
    def init_worker_logging(log_queue: 'Queue[str]') -> logging.Logger:
        """
        This initializes a logger that can be used in pool/thread queue calls;
        call in the worker initializer as "Log.init_worker_logging(log_queue)"
        """
        queue_handler = logging.handlers.QueueHandler(log_queue)
        # the getLogger call MUST be WITHOUT a logger name
        root_logger = logging.getLogger()
        # base logging level, filtering is done in the handlers
        root_logger.setLevel(logging.DEBUG)
        root_logger.handlers.clear()
        root_logger.addHandler(queue_handler)

        # for debug only
        root_logger.debug('[LOGGER] Init log: %s - %s', log_queue, root_logger.handlers)

        return root_logger

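A wiring sketch for the queue-based multiprocessing setup the module docstring describes (pool size, paths and the work function are hypothetical):

    from multiprocessing import Pool, Queue
    from pathlib import Path
    import logging

    def work(item: int) -> None:
        # workers log through the root logger; records travel over the queue
        logging.getLogger().info("processing %s", item)

    if __name__ == "__main__":
        log_queue: 'Queue[str]' = Queue()
        log = Log(Path("log"), "worker-test", log_settings={"log_queue": log_queue})
        with Pool(2, initializer=Log.init_worker_logging, initargs=(log_queue,)) as pool:
            pool.map(work, range(4))
        log.flush()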
    def get_logger_settings(self) -> LoggerInit:
        """
        get the logger settings we need to init the Logger class

        Returns:
            LoggerInit -- dict with the logger and the log queue
        """
        return {
            "logger": self.logger,
            "log_queue": self.log_queue
        }

# __END__
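The Logger wrapper then hands the configured logger to other classes without re-running the Log setup (sketch, assuming a Log instance named log):

    worker_log = Logger(log.get_logger_settings())
    worker_log.info("same handlers, no re-initialization")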
@@ -0,0 +1,89 @@
"""
All logging levels
"""

import logging
from typing import Any
from enum import Enum


class LoggingLevel(Enum):
    """
    Log class levels
    """
    NOTSET = logging.NOTSET  # 0
    DEBUG = logging.DEBUG  # 10
    INFO = logging.INFO  # 20
    WARNING = logging.WARNING  # 30
    ERROR = logging.ERROR  # 40
    CRITICAL = logging.CRITICAL  # 50
    ALERT = 55  # 55 (for Sys log)
    EMERGENCY = 60  # 60 (for Sys log)
    EXCEPTION = 70  # 70 (manually set, an error but with a higher level)
    # Alternative names
    WARN = logging.WARN  # 30 (alias for WARNING)
    FATAL = logging.FATAL  # 50 (alias for CRITICAL)

    @classmethod
    def from_string(cls, level_str: str):
        """Convert string to LogLevel enum"""
        try:
            return cls[level_str.upper()]
        except KeyError as e:
            raise ValueError(f"Invalid log level: {level_str}") from e
        except AttributeError as e:
            raise ValueError(f"Invalid log level: {level_str}") from e

    @classmethod
    def from_int(cls, level_int: int):
        """Convert integer to LogLevel enum"""
        try:
            return cls(level_int)
        except ValueError as e:
            raise ValueError(f"Invalid log level: {level_int}") from e

    @classmethod
    def from_any(cls, level_any: Any):
        """
        Convert any value:
        if already a LoggingLevel return as is, else try to convert from int or string

        Arguments:
            level_any {Any} -- level name, number or LoggingLevel

        Returns:
            LoggingLevel -- the matching level
        """
        if isinstance(level_any, LoggingLevel):
            return level_any
        if isinstance(level_any, int):
            return cls.from_int(level_any)
        return cls.from_string(level_any)

    def to_logging_level(self):
        """Convert to logging module level"""
        return self.value

    def to_lower_case(self):
        """return the lower case name"""
        return self.name.lower()

    def __str__(self):
        return self.name

    def includes(self, level: 'LoggingLevel'):
        """
        if the given level is included in the set level,
        eg: with INFO set, ERROR is included because an INFO handler would print ERROR
        """
        return self.value <= level.value

    def is_higher_than(self, level: 'LoggingLevel'):
        """if the set value is higher than the given one"""
        return self.value > level.value

    def is_lower_than(self, level: 'LoggingLevel'):
        """if the set value is lower than the given one"""
        return self.value < level.value

# __END__
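Conversion and comparison sketch:

    assert LoggingLevel.from_any("info") is LoggingLevel.INFO
    assert LoggingLevel.from_any(40) is LoggingLevel.ERROR

    # INFO "includes" ERROR: a handler set to INFO would emit ERROR records
    assert LoggingLevel.INFO.includes(LoggingLevel.ERROR)
    assert LoggingLevel.EXCEPTION.is_higher_than(LoggingLevel.CRITICAL)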

0 src/corelibs/math_handling/__init__.py Normal file
35 src/corelibs/math_handling/math_helpers.py Normal file
@@ -0,0 +1,35 @@
"""
Various math helpers
"""

import math


def gcd(a: int, b: int):
    """
    Calculate: Greatest Common Divisor

    Arguments:
        a {int} -- first number
        b {int} -- second number

    Returns:
        int -- greatest common divisor of a and b
    """
    return math.gcd(a, b)


def lcd(a: int, b: int):
    """
    Calculate: Least Common Denominator
    (implemented via math.lcm, the least common multiple of a and b)

    Arguments:
        a {int} -- first number
        b {int} -- second number

    Returns:
        int -- least common multiple of a and b
    """
    return math.lcm(a, b)

# __END__
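Both wrappers delegate straight to the math module:

    print(gcd(12, 18))  # -> 6
    print(lcd(4, 6))    # -> 12 (math.lcm under the hood)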

0 src/corelibs/py.typed Normal file
0 src/corelibs/requests_handling/__init__.py Normal file
20 src/corelibs/requests_handling/auth_helpers.py Normal file
@@ -0,0 +1,20 @@
"""
Various HTTP auth helpers
"""

from base64 import b64encode


def basic_auth(username: str, password: str) -> str:
    """
    setup basic auth, for debug

    Arguments:
        username {str} -- user name
        password {str} -- password

    Returns:
        str -- value for the "Authorization" header
    """
    token = b64encode(f"{username}:{password}".encode('utf-8')).decode("ascii")
    return f'Basic {token}'
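Usage sketch (the header shape matches what the Caller class below expects):

    headers = {"Authorization": basic_auth("user", "secret")}
    # -> {'Authorization': 'Basic dXNlcjpzZWNyZXQ='}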
@@ -18,11 +18,12 @@ class Caller:
         header: dict[str, str],
         verify: bool = True,
         timeout: int = 20,
-        proxy: dict[str, str] | None = None
+        proxy: dict[str, str] | None = None,
+        ca_file: str | None = None
     ):
         self.headers = header
         self.timeout: int = timeout
-        self.cafile = "/Library/Application Support/Netskope/STAgent/data/nscacert.pem"
+        self.cafile = ca_file
         self.verify = verify
         self.proxy = proxy

@@ -32,7 +32,7 @@ show_position(file pos optional)
 import time
 from typing import Literal
 from math import floor
-from corelibs.string_handling.datetime_helpers import convert_timestamp
+from corelibs_datetime.timestamp_convert import convert_timestamp
 from corelibs.string_handling.byte_helpers import format_bytes


0 src/corelibs/string_handling/__init__.py Normal file
@@ -2,16 +2,20 @@
 String helpers
 """

 import re
 from decimal import Decimal, getcontext
+from textwrap import shorten


-def shorten_string(string: str, length: int, hard_shorten: bool = False, placeholder: str = " [~]") -> str:
+def shorten_string(
+    string: str | int | float, length: int, hard_shorten: bool = False, placeholder: str = " [~]"
+) -> str:
     """
     check if the entry is too long and cut it, but only for console output
     Note that if there are no spaces in the string, it will automatically use the hard split mode

     Args:
-        string (str): _description_
+        string (str | int | float): value to shorten (numbers are converted to string)
         length (int): maximum output length
         hard_shorten (bool): if shortening should be done on a fixed string length. Default: False
         placeholder (str): placeholder string. Default: " [~]"
@@ -19,13 +23,19 @@ def shorten_string(string: str, length: int, hard_shorten: bool = False, placeho
     Returns:
         str: the shortened string
     """
     length = int(length)
+    string = str(string)
     # if placeholder > length
     if len(string) > length:
         if hard_shorten is True or " " not in string:
             # hard shorten error
             if len(placeholder) > length:
                 raise ValueError(f"Cannot shorten string: placeholder {placeholder} is too large for max width")
             short_string = f"{string[:(length - len(placeholder))]}{placeholder}"
         else:
-            short_string = shorten(string, width=length, placeholder=placeholder)
+            try:
+                short_string = shorten(string, width=length, placeholder=placeholder)
+            except ValueError as e:
+                raise ValueError(f"Cannot shorten string: {e}") from e
     else:
         short_string = string

@@ -66,6 +76,9 @@ def format_number(number: float, precision: int = 0) -> str:
     format numbers; currently trailing zeros do not work,
     use {:,} or {:,.f} or {:,.<N>f} (<N> = number of digits) instead of this

+    The upper limit of the precision depends on the value of the number itself;
+    very large numbers will have no precision at all any more

     Arguments:
         number {float} -- number to format

@@ -75,12 +88,35 @@ def format_number(number: float, precision: int = 0) -> str:
     Returns:
         str -- formatted number
     """
-    if precision < 0 and precision > 100:
+    if precision < 0 or precision > 100:
         precision = 0
+    if precision > 0:
+        getcontext().prec = precision
+        # make it a string to avoid mangling
+        _number = Decimal(str(number))
+    else:
+        _number = number
     return (
         "{:,."
         f"{str(precision)}"
         "f}"
-    ).format(number)
+    ).format(_number)


 def prepare_url_slash(url: str) -> str:
     """
     if the URL does not start with /, add a slash;
     strip all double slashes in the URL

     Arguments:
         url {str} -- URL or URL path

     Returns:
         str -- normalized URL path
     """
     url = re.sub(r'\/+', '/', url)
     if not url.startswith("/"):
         url = "/" + url
     return url

 # __END__
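Quick sanity checks for the patched behavior above (illustrative):

    print(format_number(1234567.891, 2))       # -> '1,234,567.89'
    print(prepare_url_slash("api//v1/users"))  # -> '/api/v1/users'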

18 src/corelibs/string_handling/text_colors.py Normal file
@@ -0,0 +1,18 @@
"""
Basic ANSI colors

Set colors with print(f"something {Colors.yellow}colorful{Colors.end}");
bold + underline + color combinations are possible.
"""

from warnings import deprecated
from corelibs_text_colors.text_colors import Colors as ColorsNew


@deprecated("Use src.corelibs_text_colors.text_colors instead")
class Colors(ColorsNew):
    """
    ANSI colors defined
    """

# __END__

0 src/corelibs/var_handling/__init__.py Normal file
25 src/corelibs/var_handling/enum_base.py Normal file
@@ -0,0 +1,25 @@
"""
Enum base classes
"""

import warnings
from corelibs_enum_base.enum_base import EnumBase as CorelibsEnumBase


class EnumBase(CorelibsEnumBase):
    """
    base for enum

    .. deprecated::
        Use corelibs_enum_base.EnumBase instead
    DEPRECATED: Use corelibs_enum_base.enum_base.EnumBase instead

    lookup_any and from_any will return "EnumBase" and the sub class name;
    run the return again through "from_any" to get a clean value, or cast it
    """


# At the module level, issue a deprecation warning
warnings.warn("Use corelibs_enum_base.enum_base.EnumBase instead", DeprecationWarning, stacklevel=2)

# __END__

15 src/corelibs/var_handling/enum_base.pyi Normal file
@@ -0,0 +1,15 @@
"""
Enum base classes [STPUB]
"""

from typing_extensions import deprecated
from corelibs_enum_base.enum_base import EnumBase as CorelibsEnumBase


@deprecated("Use corelibs_enum_base.enum_base.EnumBase instead")
class EnumBase(CorelibsEnumBase):
    """
    base for enum
    lookup_any and from_any will return "EnumBase" and the sub class name;
    run the return again through "from_any" to get a clean value, or cast it
    """

54 src/corelibs/var_handling/var_helpers.py Normal file
@@ -0,0 +1,54 @@
"""
variable convert, check, etc. helpers
"""

from typing import Any
from warnings import deprecated
import corelibs_var.var_helpers


@deprecated("Use corelibs_var.var_helpers.is_int instead")
def is_int(string: Any) -> bool:
    """
    check if a value is int

    Arguments:
        string {Any} -- value to check

    Returns:
        bool -- True if the value parses as int
    """
    return corelibs_var.var_helpers.is_int(string)


@deprecated("Use corelibs_var.var_helpers.is_float instead")
def is_float(string: Any) -> bool:
    """
    check if a value is float

    Arguments:
        string {Any} -- value to check

    Returns:
        bool -- True if the value parses as float
    """
    return corelibs_var.var_helpers.is_float(string)


@deprecated("Use corelibs_var.var_helpers.str_to_bool instead")
def str_to_bool(string: str):
    """
    convert string to bool

    Arguments:
        string {str} -- string to convert

    Raises:
        ValueError: if the string cannot be mapped to a bool

    Returns:
        bool -- converted value
    """
    return corelibs_var.var_helpers.str_to_bool(string)

# __END__

35 test-run/check_handling/regex_checks.py Normal file
@@ -0,0 +1,35 @@
"""
Test check handling for regex checks
"""

import re
from corelibs.check_handling.regex_constants import DOMAIN_WITH_LOCALHOST_REGEX


def main():
    """
    Test regex checks
    """
    test_domains = [
        "example.com",
        "localhost",
        "subdomain.localhost",
        "test.localhost.com",
        "some-domain.org"
    ]

    regex_domain_check = re.compile(DOMAIN_WITH_LOCALHOST_REGEX)
    print(f"REGEX: {DOMAIN_WITH_LOCALHOST_REGEX}")
    print(f"Check regex: {regex_domain_check.search('localhost')}")

    for domain in test_domains:
        if regex_domain_check.search(domain):
            print(f"Matched: {domain}")
        else:
            print(f"Did not match: {domain}")


if __name__ == "__main__":
    main()

# __END__

42 test-run/config_handling/config/settings.ini Normal file
@@ -0,0 +1,42 @@
[TestA]
foo=bar
overload_from_args=bar
foobar=1
bar=st
arg_overload=should_not_be_set_because_of_command_line_is_list
arg_overload_list=too,be,long
arg_overload_not_set=this should not be set because of override flag
just_values=too,be,long
some_match=foo
some_match_list=foo,bar
test_list=a,b,c,d f, g h
other_list=a|b|c|d|
third_list=xy|ab|df|fg
str_length=foobar
int_range=20
int_range_not_set=
int_range_not_set_empty_set=5
#
match_target=foo
match_target_list=foo,bar,baz
#
match_source_a=foo
match_source_b=foo
; match_source_c=foo
match_source_list=foo,bar

[TestB]
element_a=Static energy
element_b=123.5
element_c=True
elemend_d=AB:CD;EF
email=foo@bar.com,other+bar-fee@domain-com.cp,
email_not_mandatory=
email_bad=gii@bar.com

[LoadTest]
a.b.c=foo
d:e:f=bar

[ErrorTest]
some_value=42

2 test-run/config_handling/log/.gitignore vendored Normal file
@@ -0,0 +1,2 @@
*
!.gitignore

144 test-run/config_handling/settings_loader.py Normal file
@@ -0,0 +1,144 @@
"""
Settings loader test
"""

import re
from pathlib import Path
from corelibs.debug_handling.dump_data import dump_data
from corelibs.logging_handling.log import Log
from corelibs.config_handling.settings_loader import SettingsLoader
from corelibs.config_handling.settings_loader_handling.settings_loader_check import SettingsLoaderCheck

SCRIPT_PATH: Path = Path(__file__).resolve().parent
ROOT_PATH: Path = SCRIPT_PATH
CONFIG_DIR: Path = Path("config")
LOG_DIR: Path = Path("log")
CONFIG_FILE: str = "settings.ini"


def main():
    """
    Main run
    """

    value = "2025/1/1"
    regex_c = re.compile(SettingsLoaderCheck.CHECK_SETTINGS['string.date']['regex'], re.VERBOSE)
    result = regex_c.search(value)
    print(f"regex {regex_c} check against {value} -> {result}")

    # for log testing
    log = Log(
        log_path=ROOT_PATH.joinpath(LOG_DIR, 'settings_loader.log'),
        log_name="Settings Loader",
        log_settings={
            "log_level_console": 'DEBUG',
            "log_level_file": 'DEBUG',
        }
    )
    log.logger.info('Settings loader')

    sl = SettingsLoader(
        {
            'overload_from_args': 'OVERLOAD from ARGS',
            'arg_overload': ['should', 'not', 'be', 'set'],
            'arg_overload_list': ['overload', 'this', 'list'],
            'arg_overload_not_set': "DO_NOT_SET",
        },
        ROOT_PATH.joinpath(CONFIG_DIR, CONFIG_FILE),
        log=log
    )
    try:
        config_load = 'TestA'
        config_data = sl.load_settings(
            config_load,
            {
                # "doesnt": ["split:,"],
                "overload_from_args": ["args_override:yes", "mandatory:yes"],
                "foobar": ["check:int"],
                "bar": ["mandatory:yes"],
                "arg_overload_list": ["args_override:yes", "split:,",],
                "arg_overload_not_set": [],
                "some_match": ["matching:foo|bar"],
                "some_match_list": ["split:,", "matching:foo|bar"],
                "test_list": [
                    "check:string.alphanumeric",
                    "split:,"
                ],
                "other_list": ["split:|"],
                "third_list": [
                    "split:|",
                    "check:string.alphanumeric"
                ],
                "str_length": [
                    "length:2-10"
                ],
                "int_range": [
                    "range:2-50"
                ],
                "int_range_not_set": [
                    "range:2-50"
                ],
                "int_range_not_set_empty_set": [
                    "empty:"
                ],
                "match_target": ["matching:foo"],
                "match_target_list": ["split:,", "matching:foo|bar|baz",],
                "match_source_a": ["in:match_target"],
                "match_source_b": ["in:match_target_list"],
                "match_source_list": ["split:,", "in:match_target_list"],
            }
        )
        print(f"[{config_load}] Load: {config_load} -> {dump_data(config_data)}")
    except ValueError as e:
        print(f"Could not load settings: {e}")

    try:
        config_load = 'TestB'
        config_data = sl.load_settings(
            config_load,
            {
                "email": [
                    "split:,",
                    "mandatory:yes",
                    "check:string.email.basic"
                ],
                "email_not_mandatory": [
                    "split:,",
                    # "mandatory:yes",
                    "check:string.email.basic"
                ],
                "email_bad": [
                    "split:,",
                    "mandatory:yes",
                    "check:string.email.basic"
                ]
            }
        )
        print(f"[{config_load}] Load: {config_load} -> {dump_data(config_data)}")
    except ValueError as e:
        print(f"Could not load settings: {e}")

    try:
        config_load = 'LoadTest'
        config_data = sl.load_settings(config_load)
        print(f"[{config_load}] Load: {config_load} -> {dump_data(config_data)}")
    except ValueError as e:
        print(f"Could not load settings: {e}")

    try:
        config_load = 'ErrorTest'
        config_data = sl.load_settings(
            config_load,
            {
                "some_value": [
                    "check:string.email.basic",
                ],
            }
        )
        print(f"[{config_load}] Load: {config_load} -> {dump_data(config_data)}")
    except ValueError as e:
        print(f"Could not load settings: {e}")


if __name__ == "__main__":
    main()

236 test-run/datetime_handling/datetime_helpers.py Normal file
@@ -0,0 +1,236 @@
#!/usr/bin/env python3

"""
date string helper test
"""

from datetime import datetime
from corelibs.datetime_handling.datetime_helpers import (
    get_datetime_iso8601, get_system_timezone, parse_timezone_data, validate_date,
    parse_flexible_date, compare_dates, find_newest_datetime_in_list,
    parse_day_of_week_range, parse_time_range, times_overlap_or_connect, is_time_in_range,
    reorder_weekdays_from_today
)


def __get_datetime_iso8601():
    """
    Comment
    """
    for tz in [
        '', 'Asia/Tokyo', 'UTC', 'Europe/Vienna',
        'America/New_York', 'Australia/Sydney',
        'invalid'
    ]:
        print(f"{tz} -> {get_datetime_iso8601(tz)}")


def __parse_timezone_data():
    for tz in [
        'JST', 'KST', 'UTC', 'CET', 'CEST',
    ]:
        print(f"{tz} -> {parse_timezone_data(tz)}")


def __validate_date():
    """
    Comment
    """

    test_dates = [
        "2024-01-01",
        "2024-02-29",  # Leap year
        "2023-02-29",  # Invalid date
        "2024-13-01",  # Invalid month
        "2024-00-10",  # Invalid month
        "2024-04-31",  # Invalid day
        "invalid-date"
    ]

    for date_str in test_dates:
        is_valid = validate_date(date_str)
        print(f"Date '{date_str}' is valid: {is_valid}")

    # also test not before and not after
    not_before_dates = [
        "2023-12-31",
        "2024-01-01",
        "2024-02-29",
    ]
    not_after_dates = [
        "2024-12-31",
        "2024-11-30",
        "2025-01-01",
    ]

    for date_str in not_before_dates:
        datetime.strptime(date_str, "%Y-%m-%d")  # Ensure valid date format
        is_valid = validate_date(date_str, not_before=datetime.strptime("2024-01-01", "%Y-%m-%d"))
        print(f"Date '{date_str}' is valid (not before 2024-01-01): {is_valid}")

    for date_str in not_after_dates:
        is_valid = validate_date(date_str, not_after=datetime.strptime("2024-12-31", "%Y-%m-%d"))
        print(f"Date '{date_str}' is valid (not after 2024-12-31): {is_valid}")

    for date_str in test_dates:
        is_valid = validate_date(
            date_str,
            not_before=datetime.strptime("2024-01-01", "%Y-%m-%d"),
            not_after=datetime.strptime("2024-12-31", "%Y-%m-%d")
        )
        print(f"Date '{date_str}' is valid (2024 only): {is_valid}")


def __parse_flexible_date():
    for date_str in [
        "2024-01-01",
        "01/02/2024",
        "February 29, 2024",
        "Invalid date",
        "2025-01-01 12:18:10",
        "2025-01-01 12:18:10.566",
        "2025-01-01T12:18:10.566",
        "2025-01-01T12:18:10.566+02:00",
    ]:
        print(f"{date_str} -> {parse_flexible_date(date_str)}")


def __compare_dates():

    for date1, date2 in [
        ("2024-01-01 12:00:00", "2024-01-01 15:30:00"),
        ("2024-01-02", "2024-01-01"),
        ("2024-01-01T10:00:00+02:00", "2024-01-01T08:00:00Z"),
        ("invalid-date", "2024-01-01"),
        ("2024-01-01", "invalid-date"),
        ("invalid-date", "also-invalid"),
    ]:
        result = compare_dates(date1, date2)
        print(f"Comparing '{date1}' and '{date2}': {result}")


def __find_newest_datetime_in_list():
    date_list = [
        "2024-01-01 12:00:00",
        "2024-01-02 09:30:00",
        "2023-12-31 23:59:59",
        "2024-01-02 15:45:00",
        "2024-01-02T15:45:00.001",
        "invalid-date",
    ]
    newest_date = find_newest_datetime_in_list(date_list)
    print(f"Newest date in list: {newest_date}")


def __parse_day_of_week_range():
    ranges = [
        "Mon-Fri",
        "Saturday-Sunday",
        "Wed-Mon",
        "Fri-Fri",
        "mon-tue",
        "Invalid-Range"
    ]
    for range_str in ranges:
        try:
            days = parse_day_of_week_range(range_str)
            print(f"Day range '{range_str}' -> {days}")
        except ValueError as e:
            print(f"[!] Error parsing day range '{range_str}': {e}")


def __parse_time_range():
    ranges = [
        "08:00-17:00",
        "22:00-06:00",
        "12:30-12:30",
        "invalid-range"
    ]
    for range_str in ranges:
        try:
            start_time, end_time = parse_time_range(range_str)
            print(f"Time range '{range_str}' -> Start: {start_time}, End: {end_time}")
        except ValueError as e:
            print(f"[!] Error parsing time range '{range_str}': {e}")


def __times_overlap_or_connect():
    time_format = "%H:%M"
    time_ranges = [
        (("08:00", "12:00"), ("11:00", "15:00")),  # Overlap
        (("22:00", "02:00"), ("01:00", "05:00")),  # Overlap across midnight
        (("10:00", "12:00"), ("12:00", "14:00")),  # Connect
        (("09:00", "11:00"), ("12:00", "14:00")),  # No overlap
    ]
    for (start1, end1), (start2, end2) in time_ranges:
|
||||
start1 = datetime.strptime(start1, time_format).time()
|
||||
end1 = datetime.strptime(end1, time_format).time()
|
||||
start2 = datetime.strptime(start2, time_format).time()
|
||||
end2 = datetime.strptime(end2, time_format).time()
|
||||
overlap = times_overlap_or_connect((start1, end1), (start2, end2))
|
||||
overlap_connect = times_overlap_or_connect((start1, end1), (start2, end2), True)
|
||||
print(f"Time ranges {start1}-{end1} and {start2}-{end2} overlap/connect: {overlap}/{overlap_connect}")
|
||||
|
||||
|
||||
def __is_time_in_range():
|
||||
time_format = "%H:%M:%S"
|
||||
test_cases = [
|
||||
("10:00:00", "09:00:00", "11:00:00"),
|
||||
("23:30:00", "22:00:00", "01:00:00"), # Across midnight
|
||||
("05:00:00", "06:00:00", "10:00:00"), # Not in range
|
||||
("12:00:00", "12:00:00", "12:00:00"), # Exact match
|
||||
]
|
||||
for (check_time, start_time, end_time) in test_cases:
|
||||
start_time = datetime.strptime(start_time, time_format).time()
|
||||
end_time = datetime.strptime(end_time, time_format).time()
|
||||
in_range = is_time_in_range(
|
||||
f"{check_time}", start_time.strftime("%H:%M:%S"), end_time.strftime("%H:%M:%S")
|
||||
)
|
||||
print(f"Time {check_time} in range {start_time}-{end_time}: {in_range}")
|
||||
|
||||
|
||||
def __reorder_weekdays_from_today():
|
||||
for base_day in [
|
||||
"Tue", "Wed", "Sunday", "Fri", "InvalidDay"
|
||||
]:
|
||||
try:
|
||||
reordered_days = reorder_weekdays_from_today(base_day)
|
||||
print(f"Reordered weekdays from {base_day}: {reordered_days}")
|
||||
except ValueError as e:
|
||||
print(f"[!] Error reordering weekdays from '{base_day}': {e}")
|
||||
|
||||
|
||||
def main() -> None:
|
||||
"""
|
||||
Comment
|
||||
"""
|
||||
print("\nDatetime ISO 8601 tests:\n")
|
||||
__get_datetime_iso8601()
|
||||
print("\nSystem time test:")
|
||||
print(f"System time: {get_system_timezone()}")
|
||||
print("\nParse timezone data tests:\n")
|
||||
__parse_timezone_data()
|
||||
print("\nValidate date tests:\n")
|
||||
__validate_date()
|
||||
print("\nParse flexible date tests:\n")
|
||||
__parse_flexible_date()
|
||||
print("\nCompare dates tests:\n")
|
||||
__compare_dates()
|
||||
print("\nFind newest datetime in list tests:\n")
|
||||
__find_newest_datetime_in_list()
|
||||
print("\nParse day of week range tests:\n")
|
||||
__parse_day_of_week_range()
|
||||
print("\nParse time range tests:\n")
|
||||
__parse_time_range()
|
||||
print("\nTimes overlap or connect tests:\n")
|
||||
__times_overlap_or_connect()
|
||||
print("\nIs time in range tests:\n")
|
||||
__is_time_in_range()
|
||||
print("\nReorder weekdays from today tests:\n")
|
||||
__reorder_weekdays_from_today()
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
|
||||
# __END__
|
||||
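As an aside, `reorder_weekdays_from_today` is exercised above but not defined in this diff; a rotation over the weekday list is the obvious shape of such a helper. A sketch under that assumption only (the real corelibs helper may handle long names and errors differently):

# Assumed sketch of weekday rotation starting at a given day;
# NOT the corelibs implementation.
WEEKDAYS = ["Mon", "Tue", "Wed", "Thu", "Fri", "Sat", "Sun"]

def rotate_weekdays(base_day: str) -> list[str]:
    if base_day not in WEEKDAYS:
        raise ValueError(f"Unknown weekday: {base_day}")
    idx = WEEKDAYS.index(base_day)
    return WEEKDAYS[idx:] + WEEKDAYS[:idx]

print(rotate_weekdays("Wed"))  # ['Wed', 'Thu', 'Fri', 'Sat', 'Sun', 'Mon', 'Tue']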
92  test-run/datetime_handling/timestamp_convert.py  Normal file
@@ -0,0 +1,92 @@
#!/usr/bin/env python3

"""
timestamp string checks
"""

from corelibs.datetime_handling.timestamp_convert import (
    convert_timestamp, seconds_to_string, convert_to_seconds, TimeParseError, TimeUnitError
)


def main() -> None:
    """
    Run timestamp conversion tests.
    """
    print("\n--- Testing convert_to_seconds ---\n")
    test_cases = [
        "5M 6d",  # 5 months, 6 days
        "2h 30m 45s",  # 2 hours, 30 minutes, 45 seconds
        "1Y 2M 3d",  # 1 year, 2 months, 3 days
        "1h",  # 1 hour
        "30m",  # 30 minutes
        "2 hours 15 minutes",  # 2 hours, 15 minutes
        "1d 12h",  # 1 day, 12 hours
        "3M 2d 4h",  # 3 months, 2 days, 4 hours
        "45s",  # 45 seconds
        "-45s",  # -45 seconds
        "-1h",  # -1 hour
        "-30m",  # -30 minutes
        "-2h 30m 45s",  # -2 hours, 30 minutes, 45 seconds
        "-1d 12h",  # -1 day, 12 hours
        "-3M 2d 4h",  # -3 months, 2 days, 4 hours
        "-1Y 2M 3d",  # -1 year, 2 months, 3 days
        "-2 hours 15 minutes",  # -2 hours, 15 minutes
        "-1 year 2 months",  # -1 year, 2 months
        "-2Y 6M 15d 8h 30m 45s",  # Complex negative example
        "1 year 2 months",  # 1 year, 2 months
        "2Y 6M 15d 8h 30m 45s",  # Complex example
        # invalid tests
        "5M 6d 2M",  # months appears twice
        "2h 30m 45s 1h",  # hours appears twice
        "1d 2 days",  # days appears twice (short and long form)
        "30m 45 minutes",  # minutes appears twice
        "1Y 2 years",  # years appears twice
        "1x 2 yrs",  # invalid names

        123,  # int
        789.12,  # float
        456.56,  # float, high
        "4566",  # int as string
        "5551.12",  # float as string
        "5551.56",  # float, high as string
    ]

    for time_string in test_cases:
        try:
            result = convert_to_seconds(time_string)
            print(f"Human readable to seconds: {time_string} => {result}")
        except (TimeParseError, TimeUnitError) as e:
            print(f"Error encountered for {time_string}: {type(e).__name__}: {e}")

    print("\n--- Testing seconds_to_string and convert_timestamp ---\n")

    test_values = [
        'as is string',
        -172800.001234,  # -2 days, -0.001234 seconds
        -90061.789,  # -1 day, -1 hour, -1 minute, -1.789 seconds
        -3661.456,  # -1 hour, -1 minute, -1.456 seconds
        -65.123,  # -1 minute, -5.123 seconds
        -1.5,  # -1.5 seconds
        -0.001,  # -1 millisecond
        -0.000001,  # -1 microsecond
        0,  # 0 seconds
        0.000001,  # 1 microsecond
        0.001,  # 1 millisecond
        1.5,  # 1.5 seconds
        65.123,  # 1 minute, 5.123 seconds
        3661.456,  # 1 hour, 1 minute, 1.456 seconds
        90061.789,  # 1 day, 1 hour, 1 minute, 1.789 seconds
        172800.001234  # 2 days, 0.001234 seconds
    ]

    for time_value in test_values:
        result = seconds_to_string(time_value, show_microseconds=True)
        result_alt = convert_timestamp(time_value, show_microseconds=True)
        print(f"Seconds to human readable: {time_value} => {result} / {result_alt}")


if __name__ == "__main__":
    main()

# __END__
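The invalid cases above (`5M 6d 2M`, `1d 2 days`) suggest the parser rejects any unit that appears twice, even across short and long forms. A minimal sketch of that duplicate-unit check (hypothetical, not the corelibs parser; the alias table is an assumption):

import re

# Map short and long unit names onto one canonical unit (assumed set).
UNIT_ALIASES = {
    "Y": "year", "year": "year", "years": "year",
    "M": "month", "month": "month", "months": "month",
    "d": "day", "day": "day", "days": "day",
    "h": "hour", "hour": "hour", "hours": "hour",
    "m": "minute", "minute": "minute", "minutes": "minute",
    "s": "second", "second": "second", "seconds": "second",
}

def find_duplicate_units(time_string: str) -> set[str]:
    seen: set[str] = set()
    duplicates: set[str] = set()
    for _, unit in re.findall(r"(-?\d+)\s*([A-Za-z]+)", time_string):
        canonical = UNIT_ALIASES.get(unit)
        if canonical is None:
            continue  # unknown unit; a real parser would raise here
        if canonical in seen:
            duplicates.add(canonical)
        seen.add(canonical)
    return duplicates

print(find_duplicate_units("5M 6d 2M"))   # {'month'}
print(find_duplicate_units("1d 2 days"))  # {'day'}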
2  test-run/db_handling/database/.gitignore  vendored  Normal file
@@ -0,0 +1,2 @@
*
!.gitignore
2  test-run/db_handling/log/.gitignore  vendored  Normal file
@@ -0,0 +1,2 @@
*
!.gitignore
139  test-run/db_handling/sql_main.py  Normal file
@@ -0,0 +1,139 @@
"""
SQL Main wrapper test
"""

from pathlib import Path
from uuid import uuid4
import json
from corelibs.debug_handling.dump_data import dump_data
from corelibs.logging_handling.log import Log, Logger
from corelibs.db_handling.sql_main import SQLMain

SCRIPT_PATH: Path = Path(__file__).resolve().parent
ROOT_PATH: Path = SCRIPT_PATH
DATABASE_DIR: Path = Path("database")
LOG_DIR: Path = Path("log")


def main() -> None:
    """
    SQL Main wrapper test run.
    """
    log = Log(
        log_path=ROOT_PATH.joinpath(LOG_DIR, 'sqlite_main.log'),
        log_name="SQLite Main",
        log_settings={
            "log_level_console": 'DEBUG',
            "log_level_file": 'DEBUG',
        }
    )
    sql_main = SQLMain(
        log=Logger(log.get_logger_settings()),
        db_ident=f"sqlite:{ROOT_PATH.joinpath(DATABASE_DIR, 'test_sqlite_main.db')}"
    )
    if sql_main.connected():
        log.info("SQL Main connected successfully")
    else:
        log.error('SQL Main connection failed')
    if sql_main.dbh is None:
        log.error('SQL Main DBH instance is None')
        return

    if sql_main.dbh.trigger_exists('trg_test_a_set_date_updated_on_update'):
        log.info("Trigger trg_test_a_set_date_updated_on_update exists")
    if sql_main.dbh.table_exists('test_a'):
        log.info("Table test_a exists, dropping for clean test")
        sql_main.dbh.execute_query("DROP TABLE test_a;")
    # create a dummy table
    table_sql = """
    CREATE TABLE IF NOT EXISTS test_a (
        test_a_id INTEGER PRIMARY KEY,
        date_created TEXT DEFAULT (strftime('%Y-%m-%d %H:%M:%f', 'now')),
        date_updated TEXT,
        uid TEXT NOT NULL UNIQUE,
        set_current_timestamp TEXT DEFAULT CURRENT_TIMESTAMP,
        text_a TEXT,
        content,
        int_a INTEGER,
        float_a REAL
    );
    """

    result = sql_main.dbh.execute_query(table_sql)
    log.debug(f"Create table result: {result}")
    trigger_sql = """
    CREATE TRIGGER trg_test_a_set_date_updated_on_update
    AFTER UPDATE ON test_a
    FOR EACH ROW
    WHEN OLD.date_updated IS NULL OR NEW.date_updated = OLD.date_updated
    BEGIN
        UPDATE test_a
        SET date_updated = (strftime('%Y-%m-%d %H:%M:%f', 'now'))
        WHERE test_a_id = NEW.test_a_id;
    END;
    """
    result = sql_main.dbh.execute_query(trigger_sql)
    log.debug(f"Create trigger result: {result}")
    result = sql_main.dbh.meta_data_detail('test_a')
    log.debug(f"Table meta data detail: {dump_data(result)}")
    # INSERT DATA
    sql = """
    INSERT INTO test_a (uid, text_a, content, int_a, float_a)
    VALUES (?, ?, ?, ?, ?)
    RETURNING test_a_id, uid;
    """
    result = sql_main.dbh.execute_query(
        sql,
        (
            str(uuid4()),
            'Some text A',
            json.dumps({'foo': 'bar', 'number': 42}),
            123,
            123.456,
        )
    )
    log.debug(f"[1] Insert data result: {dump_data(result)}")
    __uid: str = ''
    if result is not False:
        # first one only of interest
        result = dict(result[0])
        __uid = str(result.get('uid', ''))
    # second insert
    result = sql_main.dbh.execute_query(
        sql,
        (
            str(uuid4()),
            'Some text A',
            json.dumps({'foo': 'bar', 'number': 42}),
            123,
            123.456,
        )
    )
    log.debug(f"[2] Insert data result: {dump_data(result)}")
    result = sql_main.dbh.execute_query("SELECT * FROM test_a;")
    log.debug(f"Select data result: {dump_data(result)}")
    result = sql_main.dbh.return_one("SELECT * FROM test_a WHERE uid = ?;", (__uid,))
    log.debug(f"Fetch row result: {dump_data(result)}")
    sql = """
    UPDATE test_a
    SET text_a = ?
    WHERE uid = ?;
    """
    result = sql_main.dbh.execute_query(
        sql,
        (
            'Some updated text A',
            __uid,
        )
    )
    log.debug(f"Update data result: {dump_data(result)}")
    result = sql_main.dbh.return_one("SELECT * FROM test_a WHERE uid = ?;", (__uid,))
    log.debug(f"Fetch row after update result: {dump_data(result)}")

    sql_main.close()


if __name__ == "__main__":
    main()

# __END__
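The `WHEN` clause on the trigger above means `date_updated` is only touched when the caller did not change it themselves, so manual updates to that column win. The same behavior can be verified with the standard-library sqlite3 module alone (a self-contained sketch, independent of corelibs):

import sqlite3

con = sqlite3.connect(":memory:")
con.executescript("""
CREATE TABLE test_a (
    test_a_id INTEGER PRIMARY KEY,
    date_updated TEXT,
    text_a TEXT
);
CREATE TRIGGER trg_test_a_set_date_updated_on_update
AFTER UPDATE ON test_a
FOR EACH ROW
WHEN OLD.date_updated IS NULL OR NEW.date_updated = OLD.date_updated
BEGIN
    UPDATE test_a
    SET date_updated = (strftime('%Y-%m-%d %H:%M:%f', 'now'))
    WHERE test_a_id = NEW.test_a_id;
END;
""")
con.execute("INSERT INTO test_a (text_a) VALUES ('a')")
con.execute("UPDATE test_a SET text_a = 'b' WHERE test_a_id = 1")
# the trigger filled in date_updated on the ordinary UPDATE above
print(con.execute("SELECT date_updated FROM test_a").fetchone())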
146  test-run/db_handling/sqlite_io.py  Normal file
@@ -0,0 +1,146 @@
"""
SQLite IO test
"""

from pathlib import Path
from uuid import uuid4
import json
import sqlite3
from corelibs.debug_handling.dump_data import dump_data
from corelibs.logging_handling.log import Log, Logger
from corelibs.db_handling.sqlite_io import SQLiteIO

SCRIPT_PATH: Path = Path(__file__).resolve().parent
ROOT_PATH: Path = SCRIPT_PATH
DATABASE_DIR: Path = Path("database")
LOG_DIR: Path = Path("log")


def main() -> None:
    """
    SQLite IO test run.
    """
    log = Log(
        log_path=ROOT_PATH.joinpath(LOG_DIR, 'sqlite_io.log'),
        log_name="SQLite IO",
        log_settings={
            "log_level_console": 'DEBUG',
            "log_level_file": 'DEBUG',
        }
    )
    db = SQLiteIO(
        log=Logger(log.get_logger_settings()),
        db_name=ROOT_PATH.joinpath(DATABASE_DIR, 'test_sqlite_io.db'),
        row_factory='Dict'
    )
    if db.db_connected():
        log.info(f"Connected to DB: {db.db_name}")
    if db.trigger_exists('trg_test_a_set_date_updated_on_update'):
        log.info("Trigger trg_test_a_set_date_updated_on_update exists")
    if db.table_exists('test_a'):
        log.info("Table test_a exists, dropping for clean test")
        db.execute_query("DROP TABLE test_a;")
    # create a dummy table
    table_sql = """
    CREATE TABLE IF NOT EXISTS test_a (
        test_a_id INTEGER PRIMARY KEY,
        date_created TEXT DEFAULT (strftime('%Y-%m-%d %H:%M:%f', 'now')),
        date_updated TEXT,
        uid TEXT NOT NULL UNIQUE,
        set_current_timestamp TEXT DEFAULT CURRENT_TIMESTAMP,
        text_a TEXT,
        content,
        int_a INTEGER,
        float_a REAL
    );
    """
    result = db.execute_query(table_sql)
    log.debug(f"Create table result: {result}")
    trigger_sql = """
    CREATE TRIGGER trg_test_a_set_date_updated_on_update
    AFTER UPDATE ON test_a
    FOR EACH ROW
    WHEN OLD.date_updated IS NULL OR NEW.date_updated = OLD.date_updated
    BEGIN
        UPDATE test_a
        SET date_updated = (strftime('%Y-%m-%d %H:%M:%f', 'now'))
        WHERE test_a_id = NEW.test_a_id;
    END;
    """
    result = db.execute_query(trigger_sql)
    log.debug(f"Create trigger result: {result}")
    result = db.meta_data_detail('test_a')
    log.debug(f"Table meta data detail: {dump_data(result)}")
    # INSERT DATA
    sql = """
    INSERT INTO test_a (uid, text_a, content, int_a, float_a)
    VALUES (?, ?, ?, ?, ?)
    RETURNING test_a_id, uid;
    """
    result = db.execute_query(
        sql,
        (
            str(uuid4()),
            'Some text A',
            json.dumps({'foo': 'bar', 'number': 42}),
            123,
            123.456,
        )
    )
    log.debug(f"[1] Insert data result: {dump_data(result)}")
    __uid: str = ''
    if result is not False:
        # first one only of interest
        result = dict(result[0])
        __uid = str(result.get('uid', ''))
    # second insert
    result = db.execute_query(
        sql,
        (
            str(uuid4()),
            'Some text A',
            json.dumps({'foo': 'bar', 'number': 42}),
            123,
            123.456,
        )
    )
    log.debug(f"[2] Insert data result: {dump_data(result)}")
    result = db.execute_query("SELECT * FROM test_a;")
    log.debug(f"Select data result: {dump_data(result)}")
    result = db.return_one("SELECT * FROM test_a WHERE uid = ?;", (__uid,))
    log.debug(f"Fetch row result: {dump_data(result)}")
    sql = """
    UPDATE test_a
    SET text_a = ?
    WHERE uid = ?;
    """
    result = db.execute_query(
        sql,
        (
            'Some updated text A',
            __uid,
        )
    )
    log.debug(f"Update data result: {dump_data(result)}")
    result = db.return_one("SELECT * FROM test_a WHERE uid = ?;", (__uid,))
    log.debug(f"Fetch row after update result: {dump_data(result)}")

    db.db_close()

    db = SQLiteIO(
        log=Logger(log.get_logger_settings()),
        db_name=ROOT_PATH.joinpath(DATABASE_DIR, 'test_sqlite_io.db'),
        row_factory='Row'
    )
    result = db.return_one("SELECT * FROM test_a WHERE uid = ?;", (__uid,))
    if result is not None and result is not False:
        log.debug(f"Fetch row result: {dump_data(result)} -> {dict(result)} -> {result.keys()}")
        log.debug(f"Access via index: {result[5]} -> {result['text_a']}")
        if isinstance(result, sqlite3.Row):
            log.debug('Result is sqlite3.Row as expected')


if __name__ == "__main__":
    main()

# __END__
34  test-run/encryption/symmetric_encryption.py  Normal file
@@ -0,0 +1,34 @@
#!/usr/bin/env python3

"""
Symmetric encryption test
"""

import json
from corelibs.debug_handling.dump_data import dump_data
from corelibs.encryption_handling.symmetric_encryption import SymmetricEncryption


def main() -> None:
    """
    Symmetric encryption round-trip tests.
    """
    password = "strongpassword"
    se = SymmetricEncryption(password)

    plaintext = "Hello, World!"
    ciphertext = se.encrypt_with_metadata_return_str(plaintext)
    decrypted = se.decrypt_with_metadata(ciphertext)
    print(f"Encrypted: {dump_data(json.loads(ciphertext))}")
    print(f"Input: {plaintext} -> {decrypted}")

    static_ciphertext = SymmetricEncryption.encrypt_data(plaintext, password)
    decrypted = SymmetricEncryption.decrypt_data(static_ciphertext, password)
    print(f"Static Encrypted: {dump_data(json.loads(static_ciphertext))}")
    print(f"Input: {plaintext} -> {decrypted}")


if __name__ == "__main__":
    main()

# __END__
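How `SymmetricEncryption` derives its key from the password is not visible in this diff. As a generic illustration only (not the corelibs scheme), password-based symmetric encryption with the `cryptography` package typically pairs a KDF with Fernet; the parameters below are illustrative assumptions:

import base64
import os

from cryptography.fernet import Fernet
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.kdf.pbkdf2 import PBKDF2HMAC

password = b"strongpassword"
salt = os.urandom(16)  # must be stored alongside the ciphertext

# Derive a 32-byte key from the password (iteration count is illustrative).
kdf = PBKDF2HMAC(algorithm=hashes.SHA256(), length=32, salt=salt, iterations=480_000)
key = base64.urlsafe_b64encode(kdf.derive(password))

f = Fernet(key)
token = f.encrypt(b"Hello, World!")
print(f.decrypt(token))  # b'Hello, World!'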
31  test-run/file_handling/file_bom_check.py  Normal file
@@ -0,0 +1,31 @@
#!/usr/bin/env python3

"""
BOM check for files
"""

from pathlib import Path
from corelibs.file_handling.file_bom_encoding import is_bom_encoded, is_bom_encoded_info
from corelibs.debug_handling.dump_data import dump_data


def main() -> None:
    """
    Check files for BOM encoding
    """
    base_path = Path(__file__).resolve().parent
    for file_path in [
        'test-data/sample_with_bom.csv',
        'test-data/sample_without_bom.csv',
    ]:
        has_bom = is_bom_encoded(base_path.joinpath(file_path))
        bom_info = is_bom_encoded_info(base_path.joinpath(file_path))
        print(f'File: {file_path}')
        print(f'  Has BOM: {has_bom}')
        print(f'  BOM Info: {dump_data(bom_info)}')


if __name__ == "__main__":
    main()

# __END__
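BOM detection itself needs nothing beyond the standard library; a compact sketch of the idea (not the corelibs implementation, which also reports extra info):

import codecs
from pathlib import Path

# Longest BOMs first so UTF-32 is not misread as UTF-16.
BOMS = [
    (codecs.BOM_UTF32_LE, "utf-32-le"),
    (codecs.BOM_UTF32_BE, "utf-32-be"),
    (codecs.BOM_UTF8, "utf-8-sig"),
    (codecs.BOM_UTF16_LE, "utf-16-le"),
    (codecs.BOM_UTF16_BE, "utf-16-be"),
]

def detect_bom(path: Path) -> str | None:
    """Return the codec name for a leading BOM, or None if there is none."""
    head = path.read_bytes()[:4]
    for bom, name in BOMS:
        if head.startswith(bom):
            return name
    return None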
6  test-run/file_handling/test-data/sample_with_bom.csv  Normal file
@@ -0,0 +1,6 @@
Name,Age,City,Country
John Doe,25,New York,USA
Jane Smith,30,London,UK
山田太郎,28,東京,Japan
María García,35,Madrid,Spain
François Dupont,42,Paris,France
6  test-run/file_handling/test-data/sample_without_bom.csv  Normal file
@@ -0,0 +1,6 @@
Name,Age,City,Country
John Doe,25,New York,USA
Jane Smith,30,London,UK
山田太郎,28,東京,Japan
María García,35,Madrid,Spain
François Dupont,42,Paris,France
52  test-run/iterator_handling/data_search.py  Normal file
@@ -0,0 +1,52 @@
#!/usr/bin/env python3

"""
Search data tests
iterator_handling.data_search
"""

from corelibs.debug_handling.dump_data import dump_data
from corelibs.iterator_handling.data_search import find_in_array_from_list, ArraySearchList


def main() -> None:
    """
    Search a list of dicts with key/value conditions.
    """
    data = [
        {
            "lookup_value_p": "A01",
            "lookup_value_c": "B01",
            "replace_value": "R01",
        },
        {
            "lookup_value_p": "A02",
            "lookup_value_c": "B02",
            "replace_value": "R02",
        },
    ]
    test_foo = ArraySearchList(
        key="lookup_value_p",
        value="A01"
    )
    print(test_foo)
    search: list[ArraySearchList] = [
        {
            "key": "lookup_value_p",
            "value": "A01"
        },
        {
            "key": "lookup_value_c",
            "value": "B01"
        }
    ]

    result = find_in_array_from_list(data, search)

    print(f"Search {dump_data(search)} -> {dump_data(result)}")


if __name__ == "__main__":
    main()

# __END__
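Judging by the test, `find_in_array_from_list` returns the rows where every key/value pair in the search list matches. A plain-Python equivalent of that behavior (an assumption inferred from the test, not the corelibs code):

# Hypothetical equivalent of the search performed above.
rows = [
    {"lookup_value_p": "A01", "lookup_value_c": "B01", "replace_value": "R01"},
    {"lookup_value_p": "A02", "lookup_value_c": "B02", "replace_value": "R02"},
]
search = [
    {"key": "lookup_value_p", "value": "A01"},
    {"key": "lookup_value_c", "value": "B01"},
]
matches = [
    row for row in rows
    if all(row.get(cond["key"]) == cond["value"] for cond in search)
]
print(matches)  # the first row only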
120  test-run/iterator_handling/dict_helpers.py  Normal file
@@ -0,0 +1,120 @@
"""
Iterator helper testing
"""

from typing import Any
from corelibs.debug_handling.dump_data import dump_data
from corelibs.iterator_handling.dict_mask import mask
from corelibs.iterator_handling.dict_helpers import set_entry


def __mask():
    data = {
        # "user": "john",
        # "encryption_key": "Secret key",
        # "ENCRYPTION.TEST": "Secret key test",
        # "inside_password_test": "Hide this",
        "password": ["secret1", "secret2"],  # List value gets masked
        # "config": {
        #     "db_password": {"primary": "secret", "backup": "secret2"},  # Dict value gets masked
        #     "api_keys": ["key1", "key2", "key3"]  # List value gets masked
        # },
        # "items": [  # List value that doesn't get masked, but gets processed recursively
        #     {"name": "item1", "secret_key": "itemsecret"},
        #     {"name": "item2", "passwords": ["pass1", "pass2"]}
        # ],
        # "normal_list": ["item1", "item2", "item3"]  # Normal list, not masked
    }
    data = {
        "config": {
            # "password": ["secret1", "secret2"],
            # "password_other": {"password": ["secret1", "secret2"]},
            # "database": {
            #     "host": "localhost",
            #     "password": "db_secret",
            #     "users": [
            #         {"name": "admin", "password": "admin123"},
            #         {"name": "user", "secret_key": "user456"}
            #     ]
            # },
            # "api": {
            #     # "endpoints": ["api1", "api2"],
            #     "encryption_settings": {
            #         "enabled": True,
            #         "secret": "api_secret"
            #     }
            # }
            "secret_key": "normal_value",
            "api_key": "normal_value",
            "my_key_value": "normal_value",
        }
    }
    data = {
        "basic": {
            "log_level_console": "DEBUG",
            "log_level_file": "DEBUG",
            "storage_interface": "sqlite",
            "content_start_date": "2023-1-1",
            "encryption_key": "ENCRYPTION_KEY"
        },
        "email": {
            "alert_email": [
                "test+z-sd@tequila.jp"
            ]
        },
        "poller": {
            "max_forks": "1",
            "interface": "Zac"
        },
        "pusher": {
            "max_forks": "3",
            "interface": "Screendragon"
        },
        "api:Zac": {
            "type": "zac",
            "client_id": "oro_zac_demo",
            "client_secret": "CLIENT_SECRET",
            "username": "zacuser",
            "password": "ZACuser3",
            "hostname": "e-gra2.zac.ai",
            "appname": "e-gra2_api_trial",
            "api_path": "b/api/v2"
        },
        "api:Screendragon": {
            "type": "screendragon",
            "client_id": "omniprostaging",
            "encryption_client": "SOME_SECRET",
            "client_encryption": "SOME_SECRET",
            "secret_client": "SOME_SECRET",
            "client_secret": "SOME_SECRET",
            "hostname": "omniprostaging.screendragon.com",
            "appname": "sdapi",
            "api_path": "api"
        }
    }
    result = mask(data)
    print(f"** In: {dump_data(data)}")
    print(f"===> Masked: {dump_data(result)}")


def __set_dict_value_entry():

    dict_empty: dict[str, Any] = {}
    new = set_entry(dict_empty, 'a.b.c', 1)
    print(f"[1] Set dict entry: {dump_data(new)}")
    new = set_entry(new, 'dict', {'key': 'value'})
    print(f"[2] Set dict entry: {dump_data(new)}")
    new = set_entry(new, 'list', [1, 2, 3])
    print(f"[3] Set dict entry: {dump_data(new)}")


def main():
    """
    Test: corelibs.iterator_handling.dict_helpers
    """
    __mask()
    __set_dict_value_entry()


if __name__ == "__main__":
    main()
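`set_entry(dict_empty, 'a.b.c', 1)` suggests dotted-path assignment with intermediate dicts created on demand. A minimal sketch of that behavior (assumed from the test output labels, not the corelibs implementation):

from typing import Any

def set_path(target: dict[str, Any], path: str, value: Any) -> dict[str, Any]:
    """Set a nested value, creating intermediate dicts along the way."""
    node = target
    keys = path.split(".")
    for key in keys[:-1]:
        node = node.setdefault(key, {})
    node[keys[-1]] = value
    return target

print(set_path({}, "a.b.c", 1))  # {'a': {'b': {'c': 1}}}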
29  test-run/iterator_handling/list_helpers.py  Normal file
@@ -0,0 +1,29 @@
"""
test list helpers
"""

from corelibs.iterator_handling.list_helpers import is_list_in_list, convert_to_list


def __test_is_list_in_list_a():
    list_a = [1, "hello", 3.14, True, "world"]
    list_b = ["hello", True, 42]
    result = is_list_in_list(list_a, list_b)
    print(f"RESULT: {result}")


def __convert_list():
    source = "hello"
    result = convert_to_list(source)
    print(f"IN: {source} -> {result}")


def main():
    __test_is_list_in_list_a()
    __convert_list()


if __name__ == "__main__":
    main()

# __END__
54  test-run/json_handling/jmespath_helper.py  Normal file
@@ -0,0 +1,54 @@
#!/usr/bin/env python3

"""
JMESPath testing
"""

from corelibs.debug_handling.dump_data import dump_data
from corelibs.json_handling.jmespath_helper import jmespath_search


def main() -> None:
    """
    Run JMESPath expressions against a sample structure.
    """
    __set = {
        'a': 'b',
        'foobar': [1, 2, 'a'],
        'bar': {
            'a': 1,
            'b': 'c'
        },
        'baz': [
            {
                'aa': 1,
                'ab': 'cc'
            },
            {
                'ba': 2,
                'bb': 'dd'
            },
        ],
        'foo': {
            'a': [1, 2, 3],
            'b': ['a', 'b', 'c']
        }
    }

    __get = [
        'a',
        'bar.a',
        'foo.a',
        'baz[].aa',
        "[?\"c\" && contains(\"c\", 'b')]",
        "[?contains(\"c\", 'b')]",
    ]
    for __jmespath in __get:
        result = jmespath_search(__set, __jmespath)
        print(f"GET {__jmespath}: {dump_data(result)}")


if __name__ == "__main__":
    main()

# __END__
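`jmespath_search` is presumably a thin wrapper; the underlying `jmespath` package exposes the same idea directly, which is worth knowing when reading the expressions above (standard usage of that package, shown only for orientation):

import jmespath  # pip install jmespath

data = {"bar": {"a": 1}, "baz": [{"aa": 1}, {"ba": 2}]}
print(jmespath.search("bar.a", data))     # 1
print(jmespath.search("baz[].aa", data))  # [1] - projection drops dicts without 'aa'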
52  test-run/json_handling/json_replace.py  Normal file
@@ -0,0 +1,52 @@
#!/usr/bin/env python3

"""
JSON content replace tests
"""

from deepdiff import DeepDiff
from corelibs.debug_handling.dump_data import dump_data
from corelibs.json_handling.json_helper import modify_with_jsonpath


def main() -> None:
    """
    Modify nested values via JSONPath and diff the result.
    """
    __data = {
        'a': 'b',
        'foobar': [1, 2, 'a'],
        'bar': {
            'a': 1,
            'b': 'c'
        },
        'baz': [
            {
                'aa': 1,
                'ab': 'cc'
            },
            {
                'ba': 2,
                'bb': 'dd'
            },
        ],
        'foo': {
            'a': [1, 2, 3],
            'b': ['a', 'b', 'c']
        }
    }

    # Modify some values using JSONPath
    __replace_data = modify_with_jsonpath(__data, 'bar.a', 42)
    __replace_data = modify_with_jsonpath(__replace_data, 'foo.b[1]', 'modified')
    __replace_data = modify_with_jsonpath(__replace_data, 'baz[0].ab', 'changed')

    print(f"Original Data:\n{dump_data(__data)}\n")
    print(f"Modified Data:\n{dump_data(__replace_data)}\n")
    print(f"Differences:\n{dump_data(DeepDiff(__data, __replace_data, verbose_level=2))}\n")


if __name__ == "__main__":
    main()

# __END__
135  test-run/logging_handling/log.py  Normal file
@@ -0,0 +1,135 @@
"""
Log logging_handling.log testing
"""

# import atexit
import sys
from pathlib import Path
# this is for testing only
from corelibs.logging_handling.log import Log, Logger, ConsoleFormat, ConsoleFormatSettings
from corelibs.debug_handling.debug_helpers import exception_stack, call_stack
from corelibs.logging_handling.logging_level_handling.logging_level import LoggingLevel


def main():
    """
    Log testing
    """
    script_path: Path = Path(__file__).resolve().parent
    log = Log(
        log_path=script_path.joinpath('log', 'test.log'),
        log_name="Test Log",
        log_settings={
            "log_level_console": 'DEBUG',
            # "log_level_console": None,
            "log_level_file": 'DEBUG',
            # "console_color_output_enabled": False,
            "per_run_log": True,
            # "console_format_type": ConsoleFormatSettings.NONE,
            # "console_format_type": ConsoleFormatSettings.MINIMAL,
            "console_format_type": ConsoleFormat.TIME_MICROSECONDS | ConsoleFormat.NAME | ConsoleFormat.LEVEL,
            # "console_format_type": ConsoleFormat.NAME,
            # "console_format_type": (
            #     ConsoleFormat.TIME | ConsoleFormat.TIMEZONE | ConsoleFormat.LINENO | ConsoleFormat.LEVEL
            # ),
        }
    )
    logn = Logger(log.get_logger_settings())

    log.info("ConsoleFormatType FILE is: %s", ConsoleFormat.FILE)
    log.info("ConsoleFormatSettings ALL is: %s", ConsoleFormatSettings.ALL)
    log.info("ConsoleFormatSettings lookup is: %s", ConsoleFormatSettings.from_string('ALL'))

    log.logger.debug('[NORMAL] Debug test: %s', log.logger.name)
    log.lg.debug('[NORMAL] Debug test: %s', log.logger.name)
    log.debug('[NORMAL-] Debug test: %s', log.logger.name)
    logn.lg.debug('[NORMAL N] Debug test: %s', log.logger.name)
    logn.debug('[NORMAL N-] Debug test: %s', log.logger.name)
    log.logger.info('[NORMAL] Info test: %s', log.logger.name)
    log.info('[NORMAL-] Info test: %s', log.logger.name)
    log.logger.warning('[NORMAL] Warning test: %s', log.logger.name)
    log.warning('[NORMAL-] Warning test: %s', log.logger.name)
    log.logger.error('[NORMAL] Error test: %s', log.logger.name)
    log.error('[NORMAL-] Error test: %s', log.logger.name)
    log.logger.critical('[NORMAL] Critical test: %s', log.logger.name)
    log.critical('[NORMAL-] Critical test: %s', log.logger.name)
    log.logger.log(LoggingLevel.ALERT.value, '[NORMAL] alert test: %s', log.logger.name)
    log.alert('[NORMAL-] alert test: %s', log.logger.name)
    log.emergency('[NORMAL-] emergency test: %s', log.logger.name)
    log.logger.log(LoggingLevel.EMERGENCY.value, '[NORMAL] emergency test: %s', log.logger.name)
    log.exception('[NORMAL] Exception test: %s', log.logger.name)
    log.logger.log(LoggingLevel.EXCEPTION.value, '[NORMAL] exception test: %s', log.logger.name, exc_info=True)

    bad_level = 'WRONG'
    if not Log.validate_log_level(bad_level):
        print(f"Invalid level: {bad_level}")
    good_level = 'WARNING'
    if Log.validate_log_level(good_level):
        print(f"Valid level: {good_level}")

    print(f"ERROR is to_logging_level(): {LoggingLevel.ERROR.to_logging_level()}")
    print(f"ERROR is to_lower_case(): {LoggingLevel.ERROR.to_lower_case()}")
    print(f"ERROR is: {LoggingLevel.ERROR}")
    print(f"ERROR is value: {LoggingLevel.ERROR.value}")
    print(f"ERROR is name: {LoggingLevel.ERROR.name}")
    print(f"ERROR is from_string(lower): {LoggingLevel.from_string('error')}")
    print(f"ERROR is from_string(upper): {LoggingLevel.from_string('ERROR')}")
    print(f"ERROR is from_int: {LoggingLevel.from_int(40)}")
    print(f"ERROR is from_any(text lower): {LoggingLevel.from_any('error')}")
    print(f"ERROR is from_any(text upper): {LoggingLevel.from_any('ERROR')}")
    print(f"ERROR is from_any(int): {LoggingLevel.from_any(40)}")
    print(f"INFO <= ERROR: {LoggingLevel.INFO.includes(LoggingLevel.ERROR)}")
    print(f"INFO > ERROR: {LoggingLevel.INFO.is_higher_than(LoggingLevel.ERROR)}")
    print(f"INFO < ERROR: {LoggingLevel.INFO.is_lower_than(LoggingLevel.ERROR)}")

    try:
        print(f"INVALID is A: {LoggingLevel.from_string('INVALID')}")
    except ValueError as e:
        print(f"* ERROR: {e}")

    try:
        __test = 5 / 0
        print(f"Divided: {__test}")
    except ZeroDivisionError as e:
        print(f"** sys.exc_info(): {sys.exc_info()}")
        print(f"** stack helpers: [{exception_stack()}] | [{exception_stack(sys.exc_info())}] | [{call_stack()}]")
        log.logger.critical("Division by zero: %s", e)
        log.exception("Division by zero: %s", e)

    for handler in log.logger.handlers:
        print(
            f"** Handler (logger) {handler} [{handler.name}] -> "
            f"{handler.level} -> {LoggingLevel.from_any(handler.level)}"
        )

    for key, handler in log.handlers.items():
        print(f"Handler (handlers) [{key}] {handler} -> {handler.level} -> {LoggingLevel.from_any(handler.level)}")
    log.set_log_level(Log.CONSOLE_HANDLER, LoggingLevel.ERROR)
    log.logger.warning('[NORMAL] Invisible Warning test: %s', log.logger.name)
    log.logger.error('[NORMAL] Visible Error test: %s', log.logger.name)
    log.logger.debug('[NORMAL] Visible Debug test: %s', log.logger.name)
    print(f"*** Any handler is minimum level ERROR: {log.any_handler_is_minimum_level(LoggingLevel.ERROR)}")
    print(f"*** Any handler is minimum level DEBUG: {log.any_handler_is_minimum_level(LoggingLevel.DEBUG)}")
    for handler in log.handlers.values():
        print(
            f"*** Setting handler {handler} is level {LoggingLevel.from_any(handler.level).name} -> "
            f"*** INC {LoggingLevel.from_any(handler.level).includes(LoggingLevel.DEBUG)}")

    print(f"*** WARNING includes ERROR: {LoggingLevel.WARNING.includes(LoggingLevel.ERROR)}")
    print(f"*** ERROR includes WARNING: {LoggingLevel.ERROR.includes(LoggingLevel.WARNING)}")

    log.set_log_level(Log.CONSOLE_HANDLER, LoggingLevel.DEBUG)
    log.debug('Current logging format: %s', log.log_settings['console_format_type'])
    log.update_console_formatter(ConsoleFormat.TIME | ConsoleFormat.LINENO)
    log.info('Does it show less')
    log.update_console_formatter(ConsoleFormat.TIME | ConsoleFormat.LINENO)
    log.info('Does it show less B')
    print(f"*** Any handler is minimum level ERROR: {log.any_handler_is_minimum_level(LoggingLevel.ERROR)}")
    print(f"*** Any handler is minimum level DEBUG: {log.any_handler_is_minimum_level(LoggingLevel.DEBUG)}")


if __name__ == "__main__":
    main()

# __END__
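`ConsoleFormat.TIME | ConsoleFormat.LINENO` reads like a standard `Flag` enum; the combination pattern itself is plain standard library. A generic sketch of the pattern (not the corelibs definition, which has more members):

from enum import Flag, auto

class Fmt(Flag):
    TIME = auto()
    NAME = auto()
    LEVEL = auto()
    LINENO = auto()

selected = Fmt.TIME | Fmt.LINENO
print(Fmt.TIME in selected)   # True
print(Fmt.LEVEL in selected)  # False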
2  test-run/logging_handling/log/.gitignore  vendored  Normal file
@@ -0,0 +1,2 @@
*
!.gitignore
91  test-run/logging_handling/log_pool.py  Normal file
@@ -0,0 +1,91 @@
"""
Pool Queue log handling
Thread Queue log handling
"""

import random
import time
from multiprocessing import Queue
import concurrent.futures
import logging
from pathlib import Path
from corelibs.logging_handling.log import Log
from corelibs.logging_handling.logging_level_handling.logging_level import LoggingLevel


def work_function(log_name: str, worker_id: int, data: list[int]) -> int:
    """
    simulate worker

    Arguments:
        log_name {str} -- base name for the per-worker logger
        worker_id {int} -- id of this worker
        data {list[int]} -- numbers to sum

    Returns:
        int -- sum of data multiplied by worker_id
    """
    log = logging.getLogger(f'{log_name}-WorkerFn-{worker_id}')
    log.info('Starting worker: %s', worker_id)
    time.sleep(random.uniform(1, 3))
    result = sum(data) * worker_id
    return result


def main():
    """
    Queue log tester
    """
    print("[START] Queue logger test")
    log_queue: 'Queue[str]' = Queue()
    script_path: Path = Path(__file__).resolve().parent
    log = Log(
        log_path=script_path.joinpath('log', 'test.log'),
        log_name="Test Log",
        log_settings={
            "log_level_console": 'INFO',
            "log_level_file": 'INFO',
            "log_queue": log_queue,
        }
    )

    log.logger.debug('Pool Fork logging test')
    max_forks = 2
    data_sets = [[1, 2, 3], [4, 5, 6], [7, 8, 9]]
    with concurrent.futures.ProcessPoolExecutor(
        max_workers=max_forks,
        initializer=Log.init_worker_logging,
        initargs=(log_queue,)
    ) as executor:
        log.logger.info('Start workers')
        futures = [
            executor.submit(work_function, log.log_name, worker_id, data)
            for worker_id, data in enumerate(data_sets, 1)
        ]
        log.logger.info('Workers started')

        for future in concurrent.futures.as_completed(futures):
            log.logger.warning('Processing result: %s', future.result())
            print(f"Processing result: {future.result()}")

        log.set_log_level('stream_handler', LoggingLevel.ERROR)
        log.logger.error('SECOND Start workers')
        futures = [
            executor.submit(work_function, log.log_name, worker_id, data)
            for worker_id, data in enumerate(data_sets, 1)
        ]
        log.logger.info('[INVISIBLE] Workers started')
        log.logger.error('[VISIBLE] Second Workers started')

        for future in concurrent.futures.as_completed(futures):
            log.logger.error('Processing result: %s', future.result())
            print(f"Processing result: {future.result()}")

    log.set_log_level('stream_handler', LoggingLevel.DEBUG)
    log.logger.info('[END] Queue logger test')
    log.stop_listener()


if __name__ == "__main__":
    main()

# __END__
66  test-run/logging_handling/log_queue.py  Normal file
@@ -0,0 +1,66 @@
"""
Log logging_handling.log testing
"""

# import atexit
from pathlib import Path
from multiprocessing import Queue
import time
# this is for testing only
from corelibs.logging_handling.log import Log
from corelibs.logging_handling.logging_level_handling.logging_level import LoggingLevel


def main():
    """
    Log testing
    """
    script_path: Path = Path(__file__).resolve().parent

    log_queue: 'Queue[str]' = Queue()
    log_q = Log(
        log_path=script_path.joinpath('log', 'test_queue.log'),
        log_name="Test Log",
        log_settings={
            "log_level_console": 'WARNING',
            "log_level_file": 'ERROR',
            "log_queue": log_queue
            # "console_color_output_enabled": False,
        }
    )

    log_q.logger.debug('[QUEUE] Debug test: %s', log_q.logger.name)
    log_q.logger.info('[QUEUE] Info test: %s', log_q.logger.name)
    log_q.logger.warning('[QUEUE] Warning test: %s', log_q.logger.name)
    log_q.logger.error('[QUEUE] Error test: %s', log_q.logger.name)
    log_q.logger.critical('[QUEUE] Critical test: %s', log_q.logger.name)
    log_q.logger.log(LoggingLevel.EXCEPTION.value, '[QUEUE] Exception test: %s', log_q.logger.name, exc_info=True)
    time.sleep(0.1)

    for handler in log_q.logger.handlers:
        print(f"[1] Handler (logger) {handler}")
    if log_q.listener is not None:
        for handler in log_q.listener.handlers:
            print(f"[1] Handler (queue) {handler}")
    for handler in log_q.handlers.items():
        print(f"[1] Handler (handlers) {handler}")

    log_q.set_log_level('stream_handler', LoggingLevel.ERROR)
    log_q.logger.warning('[QUEUE-B] [INVISIBLE] Warning test: %s', log_q.logger.name)
    log_q.logger.error('[QUEUE-B] [VISIBLE] Error test: %s', log_q.logger.name)

    for handler in log_q.logger.handlers:
        print(f"[2] Handler (logger) {handler}")
    if log_q.listener is not None:
        for handler in log_q.listener.handlers:
            print(f"[2] Handler (queue) {handler}")
    for handler in log_q.handlers.items():
        print(f"[2] Handler (handlers) {handler}")

    log_q.stop_listener()


if __name__ == "__main__":
    main()

# __END__
31  test-run/logging_handling/log_queue_legacy.py  Normal file
@@ -0,0 +1,31 @@
"""
Log logging_handling.log testing
"""

# import atexit
from pathlib import Path
from multiprocessing import Queue
# this is for testing only
from queue_logger.log_queue import QueueLogger


def main():
    """
    Log testing
    """
    script_path: Path = Path(__file__).resolve().parent

    log_queue: 'Queue[str]' = Queue()
    log_q_legacy = QueueLogger(
        log_file=script_path.joinpath('log', 'test_queue_legacy.log'),
        log_name="Test Log Queue",
        log_queue=log_queue
    )
    log_q_legacy.mlog.info('Log test: %s', 'Queue Legacy')
    # log_q.stop_listener()


if __name__ == "__main__":
    main()

# __END__
96  test-run/logging_handling/queue_logger/log_queue.py  Normal file
@@ -0,0 +1,96 @@
"""
test queue logger interface
NOTE: this has all moved to the default log interface
"""

import logging
import logging.handlers
from pathlib import Path
from multiprocessing import Queue


class QueueLogger:
    """
    Queue logger
    """

    def __init__(self, log_file: Path, log_name: str, log_queue: 'Queue[str] | None' = None):
        self.log_file = log_file
        self.log_name = log_name
        self.handlers = self.setup_logging()
        self.log_queue: 'Queue[str]' = log_queue if log_queue is not None else Queue()
        self.listener = logging.handlers.QueueListener(self.log_queue, *self.handlers)
        self.listener.start()

        self.mlog: logging.Logger = self.main_log(log_name)

    def __del__(self):
        self.mlog.info("[%s] ================================>", "END")
        self.listener.stop()

    def setup_logging(self):
        """
        setup basic logging
        """

        # Create formatters
        file_formatter = logging.Formatter(
            '%(asctime)s - %(name)s - %(levelname)s - [PID:%(process)d] [%(filename)s:%(lineno)d] - %(message)s'
        )

        console_formatter = logging.Formatter(
            '%(asctime)s - %(name)s - %(levelname)s - %(message)s'
        )

        # Create handlers
        file_handler = logging.FileHandler(self.log_file)
        file_handler.setFormatter(file_formatter)
        file_handler.setLevel(logging.DEBUG)

        console_handler = logging.StreamHandler()
        console_handler.setFormatter(console_formatter)
        console_handler.setLevel(logging.DEBUG)

        return [file_handler, console_handler]

    def main_log(self, log_name: str) -> logging.Logger:
        """
        main logger

        Arguments:
            log_name {str} -- name for the main process logger

        Returns:
            logging.Logger -- logger that writes into the queue
        """
        mlog_handler = logging.handlers.QueueHandler(self.log_queue)
        mlog = logging.getLogger(f'{log_name}-MainProcess')
        mlog.addHandler(mlog_handler)
        mlog.setLevel(logging.DEBUG)
        return mlog

    @staticmethod
    def init_worker_logging(log_queue: 'Queue[str]', log_name: str):
        """
        Initialize logging for worker processes
        """

        # Create QueueHandler
        queue_handler = logging.handlers.QueueHandler(log_queue)

        # Setup root logger for this process
        # NOTE: the name must stay EMPTY (root logger); a named logger would only
        # create one SINGLE new logger, and we need one for EACH fork
        root_logger = logging.getLogger()
        root_logger.setLevel(logging.DEBUG)
        root_logger.handlers.clear()
        root_logger.addHandler(queue_handler)

        root_logger.info('[LOGGER] Init log: %s - %s', log_queue, log_name)

        return root_logger

    def stop_listener(self):
        """
        stop the listener
        """
        self.listener.stop()