Compare commits

...

16 Commits

Author SHA1 Message Date
Clemens Schwaighofer
765cc061c1 v0.22.1: Log update with closing queue on exit or abort 2025-08-05 10:33:55 +09:00
Clemens Schwaighofer
80319385f0 Add Log exit queue cleanup if queue is set
to avoid hung threads on errors
2025-08-05 10:32:33 +09:00
Clemens Schwaighofer
29dd906fe0 v0.22.0: per run log file rotate 2025-08-01 16:04:18 +09:00
Clemens Schwaighofer
d5dc4028c3 Merge branch 'development' 2025-08-01 16:02:40 +09:00
Clemens Schwaighofer
0df049d453 Add per run log rotate flag
This flag will use the normal file handler with a file name that has date + time + milliseconds
to create a new file each time the script is run
2025-08-01 16:01:50 +09:00
Clemens Schwaighofer
0bd7c1f685 v0.21.1: Update convert time string to skip any numbers 2025-07-29 09:30:56 +09:00
Clemens Schwaighofer
2f08ecabbf For convert time string, skip conversion if the incoming value is a number of any type
Float values are rounded, and any kind of number is then converted to int and returned
Everything else is converted to string and the normal conversion is run
2025-07-29 09:29:38 +09:00
Clemens Schwaighofer
12af1c80dc v0.21.0: string with time units to seconds int 2025-07-29 09:15:20 +09:00
Clemens Schwaighofer
a52b6e0a55 Merge branch 'development' 2025-07-29 09:14:11 +09:00
Clemens Schwaighofer
a586cf65e2 Convert string with time units to seconds 2025-07-29 09:13:36 +09:00
Clemens Schwaighofer
e2e7882bfa Log exception with new exception_stack call, exception_stack method added to the debug helpers 2025-07-28 15:27:55 +09:00
Clemens Schwaighofer
4f9c2b9d5f Add exception stack caller and add this to the logger exception call
So we get the location of the exception in the console log too
2025-07-28 15:26:23 +09:00
Clemens Schwaighofer
5203bcf1ea v0.19.1: Log exception call, add call stack to the console log output 2025-07-28 14:32:56 +09:00
Clemens Schwaighofer
f1e3bc8559 For Log exception write to ERROR, add the stack trace too 2025-07-28 14:32:14 +09:00
Clemens Schwaighofer
b97ca6f064 v0.19.0: add http basic auth creator method 2025-07-26 11:27:10 +09:00
Clemens Schwaighofer
d1ea9874da Add HTTP basic auth builder 2025-07-26 11:26:09 +09:00
11 changed files with 298 additions and 15 deletions

View File

@@ -3,3 +3,5 @@
- [x] stub files .pyi
- [ ] Add tests for all, we need 100% test coverage
- [x] Log: add custom format for "stack_correct" if set, this will override the normal stack block
- [ ] Log: add rotate for size based
- [ ] All folders and file names need to be revisited for naming and content collection

View File

@@ -1,7 +1,7 @@
# MARK: Project info
[project]
name = "corelibs"
version = "0.18.2"
version = "0.22.1"
description = "Collection of utils for Python scripts"
readme = "README.md"
requires-python = ">=3.13"

View File

@@ -4,7 +4,12 @@ Various debug helpers
import traceback
import os
import sys
from typing import Tuple, Type
from types import TracebackType
# _typeshed.OptExcInfo
OptExcInfo = Tuple[None, None, None] | Tuple[Type[BaseException], BaseException, TracebackType]
def call_stack(
start: int = 0,
@@ -41,4 +46,30 @@ def call_stack(
# print(f"* HERE: {dump_data(stack)}")
return f"{separator}".join(f"{os.path.basename(f.filename)}:{f.name}:{f.lineno}" for f in __stack)
def exception_stack(
exc_stack: OptExcInfo | None = None,
separator: str = ' -> '
) -> str:
"""
Format an exception traceback; if no exc_info tuple is passed, fetch it via sys.exc_info()
Keyword Arguments:
exc_stack {OptExcInfo | None} -- exception info tuple as returned by sys.exc_info() (default: {None})
separator {str} -- separator between the formatted stack frames (default: {' -> '})
Returns:
str -- exception stack formatted as "file:function:line" entries joined by the separator
"""
if exc_stack is not None:
_, _, exc_traceback = exc_stack
else:
_, _, exc_traceback = sys.exc_info()
stack = traceback.extract_tb(exc_traceback)
if not separator:
separator = ' -> '
# print(f"* HERE: {dump_data(stack)}")
return f"{separator}".join(f"{os.path.basename(f.filename)}:{f.name}:{f.lineno}" for f in stack)
# __END__
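The new exception_stack mirrors call_stack but walks the active exception's traceback instead of the call stack. A minimal usage sketch based on the signature above (the import path is the one used by the test script further down in this diff):

import sys
from corelibs.debug_handling.debug_helpers import exception_stack

try:
    1 / 0
except ZeroDivisionError:
    # with no argument the helper fetches sys.exc_info() itself
    print(exception_stack())
    # or pass the exc_info tuple and a custom separator explicitly
    print(exception_stack(sys.exc_info(), separator=' >> '))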

View File

@@ -32,4 +32,6 @@ def jmespath_search(search_data: dict[Any, Any] | list[Any], search_params: str)
raise ValueError(f"Type error for search_params: {excp}") from excp
return search_result
# TODO: compile jmespath setup
# __END__
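The new TODO refers to jmespath's precompiled expressions; a rough sketch of what that could look like using the upstream jmespath API (wiring this into jmespath_search is not part of this diff):

import jmespath

# compile once, reuse for many documents instead of re-parsing the expression
expr = jmespath.compile('baz[].aa')
print(expr.search({'baz': [{'aa': 1}, {'aa': 2}]}))  # [1, 2]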

View File

@@ -7,12 +7,14 @@ attach "init_worker_logging" with the set log_queue
import re
import logging.handlers
import logging
from datetime import datetime
import time
from pathlib import Path
import atexit
from typing import MutableMapping, TextIO, TypedDict, Any, TYPE_CHECKING, cast
from corelibs.logging_handling.logging_level_handling.logging_level import LoggingLevel
from corelibs.string_handling.text_colors import Colors
from corelibs.debug_handling.debug_helpers import call_stack
from corelibs.debug_handling.debug_helpers import call_stack, exception_stack
if TYPE_CHECKING:
from multiprocessing import Queue
@@ -23,6 +25,7 @@ class LogSettings(TypedDict):
"""log settings, for Log setup"""
log_level_console: LoggingLevel
log_level_file: LoggingLevel
per_run_log: bool
console_enabled: bool
console_color_output_enabled: bool
add_start_info: bool
@@ -119,6 +122,9 @@ class LogParent:
self.log_queue: 'Queue[str] | None' = None
self.handlers: dict[str, Any] = {}
def __del__(self):
self._cleanup()
# FIXME: we need to add a custom formatter to add stack level listing if we want to
# Important note: although they exist, it is recommended to use self.logger.NAME directly
# so that the correct filename, method and line number is set
@@ -225,11 +231,13 @@ class LogParent:
if extra is None:
extra = {}
extra['stack_trace'] = call_stack(skip_last=2)
extra['exception_trace'] = exception_stack()
# write to console first with extra flag for filtering in file
if log_error:
self.logger.log(
LoggingLevel.ERROR.value,
f"<=EXCEPTION> {msg}", *args, extra=dict(extra) | {'console': True}, stacklevel=2
f"<=EXCEPTION={extra['exception_trace']}> {msg} [{extra['stack_trace']}]",
*args, extra=dict(extra) | {'console': True}, stacklevel=2
)
self.logger.log(LoggingLevel.EXCEPTION.value, msg, *args, exc_info=True, extra=extra, stacklevel=2)
@@ -278,6 +286,15 @@ class LogParent:
return False
return True
def _cleanup(self):
"""cleanup for any open queues in case we have an abort"""
if not self.log_queue:
return
self.flush()
# Close the queue properly
self.log_queue.close()
self.log_queue.join_thread()
# MARK: log level handling
def set_log_level(self, handler_name: str, log_level: LoggingLevel) -> bool:
"""
@@ -390,6 +407,7 @@ class Log(LogParent):
DEFAULT_LOG_SETTINGS: LogSettings = {
"log_level_console": DEFAULT_LOG_LEVEL_CONSOLE,
"log_level_file": DEFAULT_LOG_LEVEL_FILE,
"per_run_log": False,
"console_enabled": True,
"console_color_output_enabled": True,
"add_start_info": True,
@@ -438,7 +456,7 @@ class Log(LogParent):
# in the file writer too, for the ones where color is set BEFORE the format
# Any is logging.StreamHandler, logging.FileHandler and all logging.handlers.*
self.handlers: dict[str, Any] = {}
self.add_handler('file_handler', self.__create_timed_rotating_file_handler(
self.add_handler('file_handler', self.__create_file_handler(
'file_handler', self.log_settings['log_level_file'], log_path)
)
if self.log_settings['console_enabled']:
@@ -490,6 +508,7 @@ class Log(LogParent):
default_log_settings[__log_entry] = LoggingLevel.from_any(__log_level)
# check bool
for __log_entry in [
"per_run_log",
"console_enabled",
"console_color_output_enabled",
"add_start_info",
@@ -564,24 +583,35 @@ class Log(LogParent):
return console_handler
# MARK: file handler
def __create_timed_rotating_file_handler(
def __create_file_handler(
self, handler_name: str,
log_level_file: LoggingLevel, log_path: Path,
# for TimedRotating, if per_run_log is off
when: str = "D", interval: int = 1, backup_count: int = 0
) -> logging.handlers.TimedRotatingFileHandler:
) -> logging.handlers.TimedRotatingFileHandler | logging.FileHandler:
# file logger
# when: S/M/H/D/W0-W6/midnight
# interval: how many, 1D = every day
# backup_count: how many old to keep, 0 = all
if not self.validate_log_level(log_level_file):
log_level_file = self.DEFAULT_LOG_LEVEL_FILE
file_handler = logging.handlers.TimedRotatingFileHandler(
filename=log_path,
encoding="utf-8",
when=when,
interval=interval,
backupCount=backup_count
)
if self.log_settings['per_run_log']:
# log path: strip the stem, append date + time + milliseconds, then re-add the suffix
now = datetime.now()
# derive the millisecond part from the microseconds
new_stem = f"{log_path.stem}.{now.strftime('%Y-%m-%d_%H-%M-%S')}.{now.microsecond // 1000:03d}"
file_handler = logging.FileHandler(
filename=log_path.with_name(f"{new_stem}{log_path.suffix}"),
encoding="utf-8",
)
else:
file_handler = logging.handlers.TimedRotatingFileHandler(
filename=log_path,
encoding="utf-8",
when=when,
interval=interval,
backupCount=backup_count
)
formatter_file_handler = logging.Formatter(
(
# time stamp
@@ -617,6 +647,7 @@ class Log(LogParent):
if log_queue is None:
return
self.log_queue = log_queue
atexit.register(self._cleanup)
self.listener = logging.handlers.QueueListener(
self.log_queue,
*self.handlers.values(),
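For the new per_run_log mode, the file name scheme boils down to inserting a timestamp between the stem and the suffix; a standalone sketch of just that naming logic (log_path here is a placeholder):

from datetime import datetime
from pathlib import Path

log_path = Path("script.log")  # placeholder path
now = datetime.now()
# date + time + millisecond part inserted before the suffix, e.g. script.2025-08-01_16-01-50.123.log
new_stem = f"{log_path.stem}.{now.strftime('%Y-%m-%d_%H-%M-%S')}.{now.microsecond // 1000:03d}"
print(log_path.with_name(f"{new_stem}{log_path.suffix}"))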

View File

@@ -0,0 +1,20 @@
"""
Various HTTP auth helpers
"""
from base64 import b64encode
def basic_auth(username: str, password: str) -> str:
"""
Build an HTTP basic auth header value, mainly for debugging
Arguments:
username {str} -- user name for the basic auth credentials
password {str} -- password for the basic auth credentials
Returns:
str -- "Basic <base64 token>" value for the Authorization header
"""
token = b64encode(f"{username}:{password}".encode('utf-8')).decode("ascii")
return f'Basic {token}'
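A short usage sketch for the new helper; the module path in the import is an assumption, since the file name is not shown in this view:

from corelibs.http_handling.http_auth import basic_auth  # assumed module path

headers = {"Authorization": basic_auth("user", "secret")}
print(headers["Authorization"])  # Basic dXNlcjpzZWNyZXQ=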

View File

@@ -2,8 +2,18 @@
Current timestamp strings and time zones
"""
import re
from datetime import datetime
from zoneinfo import ZoneInfo, ZoneInfoNotFoundError
from corelibs.var_handling.var_helpers import is_float
class TimeParseError(Exception):
"""Custom exception for time parsing errors."""
class TimeUnitError(Exception):
"""Custom exception for invalid time units."""
class TimestampStrings:
@@ -24,3 +34,79 @@ class TimestampStrings:
self.timestamp = self.timestamp_now.strftime("%Y-%m-%d %H:%M:%S")
self.timestamp_tz = self.timestamp_now_tz.strftime("%Y-%m-%d %H:%M:%S %Z")
self.timestamp_file = self.timestamp_now.strftime("%Y-%m-%d_%H%M%S")
def convert_to_seconds(time_string: str | int | float) -> int:
"""
Convert a string with time units into seconds
The following units are allowed
Y: 365 days
M: 30 days
d, h, m, s
Arguments:
time_string {str | int | float} -- time string like "2h 30m 45s", or a plain number
Raises:
TimeParseError: if a unit appears more than once
TimeUnitError: if a unit is not a valid unit name
Returns:
int -- total number of seconds
"""
# skip out if this is a number of any type
# numbers will be converted to float, rounded and then converted to int
if is_float(time_string):
return int(round(float(time_string)))
time_string = str(time_string)
# Define time unit conversion factors
unit_factors: dict[str, int] = {
'Y': 31536000, # 365 days * 86400 seconds/day
'M': 2592000, # 30 days * 86400 seconds/day
'd': 86400, # 1 day in seconds
'h': 3600, # 1 hour in seconds
'm': 60, # minutes to seconds
's': 1 # 1 second in seconds
}
long_unit_names: dict[str, str] = {
'year': 'Y',
'years': 'Y',
'month': 'M',
'months': 'M',
'day': 'd',
'days': 'd',
'hour': 'h',
'hours': 'h',
'minute': 'm',
'minutes': 'm',
'min': 'm',
'second': 's',
'seconds': 's',
'sec': 's',
}
total_seconds = 0
seen_units: list[str] = [] # Track units that have been encountered
# Use regex to match number and time unit pairs
for match in re.finditer(r'(\d+)\s*([a-zA-Z]+)', time_string):
value, unit = int(match.group(1)), match.group(2)
# full name check, fallback to original name
unit = long_unit_names.get(unit.lower(), unit)
# Check for duplicate units
if unit in seen_units:
raise TimeParseError(f"Unit '{unit}' appears more than once.")
# Check invalid unit
if unit not in unit_factors:
raise TimeUnitError(f"Unit '{unit}' is not a valid unit name.")
# Add to total seconds based on the units
if unit in unit_factors:
total_seconds += value * unit_factors[unit]
seen_units.append(unit)
return total_seconds
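A few worked examples for convert_to_seconds, using the import path from the test script below:

from corelibs.string_handling.timestamp_strings import convert_to_seconds

print(convert_to_seconds("2h 30m 45s"))        # 2*3600 + 30*60 + 45 = 9045
print(convert_to_seconds("1 hour 30 minutes")) # long unit names -> 5400
print(convert_to_seconds("5551.56"))           # plain numbers are rounded -> 5552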

View File

@@ -0,0 +1,52 @@
#!/usr/bin/env python3
"""
jmes path testing
"""
from corelibs.debug_handling.dump_data import dump_data
from corelibs.json_handling.jmespath_helper import jmespath_search
def main() -> None:
"""
Comment
"""
__set = {
'a': 'b',
'foobar': [1, 2, 'a'],
'bar': {
'a': 1,
'b': 'c'
},
'baz': [
{
'aa': 1,
'ab': 'cc'
},
{
'ba': 2,
'bb': 'dd'
},
],
'foo': {
'a': [1, 2, 3],
'b': ['a', 'b', 'c']
}
}
__get = [
'a',
'bar.a',
'foo.a',
'baz[].aa'
]
for __jmespath in __get:
result = jmespath_search(__set, __jmespath)
print(f"GET {__jmespath}: {dump_data(result)}")
if __name__ == "__main__":
main()
# __END__

View File

@@ -3,9 +3,11 @@ Log logging_handling.log testing
"""
# import atexit
import sys
from pathlib import Path
# this is for testing only
from corelibs.logging_handling.log import Log, Logger
from corelibs.debug_handling.debug_helpers import exception_stack, call_stack
from corelibs.logging_handling.logging_level_handling.logging_level import LoggingLevel
@@ -22,6 +24,7 @@ def main():
# "log_level_console": None,
"log_level_file": 'DEBUG',
# "console_color_output_enabled": False,
"per_run_log": True
}
)
logn = Logger(log.get_logger_settings())
@@ -78,6 +81,8 @@ def main():
__test = 5 / 0
print(f"Divied: {__test}")
except ZeroDivisionError as e:
print(f"** sys.exec_info(): {sys.exc_info()}")
print(f"** sys.exec_info(): [{exception_stack()}] | [{exception_stack(sys.exc_info())}] | [{call_stack()}]")
log.logger.critical("Divison through zero: %s", e)
log.exception("Divison through zero: %s", e)

View File

@@ -0,0 +1,54 @@
#!/usr/bin/env python3
"""
timestamp string checks
"""
from corelibs.string_handling.timestamp_strings import convert_to_seconds, TimeParseError, TimeUnitError
def main() -> None:
"""
Comment
"""
test_cases = [
"5M 6d", # 5 months, 6 days
"2h 30m 45s", # 2 hours, 30 minutes, 45 seconds
"1Y 2M 3d", # 1 year, 2 months, 3 days
"1h", # 1 hour
"30m", # 30 minutes
"2 hours 15 minutes", # 2 hours, 15 minutes
"1d 12h", # 1 day, 12 hours
"3M 2d 4h", # 3 months, 2 days, 4 hours
"45s", # 45 seconds
"1 year 2 months", # 1 year, 2 months
"2Y 6M 15d 8h 30m 45s", # Complex example
# ]
# invalid_test_cases = [
"5M 6d 2M", # months appears twice
"2h 30m 45s 1h", # hours appears twice
"1d 2 days", # days appears twice (short and long form)
"30m 45 minutes", # minutes appears twice
"1Y 2 years", # years appears twice
"1x 2 yrs", # invalid names
123, # int
789.12, # float
456.56, # float, high
"4566", # int as string
"5551.12", # float as string
"5551.56", # float, high as string
]
for time_string in test_cases:
try:
result = convert_to_seconds(time_string)
print(f"{time_string} => {result}")
except (TimeParseError, TimeUnitError) as e:
print(f"Error encountered for {time_string}: {type(e).__name__}: {e}")
if __name__ == "__main__":
main()
# __END__

uv.lock generated
View File

@@ -1,5 +1,5 @@
version = 1
revision = 2
revision = 3
requires-python = ">=3.13"
[[package]]
@@ -44,7 +44,7 @@ wheels = [
[[package]]
name = "corelibs"
version = "0.18.1"
version = "0.22.0"
source = { editable = "." }
dependencies = [
{ name = "jmespath" },