diff --git a/src/corelibs/logging_handling/log.py b/src/corelibs/logging_handling/log.py index 24fec14..d37cc67 100644 --- a/src/corelibs/logging_handling/log.py +++ b/src/corelibs/logging_handling/log.py @@ -20,12 +20,7 @@ if TYPE_CHECKING: # MARK: Log settings TypedDict class LogSettings(TypedDict): - """ - log settings - - Arguments: - TypedDict {_type_} -- _description_ - """ + """log settings, for Log setup""" log_level_console: LoggingLevel log_level_file: LoggingLevel console_enabled: bool @@ -35,6 +30,12 @@ class LogSettings(TypedDict): log_queue: 'Queue[str] | None' +class LoggerInit(TypedDict): + """for Logger init""" + logger: logging.Logger + log_queue: 'Queue[str] | None' + + # MARK: Custom color filter class CustomConsoleFormatter(logging.Formatter): """ @@ -81,6 +82,8 @@ class CustomHandlerFilter(logging.Filter): """ Add a custom handler for filtering """ + HANDLER_NAME_FILTER_EXCEPTION: str = 'console' + def __init__(self, handler_name: str, filter_exceptions: bool = False): super().__init__(name=handler_name) self.handler_name = handler_name @@ -88,7 +91,7 @@ class CustomHandlerFilter(logging.Filter): def filter(self, record: logging.LogRecord) -> bool: # if console and exception do not show - if self.handler_name == 'console' and self.filter_exceptions: + if self.handler_name == self.HANDLER_NAME_FILTER_EXCEPTION and self.filter_exceptions: return record.levelname != "EXCEPTION" # if cnosole entry is true and traget file filter if hasattr(record, 'console') and getattr(record, 'console') is True and self.handler_name == 'file': @@ -99,295 +102,21 @@ class CustomHandlerFilter(logging.Filter): # return record.levelname != "EXCEPTION" -# MARK: Log class -class Log: +# MARK: Parent class +class LogParent: """ - logger setup + Parent class with general methods + used by Log and Logger """ # spacer lenght characters and the character SPACER_CHAR: str = '=' SPACER_LENGTH: int = 32 - # default logging level - DEFAULT_LOG_LEVEL: LoggingLevel = LoggingLevel.WARNING - DEFAULT_LOG_LEVEL_FILE: LoggingLevel = LoggingLevel.DEBUG - DEFAULT_LOG_LEVEL_CONSOLE: LoggingLevel = LoggingLevel.WARNING - # default settings - DEFAULT_LOG_SETTINGS: LogSettings = { - "log_level_console": DEFAULT_LOG_LEVEL_CONSOLE, - "log_level_file": DEFAULT_LOG_LEVEL_FILE, - "console_enabled": True, - "console_color_output_enabled": True, - "add_start_info": True, - "add_end_info": False, - "log_queue": None, - } - # MARK: constructor - def __init__( - self, - log_path: Path, - log_name: str, - log_settings: dict[str, 'LoggingLevel | str | bool | None | Queue[str]'] | LogSettings | None = None, - other_handlers: dict[str, Any] | None = None - ): - # add new level for alert, emergecny and exception - logging.addLevelName(LoggingLevel.ALERT.value, LoggingLevel.ALERT.name) - logging.addLevelName(LoggingLevel.EMERGENCY.value, LoggingLevel.EMERGENCY.name) - logging.addLevelName(LoggingLevel.EXCEPTION.value, LoggingLevel.EXCEPTION.name) - # parse the logging settings - self.log_settings = self.__parse_log_settings(log_settings) - # if path, set log name with .log - # if log name with .log, strip .log for naming - if log_path.is_dir(): - __log_file_name = re.sub(r'[^a-zA-Z0-9]', '', log_name) - if not log_name.endswith('.log'): - log_path = log_path.joinpath(Path(__log_file_name).with_suffix('.log')) - else: - log_path = log_path.joinpath(__log_file_name) - elif not log_path.suffix == '.log': - # add .log if the path is a file but without .log - log_path = log_path.with_suffix('.log') - # stip .log from the log name if 
set - if not log_name.endswith('.log'): - log_name = Path(log_name).stem - # general log name - self.log_name = log_name - - self.log_queue: 'Queue[str] | None' = None - self.listener: logging.handlers.QueueListener | None = None + def __init__(self): self.logger: logging.Logger - - # setup handlers - # NOTE if console with color is set first, some of the color formatting is set - # in the file writer too, for the ones where color is set BEFORE the format - # Any is logging.StreamHandler, logging.FileHandler and all logging.handlers.* + self.log_queue: 'Queue[str] | None' = None self.handlers: dict[str, Any] = {} - self.add_handler('file_handler', self.__create_timed_rotating_file_handler( - self.log_settings['log_level_file'], log_path) - ) - if self.log_settings['console_enabled']: - # console - self.add_handler('stream_handler', self.__create_console_handler( - self.log_settings['log_level_console']) - ) - # add other handlers, - if other_handlers is not None: - for handler_key, handler in other_handlers.items(): - self.add_handler(handler_key, handler) - # init listener if we have a log_queue set - self.__init_listener(self.log_settings['log_queue']) - - # overall logger start - self.__init_log(log_name) - # if requests set a start log - if self.log_settings['add_start_info'] is True: - self.break_line('START') - - # MARK: deconstructor - def __del__(self): - """ - Call when class is destroyed, make sure the listender is closed or else we throw a thread error - """ - if self.log_settings['add_end_info']: - self.break_line('END') - self.stop_listener() - - # MARK: parse log settings - def __parse_log_settings( - self, - log_settings: dict[str, 'LoggingLevel | str | bool | None | Queue[str]'] | LogSettings | None - ) -> LogSettings: - # skip with defaul it not set - if log_settings is None: - return self.DEFAULT_LOG_SETTINGS - # check entries - default_log_settings = self.DEFAULT_LOG_SETTINGS - # check log levels - for __log_entry in ['log_level_console', 'log_level_file']: - if log_settings.get(__log_entry) is None: - continue - # if not valid reset to default, if not in default set to WARNING - if not self.validate_log_level(__log_level := log_settings.get(__log_entry, '')): - __log_level = self.DEFAULT_LOG_SETTINGS.get( - __log_entry, self.DEFAULT_LOG_LEVEL - ) - default_log_settings[__log_entry] = LoggingLevel.from_any(__log_level) - # check bool - for __log_entry in [ - "console_enabled", - "console_color_output_enabled", - "add_start_info", - "add_end_info", - ]: - if log_settings.get(__log_entry) is None: - continue - if not isinstance(__setting := log_settings.get(__log_entry, ''), bool): - __setting = self.DEFAULT_LOG_SETTINGS.get(__log_entry, True) - default_log_settings[__log_entry] = __setting - # check log queue - __setting = log_settings.get('log_queue', self.DEFAULT_LOG_SETTINGS['log_queue']) - if __setting is not None: - __setting = cast('Queue[str]', __setting) - default_log_settings['log_queue'] = __setting - return default_log_settings - - # def __filter_exceptions(self, record: logging.LogRecord) -> bool: - # return record.levelname != "EXCEPTION" - - # MARK: add a handler - def add_handler( - self, - handler_name: str, - handler: Any - ) -> bool: - """ - Add a log handler to the handlers dict - - Arguments: - handler_name {str} -- _description_ - handler {Any} -- _description_ - """ - if self.handlers.get(handler_name): - return False - if self.listener is not None or hasattr(self, 'logger'): - raise ValueError( - f"Cannot add handler {handler_name}: 
{handler.get_name()} because logger is already running" - ) - # TODO: handler must be some handler type, how to check? - self.handlers[handler_name] = handler - return True - - # MARK: console handler - def __create_console_handler( - self, log_level_console: LoggingLevel = LoggingLevel.WARNING, filter_exceptions: bool = True - ) -> logging.StreamHandler[TextIO]: - # console logger - if not self.validate_log_level(log_level_console): - log_level_console = self.DEFAULT_LOG_LEVEL_CONSOLE - console_handler = logging.StreamHandler() - # format layouts - format_string = ( - '[%(asctime)s.%(msecs)03d] ' - '[%(name)s] ' - '[%(filename)s:%(funcName)s:%(lineno)d] ' - '<%(levelname)s> ' - '%(message)s' - ) - format_date = "%Y-%m-%d %H:%M:%S" - # color or not - if self.log_settings['console_color_output_enabled']: - formatter_console = CustomConsoleFormatter(format_string, datefmt=format_date) - else: - formatter_console = logging.Formatter(format_string, datefmt=format_date) - console_handler.set_name('console') - console_handler.setLevel(log_level_console.name) - # do not show exceptions logs on console - console_handler.addFilter(CustomHandlerFilter('console', filter_exceptions)) - console_handler.setFormatter(formatter_console) - return console_handler - - # MARK: file handler - def __create_timed_rotating_file_handler( - self, log_level_file: LoggingLevel, log_path: Path, - when: str = "D", interval: int = 1, backup_count: int = 0 - ) -> logging.handlers.TimedRotatingFileHandler: - # file logger - # when: S/M/H/D/W0-W6/midnight - # interval: how many, 1D = every day - # backup_count: how many old to keep, 0 = all - if not self.validate_log_level(log_level_file): - log_level_file = self.DEFAULT_LOG_LEVEL_FILE - file_handler = logging.handlers.TimedRotatingFileHandler( - filename=log_path, - encoding="utf-8", - when=when, - interval=interval, - backupCount=backup_count - ) - formatter_file_handler = logging.Formatter( - ( - # time stamp - '[%(asctime)s.%(msecs)03d] ' - # log name - '[%(name)s] ' - # filename + pid - '[%(filename)s:%(process)d] ' - # path + func + line number - '[%(pathname)s:%(funcName)s:%(lineno)d] ' - # error level - '<%(levelname)s> ' - # message - '%(message)s' - ), - datefmt="%Y-%m-%dT%H:%M:%S", - ) - file_handler.set_name('file_timed_rotate') - file_handler.setLevel(log_level_file.name) - # do not show errors flagged with console (they are from exceptions) - file_handler.addFilter(CustomHandlerFilter('file')) - file_handler.setFormatter(formatter_file_handler) - return file_handler - - # MARK: init listener - def __init_listener(self, log_queue: 'Queue[str] | None' = None): - """ - If we have a Queue option start the logging queue - - Keyword Arguments: - log_queue {Queue[str] | None} -- _description_ (default: {None}) - """ - if log_queue is None: - return - self.log_queue = log_queue - self.listener = logging.handlers.QueueListener( - self.log_queue, - *self.handlers.values(), - respect_handler_level=True - ) - self.listener.start() - - # MARK: init main log - def __init_log(self, log_name: str) -> None: - """ - Initialize the main loggger - """ - queue_handler: logging.handlers.QueueHandler | None = None - if self.log_queue is not None: - queue_handler = logging.handlers.QueueHandler(self.log_queue) - # overall logger settings - self.logger = logging.getLogger(log_name) - # add all the handlers - if queue_handler is None: - for handler in self.handlers.values(): - self.logger.addHandler(handler) - else: - self.logger.addHandler(queue_handler) - # set maximum logging level 
for all logging output - # log level filtering is done per handler - self.logger.setLevel(logging.DEBUG) - # short name - self.lg = self.logger - self.l = self.logger - - # MARK: init logger for Fork/Thread - @staticmethod - def init_worker_logging(log_queue: 'Queue[str]') -> logging.Logger: - """ - This initalizes a logger that can be used in pool/thread queue calls - """ - queue_handler = logging.handlers.QueueHandler(log_queue) - # getLogger call MUST be WITHOUT and logger name - root_logger = logging.getLogger() - # base logging level, filtering is done in the handlers - root_logger.setLevel(logging.DEBUG) - root_logger.handlers.clear() - root_logger.addHandler(queue_handler) - - # for debug only - root_logger.debug('[LOGGER] Init log: %s - %s', log_queue, root_logger.handlers) - - return root_logger # FIXME: we need to add a custom formater to add stack level listing if we want to # Important note, although they exist, it is recommended to use self.logger.NAME directly @@ -503,7 +232,6 @@ class Log: ) self.logger.log(LoggingLevel.EXCEPTION.value, msg, *args, exc_info=True, extra=extra, stacklevel=2) - # MARK: break line def break_line(self, info: str = "BREAK"): """ add a break line as info level @@ -527,7 +255,7 @@ class Log: Returns: bool -- _description_ """ - if not self.listener or not self.log_queue: + if not self.log_queue: return False try: @@ -549,14 +277,6 @@ class Log: return False return True - def stop_listener(self): - """ - stop the listener - """ - if self.listener is not None: - self.flush() - self.listener.stop() - # MARK: log level handling def set_log_level(self, handler_name: str, log_level: LoggingLevel) -> bool: """ @@ -597,7 +317,7 @@ class Log: LoggingLevel -- _description_ """ try: - return self.handlers[handler_name] + return LoggingLevel.from_any(self.handlers[handler_name].level) except IndexError: return LoggingLevel.NOTSET @@ -635,4 +355,334 @@ class Log: except ValueError: return LoggingLevel.from_string(Log.DEFAULT_LOG_LEVEL.name).value + +# MARK: Logger +class Logger(LogParent): + """ + The class we can pass on to other clases without re-init the class itself + NOTE: if no queue object is handled over the logging level change might not take immediate effect + """ + + def __init__(self, logger_settings: LoggerInit): + LogParent.__init__(self) + self.logger = logger_settings['logger'] + self.lg = self.logger + self.l = self.logger + self.handlers = {str(_handler.name): _handler for _handler in self.logger.handlers} + self.log_queue = logger_settings['log_queue'] + + +# MARK: LogSetup class +class Log(LogParent): + """ + logger setup + """ + + # spacer lenght characters and the character + SPACER_CHAR: str = '=' + SPACER_LENGTH: int = 32 + # default logging level + DEFAULT_LOG_LEVEL: LoggingLevel = LoggingLevel.WARNING + DEFAULT_LOG_LEVEL_FILE: LoggingLevel = LoggingLevel.DEBUG + DEFAULT_LOG_LEVEL_CONSOLE: LoggingLevel = LoggingLevel.WARNING + # default settings + DEFAULT_LOG_SETTINGS: LogSettings = { + "log_level_console": DEFAULT_LOG_LEVEL_CONSOLE, + "log_level_file": DEFAULT_LOG_LEVEL_FILE, + "console_enabled": True, + "console_color_output_enabled": True, + "add_start_info": True, + "add_end_info": False, + "log_queue": None, + } + + # MARK: constructor + def __init__( + self, + log_path: Path, + log_name: str, + log_settings: dict[str, 'LoggingLevel | str | bool | None | Queue[str]'] | LogSettings | None = None, + other_handlers: dict[str, Any] | None = None + ): + LogParent.__init__(self) + # add new level for alert, emergecny and exception + 
logging.addLevelName(LoggingLevel.ALERT.value, LoggingLevel.ALERT.name)
+        logging.addLevelName(LoggingLevel.EMERGENCY.value, LoggingLevel.EMERGENCY.name)
+        logging.addLevelName(LoggingLevel.EXCEPTION.value, LoggingLevel.EXCEPTION.name)
+        # parse the logging settings
+        self.log_settings = self.__parse_log_settings(log_settings)
+        # if path, set log name with .log
+        # if log name with .log, strip .log for naming
+        if log_path.is_dir():
+            __log_file_name = re.sub(r'[^a-zA-Z0-9]', '', log_name)
+            if not log_name.endswith('.log'):
+                log_path = log_path.joinpath(Path(__log_file_name).with_suffix('.log'))
+            else:
+                log_path = log_path.joinpath(__log_file_name)
+        elif not log_path.suffix == '.log':
+            # add .log if the path is a file but without .log
+            log_path = log_path.with_suffix('.log')
+        # strip .log from the log name if set
+        if not log_name.endswith('.log'):
+            log_name = Path(log_name).stem
+        # general log name
+        self.log_name = log_name
+
+        self.log_queue: 'Queue[str] | None' = None
+        self.listener: logging.handlers.QueueListener | None = None
+        self.logger: logging.Logger
+
+        # setup handlers
+        # NOTE if console with color is set first, some of the color formatting is set
+        # in the file writer too, for the ones where color is set BEFORE the format
+        # Any is logging.StreamHandler, logging.FileHandler and all logging.handlers.*
+        self.handlers: dict[str, Any] = {}
+        self.add_handler('file_handler', self.__create_timed_rotating_file_handler(
+            'file_handler', self.log_settings['log_level_file'], log_path)
+        )
+        if self.log_settings['console_enabled']:
+            # console
+            self.add_handler('stream_handler', self.__create_console_handler(
+                'stream_handler', self.log_settings['log_level_console'])
+            )
+        # add other handlers
+        if other_handlers is not None:
+            for handler_key, handler in other_handlers.items():
+                self.add_handler(handler_key, handler)
+        # init listener if we have a log_queue set
+        self.__init_listener(self.log_settings['log_queue'])
+
+        # overall logger start
+        self.__init_log(log_name)
+        # if requested, write a start marker
+        if self.log_settings['add_start_info'] is True:
+            self.break_line('START')
+
+    # MARK: destructor
+    def __del__(self):
+        """
+        Called when the class is destroyed; make sure the listener is closed or else we throw a thread error
+        """
+        if self.log_settings['add_end_info']:
+            self.break_line('END')
+        self.stop_listener()
+
+    # MARK: parse log settings
+    def __parse_log_settings(
+        self,
+        log_settings: dict[str, 'LoggingLevel | str | bool | None | Queue[str]'] | LogSettings | None
+    ) -> LogSettings:
+        # skip and use the defaults if nothing is set
+        if log_settings is None:
+            return self.DEFAULT_LOG_SETTINGS
+        # check entries
+        default_log_settings = self.DEFAULT_LOG_SETTINGS
+        # check log levels
+        for __log_entry in ['log_level_console', 'log_level_file']:
+            if log_settings.get(__log_entry) is None:
+                continue
+            # if not valid reset to default, if not in default set to WARNING
+            if not self.validate_log_level(__log_level := log_settings.get(__log_entry, '')):
+                __log_level = self.DEFAULT_LOG_SETTINGS.get(
+                    __log_entry, self.DEFAULT_LOG_LEVEL
+                )
+            default_log_settings[__log_entry] = LoggingLevel.from_any(__log_level)
+        # check bool
+        for __log_entry in [
+            "console_enabled",
+            "console_color_output_enabled",
+            "add_start_info",
+            "add_end_info",
+        ]:
+            if log_settings.get(__log_entry) is None:
+                continue
+            if not isinstance(__setting := log_settings.get(__log_entry, ''), bool):
+                __setting = self.DEFAULT_LOG_SETTINGS.get(__log_entry, True)
+            default_log_settings[__log_entry] = 
__setting + # check log queue + __setting = log_settings.get('log_queue', self.DEFAULT_LOG_SETTINGS['log_queue']) + if __setting is not None: + __setting = cast('Queue[str]', __setting) + default_log_settings['log_queue'] = __setting + return default_log_settings + + # def __filter_exceptions(self, record: logging.LogRecord) -> bool: + # return record.levelname != "EXCEPTION" + + # MARK: add a handler + def add_handler( + self, + handler_name: str, + handler: Any + ) -> bool: + """ + Add a log handler to the handlers dict + + Arguments: + handler_name {str} -- _description_ + handler {Any} -- _description_ + """ + if self.handlers.get(handler_name): + return False + if self.listener is not None or hasattr(self, 'logger'): + raise ValueError( + f"Cannot add handler {handler_name}: {handler.get_name()} because logger is already running" + ) + # TODO: handler must be some handler type, how to check? + self.handlers[handler_name] = handler + return True + + # MARK: console handler + def __create_console_handler( + self, handler_name: str, + log_level_console: LoggingLevel = LoggingLevel.WARNING, filter_exceptions: bool = True + ) -> logging.StreamHandler[TextIO]: + # console logger + if not self.validate_log_level(log_level_console): + log_level_console = self.DEFAULT_LOG_LEVEL_CONSOLE + console_handler = logging.StreamHandler() + # format layouts + format_string = ( + '[%(asctime)s.%(msecs)03d] ' + '[%(name)s] ' + '[%(filename)s:%(funcName)s:%(lineno)d] ' + '<%(levelname)s> ' + '%(message)s' + ) + format_date = "%Y-%m-%d %H:%M:%S" + # color or not + if self.log_settings['console_color_output_enabled']: + formatter_console = CustomConsoleFormatter(format_string, datefmt=format_date) + else: + formatter_console = logging.Formatter(format_string, datefmt=format_date) + console_handler.set_name(handler_name) + console_handler.setLevel(log_level_console.name) + # do not show exceptions logs on console + console_handler.addFilter(CustomHandlerFilter('console', filter_exceptions)) + console_handler.setFormatter(formatter_console) + return console_handler + + # MARK: file handler + def __create_timed_rotating_file_handler( + self, handler_name: str, + log_level_file: LoggingLevel, log_path: Path, + when: str = "D", interval: int = 1, backup_count: int = 0 + ) -> logging.handlers.TimedRotatingFileHandler: + # file logger + # when: S/M/H/D/W0-W6/midnight + # interval: how many, 1D = every day + # backup_count: how many old to keep, 0 = all + if not self.validate_log_level(log_level_file): + log_level_file = self.DEFAULT_LOG_LEVEL_FILE + file_handler = logging.handlers.TimedRotatingFileHandler( + filename=log_path, + encoding="utf-8", + when=when, + interval=interval, + backupCount=backup_count + ) + formatter_file_handler = logging.Formatter( + ( + # time stamp + '[%(asctime)s.%(msecs)03d] ' + # log name + '[%(name)s] ' + # filename + pid + '[%(filename)s:%(process)d] ' + # path + func + line number + '[%(pathname)s:%(funcName)s:%(lineno)d] ' + # error level + '<%(levelname)s> ' + # message + '%(message)s' + ), + datefmt="%Y-%m-%dT%H:%M:%S", + ) + file_handler.set_name(handler_name) + file_handler.setLevel(log_level_file.name) + # do not show errors flagged with console (they are from exceptions) + file_handler.addFilter(CustomHandlerFilter('file')) + file_handler.setFormatter(formatter_file_handler) + return file_handler + + # MARK: init listener + def __init_listener(self, log_queue: 'Queue[str] | None' = None): + """ + If we have a Queue option start the logging queue + + Keyword Arguments: + 
log_queue {Queue[str] | None} -- _description_ (default: {None})
+        """
+        if log_queue is None:
+            return
+        self.log_queue = log_queue
+        self.listener = logging.handlers.QueueListener(
+            self.log_queue,
+            *self.handlers.values(),
+            respect_handler_level=True
+        )
+        self.listener.start()
+
+    def stop_listener(self):
+        """
+        stop the listener
+        """
+        if self.listener is not None:
+            self.flush()
+            self.listener.stop()
+
+    # MARK: init main log
+    def __init_log(self, log_name: str) -> None:
+        """
+        Initialize the main logger
+        """
+        queue_handler: logging.handlers.QueueHandler | None = None
+        if self.log_queue is not None:
+            queue_handler = logging.handlers.QueueHandler(self.log_queue)
+        # overall logger settings
+        self.logger = logging.getLogger(log_name)
+        # add all the handlers
+        if queue_handler is None:
+            for handler in self.handlers.values():
+                self.logger.addHandler(handler)
+        else:
+            self.logger.addHandler(queue_handler)
+        # set maximum logging level for all logging output
+        # log level filtering is done per handler
+        self.logger.setLevel(logging.DEBUG)
+        # short name
+        self.lg = self.logger
+        self.l = self.logger
+
+    # MARK: init logger for Fork/Thread
+    @staticmethod
+    def init_worker_logging(log_queue: 'Queue[str]') -> logging.Logger:
+        """
+        This initializes a logger that can be used in pool/thread queue calls
+        """
+        queue_handler = logging.handlers.QueueHandler(log_queue)
+        # the getLogger call MUST be made without any logger name
+        root_logger = logging.getLogger()
+        # base logging level, filtering is done in the handlers
+        root_logger.setLevel(logging.DEBUG)
+        root_logger.handlers.clear()
+        root_logger.addHandler(queue_handler)
+
+        # for debug only
+        root_logger.debug('[LOGGER] Init log: %s - %s', log_queue, root_logger.handlers)
+
+        return root_logger
+
+    def get_logger_settings(self) -> LoggerInit:
+        """
+        get the logger settings we need to init the Logger class
+
+        Returns:
+            LoggerInit -- _description_
+        """
+        return {
+            "logger": self.logger,
+            "log_queue": self.log_queue
+        }
+
 # __END__
diff --git a/test-run/logging_handling/log.py b/test-run/logging_handling/log.py
index b119d2a..35433b9 100644
--- a/test-run/logging_handling/log.py
+++ b/test-run/logging_handling/log.py
@@ -5,7 +5,7 @@ Log logging_handling.log testing
 # import atexit
 from pathlib import Path
 # this is for testing only
-from corelibs.logging_handling.log import Log
+from corelibs.logging_handling.log import Log, Logger
 from corelibs.logging_handling.logging_level_handling.logging_level import LoggingLevel
 
 
@@ -18,16 +18,19 @@ def main():
         log_path=script_path.joinpath('log', 'test.log'),
         log_name="Test Log",
         log_settings={
-            # "log_level_console": 'DEBUG',
-            "log_level_console": None,
+            "log_level_console": 'DEBUG',
+            # "log_level_console": None,
             "log_level_file": 'DEBUG',
             # "console_color_output_enabled": False,
         }
     )
+    logn = Logger(log.get_logger_settings())
     log.logger.debug('[NORMAL] Debug test: %s', log.logger.name)
     log.lg.debug('[NORMAL] Debug test: %s', log.logger.name)
     log.debug('[NORMAL-] Debug test: %s', log.logger.name)
+    logn.lg.debug('[NORMAL N] Debug test: %s', log.logger.name)
+    logn.debug('[NORMAL N-] Debug test: %s', log.logger.name)
     log.logger.info('[NORMAL] Info test: %s', log.logger.name)
     log.info('[NORMAL-] Info test: %s', log.logger.name)
     log.logger.warning('[NORMAL] Warning test: %s', log.logger.name)
@@ -79,7 +82,10 @@ def main():
         log.exception("Divison through zero: %s", e)
 
     for handler in log.logger.handlers:
-        print(f"Handler (logger) {handler} -> {handler.level} -> 
{LoggingLevel.from_any(handler.level)}") + print( + f"** Handler (logger) {handler} [{handler.name}] -> " + f"{handler.level} -> {LoggingLevel.from_any(handler.level)}" + ) for key, handler in log.handlers.items(): print(f"Handler (handlers) [{key}] {handler} -> {handler.level} -> {LoggingLevel.from_any(handler.level)}")