Compare commits
11 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
1226721bc0 | ||
|
|
a76eae0cc7 | ||
|
|
53cf2a6f48 | ||
|
|
fe69530b38 | ||
|
|
bf83c1c394 | ||
|
|
84ce43ab93 | ||
|
|
5e0765ee24 | ||
|
|
6edf9398b7 | ||
|
|
30bf9c1bcb | ||
|
|
0b59f3cc7a | ||
|
|
2544fad9ce |
18
README.md
18
README.md
@@ -1,27 +1,37 @@
|
||||
# CoreLibs for Python
|
||||
|
||||
This is a pip package that can be installed into any project and covers the following pars
|
||||
> [!warning]
|
||||
> This is pre-production, location of methods and names of paths can change
|
||||
|
||||
This is a pip package that can be installed into any project and covers the following parts
|
||||
|
||||
- logging update with exception logs
|
||||
- requests wrapper for easier auth pass on access
|
||||
- dict fingerprinting
|
||||
- jmespath search
|
||||
- dump outputs for data
|
||||
- json helpers for conten replace and output
|
||||
- dump outputs for data for debugging
|
||||
- progress printing
|
||||
- string formatting, time creation, byte formatting
|
||||
- Enum base class
|
||||
- SQLite simple IO class
|
||||
- Symmetric encryption
|
||||
|
||||
## Current list
|
||||
|
||||
- config_handling: simple INI config file data loader with check/convert/etc
|
||||
- csv_handling: csv dict writer helper
|
||||
- debug_handling: various debug helpers like data dumper, timer, utilization, etc
|
||||
- db_handling: SQLite interface class
|
||||
- encyption_handling: symmetric encryption
|
||||
- file_handling: crc handling for file content and file names, progress bar
|
||||
- json_handling: jmespath support and json date support
|
||||
- json_handling: jmespath support and json date support, replace content in dict with json paths
|
||||
- iterator_handling: list and dictionary handling support (search, fingerprinting, etc)
|
||||
- logging_handling: extend log and also error message handling
|
||||
- requests_handling: requests wrapper for better calls with auth headers
|
||||
- script_handling: pid lock file handling, abort timer
|
||||
- string_handling: byte format, datetime format, hashing, string formats for numbrers, double byte string format, etc
|
||||
- string_handling: byte format, datetime format, datetime compare, hashing, string formats for numbers, double byte string format, etc
|
||||
- var_handling: var type checkers, enum base class
|
||||
|
||||
## UV setup
|
||||
|
||||
|
||||
@@ -1,13 +1,14 @@
|
||||
# MARK: Project info
|
||||
[project]
|
||||
name = "corelibs"
|
||||
version = "0.26.0"
|
||||
version = "0.30.0"
|
||||
description = "Collection of utils for Python scripts"
|
||||
readme = "README.md"
|
||||
requires-python = ">=3.13"
|
||||
dependencies = [
|
||||
"cryptography>=46.0.3",
|
||||
"jmespath>=1.0.1",
|
||||
"jsonpath-ng>=1.7.0",
|
||||
"psutil>=7.0.0",
|
||||
"requests>=2.32.4",
|
||||
]
|
||||
@@ -28,6 +29,7 @@ build-backend = "hatchling.build"
|
||||
|
||||
[dependency-groups]
|
||||
dev = [
|
||||
"deepdiff>=8.6.1",
|
||||
"pytest>=8.4.1",
|
||||
"pytest-cov>=6.2.1",
|
||||
]
|
||||
@@ -61,3 +63,7 @@ ignore = [
|
||||
[tool.pylint.MASTER]
|
||||
# this is for the tests/etc folders
|
||||
init-hook='import sys; sys.path.append("src/")'
|
||||
[tool.pytest.ini_options]
|
||||
testpaths = [
|
||||
"tests",
|
||||
]
|
||||
|
||||
0
src/corelibs/datetime_handling/__init__.py
Normal file
0
src/corelibs/datetime_handling/__init__.py
Normal file
435
src/corelibs/datetime_handling/datetime_helpers.py
Normal file
435
src/corelibs/datetime_handling/datetime_helpers.py
Normal file
@@ -0,0 +1,435 @@
|
||||
"""
|
||||
Various string based date/time helpers
|
||||
"""
|
||||
|
||||
import time as time_t
|
||||
from datetime import datetime, time
|
||||
from zoneinfo import ZoneInfo, ZoneInfoNotFoundError
|
||||
from typing import Callable
|
||||
|
||||
DAYS_OF_WEEK_LONG_TO_SHORT: dict[str, str] = {
|
||||
'Monday': 'Mon',
|
||||
'Tuesday': 'Tue',
|
||||
'Wednesay': 'Wed',
|
||||
'Thursday': 'Thu',
|
||||
'Friday': 'Fri',
|
||||
'Saturday': 'Sat',
|
||||
'Sunday': 'Sun',
|
||||
}
|
||||
DAYS_OF_WEEK_ISO: dict[int, str] = {
|
||||
1: 'Mon', 2: 'Tue', 3: 'Wed', 4: 'Thu', 5: 'Fri', 6: 'Sat', 7: 'Sun'
|
||||
}
|
||||
DAYS_OF_WEEK_ISO_REVERSED: dict[str, int] = {value: key for key, value in DAYS_OF_WEEK_ISO.items()}
|
||||
|
||||
|
||||
def create_time(timestamp: float, timestamp_format: str = "%Y-%m-%d %H:%M:%S") -> str:
|
||||
"""
|
||||
just takes a timestamp and prints out humand readable format
|
||||
|
||||
Arguments:
|
||||
timestamp {float} -- _description_
|
||||
|
||||
Keyword Arguments:
|
||||
timestamp_format {_type_} -- _description_ (default: {"%Y-%m-%d %H:%M:%S"})
|
||||
|
||||
Returns:
|
||||
str -- _description_
|
||||
"""
|
||||
return time_t.strftime(timestamp_format, time_t.localtime(timestamp))
|
||||
|
||||
|
||||
def get_system_timezone():
|
||||
"""Get system timezone using datetime's automatic detection"""
|
||||
# Get current time with system timezone
|
||||
local_time = datetime.now().astimezone()
|
||||
|
||||
# Extract timezone info
|
||||
system_tz = local_time.tzinfo
|
||||
timezone_name = str(system_tz)
|
||||
|
||||
return system_tz, timezone_name
|
||||
|
||||
|
||||
def parse_timezone_data(timezone_tz: str = '') -> ZoneInfo:
|
||||
"""
|
||||
parses a string to get the ZoneInfo
|
||||
If not set or not valid gets local time,
|
||||
if that is not possible get UTC
|
||||
|
||||
Keyword Arguments:
|
||||
timezone_tz {str} -- _description_ (default: {''})
|
||||
|
||||
Returns:
|
||||
ZoneInfo -- _description_
|
||||
"""
|
||||
try:
|
||||
return ZoneInfo(timezone_tz)
|
||||
except (ZoneInfoNotFoundError, ValueError, TypeError):
|
||||
# use default
|
||||
time_tz, time_tz_str = get_system_timezone()
|
||||
if time_tz is None:
|
||||
return ZoneInfo('UTC')
|
||||
# TODO build proper TZ lookup
|
||||
tz_mapping = {
|
||||
'JST': 'Asia/Tokyo',
|
||||
'KST': 'Asia/Seoul',
|
||||
'IST': 'Asia/Kolkata',
|
||||
'CST': 'Asia/Shanghai', # Default to China for CST
|
||||
'AEST': 'Australia/Sydney',
|
||||
'AWST': 'Australia/Perth',
|
||||
'EST': 'America/New_York',
|
||||
'EDT': 'America/New_York',
|
||||
'CDT': 'America/Chicago',
|
||||
'MST': 'America/Denver',
|
||||
'MDT': 'America/Denver',
|
||||
'PST': 'America/Los_Angeles',
|
||||
'PDT': 'America/Los_Angeles',
|
||||
'GMT': 'UTC',
|
||||
'UTC': 'UTC',
|
||||
'CET': 'Europe/Berlin',
|
||||
'CEST': 'Europe/Berlin',
|
||||
'BST': 'Europe/London',
|
||||
}
|
||||
try:
|
||||
return ZoneInfo(tz_mapping[time_tz_str])
|
||||
except (ZoneInfoNotFoundError, IndexError) as e:
|
||||
raise ValueError(f"No mapping for {time_tz_str}: {e}") from e
|
||||
|
||||
|
||||
def get_datetime_iso8601(timezone_tz: str | ZoneInfo = '', sep: str = 'T', timespec: str = 'microseconds') -> str:
|
||||
"""
|
||||
set a datetime in the iso8601 format with microseconds
|
||||
|
||||
Returns:
|
||||
str -- _description_
|
||||
"""
|
||||
# parse if this is a string
|
||||
if isinstance(timezone_tz, str):
|
||||
timezone_tz = parse_timezone_data(timezone_tz)
|
||||
return datetime.now(timezone_tz).isoformat(sep=sep, timespec=timespec)
|
||||
|
||||
|
||||
def validate_date(date: str, not_before: datetime | None = None, not_after: datetime | None = None) -> bool:
|
||||
"""
|
||||
check if Y-m-d or Y/m/d are parsable and valid
|
||||
|
||||
Arguments:
|
||||
date {str} -- _description_
|
||||
|
||||
Returns:
|
||||
bool -- _description_
|
||||
"""
|
||||
formats = ['%Y-%m-%d', '%Y/%m/%d']
|
||||
for __format in formats:
|
||||
try:
|
||||
__date = datetime.strptime(date, __format).date()
|
||||
if not_before is not None and __date < not_before.date():
|
||||
return False
|
||||
if not_after is not None and __date > not_after.date():
|
||||
return False
|
||||
return True
|
||||
except ValueError:
|
||||
continue
|
||||
return False
|
||||
|
||||
|
||||
def parse_flexible_date(
|
||||
date_str: str,
|
||||
timezone_tz: str | ZoneInfo | None = None,
|
||||
shift_time_zone: bool = True
|
||||
) -> datetime | None:
|
||||
"""
|
||||
Parse date string in multiple formats
|
||||
will add time zone info if not None
|
||||
on default it will change the TZ and time to the new time zone
|
||||
if no TZ info is set in date_str, then localtime is assumed
|
||||
|
||||
Arguments:
|
||||
date_str {str} -- _description_
|
||||
|
||||
Keyword Arguments:
|
||||
timezone_tz {str | ZoneInfo | None} -- _description_ (default: {None})
|
||||
shift_time_zone {bool} -- _description_ (default: {True})
|
||||
|
||||
Returns:
|
||||
datetime | None -- _description_
|
||||
"""
|
||||
|
||||
date_str = date_str.strip()
|
||||
|
||||
# Try different parsing methods
|
||||
parsers: list[Callable[[str], datetime]] = [
|
||||
# ISO 8601 format
|
||||
lambda x: datetime.fromisoformat(x), # pylint: disable=W0108
|
||||
# Simple date format
|
||||
lambda x: datetime.strptime(x, "%Y-%m-%d"),
|
||||
# Alternative ISO formats (fallback)
|
||||
lambda x: datetime.strptime(x, "%Y-%m-%dT%H:%M:%S"),
|
||||
lambda x: datetime.strptime(x, "%Y-%m-%dT%H:%M:%S.%f"),
|
||||
]
|
||||
|
||||
if timezone_tz is not None:
|
||||
if isinstance(timezone_tz, str):
|
||||
timezone_tz = parse_timezone_data(timezone_tz)
|
||||
|
||||
date_new = None
|
||||
for parser in parsers:
|
||||
try:
|
||||
date_new = parser(date_str)
|
||||
break
|
||||
except ValueError:
|
||||
continue
|
||||
|
||||
if date_new is not None:
|
||||
if timezone_tz is not None:
|
||||
# shift time zone (default), this will change the date
|
||||
# if the date has no +HH:MM it will take the local time zone as base
|
||||
if shift_time_zone:
|
||||
return date_new.astimezone(timezone_tz)
|
||||
# just add the time zone
|
||||
return date_new.replace(tzinfo=timezone_tz)
|
||||
return date_new
|
||||
|
||||
return None
|
||||
|
||||
|
||||
def compare_dates(date1_str: str, date2_str: str) -> None | bool:
|
||||
"""
|
||||
compare two dates, if the first one is newer than the second one return True
|
||||
If the dates are equal then false will be returned
|
||||
on error return None
|
||||
|
||||
Arguments:
|
||||
date1_str {str} -- _description_
|
||||
date2_str {str} -- _description_
|
||||
|
||||
Returns:
|
||||
None | bool -- _description_
|
||||
"""
|
||||
|
||||
try:
|
||||
# Parse both dates
|
||||
date1 = parse_flexible_date(date1_str)
|
||||
date2 = parse_flexible_date(date2_str)
|
||||
|
||||
# Check if parsing was successful
|
||||
if date1 is None or date2 is None:
|
||||
return None
|
||||
|
||||
# Compare dates
|
||||
return date1.date() > date2.date()
|
||||
|
||||
except ValueError:
|
||||
return None
|
||||
|
||||
|
||||
def find_newest_datetime_in_list(date_list: list[str]) -> None | str:
|
||||
"""
|
||||
Find the newest date from a list of ISO 8601 formatted date strings.
|
||||
Handles potential parsing errors gracefully.
|
||||
|
||||
Args:
|
||||
date_list (list): List of date strings in format '2025-08-06T16:17:39.747+09:00'
|
||||
|
||||
Returns:
|
||||
str: The date string with the newest/latest date, or None if list is empty or all dates are invalid
|
||||
"""
|
||||
if not date_list:
|
||||
return None
|
||||
|
||||
valid_dates: list[tuple[str, datetime]] = []
|
||||
|
||||
for date_str in date_list:
|
||||
try:
|
||||
# Parse the date string and store both original string and parsed datetime
|
||||
parsed_date = parse_flexible_date(date_str)
|
||||
if parsed_date is None:
|
||||
continue
|
||||
valid_dates.append((date_str, parsed_date))
|
||||
except ValueError:
|
||||
# Skip invalid date strings
|
||||
continue
|
||||
|
||||
if not valid_dates:
|
||||
return None
|
||||
|
||||
# Find the date string with the maximum datetime value
|
||||
newest_date_str: str = max(valid_dates, key=lambda x: x[1])[0]
|
||||
|
||||
return newest_date_str
|
||||
|
||||
|
||||
def parse_day_of_week_range(dow_days: str) -> list[tuple[int, str]]:
|
||||
"""
|
||||
Parse a day of week list/range string and return a list of tuples with day index and name.
|
||||
Allowed are short (eg Mon) or long names (eg Monday).
|
||||
|
||||
Arguments:
|
||||
dow_days {str} -- A comma-separated list of days or ranges (e.g., "Mon,Wed-Fri")
|
||||
|
||||
Raises:
|
||||
ValueError: If the input format is invalid or if duplicate days are found.
|
||||
|
||||
Returns:
|
||||
list[tuple[int, str]] -- A list of tuples containing the day index and name.
|
||||
"""
|
||||
# we have Sun twice because it can be 0 or 7
|
||||
# Mon is 1 and Sun is 7, which is ISO standard
|
||||
dow_day = dow_days.split(",")
|
||||
dow_day = [day.strip() for day in dow_day if day.strip()]
|
||||
__out_dow_days: list[tuple[int, str]] = []
|
||||
for __dow_day in dow_day:
|
||||
# if we have a "-" in there fill
|
||||
if "-" in __dow_day:
|
||||
__dow_range = __dow_day.split("-")
|
||||
__dow_range = [day.strip().capitalize() for day in __dow_range if day.strip()]
|
||||
try:
|
||||
start_day = DAYS_OF_WEEK_ISO_REVERSED[__dow_range[0]]
|
||||
end_day = DAYS_OF_WEEK_ISO_REVERSED[__dow_range[1]]
|
||||
except KeyError:
|
||||
# try long time
|
||||
try:
|
||||
start_day = DAYS_OF_WEEK_ISO_REVERSED[DAYS_OF_WEEK_LONG_TO_SHORT[__dow_range[0]]]
|
||||
end_day = DAYS_OF_WEEK_ISO_REVERSED[DAYS_OF_WEEK_LONG_TO_SHORT[__dow_range[1]]]
|
||||
except KeyError as e:
|
||||
raise ValueError(f"Invalid day of week entry found: {__dow_day}: {e}") from e
|
||||
# Check if this spans across the weekend (e.g., Fri-Mon)
|
||||
if start_day > end_day:
|
||||
# Handle weekend-spanning range: start_day to 7, then 1 to end_day
|
||||
__out_dow_days.extend(
|
||||
[
|
||||
(i, DAYS_OF_WEEK_ISO[i])
|
||||
for i in range(start_day, 8) # start_day to Sunday (7)
|
||||
]
|
||||
)
|
||||
__out_dow_days.extend(
|
||||
[
|
||||
(i, DAYS_OF_WEEK_ISO[i])
|
||||
for i in range(1, end_day + 1) # Monday (1) to end_day
|
||||
]
|
||||
)
|
||||
else:
|
||||
# Normal range: start_day to end_day
|
||||
__out_dow_days.extend(
|
||||
[
|
||||
(i, DAYS_OF_WEEK_ISO[i])
|
||||
for i in range(start_day, end_day + 1)
|
||||
]
|
||||
)
|
||||
else:
|
||||
try:
|
||||
__out_dow_days.append((DAYS_OF_WEEK_ISO_REVERSED[__dow_day], __dow_day))
|
||||
except KeyError as e:
|
||||
raise ValueError(f"Invalid day of week entry found: {__dow_day}: {e}") from e
|
||||
# if there are duplicates, alert
|
||||
if len(__out_dow_days) != len(set(__out_dow_days)):
|
||||
raise ValueError(f"Duplicate day of week entries found: {__out_dow_days}")
|
||||
|
||||
return __out_dow_days
|
||||
|
||||
|
||||
def parse_time_range(time_str: str, time_format: str = "%H:%M") -> tuple[time, time]:
|
||||
"""
|
||||
Parse a time range string in the format "HH:MM-HH:MM" and return a tuple of two time objects.
|
||||
|
||||
Arguments:
|
||||
time_str {str} -- The time range string to parse.
|
||||
|
||||
Raises:
|
||||
ValueError: Invalid time block set
|
||||
ValueError: Invalid time format
|
||||
ValueError: Start time must be before end time
|
||||
|
||||
Returns:
|
||||
tuple[time, time] -- start time, end time: leading zeros formattd
|
||||
"""
|
||||
__time_str = time_str.strip()
|
||||
# split by "-"
|
||||
__time_split = __time_str.split("-")
|
||||
if len(__time_split) != 2:
|
||||
raise ValueError(f"Invalid time block: {__time_str}")
|
||||
try:
|
||||
__time_start = datetime.strptime(__time_split[0], time_format).time()
|
||||
__time_end = datetime.strptime(__time_split[1], time_format).time()
|
||||
except ValueError as e:
|
||||
raise ValueError(f"Invalid time block format [{__time_str}]: {e}") from e
|
||||
if __time_start >= __time_end:
|
||||
raise ValueError(f"Invalid time block set, start time after end time or equal: {__time_str}")
|
||||
|
||||
return __time_start, __time_end
|
||||
|
||||
|
||||
def times_overlap_or_connect(time1: tuple[time, time], time2: tuple[time, time], allow_touching: bool = False) -> bool:
|
||||
"""
|
||||
Check if two time ranges overlap or connect
|
||||
|
||||
Args:
|
||||
time1 (tuple): (start_time, end_time) for first range
|
||||
time2 (tuple): (start_time, end_time) for second range
|
||||
allow_touching (bool): If True, touching ranges (e.g., 8:00-10:00 and 10:00-12:00) are allowed
|
||||
|
||||
Returns:
|
||||
bool: True if ranges overlap or connect (based on allow_touching)
|
||||
"""
|
||||
start1, end1 = time1
|
||||
start2, end2 = time2
|
||||
|
||||
if allow_touching:
|
||||
# Only check for actual overlap (touching is OK)
|
||||
return start1 < end2 and start2 < end1
|
||||
# Check for overlap OR touching
|
||||
return start1 <= end2 and start2 <= end1
|
||||
|
||||
|
||||
def is_time_in_range(current_time: str, start_time: str, end_time: str) -> bool:
|
||||
"""
|
||||
Check if current_time is within start_time and end_time (inclusive)
|
||||
Time format: "HH:MM" (24-hour format)
|
||||
|
||||
Arguments:
|
||||
current_time {str} -- _description_
|
||||
start_time {str} -- _description_
|
||||
end_time {str} -- _description_
|
||||
|
||||
Returns:
|
||||
bool -- _description_
|
||||
"""
|
||||
# Convert string times to time objects
|
||||
current = datetime.strptime(current_time, "%H:%M:%S").time()
|
||||
start = datetime.strptime(start_time, "%H:%M:%S").time()
|
||||
end = datetime.strptime(end_time, "%H:%M:%S").time()
|
||||
|
||||
# Handle case where range crosses midnight (e.g., 22:00 to 06:00)
|
||||
if start <= end:
|
||||
# Normal case: start time is before end time
|
||||
return start <= current <= end
|
||||
# Crosses midnight: e.g., 22:00 to 06:00
|
||||
return current >= start or current <= end
|
||||
|
||||
|
||||
def reorder_weekdays_from_today(base_day: str) -> dict[int, str]:
|
||||
"""
|
||||
Reorder the days of the week starting from the specified base_day.
|
||||
|
||||
Arguments:
|
||||
base_day {str} -- The day to start the week from (e.g., "Mon").
|
||||
|
||||
Returns:
|
||||
dict[int, str] -- A dictionary mapping day numbers to day names.
|
||||
"""
|
||||
try:
|
||||
today_num = DAYS_OF_WEEK_ISO_REVERSED[base_day]
|
||||
except KeyError:
|
||||
try:
|
||||
today_num = DAYS_OF_WEEK_ISO_REVERSED[DAYS_OF_WEEK_LONG_TO_SHORT[base_day]]
|
||||
except KeyError as e:
|
||||
raise ValueError(f"Invalid day name provided: {base_day}: {e}") from e
|
||||
# Convert to list of tuples
|
||||
items = list(DAYS_OF_WEEK_ISO.items())
|
||||
# Reorder: from today onwards + from beginning to yesterday
|
||||
reordered_items = items[today_num - 1:] + items[:today_num - 1]
|
||||
|
||||
# Convert back to dictionary
|
||||
return dict(reordered_items)
|
||||
|
||||
# __END__
|
||||
@@ -1,10 +1,9 @@
|
||||
"""
|
||||
Current timestamp strings and time zones
|
||||
Convert timestamp strings with time units into seconds and vice versa.
|
||||
"""
|
||||
|
||||
from math import floor
|
||||
import re
|
||||
from datetime import datetime
|
||||
from zoneinfo import ZoneInfo, ZoneInfoNotFoundError
|
||||
from corelibs.var_handling.var_helpers import is_float
|
||||
|
||||
|
||||
@@ -16,30 +15,6 @@ class TimeUnitError(Exception):
|
||||
"""Custom exception for time parsing errors."""
|
||||
|
||||
|
||||
class TimestampStrings:
|
||||
"""
|
||||
set default time stamps
|
||||
"""
|
||||
|
||||
TIME_ZONE: str = 'Asia/Tokyo'
|
||||
|
||||
def __init__(self, time_zone: str | ZoneInfo | None = None):
|
||||
self.timestamp_now = datetime.now()
|
||||
# set time zone as string
|
||||
time_zone = time_zone if time_zone is not None else self.TIME_ZONE
|
||||
self.time_zone = str(time_zone) if not isinstance(time_zone, str) else time_zone
|
||||
# set ZoneInfo type
|
||||
try:
|
||||
self.time_zone_zi = ZoneInfo(self.time_zone)
|
||||
except ZoneInfoNotFoundError as e:
|
||||
raise ValueError(f'Zone could not be loaded [{self.time_zone}]: {e}') from e
|
||||
self.timestamp_now_tz = datetime.now(self.time_zone_zi)
|
||||
self.today = self.timestamp_now.strftime('%Y-%m-%d')
|
||||
self.timestamp = self.timestamp_now.strftime("%Y-%m-%d %H:%M:%S")
|
||||
self.timestamp_tz = self.timestamp_now_tz.strftime("%Y-%m-%d %H:%M:%S %Z")
|
||||
self.timestamp_file = self.timestamp_now.strftime("%Y-%m-%d_%H%M%S")
|
||||
|
||||
|
||||
def convert_to_seconds(time_string: str | int | float) -> int:
|
||||
"""
|
||||
Conver a string with time units into a seconds string
|
||||
@@ -124,7 +99,10 @@ def convert_to_seconds(time_string: str | int | float) -> int:
|
||||
def seconds_to_string(seconds: str | int | float, show_microseconds: bool = False) -> str:
|
||||
"""
|
||||
Convert seconds to compact human readable format (e.g., "1d 2h 3m 4.567s")
|
||||
Zero values are omitted.
|
||||
milliseconds if requested are added as fractional part of seconds.
|
||||
Supports negative values with "-" prefix
|
||||
if not int or float, will return as is
|
||||
|
||||
Args:
|
||||
seconds (float): Time in seconds (can be negative)
|
||||
@@ -172,4 +150,51 @@ def seconds_to_string(seconds: str | int | float, show_microseconds: bool = Fals
|
||||
result = " ".join(parts)
|
||||
return f"-{result}" if negative else result
|
||||
|
||||
|
||||
def convert_timestamp(timestamp: float | int | str, show_microseconds: bool = True) -> str:
|
||||
"""
|
||||
format timestamp into human readable format. This function will add 0 values between set values
|
||||
for example if we have 1d 1s it would output 1d 0h 0m 1s
|
||||
Milliseconds will be shown if set, and added with ms at the end
|
||||
Negative values will be prefixed with "-"
|
||||
if not int or float, will return as is
|
||||
|
||||
Arguments:
|
||||
timestamp {float} -- _description_
|
||||
|
||||
Keyword Arguments:
|
||||
show_micro {bool} -- _description_ (default: {True})
|
||||
|
||||
Returns:
|
||||
str -- _description_
|
||||
"""
|
||||
if not isinstance(timestamp, (int, float)):
|
||||
return timestamp
|
||||
# cut of the ms, but first round them up to four
|
||||
__timestamp_ms_split = str(round(timestamp, 4)).split(".")
|
||||
timestamp = int(__timestamp_ms_split[0])
|
||||
negative = timestamp < 0
|
||||
timestamp = abs(timestamp)
|
||||
try:
|
||||
ms = int(__timestamp_ms_split[1])
|
||||
except IndexError:
|
||||
ms = 0
|
||||
timegroups = (86400, 3600, 60, 1)
|
||||
output: list[int] = []
|
||||
for i in timegroups:
|
||||
output.append(int(floor(timestamp / i)))
|
||||
timestamp = timestamp % i
|
||||
# output has days|hours|min|sec ms
|
||||
time_string = ""
|
||||
if output[0]:
|
||||
time_string = f"{output[0]}d "
|
||||
if output[0] or output[1]:
|
||||
time_string += f"{output[1]}h "
|
||||
if output[0] or output[1] or output[2]:
|
||||
time_string += f"{output[2]}m "
|
||||
time_string += f"{output[3]}s"
|
||||
if show_microseconds:
|
||||
time_string += f" {ms}ms" if ms else " 0ms"
|
||||
return f"-{time_string}" if negative else time_string
|
||||
|
||||
# __END__
|
||||
32
src/corelibs/datetime_handling/timestamp_strings.py
Normal file
32
src/corelibs/datetime_handling/timestamp_strings.py
Normal file
@@ -0,0 +1,32 @@
|
||||
"""
|
||||
Current timestamp strings and time zones
|
||||
"""
|
||||
|
||||
from datetime import datetime
|
||||
from zoneinfo import ZoneInfo, ZoneInfoNotFoundError
|
||||
|
||||
|
||||
class TimestampStrings:
|
||||
"""
|
||||
set default time stamps
|
||||
"""
|
||||
|
||||
TIME_ZONE: str = 'Asia/Tokyo'
|
||||
|
||||
def __init__(self, time_zone: str | ZoneInfo | None = None):
|
||||
self.timestamp_now = datetime.now()
|
||||
# set time zone as string
|
||||
time_zone = time_zone if time_zone is not None else self.TIME_ZONE
|
||||
self.time_zone = str(time_zone) if not isinstance(time_zone, str) else time_zone
|
||||
# set ZoneInfo type
|
||||
try:
|
||||
self.time_zone_zi = ZoneInfo(self.time_zone)
|
||||
except ZoneInfoNotFoundError as e:
|
||||
raise ValueError(f'Zone could not be loaded [{self.time_zone}]: {e}') from e
|
||||
self.timestamp_now_tz = datetime.now(self.time_zone_zi)
|
||||
self.today = self.timestamp_now.strftime('%Y-%m-%d')
|
||||
self.timestamp = self.timestamp_now.strftime("%Y-%m-%d %H:%M:%S")
|
||||
self.timestamp_tz = self.timestamp_now_tz.strftime("%Y-%m-%d %H:%M:%S %Z")
|
||||
self.timestamp_file = self.timestamp_now.strftime("%Y-%m-%d_%H%M%S")
|
||||
|
||||
# __END__
|
||||
0
src/corelibs/db_handling/__init__.py
Normal file
0
src/corelibs/db_handling/__init__.py
Normal file
214
src/corelibs/db_handling/sqlite_io.py
Normal file
214
src/corelibs/db_handling/sqlite_io.py
Normal file
@@ -0,0 +1,214 @@
|
||||
"""
|
||||
SQLite DB::IO
|
||||
Will be moved to the CoreLibs
|
||||
also method names are subject to change
|
||||
"""
|
||||
|
||||
# import gc
|
||||
from pathlib import Path
|
||||
from typing import Any, Literal, TYPE_CHECKING
|
||||
import sqlite3
|
||||
from corelibs.debug_handling.debug_helpers import call_stack
|
||||
if TYPE_CHECKING:
|
||||
from corelibs.logging_handling.log import Logger
|
||||
|
||||
|
||||
class SQLiteIO():
|
||||
"""Mini SQLite interface"""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
log: 'Logger',
|
||||
db_name: str | Path,
|
||||
autocommit: bool = False,
|
||||
enable_fkey: bool = True,
|
||||
row_factory: str | None = None
|
||||
):
|
||||
self.log = log
|
||||
self.db_name = db_name
|
||||
self.autocommit = autocommit
|
||||
self.enable_fkey = enable_fkey
|
||||
self.row_factory = row_factory
|
||||
self.conn: sqlite3.Connection | None = self.db_connect()
|
||||
|
||||
# def __del__(self):
|
||||
# self.db_close()
|
||||
|
||||
def db_connect(self) -> sqlite3.Connection | None:
|
||||
"""
|
||||
Connect to SQLite database, create if it doesn't exist
|
||||
"""
|
||||
try:
|
||||
# Connect to database (creates if doesn't exist)
|
||||
self.conn = sqlite3.connect(self.db_name, autocommit=self.autocommit)
|
||||
self.conn.setconfig(sqlite3.SQLITE_DBCONFIG_ENABLE_FKEY, True)
|
||||
# self.conn.execute("PRAGMA journal_mode=WAL")
|
||||
# self.log.debug(f"Connected to database: {self.db_name}")
|
||||
|
||||
def dict_factory(cursor: sqlite3.Cursor, row: list[Any]):
|
||||
fields = [column[0] for column in cursor.description]
|
||||
return dict(zip(fields, row))
|
||||
|
||||
match self.row_factory:
|
||||
case 'Row':
|
||||
self.conn.row_factory = sqlite3.Row
|
||||
case 'Dict':
|
||||
self.conn.row_factory = dict_factory
|
||||
case _:
|
||||
self.conn.row_factory = None
|
||||
|
||||
return self.conn
|
||||
except (sqlite3.Error, sqlite3.OperationalError) as e:
|
||||
self.log.error(f"Error connecting to database [{type(e).__name__}] [{self.db_name}]: {e} [{call_stack()}]")
|
||||
self.log.error(f"Error code: {e.sqlite_errorcode if hasattr(e, 'sqlite_errorcode') else 'N/A'}")
|
||||
self.log.error(f"Error name: {e.sqlite_errorname if hasattr(e, 'sqlite_errorname') else 'N/A'}")
|
||||
return None
|
||||
|
||||
def db_close(self):
|
||||
"""close connection"""
|
||||
if self.conn is not None:
|
||||
self.conn.close()
|
||||
self.conn = None
|
||||
|
||||
def db_connected(self) -> bool:
|
||||
"""
|
||||
Return True if db connection is not none
|
||||
|
||||
Returns:
|
||||
bool -- _description_
|
||||
"""
|
||||
return True if self.conn else False
|
||||
|
||||
def __content_exists(self, content_name: str, sql_type: str) -> bool:
|
||||
"""
|
||||
Check if some content name for a certain type exists
|
||||
|
||||
Arguments:
|
||||
content_name {str} -- _description_
|
||||
sql_type {str} -- _description_
|
||||
|
||||
Returns:
|
||||
bool -- _description_
|
||||
"""
|
||||
if self.conn is None:
|
||||
return False
|
||||
try:
|
||||
cursor = self.conn.cursor()
|
||||
cursor.execute("""
|
||||
SELECT name
|
||||
FROM sqlite_master
|
||||
WHERE type = ? AND name = ?
|
||||
""", (sql_type, content_name,))
|
||||
return cursor.fetchone() is not None
|
||||
except sqlite3.Error as e:
|
||||
self.log.error(f"Error checking table [{content_name}/{sql_type}] existence: {e} [{call_stack()}]")
|
||||
return False
|
||||
|
||||
def table_exists(self, table_name: str) -> bool:
|
||||
"""
|
||||
Check if a table exists in the database
|
||||
"""
|
||||
return self.__content_exists(table_name, 'table')
|
||||
|
||||
def trigger_exists(self, trigger_name: str) -> bool:
|
||||
"""
|
||||
Check if a triggere exits
|
||||
"""
|
||||
return self.__content_exists(trigger_name, 'trigger')
|
||||
|
||||
def index_exists(self, index_name: str) -> bool:
|
||||
"""
|
||||
Check if a triggere exits
|
||||
"""
|
||||
return self.__content_exists(index_name, 'index')
|
||||
|
||||
def meta_data_detail(self, table_name: str) -> list[tuple[Any, ...]] | list[dict[str, Any]] | Literal[False]:
|
||||
"""table detail"""
|
||||
query_show_table = """
|
||||
SELECT
|
||||
ti.cid, ti.name, ti.type, ti.'notnull', ti.dflt_value, ti.pk,
|
||||
il_ii.idx_name, il_ii.idx_unique, il_ii.idx_origin, il_ii.idx_partial
|
||||
FROM
|
||||
sqlite_schema AS m,
|
||||
pragma_table_info(m.name) AS ti
|
||||
LEFT JOIN (
|
||||
SELECT
|
||||
il.name AS idx_name, il.'unique' AS idx_unique, il.origin AS idx_origin, il.partial AS idx_partial,
|
||||
ii.cid AS tbl_cid
|
||||
FROM
|
||||
sqlite_schema AS m,
|
||||
pragma_index_list(m.name) AS il,
|
||||
pragma_index_info(il.name) AS ii
|
||||
WHERE m.name = ?1
|
||||
) AS il_ii ON (ti.cid = il_ii.tbl_cid)
|
||||
WHERE
|
||||
m.name = ?1
|
||||
"""
|
||||
return self.execute_query(query_show_table, (table_name,))
|
||||
|
||||
def execute_cursor(
|
||||
self, query: str, params: tuple[Any, ...] | None = None
|
||||
) -> sqlite3.Cursor | Literal[False]:
|
||||
"""execute a cursor, used in execute query or return one and for fetch_row"""
|
||||
if self.conn is None:
|
||||
self.log.warning(f"No connection [{call_stack()}]")
|
||||
return False
|
||||
try:
|
||||
cursor = self.conn.cursor()
|
||||
if params:
|
||||
cursor.execute(query, params)
|
||||
else:
|
||||
cursor.execute(query)
|
||||
return cursor
|
||||
except sqlite3.Error as e:
|
||||
self.log.error(f"Error during executing cursor [{query}:{params}]: {e} [{call_stack()}]")
|
||||
return False
|
||||
|
||||
def execute_query(
|
||||
self, query: str, params: tuple[Any, ...] | None = None
|
||||
) -> list[tuple[Any, ...]] | list[dict[str, Any]] | Literal[False]:
|
||||
"""query execute with or without params, returns result"""
|
||||
if self.conn is None:
|
||||
self.log.warning(f"No connection [{call_stack()}]")
|
||||
return False
|
||||
try:
|
||||
if (cursor := self.execute_cursor(query, params)) is False:
|
||||
return False
|
||||
# fetch before commit because we need to get the RETURN before
|
||||
result = cursor.fetchall()
|
||||
# this is for INSERT/UPDATE/CREATE only
|
||||
self.conn.commit()
|
||||
return result
|
||||
except sqlite3.Error as e:
|
||||
self.log.error(f"Error during executing query [{query}:{params}]: {e} [{call_stack()}]")
|
||||
return False
|
||||
|
||||
def return_one(
|
||||
self, query: str, params: tuple[Any, ...] | None = None
|
||||
) -> tuple[Any, ...] | dict[str, Any] | Literal[False] | None:
|
||||
"""return one row, only for SELECT"""
|
||||
if self.conn is None:
|
||||
self.log.warning(f"No connection [{call_stack()}]")
|
||||
return False
|
||||
try:
|
||||
if (cursor := self.execute_cursor(query, params)) is False:
|
||||
return False
|
||||
return cursor.fetchone()
|
||||
except sqlite3.Error as e:
|
||||
self.log.error(f"Error during return one: {e} [{call_stack()}]")
|
||||
return False
|
||||
|
||||
def fetch_row(
|
||||
self, cursor: sqlite3.Cursor | Literal[False]
|
||||
) -> tuple[Any, ...] | dict[str, Any] | Literal[False] | None:
|
||||
"""read from cursor"""
|
||||
if self.conn is None or cursor is False:
|
||||
self.log.warning(f"No connection [{call_stack()}]")
|
||||
return False
|
||||
try:
|
||||
return cursor.fetchone()
|
||||
except sqlite3.Error as e:
|
||||
self.log.error(f"Error during fetch row: {e} [{call_stack()}]")
|
||||
return False
|
||||
|
||||
# __END__
|
||||
@@ -32,7 +32,7 @@ show_position(file pos optional)
|
||||
import time
|
||||
from typing import Literal
|
||||
from math import floor
|
||||
from corelibs.string_handling.datetime_helpers import convert_timestamp
|
||||
from corelibs.datetime_handling.datetime_helpers import convert_timestamp
|
||||
from corelibs.string_handling.byte_helpers import format_bytes
|
||||
|
||||
|
||||
|
||||
@@ -82,4 +82,22 @@ def mask(
|
||||
for key, value in data_set.items()
|
||||
}
|
||||
|
||||
|
||||
def set_entry(dict_set: dict[str, Any], key: str, value_set: Any) -> dict[str, Any]:
|
||||
"""
|
||||
set a new entry in the dict set
|
||||
|
||||
Arguments:
|
||||
key {str} -- _description_
|
||||
dict_set {dict[str, Any]} -- _description_
|
||||
value_set {Any} -- _description_
|
||||
|
||||
Returns:
|
||||
dict[str, Any] -- _description_
|
||||
"""
|
||||
if not dict_set.get(key):
|
||||
dict_set[key] = {}
|
||||
dict_set[key] = value_set
|
||||
return dict_set
|
||||
|
||||
# __END__
|
||||
|
||||
@@ -5,6 +5,8 @@ json encoder for datetime
|
||||
from typing import Any
|
||||
from json import JSONEncoder, dumps
|
||||
from datetime import datetime, date
|
||||
import copy
|
||||
from jsonpath_ng import parse # pyright: ignore[reportMissingTypeStubs, reportUnknownVariableType]
|
||||
|
||||
|
||||
# subclass JSONEncoder
|
||||
@@ -41,4 +43,22 @@ def json_dumps(data: Any):
|
||||
"""
|
||||
return dumps(data, ensure_ascii=False, default=str)
|
||||
|
||||
|
||||
def modify_with_jsonpath(data: dict[Any, Any], path: str, new_value: Any):
|
||||
"""
|
||||
Modify dictionary using JSONPath (more powerful than JMESPath for modifications)
|
||||
"""
|
||||
result = copy.deepcopy(data)
|
||||
jsonpath_expr = parse(path) # pyright: ignore[reportUnknownVariableType]
|
||||
|
||||
# Find and update all matches
|
||||
matches = jsonpath_expr.find(result) # pyright: ignore[reportUnknownMemberType, reportUnknownVariableType]
|
||||
for match in matches: # pyright: ignore[reportUnknownVariableType]
|
||||
match.full_path.update(result, new_value) # pyright: ignore[reportUnknownMemberType]
|
||||
|
||||
return result
|
||||
|
||||
# __END__
|
||||
|
||||
|
||||
# __END__
|
||||
|
||||
@@ -1,136 +0,0 @@
|
||||
"""
|
||||
Various string based date/time helpers
|
||||
"""
|
||||
|
||||
from math import floor
|
||||
import time as time_t
|
||||
from datetime import datetime
|
||||
from zoneinfo import ZoneInfo, ZoneInfoNotFoundError
|
||||
|
||||
|
||||
def convert_timestamp(timestamp: float | int, show_micro: bool = True) -> str:
|
||||
"""
|
||||
format timestamp into human readable format
|
||||
|
||||
Arguments:
|
||||
timestamp {float} -- _description_
|
||||
|
||||
Keyword Arguments:
|
||||
show_micro {bool} -- _description_ (default: {True})
|
||||
|
||||
Returns:
|
||||
str -- _description_
|
||||
"""
|
||||
# cut of the ms, but first round them up to four
|
||||
__timestamp_ms_split = str(round(timestamp, 4)).split(".")
|
||||
timestamp = int(__timestamp_ms_split[0])
|
||||
try:
|
||||
ms = int(__timestamp_ms_split[1])
|
||||
except IndexError:
|
||||
ms = 0
|
||||
timegroups = (86400, 3600, 60, 1)
|
||||
output: list[int] = []
|
||||
for i in timegroups:
|
||||
output.append(int(floor(timestamp / i)))
|
||||
timestamp = timestamp % i
|
||||
# output has days|hours|min|sec ms
|
||||
time_string = ""
|
||||
if output[0]:
|
||||
time_string = f"{output[0]}d"
|
||||
if output[0] or output[1]:
|
||||
time_string += f"{output[1]}h "
|
||||
if output[0] or output[1] or output[2]:
|
||||
time_string += f"{output[2]}m "
|
||||
time_string += f"{output[3]}s"
|
||||
if show_micro:
|
||||
time_string += f" {ms}ms" if ms else " 0ms"
|
||||
return time_string
|
||||
|
||||
|
||||
def create_time(timestamp: float, timestamp_format: str = "%Y-%m-%d %H:%M:%S") -> str:
|
||||
"""
|
||||
just takes a timestamp and prints out humand readable format
|
||||
|
||||
Arguments:
|
||||
timestamp {float} -- _description_
|
||||
|
||||
Keyword Arguments:
|
||||
timestamp_format {_type_} -- _description_ (default: {"%Y-%m-%d %H:%M:%S"})
|
||||
|
||||
Returns:
|
||||
str -- _description_
|
||||
"""
|
||||
return time_t.strftime(timestamp_format, time_t.localtime(timestamp))
|
||||
|
||||
|
||||
def get_system_timezone():
|
||||
"""Get system timezone using datetime's automatic detection"""
|
||||
# Get current time with system timezone
|
||||
local_time = datetime.now().astimezone()
|
||||
|
||||
# Extract timezone info
|
||||
system_tz = local_time.tzinfo
|
||||
timezone_name = str(system_tz)
|
||||
|
||||
return system_tz, timezone_name
|
||||
|
||||
|
||||
def parse_timezone_data(timezone_tz: str = '') -> ZoneInfo:
|
||||
"""
|
||||
parses a string to get the ZoneInfo
|
||||
If not set or not valid gets local time,
|
||||
if that is not possible get UTC
|
||||
|
||||
Keyword Arguments:
|
||||
timezone_tz {str} -- _description_ (default: {''})
|
||||
|
||||
Returns:
|
||||
ZoneInfo -- _description_
|
||||
"""
|
||||
try:
|
||||
return ZoneInfo(timezone_tz)
|
||||
except (ZoneInfoNotFoundError, ValueError, TypeError):
|
||||
# use default
|
||||
time_tz, time_tz_str = get_system_timezone()
|
||||
if time_tz is None:
|
||||
return ZoneInfo('UTC')
|
||||
# TODO build proper TZ lookup
|
||||
tz_mapping = {
|
||||
'JST': 'Asia/Tokyo',
|
||||
'KST': 'Asia/Seoul',
|
||||
'IST': 'Asia/Kolkata',
|
||||
'CST': 'Asia/Shanghai', # Default to China for CST
|
||||
'AEST': 'Australia/Sydney',
|
||||
'AWST': 'Australia/Perth',
|
||||
'EST': 'America/New_York',
|
||||
'EDT': 'America/New_York',
|
||||
'CDT': 'America/Chicago',
|
||||
'MST': 'America/Denver',
|
||||
'MDT': 'America/Denver',
|
||||
'PST': 'America/Los_Angeles',
|
||||
'PDT': 'America/Los_Angeles',
|
||||
'GMT': 'UTC',
|
||||
'UTC': 'UTC',
|
||||
'CET': 'Europe/Berlin',
|
||||
'CEST': 'Europe/Berlin',
|
||||
'BST': 'Europe/London',
|
||||
}
|
||||
try:
|
||||
return ZoneInfo(tz_mapping[time_tz_str])
|
||||
except (ZoneInfoNotFoundError, IndexError) as e:
|
||||
raise ValueError(f"No mapping for {time_tz_str}: {e}") from e
|
||||
|
||||
|
||||
def get_datetime_iso8601(timezone_tz: str | ZoneInfo = '', sep: str = 'T', timespec: str = 'microseconds') -> str:
|
||||
"""
|
||||
set a datetime in the iso8601 format with microseconds
|
||||
|
||||
Returns:
|
||||
str -- _description_
|
||||
"""
|
||||
# parse if this is a string
|
||||
if isinstance(timezone_tz, str):
|
||||
timezone_tz = parse_timezone_data(timezone_tz)
|
||||
return datetime.now(timezone_tz).isoformat(sep=sep, timespec=timespec)
|
||||
|
||||
# __END__
|
||||
@@ -2,6 +2,7 @@
|
||||
String helpers
|
||||
"""
|
||||
|
||||
import re
|
||||
from decimal import Decimal, getcontext
|
||||
from textwrap import shorten
|
||||
|
||||
@@ -101,4 +102,21 @@ def format_number(number: float, precision: int = 0) -> str:
|
||||
"f}"
|
||||
).format(_number)
|
||||
|
||||
|
||||
def prepare_url_slash(url: str) -> str:
|
||||
"""
|
||||
if the URL does not start with /, add slash
|
||||
strip all double slashes in URL
|
||||
|
||||
Arguments:
|
||||
url {str} -- _description_
|
||||
|
||||
Returns:
|
||||
str -- _description_
|
||||
"""
|
||||
url = re.sub(r'\/+', '/', url)
|
||||
if not url.startswith("/"):
|
||||
url = "/" + url
|
||||
return url
|
||||
|
||||
# __END__
|
||||
|
||||
75
src/corelibs/var_handling/enum_base.py
Normal file
75
src/corelibs/var_handling/enum_base.py
Normal file
@@ -0,0 +1,75 @@
|
||||
"""
|
||||
Enum base classes
|
||||
"""
|
||||
|
||||
from enum import Enum
|
||||
from typing import Any
|
||||
|
||||
|
||||
class EnumBase(Enum):
|
||||
"""
|
||||
base for enum
|
||||
lookup_any and from_any will return "EnumBase" and the sub class name
|
||||
run the return again to "from_any" to get a clean value, or cast it
|
||||
"""
|
||||
|
||||
@classmethod
|
||||
def lookup_key(cls, enum_key: str):
|
||||
"""Lookup from key side (must be string)"""
|
||||
# if there is a ":", then this is legacy, replace with ___
|
||||
if ":" in enum_key:
|
||||
enum_key = enum_key.replace(':', '___')
|
||||
try:
|
||||
return cls[enum_key.upper()]
|
||||
except KeyError as e:
|
||||
raise ValueError(f"Invalid key: {enum_key}") from e
|
||||
except AttributeError as e:
|
||||
raise ValueError(f"Invalid key: {enum_key}") from e
|
||||
|
||||
@classmethod
|
||||
def lookup_value(cls, enum_value: Any):
|
||||
"""Lookup through value side"""
|
||||
try:
|
||||
return cls(enum_value)
|
||||
except ValueError as e:
|
||||
raise ValueError(f"Invalid value: {enum_value}") from e
|
||||
|
||||
@classmethod
|
||||
def from_any(cls, enum_any: Any):
|
||||
"""
|
||||
This only works in the following order
|
||||
-> class itself, as is
|
||||
-> str, assume key lookup
|
||||
-> if failed try other
|
||||
|
||||
Arguments:
|
||||
enum_any {Any} -- _description_
|
||||
|
||||
Returns:
|
||||
_type_ -- _description_
|
||||
"""
|
||||
if isinstance(enum_any, cls):
|
||||
return enum_any
|
||||
# try key first if it is string
|
||||
# if failed try value
|
||||
if isinstance(enum_any, str):
|
||||
try:
|
||||
return cls.lookup_key(enum_any)
|
||||
except (ValueError, AttributeError):
|
||||
try:
|
||||
return cls.lookup_value(enum_any)
|
||||
except ValueError as e:
|
||||
raise ValueError(f"Could not find as key or value: {enum_any}") from e
|
||||
return cls.lookup_value(enum_any)
|
||||
|
||||
def to_value(self) -> Any:
|
||||
"""Convert to value"""
|
||||
return self.value
|
||||
|
||||
def to_lower_case(self) -> str:
|
||||
"""return lower case"""
|
||||
return self.name.lower()
|
||||
|
||||
def __str__(self) -> str:
|
||||
"""return [Enum].NAME like it was called with .name"""
|
||||
return self.name
|
||||
@@ -12,6 +12,7 @@ from corelibs.config_handling.settings_loader_handling.settings_loader_check imp
|
||||
SCRIPT_PATH: Path = Path(__file__).resolve().parent
|
||||
ROOT_PATH: Path = SCRIPT_PATH
|
||||
CONFIG_DIR: Path = Path("config")
|
||||
LOG_DIR: Path = Path("log")
|
||||
CONFIG_FILE: str = "settings.ini"
|
||||
|
||||
|
||||
@@ -26,9 +27,8 @@ def main():
|
||||
print(f"regex {regex_c} check against {value} -> {result}")
|
||||
|
||||
# for log testing
|
||||
script_path: Path = Path(__file__).resolve().parent
|
||||
log = Log(
|
||||
log_path=script_path.joinpath('log', 'settings_loader.log'),
|
||||
log_path=ROOT_PATH.joinpath(LOG_DIR, 'settings_loader.log'),
|
||||
log_name="Settings Loader",
|
||||
log_settings={
|
||||
"log_level_console": 'DEBUG',
|
||||
|
||||
236
test-run/datetime_handling/datetime_helpers.py
Normal file
236
test-run/datetime_handling/datetime_helpers.py
Normal file
@@ -0,0 +1,236 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
"""
|
||||
date string helper test
|
||||
"""
|
||||
|
||||
from datetime import datetime
|
||||
from corelibs.datetime_handling.datetime_helpers import (
|
||||
get_datetime_iso8601, get_system_timezone, parse_timezone_data, validate_date,
|
||||
parse_flexible_date, compare_dates, find_newest_datetime_in_list,
|
||||
parse_day_of_week_range, parse_time_range, times_overlap_or_connect, is_time_in_range,
|
||||
reorder_weekdays_from_today
|
||||
)
|
||||
|
||||
|
||||
def __get_datetime_iso8601():
|
||||
"""
|
||||
Comment
|
||||
"""
|
||||
for tz in [
|
||||
'', 'Asia/Tokyo', 'UTC', 'Europe/Vienna',
|
||||
'America/New_York', 'Australia/Sydney',
|
||||
'invalid'
|
||||
]:
|
||||
print(f"{tz} -> {get_datetime_iso8601(tz)}")
|
||||
|
||||
|
||||
def __parse_timezone_data():
|
||||
for tz in [
|
||||
'JST', 'KST', 'UTC', 'CET', 'CEST',
|
||||
]:
|
||||
print(f"{tz} -> {parse_timezone_data(tz)}")
|
||||
|
||||
|
||||
def __validate_date():
|
||||
"""
|
||||
Comment
|
||||
"""
|
||||
|
||||
test_dates = [
|
||||
"2024-01-01",
|
||||
"2024-02-29", # Leap year
|
||||
"2023-02-29", # Invalid date
|
||||
"2024-13-01", # Invalid month
|
||||
"2024-00-10", # Invalid month
|
||||
"2024-04-31", # Invalid day
|
||||
"invalid-date"
|
||||
]
|
||||
|
||||
for date_str in test_dates:
|
||||
is_valid = validate_date(date_str)
|
||||
print(f"Date '{date_str}' is valid: {is_valid}")
|
||||
|
||||
# also test not before and not after
|
||||
not_before_dates = [
|
||||
"2023-12-31",
|
||||
"2024-01-01",
|
||||
"2024-02-29",
|
||||
]
|
||||
not_after_dates = [
|
||||
"2024-12-31",
|
||||
"2024-11-30",
|
||||
"2025-01-01",
|
||||
]
|
||||
|
||||
for date_str in not_before_dates:
|
||||
datetime.strptime(date_str, "%Y-%m-%d") # Ensure valid date format
|
||||
is_valid = validate_date(date_str, not_before=datetime.strptime("2024-01-01", "%Y-%m-%d"))
|
||||
print(f"Date '{date_str}' is valid (not before 2024-01-01): {is_valid}")
|
||||
|
||||
for date_str in not_after_dates:
|
||||
is_valid = validate_date(date_str, not_after=datetime.strptime("2024-12-31", "%Y-%m-%d"))
|
||||
print(f"Date '{date_str}' is valid (not after 2024-12-31): {is_valid}")
|
||||
|
||||
for date_str in test_dates:
|
||||
is_valid = validate_date(
|
||||
date_str,
|
||||
not_before=datetime.strptime("2024-01-01", "%Y-%m-%d"),
|
||||
not_after=datetime.strptime("2024-12-31", "%Y-%m-%d")
|
||||
)
|
||||
print(f"Date '{date_str}' is valid (2024 only): {is_valid}")
|
||||
|
||||
|
||||
def __parse_flexible_date():
|
||||
for date_str in [
|
||||
"2024-01-01",
|
||||
"01/02/2024",
|
||||
"February 29, 2024",
|
||||
"Invalid date",
|
||||
"2025-01-01 12:18:10",
|
||||
"2025-01-01 12:18:10.566",
|
||||
"2025-01-01T12:18:10.566",
|
||||
"2025-01-01T12:18:10.566+02:00",
|
||||
]:
|
||||
print(f"{date_str} -> {parse_flexible_date(date_str)}")
|
||||
|
||||
|
||||
def __compare_dates():
|
||||
|
||||
for date1, date2 in [
|
||||
("2024-01-01 12:00:00", "2024-01-01 15:30:00"),
|
||||
("2024-01-02", "2024-01-01"),
|
||||
("2024-01-01T10:00:00+02:00", "2024-01-01T08:00:00Z"),
|
||||
("invalid-date", "2024-01-01"),
|
||||
("2024-01-01", "invalid-date"),
|
||||
("invalid-date", "also-invalid"),
|
||||
]:
|
||||
result = compare_dates(date1, date2)
|
||||
print(f"Comparing '{date1}' and '{date2}': {result}")
|
||||
|
||||
|
||||
def __find_newest_datetime_in_list():
|
||||
date_list = [
|
||||
"2024-01-01 12:00:00",
|
||||
"2024-01-02 09:30:00",
|
||||
"2023-12-31 23:59:59",
|
||||
"2024-01-02 15:45:00",
|
||||
"2024-01-02T15:45:00.001",
|
||||
"invalid-date",
|
||||
]
|
||||
newest_date = find_newest_datetime_in_list(date_list)
|
||||
print(f"Newest date in list: {newest_date}")
|
||||
|
||||
|
||||
def __parse_day_of_week_range():
|
||||
ranges = [
|
||||
"Mon-Fri",
|
||||
"Saturday-Sunday",
|
||||
"Wed-Mon",
|
||||
"Fri-Fri",
|
||||
"mon-tue",
|
||||
"Invalid-Range"
|
||||
]
|
||||
for range_str in ranges:
|
||||
try:
|
||||
days = parse_day_of_week_range(range_str)
|
||||
print(f"Day range '{range_str}' -> {days}")
|
||||
except ValueError as e:
|
||||
print(f"[!] Error parsing day range '{range_str}': {e}")
|
||||
|
||||
|
||||
def __parse_time_range():
|
||||
ranges = [
|
||||
"08:00-17:00",
|
||||
"22:00-06:00",
|
||||
"12:30-12:30",
|
||||
"invalid-range"
|
||||
]
|
||||
for range_str in ranges:
|
||||
try:
|
||||
start_time, end_time = parse_time_range(range_str)
|
||||
print(f"Time range '{range_str}' -> Start: {start_time}, End: {end_time}")
|
||||
except ValueError as e:
|
||||
print(f"[!] Error parsing time range '{range_str}': {e}")
|
||||
|
||||
|
||||
def __times_overlap_or_connect():
|
||||
time_format = "%H:%M"
|
||||
time_ranges = [
|
||||
(("08:00", "12:00"), ("11:00", "15:00")), # Overlap
|
||||
(("22:00", "02:00"), ("01:00", "05:00")), # Overlap across midnight
|
||||
(("10:00", "12:00"), ("12:00", "14:00")), # Connect
|
||||
(("09:00", "11:00"), ("12:00", "14:00")), # No overlap
|
||||
]
|
||||
for (start1, end1), (start2, end2) in time_ranges:
|
||||
start1 = datetime.strptime(start1, time_format).time()
|
||||
end1 = datetime.strptime(end1, time_format).time()
|
||||
start2 = datetime.strptime(start2, time_format).time()
|
||||
end2 = datetime.strptime(end2, time_format).time()
|
||||
overlap = times_overlap_or_connect((start1, end1), (start2, end2))
|
||||
overlap_connect = times_overlap_or_connect((start1, end1), (start2, end2), True)
|
||||
print(f"Time ranges {start1}-{end1} and {start2}-{end2} overlap/connect: {overlap}/{overlap_connect}")
|
||||
|
||||
|
||||
def __is_time_in_range():
|
||||
time_format = "%H:%M:%S"
|
||||
test_cases = [
|
||||
("10:00:00", "09:00:00", "11:00:00"),
|
||||
("23:30:00", "22:00:00", "01:00:00"), # Across midnight
|
||||
("05:00:00", "06:00:00", "10:00:00"), # Not in range
|
||||
("12:00:00", "12:00:00", "12:00:00"), # Exact match
|
||||
]
|
||||
for (check_time, start_time, end_time) in test_cases:
|
||||
start_time = datetime.strptime(start_time, time_format).time()
|
||||
end_time = datetime.strptime(end_time, time_format).time()
|
||||
in_range = is_time_in_range(
|
||||
f"{check_time}", start_time.strftime("%H:%M:%S"), end_time.strftime("%H:%M:%S")
|
||||
)
|
||||
print(f"Time {check_time} in range {start_time}-{end_time}: {in_range}")
|
||||
|
||||
|
||||
def __reorder_weekdays_from_today():
|
||||
for base_day in [
|
||||
"Tue", "Wed", "Sunday", "Fri", "InvalidDay"
|
||||
]:
|
||||
try:
|
||||
reordered_days = reorder_weekdays_from_today(base_day)
|
||||
print(f"Reordered weekdays from {base_day}: {reordered_days}")
|
||||
except ValueError as e:
|
||||
print(f"[!] Error reordering weekdays from '{base_day}': {e}")
|
||||
|
||||
|
||||
def main() -> None:
|
||||
"""
|
||||
Comment
|
||||
"""
|
||||
print("\nDatetime ISO 8601 tests:\n")
|
||||
__get_datetime_iso8601()
|
||||
print("\nSystem time test:")
|
||||
print(f"System time: {get_system_timezone()}")
|
||||
print("\nParse timezone data tests:\n")
|
||||
__parse_timezone_data()
|
||||
print("\nValidate date tests:\n")
|
||||
__validate_date()
|
||||
print("\nParse flexible date tests:\n")
|
||||
__parse_flexible_date()
|
||||
print("\nCompare dates tests:\n")
|
||||
__compare_dates()
|
||||
print("\nFind newest datetime in list tests:\n")
|
||||
__find_newest_datetime_in_list()
|
||||
print("\nParse day of week range tests:\n")
|
||||
__parse_day_of_week_range()
|
||||
print("\nParse time range tests:\n")
|
||||
__parse_time_range()
|
||||
print("\nTimes overlap or connect tests:\n")
|
||||
__times_overlap_or_connect()
|
||||
print("\nIs time in range tests:\n")
|
||||
__is_time_in_range()
|
||||
print("\nReorder weekdays from today tests:\n")
|
||||
__reorder_weekdays_from_today()
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
|
||||
# __END__
|
||||
@@ -4,8 +4,8 @@
|
||||
timestamp string checks
|
||||
"""
|
||||
|
||||
from corelibs.string_handling.timestamp_strings import (
|
||||
seconds_to_string, convert_to_seconds, TimeParseError, TimeUnitError
|
||||
from corelibs.datetime_handling.timestamp_convert import (
|
||||
convert_timestamp, seconds_to_string, convert_to_seconds, TimeParseError, TimeUnitError
|
||||
)
|
||||
|
||||
|
||||
@@ -13,6 +13,7 @@ def main() -> None:
|
||||
"""
|
||||
Comment
|
||||
"""
|
||||
print("\n--- Testing convert_to_seconds ---\n")
|
||||
test_cases = [
|
||||
"5M 6d", # 5 months, 6 days
|
||||
"2h 30m 45s", # 2 hours, 30 minutes, 45 seconds
|
||||
@@ -58,6 +59,8 @@ def main() -> None:
|
||||
except (TimeParseError, TimeUnitError) as e:
|
||||
print(f"Error encountered for {time_string}: {type(e).__name__}: {e}")
|
||||
|
||||
print("\n--- Testing seconds_to_string and convert_timestamp ---\n")
|
||||
|
||||
test_values = [
|
||||
'as is string',
|
||||
-172800.001234, # -2 days, -0.001234 seconds
|
||||
@@ -79,7 +82,8 @@ def main() -> None:
|
||||
|
||||
for time_value in test_values:
|
||||
result = seconds_to_string(time_value, show_microseconds=True)
|
||||
print(f"Seconds to human readable: {time_value} => {result}")
|
||||
result_alt = convert_timestamp(time_value, show_microseconds=True)
|
||||
print(f"Seconds to human readable: {time_value} => {result} / {result_alt}")
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
2
test-run/db_handling/database/.gitignore
vendored
Normal file
2
test-run/db_handling/database/.gitignore
vendored
Normal file
@@ -0,0 +1,2 @@
|
||||
*
|
||||
!.gitignore
|
||||
2
test-run/db_handling/log/.gitignore
vendored
Normal file
2
test-run/db_handling/log/.gitignore
vendored
Normal file
@@ -0,0 +1,2 @@
|
||||
*
|
||||
!.gitignore
|
||||
148
test-run/db_handling/sqlite_io.py
Normal file
148
test-run/db_handling/sqlite_io.py
Normal file
@@ -0,0 +1,148 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
"""
|
||||
Main comment
|
||||
"""
|
||||
|
||||
from pathlib import Path
|
||||
from uuid import uuid4
|
||||
import json
|
||||
import sqlite3
|
||||
from corelibs.debug_handling.dump_data import dump_data
|
||||
from corelibs.logging_handling.log import Log, Logger
|
||||
from corelibs.db_handling.sqlite_io import SQLiteIO
|
||||
|
||||
SCRIPT_PATH: Path = Path(__file__).resolve().parent
|
||||
ROOT_PATH: Path = SCRIPT_PATH
|
||||
DATABASE_DIR: Path = Path("database")
|
||||
LOG_DIR: Path = Path("log")
|
||||
|
||||
|
||||
def main() -> None:
|
||||
"""
|
||||
Comment
|
||||
"""
|
||||
log = Log(
|
||||
log_path=ROOT_PATH.joinpath(LOG_DIR, 'sqlite_io.log'),
|
||||
log_name="SQLite IO",
|
||||
log_settings={
|
||||
"log_level_console": 'DEBUG',
|
||||
"log_level_file": 'DEBUG',
|
||||
}
|
||||
)
|
||||
db = SQLiteIO(
|
||||
log=Logger(log.get_logger_settings()),
|
||||
db_name=ROOT_PATH.joinpath(DATABASE_DIR, 'test_sqlite_io.db'),
|
||||
row_factory='Dict'
|
||||
)
|
||||
if db.db_connected():
|
||||
log.info(f"Connected to DB: {db.db_name}")
|
||||
if db.trigger_exists('trg_test_a_set_date_updated_on_update'):
|
||||
log.info("Trigger trg_test_a_set_date_updated_on_update exists")
|
||||
if db.table_exists('test_a'):
|
||||
log.info("Table test_a exists, dropping for clean test")
|
||||
db.execute_query("DROP TABLE test_a;")
|
||||
# create a dummy table
|
||||
table_sql = """
|
||||
CREATE TABLE IF NOT EXISTS test_a (
|
||||
test_a_id INTEGER PRIMARY KEY,
|
||||
date_created TEXT DEFAULT (strftime('%Y-%m-%d %H:%M:%f', 'now')),
|
||||
date_updated TEXT,
|
||||
uid TEXT NOT NULL UNIQUE,
|
||||
set_current_timestamp TEXT DEFAULT CURRENT_TIMESTAMP,
|
||||
text_a TEXT,
|
||||
content,
|
||||
int_a INTEGER,
|
||||
float_a REAL
|
||||
);
|
||||
"""
|
||||
result = db.execute_query(table_sql)
|
||||
log.debug(f"Create table result: {result}")
|
||||
trigger_sql = """
|
||||
CREATE TRIGGER trg_test_a_set_date_updated_on_update
|
||||
AFTER UPDATE ON test_a
|
||||
FOR EACH ROW
|
||||
WHEN OLD.date_updated IS NULL OR NEW.date_updated = OLD.date_updated
|
||||
BEGIN
|
||||
UPDATE test_a
|
||||
SET date_updated = (strftime('%Y-%m-%d %H:%M:%f', 'now'))
|
||||
WHERE test_a_id = NEW.test_a_id;
|
||||
END;
|
||||
"""
|
||||
result = db.execute_query(trigger_sql)
|
||||
log.debug(f"Create trigger result: {result}")
|
||||
result = db.meta_data_detail('test_a')
|
||||
log.debug(f"Table meta data detail: {dump_data(result)}")
|
||||
# INSERT DATA
|
||||
sql = """
|
||||
INSERT INTO test_a (uid, text_a, content, int_a, float_a)
|
||||
VALUES (?, ?, ?, ?, ?)
|
||||
RETURNING test_a_id, uid;
|
||||
"""
|
||||
result = db.execute_query(
|
||||
sql,
|
||||
(
|
||||
str(uuid4()),
|
||||
'Some text A',
|
||||
json.dumps({'foo': 'bar', 'number': 42}),
|
||||
123,
|
||||
123.456,
|
||||
)
|
||||
)
|
||||
log.debug(f"[1] Insert data result: {dump_data(result)}")
|
||||
__uid: str = ''
|
||||
if result is not False:
|
||||
# first one only of interest
|
||||
result = dict(result[0])
|
||||
__uid = str(result.get('uid', ''))
|
||||
# second insert
|
||||
result = db.execute_query(
|
||||
sql,
|
||||
(
|
||||
str(uuid4()),
|
||||
'Some text A',
|
||||
json.dumps({'foo': 'bar', 'number': 42}),
|
||||
123,
|
||||
123.456,
|
||||
)
|
||||
)
|
||||
log.debug(f"[2] Insert data result: {dump_data(result)}")
|
||||
result = db.execute_query("SELECT * FROM test_a;")
|
||||
log.debug(f"Select data result: {dump_data(result)}")
|
||||
result = db.return_one("SELECT * FROM test_a WHERE uid = ?;", (__uid,))
|
||||
log.debug(f"Fetch row result: {dump_data(result)}")
|
||||
sql = """
|
||||
UPDATE test_a
|
||||
SET text_a = ?
|
||||
WHERE uid = ?;
|
||||
"""
|
||||
result = db.execute_query(
|
||||
sql,
|
||||
(
|
||||
'Some updated text A',
|
||||
__uid,
|
||||
)
|
||||
)
|
||||
log.debug(f"Update data result: {dump_data(result)}")
|
||||
result = db.return_one("SELECT * FROM test_a WHERE uid = ?;", (__uid,))
|
||||
log.debug(f"Fetch row after update result: {dump_data(result)}")
|
||||
|
||||
db.db_close()
|
||||
|
||||
db = SQLiteIO(
|
||||
log=Logger(log.get_logger_settings()),
|
||||
db_name=ROOT_PATH.joinpath(DATABASE_DIR, 'test_sqlite_io.db'),
|
||||
row_factory='Row'
|
||||
)
|
||||
result = db.return_one("SELECT * FROM test_a WHERE uid = ?;", (__uid,))
|
||||
if result is not None and result is not False:
|
||||
log.debug(f"Fetch row result: {dump_data(result)} -> {dict(result)} -> {result.keys()}")
|
||||
log.debug(f"Access via index: {result[5]} -> {result['text_a']}")
|
||||
if isinstance(result, sqlite3.Row):
|
||||
log.debug('Result is sqlite3.Row as expected')
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
|
||||
# __END__
|
||||
@@ -2,8 +2,9 @@
Iterator helper testing
"""

from typing import Any
from corelibs.debug_handling.dump_data import dump_data
from corelibs.iterator_handling.dict_helpers import mask
from corelibs.iterator_handling.dict_helpers import mask, set_entry


def __mask():
@@ -95,11 +96,23 @@ def __mask():
    print(f"===> Masked: {dump_data(result)}")


def __set_dict_value_entry():

    dict_empty: dict[str, Any] = {}
    new = set_entry(dict_empty, 'a.b.c', 1)
    print(f"[1] Set dict entry: {dump_data(new)}")
    new = set_entry(new, 'dict', {'key': 'value'})
    print(f"[2] Set dict entry: {dump_data(new)}")
    new = set_entry(new, 'list', [1, 2, 3])
    print(f"[3] Set dict entry: {dump_data(new)}")


def main():
    """
    Test: corelibs.iterator_handling.dict_helpers
    """
    __mask()
    __set_dict_value_entry()


if __name__ == "__main__":

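The exact behaviour of set_entry lives in corelibs' dict_helpers; as a rough mental model for the dotted-path calls used above, a hypothetical stand-in could look like this (illustration only, not the library's implementation):

from typing import Any

def set_entry_sketch(data: dict[str, Any], path: str, value: Any) -> dict[str, Any]:
    """Set value at a dot-separated path, creating intermediate dicts as needed."""
    node = data
    keys = path.split('.')
    for key in keys[:-1]:
        node = node.setdefault(key, {})
    node[keys[-1]] = value
    return data

print(set_entry_sketch({}, 'a.b.c', 1))  # {'a': {'b': {'c': 1}}}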
52
test-run/json_handling/json_replace.py
Normal file
@@ -0,0 +1,52 @@
#!/usr/bin/env python3

"""
JSON content replace tests
"""

from deepdiff import DeepDiff
from corelibs.debug_handling.dump_data import dump_data
from corelibs.json_handling.json_helper import modify_with_jsonpath


def main() -> None:
    """
    Comment
    """
    __data = {
        'a': 'b',
        'foobar': [1, 2, 'a'],
        'bar': {
            'a': 1,
            'b': 'c'
        },
        'baz': [
            {
                'aa': 1,
                'ab': 'cc'
            },
            {
                'ba': 2,
                'bb': 'dd'
            },
        ],
        'foo': {
            'a': [1, 2, 3],
            'b': ['a', 'b', 'c']
        }
    }

    # Modify some values using JSONPath
    __replace_data = modify_with_jsonpath(__data, 'bar.a', 42)
    __replace_data = modify_with_jsonpath(__replace_data, 'foo.b[1]', 'modified')
    __replace_data = modify_with_jsonpath(__replace_data, 'baz[0].ab', 'changed')

    print(f"Original Data:\n{dump_data(__data)}\n")
    print(f"Modified Data:\n{dump_data(__replace_data)}\n")
    print(f"Differences:\n{dump_data(DeepDiff(__data, __replace_data, verbose_level=2))}\n")


if __name__ == "__main__":
    main()

# __END__
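modify_with_jsonpath presumably builds on the jsonpath-ng dependency; assuming that is what the helper wraps, the same replacements can be expressed directly with jsonpath_ng (sketch only, the corelibs implementation may differ, e.g. in whether it copies the input):

from jsonpath_ng import parse

data = {'bar': {'a': 1}, 'foo': {'b': ['a', 'b', 'c']}}
parse('bar.a').update(data, 42)             # nested key, updated in place
parse('foo.b[1]').update(data, 'modified')  # list element addressed via [1]
print(data)  # {'bar': {'a': 42}, 'foo': {'b': ['a', 'modified', 'c']}}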
@@ -10,7 +10,8 @@ import sys
import io
from pathlib import Path
from corelibs.file_handling.progress import Progress
from corelibs.string_handling.datetime_helpers import convert_timestamp, create_time
from corelibs.datetime_handling.datetime_helpers import create_time
from corelibs.datetime_handling.timestamp_convert import convert_timestamp


def main():

@@ -1,26 +0,0 @@
#!/usr/bin/env python3

"""
date string helper test
"""

from corelibs.string_handling.datetime_helpers import get_datetime_iso8601


def main() -> None:
    """
    Comment
    """
    print(get_datetime_iso8601())
    print(get_datetime_iso8601('Asia/Tokyo'))
    print(get_datetime_iso8601('UTC'))
    print(get_datetime_iso8601('Europe/Vienna'))
    print(get_datetime_iso8601('America/New_York'))
    print(get_datetime_iso8601('Australia/Sydney'))
    print(get_datetime_iso8601('invalid'))


if __name__ == "__main__":
    main()

# __END__
@@ -5,7 +5,7 @@ Test string_handling/string_helpers
import sys
from decimal import Decimal, getcontext
from textwrap import shorten
from corelibs.string_handling.string_helpers import shorten_string, format_number
from corelibs.string_handling.string_helpers import shorten_string, format_number, prepare_url_slash
from corelibs.string_handling.text_colors import Colors


@@ -73,6 +73,18 @@ def __sh_colors():
    print(f"Underline/Yellow/Bold: {Colors.underline}{Colors.bold}{Colors.yellow}UNDERLINE YELLOW BOLD{Colors.reset}")


def __prepare_url_slash():
    urls = [
        "api/v1/resource",
        "/api/v1/resource",
        "///api//v1//resource//",
        "api//v1/resource/",
    ]
    for url in urls:
        prepared = prepare_url_slash(url)
        print(f"IN: {url} -> OUT: {prepared}")


def main():
    """
    Test: corelibs.string_handling.string_helpers
@@ -80,6 +92,7 @@ def main():
    __sh_shorten_string()
    __sh_format_number()
    __sh_colors()
    __prepare_url_slash()


if __name__ == "__main__":

@@ -5,7 +5,7 @@ Test for double byte format
"""

from zoneinfo import ZoneInfo
from corelibs.string_handling.timestamp_strings import TimestampStrings
from corelibs.datetime_handling.timestamp_strings import TimestampStrings


def main():

29
test-run/var_handling/enum_base.py
Normal file
@@ -0,0 +1,29 @@
#!/usr/bin/env python3

"""
Enum handling
"""

from corelibs.var_handling.enum_base import EnumBase


class TestBlock(EnumBase):
    """Test block enum"""
    BLOCK_A = "block_a"
    HAS_NUM = 5


def main() -> None:
    """
    Comment
    """

    print(f"BLOCK A: {TestBlock.from_any('BLOCK_A')}")
    print(f"HAS NUM: {TestBlock.from_any(5)}")
    print(f"DIRECT BLOCK: {TestBlock.BLOCK_A.name} -> {TestBlock.BLOCK_A.value}")


if __name__ == "__main__":
    main()

# __END__
3
tests/unit/datetime_handling/__init__.py
Normal file
@@ -0,0 +1,3 @@
"""
Unit tests for datetime_handling module
"""
@@ -3,7 +3,7 @@ Unit tests for convert_to_seconds function from timestamp_strings module.
"""

import pytest
from corelibs.string_handling.timestamp_strings import convert_to_seconds, TimeParseError, TimeUnitError
from corelibs.datetime_handling.timestamp_convert import convert_to_seconds, TimeParseError, TimeUnitError


class TestConvertToSeconds:
690
tests/unit/datetime_handling/test_datetime_helpers.py
Normal file
@@ -0,0 +1,690 @@
"""
PyTest: datetime_handling/datetime_helpers
"""

from datetime import datetime, time
from zoneinfo import ZoneInfo
import pytest

from corelibs.datetime_handling.datetime_helpers import (
    create_time,
    get_system_timezone,
    parse_timezone_data,
    get_datetime_iso8601,
    validate_date,
    parse_flexible_date,
    compare_dates,
    find_newest_datetime_in_list,
    parse_day_of_week_range,
    parse_time_range,
    times_overlap_or_connect,
    is_time_in_range,
    reorder_weekdays_from_today,
    DAYS_OF_WEEK_LONG_TO_SHORT,
    DAYS_OF_WEEK_ISO,
    DAYS_OF_WEEK_ISO_REVERSED,
)


class TestConstants:
    """Test suite for module constants"""

    def test_days_of_week_long_to_short(self):
        """Test DAYS_OF_WEEK_LONG_TO_SHORT dictionary"""
        assert DAYS_OF_WEEK_LONG_TO_SHORT['Monday'] == 'Mon'
        assert DAYS_OF_WEEK_LONG_TO_SHORT['Tuesday'] == 'Tue'
        assert DAYS_OF_WEEK_LONG_TO_SHORT['Friday'] == 'Fri'
        assert DAYS_OF_WEEK_LONG_TO_SHORT['Sunday'] == 'Sun'
        assert len(DAYS_OF_WEEK_LONG_TO_SHORT) == 7

    def test_days_of_week_iso(self):
        """Test DAYS_OF_WEEK_ISO dictionary"""
        assert DAYS_OF_WEEK_ISO[1] == 'Mon'
        assert DAYS_OF_WEEK_ISO[5] == 'Fri'
        assert DAYS_OF_WEEK_ISO[7] == 'Sun'
        assert len(DAYS_OF_WEEK_ISO) == 7

    def test_days_of_week_iso_reversed(self):
        """Test DAYS_OF_WEEK_ISO_REVERSED dictionary"""
        assert DAYS_OF_WEEK_ISO_REVERSED['Mon'] == 1
        assert DAYS_OF_WEEK_ISO_REVERSED['Fri'] == 5
        assert DAYS_OF_WEEK_ISO_REVERSED['Sun'] == 7
        assert len(DAYS_OF_WEEK_ISO_REVERSED) == 7


class TestCreateTime:
    """Test suite for create_time function"""

    def test_create_time_default_format(self):
        """Test create_time with default format"""
        timestamp = 1609459200.0  # 2021-01-01 00:00:00 UTC
        result = create_time(timestamp)
        # Result depends on system timezone, so just check format
        assert len(result) == 19
        assert '-' in result
        assert ':' in result

    def test_create_time_custom_format(self):
        """Test create_time with custom format"""
        timestamp = 1609459200.0
        result = create_time(timestamp, "%Y/%m/%d")
        # Check basic format structure
        assert '/' in result
        assert len(result) == 10

    def test_create_time_with_microseconds(self):
        """Test create_time with microseconds in format"""
        timestamp = 1609459200.123456
        result = create_time(timestamp, "%Y-%m-%d %H:%M:%S")
        assert len(result) == 19


class TestGetSystemTimezone:
    """Test suite for get_system_timezone function"""

    def test_get_system_timezone_returns_tuple(self):
        """Test that get_system_timezone returns a tuple"""
        result = get_system_timezone()
        assert isinstance(result, tuple)
        assert len(result) == 2

    def test_get_system_timezone_returns_valid_data(self):
        """Test that get_system_timezone returns valid timezone info"""
        system_tz, timezone_name = get_system_timezone()
        assert system_tz is not None
        assert isinstance(timezone_name, str)
        assert len(timezone_name) > 0


class TestParseTimezoneData:
    """Test suite for parse_timezone_data function"""

    def test_parse_timezone_data_valid_timezone(self):
        """Test parse_timezone_data with valid timezone string"""
        result = parse_timezone_data('Asia/Tokyo')
        assert isinstance(result, ZoneInfo)
        assert str(result) == 'Asia/Tokyo'

    def test_parse_timezone_data_utc(self):
        """Test parse_timezone_data with UTC"""
        result = parse_timezone_data('UTC')
        assert isinstance(result, ZoneInfo)
        assert str(result) == 'UTC'

    def test_parse_timezone_data_empty_string(self):
        """Test parse_timezone_data with empty string falls back to system timezone"""
        result = parse_timezone_data('')
        assert isinstance(result, ZoneInfo)

    def test_parse_timezone_data_invalid_timezone(self):
        """Test parse_timezone_data with invalid timezone falls back to system timezone"""
        # Invalid timezones fall back to system timezone or UTC
        result = parse_timezone_data('Invalid/Timezone')
        assert isinstance(result, ZoneInfo)
        # Should be either system timezone or UTC

    def test_parse_timezone_data_none(self):
        """Test parse_timezone_data with None falls back to system timezone"""
        result = parse_timezone_data()
        assert isinstance(result, ZoneInfo)

    def test_parse_timezone_data_various_timezones(self):
        """Test parse_timezone_data with various timezone strings"""
        timezones = ['America/New_York', 'Europe/London', 'Asia/Seoul']
        for tz in timezones:
            result = parse_timezone_data(tz)
            assert isinstance(result, ZoneInfo)
            assert str(result) == tz


class TestGetDatetimeIso8601:
    """Test suite for get_datetime_iso8601 function"""

    def test_get_datetime_iso8601_default_params(self):
        """Test get_datetime_iso8601 with default parameters"""
        result = get_datetime_iso8601()
        # Should be in ISO 8601 format with T separator and microseconds
        assert 'T' in result
        assert '.' in result  # microseconds
        # Check basic ISO 8601 format
        datetime.fromisoformat(result)  # Should not raise

    def test_get_datetime_iso8601_custom_timezone_string(self):
        """Test get_datetime_iso8601 with custom timezone string"""
        result = get_datetime_iso8601('UTC')
        assert '+00:00' in result or 'Z' in result or result.endswith('+00:00')

    def test_get_datetime_iso8601_custom_timezone_zoneinfo(self):
        """Test get_datetime_iso8601 with ZoneInfo object"""
        tz = ZoneInfo('Asia/Tokyo')
        result = get_datetime_iso8601(tz)
        assert 'T' in result
        datetime.fromisoformat(result)  # Should not raise

    def test_get_datetime_iso8601_custom_separator(self):
        """Test get_datetime_iso8601 with custom separator"""
        result = get_datetime_iso8601(sep=' ')
        assert ' ' in result
        assert 'T' not in result

    def test_get_datetime_iso8601_different_timespec(self):
        """Test get_datetime_iso8601 with different timespec values"""
        result_seconds = get_datetime_iso8601(timespec='seconds')
        assert '.' not in result_seconds  # No microseconds

        result_milliseconds = get_datetime_iso8601(timespec='milliseconds')
        # Should have milliseconds (3 digits after decimal)
        assert '.' in result_milliseconds


class TestValidateDate:
    """Test suite for validate_date function"""

    def test_validate_date_valid_hyphen_format(self):
        """Test validate_date with valid Y-m-d format"""
        assert validate_date('2023-12-25') is True
        assert validate_date('2024-01-01') is True

    def test_validate_date_valid_slash_format(self):
        """Test validate_date with valid Y/m/d format"""
        assert validate_date('2023/12/25') is True
        assert validate_date('2024/01/01') is True

    def test_validate_date_invalid_format(self):
        """Test validate_date with invalid format"""
        assert validate_date('25-12-2023') is False
        assert validate_date('2023.12.25') is False
        assert validate_date('invalid') is False

    def test_validate_date_invalid_date(self):
        """Test validate_date with invalid date values"""
        assert validate_date('2023-13-01') is False  # Invalid month
        assert validate_date('2023-02-30') is False  # Invalid day

    def test_validate_date_with_not_before(self):
        """Test validate_date with not_before constraint"""
        not_before = datetime(2023, 12, 1)
        assert validate_date('2023-12-25', not_before=not_before) is True
        assert validate_date('2023-11-25', not_before=not_before) is False

    def test_validate_date_with_not_after(self):
        """Test validate_date with not_after constraint"""
        not_after = datetime(2023, 12, 31)
        assert validate_date('2023-12-25', not_after=not_after) is True
        assert validate_date('2024-01-01', not_after=not_after) is False

    def test_validate_date_with_both_constraints(self):
        """Test validate_date with both not_before and not_after constraints"""
        not_before = datetime(2023, 12, 1)
        not_after = datetime(2023, 12, 31)
        assert validate_date('2023-12-15', not_before=not_before, not_after=not_after) is True
        assert validate_date('2023-11-30', not_before=not_before, not_after=not_after) is False
        assert validate_date('2024-01-01', not_before=not_before, not_after=not_after) is False


class TestParseFlexibleDate:
    """Test suite for parse_flexible_date function"""

    def test_parse_flexible_date_iso8601_full(self):
        """Test parse_flexible_date with full ISO 8601 format"""
        result = parse_flexible_date('2023-12-25T15:30:45')
        assert isinstance(result, datetime)
        assert result.year == 2023
        assert result.month == 12
        assert result.day == 25
        assert result.hour == 15
        assert result.minute == 30
        assert result.second == 45

    def test_parse_flexible_date_iso8601_with_microseconds(self):
        """Test parse_flexible_date with microseconds"""
        result = parse_flexible_date('2023-12-25T15:30:45.123456')
        assert isinstance(result, datetime)
        assert result.microsecond == 123456

    def test_parse_flexible_date_simple_date(self):
        """Test parse_flexible_date with simple date format"""
        result = parse_flexible_date('2023-12-25')
        assert isinstance(result, datetime)
        assert result.year == 2023
        assert result.month == 12
        assert result.day == 25

    def test_parse_flexible_date_with_timezone_string(self):
        """Test parse_flexible_date with timezone string"""
        result = parse_flexible_date('2023-12-25T15:30:45', timezone_tz='Asia/Tokyo')
        assert isinstance(result, datetime)
        assert result.tzinfo is not None

    def test_parse_flexible_date_with_timezone_zoneinfo(self):
        """Test parse_flexible_date with ZoneInfo object"""
        tz = ZoneInfo('UTC')
        result = parse_flexible_date('2023-12-25T15:30:45', timezone_tz=tz)
        assert isinstance(result, datetime)
        assert result.tzinfo is not None

    def test_parse_flexible_date_with_timezone_no_shift(self):
        """Test parse_flexible_date with timezone but no shift"""
        result = parse_flexible_date('2023-12-25T15:30:45', timezone_tz='UTC', shift_time_zone=False)
        assert isinstance(result, datetime)
        assert result.hour == 15  # Should not shift

    def test_parse_flexible_date_with_timezone_shift(self):
        """Test parse_flexible_date with timezone shift"""
        result = parse_flexible_date('2023-12-25T15:30:45+00:00', timezone_tz='Asia/Tokyo', shift_time_zone=True)
        assert isinstance(result, datetime)
        assert result.tzinfo is not None

    def test_parse_flexible_date_invalid_format(self):
        """Test parse_flexible_date with invalid format returns None"""
        result = parse_flexible_date('invalid-date')
        assert result is None

    def test_parse_flexible_date_whitespace(self):
        """Test parse_flexible_date with whitespace"""
        result = parse_flexible_date(' 2023-12-25 ')
        assert isinstance(result, datetime)
        assert result.year == 2023


class TestCompareDates:
    """Test suite for compare_dates function"""

    def test_compare_dates_first_newer(self):
        """Test compare_dates when first date is newer"""
        result = compare_dates('2024-01-02', '2024-01-01')
        assert result is True

    def test_compare_dates_first_older(self):
        """Test compare_dates when first date is older"""
        result = compare_dates('2024-01-01', '2024-01-02')
        assert result is False

    def test_compare_dates_equal(self):
        """Test compare_dates when dates are equal"""
        result = compare_dates('2024-01-01', '2024-01-01')
        assert result is False

    def test_compare_dates_with_time(self):
        """Test compare_dates with time components (should only compare dates)"""
        result = compare_dates('2024-01-02T10:00:00', '2024-01-01T23:59:59')
        assert result is True

    def test_compare_dates_invalid_first_date(self):
        """Test compare_dates with invalid first date"""
        result = compare_dates('invalid', '2024-01-01')
        assert result is None

    def test_compare_dates_invalid_second_date(self):
        """Test compare_dates with invalid second date"""
        result = compare_dates('2024-01-01', 'invalid')
        assert result is None

    def test_compare_dates_both_invalid(self):
        """Test compare_dates with both dates invalid"""
        result = compare_dates('invalid1', 'invalid2')
        assert result is None


class TestFindNewestDatetimeInList:
    """Test suite for find_newest_datetime_in_list function"""

    def test_find_newest_datetime_in_list_basic(self):
        """Test find_newest_datetime_in_list with basic list"""
        dates = [
            '2023-12-25T10:00:00',
            '2024-01-01T12:00:00',
            '2023-11-15T08:00:00'
        ]
        result = find_newest_datetime_in_list(dates)
        assert result == '2024-01-01T12:00:00'

    def test_find_newest_datetime_in_list_with_timezone(self):
        """Test find_newest_datetime_in_list with timezone-aware dates"""
        dates = [
            '2025-08-06T16:17:39.747+09:00',
            '2025-08-05T16:17:39.747+09:00',
            '2025-08-07T16:17:39.747+09:00'
        ]
        result = find_newest_datetime_in_list(dates)
        assert result == '2025-08-07T16:17:39.747+09:00'

    def test_find_newest_datetime_in_list_empty_list(self):
        """Test find_newest_datetime_in_list with empty list"""
        result = find_newest_datetime_in_list([])
        assert result is None

    def test_find_newest_datetime_in_list_single_date(self):
        """Test find_newest_datetime_in_list with single date"""
        dates = ['2024-01-01T12:00:00']
        result = find_newest_datetime_in_list(dates)
        assert result == '2024-01-01T12:00:00'

    def test_find_newest_datetime_in_list_with_invalid_dates(self):
        """Test find_newest_datetime_in_list with some invalid dates"""
        dates = [
            '2023-12-25T10:00:00',
            'invalid-date',
            '2024-01-01T12:00:00'
        ]
        result = find_newest_datetime_in_list(dates)
        assert result == '2024-01-01T12:00:00'

    def test_find_newest_datetime_in_list_all_invalid(self):
        """Test find_newest_datetime_in_list with all invalid dates"""
        dates = ['invalid1', 'invalid2', 'invalid3']
        result = find_newest_datetime_in_list(dates)
        assert result is None

    def test_find_newest_datetime_in_list_mixed_formats(self):
        """Test find_newest_datetime_in_list with mixed date formats"""
        dates = [
            '2023-12-25',
            '2024-01-01T12:00:00',
            '2023-11-15T08:00:00.123456'
        ]
        result = find_newest_datetime_in_list(dates)
        assert result == '2024-01-01T12:00:00'


class TestParseDayOfWeekRange:
    """Test suite for parse_day_of_week_range function"""

    def test_parse_day_of_week_range_single_day(self):
        """Test parse_day_of_week_range with single day"""
        result = parse_day_of_week_range('Mon')
        assert result == [(1, 'Mon')]

    def test_parse_day_of_week_range_multiple_days(self):
        """Test parse_day_of_week_range with multiple days"""
        result = parse_day_of_week_range('Mon,Wed,Fri')
        assert len(result) == 3
        assert (1, 'Mon') in result
        assert (3, 'Wed') in result
        assert (5, 'Fri') in result

    def test_parse_day_of_week_range_simple_range(self):
        """Test parse_day_of_week_range with simple range"""
        result = parse_day_of_week_range('Mon-Fri')
        assert len(result) == 5
        assert result[0] == (1, 'Mon')
        assert result[-1] == (5, 'Fri')

    def test_parse_day_of_week_range_weekend_spanning(self):
        """Test parse_day_of_week_range with weekend-spanning range"""
        result = parse_day_of_week_range('Fri-Mon')
        assert len(result) == 4
        assert (5, 'Fri') in result
        assert (6, 'Sat') in result
        assert (7, 'Sun') in result
        assert (1, 'Mon') in result

    def test_parse_day_of_week_range_long_names(self):
        """Test parse_day_of_week_range with long day names - only works in ranges"""
        # Long names only work in ranges, not as standalone days
        # This is a limitation of the current implementation
        with pytest.raises(ValueError) as exc_info:
            parse_day_of_week_range('Monday,Wednesday')
        assert 'Invalid day of week entry found' in str(exc_info.value)

    def test_parse_day_of_week_range_mixed_format(self):
        """Test parse_day_of_week_range with short names and ranges"""
        result = parse_day_of_week_range('Mon,Wed-Fri')
        assert len(result) == 4
        assert (1, 'Mon') in result
        assert (3, 'Wed') in result
        assert (4, 'Thu') in result
        assert (5, 'Fri') in result

    def test_parse_day_of_week_range_invalid_day(self):
        """Test parse_day_of_week_range with invalid day"""
        with pytest.raises(ValueError) as exc_info:
            parse_day_of_week_range('InvalidDay')
        assert 'Invalid day of week entry found' in str(exc_info.value)

    def test_parse_day_of_week_range_duplicate_days(self):
        """Test parse_day_of_week_range with duplicate days"""
        with pytest.raises(ValueError) as exc_info:
            parse_day_of_week_range('Mon,Mon')
        assert 'Duplicate day of week entries found' in str(exc_info.value)

    def test_parse_day_of_week_range_whitespace_handling(self):
        """Test parse_day_of_week_range with extra whitespace"""
        result = parse_day_of_week_range(' Mon , Wed , Fri ')
        assert len(result) == 3
        assert (1, 'Mon') in result


class TestParseTimeRange:
    """Test suite for parse_time_range function"""

    def test_parse_time_range_valid(self):
        """Test parse_time_range with valid time range"""
        start, end = parse_time_range('09:00-17:00')
        assert start == time(9, 0)
        assert end == time(17, 0)

    def test_parse_time_range_different_times(self):
        """Test parse_time_range with different time values"""
        start, end = parse_time_range('08:30-12:45')
        assert start == time(8, 30)
        assert end == time(12, 45)

    def test_parse_time_range_invalid_block(self):
        """Test parse_time_range with invalid block format"""
        with pytest.raises(ValueError) as exc_info:
            parse_time_range('09:00')
        assert 'Invalid time block' in str(exc_info.value)

    def test_parse_time_range_invalid_format(self):
        """Test parse_time_range with invalid time format"""
        with pytest.raises(ValueError) as exc_info:
            parse_time_range('25:00-26:00')
        assert 'Invalid time block format' in str(exc_info.value)

    def test_parse_time_range_start_after_end(self):
        """Test parse_time_range with start time after end time"""
        with pytest.raises(ValueError) as exc_info:
            parse_time_range('17:00-09:00')
        assert 'start time after end time' in str(exc_info.value)

    def test_parse_time_range_equal_times(self):
        """Test parse_time_range with equal start and end times"""
        with pytest.raises(ValueError) as exc_info:
            parse_time_range('09:00-09:00')
        assert 'start time after end time or equal' in str(exc_info.value)

    def test_parse_time_range_custom_format(self):
        """Test parse_time_range with custom time format"""
        start, end = parse_time_range('09:00:00-17:00:00', time_format='%H:%M:%S')
        assert start == time(9, 0, 0)
        assert end == time(17, 0, 0)

    def test_parse_time_range_whitespace(self):
        """Test parse_time_range with whitespace"""
        start, end = parse_time_range(' 09:00-17:00 ')
        assert start == time(9, 0)
        assert end == time(17, 0)


class TestTimesOverlapOrConnect:
    """Test suite for times_overlap_or_connect function"""

    def test_times_overlap_or_connect_clear_overlap(self):
        """Test times_overlap_or_connect with clear overlap"""
        time1 = (time(9, 0), time(12, 0))
        time2 = (time(10, 0), time(14, 0))
        assert times_overlap_or_connect(time1, time2) is True

    def test_times_overlap_or_connect_no_overlap(self):
        """Test times_overlap_or_connect with no overlap"""
        time1 = (time(9, 0), time(12, 0))
        time2 = (time(13, 0), time(17, 0))
        assert times_overlap_or_connect(time1, time2) is False

    def test_times_overlap_or_connect_touching_not_allowed(self):
        """Test times_overlap_or_connect with touching ranges (not allowed)"""
        time1 = (time(8, 0), time(10, 0))
        time2 = (time(10, 0), time(12, 0))
        assert times_overlap_or_connect(time1, time2, allow_touching=False) is True

    def test_times_overlap_or_connect_touching_allowed(self):
        """Test times_overlap_or_connect with touching ranges (allowed)"""
        time1 = (time(8, 0), time(10, 0))
        time2 = (time(10, 0), time(12, 0))
        assert times_overlap_or_connect(time1, time2, allow_touching=True) is False

    def test_times_overlap_or_connect_one_contains_other(self):
        """Test times_overlap_or_connect when one range contains the other"""
        time1 = (time(9, 0), time(17, 0))
        time2 = (time(10, 0), time(12, 0))
        assert times_overlap_or_connect(time1, time2) is True

    def test_times_overlap_or_connect_same_start(self):
        """Test times_overlap_or_connect with same start time"""
        time1 = (time(9, 0), time(12, 0))
        time2 = (time(9, 0), time(14, 0))
        assert times_overlap_or_connect(time1, time2) is True

    def test_times_overlap_or_connect_same_end(self):
        """Test times_overlap_or_connect with same end time"""
        time1 = (time(9, 0), time(12, 0))
        time2 = (time(10, 0), time(12, 0))
        assert times_overlap_or_connect(time1, time2) is True


class TestIsTimeInRange:
    """Test suite for is_time_in_range function"""

    def test_is_time_in_range_within_range(self):
        """Test is_time_in_range with time within range"""
        assert is_time_in_range('10:00:00', '09:00:00', '17:00:00') is True

    def test_is_time_in_range_at_start(self):
        """Test is_time_in_range with time at start of range"""
        assert is_time_in_range('09:00:00', '09:00:00', '17:00:00') is True

    def test_is_time_in_range_at_end(self):
        """Test is_time_in_range with time at end of range"""
        assert is_time_in_range('17:00:00', '09:00:00', '17:00:00') is True

    def test_is_time_in_range_before_range(self):
        """Test is_time_in_range with time before range"""
        assert is_time_in_range('08:00:00', '09:00:00', '17:00:00') is False

    def test_is_time_in_range_after_range(self):
        """Test is_time_in_range with time after range"""
        assert is_time_in_range('18:00:00', '09:00:00', '17:00:00') is False

    def test_is_time_in_range_crosses_midnight(self):
        """Test is_time_in_range with range crossing midnight"""
        # Range from 22:00 to 06:00
        assert is_time_in_range('23:00:00', '22:00:00', '06:00:00') is True
        assert is_time_in_range('03:00:00', '22:00:00', '06:00:00') is True
        assert is_time_in_range('12:00:00', '22:00:00', '06:00:00') is False

    def test_is_time_in_range_midnight_boundary(self):
        """Test is_time_in_range at midnight"""
        assert is_time_in_range('00:00:00', '22:00:00', '06:00:00') is True


class TestReorderWeekdaysFromToday:
    """Test suite for reorder_weekdays_from_today function"""

    def test_reorder_weekdays_from_monday(self):
        """Test reorder_weekdays_from_today starting from Monday"""
        result = reorder_weekdays_from_today('Mon')
        values = list(result.values())
        assert values[0] == 'Mon'
        assert values[-1] == 'Sun'
        assert len(result) == 7

    def test_reorder_weekdays_from_wednesday(self):
        """Test reorder_weekdays_from_today starting from Wednesday"""
        result = reorder_weekdays_from_today('Wed')
        values = list(result.values())
        assert values[0] == 'Wed'
        assert values[1] == 'Thu'
        assert values[-1] == 'Tue'

    def test_reorder_weekdays_from_sunday(self):
        """Test reorder_weekdays_from_today starting from Sunday"""
        result = reorder_weekdays_from_today('Sun')
        values = list(result.values())
        assert values[0] == 'Sun'
        assert values[-1] == 'Sat'

    def test_reorder_weekdays_from_long_name(self):
        """Test reorder_weekdays_from_today with long day name"""
        result = reorder_weekdays_from_today('Friday')
        values = list(result.values())
        assert values[0] == 'Fri'
        assert values[-1] == 'Thu'

    def test_reorder_weekdays_invalid_day(self):
        """Test reorder_weekdays_from_today with invalid day name"""
        with pytest.raises(ValueError) as exc_info:
            reorder_weekdays_from_today('InvalidDay')
        assert 'Invalid day name provided' in str(exc_info.value)

    def test_reorder_weekdays_preserves_all_days(self):
        """Test that reorder_weekdays_from_today preserves all 7 days"""
        for day in ['Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat', 'Sun']:
            result = reorder_weekdays_from_today(day)
            assert len(result) == 7
            assert set(result.values()) == {'Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat', 'Sun'}


class TestEdgeCases:
    """Test suite for edge cases and integration scenarios"""

    def test_parse_flexible_date_with_various_iso_formats(self):
        """Test parse_flexible_date handles various ISO format variations"""
        formats = [
            '2023-12-25',
            '2023-12-25T15:30:45',
            '2023-12-25T15:30:45.123456',
        ]
        for date_str in formats:
            result = parse_flexible_date(date_str)
            assert result is not None
            assert isinstance(result, datetime)

    def test_timezone_consistency_across_functions(self):
        """Test timezone handling consistency across functions"""
        tz_str = 'Asia/Tokyo'
        tz_obj = parse_timezone_data(tz_str)

        # Both should work with get_datetime_iso8601
        result1 = get_datetime_iso8601(tz_str)
        result2 = get_datetime_iso8601(tz_obj)

        assert result1 is not None
        assert result2 is not None

    def test_date_validation_and_parsing_consistency(self):
        """Test that validate_date and parse_flexible_date agree"""
        valid_dates = ['2023-12-25', '2024/01/01']
        for date_str in valid_dates:
            # normalize format for parse_flexible_date
            normalized = date_str.replace('/', '-')
            assert validate_date(date_str) is True
            assert parse_flexible_date(normalized) is not None

    def test_day_of_week_range_complex_scenario(self):
        """Test parse_day_of_week_range with complex mixed input"""
        result = parse_day_of_week_range('Mon,Wed-Fri,Sun')
        assert len(result) == 5
        assert (1, 'Mon') in result
        assert (3, 'Wed') in result
        assert (4, 'Thu') in result
        assert (5, 'Fri') in result
        assert (7, 'Sun') in result

    def test_time_range_boundary_conditions(self):
        """Test parse_time_range with boundary times"""
        start, end = parse_time_range('00:00-23:59')
        assert start == time(0, 0)
        assert end == time(23, 59)

# __END__
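The 'Fri-Mon' weekend-spanning expectation follows from simple modular arithmetic over the ISO weekday numbers; a small sketch with locally defined copies of the ISO maps (not the library's own parser) reproduces the expected tuples:

# Illustration only: expand a weekday range with wrap-around over the week boundary.
DAYS_ISO = {1: 'Mon', 2: 'Tue', 3: 'Wed', 4: 'Thu', 5: 'Fri', 6: 'Sat', 7: 'Sun'}
DAYS_ISO_REVERSED = {name: number for number, name in DAYS_ISO.items()}

def expand_range(start: str, end: str) -> list[tuple[int, str]]:
    """Expand 'Fri'-'Mon' to [(5, 'Fri'), (6, 'Sat'), (7, 'Sun'), (1, 'Mon')]."""
    start_iso = DAYS_ISO_REVERSED[start]
    end_iso = DAYS_ISO_REVERSED[end]
    length = (end_iso - start_iso) % 7 + 1  # wraps across the weekend when end < start
    days = []
    for offset in range(length):
        iso = (start_iso + offset - 1) % 7 + 1
        days.append((iso, DAYS_ISO[iso]))
    return days

print(expand_range('Fri', 'Mon'))  # [(5, 'Fri'), (6, 'Sat'), (7, 'Sun'), (1, 'Mon')]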
462
tests/unit/datetime_handling/test_seconds_to_string.py
Normal file
@@ -0,0 +1,462 @@
"""
PyTest: datetime_handling/timestamp_convert - seconds_to_string and convert_timestamp functions
"""

from corelibs.datetime_handling.timestamp_convert import seconds_to_string, convert_timestamp


class TestSecondsToString:
    """Test suite for seconds_to_string function"""

    def test_basic_integer_seconds(self):
        """Test conversion of basic integer seconds"""
        assert seconds_to_string(0) == "0s"
        assert seconds_to_string(1) == "1s"
        assert seconds_to_string(30) == "30s"
        assert seconds_to_string(59) == "59s"

    def test_minutes_conversion(self):
        """Test conversion involving minutes"""
        assert seconds_to_string(60) == "1m"
        assert seconds_to_string(90) == "1m 30s"
        assert seconds_to_string(120) == "2m"
        assert seconds_to_string(3599) == "59m 59s"

    def test_hours_conversion(self):
        """Test conversion involving hours"""
        assert seconds_to_string(3600) == "1h"
        assert seconds_to_string(3660) == "1h 1m"
        assert seconds_to_string(3661) == "1h 1m 1s"
        assert seconds_to_string(7200) == "2h"
        assert seconds_to_string(7260) == "2h 1m"

    def test_days_conversion(self):
        """Test conversion involving days"""
        assert seconds_to_string(86400) == "1d"
        assert seconds_to_string(86401) == "1d 1s"
        assert seconds_to_string(90000) == "1d 1h"
        assert seconds_to_string(90061) == "1d 1h 1m 1s"
        assert seconds_to_string(172800) == "2d"

    def test_complex_combinations(self):
        """Test complex time combinations"""
        # 1 day, 2 hours, 3 minutes, 4 seconds
        total = 86400 + 7200 + 180 + 4
        assert seconds_to_string(total) == "1d 2h 3m 4s"

        # 5 days, 23 hours, 59 minutes, 59 seconds
        total = 5 * 86400 + 23 * 3600 + 59 * 60 + 59
        assert seconds_to_string(total) == "5d 23h 59m 59s"

    def test_fractional_seconds_default_precision(self):
        """Test fractional seconds with default precision (3 decimal places)"""
        assert seconds_to_string(0.1) == "0.1s"
        assert seconds_to_string(0.123) == "0.123s"
        assert seconds_to_string(0.1234) == "0.123s"
        assert seconds_to_string(1.5) == "1.5s"
        assert seconds_to_string(1.567) == "1.567s"
        assert seconds_to_string(1.5678) == "1.568s"

    def test_fractional_seconds_microsecond_precision(self):
        """Test fractional seconds with microsecond precision"""
        assert seconds_to_string(0.1, show_microseconds=True) == "0.1s"
        assert seconds_to_string(0.123456, show_microseconds=True) == "0.123456s"
        assert seconds_to_string(0.1234567, show_microseconds=True) == "0.123457s"
        assert seconds_to_string(1.5, show_microseconds=True) == "1.5s"
        assert seconds_to_string(1.567890, show_microseconds=True) == "1.56789s"

    def test_fractional_seconds_with_larger_units(self):
        """Test fractional seconds combined with larger time units"""
        # 1 minute and 30.5 seconds
        assert seconds_to_string(90.5) == "1m 30.5s"
        assert seconds_to_string(90.5, show_microseconds=True) == "1m 30.5s"

        # 1 hour, 1 minute, and 1.123 seconds
        total = 3600 + 60 + 1.123
        assert seconds_to_string(total) == "1h 1m 1.123s"
        assert seconds_to_string(total, show_microseconds=True) == "1h 1m 1.123s"

    def test_negative_values(self):
        """Test negative time values"""
        assert seconds_to_string(-1) == "-1s"
        assert seconds_to_string(-60) == "-1m"
        assert seconds_to_string(-90) == "-1m 30s"
        assert seconds_to_string(-3661) == "-1h 1m 1s"
        assert seconds_to_string(-86401) == "-1d 1s"
        assert seconds_to_string(-1.5) == "-1.5s"
        assert seconds_to_string(-90.123) == "-1m 30.123s"

    def test_zero_handling(self):
        """Test various zero values"""
        assert seconds_to_string(0) == "0s"
        assert seconds_to_string(0.0) == "0s"
        assert seconds_to_string(-0) == "0s"
        assert seconds_to_string(-0.0) == "0s"

    def test_float_input_types(self):
        """Test various float input types"""
        assert seconds_to_string(1.0) == "1s"
        assert seconds_to_string(60.0) == "1m"
        assert seconds_to_string(3600.0) == "1h"
        assert seconds_to_string(86400.0) == "1d"

    def test_large_values(self):
        """Test handling of large time values"""
        # 365 days (1 year)
        year_seconds = 365 * 86400
        assert seconds_to_string(year_seconds) == "365d"

        # 1000 days
        assert seconds_to_string(1000 * 86400) == "1000d"

        # Large number with all units
        large_time = 999 * 86400 + 23 * 3600 + 59 * 60 + 59.999
        result = seconds_to_string(large_time)
        assert result.startswith("999d")
        assert "23h" in result
        assert "59m" in result
        assert "59.999s" in result

    def test_rounding_behavior(self):
        """Test rounding behavior for fractional seconds"""
        # Default precision (3 decimal places) - values are truncated via rstrip
        assert seconds_to_string(1.0004) == "1s"  # Truncates trailing zeros after rstrip
        assert seconds_to_string(1.0005) == "1s"  # Truncates trailing zeros after rstrip
        assert seconds_to_string(1.9999) == "2s"  # Rounds up and strips .000

        # Microsecond precision (6 decimal places)
        assert seconds_to_string(1.0000004, show_microseconds=True) == "1s"
        assert seconds_to_string(1.0000005, show_microseconds=True) == "1.000001s"

    def test_trailing_zero_removal(self):
        """Test that trailing zeros are properly removed"""
        assert seconds_to_string(1.100) == "1.1s"
        assert seconds_to_string(1.120) == "1.12s"
        assert seconds_to_string(1.123) == "1.123s"
        assert seconds_to_string(1.100000, show_microseconds=True) == "1.1s"
        assert seconds_to_string(1.123000, show_microseconds=True) == "1.123s"

    def test_invalid_input_types(self):
        """Test handling of invalid input types"""
        # String inputs should be returned as-is
        assert seconds_to_string("invalid") == "invalid"
        assert seconds_to_string("not a number") == "not a number"
        assert seconds_to_string("") == ""

    def test_edge_cases_boundary_values(self):
        """Test edge cases at unit boundaries"""
        # Exactly 1 minute - 1 second
        assert seconds_to_string(59) == "59s"
        assert seconds_to_string(59.999) == "59.999s"

        # Exactly 1 hour - 1 second
        assert seconds_to_string(3599) == "59m 59s"
        assert seconds_to_string(3599.999) == "59m 59.999s"

        # Exactly 1 day - 1 second
        assert seconds_to_string(86399) == "23h 59m 59s"
        assert seconds_to_string(86399.999) == "23h 59m 59.999s"

    def test_very_small_fractional_seconds(self):
        """Test very small fractional values"""
        assert seconds_to_string(0.001) == "0.001s"
        assert seconds_to_string(0.0001) == "0s"  # Below default precision
        assert seconds_to_string(0.000001, show_microseconds=True) == "0.000001s"
        assert seconds_to_string(0.0000001, show_microseconds=True) == "0s"  # Below microsecond precision

    def test_precision_consistency(self):
        """Test that precision is consistent across different scenarios"""
        # With other units present
        assert seconds_to_string(61.123456) == "1m 1.123s"
        assert seconds_to_string(61.123456, show_microseconds=True) == "1m 1.123456s"

        # Large values with fractional seconds
        large_val = 90061.123456  # 1d 1h 1m 1.123456s
        assert seconds_to_string(large_val) == "1d 1h 1m 1.123s"
        assert seconds_to_string(large_val, show_microseconds=True) == "1d 1h 1m 1.123456s"

    def test_string_numeric_inputs(self):
        """Test string inputs that represent numbers"""
        # String inputs should be returned as-is, even if they look like numbers
        assert seconds_to_string("60") == "60"
        assert seconds_to_string("1.5") == "1.5"
        assert seconds_to_string("0") == "0"
        assert seconds_to_string("-60") == "-60"


class TestConvertTimestamp:
    """Test suite for convert_timestamp function"""

    def test_basic_integer_seconds(self):
        """Test conversion of basic integer seconds"""
        assert convert_timestamp(0) == "0s 0ms"
        assert convert_timestamp(1) == "1s 0ms"
        assert convert_timestamp(30) == "30s 0ms"
        assert convert_timestamp(59) == "59s 0ms"

    def test_basic_without_microseconds(self):
        """Test conversion without showing microseconds"""
        assert convert_timestamp(0, show_microseconds=False) == "0s"
        assert convert_timestamp(1, show_microseconds=False) == "1s"
        assert convert_timestamp(30, show_microseconds=False) == "30s"
        assert convert_timestamp(59, show_microseconds=False) == "59s"

    def test_minutes_conversion(self):
        """Test conversion involving minutes"""
        assert convert_timestamp(60) == "1m 0s 0ms"
        assert convert_timestamp(90) == "1m 30s 0ms"
        assert convert_timestamp(120) == "2m 0s 0ms"
        assert convert_timestamp(3599) == "59m 59s 0ms"

    def test_minutes_conversion_without_microseconds(self):
        """Test conversion involving minutes without microseconds"""
        assert convert_timestamp(60, show_microseconds=False) == "1m 0s"
        assert convert_timestamp(90, show_microseconds=False) == "1m 30s"
        assert convert_timestamp(120, show_microseconds=False) == "2m 0s"

    def test_hours_conversion(self):
        """Test conversion involving hours"""
        assert convert_timestamp(3600) == "1h 0m 0s 0ms"
        assert convert_timestamp(3660) == "1h 1m 0s 0ms"
        assert convert_timestamp(3661) == "1h 1m 1s 0ms"
        assert convert_timestamp(7200) == "2h 0m 0s 0ms"
        assert convert_timestamp(7260) == "2h 1m 0s 0ms"

    def test_hours_conversion_without_microseconds(self):
        """Test conversion involving hours without microseconds"""
        assert convert_timestamp(3600, show_microseconds=False) == "1h 0m 0s"
        assert convert_timestamp(3660, show_microseconds=False) == "1h 1m 0s"
        assert convert_timestamp(3661, show_microseconds=False) == "1h 1m 1s"

    def test_days_conversion(self):
        """Test conversion involving days"""
        assert convert_timestamp(86400) == "1d 0h 0m 0s 0ms"
        assert convert_timestamp(86401) == "1d 0h 0m 1s 0ms"
        assert convert_timestamp(90000) == "1d 1h 0m 0s 0ms"
        assert convert_timestamp(90061) == "1d 1h 1m 1s 0ms"
        assert convert_timestamp(172800) == "2d 0h 0m 0s 0ms"

    def test_days_conversion_without_microseconds(self):
        """Test conversion involving days without microseconds"""
        assert convert_timestamp(86400, show_microseconds=False) == "1d 0h 0m 0s"
        assert convert_timestamp(86401, show_microseconds=False) == "1d 0h 0m 1s"
        assert convert_timestamp(90000, show_microseconds=False) == "1d 1h 0m 0s"

    def test_complex_combinations(self):
        """Test complex time combinations"""
        # 1 day, 2 hours, 3 minutes, 4 seconds
        total = 86400 + 7200 + 180 + 4
        assert convert_timestamp(total) == "1d 2h 3m 4s 0ms"

        # 5 days, 23 hours, 59 minutes, 59 seconds
        total = 5 * 86400 + 23 * 3600 + 59 * 60 + 59
        assert convert_timestamp(total) == "5d 23h 59m 59s 0ms"

    def test_fractional_seconds_with_microseconds(self):
        """Test fractional seconds showing microseconds"""
        # Note: ms value is the integer of the decimal part string after rounding to 4 places
        assert convert_timestamp(0.1) == "0s 1ms"  # 0.1 → "0.1" → ms=1
        assert convert_timestamp(0.123) == "0s 123ms"  # 0.123 → "0.123" → ms=123
        assert convert_timestamp(0.1234) == "0s 1234ms"  # 0.1234 → "0.1234" → ms=1234
        assert convert_timestamp(1.5) == "1s 5ms"  # 1.5 → "1.5" → ms=5
        assert convert_timestamp(1.567) == "1s 567ms"  # 1.567 → "1.567" → ms=567
        assert convert_timestamp(1.5678) == "1s 5678ms"  # 1.5678 rounds to 1.5678 → ms=5678

    def test_fractional_seconds_rounding(self):
        """Test rounding of fractional seconds to 4 decimal places"""
        # The function rounds to 4 decimal places before splitting
        assert convert_timestamp(0.12345) == "0s 1235ms"  # Rounds to 0.1235
        assert convert_timestamp(0.123456) == "0s 1235ms"  # Rounds to 0.1235
        assert convert_timestamp(1.99999) == "2s 0ms"  # Rounds to 2.0

    def test_fractional_seconds_with_larger_units(self):
        """Test fractional seconds combined with larger time units"""
        # 1 minute and 30.5 seconds
        assert convert_timestamp(90.5) == "1m 30s 5ms"

        # 1 hour, 1 minute, and 1.123 seconds
        total = 3600 + 60 + 1.123
        assert convert_timestamp(total) == "1h 1m 1s 123ms"

    def test_negative_values(self):
        """Test negative time values"""
        assert convert_timestamp(-1) == "-1s 0ms"
        assert convert_timestamp(-60) == "-1m 0s 0ms"
        assert convert_timestamp(-90) == "-1m 30s 0ms"
        assert convert_timestamp(-3661) == "-1h 1m 1s 0ms"
        assert convert_timestamp(-86401) == "-1d 0h 0m 1s 0ms"
        assert convert_timestamp(-1.5) == "-1s 5ms"
        assert convert_timestamp(-90.123) == "-1m 30s 123ms"

    def test_negative_without_microseconds(self):
        """Test negative values without microseconds"""
        assert convert_timestamp(-1, show_microseconds=False) == "-1s"
        assert convert_timestamp(-60, show_microseconds=False) == "-1m 0s"
        assert convert_timestamp(-90.123, show_microseconds=False) == "-1m 30s"

    def test_zero_handling(self):
        """Test various zero values"""
        assert convert_timestamp(0) == "0s 0ms"
        assert convert_timestamp(0.0) == "0s 0ms"
        assert convert_timestamp(-0) == "0s 0ms"
        assert convert_timestamp(-0.0) == "0s 0ms"

    def test_zero_filling_behavior(self):
        """Test that zeros are filled between set values"""
        # If we have days and seconds, hours and minutes should be 0
        assert convert_timestamp(86401) == "1d 0h 0m 1s 0ms"

        # If we have hours and seconds, minutes should be 0
        assert convert_timestamp(3601) == "1h 0m 1s 0ms"

        # If we have days and hours, minutes and seconds should be 0
        assert convert_timestamp(90000) == "1d 1h 0m 0s 0ms"

    def test_milliseconds_display(self):
        """Test milliseconds are always shown when show_microseconds=True"""
        # Even with no fractional part, 0ms should be shown
        assert convert_timestamp(1) == "1s 0ms"
        assert convert_timestamp(60) == "1m 0s 0ms"
        assert convert_timestamp(3600) == "1h 0m 0s 0ms"

        # With fractional part, ms should be shown
        assert convert_timestamp(1.001) == "1s 1ms"  # "1.001" → ms=1
        assert convert_timestamp(1.0001) == "1s 1ms"  # "1.0001" → ms=1

    def test_float_input_types(self):
        """Test various float input types"""
        assert convert_timestamp(1.0) == "1s 0ms"
        assert convert_timestamp(60.0) == "1m 0s 0ms"
        assert convert_timestamp(3600.0) == "1h 0m 0s 0ms"
        assert convert_timestamp(86400.0) == "1d 0h 0m 0s 0ms"

    def test_large_values(self):
        """Test handling of large time values"""
        # 365 days (1 year)
        year_seconds = 365 * 86400
        assert convert_timestamp(year_seconds) == "365d 0h 0m 0s 0ms"

        # 1000 days
        assert convert_timestamp(1000 * 86400) == "1000d 0h 0m 0s 0ms"

        # Large number with all units
        large_time = 999 * 86400 + 23 * 3600 + 59 * 60 + 59.999
        result = convert_timestamp(large_time)
        assert result.startswith("999d")
        assert "23h" in result
        assert "59m" in result
        assert "59s" in result
        assert "999ms" in result  # 59.999 rounds to 59.999, ms=999

    def test_invalid_input_types(self):
        """Test handling of invalid input types"""
        # String inputs should be returned as-is
        assert convert_timestamp("invalid") == "invalid"
        assert convert_timestamp("not a number") == "not a number"
        assert convert_timestamp("") == ""

    def test_string_numeric_inputs(self):
        """Test string inputs that represent numbers"""
        # String inputs should be returned as-is, even if they look like numbers
        assert convert_timestamp("60") == "60"
        assert convert_timestamp("1.5") == "1.5"
        assert convert_timestamp("0") == "0"
        assert convert_timestamp("-60") == "-60"

    def test_edge_cases_boundary_values(self):
        """Test edge cases at unit boundaries"""
        # Exactly 1 minute - 1 second
        assert convert_timestamp(59) == "59s 0ms"
        assert convert_timestamp(59.999) == "59s 999ms"

        # Exactly 1 hour - 1 second
        assert convert_timestamp(3599) == "59m 59s 0ms"
        assert convert_timestamp(3599.999) == "59m 59s 999ms"

        # Exactly 1 day - 1 second
        assert convert_timestamp(86399) == "23h 59m 59s 0ms"
        assert convert_timestamp(86399.999) == "23h 59m 59s 999ms"

    def test_very_small_fractional_seconds(self):
        """Test very small fractional values"""
        assert convert_timestamp(0.001) == "0s 1ms"  # 0.001 → "0.001" → ms=1
        assert convert_timestamp(0.0001) == "0s 1ms"  # 0.0001 → "0.0001" → ms=1
        assert convert_timestamp(0.00005) == "0s 1ms"  # 0.00005 rounds to 0.0001 → ms=1
        assert convert_timestamp(0.00004) == "0s 0ms"  # 0.00004 rounds to 0.0 → ms=0

    def test_milliseconds_extraction(self):
        """Test that milliseconds are correctly extracted from fractional part"""
        # The ms value is the integer of the decimal part string, not a conversion
        # So 0.1 → "0.1" → ms=1, NOT 100ms as you might expect
        assert convert_timestamp(0.1) == "0s 1ms"
        # 0.01 seconds → "0.01" → ms=1 (int("01") = 1)
        assert convert_timestamp(0.01) == "0s 1ms"
        # 0.001 seconds → "0.001" → ms=1
        assert convert_timestamp(0.001) == "0s 1ms"
        # 0.0001 seconds → "0.0001" → ms=1
        assert convert_timestamp(0.0001) == "0s 1ms"
        # 0.00004 seconds rounds to "0.0" → ms=0
        assert convert_timestamp(0.00004) == "0s 0ms"

    def test_comparison_with_seconds_to_string(self):
        """Test differences between convert_timestamp and seconds_to_string"""
        # convert_timestamp fills zeros and adds ms
        # seconds_to_string omits zeros and no ms
        assert convert_timestamp(86401) == "1d 0h 0m 1s 0ms"
        assert seconds_to_string(86401) == "1d 1s"

        assert convert_timestamp(3661) == "1h 1m 1s 0ms"
        assert seconds_to_string(3661) == "1h 1m 1s"

        # With microseconds disabled, still different due to zero-filling
        assert convert_timestamp(86401, show_microseconds=False) == "1d 0h 0m 1s"
        assert seconds_to_string(86401) == "1d 1s"

    def test_precision_consistency(self):
        """Test that precision is consistent across different scenarios"""
        # With other units present
        assert convert_timestamp(61.123456) == "1m 1s 1235ms"  # Rounds to 61.1235

        # Large values with fractional seconds
        large_val = 90061.123456  # 1d 1h 1m 1.123456s
        assert convert_timestamp(large_val) == "1d 1h 1m 1s 1235ms"  # Rounds to .1235

    def test_microseconds_flag_consistency(self):
        """Test that show_microseconds flag works consistently"""
        test_values = [0, 1, 60, 3600, 86400, 1.5, 90.123, -60]

        for val in test_values:
            with_ms = convert_timestamp(val, show_microseconds=True)
            without_ms = convert_timestamp(val, show_microseconds=False)

            # With microseconds should contain 'ms', without should not
            assert "ms" in with_ms
            assert "ms" not in without_ms

            # Both should start with same sign if negative
            if val < 0:
                assert with_ms.startswith("-")
                assert without_ms.startswith("-")

    def test_format_consistency(self):
        """Test that output format is consistent"""
        # All outputs should have consistent spacing and unit ordering
        # Format should be: [d ]h m s[ ms]
        result = convert_timestamp(93784.5678)  # 1d 2h 3m 4.5678s
        # 93784.5678 rounds to 93784.5678, splits to ["93784", "5678"]
        assert result == "1d 2h 3m 4s 5678ms"

        # Verify parts are in correct order
        parts = result.split()
        # Extract units properly: last 1-2 chars that are letters
        units = []
        for p in parts:
            if p.endswith('ms'):
                units.append('ms')
            elif p[-1].isalpha():
                units.append(p[-1])
        # Should be in order: d, h, m, s, ms
        expected_order = ['d', 'h', 'm', 's', 'ms']
        assert units == expected_order

# __END__
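Several of the convert_timestamp expectations above (0.1 -> 1ms, 0.01 -> 1ms, 1.99999 -> 0ms) only make sense once the millisecond part is read as the decimal-part string after rounding to four places, not as a real millisecond conversion. A small sketch of that assumed extraction (inferred from the assertions, not taken from the library source):

def ms_part(seconds: float) -> int:
    """Mimic the documented quirk: int() of the decimal-part string after round(..., 4)."""
    rounded = round(abs(seconds), 4)
    text = str(rounded)
    return int(text.split('.')[1]) if '.' in text else 0

print(ms_part(0.1))      # 1    -> "0.1"   -> "1"
print(ms_part(0.01))     # 1    -> "0.01"  -> "01"
print(ms_part(1.5678))   # 5678
print(ms_part(1.99999))  # 0    -> rounds to 2.0 -> "0"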
@@ -1,5 +1,5 @@
"""
PyTest: string_handling/timestamp_strings
PyTest: datetime_handling/timestamp_strings
"""

from datetime import datetime
@@ -8,7 +8,7 @@ from zoneinfo import ZoneInfo
import pytest

# Assuming the class is in a file called timestamp_strings.py
from corelibs.string_handling.timestamp_strings import TimestampStrings
from corelibs.datetime_handling.timestamp_strings import TimestampStrings


class TestTimestampStrings:
@@ -16,7 +16,7 @@ class TestTimestampStrings:

    def test_default_initialization(self):
        """Test initialization with default timezone"""
        with patch('corelibs.string_handling.timestamp_strings.datetime') as mock_datetime:
        with patch('corelibs.datetime_handling.timestamp_strings.datetime') as mock_datetime:
            mock_now = datetime(2023, 12, 25, 15, 30, 45)
            mock_datetime.now.return_value = mock_now

@@ -32,7 +32,7 @@ class TestTimestampStrings:
        """Test initialization with custom timezone"""
        custom_tz = 'America/New_York'

        with patch('corelibs.string_handling.timestamp_strings.datetime') as mock_datetime:
        with patch('corelibs.datetime_handling.timestamp_strings.datetime') as mock_datetime:
            mock_now = datetime(2023, 12, 25, 15, 30, 45)
            mock_datetime.now.return_value = mock_now

@@ -52,7 +52,7 @@ class TestTimestampStrings:

    def test_timestamp_formats(self):
        """Test various timestamp format outputs"""
        with patch('corelibs.string_handling.timestamp_strings.datetime') as mock_datetime:
        with patch('corelibs.datetime_handling.timestamp_strings.datetime') as mock_datetime:
            # Mock both datetime.now() calls
            mock_now = datetime(2023, 12, 25, 9, 5, 3)
            mock_now_tz = datetime(2023, 12, 25, 23, 5, 3, tzinfo=ZoneInfo('Asia/Tokyo'))
@@ -68,7 +68,7 @@ class TestTimestampStrings:

    def test_different_timezones_produce_different_results(self):
        """Test that different timezones produce different timestamp_tz values"""
        with patch('corelibs.string_handling.timestamp_strings.datetime') as mock_datetime:
        with patch('corelibs.datetime_handling.timestamp_strings.datetime') as mock_datetime:
            mock_now = datetime(2023, 12, 25, 12, 0, 0)
            mock_datetime.now.return_value = mock_now

@@ -86,7 +86,7 @@ class TestTimestampStrings:

    def test_none_timezone_uses_default(self):
        """Test that passing None for timezone uses class default"""
        with patch('corelibs.string_handling.timestamp_strings.datetime') as mock_datetime:
        with patch('corelibs.datetime_handling.timestamp_strings.datetime') as mock_datetime:
            mock_now = datetime(2023, 12, 25, 15, 30, 45)
            mock_datetime.now.return_value = mock_now

@@ -96,7 +96,7 @@ class TestTimestampStrings:

    def test_timestamp_file_format_no_colons(self):
        """Test that timestamp_file format doesn't contain colons (safe for filenames)"""
        with patch('corelibs.string_handling.timestamp_strings.datetime') as mock_datetime:
        with patch('corelibs.datetime_handling.timestamp_strings.datetime') as mock_datetime:
            mock_now = datetime(2023, 12, 25, 15, 30, 45)
            mock_datetime.now.return_value = mock_now

@@ -108,7 +108,7 @@ class TestTimestampStrings:

    def test_multiple_instances_independent(self):
        """Test that multiple instances don't interfere with each other"""
        with patch('corelibs.string_handling.timestamp_strings.datetime') as mock_datetime:
        with patch('corelibs.datetime_handling.timestamp_strings.datetime') as mock_datetime:
            mock_now = datetime(2023, 12, 25, 15, 30, 45)
            mock_datetime.now.return_value = mock_now
|
||||
|
||||
@@ -121,8 +121,8 @@ class TestTimestampStrings:
|
||||
|
||||
def test_zoneinfo_called_correctly_with_string(self):
|
||||
"""Test that ZoneInfo is called with correct timezone when passing string"""
|
||||
with patch('corelibs.string_handling.timestamp_strings.ZoneInfo') as mock_zoneinfo:
|
||||
with patch('corelibs.string_handling.timestamp_strings.datetime') as mock_datetime:
|
||||
with patch('corelibs.datetime_handling.timestamp_strings.ZoneInfo') as mock_zoneinfo:
|
||||
with patch('corelibs.datetime_handling.timestamp_strings.datetime') as mock_datetime:
|
||||
mock_now = datetime(2023, 12, 25, 15, 30, 45)
|
||||
mock_datetime.now.return_value = mock_now
|
||||
|
||||
@@ -134,7 +134,7 @@ class TestTimestampStrings:
|
||||
|
||||
def test_zoneinfo_object_parameter(self):
|
||||
"""Test that ZoneInfo objects can be passed directly as timezone parameter"""
|
||||
with patch('corelibs.string_handling.timestamp_strings.datetime') as mock_datetime:
|
||||
with patch('corelibs.datetime_handling.timestamp_strings.datetime') as mock_datetime:
|
||||
mock_now = datetime(2023, 12, 25, 15, 30, 45)
|
||||
mock_now_tz = datetime(2023, 12, 25, 15, 30, 45, tzinfo=ZoneInfo('Europe/Paris'))
|
||||
mock_datetime.now.side_effect = [mock_now, mock_now_tz]
|
||||
@@ -149,7 +149,7 @@ class TestTimestampStrings:
|
||||
|
||||
def test_zoneinfo_object_vs_string_equivalence(self):
|
||||
"""Test that ZoneInfo object and string produce equivalent results"""
|
||||
with patch('corelibs.string_handling.timestamp_strings.datetime') as mock_datetime:
|
||||
with patch('corelibs.datetime_handling.timestamp_strings.datetime') as mock_datetime:
|
||||
mock_now = datetime(2023, 12, 25, 15, 30, 45)
|
||||
mock_now_tz = datetime(2023, 12, 25, 15, 30, 45, tzinfo=ZoneInfo('Europe/Paris'))
|
||||
mock_datetime.now.side_effect = [mock_now, mock_now_tz, mock_now, mock_now_tz]
|
||||
@@ -171,7 +171,7 @@ class TestTimestampStrings:
|
||||
|
||||
def test_edge_case_midnight(self):
|
||||
"""Test timestamp formatting at midnight"""
|
||||
with patch('corelibs.string_handling.timestamp_strings.datetime') as mock_datetime:
|
||||
with patch('corelibs.datetime_handling.timestamp_strings.datetime') as mock_datetime:
|
||||
mock_now = datetime(2023, 12, 25, 0, 0, 0)
|
||||
mock_datetime.now.return_value = mock_now
|
||||
|
||||
@@ -182,7 +182,7 @@ class TestTimestampStrings:
|
||||
|
||||
def test_edge_case_new_year(self):
|
||||
"""Test timestamp formatting at new year"""
|
||||
with patch('corelibs.string_handling.timestamp_strings.datetime') as mock_datetime:
|
||||
with patch('corelibs.datetime_handling.timestamp_strings.datetime') as mock_datetime:
|
||||
mock_now = datetime(2024, 1, 1, 0, 0, 0)
|
||||
mock_datetime.now.return_value = mock_now
|
||||
|
||||
3
tests/unit/db_handling/__init__.py
Normal file
@@ -0,0 +1,3 @@
|
||||
"""
|
||||
db_handling tests
|
||||
"""
|
||||
1133
tests/unit/db_handling/test_sqlite_io.py
Normal file
File diff suppressed because it is too large
3
tests/unit/json_handling/__init__.py
Normal file
@@ -0,0 +1,3 @@
|
||||
"""
|
||||
tests for json_handling module
|
||||
"""
|
||||
698
tests/unit/json_handling/test_json_helper.py
Normal file
@@ -0,0 +1,698 @@
|
||||
"""
|
||||
tests for corelibs.json_handling.json_helper
|
||||
"""
|
||||
|
||||
import json
|
||||
from datetime import datetime, date
|
||||
from typing import Any
|
||||
from corelibs.json_handling.json_helper import (
|
||||
DateTimeEncoder,
|
||||
default,
|
||||
json_dumps,
|
||||
modify_with_jsonpath
|
||||
)
|
||||
|
||||
|
||||
# MARK: DateTimeEncoder tests
|
||||
class TestDateTimeEncoder:
|
||||
"""Test cases for DateTimeEncoder class"""
|
||||
|
||||
def test_datetime_encoding(self):
|
||||
"""Test encoding datetime objects"""
|
||||
dt = datetime(2025, 10, 23, 15, 30, 45, 123456)
|
||||
data = {"timestamp": dt}
|
||||
|
||||
result = json.dumps(data, cls=DateTimeEncoder)
|
||||
decoded = json.loads(result)
|
||||
|
||||
assert decoded["timestamp"] == "2025-10-23T15:30:45.123456"
|
||||
|
||||
def test_date_encoding(self):
|
||||
"""Test encoding date objects"""
|
||||
d = date(2025, 10, 23)
|
||||
data = {"date": d}
|
||||
|
||||
result = json.dumps(data, cls=DateTimeEncoder)
|
||||
decoded = json.loads(result)
|
||||
|
||||
assert decoded["date"] == "2025-10-23"
|
||||
|
||||
def test_mixed_datetime_date_encoding(self):
|
||||
"""Test encoding mixed datetime and date objects"""
|
||||
dt = datetime(2025, 10, 23, 15, 30, 45)
|
||||
d = date(2025, 10, 23)
|
||||
data = {
|
||||
"timestamp": dt,
|
||||
"date": d,
|
||||
"name": "test"
|
||||
}
|
||||
|
||||
result = json.dumps(data, cls=DateTimeEncoder)
|
||||
decoded = json.loads(result)
|
||||
|
||||
assert decoded["timestamp"] == "2025-10-23T15:30:45"
|
||||
assert decoded["date"] == "2025-10-23"
|
||||
assert decoded["name"] == "test"
|
||||
|
||||
def test_nested_datetime_encoding(self):
|
||||
"""Test encoding nested structures with datetime objects"""
|
||||
data = {
|
||||
"event": {
|
||||
"name": "Meeting",
|
||||
"start": datetime(2025, 10, 23, 10, 0, 0),
|
||||
"end": datetime(2025, 10, 23, 11, 0, 0),
|
||||
"participants": [
|
||||
{"name": "Alice", "joined": datetime(2025, 10, 23, 10, 5, 0)},
|
||||
{"name": "Bob", "joined": datetime(2025, 10, 23, 10, 10, 0)}
|
||||
]
|
||||
}
|
||||
}
|
||||
|
||||
result = json.dumps(data, cls=DateTimeEncoder)
|
||||
decoded = json.loads(result)
|
||||
|
||||
assert decoded["event"]["start"] == "2025-10-23T10:00:00"
|
||||
assert decoded["event"]["end"] == "2025-10-23T11:00:00"
|
||||
assert decoded["event"]["participants"][0]["joined"] == "2025-10-23T10:05:00"
|
||||
assert decoded["event"]["participants"][1]["joined"] == "2025-10-23T10:10:00"
|
||||
|
||||
def test_list_of_datetimes(self):
|
||||
"""Test encoding list of datetime objects"""
|
||||
data = {
|
||||
"timestamps": [
|
||||
datetime(2025, 10, 23, 10, 0, 0),
|
||||
datetime(2025, 10, 23, 11, 0, 0),
|
||||
datetime(2025, 10, 23, 12, 0, 0)
|
||||
]
|
||||
}
|
||||
|
||||
result = json.dumps(data, cls=DateTimeEncoder)
|
||||
decoded = json.loads(result)
|
||||
|
||||
assert decoded["timestamps"][0] == "2025-10-23T10:00:00"
|
||||
assert decoded["timestamps"][1] == "2025-10-23T11:00:00"
|
||||
assert decoded["timestamps"][2] == "2025-10-23T12:00:00"
|
||||
|
||||
def test_encoder_with_normal_types(self):
|
||||
"""Test that encoder works with standard JSON types"""
|
||||
data = {
|
||||
"string": "test",
|
||||
"number": 42,
|
||||
"float": 3.14,
|
||||
"boolean": True,
|
||||
"null": None,
|
||||
"list": [1, 2, 3],
|
||||
"dict": {"key": "value"}
|
||||
}
|
||||
|
||||
result = json.dumps(data, cls=DateTimeEncoder)
|
||||
decoded = json.loads(result)
|
||||
|
||||
assert decoded == data
|
||||
|
||||
def test_encoder_returns_none_for_unsupported_types(self):
|
||||
"""Test that encoder default method returns None for unsupported types"""
|
||||
encoder = DateTimeEncoder()
|
||||
|
||||
# The default method should return None for non-date/datetime objects
|
||||
result = encoder.default("string")
|
||||
assert result is None
|
||||
|
||||
result = encoder.default(42)
|
||||
assert result is None
|
||||
|
||||
result = encoder.default([1, 2, 3])
|
||||
assert result is None
|
||||
|
||||
|
||||
# MARK: default function tests
|
||||
class TestDefaultFunction:
|
||||
"""Test cases for the default function"""
|
||||
|
||||
def test_default_datetime(self):
|
||||
"""Test default function with datetime"""
|
||||
dt = datetime(2025, 10, 23, 15, 30, 45)
|
||||
result = default(dt)
|
||||
assert result == "2025-10-23T15:30:45"
|
||||
|
||||
def test_default_date(self):
|
||||
"""Test default function with date"""
|
||||
d = date(2025, 10, 23)
|
||||
result = default(d)
|
||||
assert result == "2025-10-23"
|
||||
|
||||
def test_default_with_microseconds(self):
|
||||
"""Test default function with datetime including microseconds"""
|
||||
dt = datetime(2025, 10, 23, 15, 30, 45, 123456)
|
||||
result = default(dt)
|
||||
assert result == "2025-10-23T15:30:45.123456"
|
||||
|
||||
def test_default_returns_none_for_other_types(self):
|
||||
"""Test that default returns None for non-date/datetime objects"""
|
||||
assert default("string") is None
|
||||
assert default(42) is None
|
||||
assert default(3.14) is None
|
||||
assert default(True) is None
|
||||
assert default(None) is None
|
||||
assert default([1, 2, 3]) is None
|
||||
assert default({"key": "value"}) is None
|
||||
|
||||
def test_default_as_json_default_parameter(self):
|
||||
"""Test using default function as default parameter in json.dumps"""
|
||||
data = {
|
||||
"timestamp": datetime(2025, 10, 23, 15, 30, 45),
|
||||
"date": date(2025, 10, 23),
|
||||
"name": "test"
|
||||
}
|
||||
|
||||
result = json.dumps(data, default=default)
|
||||
decoded = json.loads(result)
|
||||
|
||||
assert decoded["timestamp"] == "2025-10-23T15:30:45"
|
||||
assert decoded["date"] == "2025-10-23"
|
||||
assert decoded["name"] == "test"
|
||||
|
||||
|
||||
# MARK: json_dumps tests
|
||||
class TestJsonDumps:
|
||||
"""Test cases for json_dumps function"""
|
||||
|
||||
def test_basic_dict(self):
|
||||
"""Test json_dumps with basic dictionary"""
|
||||
data = {"name": "test", "value": 42}
|
||||
result = json_dumps(data)
|
||||
decoded = json.loads(result)
|
||||
assert decoded == data
|
||||
|
||||
def test_unicode_characters(self):
|
||||
"""Test json_dumps preserves unicode characters (ensure_ascii=False)"""
|
||||
data = {"name": "テスト", "emoji": "🎉", "chinese": "测试"}
|
||||
result = json_dumps(data)
|
||||
|
||||
# ensure_ascii=False means unicode characters should be preserved
|
||||
assert "テスト" in result
|
||||
assert "🎉" in result
|
||||
assert "测试" in result
|
||||
|
||||
decoded = json.loads(result)
|
||||
assert decoded == data
|
||||
|
||||
def test_datetime_objects_as_string(self):
|
||||
"""Test json_dumps converts datetime to string (default=str)"""
|
||||
dt = datetime(2025, 10, 23, 15, 30, 45)
|
||||
data = {"timestamp": dt}
|
||||
|
||||
result = json_dumps(data)
|
||||
decoded = json.loads(result)
|
||||
|
||||
# default=str will convert datetime to its string representation
|
||||
assert isinstance(decoded["timestamp"], str)
|
||||
assert "2025-10-23" in decoded["timestamp"]
|
||||
|
||||
def test_date_objects_as_string(self):
|
||||
"""Test json_dumps converts date to string"""
|
||||
d = date(2025, 10, 23)
|
||||
data = {"date": d}
|
||||
|
||||
result = json_dumps(data)
|
||||
decoded = json.loads(result)
|
||||
|
||||
assert isinstance(decoded["date"], str)
|
||||
assert "2025-10-23" in decoded["date"]
|
||||
|
||||
def test_complex_nested_structure(self):
|
||||
"""Test json_dumps with complex nested structures"""
|
||||
data = {
|
||||
"user": {
|
||||
"name": "John",
|
||||
"age": 30,
|
||||
"active": True,
|
||||
"balance": 100.50,
|
||||
"tags": ["admin", "user"],
|
||||
"metadata": {
|
||||
"created": datetime(2025, 1, 1, 0, 0, 0),
|
||||
"updated": date(2025, 10, 23)
|
||||
}
|
||||
},
|
||||
"items": [
|
||||
{"id": 1, "name": "Item 1"},
|
||||
{"id": 2, "name": "Item 2"}
|
||||
]
|
||||
}
|
||||
|
||||
result = json_dumps(data)
|
||||
decoded = json.loads(result)
|
||||
|
||||
assert decoded["user"]["name"] == "John"
|
||||
assert decoded["user"]["age"] == 30
|
||||
assert decoded["user"]["active"] is True
|
||||
assert decoded["user"]["balance"] == 100.50
|
||||
assert decoded["user"]["tags"] == ["admin", "user"]
|
||||
assert decoded["items"][0]["id"] == 1
|
||||
|
||||
def test_empty_dict(self):
|
||||
"""Test json_dumps with empty dictionary"""
|
||||
data: dict[str, Any] = {}
|
||||
result = json_dumps(data)
|
||||
assert result == "{}"
|
||||
|
||||
def test_empty_list(self):
|
||||
"""Test json_dumps with empty list"""
|
||||
data: list[Any] = []
|
||||
result = json_dumps(data)
|
||||
assert result == "[]"
|
||||
|
||||
def test_list_data(self):
|
||||
"""Test json_dumps with list as root element"""
|
||||
data = [1, 2, 3, "test", True, None]
|
||||
result = json_dumps(data)
|
||||
decoded = json.loads(result)
|
||||
assert decoded == data
|
||||
|
||||
def test_none_value(self):
|
||||
"""Test json_dumps with None"""
|
||||
data = None
|
||||
result = json_dumps(data)
|
||||
assert result == "null"
|
||||
|
||||
def test_boolean_values(self):
|
||||
"""Test json_dumps with boolean values"""
|
||||
data = {"true_val": True, "false_val": False}
|
||||
result = json_dumps(data)
|
||||
decoded = json.loads(result)
|
||||
assert decoded["true_val"] is True
|
||||
assert decoded["false_val"] is False
|
||||
|
||||
def test_numeric_values(self):
|
||||
"""Test json_dumps with various numeric values"""
|
||||
data = {
|
||||
"int": 42,
|
||||
"float": 3.14,
|
||||
"negative": -10,
|
||||
"zero": 0,
|
||||
"scientific": 1e10
|
||||
}
|
||||
result = json_dumps(data)
|
||||
decoded = json.loads(result)
|
||||
assert decoded == data
|
||||
|
||||
def test_custom_object_conversion(self):
|
||||
"""Test json_dumps with custom objects (converted via str)"""
|
||||
class CustomObject:
|
||||
"""test class"""
|
||||
def __str__(self):
|
||||
return "custom_value"
|
||||
|
||||
data = {"custom": CustomObject()}
|
||||
result = json_dumps(data)
|
||||
decoded = json.loads(result)
|
||||
assert decoded["custom"] == "custom_value"
|
||||
|
||||
def test_special_float_values(self):
|
||||
"""Test json_dumps handles special float values"""
|
||||
data = {
|
||||
"infinity": float('inf'),
|
||||
"neg_infinity": float('-inf'),
|
||||
"nan": float('nan')
|
||||
}
|
||||
result = json_dumps(data)
|
||||
# The json module serializes these as Infinity/-Infinity/NaN literals (default=str is not invoked for floats)
|
||||
assert "Infinity" in result or "inf" in result.lower()
|
||||
|
||||
|
||||
# MARK: modify_with_jsonpath tests
|
||||
class TestModifyWithJsonpath:
|
||||
"""Test cases for modify_with_jsonpath function"""
|
||||
|
||||
def test_simple_path_modification(self):
|
||||
"""Test modifying a simple path"""
|
||||
data = {"name": "old_name", "age": 30}
|
||||
result = modify_with_jsonpath(data, "$.name", "new_name")
|
||||
|
||||
assert result["name"] == "new_name"
|
||||
assert result["age"] == 30
|
||||
# Original data should not be modified
|
||||
assert data["name"] == "old_name"
|
||||
|
||||
def test_nested_path_modification(self):
|
||||
"""Test modifying nested path"""
|
||||
data = {
|
||||
"user": {
|
||||
"profile": {
|
||||
"name": "John",
|
||||
"age": 30
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
result = modify_with_jsonpath(data, "$.user.profile.name", "Jane")
|
||||
|
||||
assert result["user"]["profile"]["name"] == "Jane"
|
||||
assert result["user"]["profile"]["age"] == 30
|
||||
# Original should be unchanged
|
||||
assert data["user"]["profile"]["name"] == "John"
|
||||
|
||||
def test_array_index_modification(self):
|
||||
"""Test modifying array element by index"""
|
||||
data = {
|
||||
"items": [
|
||||
{"id": 1, "name": "Item 1"},
|
||||
{"id": 2, "name": "Item 2"},
|
||||
{"id": 3, "name": "Item 3"}
|
||||
]
|
||||
}
|
||||
|
||||
result = modify_with_jsonpath(data, "$.items[1].name", "Updated Item 2")
|
||||
|
||||
assert result["items"][1]["name"] == "Updated Item 2"
|
||||
assert result["items"][0]["name"] == "Item 1"
|
||||
assert result["items"][2]["name"] == "Item 3"
|
||||
# Original unchanged
|
||||
assert data["items"][1]["name"] == "Item 2"
|
||||
|
||||
def test_wildcard_modification(self):
|
||||
"""Test modifying multiple elements with wildcard"""
|
||||
data = {
|
||||
"users": [
|
||||
{"name": "Alice", "active": True},
|
||||
{"name": "Bob", "active": True},
|
||||
{"name": "Charlie", "active": True}
|
||||
]
|
||||
}
|
||||
|
||||
result = modify_with_jsonpath(data, "$.users[*].active", False)
|
||||
|
||||
# All active fields should be updated
|
||||
for user in result["users"]:
|
||||
assert user["active"] is False
|
||||
# Original unchanged
|
||||
for user in data["users"]:
|
||||
assert user["active"] is True
|
||||
|
||||
def test_deep_copy_behavior(self):
|
||||
"""Test that modifications don't affect the original data"""
|
||||
original = {
|
||||
"level1": {
|
||||
"level2": {
|
||||
"level3": {
|
||||
"value": "original"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
result = modify_with_jsonpath(original, "$.level1.level2.level3.value", "modified")
|
||||
|
||||
assert result["level1"]["level2"]["level3"]["value"] == "modified"
|
||||
assert original["level1"]["level2"]["level3"]["value"] == "original"
|
||||
|
||||
# Verify deep copy by modifying nested dict in result
|
||||
result["level1"]["level2"]["new_key"] = "new_value"
|
||||
assert "new_key" not in original["level1"]["level2"]
|
||||
|
||||
def test_modify_to_different_type(self):
|
||||
"""Test changing value to different type"""
|
||||
data = {"count": "10"}
|
||||
result = modify_with_jsonpath(data, "$.count", 10)
|
||||
|
||||
assert result["count"] == 10
|
||||
assert isinstance(result["count"], int)
|
||||
assert data["count"] == "10"
|
||||
|
||||
def test_modify_to_complex_object(self):
|
||||
"""Test replacing value with complex object"""
|
||||
data = {"simple": "value"}
|
||||
new_value = {"complex": {"nested": "structure"}}
|
||||
|
||||
result = modify_with_jsonpath(data, "$.simple", new_value)
|
||||
|
||||
assert result["simple"] == new_value
|
||||
assert result["simple"]["complex"]["nested"] == "structure"
|
||||
|
||||
def test_modify_to_list(self):
|
||||
"""Test replacing value with list"""
|
||||
data = {"items": None}
|
||||
result = modify_with_jsonpath(data, "$.items", [1, 2, 3])
|
||||
|
||||
assert result["items"] == [1, 2, 3]
|
||||
assert data["items"] is None
|
||||
|
||||
def test_modify_to_none(self):
|
||||
"""Test setting value to None"""
|
||||
data = {"value": "something"}
|
||||
result = modify_with_jsonpath(data, "$.value", None)
|
||||
|
||||
assert result["value"] is None
|
||||
assert data["value"] == "something"
|
||||
|
||||
def test_recursive_descent(self):
|
||||
"""Test using recursive descent operator"""
|
||||
data: dict[str, Any] = {
|
||||
"store": {
|
||||
"book": [
|
||||
{"title": "Book 1", "price": 10},
|
||||
{"title": "Book 2", "price": 20}
|
||||
],
|
||||
"bicycle": {
|
||||
"price": 100
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
# Update all prices
|
||||
result = modify_with_jsonpath(data, "$..price", 0)
|
||||
|
||||
assert result["store"]["book"][0]["price"] == 0
|
||||
assert result["store"]["book"][1]["price"] == 0
|
||||
assert result["store"]["bicycle"]["price"] == 0
|
||||
# Original unchanged
|
||||
assert data["store"]["book"][0]["price"] == 10
|
||||
|
||||
def test_specific_array_elements(self):
|
||||
"""Test updating specific array elements by index"""
|
||||
data = {
|
||||
"products": [
|
||||
{"name": "Product 1", "price": 100, "stock": 5},
|
||||
{"name": "Product 2", "price": 200, "stock": 0},
|
||||
{"name": "Product 3", "price": 150, "stock": 10}
|
||||
]
|
||||
}
|
||||
|
||||
# Update first product's price
|
||||
result = modify_with_jsonpath(data, "$.products[0].price", 0)
|
||||
|
||||
assert result["products"][0]["price"] == 0
|
||||
assert result["products"][1]["price"] == 200 # not modified
|
||||
assert result["products"][2]["price"] == 150 # not modified
|
||||
|
||||
def test_empty_dict(self):
|
||||
"""Test modifying empty dictionary"""
|
||||
data: dict[str, Any] = {}
|
||||
result = modify_with_jsonpath(data, "$.nonexistent", "value")
|
||||
|
||||
# Should return the original empty dict since path doesn't exist
|
||||
assert result == {}
|
||||
|
||||
def test_complex_real_world_scenario(self):
|
||||
"""Test complex real-world modification scenario"""
|
||||
data: dict[str, Any] = {
|
||||
"api_version": "1.0",
|
||||
"config": {
|
||||
"database": {
|
||||
"host": "localhost",
|
||||
"port": 5432,
|
||||
"credentials": {
|
||||
"username": "admin",
|
||||
"password": "secret"
|
||||
}
|
||||
},
|
||||
"services": [
|
||||
{"name": "auth", "enabled": True, "port": 8001},
|
||||
{"name": "api", "enabled": True, "port": 8002},
|
||||
{"name": "cache", "enabled": False, "port": 8003}
|
||||
]
|
||||
}
|
||||
}
|
||||
|
||||
# Update database port
|
||||
result = modify_with_jsonpath(data, "$.config.database.port", 5433)
|
||||
assert result["config"]["database"]["port"] == 5433
|
||||
|
||||
# Update all service ports
|
||||
result2 = modify_with_jsonpath(result, "$.config.services[*].enabled", True)
|
||||
assert all(service["enabled"] for service in result2["config"]["services"])
|
||||
|
||||
# Original unchanged
|
||||
assert data["config"]["database"]["port"] == 5432
|
||||
assert data["config"]["services"][2]["enabled"] is False
|
||||
|
||||
def test_list_slice_modification(self):
|
||||
"""Test modifying list slice"""
|
||||
data = {"numbers": [1, 2, 3, 4, 5]}
|
||||
|
||||
# Modify first three elements
|
||||
result = modify_with_jsonpath(data, "$.numbers[0:3]", 0)
|
||||
|
||||
assert result["numbers"][0] == 0
|
||||
assert result["numbers"][1] == 0
|
||||
assert result["numbers"][2] == 0
|
||||
assert result["numbers"][3] == 4
|
||||
assert result["numbers"][4] == 5
|
||||
|
||||
def test_modify_with_datetime_value(self):
|
||||
"""Test modifying with datetime value"""
|
||||
data = {"timestamp": "2025-01-01T00:00:00"}
|
||||
new_datetime = datetime(2025, 10, 23, 15, 30, 45)
|
||||
|
||||
result = modify_with_jsonpath(data, "$.timestamp", new_datetime)
|
||||
|
||||
assert result["timestamp"] == new_datetime
|
||||
assert isinstance(result["timestamp"], datetime)
|
||||
|
||||
|
||||
# MARK: Integration tests
|
||||
class TestIntegration:
|
||||
"""Integration tests combining multiple functions"""
|
||||
|
||||
def test_encoder_and_json_dumps_comparison(self):
|
||||
"""Test that DateTimeEncoder and json_dumps handle datetimes differently"""
|
||||
dt = datetime(2025, 10, 23, 15, 30, 45)
|
||||
data = {"timestamp": dt}
|
||||
|
||||
# Using DateTimeEncoder produces ISO format
|
||||
with_encoder = json.dumps(data, cls=DateTimeEncoder)
|
||||
decoded_encoder = json.loads(with_encoder)
|
||||
assert decoded_encoder["timestamp"] == "2025-10-23T15:30:45"
|
||||
|
||||
# Using json_dumps (default=str) produces string representation
|
||||
with_dumps = json_dumps(data)
|
||||
decoded_dumps = json.loads(with_dumps)
|
||||
assert isinstance(decoded_dumps["timestamp"], str)
|
||||
assert "2025-10-23" in decoded_dumps["timestamp"]
|
||||
|
||||
def test_modify_and_serialize(self):
|
||||
"""Test modifying data and then serializing it"""
|
||||
data = {
|
||||
"event": {
|
||||
"name": "Meeting",
|
||||
"date": date(2025, 10, 23),
|
||||
"attendees": [
|
||||
{"name": "Alice", "confirmed": False},
|
||||
{"name": "Bob", "confirmed": False}
|
||||
]
|
||||
}
|
||||
}
|
||||
|
||||
# Modify confirmation status
|
||||
modified = modify_with_jsonpath(data, "$.event.attendees[*].confirmed", True)
|
||||
|
||||
# Serialize with datetime handling
|
||||
serialized = json.dumps(modified, cls=DateTimeEncoder)
|
||||
decoded = json.loads(serialized)
|
||||
|
||||
assert decoded["event"]["date"] == "2025-10-23"
|
||||
assert decoded["event"]["attendees"][0]["confirmed"] is True
|
||||
assert decoded["event"]["attendees"][1]["confirmed"] is True
|
||||
|
||||
def test_round_trip_with_modification(self):
|
||||
"""Test full round trip: serialize -> modify -> serialize"""
|
||||
original = {
|
||||
"config": {
|
||||
"updated": datetime(2025, 10, 23, 15, 30, 45),
|
||||
"version": "1.0"
|
||||
}
|
||||
}
|
||||
|
||||
# Serialize
|
||||
json_str = json.dumps(original, cls=DateTimeEncoder)
|
||||
|
||||
# Deserialize
|
||||
deserialized = json.loads(json_str)
|
||||
|
||||
# Modify
|
||||
modified = modify_with_jsonpath(deserialized, "$.config.version", "2.0")
|
||||
|
||||
# Serialize again
|
||||
final_json = json_dumps(modified)
|
||||
final_data = json.loads(final_json)
|
||||
|
||||
assert final_data["config"]["version"] == "2.0"
|
||||
assert final_data["config"]["updated"] == "2025-10-23T15:30:45"
|
||||
|
||||
|
||||
# MARK: Edge cases
|
||||
class TestEdgeCases:
|
||||
"""Test edge cases and error scenarios"""
|
||||
|
||||
def test_circular_reference_in_modify(self):
|
||||
"""Test that modify_with_jsonpath handles data without circular references"""
|
||||
# Note: JSON doesn't support circular references, so we test normal nested data
|
||||
data = {
|
||||
"a": {
|
||||
"b": {
|
||||
"c": "value"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
result = modify_with_jsonpath(data, "$.a.b.c", "new_value")
|
||||
assert result["a"]["b"]["c"] == "new_value"
|
||||
|
||||
def test_unicode_in_keys_and_values(self):
|
||||
"""Test handling unicode in both keys and values"""
|
||||
data = {
|
||||
"日本語": "テスト",
|
||||
"emoji_🎉": "🚀",
|
||||
"normal": "value"
|
||||
}
|
||||
|
||||
result = json_dumps(data)
|
||||
decoded = json.loads(result)
|
||||
|
||||
assert decoded["日本語"] == "テスト"
|
||||
assert decoded["emoji_🎉"] == "🚀"
|
||||
assert decoded["normal"] == "value"
|
||||
|
||||
def test_very_nested_structure(self):
|
||||
"""Test deeply nested structure"""
|
||||
# Create a 10-level deep nested structure
|
||||
data: dict[str, Any] = {"level0": {}}
|
||||
current = data["level0"]
|
||||
for i in range(1, 10):
|
||||
current[f"level{i}"] = {}
|
||||
current = current[f"level{i}"]
|
||||
current["value"] = "deep_value"
|
||||
|
||||
result = modify_with_jsonpath(data, "$..value", "modified_deep_value")
|
||||
|
||||
# Navigate to the deep value
|
||||
current = result["level0"]
|
||||
for i in range(1, 10):
|
||||
current = current[f"level{i}"]
|
||||
assert current["value"] == "modified_deep_value"
|
||||
|
||||
def test_large_list_modification(self):
|
||||
"""Test modifying large list"""
|
||||
data = {"items": [{"id": i, "value": i * 10} for i in range(100)]}
|
||||
|
||||
result = modify_with_jsonpath(data, "$.items[*].value", 0)
|
||||
|
||||
assert all(item["value"] == 0 for item in result["items"])
|
||||
assert len(result["items"]) == 100
|
||||
|
||||
def test_mixed_date_types_encoding(self):
|
||||
"""Test encoding with both date and datetime in same structure"""
|
||||
data = {
|
||||
"created_date": date(2025, 10, 23),
|
||||
"created_datetime": datetime(2025, 10, 23, 15, 30, 45),
|
||||
"updated_date": date(2025, 10, 24),
|
||||
"updated_datetime": datetime(2025, 10, 24, 16, 45, 30)
|
||||
}
|
||||
|
||||
result = json.dumps(data, cls=DateTimeEncoder)
|
||||
decoded = json.loads(result)
|
||||
|
||||
assert decoded["created_date"] == "2025-10-23"
|
||||
assert decoded["created_datetime"] == "2025-10-23T15:30:45"
|
||||
assert decoded["updated_date"] == "2025-10-24"
|
||||
assert decoded["updated_datetime"] == "2025-10-24T16:45:30"
|
||||
@@ -1,186 +0,0 @@
|
||||
"""
|
||||
PyTest: string_handling/timestamp_strings - seconds_to_string function
|
||||
"""
|
||||
|
||||
from corelibs.string_handling.timestamp_strings import seconds_to_string
|
||||
|
||||
|
||||
class TestSecondsToString:
|
||||
"""Test suite for seconds_to_string function"""
|
||||
|
||||
def test_basic_integer_seconds(self):
|
||||
"""Test conversion of basic integer seconds"""
|
||||
assert seconds_to_string(0) == "0s"
|
||||
assert seconds_to_string(1) == "1s"
|
||||
assert seconds_to_string(30) == "30s"
|
||||
assert seconds_to_string(59) == "59s"
|
||||
|
||||
def test_minutes_conversion(self):
|
||||
"""Test conversion involving minutes"""
|
||||
assert seconds_to_string(60) == "1m"
|
||||
assert seconds_to_string(90) == "1m 30s"
|
||||
assert seconds_to_string(120) == "2m"
|
||||
assert seconds_to_string(3599) == "59m 59s"
|
||||
|
||||
def test_hours_conversion(self):
|
||||
"""Test conversion involving hours"""
|
||||
assert seconds_to_string(3600) == "1h"
|
||||
assert seconds_to_string(3660) == "1h 1m"
|
||||
assert seconds_to_string(3661) == "1h 1m 1s"
|
||||
assert seconds_to_string(7200) == "2h"
|
||||
assert seconds_to_string(7260) == "2h 1m"
|
||||
|
||||
def test_days_conversion(self):
|
||||
"""Test conversion involving days"""
|
||||
assert seconds_to_string(86400) == "1d"
|
||||
assert seconds_to_string(86401) == "1d 1s"
|
||||
assert seconds_to_string(90000) == "1d 1h"
|
||||
assert seconds_to_string(90061) == "1d 1h 1m 1s"
|
||||
assert seconds_to_string(172800) == "2d"
|
||||
|
||||
def test_complex_combinations(self):
|
||||
"""Test complex time combinations"""
|
||||
# 1 day, 2 hours, 3 minutes, 4 seconds
|
||||
total = 86400 + 7200 + 180 + 4
|
||||
assert seconds_to_string(total) == "1d 2h 3m 4s"
|
||||
|
||||
# 5 days, 23 hours, 59 minutes, 59 seconds
|
||||
total = 5 * 86400 + 23 * 3600 + 59 * 60 + 59
|
||||
assert seconds_to_string(total) == "5d 23h 59m 59s"
|
||||
|
||||
def test_fractional_seconds_default_precision(self):
|
||||
"""Test fractional seconds with default precision (3 decimal places)"""
|
||||
assert seconds_to_string(0.1) == "0.1s"
|
||||
assert seconds_to_string(0.123) == "0.123s"
|
||||
assert seconds_to_string(0.1234) == "0.123s"
|
||||
assert seconds_to_string(1.5) == "1.5s"
|
||||
assert seconds_to_string(1.567) == "1.567s"
|
||||
assert seconds_to_string(1.5678) == "1.568s"
|
||||
|
||||
def test_fractional_seconds_microsecond_precision(self):
|
||||
"""Test fractional seconds with microsecond precision"""
|
||||
assert seconds_to_string(0.1, show_microseconds=True) == "0.1s"
|
||||
assert seconds_to_string(0.123456, show_microseconds=True) == "0.123456s"
|
||||
assert seconds_to_string(0.1234567, show_microseconds=True) == "0.123457s"
|
||||
assert seconds_to_string(1.5, show_microseconds=True) == "1.5s"
|
||||
assert seconds_to_string(1.567890, show_microseconds=True) == "1.56789s"
|
||||
|
||||
def test_fractional_seconds_with_larger_units(self):
|
||||
"""Test fractional seconds combined with larger time units"""
|
||||
# 1 minute and 30.5 seconds
|
||||
assert seconds_to_string(90.5) == "1m 30.5s"
|
||||
assert seconds_to_string(90.5, show_microseconds=True) == "1m 30.5s"
|
||||
|
||||
# 1 hour, 1 minute, and 1.123 seconds
|
||||
total = 3600 + 60 + 1.123
|
||||
assert seconds_to_string(total) == "1h 1m 1.123s"
|
||||
assert seconds_to_string(total, show_microseconds=True) == "1h 1m 1.123s"
|
||||
|
||||
def test_negative_values(self):
|
||||
"""Test negative time values"""
|
||||
assert seconds_to_string(-1) == "-1s"
|
||||
assert seconds_to_string(-60) == "-1m"
|
||||
assert seconds_to_string(-90) == "-1m 30s"
|
||||
assert seconds_to_string(-3661) == "-1h 1m 1s"
|
||||
assert seconds_to_string(-86401) == "-1d 1s"
|
||||
assert seconds_to_string(-1.5) == "-1.5s"
|
||||
assert seconds_to_string(-90.123) == "-1m 30.123s"
|
||||
|
||||
def test_zero_handling(self):
|
||||
"""Test various zero values"""
|
||||
assert seconds_to_string(0) == "0s"
|
||||
assert seconds_to_string(0.0) == "0s"
|
||||
assert seconds_to_string(-0) == "0s"
|
||||
assert seconds_to_string(-0.0) == "0s"
|
||||
|
||||
def test_float_input_types(self):
|
||||
"""Test various float input types"""
|
||||
assert seconds_to_string(1.0) == "1s"
|
||||
assert seconds_to_string(60.0) == "1m"
|
||||
assert seconds_to_string(3600.0) == "1h"
|
||||
assert seconds_to_string(86400.0) == "1d"
|
||||
|
||||
def test_large_values(self):
|
||||
"""Test handling of large time values"""
|
||||
# 365 days (1 year)
|
||||
year_seconds = 365 * 86400
|
||||
assert seconds_to_string(year_seconds) == "365d"
|
||||
|
||||
# 1000 days
|
||||
assert seconds_to_string(1000 * 86400) == "1000d"
|
||||
|
||||
# Large number with all units
|
||||
large_time = 999 * 86400 + 23 * 3600 + 59 * 60 + 59.999
|
||||
result = seconds_to_string(large_time)
|
||||
assert result.startswith("999d")
|
||||
assert "23h" in result
|
||||
assert "59m" in result
|
||||
assert "59.999s" in result
|
||||
|
||||
def test_rounding_behavior(self):
|
||||
"""Test rounding behavior for fractional seconds"""
|
||||
# Default precision (3 decimal places) - trailing zeros are stripped after rounding
assert seconds_to_string(1.0004) == "1s" # rounds to 1.000, trailing zeros stripped
assert seconds_to_string(1.0005) == "1s" # rounds to 1.000, trailing zeros stripped
assert seconds_to_string(1.9999) == "2s" # rounds up to 2.000 and strips .000
|
||||
|
||||
# Microsecond precision (6 decimal places)
|
||||
assert seconds_to_string(1.0000004, show_microseconds=True) == "1s"
|
||||
assert seconds_to_string(1.0000005, show_microseconds=True) == "1.000001s"
|
||||
|
||||
def test_trailing_zero_removal(self):
|
||||
"""Test that trailing zeros are properly removed"""
|
||||
assert seconds_to_string(1.100) == "1.1s"
|
||||
assert seconds_to_string(1.120) == "1.12s"
|
||||
assert seconds_to_string(1.123) == "1.123s"
|
||||
assert seconds_to_string(1.100000, show_microseconds=True) == "1.1s"
|
||||
assert seconds_to_string(1.123000, show_microseconds=True) == "1.123s"
|
||||
|
||||
def test_invalid_input_types(self):
|
||||
"""Test handling of invalid input types"""
|
||||
# String inputs should be returned as-is
|
||||
assert seconds_to_string("invalid") == "invalid"
|
||||
assert seconds_to_string("not a number") == "not a number"
|
||||
assert seconds_to_string("") == ""
|
||||
|
||||
def test_edge_cases_boundary_values(self):
|
||||
"""Test edge cases at unit boundaries"""
|
||||
# Exactly 1 minute - 1 second
|
||||
assert seconds_to_string(59) == "59s"
|
||||
assert seconds_to_string(59.999) == "59.999s"
|
||||
|
||||
# Exactly 1 hour - 1 second
|
||||
assert seconds_to_string(3599) == "59m 59s"
|
||||
assert seconds_to_string(3599.999) == "59m 59.999s"
|
||||
|
||||
# Exactly 1 day - 1 second
|
||||
assert seconds_to_string(86399) == "23h 59m 59s"
|
||||
assert seconds_to_string(86399.999) == "23h 59m 59.999s"
|
||||
|
||||
def test_very_small_fractional_seconds(self):
|
||||
"""Test very small fractional values"""
|
||||
assert seconds_to_string(0.001) == "0.001s"
|
||||
assert seconds_to_string(0.0001) == "0s" # Below default precision
|
||||
assert seconds_to_string(0.000001, show_microseconds=True) == "0.000001s"
|
||||
assert seconds_to_string(0.0000001, show_microseconds=True) == "0s" # Below microsecond precision
|
||||
|
||||
def test_precision_consistency(self):
|
||||
"""Test that precision is consistent across different scenarios"""
|
||||
# With other units present
|
||||
assert seconds_to_string(61.123456) == "1m 1.123s"
|
||||
assert seconds_to_string(61.123456, show_microseconds=True) == "1m 1.123456s"
|
||||
|
||||
# Large values with fractional seconds
|
||||
large_val = 90061.123456 # 1d 1h 1m 1.123456s
|
||||
assert seconds_to_string(large_val) == "1d 1h 1m 1.123s"
|
||||
assert seconds_to_string(large_val, show_microseconds=True) == "1d 1h 1m 1.123456s"
|
||||
|
||||
def test_string_numeric_inputs(self):
|
||||
"""Test string inputs that represent numbers"""
|
||||
# String inputs should be returned as-is, even if they look like numbers
|
||||
assert seconds_to_string("60") == "60"
|
||||
assert seconds_to_string("1.5") == "1.5"
|
||||
assert seconds_to_string("0") == "0"
|
||||
assert seconds_to_string("-60") == "-60"
|
||||
|
||||
# __END__
|
||||
@@ -5,7 +5,7 @@ PyTest: string_handling/string_helpers
|
||||
from textwrap import shorten
|
||||
import pytest
|
||||
from corelibs.string_handling.string_helpers import (
|
||||
shorten_string, left_fill, format_number
|
||||
shorten_string, left_fill, format_number, prepare_url_slash
|
||||
)
|
||||
|
||||
|
||||
@@ -191,6 +191,75 @@ class TestFormatNumber:
|
||||
assert result == "0.001"
|
||||
|
||||
|
||||
class TestPrepareUrlSlash:
|
||||
"""Tests for prepare_url_slash function"""
|
||||
|
||||
def test_url_without_leading_slash(self):
|
||||
"""Test that URL without leading slash gets one added"""
|
||||
result = prepare_url_slash("api/users")
|
||||
assert result == "/api/users"
|
||||
|
||||
def test_url_with_leading_slash(self):
|
||||
"""Test that URL with leading slash remains unchanged"""
|
||||
result = prepare_url_slash("/api/users")
|
||||
assert result == "/api/users"
|
||||
|
||||
def test_url_with_double_slashes(self):
|
||||
"""Test that double slashes are reduced to single slash"""
|
||||
result = prepare_url_slash("/api//users")
|
||||
assert result == "/api/users"
|
||||
|
||||
def test_url_with_multiple_slashes(self):
|
||||
"""Test that multiple consecutive slashes are reduced to single slash"""
|
||||
result = prepare_url_slash("api///users////data")
|
||||
assert result == "/api/users/data"
|
||||
|
||||
def test_url_with_leading_double_slash(self):
|
||||
"""Test URL starting with double slash"""
|
||||
result = prepare_url_slash("//api/users")
|
||||
assert result == "/api/users"
|
||||
|
||||
def test_url_without_slash_and_double_slashes(self):
|
||||
"""Test URL without leading slash and containing double slashes"""
|
||||
result = prepare_url_slash("api//users//data")
|
||||
assert result == "/api/users/data"
|
||||
|
||||
def test_single_slash(self):
|
||||
"""Test single slash URL"""
|
||||
result = prepare_url_slash("/")
|
||||
assert result == "/"
|
||||
|
||||
def test_multiple_slashes_only(self):
|
||||
"""Test URL with only multiple slashes"""
|
||||
result = prepare_url_slash("///")
|
||||
assert result == "/"
|
||||
|
||||
def test_empty_string(self):
|
||||
"""Test empty string"""
|
||||
result = prepare_url_slash("")
|
||||
assert result == "/"
|
||||
|
||||
def test_url_with_query_params(self):
|
||||
"""Test URL with query parameters"""
|
||||
result = prepare_url_slash("/api/users?id=1")
|
||||
assert result == "/api/users?id=1"
|
||||
|
||||
def test_url_with_double_slashes_and_query(self):
|
||||
"""Test URL with double slashes and query parameters"""
|
||||
result = prepare_url_slash("api//users?id=1")
|
||||
assert result == "/api/users?id=1"
|
||||
|
||||
def test_complex_url_path(self):
|
||||
"""Test complex URL path with multiple segments"""
|
||||
result = prepare_url_slash("api/v1/users/123/profile")
|
||||
assert result == "/api/v1/users/123/profile"
|
||||
|
||||
def test_complex_url_with_multiple_issues(self):
|
||||
"""Test URL with both missing leading slash and multiple double slashes"""
|
||||
result = prepare_url_slash("api//v1///users//123////profile")
|
||||
assert result == "/api/v1/users/123/profile"
|
||||
|
||||
|
||||
# Additional integration tests
|
||||
class TestIntegration:
|
||||
"""Integration tests combining functions"""
|
||||
@@ -236,4 +305,23 @@ def test_format_number_parametrized(number: float | int, precision: int, expecte
|
||||
"""Parametrized test for format_number"""
|
||||
assert format_number(number, precision) == expected
|
||||
|
||||
|
||||
@pytest.mark.parametrize("input_url,expected", [
|
||||
("api/users", "/api/users"),
|
||||
("/api/users", "/api/users"),
|
||||
("api//users", "/api/users"),
|
||||
("/api//users", "/api/users"),
|
||||
("//api/users", "/api/users"),
|
||||
("api///users////data", "/api/users/data"),
|
||||
("/", "/"),
|
||||
("///", "/"),
|
||||
("", "/"),
|
||||
("api/v1/users/123", "/api/v1/users/123"),
|
||||
("/api/users?id=1&name=test", "/api/users?id=1&name=test"),
|
||||
("api//users//123//profile", "/api/users/123/profile"),
|
||||
])
|
||||
def test_prepare_url_slash_parametrized(input_url: str, expected: str):
|
||||
"""Parametrized test for prepare_url_slash"""
|
||||
assert prepare_url_slash(input_url) == expected
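# A minimal implementation consistent with the cases above might look like this
# (illustrative assumption only, not the corelibs source):
#   import re
#
#   def prepare_url_slash(url: str) -> str:
#       # collapse repeated slashes, then force exactly one leading slash
#       return "/" + re.sub(r"/+", "/", url).lstrip("/")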
|
||||
|
||||
# __END__
|
||||
|
||||
3
tests/unit/var_handling/__init__.py
Normal file
@@ -0,0 +1,3 @@
|
||||
"""
|
||||
var_handling tests
|
||||
"""
|
||||
546
tests/unit/var_handling/test_enum_base.py
Normal file
@@ -0,0 +1,546 @@
|
||||
"""
|
||||
var_handling.enum_base tests
|
||||
"""
|
||||
|
||||
from typing import Any
|
||||
import pytest
|
||||
from corelibs.var_handling.enum_base import EnumBase
|
||||
|
||||
|
||||
class SampleBlock(EnumBase):
|
||||
"""Sample block enum for testing purposes"""
|
||||
BLOCK_A = "block_a"
|
||||
BLOCK_B = "block_b"
|
||||
HAS_NUM = 5
|
||||
HAS_FLOAT = 3.14
|
||||
LEGACY_KEY = "legacy_value"
|
||||
|
||||
|
||||
class SimpleEnum(EnumBase):
|
||||
"""Simple enum with string values"""
|
||||
OPTION_ONE = "one"
|
||||
OPTION_TWO = "two"
|
||||
OPTION_THREE = "three"
|
||||
|
||||
|
||||
class NumericEnum(EnumBase):
|
||||
"""Enum with only numeric values"""
|
||||
FIRST = 1
|
||||
SECOND = 2
|
||||
THIRD = 3
|
||||
|
||||
|
||||
class TestEnumBaseLookupKey:
|
||||
"""Test cases for lookup_key class method"""
|
||||
|
||||
def test_lookup_key_valid_uppercase(self):
|
||||
"""Test lookup_key with valid uppercase key"""
|
||||
result = SampleBlock.lookup_key("BLOCK_A")
|
||||
assert result == SampleBlock.BLOCK_A
|
||||
assert result.name == "BLOCK_A"
|
||||
assert result.value == "block_a"
|
||||
|
||||
def test_lookup_key_valid_lowercase(self):
|
||||
"""Test lookup_key with valid lowercase key (should convert to uppercase)"""
|
||||
result = SampleBlock.lookup_key("block_a")
|
||||
assert result == SampleBlock.BLOCK_A
|
||||
assert result.name == "BLOCK_A"
|
||||
|
||||
def test_lookup_key_valid_mixed_case(self):
|
||||
"""Test lookup_key with mixed case key"""
|
||||
result = SampleBlock.lookup_key("BlOcK_a")
|
||||
assert result == SampleBlock.BLOCK_A
|
||||
assert result.name == "BLOCK_A"
|
||||
|
||||
def test_lookup_key_with_numeric_enum(self):
|
||||
"""Test lookup_key with numeric enum member"""
|
||||
result = SampleBlock.lookup_key("HAS_NUM")
|
||||
assert result == SampleBlock.HAS_NUM
|
||||
assert result.value == 5
|
||||
|
||||
def test_lookup_key_legacy_colon_replacement(self):
|
||||
"""Test lookup_key with legacy colon format (converts : to ___)"""
|
||||
# Legacy format replaces ":" with "___" before lookup, so "BLOCK:A" becomes "BLOCK___A"
# SampleBlock has no such member, so the lookup should raise ValueError
with pytest.raises(ValueError, match="Invalid key"):
SampleBlock.lookup_key("BLOCK:A")  # fails because BLOCK___A doesn't exist
|
||||
|
||||
def test_lookup_key_invalid_key(self):
|
||||
"""Test lookup_key with invalid key"""
|
||||
with pytest.raises(ValueError, match="Invalid key: NONEXISTENT"):
|
||||
SampleBlock.lookup_key("NONEXISTENT")
|
||||
|
||||
def test_lookup_key_empty_string(self):
|
||||
"""Test lookup_key with empty string"""
|
||||
with pytest.raises(ValueError, match="Invalid key"):
|
||||
SampleBlock.lookup_key("")
|
||||
|
||||
def test_lookup_key_with_special_characters(self):
|
||||
"""Test lookup_key with special characters that might cause AttributeError"""
|
||||
with pytest.raises(ValueError, match="Invalid key"):
|
||||
SampleBlock.lookup_key("@#$%")
|
||||
|
||||
def test_lookup_key_numeric_string(self):
|
||||
"""Test lookup_key with numeric string that isn't a key"""
|
||||
with pytest.raises(ValueError, match="Invalid key"):
|
||||
SampleBlock.lookup_key("123")
|
||||
|
||||
|
||||
class TestEnumBaseLookupValue:
|
||||
"""Test cases for lookup_value class method"""
|
||||
|
||||
def test_lookup_value_valid_string(self):
|
||||
"""Test lookup_value with valid string value"""
|
||||
result = SampleBlock.lookup_value("block_a")
|
||||
assert result == SampleBlock.BLOCK_A
|
||||
assert result.name == "BLOCK_A"
|
||||
assert result.value == "block_a"
|
||||
|
||||
def test_lookup_value_valid_integer(self):
|
||||
"""Test lookup_value with valid integer value"""
|
||||
result = SampleBlock.lookup_value(5)
|
||||
assert result == SampleBlock.HAS_NUM
|
||||
assert result.name == "HAS_NUM"
|
||||
assert result.value == 5
|
||||
|
||||
def test_lookup_value_valid_float(self):
|
||||
"""Test lookup_value with valid float value"""
|
||||
result = SampleBlock.lookup_value(3.14)
|
||||
assert result == SampleBlock.HAS_FLOAT
|
||||
assert result.name == "HAS_FLOAT"
|
||||
assert result.value == 3.14
|
||||
|
||||
def test_lookup_value_invalid_string(self):
|
||||
"""Test lookup_value with invalid string value"""
|
||||
with pytest.raises(ValueError, match="Invalid value: nonexistent"):
|
||||
SampleBlock.lookup_value("nonexistent")
|
||||
|
||||
def test_lookup_value_invalid_integer(self):
|
||||
"""Test lookup_value with invalid integer value"""
|
||||
with pytest.raises(ValueError, match="Invalid value: 999"):
|
||||
SampleBlock.lookup_value(999)
|
||||
|
||||
def test_lookup_value_case_sensitive(self):
|
||||
"""Test that lookup_value is case-sensitive for string values"""
|
||||
with pytest.raises(ValueError, match="Invalid value"):
|
||||
SampleBlock.lookup_value("BLOCK_A") # Value is "block_a", not "BLOCK_A"
|
||||
|
||||
|
||||
class TestEnumBaseFromAny:
|
||||
"""Test cases for from_any class method"""
|
||||
|
||||
def test_from_any_with_enum_instance(self):
|
||||
"""Test from_any with an enum instance (should return as-is)"""
|
||||
enum_instance = SampleBlock.BLOCK_A
|
||||
result = SampleBlock.from_any(enum_instance)
|
||||
assert result is enum_instance
|
||||
assert result == SampleBlock.BLOCK_A
|
||||
|
||||
def test_from_any_with_string_as_key(self):
|
||||
"""Test from_any with string that matches a key"""
|
||||
result = SampleBlock.from_any("BLOCK_A")
|
||||
assert result == SampleBlock.BLOCK_A
|
||||
assert result.name == "BLOCK_A"
|
||||
assert result.value == "block_a"
|
||||
|
||||
def test_from_any_with_string_as_key_lowercase(self):
|
||||
"""Test from_any with lowercase string key"""
|
||||
result = SampleBlock.from_any("block_a")
|
||||
# Should first try as key (convert to uppercase and find BLOCK_A)
|
||||
assert result == SampleBlock.BLOCK_A
|
||||
|
||||
def test_from_any_with_string_as_value(self):
|
||||
"""Test from_any with string that only matches a value"""
|
||||
# Use a value that isn't also a valid key
|
||||
result = SampleBlock.from_any("block_b")
|
||||
# Should try key first (fail), then value (succeed)
|
||||
assert result == SampleBlock.BLOCK_B
|
||||
assert result.value == "block_b"
|
||||
|
||||
def test_from_any_with_integer(self):
|
||||
"""Test from_any with integer value"""
|
||||
result = SampleBlock.from_any(5)
|
||||
assert result == SampleBlock.HAS_NUM
|
||||
assert result.value == 5
|
||||
|
||||
def test_from_any_with_float(self):
|
||||
"""Test from_any with float value"""
|
||||
result = SampleBlock.from_any(3.14)
|
||||
assert result == SampleBlock.HAS_FLOAT
|
||||
assert result.value == 3.14
|
||||
|
||||
def test_from_any_with_invalid_string(self):
|
||||
"""Test from_any with string that doesn't match key or value"""
|
||||
with pytest.raises(ValueError, match="Could not find as key or value: invalid_string"):
|
||||
SampleBlock.from_any("invalid_string")
|
||||
|
||||
def test_from_any_with_invalid_integer(self):
|
||||
"""Test from_any with integer that doesn't match any value"""
|
||||
with pytest.raises(ValueError, match="Invalid value: 999"):
|
||||
SampleBlock.from_any(999)
|
||||
|
||||
def test_from_any_string_key_priority(self):
|
||||
"""Test that from_any tries key lookup before value for strings"""
|
||||
# Create an enum where a value matches another key
|
||||
class AmbiguousEnum(EnumBase):
|
||||
KEY_A = "key_b" # Value is the name of another key
|
||||
KEY_B = "value_b"
|
||||
|
||||
# When we look up "KEY_B", it should find it as a key, not as value "key_b"
|
||||
result = AmbiguousEnum.from_any("KEY_B")
|
||||
assert result == AmbiguousEnum.KEY_B
|
||||
assert result.value == "value_b"
|
||||
|
||||
|
||||
class TestEnumBaseToValue:
|
||||
"""Test cases for to_value instance method"""
|
||||
|
||||
def test_to_value_string_value(self):
|
||||
"""Test to_value with string enum value"""
|
||||
result = SampleBlock.BLOCK_A.to_value()
|
||||
assert result == "block_a"
|
||||
assert isinstance(result, str)
|
||||
|
||||
def test_to_value_integer_value(self):
|
||||
"""Test to_value with integer enum value"""
|
||||
result = SampleBlock.HAS_NUM.to_value()
|
||||
assert result == 5
|
||||
assert isinstance(result, int)
|
||||
|
||||
def test_to_value_float_value(self):
|
||||
"""Test to_value with float enum value"""
|
||||
result = SampleBlock.HAS_FLOAT.to_value()
|
||||
assert result == 3.14
|
||||
assert isinstance(result, float)
|
||||
|
||||
def test_to_value_equals_value_attribute(self):
|
||||
"""Test that to_value returns the same as .value"""
|
||||
enum_instance = SampleBlock.BLOCK_A
|
||||
assert enum_instance.to_value() == enum_instance.value
|
||||
|
||||
|
||||
class TestEnumBaseToLowerCase:
|
||||
"""Test cases for to_lower_case instance method"""
|
||||
|
||||
def test_to_lower_case_uppercase_name(self):
|
||||
"""Test to_lower_case with uppercase enum name"""
|
||||
result = SampleBlock.BLOCK_A.to_lower_case()
|
||||
assert result == "block_a"
|
||||
assert isinstance(result, str)
|
||||
|
||||
def test_to_lower_case_mixed_name(self):
|
||||
"""Test to_lower_case with name containing underscores"""
|
||||
result = SampleBlock.HAS_NUM.to_lower_case()
|
||||
assert result == "has_num"
|
||||
|
||||
def test_to_lower_case_consistency(self):
|
||||
"""Test that to_lower_case always returns lowercase"""
|
||||
for member in SampleBlock:
|
||||
result = member.to_lower_case()
|
||||
assert result == result.lower()
|
||||
assert result == member.name.lower()
|
||||
|
||||
|
||||
class TestEnumBaseStrMethod:
|
||||
"""Test cases for __str__ magic method"""
|
||||
|
||||
def test_str_returns_name(self):
|
||||
"""Test that str() returns the enum name"""
|
||||
result = str(SampleBlock.BLOCK_A)
|
||||
assert result == "BLOCK_A"
|
||||
assert result == SampleBlock.BLOCK_A.name
|
||||
|
||||
def test_str_all_members(self):
|
||||
"""Test str() for all enum members"""
|
||||
for member in SampleBlock:
|
||||
result = str(member)
|
||||
assert result == member.name
|
||||
assert isinstance(result, str)
|
||||
|
||||
def test_str_in_formatting(self):
|
||||
"""Test that str works in string formatting"""
|
||||
formatted = f"Enum: {SampleBlock.BLOCK_A}"
|
||||
assert formatted == "Enum: BLOCK_A"
|
||||
|
||||
def test_str_vs_repr(self):
|
||||
"""Test difference between str and repr"""
|
||||
enum_instance = SampleBlock.BLOCK_A
|
||||
str_result = str(enum_instance)
|
||||
repr_result = repr(enum_instance)
|
||||
|
||||
assert str_result == "BLOCK_A"
|
||||
# repr should include class name
|
||||
assert "SampleBlock" in repr_result
|
||||
|
||||
|
||||
# Parametrized tests for comprehensive coverage
|
||||
class TestParametrized:
|
||||
"""Parametrized tests for better coverage"""
|
||||
|
||||
@pytest.mark.parametrize("key,expected_member", [
|
||||
("BLOCK_A", SampleBlock.BLOCK_A),
|
||||
("block_a", SampleBlock.BLOCK_A),
|
||||
("BLOCK_B", SampleBlock.BLOCK_B),
|
||||
("HAS_NUM", SampleBlock.HAS_NUM),
|
||||
("has_num", SampleBlock.HAS_NUM),
|
||||
("HAS_FLOAT", SampleBlock.HAS_FLOAT),
|
||||
])
|
||||
def test_lookup_key_parametrized(self, key: str, expected_member: EnumBase):
|
||||
"""Test lookup_key with various valid keys"""
|
||||
result = SampleBlock.lookup_key(key)
|
||||
assert result == expected_member
|
||||
|
||||
@pytest.mark.parametrize("value,expected_member", [
|
||||
("block_a", SampleBlock.BLOCK_A),
|
||||
("block_b", SampleBlock.BLOCK_B),
|
||||
(5, SampleBlock.HAS_NUM),
|
||||
(3.14, SampleBlock.HAS_FLOAT),
|
||||
("legacy_value", SampleBlock.LEGACY_KEY),
|
||||
])
|
||||
def test_lookup_value_parametrized(self, value: Any, expected_member: EnumBase):
|
||||
"""Test lookup_value with various valid values"""
|
||||
result = SampleBlock.lookup_value(value)
|
||||
assert result == expected_member
|
||||
|
||||
@pytest.mark.parametrize("input_any,expected_member", [
|
||||
("BLOCK_A", SampleBlock.BLOCK_A),
|
||||
("block_a", SampleBlock.BLOCK_A),
|
||||
("block_b", SampleBlock.BLOCK_B),
|
||||
(5, SampleBlock.HAS_NUM),
|
||||
(3.14, SampleBlock.HAS_FLOAT),
|
||||
(SampleBlock.BLOCK_A, SampleBlock.BLOCK_A), # Pass enum instance
|
||||
])
|
||||
def test_from_any_parametrized(self, input_any: Any, expected_member: EnumBase):
|
||||
"""Test from_any with various valid inputs"""
|
||||
result = SampleBlock.from_any(input_any)
|
||||
assert result == expected_member
|
||||
|
||||
@pytest.mark.parametrize("invalid_key", [
|
||||
"NONEXISTENT",
|
||||
"invalid",
|
||||
"123",
|
||||
"",
|
||||
"BLOCK_C",
|
||||
])
|
||||
def test_lookup_key_invalid_parametrized(self, invalid_key: str):
|
||||
"""Test lookup_key with various invalid keys"""
|
||||
with pytest.raises(ValueError, match="Invalid key"):
|
||||
SampleBlock.lookup_key(invalid_key)
|
||||
|
||||
@pytest.mark.parametrize("invalid_value", [
|
||||
"nonexistent",
|
||||
999,
|
||||
-1,
|
||||
0.0,
|
||||
"BLOCK_A", # This is a key name, not a value
|
||||
])
|
||||
def test_lookup_value_invalid_parametrized(self, invalid_value: Any):
|
||||
"""Test lookup_value with various invalid values"""
|
||||
with pytest.raises(ValueError, match="Invalid value"):
|
||||
SampleBlock.lookup_value(invalid_value)
|
||||
|
||||
|


# Edge cases and special scenarios
class TestEdgeCases:
    """Test edge cases and special scenarios"""

    def test_enum_with_single_member(self):
        """Test EnumBase with only one member"""
        class SingleEnum(EnumBase):
            ONLY_ONE = "single"

        result = SingleEnum.from_any("ONLY_ONE")
        assert result == SingleEnum.ONLY_ONE
        assert result.to_value() == "single"

    def test_enum_iteration(self):
        """Test iterating over enum members"""
        members = list(SampleBlock)
        assert len(members) == 5
        assert SampleBlock.BLOCK_A in members
        assert SampleBlock.BLOCK_B in members
        assert SampleBlock.HAS_NUM in members

    def test_enum_membership(self):
        """Test checking membership in enum"""
        assert SampleBlock.BLOCK_A in SampleBlock
        assert SampleBlock.HAS_NUM in SampleBlock

    def test_enum_comparison(self):
        """Test comparing enum members"""
        assert SampleBlock.BLOCK_A == SampleBlock.BLOCK_A
        assert SampleBlock.BLOCK_A != SampleBlock.BLOCK_B
        assert SampleBlock.from_any("BLOCK_A") == SampleBlock.BLOCK_A

    def test_enum_identity(self):
        """Test enum member identity"""
        member1 = SampleBlock.BLOCK_A
        member2 = SampleBlock.lookup_key("BLOCK_A")
        member3 = SampleBlock.from_any("BLOCK_A")

        assert member1 is member2
        assert member1 is member3
        assert member2 is member3

    def test_different_enum_classes(self):
        """Test that different enum classes are distinct"""
        # Even if they have the same keys/values, they are distinct classes
        class OtherEnum(EnumBase):
            BLOCK_A = "block_a"

        result1 = SampleBlock.from_any("BLOCK_A")
        result2 = OtherEnum.from_any("BLOCK_A")

        assert result1 != result2
        assert not isinstance(result1, type(result2))

    def test_numeric_enum_operations(self):
        """Test operations specific to numeric enums"""
        assert NumericEnum.FIRST.to_value() == 1
        assert NumericEnum.SECOND.to_value() == 2
        assert NumericEnum.THIRD.to_value() == 3

        # Test from_any with integers
        assert NumericEnum.from_any(1) == NumericEnum.FIRST
        assert NumericEnum.from_any(2) == NumericEnum.SECOND

    def test_mixed_value_types_in_same_enum(self):
        """Test enum with mixed value types"""
        # SampleBlock already has mixed types (strings, int, float)
        assert isinstance(SampleBlock.BLOCK_A.to_value(), str)
        assert isinstance(SampleBlock.HAS_NUM.to_value(), int)
        assert isinstance(SampleBlock.HAS_FLOAT.to_value(), float)

    def test_from_any_chained_calls(self):
        """Test that from_any can be chained (idempotent)"""
        result1 = SampleBlock.from_any("BLOCK_A")
        result2 = SampleBlock.from_any(result1)
        result3 = SampleBlock.from_any(result2)

        assert result1 == result2 == result3
        assert result1 is result2 is result3


# Integration tests
class TestIntegration:
    """Integration tests combining multiple methods"""

    def test_round_trip_key_lookup(self):
        """Test round-trip from key to enum and back"""
        original_key = "BLOCK_A"
        enum_member = SampleBlock.lookup_key(original_key)
        result_name = str(enum_member)

        assert result_name == original_key

    def test_round_trip_value_lookup(self):
        """Test round-trip from value to enum and back"""
        original_value = "block_a"
        enum_member = SampleBlock.lookup_value(original_value)
        result_value = enum_member.to_value()

        assert result_value == original_value

    def test_from_any_workflow(self):
        """Test realistic workflow using from_any"""
        # Simulate receiving various types of input
        inputs = [
            "BLOCK_A",              # Key as string
            "block_b",              # Value as string
            5,                      # Numeric value
            SampleBlock.HAS_FLOAT,  # Already an enum
        ]

        expected = [
            SampleBlock.BLOCK_A,
            SampleBlock.BLOCK_B,
            SampleBlock.HAS_NUM,
            SampleBlock.HAS_FLOAT,
        ]

        for input_val, expected_val in zip(inputs, expected):
            result = SampleBlock.from_any(input_val)
            assert result == expected_val

    def test_enum_in_dictionary(self):
        """Test using enum as dictionary key"""
        enum_dict = {
            SampleBlock.BLOCK_A: "Value A",
            SampleBlock.BLOCK_B: "Value B",
            SampleBlock.HAS_NUM: "Value Num",
        }

        assert enum_dict[SampleBlock.BLOCK_A] == "Value A"
        block_b = SampleBlock.from_any("BLOCK_B")
        assert isinstance(block_b, SampleBlock)
        assert enum_dict[block_b] == "Value B"

    def test_enum_in_set(self):
        """Test using enum in a set"""
        enum_set = {SampleBlock.BLOCK_A, SampleBlock.BLOCK_B, SampleBlock.BLOCK_A}

        assert len(enum_set) == 2  # BLOCK_A should be deduplicated
        assert SampleBlock.BLOCK_A in enum_set
        assert SampleBlock.from_any("BLOCK_B") in enum_set


# Real-world usage scenarios
class TestRealWorldScenarios:
    """Test real-world usage scenarios from enum_test.py"""

    def test_original_enum_test_scenario(self):
        """Test the scenario from the original enum_test.py"""
        # BLOCK A: {SampleBlock.from_any('BLOCK_A')}
        result_a = SampleBlock.from_any('BLOCK_A')
        assert result_a == SampleBlock.BLOCK_A
        assert str(result_a) == "BLOCK_A"

        # HAS NUM: {SampleBlock.from_any(5)}
        result_num = SampleBlock.from_any(5)
        assert result_num == SampleBlock.HAS_NUM
        assert result_num.to_value() == 5

        # DIRECT BLOCK: {SampleBlock.BLOCK_A.name} -> {SampleBlock.BLOCK_A.value}
        assert SampleBlock.BLOCK_A.name == "BLOCK_A"
        assert SampleBlock.BLOCK_A.value == "block_a"

    def test_config_value_parsing(self):
        """Test parsing values from configuration (common use case)"""
        # Simulate config values that might come as strings
        config_values = ["OPTION_ONE", "option_two", "OPTION_THREE"]

        results = [SimpleEnum.from_any(val) for val in config_values]

        assert results[0] == SimpleEnum.OPTION_ONE
        assert results[1] == SimpleEnum.OPTION_TWO
        assert results[2] == SimpleEnum.OPTION_THREE

    def test_api_response_mapping(self):
        """Test mapping API response values to enum"""
        # Simulate API returning numeric codes
        api_codes = [1, 2, 3]

        results = [NumericEnum.from_any(code) for code in api_codes]

        assert results[0] == NumericEnum.FIRST
        assert results[1] == NumericEnum.SECOND
        assert results[2] == NumericEnum.THIRD

    def test_validation_with_error_handling(self):
        """Test validation with proper error handling"""
        valid_input = "BLOCK_A"
        invalid_input = "INVALID"

        # Valid input should work
        result = SampleBlock.from_any(valid_input)
        assert result == SampleBlock.BLOCK_A

        # Invalid input should raise ValueError
        try:
            SampleBlock.from_any(invalid_input)
            assert False, "Should have raised ValueError"
        except ValueError as e:
            assert "Could not find as key or value" in str(e)
            assert "INVALID" in str(e)
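Aside: the assertions above pin down the entire surface these tests rely on: a name-based `__str__`, `to_value()`, a case-insensitive `lookup_key()`, an exact-match `lookup_value()`, and an idempotent `from_any()` that raises `ValueError` with the messages "Invalid key", "Invalid value", and "Could not find as key or value". The following is a minimal sketch with those semantics, reconstructed only from the test expectations; it is not the actual `var_handling` implementation, and the class name `EnumBaseSketch` is a placeholder.

```python
from enum import Enum
from typing import Any


class EnumBaseSketch(Enum):
    """Hypothetical stand-in for an EnumBase with the behaviour the tests assert."""

    def __str__(self) -> str:
        # Tests expect str(member) to be the member name
        return self.name

    def to_value(self) -> Any:
        # Tests expect to_value() to return the raw enum value
        return self.value

    @classmethod
    def lookup_key(cls, key: str) -> "EnumBaseSketch":
        # Case-insensitive name lookup; message matches pytest.raises(match="Invalid key")
        for member in cls:
            if member.name.upper() == str(key).upper():
                return member
        raise ValueError(f"Invalid key: {key}")

    @classmethod
    def lookup_value(cls, value: Any) -> "EnumBaseSketch":
        # Exact value lookup; message matches pytest.raises(match="Invalid value")
        for member in cls:
            if member.value == value:
                return member
        raise ValueError(f"Invalid value: {value}")

    @classmethod
    def from_any(cls, entry: Any) -> "EnumBaseSketch":
        # Accept an existing member, a key name, or a value (idempotent, as tested)
        if isinstance(entry, cls):
            return entry
        try:
            return cls.lookup_key(str(entry))
        except ValueError:
            pass
        try:
            return cls.lookup_value(entry)
        except ValueError as exc:
            raise ValueError(f"Could not find as key or value: {entry}") from exc
```

Keeping the fallback order key-first, then value, is what makes `from_any("block_a")` and `from_any(5)` both resolve as the tests expect while still returning the same singleton member, so the identity (`is`) assertions hold.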
48
uv.lock
generated
@@ -108,17 +108,19 @@ wheels = [

[[package]]
name = "corelibs"
version = "0.26.0"
version = "0.30.0"
source = { editable = "." }
dependencies = [
    { name = "cryptography" },
    { name = "jmespath" },
    { name = "jsonpath-ng" },
    { name = "psutil" },
    { name = "requests" },
]

[package.dev-dependencies]
dev = [
    { name = "deepdiff" },
    { name = "pytest" },
    { name = "pytest-cov" },
]
@@ -127,12 +129,14 @@ dev = [
requires-dist = [
    { name = "cryptography", specifier = ">=46.0.3" },
    { name = "jmespath", specifier = ">=1.0.1" },
    { name = "jsonpath-ng", specifier = ">=1.7.0" },
    { name = "psutil", specifier = ">=7.0.0" },
    { name = "requests", specifier = ">=2.32.4" },
]

[package.metadata.requires-dev]
dev = [
    { name = "deepdiff", specifier = ">=8.6.1" },
    { name = "pytest", specifier = ">=8.4.1" },
    { name = "pytest-cov", specifier = ">=6.2.1" },
]
@@ -254,6 +258,18 @@ wheels = [
    { url = "https://files.pythonhosted.org/packages/e8/cb/2da4cc83f5edb9c3257d09e1e7ab7b23f049c7962cae8d842bbef0a9cec9/cryptography-46.0.3-cp38-abi3-win_arm64.whl", hash = "sha256:d89c3468de4cdc4f08a57e214384d0471911a3830fcdaf7a8cc587e42a866372", size = 2918740, upload-time = "2025-10-15T23:18:12.277Z" },
]

[[package]]
name = "deepdiff"
version = "8.6.1"
source = { registry = "https://pypi.org/simple" }
dependencies = [
    { name = "orderly-set" },
]
sdist = { url = "https://files.pythonhosted.org/packages/19/76/36c9aab3d5c19a94091f7c6c6e784efca50d87b124bf026c36e94719f33c/deepdiff-8.6.1.tar.gz", hash = "sha256:ec56d7a769ca80891b5200ec7bd41eec300ced91ebcc7797b41eb2b3f3ff643a", size = 634054, upload-time = "2025-09-03T19:40:41.461Z" }
wheels = [
    { url = "https://files.pythonhosted.org/packages/f7/e6/efe534ef0952b531b630780e19cabd416e2032697019d5295defc6ef9bd9/deepdiff-8.6.1-py3-none-any.whl", hash = "sha256:ee8708a7f7d37fb273a541fa24ad010ed484192cd0c4ffc0fa0ed5e2d4b9e78b", size = 91378, upload-time = "2025-09-03T19:40:39.679Z" },
]

[[package]]
name = "idna"
version = "3.11"
@@ -281,6 +297,27 @@ wheels = [
    { url = "https://files.pythonhosted.org/packages/31/b4/b9b800c45527aadd64d5b442f9b932b00648617eb5d63d2c7a6587b7cafc/jmespath-1.0.1-py3-none-any.whl", hash = "sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980", size = 20256, upload-time = "2022-06-17T18:00:10.251Z" },
]

[[package]]
name = "jsonpath-ng"
version = "1.7.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
    { name = "ply" },
]
sdist = { url = "https://files.pythonhosted.org/packages/6d/86/08646239a313f895186ff0a4573452038eed8c86f54380b3ebac34d32fb2/jsonpath-ng-1.7.0.tar.gz", hash = "sha256:f6f5f7fd4e5ff79c785f1573b394043b39849fb2bb47bcead935d12b00beab3c", size = 37838, upload-time = "2024-10-11T15:41:42.404Z" }
wheels = [
    { url = "https://files.pythonhosted.org/packages/35/5a/73ecb3d82f8615f32ccdadeb9356726d6cae3a4bbc840b437ceb95708063/jsonpath_ng-1.7.0-py3-none-any.whl", hash = "sha256:f3d7f9e848cba1b6da28c55b1c26ff915dc9e0b1ba7e752a53d6da8d5cbd00b6", size = 30105, upload-time = "2024-11-20T17:58:30.418Z" },
]

[[package]]
name = "orderly-set"
version = "5.5.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/4a/88/39c83c35d5e97cc203e9e77a4f93bf87ec89cf6a22ac4818fdcc65d66584/orderly_set-5.5.0.tar.gz", hash = "sha256:e87185c8e4d8afa64e7f8160ee2c542a475b738bc891dc3f58102e654125e6ce", size = 27414, upload-time = "2025-07-10T20:10:55.885Z" }
wheels = [
    { url = "https://files.pythonhosted.org/packages/12/27/fb8d7338b4d551900fa3e580acbe7a0cf655d940e164cb5c00ec31961094/orderly_set-5.5.0-py3-none-any.whl", hash = "sha256:46f0b801948e98f427b412fcabb831677194c05c3b699b80de260374baa0b1e7", size = 13068, upload-time = "2025-07-10T20:10:54.377Z" },
]

[[package]]
name = "packaging"
version = "25.0"
@@ -299,6 +336,15 @@ wheels = [
    { url = "https://files.pythonhosted.org/packages/54/20/4d324d65cc6d9205fabedc306948156824eb9f0ee1633355a8f7ec5c66bf/pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746", size = 20538, upload-time = "2025-05-15T12:30:06.134Z" },
]

[[package]]
name = "ply"
version = "3.11"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/e5/69/882ee5c9d017149285cab114ebeab373308ef0f874fcdac9beb90e0ac4da/ply-3.11.tar.gz", hash = "sha256:00c7c1aaa88358b9c765b6d3000c6eec0ba42abca5351b095321aef446081da3", size = 159130, upload-time = "2018-02-15T19:01:31.097Z" }
wheels = [
    { url = "https://files.pythonhosted.org/packages/a3/58/35da89ee790598a0700ea49b2a66594140f44dec458c07e8e3d4979137fc/ply-3.11-py2.py3-none-any.whl", hash = "sha256:096f9b8350b65ebd2fd1346b12452efe5b9607f7482813ffca50c22722a807ce", size = 49567, upload-time = "2018-02-15T19:01:27.172Z" },
]

[[package]]
name = "psutil"
version = "7.1.1"