ruff: apply automatic fixes

Jörg Thalheim
2025-08-20 13:52:45 +02:00
parent 798d445f3e
commit ea2d6aab65
217 changed files with 2283 additions and 1739 deletions
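
For context, the hunks below are the kind of mechanical formatting fixes ruff applies automatically (typically via something like "ruff check --fix", possibly followed by "ruff format"; the exact invocation and rule selection used for this commit are not shown here). A minimal sketch of the recurring before/after patterns, using hypothetical names rather than code from this repository:

# Recurring auto-fix patterns in this commit (hypothetical names):
#   - the docstring summary moves onto the line of the opening quotes,
#   - a blank line is added after the final docstring section,
#   - multi-line signatures and calls get one argument per line, with a
#     trailing comma after the last argument.

def get_display_name(name: str, nickname: str | None = None) -> str:
    """Return the display name for a group.

    Returns:
        The nickname if available, otherwise the name.

    """
    return nickname if nickname else name


def build_group(
    name: str,
    nickname: str | None,
    children: dict[str, str],
) -> dict[str, object]:
    return {
        "name": name,
        "nickname": nickname,
        "children": children,
    }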

View File

@@ -16,7 +16,7 @@ class LogGroupConfig:
name: str # The name of this group level (single directory name)
nickname: str | None = None # Optional display name for easier visibility
children: dict[str, "LogGroupConfig"] = field(
default_factory=dict
default_factory=dict,
) # Nested child groups
def get_display_name(self) -> str:
@@ -24,6 +24,7 @@ class LogGroupConfig:
Returns:
The nickname if available, otherwise the group name.
"""
return self.nickname if self.nickname else self.name
@@ -35,10 +36,13 @@ class LogGroupConfig:
Returns:
A new LogGroupConfig instance with the child added.
"""
new_children = {**self.children, child.name: child}
return LogGroupConfig(
name=self.name, nickname=self.nickname, children=new_children
name=self.name,
nickname=self.nickname,
children=new_children,
)
def get_child(self, name: str) -> "LogGroupConfig | None":
@@ -49,6 +53,7 @@ class LogGroupConfig:
Returns:
The child LogGroupConfig if found, None otherwise.
"""
return self.children.get(name)
@@ -62,6 +67,7 @@ def is_correct_day_format(date_day: str) -> bool:
Returns:
True if the date_day matches YYYY-MM-DD format, False otherwise.
"""
try:
datetime.datetime.strptime(date_day, "%Y-%m-%d").replace(tzinfo=datetime.UTC)
@@ -85,6 +91,7 @@ class LogFile:
Raises:
ValueError: If date_day or date_second are not in the correct format.
"""
# Validate formats upon initialization.
if not is_correct_day_format(self.date_day):
@@ -92,7 +99,7 @@ class LogFile:
raise ValueError(msg)
try:
datetime.datetime.strptime(self.date_second, "%H-%M-%S").replace(
tzinfo=datetime.UTC
tzinfo=datetime.UTC,
)
except ValueError as ex:
msg = f"LogFile.date_second '{self.date_second}' is not in HH-MM-SS format."
@@ -104,10 +111,12 @@ class LogFile:
Returns:
A datetime object constructed from date_day and date_second.
"""
# Formats are pre-validated by __post_init__.
return datetime.datetime.strptime(
f"{self.date_day} {self.date_second}", "%Y-%m-%d %H-%M-%S"
f"{self.date_day} {self.date_second}",
"%Y-%m-%d %H-%M-%S",
).replace(tzinfo=datetime.UTC)
def get_file_path(self) -> Path:
@@ -115,6 +124,7 @@ class LogFile:
Returns:
The complete Path object for this log file including nested directory structure.
"""
# Create nested directory structure for hierarchical groups
path = self._base_dir / self.date_day
@@ -135,6 +145,7 @@ class LogFile:
Returns:
True if all significant fields are equal, False otherwise.
"""
if not isinstance(other, LogFile):
return NotImplemented
@@ -157,6 +168,7 @@ class LogFile:
Returns:
True if this instance should be sorted before the other.
"""
if not isinstance(other, LogFile):
return NotImplemented
@@ -186,6 +198,7 @@ class LogDayDir:
Raises:
ValueError: If date_day is not in YYYY-MM-DD format.
"""
if not is_correct_day_format(self.date_day):
msg = f"LogDayDir.date_day '{self.date_day}' is not in YYYY-MM-DD format."
@@ -197,6 +210,7 @@ class LogDayDir:
Returns:
A date object constructed from date_day.
"""
return (
datetime.datetime.strptime(self.date_day, "%Y-%m-%d")
@@ -209,6 +223,7 @@ class LogDayDir:
Returns:
The Path object for this day's log directory.
"""
return self._base_dir / self.date_day
@@ -220,6 +235,7 @@ class LogDayDir:
Returns:
True if date_day and base_dir are equal, False otherwise.
"""
if not isinstance(other, LogDayDir):
return NotImplemented
@@ -235,6 +251,7 @@ class LogDayDir:
Returns:
True if this instance should be sorted before the other.
"""
if not isinstance(other, LogDayDir):
return NotImplemented
@@ -252,6 +269,7 @@ class LogManager:
Attributes:
base_dir: The base directory where all log files are stored.
root_group_configs: Dictionary of root-level group configurations.
"""
base_dir: Path
@@ -265,6 +283,7 @@ class LogManager:
Returns:
A new LogManager instance with the group configuration added.
"""
new_configs = {**self.root_group_configs, group_config.name: group_config}
return LogManager(base_dir=self.base_dir, root_group_configs=new_configs)
@@ -279,6 +298,7 @@ class LogManager:
Returns:
The LogGroupConfig if found, None otherwise.
"""
if not group_path:
return None
@@ -301,7 +321,10 @@ class LogManager:
return current_config
def create_log_file(
self, func: Callable | str, op_key: str, group_path: list[str] | None = None
self,
func: Callable | str,
op_key: str,
group_path: list[str] | None = None,
) -> LogFile:
"""Create a new log file for the given function and operation.
@@ -316,6 +339,7 @@ class LogManager:
Raises:
ValueError: If the group structure is not registered.
FileExistsError: If the log file already exists.
"""
now_utc = datetime.datetime.now(tz=datetime.UTC)
@@ -372,6 +396,7 @@ class LogManager:
Returns:
True if the group structure is registered, False otherwise.
"""
# Special case: allow "default" group without registration
if group_path == ["default"]:
@@ -397,6 +422,7 @@ class LogManager:
Returns:
True if the group structure is valid, False otherwise.
"""
if not group_path:
return False
@@ -429,6 +455,7 @@ class LogManager:
Returns:
A sorted list of LogDayDir instances (newest first). Returns empty list if base directory doesn't exist.
"""
if not self.base_dir.exists() or not self.base_dir.is_dir():
return []
@@ -436,18 +463,18 @@ class LogManager:
log_day_dirs_list: list[LogDayDir] = []
for day_dir_candidate_path in self.base_dir.iterdir():
if day_dir_candidate_path.is_dir() and is_correct_day_format(
day_dir_candidate_path.name
day_dir_candidate_path.name,
):
try:
log_day_dirs_list.append(
LogDayDir(
date_day=day_dir_candidate_path.name,
_base_dir=self.base_dir,
)
),
)
except ValueError:
log.warning(
f"Skipping directory with invalid date format '{day_dir_candidate_path.name}'."
f"Skipping directory with invalid date format '{day_dir_candidate_path.name}'.",
)
return sorted(log_day_dirs_list) # Sorts using LogDayDir.__lt__ (newest first)
@@ -468,6 +495,7 @@ class LogManager:
Returns:
The LogFile if found, None otherwise.
"""
days_to_search: list[LogDayDir]
@@ -495,7 +523,10 @@ class LogManager:
return None
def _find_log_file_in_day(
self, day_dir: LogDayDir, op_key: str, selector: list[str] | None = None
self,
day_dir: LogDayDir,
op_key: str,
selector: list[str] | None = None,
) -> LogFile | None:
"""Find a log file in a specific day directory.
@@ -506,6 +537,7 @@ class LogManager:
Returns:
The LogFile if found, None otherwise.
"""
base_path = day_dir.get_dir_path()
@@ -520,15 +552,17 @@ class LogManager:
if search_path.exists() and search_path.is_dir():
return self._search_in_path(search_path, op_key, selector)
else:
# Search all groups in this day
if base_path.exists() and base_path.is_dir():
return self._search_in_path(base_path, op_key, None)
# Search all groups in this day
elif base_path.exists() and base_path.is_dir():
return self._search_in_path(base_path, op_key, None)
return None
def _search_in_path(
self, search_path: Path, op_key: str, group_path: list[str] | None
self,
search_path: Path,
op_key: str,
group_path: list[str] | None,
) -> LogFile | None:
"""Search for log files in a given path.
@@ -539,6 +573,7 @@ class LogManager:
Returns:
The LogFile if found, None otherwise.
"""
log_files: list[LogFile] = []
@@ -601,7 +636,9 @@ class LogManager:
return None
def filter(
self, selector: list[str] | None = None, date_day: str | None = None
self,
selector: list[str] | None = None,
date_day: str | None = None,
) -> list[str]:
"""Filter and list folders at the specified hierarchical path.
@@ -615,6 +652,7 @@ class LogManager:
Returns:
List of folder names (decoded) at the specified path level.
"""
if selector is None:
selector = []

View File

@@ -14,6 +14,7 @@ def list_log_days() -> list[str]:
Raises:
AssertionError: If LOG_MANAGER_INSTANCE is not initialized.
"""
assert LOG_MANAGER_INSTANCE is not None
return [day.date_day for day in LOG_MANAGER_INSTANCE.list_log_days()]
@@ -21,7 +22,8 @@ def list_log_days() -> list[str]:
@API.register
def list_log_groups(
selector: list[str] | None, date_day: str | None = None
selector: list[str] | None,
date_day: str | None = None,
) -> list[str]:
"""List all log groups at the specified hierarchical path.
@@ -34,6 +36,7 @@ def list_log_groups(
Raises:
AssertionError: If LOG_MANAGER_INSTANCE is not initialized.
"""
assert LOG_MANAGER_INSTANCE is not None
return LOG_MANAGER_INSTANCE.filter(selector, date_day=date_day)
@@ -41,7 +44,9 @@ def list_log_groups(
@API.register
def get_log_file(
id_key: str, selector: list[str] | None = None, date_day: str | None = None
id_key: str,
selector: list[str] | None = None,
date_day: str | None = None,
) -> str:
"""Get the contents of a specific log file by operation key.
@@ -56,11 +61,14 @@ def get_log_file(
Raises:
ClanError: If the log file is not found.
AssertionError: If LOG_MANAGER_INSTANCE is not initialized.
"""
assert LOG_MANAGER_INSTANCE is not None
log_file = LOG_MANAGER_INSTANCE.get_log_file(
op_key=id_key, selector=selector, date_day=date_day
op_key=id_key,
selector=selector,
date_day=date_day,
)
if log_file is None:
msg = f"Log file with op_key '{id_key}' not found in selector '{selector}' and date_day '{date_day}'."

View File

@@ -1,6 +1,5 @@
#!/usr/bin/env python3
"""
Simple LogManager example with filter function.
"""Simple LogManager example with filter function.
This demonstrates:
- Dynamic group names with URL encoding
@@ -80,7 +79,7 @@ def main() -> None:
)
if specific_log:
print(
f"5. Found specific log: {specific_log.op_key} in {specific_log.func_name}"
f"5. Found specific log: {specific_log.op_key} in {specific_log.func_name}",
)
else:
print("5. Specific log not found")

View File

@@ -1,5 +1,4 @@
"""
Simplified tests for the log manager focusing only on features used by the API.
"""Simplified tests for the log manager focusing only on features used by the API.
Tests are based on actual usage patterns from example_usage.py and api.py.
"""
@@ -150,14 +149,15 @@ class TestLogManagerGroupConfiguration:
"""Test finding nested group configuration."""
# ["clans", "dynamic_name", "machines"] - should find machines config
config = configured_log_manager.find_group_config(
["clans", "repo1", "machines"]
["clans", "repo1", "machines"],
)
assert config is not None
assert config.name == "machines"
assert config.nickname == "Machines"
def test_find_group_config_nonexistent(
self, configured_log_manager: LogManager
self,
configured_log_manager: LogManager,
) -> None:
"""Test finding non-existent group configuration."""
config = configured_log_manager.find_group_config(["nonexistent"])
@@ -171,7 +171,8 @@ class TestLogFileCreation:
"""Test log file creation features used in example_usage.py."""
def test_create_log_file_default_group(
self, configured_log_manager: LogManager
self,
configured_log_manager: LogManager,
) -> None:
"""Test creating log file with default group."""
log_file = configured_log_manager.create_log_file(example_function, "test_op")
@@ -185,7 +186,8 @@ class TestLogFileCreation:
assert log_file.get_file_path().exists()
def test_create_log_file_with_nested_groups(
self, configured_log_manager: LogManager
self,
configured_log_manager: LogManager,
) -> None:
"""Test creating log file with nested groups like example_usage.py."""
repos = ["/home/user/Projects/qubasas_clan", "https://github.com/qubasa/myclan"]
@@ -210,7 +212,8 @@ class TestLogFileCreation:
# Dynamic elements should be URL encoded if they contain special chars
def test_create_log_file_unregistered_group_fails(
self, configured_log_manager: LogManager
self,
configured_log_manager: LogManager,
) -> None:
"""Test that creating log file with unregistered group fails."""
with pytest.raises(ValueError, match="Group structure.*is not valid"):
@@ -221,7 +224,8 @@ class TestLogFileCreation:
)
def test_create_log_file_invalid_structure_fails(
self, configured_log_manager: LogManager
self,
configured_log_manager: LogManager,
) -> None:
"""Test that invalid nested structure fails."""
with pytest.raises(ValueError, match="Group structure.*is not valid"):
@@ -236,19 +240,23 @@ class TestFilterFunction:
"""Test filter functionality used in example_usage.py and api.py."""
def test_filter_empty_returns_top_level_groups(
self, configured_log_manager: LogManager
self,
configured_log_manager: LogManager,
) -> None:
"""Test that empty filter returns top-level groups."""
# Create some log files first
configured_log_manager.create_log_file(
run_machine_update, "test_op", ["clans", "repo1", "machines", "machine1"]
run_machine_update,
"test_op",
["clans", "repo1", "machines", "machine1"],
)
top_level = configured_log_manager.filter([])
assert "clans" in top_level
def test_filter_lists_dynamic_names(
self, configured_log_manager: LogManager
self,
configured_log_manager: LogManager,
) -> None:
"""Test filtering lists dynamic names like example_usage.py."""
repos = ["repo1", "repo2"]
@@ -271,17 +279,20 @@ class TestFilterFunction:
if clans_repos:
first_repo = clans_repos[0]
repo_machines = configured_log_manager.filter(
["clans", first_repo, "machines"]
["clans", first_repo, "machines"],
)
assert set(repo_machines) == set(machines)
def test_filter_with_specific_date(
self, configured_log_manager: LogManager
self,
configured_log_manager: LogManager,
) -> None:
"""Test filtering with specific date."""
# Create log file
log_file = configured_log_manager.create_log_file(
run_machine_update, "test_op", ["clans", "repo1", "machines", "machine1"]
run_machine_update,
"test_op",
["clans", "repo1", "machines", "machine1"],
)
# Filter with the specific date
@@ -320,7 +331,8 @@ class TestGetLogFile:
assert found_log_file.func_name == "run_machine_update"
def test_get_log_file_with_selector(
self, configured_log_manager: LogManager
self,
configured_log_manager: LogManager,
) -> None:
"""Test getting log file with specific selector like example_usage.py."""
# Create log files in different locations
@@ -347,12 +359,15 @@ class TestGetLogFile:
"""Test getting log file with specific date."""
# Create log file
log_file = configured_log_manager.create_log_file(
run_machine_update, "deploy_demo", ["clans", "repo1", "machines", "demo"]
run_machine_update,
"deploy_demo",
["clans", "repo1", "machines", "demo"],
)
# Find it by op_key and date
found_log_file = configured_log_manager.get_log_file(
"deploy_demo", date_day=log_file.date_day
"deploy_demo",
date_day=log_file.date_day,
)
assert found_log_file is not None
assert found_log_file.op_key == "deploy_demo"
@@ -362,7 +377,8 @@ class TestGetLogFile:
datetime.datetime.now(datetime.UTC) + datetime.timedelta(days=1)
).strftime("%Y-%m-%d")
not_found = configured_log_manager.get_log_file(
"deploy_demo", date_day=tomorrow
"deploy_demo",
date_day=tomorrow,
)
assert not_found is None
@@ -384,10 +400,14 @@ class TestListLogDays:
"""Test listing log days when logs exist."""
# Create log files
configured_log_manager.create_log_file(
run_machine_update, "op1", ["clans", "repo1", "machines", "machine1"]
run_machine_update,
"op1",
["clans", "repo1", "machines", "machine1"],
)
configured_log_manager.create_log_file(
run_machine_update, "op2", ["clans", "repo2", "machines", "machine2"]
run_machine_update,
"op2",
["clans", "repo2", "machines", "machine2"],
)
days = configured_log_manager.list_log_days()
@@ -402,7 +422,8 @@ class TestApiCompatibility:
"""Test that the log manager works with the API functions."""
def test_api_workflow_like_example_usage(
self, configured_log_manager: LogManager
self,
configured_log_manager: LogManager,
) -> None:
"""Test a complete workflow like example_usage.py and api.py."""
repos = ["/home/user/Projects/qubasas_clan", "https://github.com/qubasa/myclan"]
@@ -447,7 +468,8 @@ class TestLogFileSorting:
"""Test LogFile sorting functionality - newest first is a key feature."""
def test_logfile_comparison_by_datetime(
self, configured_log_manager: LogManager
self,
configured_log_manager: LogManager,
) -> None:
"""Test that LogFiles are sorted by datetime (newest first)."""
from clan_lib.log_manager import LogFile
@@ -482,7 +504,8 @@ class TestLogFileSorting:
assert sorted_files[1] == older_file
def test_logfile_comparison_by_date(
self, configured_log_manager: LogManager
self,
configured_log_manager: LogManager,
) -> None:
"""Test that LogFiles are sorted by date (newer dates first)."""
from clan_lib.log_manager import LogFile
@@ -516,7 +539,8 @@ class TestLogFileSorting:
assert sorted_files[1] == older_date_file
def test_logfile_secondary_sort_by_group(
self, configured_log_manager: LogManager
self,
configured_log_manager: LogManager,
) -> None:
"""Test that LogFiles with same datetime are sorted by group name (alphabetical)."""
from clan_lib.log_manager import LogFile
@@ -551,7 +575,8 @@ class TestLogFileSorting:
assert sorted_files[1] == group_b_file
def test_logfile_tertiary_sort_by_func_name(
self, configured_log_manager: LogManager
self,
configured_log_manager: LogManager,
) -> None:
"""Test that LogFiles with same datetime and group are sorted by func_name (alphabetical)."""
from clan_lib.log_manager import LogFile
@@ -585,7 +610,8 @@ class TestLogFileSorting:
assert sorted_files[1] == func_b_file
def test_logfile_quaternary_sort_by_op_key(
self, configured_log_manager: LogManager
self,
configured_log_manager: LogManager,
) -> None:
"""Test that LogFiles with same datetime, group, and func_name are sorted by op_key (alphabetical)."""
from clan_lib.log_manager import LogFile
@@ -619,10 +645,10 @@ class TestLogFileSorting:
assert sorted_files[1] == op_b_file
def test_logfile_complex_sorting_scenario(
self, configured_log_manager: LogManager
self,
configured_log_manager: LogManager,
) -> None:
"""Test complex sorting with multiple LogFiles demonstrating full sort order."""
from clan_lib.log_manager import LogFile
# Create multiple files with different characteristics
@@ -720,7 +746,7 @@ class TestLogFileSorting:
]
for i, (exp_op, exp_date, exp_group, exp_func, exp_time) in enumerate(
expected_order
expected_order,
):
actual = sorted_files[i]
assert actual.op_key == exp_op, (
@@ -740,7 +766,8 @@ class TestLogFileSorting:
)
def test_get_log_file_returns_newest_when_multiple_exist(
self, configured_log_manager: LogManager
self,
configured_log_manager: LogManager,
) -> None:
"""Test that get_log_file returns the newest file when multiple files with same op_key exist in different locations."""
# Create log files with same op_key in different locations (different groups/machines)
@@ -771,7 +798,8 @@ class TestLogFileSorting:
# When searching with specific selector, should find the specific one
specific_log = configured_log_manager.get_log_file(
"deploy_operation", selector=["clans", "repo1", "machines", "machine1"]
"deploy_operation",
selector=["clans", "repo1", "machines", "machine1"],
)
assert specific_log is not None
assert specific_log.op_key == "deploy_operation"
@@ -779,7 +807,8 @@ class TestLogFileSorting:
assert "machine1" in specific_log.group
def test_list_log_days_sorted_newest_first(
self, configured_log_manager: LogManager
self,
configured_log_manager: LogManager,
) -> None:
"""Test that list_log_days returns days sorted newest first."""
# Create log files on different days by manipulating the date
@@ -817,7 +846,8 @@ class TestURLEncoding:
"""Test URL encoding for dynamic group names with special characters."""
def test_special_characters_in_dynamic_names(
self, configured_log_manager: LogManager
self,
configured_log_manager: LogManager,
) -> None:
"""Test that special characters in dynamic names are handled correctly."""
special_repo = "/home/user/Projects/my clan" # Contains space

View File

@@ -68,7 +68,9 @@ class TestURLEncoding:
group_path = ["clans", dynamic_name, "machines", f"machine-{dynamic_name}"]
log_file = log_manager.create_log_file(
sample_function, f"test_{dynamic_name}", group_path
sample_function,
f"test_{dynamic_name}",
group_path,
)
# Check that the file was created and encoded names appear in path
@@ -78,7 +80,8 @@ class TestURLEncoding:
# Verify encoding for both dynamic elements (indices 1 and 3)
expected_encoded_repo = urllib.parse.quote(dynamic_name, safe="")
expected_encoded_machine = urllib.parse.quote(
f"machine-{dynamic_name}", safe=""
f"machine-{dynamic_name}",
safe="",
)
assert expected_encoded_repo in str(file_path)
@@ -126,7 +129,9 @@ class TestURLEncoding:
group_path = ["clans", dynamic_name, "default"]
log_file = log_manager.create_log_file(
sample_function, "unicode_test", group_path
sample_function,
"unicode_test",
group_path,
)
file_path = log_file.get_file_path()
@@ -153,7 +158,9 @@ class TestURLEncoding:
group_path = ["default"]
log_file = log_manager.create_log_file(
sample_function, "simple_test", group_path
sample_function,
"simple_test",
group_path,
)
file_path = log_file.get_file_path()
@@ -177,7 +184,9 @@ class TestURLEncoding:
group_path = ["clans", "", "default"]
log_file = log_manager.create_log_file(
sample_function, "empty_test", group_path
sample_function,
"empty_test",
group_path,
)
file_path = log_file.get_file_path()