clan-lib: Add LogManager class

This commit is contained in:
Qubasa
2025-06-12 16:24:40 +02:00
parent 726b8f4c6c
commit e6025493c4
5 changed files with 1187 additions and 48 deletions

View File

@@ -7,8 +7,15 @@ import os
from dataclasses import dataclass
from pathlib import Path
from clan_lib.api import API, ErrorDataClass, SuccessDataClass
import clan_lib.machines.actions # noqa: F401
from clan_lib.api import API, ApiError, ErrorDataClass, SuccessDataClass
from clan_lib.api.log_manager import LogDayDir, LogFile, LogFuncDir, LogManager
# TODO: We have to manually import python files to make the API.register be triggered.
# We NEED to fix this, as this is super unintuitive and error-prone.
from clan_lib.api.tasks import list_tasks as dummy_list # noqa: F401
from clan_lib.custom_logger import setup_logging
from clan_lib.dirs import user_data_dir
from clan_app.api.file_gtk import open_file
from clan_app.deps.webview.webview import Size, SizeHint, Webview
@@ -40,6 +47,8 @@ def app_run(app_opts: ClanAppOptions) -> int:
# This seems to call the gtk api correctly but and gtk also seems to our icon, but somehow the icon is not loaded.
webview.icon = "clan-white"
log_manager = LogManager(base_dir=user_data_dir() / "clan-app" / "logs")
def cancel_task(
task_id: str, *, op_key: str
) -> SuccessDataClass[None] | ErrorDataClass:
@@ -70,14 +79,118 @@ def app_run(app_opts: ClanAppOptions) -> int:
status="success",
)
# TODO: We have to manually import python files to make the API.register be triggered.
# We NEED to fix this, as this is super unintuitive and error-prone.
import clan_lib.machines.actions # noqa: F401
def list_log_days(
    *, op_key: str
) -> SuccessDataClass[list[LogDayDir]] | ErrorDataClass:
    """Return every per-day log directory known to the log manager."""
    log.debug("Listing all log days.")
    days = log_manager.list_log_days()
    return SuccessDataClass(op_key=op_key, data=days, status="success")
def list_log_funcs_at_day(
    day: str, *, op_key: str
) -> SuccessDataClass[list[LogFuncDir]] | ErrorDataClass:
    """Return the per-function log directories recorded on one day.

    `day` must be a YYYY-MM-DD date string; an ErrorDataClass is
    returned when LogDayDir rejects it.
    """
    log.debug(f"Listing all log functions for day: {day}")
    try:
        day_dir = LogDayDir(date_day=day, _base_dir=log_manager.base_dir)
    except ValueError:
        # LogDayDir validates the date string in its constructor.
        bad_day = ApiError(
            message="Invalid day format",
            description=f"Day {day} is not in the correct format (YYYY-MM-DD).",
            location=["app::list_log_funcs_at_day", "day"],
        )
        return ErrorDataClass(op_key=op_key, status="error", errors=[bad_day])
    return SuccessDataClass(
        op_key=op_key, data=day_dir.get_log_files(), status="success"
    )
def list_log_files(
    day: str, func_name: str, *, op_key: str
) -> SuccessDataClass[list[LogFile]] | ErrorDataClass:
    """List all log files for a given function on a specific day.

    Args:
        day: Day in YYYY-MM-DD format.
        func_name: Name of the API function whose log files to list.
        op_key: Operation key correlating this call with its response.

    Returns:
        SuccessDataClass carrying the LogFile entries, or an
        ErrorDataClass when ``day`` is not a valid YYYY-MM-DD date.
    """
    # Fixed copy-paste: the previous docstring/debug text described
    # list_log_funcs_at_day and ignored func_name entirely.
    log.debug(f"Listing all log files for day {day} and function {func_name}")
    try:
        # LogFuncDir validates the date string in its constructor.
        log_func_dir = LogFuncDir(
            date_day=day, func_name=func_name, _base_dir=log_manager.base_dir
        )
    except ValueError:
        return ErrorDataClass(
            op_key=op_key,
            status="error",
            errors=[
                ApiError(
                    message="Invalid day format",
                    description=f"Day {day} is not in the correct format (YYYY-MM-DD).",
                    location=["app::list_log_files", "day"],
                )
            ],
        )
    return SuccessDataClass(
        op_key=op_key,
        data=log_func_dir.get_log_files(),
        status="success",
    )
def get_log_file(
    id_key: str, *, op_key: str
) -> SuccessDataClass[str] | ErrorDataClass:
    """Fetch the text content of one log file, looked up by its id."""
    try:
        found = log_manager.get_log_file(id_key)
    except ValueError:
        # The manager rejects malformed ids with ValueError.
        bad_id = ApiError(
            message="Invalid log file ID",
            description=f"Log file ID {id_key} is not in the correct format.",
            location=["app::get_log_file", "id_key"],
        )
        return ErrorDataClass(op_key=op_key, status="error", errors=[bad_id])
    if not found:
        # Well-formed id, but no matching file on disk.
        missing = ApiError(
            message="Log file not found",
            description=f"Log file with id {id_key} not found.",
            location=["app::get_log_file", "id_key"],
        )
        return ErrorDataClass(op_key=op_key, status="error", errors=[missing])
    content = found.get_file_path().read_text(encoding="utf-8")
    return SuccessDataClass(op_key=op_key, data=content, status="success")
API.overwrite_fn(list_tasks)
API.overwrite_fn(open_file)
API.overwrite_fn(cancel_task)
webview.bind_jsonschema_api(API)
API.overwrite_fn(list_log_days)
API.overwrite_fn(list_log_funcs_at_day)
API.overwrite_fn(list_log_files)
API.overwrite_fn(get_log_file)
webview.bind_jsonschema_api(API, log_manager=log_manager)
webview.size = Size(1280, 1024, SizeHint.NONE)
webview.navigate(content_uri)
webview.run()

View File

@@ -1,5 +1,6 @@
import ctypes
import functools
import io
import json
import logging
import threading
@@ -15,7 +16,9 @@ from clan_lib.api import (
dataclass_to_dict,
from_dict,
)
from clan_lib.async_run import set_should_cancel
from clan_lib.api.log_manager import LogManager
from clan_lib.async_run import AsyncContext, get_async_ctx, set_async_ctx
from clan_lib.custom_logger import setup_logging
from ._webview_ffi import _encode_c_string, _webview_lib
@@ -60,6 +63,7 @@ class Webview:
def api_wrapper(
self,
log_manager: LogManager,
api: MethodRegistry,
method_name: str,
wrap_method: Callable[..., Any],
@@ -71,53 +75,104 @@ class Webview:
args = json.loads(request_data.decode())
log.debug(f"Calling {method_name}({args[0]})")
# Initialize dataclasses from the payload
reconciled_arguments = {}
for k, v in args[0].items():
# Some functions expect to be called with dataclass instances
# But the js api returns dictionaries.
# Introspect the function and create the expected dataclass from dict dynamically
# Depending on the introspected argument_type
arg_class = api.get_method_argtype(method_name, k)
try:
# Initialize dataclasses from the payload
reconciled_arguments = {}
for k, v in args[0].items():
# Some functions expect to be called with dataclass instances
# But the js api returns dictionaries.
# Introspect the function and create the expected dataclass from dict dynamically
# Depending on the introspected argument_type
arg_class = api.get_method_argtype(method_name, k)
# TODO: rename from_dict into something like construct_checked_value
# from_dict really takes Anything and returns an instance of the type/class
reconciled_arguments[k] = from_dict(arg_class, v)
# TODO: rename from_dict into something like construct_checked_value
# from_dict really takes Anything and returns an instance of the type/class
reconciled_arguments[k] = from_dict(arg_class, v)
reconciled_arguments["op_key"] = op_key
# TODO: We could remove the wrapper in the MethodRegistry
# and just call the method directly
reconciled_arguments["op_key"] = op_key
except Exception as e:
log.exception(f"Error while parsing arguments for {method_name}")
result = ErrorDataClass(
op_key=op_key,
status="error",
errors=[
ApiError(
message="An internal error occured",
description=str(e),
location=["bind_jsonschema_api", method_name],
)
],
)
serialized = json.dumps(
dataclass_to_dict(result), indent=4, ensure_ascii=False
)
self.return_(op_key, FuncStatus.SUCCESS, serialized)
return
def thread_task(stop_event: threading.Event) -> None:
try:
set_should_cancel(lambda: stop_event.is_set())
result = wrap_method(**reconciled_arguments)
ctx: AsyncContext = get_async_ctx()
ctx.should_cancel = lambda: stop_event.is_set()
log_file = log_manager.create_log_file(
wrap_method, op_key=op_key
).get_file_path()
serialized = json.dumps(
dataclass_to_dict(result), indent=4, ensure_ascii=False
)
with log_file.open("ab") as log_f:
# Redirect all cmd.run logs to this file.
ctx.stderr = log_f
ctx.stdout = log_f
set_async_ctx(ctx)
log.debug(f"Result for {method_name}: {serialized}")
self.return_(op_key, FuncStatus.SUCCESS, serialized)
except Exception as e:
log.exception(f"Error while handling result of {method_name}")
result = ErrorDataClass(
op_key=op_key,
status="error",
errors=[
ApiError(
message="An internal error occured",
description=str(e),
location=["bind_jsonschema_api", method_name],
)
],
# Add a new handler to the root logger that writes to log_f
handler_stream = io.TextIOWrapper(
log_f, encoding="utf-8", write_through=True, line_buffering=True
)
serialized = json.dumps(
dataclass_to_dict(result), indent=4, ensure_ascii=False
handler = setup_logging(
log.getEffectiveLevel(), log_file=handler_stream
)
self.return_(op_key, FuncStatus.FAILURE, serialized)
finally:
del self.threads[op_key]
log.info("Starting thread for webview API call")
try:
# Original logic: call the wrapped API method.
result = wrap_method(**reconciled_arguments)
# Serialize the result to JSON.
serialized = json.dumps(
dataclass_to_dict(result), indent=4, ensure_ascii=False
)
# This log message will now also be written to log_f
# through the thread_log_handler.
log.debug(f"Result for {method_name}: {serialized}")
# Return the successful result.
self.return_(op_key, FuncStatus.SUCCESS, serialized)
except Exception as e:
log.exception(f"Error while handling result of {method_name}")
result = ErrorDataClass(
op_key=op_key,
status="error",
errors=[
ApiError(
message="An internal error occured",
description=str(e),
location=["bind_jsonschema_api", method_name],
)
],
)
serialized = json.dumps(
dataclass_to_dict(result), indent=4, ensure_ascii=False
)
self.return_(op_key, FuncStatus.SUCCESS, serialized)
finally:
# Crucial cleanup: remove the handler from the root logger.
# This stops redirecting logs for this thread to log_f and prevents
# the handler from being used after log_f is closed.
handler.root_logger.removeHandler(handler.new_handler)
# Close the handler. For a StreamHandler using a stream it doesn't
# own (log_f is managed by the 'with' statement), this typically
# flushes the stream.
handler.new_handler.close()
del self.threads[op_key]
stop_event = threading.Event()
thread = threading.Thread(
@@ -173,10 +228,11 @@ class Webview:
log.info("Shutting down webview...")
self.destroy()
def bind_jsonschema_api(self, api: MethodRegistry) -> None:
def bind_jsonschema_api(self, api: MethodRegistry, log_manager: LogManager) -> None:
for name, method in api.functions.items():
wrapper = functools.partial(
self.api_wrapper,
log_manager,
api,
name,
method,