clan-app: Add a 'group' to log_manager. Move log_manager to own subdirectory
@@ -8,14 +8,15 @@ from dataclasses import dataclass
from pathlib import Path

import clan_lib.machines.actions  # noqa: F401
from clan_lib.api import API, ApiError, ErrorDataClass, SuccessDataClass
from clan_lib.api.log_manager import LogDayDir, LogFile, LogFuncDir, LogManager
from clan_lib.api import API, ErrorDataClass, SuccessDataClass

# TODO: We have to manually import python files to make the API.register be triggered.
# We NEED to fix this, as this is super unintuitive and error-prone.
from clan_lib.api.tasks import list_tasks as dummy_list  # noqa: F401
from clan_lib.custom_logger import setup_logging
from clan_lib.dirs import user_data_dir
from clan_lib.log_manager import LogManager
from clan_lib.log_manager import api as log_manager_api

from clan_app.api.file_gtk import open_file
from clan_app.deps.webview.webview import Size, SizeHint, Webview
@@ -47,7 +48,9 @@ def app_run(app_opts: ClanAppOptions) -> int:
    # This seems to call the GTK API correctly, and GTK also seems to find our icon, but somehow the icon is not loaded.
    webview.icon = "clan-white"

    log_manager = LogManager(base_dir=user_data_dir() / "clan-app" / "logs")
    log_manager_api.LOG_MANAGER_INSTANCE = LogManager(
        base_dir=user_data_dir() / "clan-app" / "logs"
    )

    def cancel_task(
        task_id: str, *, op_key: str
@@ -79,118 +82,10 @@ def app_run(app_opts: ClanAppOptions) -> int:
            status="success",
        )

    def list_log_days(
        *, op_key: str
    ) -> SuccessDataClass[list[LogDayDir]] | ErrorDataClass:
        """List all log days."""
        log.debug("Listing all log days.")
        return SuccessDataClass(
            op_key=op_key,
            data=log_manager.list_log_days(),
            status="success",
        )

    def list_log_funcs_at_day(
        day: str, *, op_key: str
    ) -> SuccessDataClass[list[LogFuncDir]] | ErrorDataClass:
        """List all log functions at a specific day."""
        log.debug(f"Listing all log functions for day: {day}")
        try:
            log_day_dir = LogDayDir(date_day=day, _base_dir=log_manager.base_dir)
        except ValueError:
            return ErrorDataClass(
                op_key=op_key,
                status="error",
                errors=[
                    ApiError(
                        message="Invalid day format",
                        description=f"Day {day} is not in the correct format (YYYY-MM-DD).",
                        location=["app::list_log_funcs_at_day", "day"],
                    )
                ],
            )
        return SuccessDataClass(
            op_key=op_key,
            data=log_day_dir.get_log_files(),
            status="success",
        )

    def list_log_files(
        day: str, func_name: str, *, op_key: str
    ) -> SuccessDataClass[list[LogFile]] | ErrorDataClass:
        """List all log functions at a specific day."""
        log.debug(f"Listing all log functions for day: {day}")

        try:
            log_func_dir = LogFuncDir(
                date_day=day, func_name=func_name, _base_dir=log_manager.base_dir
            )
        except ValueError:
            return ErrorDataClass(
                op_key=op_key,
                status="error",
                errors=[
                    ApiError(
                        message="Invalid day format",
                        description=f"Day {day} is not in the correct format (YYYY-MM-DD).",
                        location=["app::list_log_files", "day"],
                    )
                ],
            )
        return SuccessDataClass(
            op_key=op_key,
            data=log_func_dir.get_log_files(),
            status="success",
        )

    def get_log_file(
        id_key: str, *, op_key: str
    ) -> SuccessDataClass[str] | ErrorDataClass:
        """Get a specific log file."""

        try:
            log_file = log_manager.get_log_file(id_key)
        except ValueError:
            return ErrorDataClass(
                op_key=op_key,
                status="error",
                errors=[
                    ApiError(
                        message="Invalid log file ID",
                        description=f"Log file ID {id_key} is not in the correct format.",
                        location=["app::get_log_file", "id_key"],
                    )
                ],
            )

        if not log_file:
            return ErrorDataClass(
                op_key=op_key,
                status="error",
                errors=[
                    ApiError(
                        message="Log file not found",
                        description=f"Log file with id {id_key} not found.",
                        location=["app::get_log_file", "id_key"],
                    )
                ],
            )

        log_file_path = log_file.get_file_path()
        return SuccessDataClass(
            op_key=op_key,
            data=log_file_path.read_text(encoding="utf-8"),
            status="success",
        )

    API.overwrite_fn(list_tasks)
    API.overwrite_fn(open_file)
    API.overwrite_fn(cancel_task)
    API.overwrite_fn(list_log_days)
    API.overwrite_fn(list_log_funcs_at_day)
    API.overwrite_fn(list_log_files)
    API.overwrite_fn(get_log_file)
    webview.bind_jsonschema_api(API, log_manager=log_manager)
    webview.bind_jsonschema_api(API, log_manager=log_manager_api.LOG_MANAGER_INSTANCE)
    webview.size = Size(1280, 1024, SizeHint.NONE)
    webview.navigate(content_uri)
    webview.run()
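The net effect of this file's changes: app.py no longer defines its own log handlers; it only seeds the shared instance that the new clan_lib.log_manager.api module reads at call time. A minimal sketch of the new wiring, using only names from this diff:

from clan_lib.dirs import user_data_dir
from clan_lib.log_manager import LogManager
from clan_lib.log_manager import api as log_manager_api

# Seed the module-level singleton once at startup; every @API.register
# endpoint in clan_lib.log_manager.api asserts it is set before use.
log_manager_api.LOG_MANAGER_INSTANCE = LogManager(
    base_dir=user_data_dir() / "clan-app" / "logs"
)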
@@ -16,9 +16,9 @@ from clan_lib.api import (
    dataclass_to_dict,
    from_dict,
)
from clan_lib.api.log_manager import LogManager
from clan_lib.async_run import AsyncContext, get_async_ctx, set_async_ctx
from clan_lib.custom_logger import setup_logging
from clan_lib.log_manager import LogManager

from ._webview_ffi import _encode_c_string, _webview_lib
95  pkgs/clan-app/ui/package-lock.json  (generated)
@@ -15,7 +15,6 @@
        "@modular-forms/solid": "^0.25.1",
        "@solid-primitives/storage": "^4.3.2",
        "@solidjs/router": "^0.15.3",
        "@solidjs/testing-library": "^0.8.10",
        "@tanstack/eslint-plugin-query": "^5.51.12",
        "@tanstack/solid-query": "^5.76.0",
        "solid-js": "^1.9.7",
@@ -29,10 +28,8 @@
        "@storybook/addon-a11y": "^9.0.8",
        "@storybook/addon-docs": "^9.0.8",
        "@storybook/addon-links": "^9.0.8",
        "@storybook/addon-onboarding": "^9.0.8",
        "@storybook/addon-viewport": "^9.0.8",
        "@storybook/addon-vitest": "^9.0.8",
        "@tailwindcss/typography": "^0.5.13",
        "@types/node": "^22.15.19",
        "@types/three": "^0.176.0",
        "@typescript-eslint/parser": "^8.32.1",
@@ -127,6 +124,7 @@
      "version": "7.27.1",
      "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.27.1.tgz",
      "integrity": "sha512-cjQ7ZlQ0Mv3b47hABuTevyTuYN4i+loJKGeV9flcCgIK37cCXRh+L1bd3iBHlynerhQ7BhCkn2BPbQUL+rGqFg==",
      "dev": true,
      "license": "MIT",
      "dependencies": {
        "@babel/helper-validator-identifier": "^7.27.1",
@@ -268,6 +266,7 @@
      "version": "7.27.1",
      "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.27.1.tgz",
      "integrity": "sha512-D2hP9eA+Sqx1kBZgzxZh0y1trbuU+JoDkiEwqhQ36nodYqJwyEIhPSdMNd7lOm/4io72luTPWH20Yda0xOuUow==",
      "dev": true,
      "license": "MIT",
      "engines": {
        "node": ">=6.9.0"
@@ -365,6 +364,7 @@
      "version": "7.27.6",
      "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.27.6.tgz",
      "integrity": "sha512-vbavdySgbTTrmFE+EsiqUTzlOr5bzlnJtUv9PynGCAKvfQqjIXbvFdumPM/GxMDfyuGMJaJAU6TO4zc1Jf1i8Q==",
      "dev": true,
      "license": "MIT",
      "engines": {
        "node": ">=6.9.0"
@@ -2035,27 +2035,6 @@
        "solid-js": "^1.8.6"
      }
    },
    "node_modules/@solidjs/testing-library": {
      "version": "0.8.10",
      "resolved": "https://registry.npmjs.org/@solidjs/testing-library/-/testing-library-0.8.10.tgz",
      "integrity": "sha512-qdeuIerwyq7oQTIrrKvV0aL9aFeuwTd86VYD3afdq5HYEwoox1OBTJy4y8A3TFZr8oAR0nujYgCzY/8wgHGfeQ==",
      "license": "MIT",
      "dependencies": {
        "@testing-library/dom": "^10.4.0"
      },
      "engines": {
        "node": ">= 14"
      },
      "peerDependencies": {
        "@solidjs/router": ">=0.9.0",
        "solid-js": ">=1.0.0"
      },
      "peerDependenciesMeta": {
        "@solidjs/router": {
          "optional": true
        }
      }
    },
    "node_modules/@storybook/addon-a11y": {
      "version": "9.0.12",
      "resolved": "https://registry.npmjs.org/@storybook/addon-a11y/-/addon-a11y-9.0.12.tgz",
@@ -2120,20 +2099,6 @@
        }
      }
    },
    "node_modules/@storybook/addon-onboarding": {
      "version": "9.0.12",
      "resolved": "https://registry.npmjs.org/@storybook/addon-onboarding/-/addon-onboarding-9.0.12.tgz",
      "integrity": "sha512-hqgaINYMDiA2op+Cb77LvwdJkgpMUMAnp5ugJjkn5icLpSTkZxnaQrlC0lTHOZBxUjR5NlS2ApSAuMvrCXQLAw==",
      "dev": true,
      "license": "MIT",
      "funding": {
        "type": "opencollective",
        "url": "https://opencollective.com/storybook"
      },
      "peerDependencies": {
        "storybook": "^9.0.12"
      }
    },
    "node_modules/@storybook/addon-viewport": {
      "version": "9.0.8",
      "resolved": "https://registry.npmjs.org/@storybook/addon-viewport/-/addon-viewport-9.0.8.tgz",
@@ -2262,22 +2227,6 @@
        "tslib": "^2.8.0"
      }
    },
    "node_modules/@tailwindcss/typography": {
      "version": "0.5.16",
      "resolved": "https://registry.npmjs.org/@tailwindcss/typography/-/typography-0.5.16.tgz",
      "integrity": "sha512-0wDLwCVF5V3x3b1SGXPCDcdsbDHMBe+lkFzBRaHeLvNi+nrrnZ1lA18u+OTWO8iSWU2GxUOCvlXtDuqftc1oiA==",
      "dev": true,
      "license": "MIT",
      "dependencies": {
        "lodash.castarray": "^4.4.0",
        "lodash.isplainobject": "^4.0.6",
        "lodash.merge": "^4.6.2",
        "postcss-selector-parser": "6.0.10"
      },
      "peerDependencies": {
        "tailwindcss": ">=3.0.0 || insiders || >=4.0.0-alpha.20 || >=4.0.0-beta.1"
      }
    },
    "node_modules/@tanstack/eslint-plugin-query": {
      "version": "5.78.0",
      "resolved": "https://registry.npmjs.org/@tanstack/eslint-plugin-query/-/eslint-plugin-query-5.78.0.tgz",
@@ -2324,6 +2273,7 @@
      "version": "10.4.0",
      "resolved": "https://registry.npmjs.org/@testing-library/dom/-/dom-10.4.0.tgz",
      "integrity": "sha512-pemlzrSESWbdAloYml3bAJMEfNh1Z7EduzqPKprCH5S341frlpYnUEW0H72dLxa6IsYr+mPno20GiSm+h9dEdQ==",
      "dev": true,
      "license": "MIT",
      "dependencies": {
        "@babel/code-frame": "^7.10.4",
@@ -2427,6 +2377,7 @@
      "version": "5.0.4",
      "resolved": "https://registry.npmjs.org/@types/aria-query/-/aria-query-5.0.4.tgz",
      "integrity": "sha512-rfT93uj5s0PRL7EzccGMs3brplhcrghnDoV26NqKhCAS1hVo+WdNsPvE/yb6ilfr5hi2MEk6d5EWJTKdxg8jVw==",
      "dev": true,
      "license": "MIT"
    },
    "node_modules/@types/babel__core": {
@@ -3178,6 +3129,7 @@
      "version": "5.3.0",
      "resolved": "https://registry.npmjs.org/aria-query/-/aria-query-5.3.0.tgz",
      "integrity": "sha512-b0P0sZPKtyu8HkeRAfCq0IfURZK+SuwMjY1UXGBU27wpAiTwQAIlq56IbIO+ytk/JjS1fMR14ee5WBBfKi5J6A==",
      "dev": true,
      "license": "Apache-2.0",
      "dependencies": {
        "dequal": "^2.0.3"
@@ -4005,6 +3957,7 @@
      "version": "2.0.3",
      "resolved": "https://registry.npmjs.org/dequal/-/dequal-2.0.3.tgz",
      "integrity": "sha512-0je+qPKHEMohvfRTCEo3CrPG6cAzAYgmzKyxRiYSSDkS6eGJdyVJm7WaYA5ECaAD9wLB2T4EEeymA5aFVcYXCA==",
      "dev": true,
      "license": "MIT",
      "engines": {
        "node": ">=6"
@@ -4026,6 +3979,7 @@
      "version": "0.5.16",
      "resolved": "https://registry.npmjs.org/dom-accessibility-api/-/dom-accessibility-api-0.5.16.tgz",
      "integrity": "sha512-X7BJ2yElsnOJ30pZF4uIIDfBEVgF4XEBxL9Bxhy6dnrm5hkzqmsWHGTiHqRiITNhMyFLyAiWndIJP7Z1NTteDg==",
      "dev": true,
      "license": "MIT"
    },
    "node_modules/dom-serializer": {
@@ -5337,6 +5291,7 @@
      "version": "4.0.0",
      "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz",
      "integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==",
      "dev": true,
      "license": "MIT"
    },
    "node_modules/js-yaml": {
@@ -5616,20 +5571,6 @@
      "dev": true,
      "license": "MIT"
    },
    "node_modules/lodash.castarray": {
      "version": "4.4.0",
      "resolved": "https://registry.npmjs.org/lodash.castarray/-/lodash.castarray-4.4.0.tgz",
      "integrity": "sha512-aVx8ztPv7/2ULbArGJ2Y42bG1mEQ5mGjpdvrbJcJFU3TbYybe+QlLS4pst9zV52ymy2in1KpFPiZnAOATxD4+Q==",
      "dev": true,
      "license": "MIT"
    },
    "node_modules/lodash.isplainobject": {
      "version": "4.0.6",
      "resolved": "https://registry.npmjs.org/lodash.isplainobject/-/lodash.isplainobject-4.0.6.tgz",
      "integrity": "sha512-oSXzaWypCMHkPC3NvBEaPHf0KsA5mvPrOPgQWDsbg8n7orZ290M0BmC/jgRZ4vcJ6DTAhjrsSYgdsW/F+MFOBA==",
      "dev": true,
      "license": "MIT"
    },
    "node_modules/lodash.merge": {
      "version": "4.6.2",
      "resolved": "https://registry.npmjs.org/lodash.merge/-/lodash.merge-4.6.2.tgz",
@@ -5657,6 +5598,7 @@
      "version": "1.5.0",
      "resolved": "https://registry.npmjs.org/lz-string/-/lz-string-1.5.0.tgz",
      "integrity": "sha512-h5bgJWpxJNswbU7qCrV0tIKQCaS3blPDrqKWx+QxzuzL1zGUzij9XCWLrSLsJPu5t+eWA/ycetzYAO5IOMcWAQ==",
      "dev": true,
      "license": "MIT",
      "bin": {
        "lz-string": "bin/bin.js"
@@ -6378,6 +6320,6 @@
        "node": ">=4"
      }
    },
    "node_modules/postcss-selector-parser": {
      "version": "6.0.10",
      "resolved": "https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-6.0.10.tgz",
      "integrity": "sha512-IQ7TZdoaqbT+LCpShg46jnZVlhWD2w6iQYAcYXfHARZ7X1t/UGhhceQDs5X0cGqKvYlHNOuv7Oa1xmb0oQuA3w==",
      "dev": true,
      "license": "MIT",
      "dependencies": {
        "cssesc": "^3.0.0",
        "util-deprecate": "^1.0.2"
      },
      "engines": {
        "node": ">=4"
      }
    },
    "node_modules/postcss-url": {
      "version": "10.1.3",
      "resolved": "https://registry.npmjs.org/postcss-url/-/postcss-url-10.1.3.tgz",
@@ -6517,6 +6445,7 @@
      "version": "27.5.1",
      "resolved": "https://registry.npmjs.org/pretty-format/-/pretty-format-27.5.1.tgz",
      "integrity": "sha512-Qb1gy5OrP5+zDf2Bvnzdl3jsTf1qXVMazbvCoKhtKqVs4/YK4ozX4gKQJJVyNe+cajNPn0KoC0MC3FUmaHWEmQ==",
      "dev": true,
      "license": "MIT",
      "dependencies": {
        "ansi-regex": "^5.0.1",
@@ -6531,6 +6460,7 @@
      "version": "5.2.0",
      "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-5.2.0.tgz",
      "integrity": "sha512-Cxwpt2SfTzTtXcfOlzGEee8O+c+MmUgGrNiBcXnuWxuFJHe6a5Hz7qwhwe5OgaSYI0IJvkLqWX1ASG+cJOkEiA==",
      "dev": true,
      "license": "MIT",
      "engines": {
        "node": ">=10"
@@ -6632,6 +6562,7 @@
      "version": "17.0.2",
      "resolved": "https://registry.npmjs.org/react-is/-/react-is-17.0.2.tgz",
      "integrity": "sha512-w2GsyukL62IJnlaff/nRegPQR94C/XXamvMWmSHRJ4y7Ts/4ocGRmTHvOs8PSE6pB3dWOrD/nueuU5sduBsQ4w==",
      "dev": true,
      "license": "MIT"
    },
    "node_modules/read-cache": {
@@ -61,7 +61,7 @@ export const ApiTester = () => {
      return await callApi(
        values.endpoint as keyof API,
        JSON.parse(values.payload || "{}"),
      );
      ).promise;
    },
    staleTime: Infinity,
    enabled: false,
@@ -1,12 +1,11 @@
import datetime
import logging
import urllib.parse
from collections.abc import Callable  # Union for str | None
from dataclasses import dataclass
from functools import total_ordering
from pathlib import Path

from clan_lib.api import API

log = logging.getLogger(__name__)

@@ -25,6 +24,7 @@ def is_correct_day_format(date_day: str) -> bool:
class LogFile:
    op_key: str
    date_day: str  # YYYY-MM-DD
    group: str
    func_name: str
    _base_dir: Path
    date_second: str  # HH-MM-SS
@@ -51,9 +51,10 @@ class LogFile:

    @classmethod
    def from_path(cls, file: Path) -> "LogFile":
        date_day = file.parent.parent.name
        date_day = file.parent.parent.parent.name
        group = urllib.parse.unquote(file.parent.parent.name)
        func_name = file.parent.name
        base_dir = file.parent.parent.parent
        base_dir = file.parent.parent.parent.parent

        filename_stem = file.stem
        parts = filename_stem.split("_", 1)
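With the extra group level, each path component moves one parent further up. A sketch of the parsing for a concrete path (illustrative names; the group segment is URL-quoted on disk and unquoted when read back):

# file = <base>/2023-10-26/my%20group/my_func/10-20-30_op1.log
# file.parent.name                 -> "my_func"     (func_name)
# file.parent.parent.name          -> "my%20group"  (group, unquoted to "my group")
# file.parent.parent.parent.name   -> "2023-10-26"  (date_day)
# file.parent.parent.parent.parent -> <base>        (base_dir)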
@@ -67,6 +68,7 @@ class LogFile:
        return LogFile(
            op_key=op_key_str,
            date_day=date_day,
            group=group,
            date_second=date_second_str,
            func_name=func_name,
            _base_dir=base_dir,
@@ -76,6 +78,7 @@ class LogFile:
        return (
            self._base_dir
            / self.date_day
            / urllib.parse.quote(self.group, safe="")
            / self.func_name
            / f"{self.date_second}_{self.op_key}.log"
        )
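The resulting on-disk layout, with the new group segment between day and function (illustrative; the quoted form is shown for a group name containing a space):

<base_dir>/
  2023-10-26/              # date_day (YYYY-MM-DD)
    my%20group/            # group, quoted with urllib.parse.quote(..., safe="")
      my_func/             # func_name
        10-20-30_op1.log   # <date_second>_<op_key>.log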
@@ -86,6 +89,7 @@ class LogFile:
        # Compare all significant fields for equality
        return (
            self._datetime_obj == other._datetime_obj
            and self.group == other.group
            and self.func_name == other.func_name
            and self.op_key == other.op_key
            and self._base_dir == other._base_dir
@@ -97,10 +101,13 @@ class LogFile:
        # Primary sort: datetime (newest first). self is "less than" other if self is newer.
        if self._datetime_obj != other._datetime_obj:
            return self._datetime_obj > other._datetime_obj
        # Secondary sort: func_name (alphabetical ascending)
        # Secondary sort: group (alphabetical ascending)
        if self.group != other.group:
            return self.group < other.group
        # Tertiary sort: func_name (alphabetical ascending)
        if self.func_name != other.func_name:
            return self.func_name < other.func_name
        # Tertiary sort: op_key (alphabetical ascending)
        # Quaternary sort: op_key (alphabetical ascending)
        return self.op_key < other.op_key

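The comparison scheme above is easy to get backwards, so here is a self-contained sketch of the same idea (hypothetical Entry class, not from the diff): with functools.total_ordering, defining __eq__ and __lt__ is enough, and returning "newer is less-than" makes a plain sorted() yield newest-first:

from functools import total_ordering

@total_ordering
class Entry:
    def __init__(self, dt: float, group: str, func: str, op: str) -> None:
        self.dt, self.group, self.func, self.op = dt, group, func, op

    def __eq__(self, other: object) -> bool:
        if not isinstance(other, Entry):
            return NotImplemented
        return (self.dt, self.group, self.func, self.op) == (
            other.dt, other.group, other.func, other.op
        )

    def __lt__(self, other: "Entry") -> bool:
        if self.dt != other.dt:
            return self.dt > other.dt  # reversed on purpose: newest sorts first
        # ties broken by group, then func, then op, all ascending
        return (self.group, self.func, self.op) < (other.group, other.func, other.op)

# sorted([...]) now yields newest entries first, ties alphabetical.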
@@ -108,6 +115,7 @@ class LogFile:
@dataclass(frozen=True)
class LogFuncDir:
    date_day: str
    group: str
    func_name: str
    _base_dir: Path

@@ -125,7 +133,12 @@ class LogFuncDir:
        )

    def get_dir_path(self) -> Path:
        return self._base_dir / self.date_day / self.func_name
        return (
            self._base_dir
            / self.date_day
            / urllib.parse.quote(self.group, safe="")
            / self.func_name
        )

    def get_log_files(self) -> list[LogFile]:
        dir_path = self.get_dir_path()
@@ -149,6 +162,7 @@ class LogFuncDir:
            return NotImplemented
        return (
            self.date_day == other.date_day
            and self.group == other.group
            and self.func_name == other.func_name
            and self._base_dir == other._base_dir
        )
@@ -159,10 +173,79 @@ class LogFuncDir:
        # Primary sort: date (newest first)
        if self._date_obj != other._date_obj:
            return self._date_obj > other._date_obj
        # Secondary sort: func_name (alphabetical ascending)
        # Secondary sort: group (alphabetical ascending)
        if self.group != other.group:
            return self.group < other.group
        # Tertiary sort: func_name (alphabetical ascending)
        return self.func_name < other.func_name

@total_ordering
@dataclass(frozen=True)
class LogGroupDir:
    date_day: str
    group: str
    _base_dir: Path

    def __post_init__(self) -> None:
        if not is_correct_day_format(self.date_day):
            msg = f"LogGroupDir.date_day '{self.date_day}' is not in YYYY-MM-DD format."
            raise ValueError(msg)

    @property
    def _date_obj(self) -> datetime.date:
        return (
            datetime.datetime.strptime(self.date_day, "%Y-%m-%d")
            .replace(tzinfo=datetime.UTC)
            .date()
        )

    def get_dir_path(self) -> Path:
        return self._base_dir / self.date_day / urllib.parse.quote(self.group, safe="")

    def get_log_files(self) -> list[LogFuncDir]:
        dir_path = self.get_dir_path()
        if not dir_path.exists() or not dir_path.is_dir():
            return []

        func_dirs_list: list[LogFuncDir] = []
        for func_dir_path in dir_path.iterdir():
            if func_dir_path.is_dir():
                try:
                    func_dirs_list.append(
                        LogFuncDir(
                            date_day=self.date_day,
                            group=self.group,
                            func_name=func_dir_path.name,
                            _base_dir=self._base_dir,
                        )
                    )
                except ValueError:
                    log.warning(
                        f"Skipping malformed function directory '{func_dir_path.name}' in '{dir_path}'."
                    )

        return sorted(func_dirs_list)

    def __eq__(self, other: object) -> bool:
        if not isinstance(other, LogGroupDir):
            return NotImplemented
        return (
            self.date_day == other.date_day
            and self.group == other.group
            and self._base_dir == other._base_dir
        )

    def __lt__(self, other: object) -> bool:
        if not isinstance(other, LogGroupDir):
            return NotImplemented
        # Primary sort: date (newest first)
        if self._date_obj != other._date_obj:
            return self._date_obj > other._date_obj
        # Secondary sort: group (alphabetical ascending)
        return self.group < other.group

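Why safe="" matters in the quoting above: by default urllib.parse.quote leaves "/" unescaped, which would split one group name into nested directories. A quick standalone check:

import urllib.parse

name = "parent/child & more"
encoded = urllib.parse.quote(name, safe="")
assert encoded == "parent%2Fchild%20%26%20more"  # no path separator survives
assert urllib.parse.unquote(encoded) == name     # lossless round trip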
@total_ordering
@dataclass(frozen=True)
class LogDayDir:
@@ -185,32 +268,30 @@ class LogDayDir:
    def get_dir_path(self) -> Path:
        return self._base_dir / self.date_day

    # This method returns a list of LogFuncDir objects, as per the original structure.
    def get_log_files(self) -> list[LogFuncDir]:
    def get_log_files(self) -> list[LogGroupDir]:
        dir_path = self.get_dir_path()
        if not dir_path.exists() or not dir_path.is_dir():
            return []

        func_dirs_list: list[LogFuncDir] = []
        for func_dir_path in dir_path.iterdir():
            if func_dir_path.is_dir():
        group_dirs_list: list[LogGroupDir] = []

        # First level: group directories
        for group_dir_path in dir_path.iterdir():
            if group_dir_path.is_dir():
                group_name = urllib.parse.unquote(group_dir_path.name)
                try:
                    func_dirs_list.append(
                        LogFuncDir(
                    group_dirs_list.append(
                        LogGroupDir(
                            date_day=self.date_day,
                            func_name=func_dir_path.name,
                            group=group_name,
                            _base_dir=self._base_dir,
                        )
                    )
                except (
                    ValueError
                ):  # Should mainly catch issues if self.date_day was somehow invalid
                except ValueError:
                    log.warning(
                        f"Warning: Skipping malformed function directory '{func_dir_path.name}' in '{dir_path}'."
                        f"Warning: Skipping malformed group directory '{group_dir_path.name}' in '{dir_path}'."
                    )
        # Sorts using LogFuncDir.__lt__ (newest date first, then by func_name).
        # Since all LogFuncDir here share the same date_day, they'll be sorted by func_name.
        return sorted(func_dirs_list)
        return sorted(group_dirs_list)

    def __eq__(self, other: object) -> bool:
        if not isinstance(other, LogDayDir):
@@ -228,12 +309,15 @@ class LogDayDir:
class LogManager:
    base_dir: Path

    def create_log_file(self, func: Callable, op_key: str) -> LogFile:
    def create_log_file(
        self, func: Callable, op_key: str, group: str = "default"
    ) -> LogFile:
        now_utc = datetime.datetime.now(tz=datetime.UTC)

        log_file = LogFile(
            op_key=op_key,
            date_day=now_utc.strftime("%Y-%m-%d"),
            group=group,
            date_second=now_utc.strftime("%H-%M-%S"),  # Corrected original's %H-$M-%S
            func_name=func.__name__,
            _base_dir=self.base_dir,
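Callers that do not care about grouping are unaffected, since group defaults to "default". A usage sketch (the function name and base path are illustrative, not from the diff):

from pathlib import Path

def deploy_machine() -> None: ...

manager = LogManager(base_dir=Path("/tmp/clan-logs"))
lf_a = manager.create_log_file(deploy_machine, "op-1")                    # .../default/deploy_machine/
lf_b = manager.create_log_file(deploy_machine, "op-2", group="machines")  # .../machines/deploy_machine/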
@@ -273,7 +357,10 @@ class LogManager:
        return sorted(log_day_dirs_list)  # Sorts using LogDayDir.__lt__ (newest first)

    def get_log_file(
        self, op_key_to_find: str, specific_date_day: str | None = None
        self,
        op_key_to_find: str,
        specific_date_day: str | None = None,
        specific_group: str | None = None,
    ) -> LogFile | None:
        days_to_search: list[LogDayDir]

@@ -298,38 +385,16 @@ class LogManager:
        for day_dir in (
            days_to_search
        ):  # Iterates newest day first if days_to_search came from list_log_days()
            # day_dir.get_log_files() returns List[LogFuncDir], sorted by func_name (date is same)
            for func_dir in day_dir.get_log_files():
                # func_dir.get_log_files() returns List[LogFile], sorted newest file first
                for log_file in func_dir.get_log_files():
                    if log_file.op_key == op_key_to_find:
                        return log_file
            # day_dir.get_log_files() returns List[LogGroupDir], sorted by group name
            for group_dir in day_dir.get_log_files():
                # Skip this group if specific_group is provided and doesn't match
                if specific_group is not None and group_dir.group != specific_group:
                    continue

                # group_dir.get_log_files() returns List[LogFuncDir], sorted by func_name
                for func_dir in group_dir.get_log_files():
                    # func_dir.get_log_files() returns List[LogFile], sorted newest file first
                    for log_file in func_dir.get_log_files():
                        if log_file.op_key == op_key_to_find:
                            return log_file
        return None

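The extra specific_group parameter narrows the scan without changing the newest-first search order; a sketch of the intended call shape (continuing the manager from the earlier sketch):

# Search every group (old behaviour):
hit = manager.get_log_file("op-2")
# Search one group only:
hit = manager.get_log_file("op-2", specific_group="machines")
if hit is not None:
    print(hit.get_file_path().read_text(encoding="utf-8"))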
@API.register_abstract
def list_log_days() -> list[LogDayDir]:
    """List all logs."""
    msg = "list_logs() is not implemented"
    raise NotImplementedError(msg)


@API.register_abstract
def list_log_funcs_at_day(day: str) -> list[LogFuncDir]:
    """List all logs for a specific function on a specific day."""
    msg = "list_func_logs() is not implemented"
    raise NotImplementedError(msg)


@API.register_abstract
def list_log_files(day: str, func_name: str) -> list[LogFile]:
    """List all log files for a specific function on a specific day."""
    msg = "list_func_logs() is not implemented"
    raise NotImplementedError(msg)


@API.register_abstract
def get_log_file(id_key: str) -> str:
    """Get a specific log file by op_key, function name and day."""
    msg = "get_log_file() is not implemented"
    raise NotImplementedError(msg)
53  pkgs/clan-cli/clan_lib/log_manager/api.py  (new file)
@@ -0,0 +1,53 @@
from clan_lib.api import API
from clan_lib.errors import ClanError
from clan_lib.log_manager import LogDayDir, LogFile, LogFuncDir, LogGroupDir, LogManager

LOG_MANAGER_INSTANCE: LogManager | None = None


@API.register
def list_log_days() -> list[LogDayDir]:
    """List all logs."""
    assert LOG_MANAGER_INSTANCE is not None
    return LOG_MANAGER_INSTANCE.list_log_days()


@API.register
def list_log_groups(date_day: str) -> list[LogGroupDir]:
    """List all log groups."""
    assert LOG_MANAGER_INSTANCE is not None
    day_dir = LogDayDir(date_day, LOG_MANAGER_INSTANCE.base_dir)
    return day_dir.get_log_files()


@API.register
def list_log_funcs_at_day(date_day: str, group: str) -> list[LogFuncDir]:
    """List all logs for a specific function on a specific day."""
    assert LOG_MANAGER_INSTANCE is not None
    group_dir = LogGroupDir(date_day, group, LOG_MANAGER_INSTANCE.base_dir)
    return group_dir.get_log_files()


@API.register
def list_log_files(date_day: str, group: str, func_name: str) -> list[LogFile]:
    """List all log files for a specific function on a specific day."""
    assert LOG_MANAGER_INSTANCE is not None
    func_dir = LogFuncDir(date_day, group, func_name, LOG_MANAGER_INSTANCE.base_dir)
    return func_dir.get_log_files()


@API.register
def get_log_file(id_key: str, group: str | None = None) -> str:
    """Get a specific log file by op_key, function name and day."""
    assert LOG_MANAGER_INSTANCE is not None

    log_file = LOG_MANAGER_INSTANCE.get_log_file(id_key, specific_group=group)
    if log_file is None:
        return ""

    file_path = log_file.get_file_path()
    if not file_path.exists():
        msg = f"Log file {file_path} does not exist."
        raise ClanError(msg)

    return file_path.read_text()
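A minimal sketch of how a host process is expected to use this module (it mirrors what app.py does above; the base path here is just an example):

from pathlib import Path

from clan_lib.log_manager import LogManager
from clan_lib.log_manager import api as log_manager_api

# Set once at startup; every @API.register endpoint above reads this global.
log_manager_api.LOG_MANAGER_INSTANCE = LogManager(base_dir=Path("/tmp/clan-logs"))

for day in log_manager_api.list_log_days():
    for group in log_manager_api.list_log_groups(day.date_day):
        print(day.date_day, group.group)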
@@ -1,6 +1,7 @@
# ruff: noqa: SLF001
import datetime
import logging  # For LogManager if not already imported
import urllib.parse
from pathlib import Path
from typing import Any  # Added Dict

@@ -8,10 +9,11 @@ import pytest

# Assuming your classes are in a file named 'log_manager_module.py'
# If they are in the same file as the tests, you don't need this relative import.
from .log_manager import (
from clan_lib.log_manager import (
    LogDayDir,
    LogFile,
    LogFuncDir,
    LogGroupDir,
    LogManager,
    is_correct_day_format,
)
@@ -67,13 +69,19 @@ def populated_log_structure(
    monkeypatch.setattr(datetime, "datetime", MockDateTime)

    # Day 1: 2023-10-26
    # Func A
    lf1 = log_manager.create_log_file(sample_func_one, "op_key_A1")  # 10-00-00
    # Group A, Func A
    lf1 = log_manager.create_log_file(
        sample_func_one, "op_key_A1", "group_a"
    )  # 10-00-00
    created_files["lf1"] = lf1
    lf2 = log_manager.create_log_file(sample_func_one, "op_key_A2")  # 10-01-01
    lf2 = log_manager.create_log_file(
        sample_func_one, "op_key_A2", "group_a"
    )  # 10-01-01
    created_files["lf2"] = lf2
    # Func B
    lf3 = log_manager.create_log_file(sample_func_two, "op_key_B1")  # 10-02-02
    # Group B, Func B
    lf3 = log_manager.create_log_file(
        sample_func_two, "op_key_B1", "group_b"
    )  # 10-02-02
    created_files["lf3"] = lf3

    # Day 2: 2023-10-27 (by advancing mock time enough)
@@ -82,18 +90,24 @@ def populated_log_structure(
    )
    MockDateTime._delta = datetime.timedelta(seconds=0)  # Reset delta for new day

    lf4 = log_manager.create_log_file(sample_func_one, "op_key_A3_day2")  # 12-00-00
    lf4 = log_manager.create_log_file(
        sample_func_one, "op_key_A3_day2", "group_a"
    )  # 12-00-00
    created_files["lf4"] = lf4

    # Create a malformed file and dir to test skipping
    malformed_day_dir = base_dir / "2023-13-01"  # Invalid date
    malformed_day_dir.mkdir(parents=True, exist_ok=True)
    (malformed_day_dir / "some_func").mkdir(exist_ok=True)
    (malformed_day_dir / "some_group" / "some_func").mkdir(parents=True, exist_ok=True)

    malformed_func_dir = base_dir / "2023-10-26" / "malformed_func_dir_name!"
    malformed_func_dir = (
        base_dir / "2023-10-26" / "group_a" / "malformed_func_dir_name!"
    )
    malformed_func_dir.mkdir(parents=True, exist_ok=True)

    malformed_log_file_dir = base_dir / "2023-10-26" / sample_func_one.__name__
    malformed_log_file_dir = (
        base_dir / "2023-10-26" / "group_a" / sample_func_one.__name__
    )
    (malformed_log_file_dir / "badname.log").touch()
    (malformed_log_file_dir / "10-00-00_op_key.txt").touch()  # Wrong suffix
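MockDateTime itself is defined earlier in this test module and is not part of the diff; from the timestamps in the comments above (10-00-00, 10-01-01, 10-02-02, i.e. 61 seconds per call) its general shape is presumably something like this sketch:

class MockDateTime(datetime.datetime):  # sketch, not the module's exact definition
    _now = datetime.datetime(2023, 10, 26, 10, 0, 0, tzinfo=datetime.UTC)
    _delta = datetime.timedelta(seconds=0)

    @classmethod
    def now(cls, tz=None):
        current = cls._now + cls._delta
        cls._delta += datetime.timedelta(seconds=61)  # 10:00:00 -> 10:01:01 -> 10:02:02
        return current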
@@ -126,42 +140,51 @@ def test_is_correct_day_format(date_str: str, expected: bool) -> None:

class TestLogFile:
    def test_creation_valid(self, tmp_path: Path) -> None:
        lf = LogFile("op1", "2023-10-26", "my_func", tmp_path, "10-20-30")
        lf = LogFile("op1", "2023-10-26", "test_group", "my_func", tmp_path, "10-20-30")
        assert lf.op_key == "op1"
        assert lf.date_day == "2023-10-26"
        assert lf.group == "test_group"
        assert lf.func_name == "my_func"
        assert lf._base_dir == tmp_path
        assert lf.date_second == "10-20-30"

    def test_creation_invalid_date_day(self, tmp_path: Path) -> None:
        with pytest.raises(ValueError, match="not in YYYY-MM-DD format"):
            LogFile("op1", "2023/10/26", "my_func", tmp_path, "10-20-30")
            LogFile("op1", "2023/10/26", "test_group", "my_func", tmp_path, "10-20-30")

    def test_creation_invalid_date_second(self, tmp_path: Path) -> None:
        with pytest.raises(ValueError, match="not in HH-MM-SS format"):
            LogFile("op1", "2023-10-26", "my_func", tmp_path, "10:20:30")
            LogFile("op1", "2023-10-26", "test_group", "my_func", tmp_path, "10:20:30")

    def test_datetime_obj(self, tmp_path: Path) -> None:
        lf = LogFile("op1", "2023-10-26", "my_func", tmp_path, "10-20-30")
        lf = LogFile("op1", "2023-10-26", "test_group", "my_func", tmp_path, "10-20-30")
        expected_dt = datetime.datetime(2023, 10, 26, 10, 20, 30, tzinfo=datetime.UTC)
        assert lf._datetime_obj == expected_dt

    def test_from_path_valid(self, tmp_path: Path) -> None:
        base = tmp_path / "logs"
        file_path = base / "2023-10-26" / "my_func" / "10-20-30_op_key_123.log"
        file_path = (
            base / "2023-10-26" / "test_group" / "my_func" / "10-20-30_op_key_123.log"
        )
        file_path.parent.mkdir(parents=True, exist_ok=True)
        file_path.touch()

        lf = LogFile.from_path(file_path)
        assert lf.op_key == "op_key_123"
        assert lf.date_day == "2023-10-26"
        assert lf.group == "test_group"
        assert lf.func_name == "my_func"
        assert lf._base_dir == base
        assert lf.date_second == "10-20-30"

    def test_from_path_invalid_filename_format(self, tmp_path: Path) -> None:
        file_path = (
            tmp_path / "logs" / "2023-10-26" / "my_func" / "10-20-30-op_key_123.log"
            tmp_path
            / "logs"
            / "2023-10-26"
            / "test_group"
            / "my_func"
            / "10-20-30-op_key_123.log"
        )  # Extra dash
        file_path.parent.mkdir(parents=True, exist_ok=True)
        file_path.touch()
@@ -169,7 +192,14 @@ class TestLogFile:
            LogFile.from_path(file_path)

    def test_from_path_filename_no_op_key(self, tmp_path: Path) -> None:
        file_path = tmp_path / "logs" / "2023-10-26" / "my_func" / "10-20-30_.log"
        file_path = (
            tmp_path
            / "logs"
            / "2023-10-26"
            / "test_group"
            / "my_func"
            / "10-20-30_.log"
        )
        file_path.parent.mkdir(parents=True, exist_ok=True)
        file_path.touch()
        # This will result in op_key being ""
@@ -177,101 +207,106 @@ class TestLogFile:
        assert lf.op_key == ""

    def test_get_file_path(self, tmp_path: Path) -> None:
        lf = LogFile("op1", "2023-10-26", "my_func", tmp_path, "10-20-30")
        expected_path = tmp_path / "2023-10-26" / "my_func" / "10-20-30_op1.log"
        lf = LogFile("op1", "2023-10-26", "test_group", "my_func", tmp_path, "10-20-30")
        expected_path = (
            tmp_path / "2023-10-26" / "test_group" / "my_func" / "10-20-30_op1.log"
        )
        assert lf.get_file_path() == expected_path

    def test_equality(self, tmp_path: Path) -> None:
        lf1 = LogFile("op1", "2023-10-26", "func_a", tmp_path, "10-00-00")
        lf2 = LogFile("op1", "2023-10-26", "func_a", tmp_path, "10-00-00")
        lf1 = LogFile("op1", "2023-10-26", "group_a", "func_a", tmp_path, "10-00-00")
        lf2 = LogFile("op1", "2023-10-26", "group_a", "func_a", tmp_path, "10-00-00")
        lf3 = LogFile(
            "op2", "2023-10-26", "func_a", tmp_path, "10-00-00"
            "op2", "2023-10-26", "group_a", "func_a", tmp_path, "10-00-00"
        )  # Diff op_key
        lf4 = LogFile("op1", "2023-10-26", "func_a", tmp_path, "10-00-01")  # Diff time
        lf4 = LogFile(
            "op1", "2023-10-26", "group_a", "func_a", tmp_path, "10-00-01"
        )  # Diff time
        lf5 = LogFile(
            "op1", "2023-10-26", "group_b", "func_a", tmp_path, "10-00-00"
        )  # Diff group
        assert lf1 == lf2
        assert lf1 != lf3
        assert lf1 != lf4
        assert lf1 != lf5
        assert lf1 != "not a logfile"
    def test_ordering(self, tmp_path: Path) -> None:
        # Newest datetime first
        lf_newest = LogFile("op", "2023-10-26", "f", tmp_path, "10-00-01")
        lf_older = LogFile("op", "2023-10-26", "f", tmp_path, "10-00-00")
        lf_oldest_d = LogFile("op", "2023-10-25", "f", tmp_path, "12-00-00")
        lf_newest = LogFile("op", "2023-10-26", "group", "f", tmp_path, "10-00-01")
        lf_older = LogFile("op", "2023-10-26", "group", "f", tmp_path, "10-00-00")
        lf_oldest_d = LogFile("op", "2023-10-25", "group", "f", tmp_path, "12-00-00")

        # Same datetime, different func_name (alphabetical)
        lf_func_a = LogFile("op", "2023-10-26", "func_a", tmp_path, "10-00-00")
        lf_func_b = LogFile("op", "2023-10-26", "func_b", tmp_path, "10-00-00")
        # Same datetime, different group (alphabetical)
        lf_group_a = LogFile(
            "op", "2023-10-26", "group_a", "func", tmp_path, "10-00-00"
        )
        lf_group_b = LogFile(
            "op", "2023-10-26", "group_b", "func", tmp_path, "10-00-00"
        )

        # Same datetime, same func_name, different op_key (alphabetical)
        lf_op_a = LogFile("op_a", "2023-10-26", "func_a", tmp_path, "10-00-00")
        lf_op_b = LogFile("op_b", "2023-10-26", "func_a", tmp_path, "10-00-00")
        # Same datetime, same group, different func_name (alphabetical)
        lf_func_a = LogFile("op", "2023-10-26", "group", "func_a", tmp_path, "10-00-00")
        lf_func_b = LogFile("op", "2023-10-26", "group", "func_b", tmp_path, "10-00-00")

        # Same datetime, same group, same func_name, different op_key (alphabetical)
        lf_op_a = LogFile("op_a", "2023-10-26", "group", "func_a", tmp_path, "10-00-00")
        lf_op_b = LogFile("op_b", "2023-10-26", "group", "func_a", tmp_path, "10-00-00")

        assert lf_newest < lf_older  # lf_newest is "less than" because it's newer
        assert lf_older < lf_oldest_d

        assert lf_group_a < lf_group_b
        assert not (lf_group_b < lf_group_a)

        assert lf_func_a < lf_func_b
        assert not (lf_func_b < lf_func_a)

        assert lf_op_a < lf_op_b
        assert not (lf_op_b < lf_op_a)

        # Test sorting
        files = [
            lf_older,
            lf_op_b,
            lf_newest,
            lf_func_a,
            lf_oldest_d,
            lf_op_a,
            lf_func_b,
        ]
        # Expected order (newest first, then func_name, then op_key):
        # 1. lf_newest (2023-10-26 10:00:01 func_f op)
        # 2. lf_func_a (2023-10-26 10:00:00 func_a op) - same time as lf_older, but func_a < func_f
        # 3. lf_op_a (2023-10-26 10:00:00 func_a op_a)
        # 4. lf_op_b (2023-10-26 10:00:00 func_a op_b)
        # 5. lf_func_b (2023-10-26 10:00:00 func_b op)
        # 6. lf_older (2023-10-26 10:00:00 func_f op)
        # 7. lf_oldest_d(2023-10-25 12:00:00 func_f op)
        # Test sorting with groups
        lf_ga_fa_op = LogFile(
            "op", "2023-10-26", "group_a", "func_a", tmp_path, "10-00-00"
        )
        lf_ga_fa_opa = LogFile(
            "op_a", "2023-10-26", "group_a", "func_a", tmp_path, "10-00-00"
        )
        lf_ga_fb_op = LogFile(
            "op", "2023-10-26", "group_a", "func_b", tmp_path, "10-00-00"
        )
        lf_gb_fa_op = LogFile(
            "op", "2023-10-26", "group_b", "func_a", tmp_path, "10-00-00"
        )
        lf_g_f_op1 = LogFile(
            "op", "2023-10-26", "group", "f", tmp_path, "10-00-01"
        )  # newest time
        lf_g_f_op0 = LogFile("op", "2023-10-26", "group", "f", tmp_path, "10-00-00")
        lf_old_day = LogFile("op", "2023-10-25", "group", "f", tmp_path, "12-00-00")

        sorted(files)
        # Let's re-evaluate based on rules:
        # lf_func_a is same time as lf_older. func_a < f. So lf_func_a < lf_older.
        # lf_op_a is same time and func as lf_func_a. op_a > op. So lf_func_a < lf_op_a.

        lf_fa_op = LogFile("op", "2023-10-26", "func_a", tmp_path, "10-00-00")
        lf_fa_opa = LogFile("op_a", "2023-10-26", "func_a", tmp_path, "10-00-00")
        lf_fa_opb = LogFile("op_b", "2023-10-26", "func_a", tmp_path, "10-00-00")
        lf_fb_op = LogFile("op", "2023-10-26", "func_b", tmp_path, "10-00-00")
        lf_ff_op1 = LogFile("op", "2023-10-26", "f", tmp_path, "10-00-01")  # lf_newest
        lf_ff_op0 = LogFile("op", "2023-10-26", "f", tmp_path, "10-00-00")  # lf_older
        lf_old_day = LogFile(
            "op", "2023-10-25", "f", tmp_path, "12-00-00"
        )  # lf_oldest_d

        files_redefined = [
            lf_fa_op,
            lf_fa_opa,
            lf_fa_opb,
            lf_fb_op,
            lf_ff_op1,
            lf_ff_op0,
        files_with_groups = [
            lf_ga_fa_op,
            lf_ga_fa_opa,
            lf_ga_fb_op,
            lf_gb_fa_op,
            lf_g_f_op1,
            lf_g_f_op0,
            lf_old_day,
        ]
        sorted_redefined = sorted(files_redefined)
        sorted_with_groups = sorted(files_with_groups)

        expected_redefined = [
            lf_ff_op1,  # Newest time
            lf_ff_op0,  # 2023-10-26 10:00:00, f, op
            lf_fa_op,  # 2023-10-26 10:00:00, func_a, op (func_a smallest)
            lf_fa_opa,  # 2023-10-26 10:00:00, func_a, op_a
            lf_fa_opb,  # 2023-10-26 10:00:00, func_a, op_b
            lf_fb_op,  # 2023-10-26 10:00:00, func_b, op
            lf_old_day,
        # Expected order (newest first, then group, then func_name, then op_key):
        expected_with_groups = [
            lf_g_f_op1,  # Newest time: 2023-10-26 10:00:01
            lf_g_f_op0,  # 2023-10-26 10:00:00, group, f, op
            lf_ga_fa_op,  # 2023-10-26 10:00:00, group_a, func_a, op
            lf_ga_fa_opa,  # 2023-10-26 10:00:00, group_a, func_a, op_a
            lf_ga_fb_op,  # 2023-10-26 10:00:00, group_a, func_b, op
            lf_gb_fa_op,  # 2023-10-26 10:00:00, group_b, func_a, op
            lf_old_day,  # Oldest time: 2023-10-25 12:00:00
        ]

        assert sorted_redefined == expected_redefined
        assert sorted_with_groups == expected_with_groups

# --- Tests for LogFuncDir ---
@@ -279,26 +314,27 @@ class TestLogFile:

class TestLogFuncDir:
    def test_creation_valid(self, tmp_path: Path) -> None:
        lfd = LogFuncDir("2023-10-26", "my_func", tmp_path)
        lfd = LogFuncDir("2023-10-26", "test_group", "my_func", tmp_path)
        assert lfd.date_day == "2023-10-26"
        assert lfd.group == "test_group"
        assert lfd.func_name == "my_func"
        assert lfd._base_dir == tmp_path

    def test_creation_invalid_date_day(self, tmp_path: Path) -> None:
        with pytest.raises(ValueError, match="not in YYYY-MM-DD format"):
            LogFuncDir("2023/10/26", "my_func", tmp_path)
            LogFuncDir("2023/10/26", "test_group", "my_func", tmp_path)

    def test_date_obj(self, tmp_path: Path) -> None:
        lfd = LogFuncDir("2023-10-26", "my_func", tmp_path)
        lfd = LogFuncDir("2023-10-26", "test_group", "my_func", tmp_path)
        assert lfd._date_obj == datetime.date(2023, 10, 26)

    def test_get_dir_path(self, tmp_path: Path) -> None:
        lfd = LogFuncDir("2023-10-26", "my_func", tmp_path)
        expected = tmp_path / "2023-10-26" / "my_func"
        lfd = LogFuncDir("2023-10-26", "test_group", "my_func", tmp_path)
        expected = tmp_path / "2023-10-26" / "test_group" / "my_func"
        assert lfd.get_dir_path() == expected

    def test_get_log_files_empty_or_missing(self, tmp_path: Path) -> None:
        lfd = LogFuncDir("2023-10-26", "non_existent_func", tmp_path)
        lfd = LogFuncDir("2023-10-26", "test_group", "non_existent_func", tmp_path)
        assert lfd.get_log_files() == []  # Dir does not exist

        dir_path = lfd.get_dir_path()
@@ -309,7 +345,7 @@ class TestLogFuncDir:
        self, tmp_path: Path, caplog: pytest.LogCaptureFixture
    ) -> None:
        base = tmp_path
        lfd = LogFuncDir("2023-10-26", "my_func", base)
        lfd = LogFuncDir("2023-10-26", "test_group", "my_func", base)
        dir_path = lfd.get_dir_path()
        dir_path.mkdir(parents=True, exist_ok=True)

@@ -338,38 +374,45 @@ class TestLogFuncDir:
            for record in caplog.records
        )

        # Expected order: newest first (10-00-00_op1, then 10-00-00_op0, then 09-00-00_op2)
        # Sorting by LogFile: newest datetime first, then func_name (same here), then op_key
        # Expected order: newest first (10-00-01_op1, then 10-00-00_op0, then 09-00-00_op2)
        # Sorting by LogFile: newest datetime first, then group (same here), then func_name (same here), then op_key
        expected_lf1 = LogFile.from_path(lf1_path)
        expected_lf2 = LogFile.from_path(lf2_path)
        expected_lf3 = LogFile.from_path(lf3_path)

        assert log_files[0] == expected_lf1  # 10-00-00_op1
        assert log_files[0] == expected_lf1  # 10-00-01_op1
        assert log_files[1] == expected_lf3  # 10-00-00_op0 (op0 < op1)
        assert log_files[2] == expected_lf2  # 09-00-00_op2

    def test_equality(self, tmp_path: Path) -> None:
        lfd1 = LogFuncDir("2023-10-26", "func_a", tmp_path)
        lfd2 = LogFuncDir("2023-10-26", "func_a", tmp_path)
        lfd3 = LogFuncDir("2023-10-27", "func_a", tmp_path)  # Diff date
        lfd4 = LogFuncDir("2023-10-26", "func_b", tmp_path)  # Diff func_name
        lfd1 = LogFuncDir("2023-10-26", "group_a", "func_a", tmp_path)
        lfd2 = LogFuncDir("2023-10-26", "group_a", "func_a", tmp_path)
        lfd3 = LogFuncDir("2023-10-27", "group_a", "func_a", tmp_path)  # Diff date
        lfd4 = LogFuncDir("2023-10-26", "group_a", "func_b", tmp_path)  # Diff func_name
        lfd5 = LogFuncDir("2023-10-26", "group_b", "func_a", tmp_path)  # Diff group
        assert lfd1 == lfd2
        assert lfd1 != lfd3
        assert lfd1 != lfd4
        assert lfd1 != lfd5
        assert lfd1 != "not a logfuncdir"

    def test_ordering(self, tmp_path: Path) -> None:
        # Newest date first
        lfd_new_date = LogFuncDir("2023-10-27", "func_a", tmp_path)
        lfd_old_date = LogFuncDir("2023-10-26", "func_a", tmp_path)
        lfd_new_date = LogFuncDir("2023-10-27", "group_a", "func_a", tmp_path)
        lfd_old_date = LogFuncDir("2023-10-26", "group_a", "func_a", tmp_path)

        # Same date, different func_name (alphabetical)
        lfd_func_a = LogFuncDir("2023-10-26", "func_a", tmp_path)
        lfd_func_b = LogFuncDir("2023-10-26", "func_b", tmp_path)
        # Same date, different group (alphabetical)
        lfd_group_a = LogFuncDir("2023-10-26", "group_a", "func_a", tmp_path)
        lfd_group_b = LogFuncDir("2023-10-26", "group_b", "func_a", tmp_path)

        # Same date, same group, different func_name (alphabetical)
        lfd_func_a = LogFuncDir("2023-10-26", "group_a", "func_a", tmp_path)
        lfd_func_b = LogFuncDir("2023-10-26", "group_a", "func_b", tmp_path)

        assert (
            lfd_new_date < lfd_old_date
        )  # lfd_new_date is "less than" because it's newer
        assert lfd_group_a < lfd_group_b
        assert lfd_func_a < lfd_func_b

        # Expected sort: lfd_new_date, then lfd_func_a, then lfd_func_b, then lfd_old_date (if func_a different)
@@ -381,18 +424,22 @@ class TestLogFuncDir:
        # lfd_old_date (2023-10-26, func_a) -- wait, lfd_func_a IS lfd_old_date content-wise if func_name 'func_a'
        # lfd_func_b (2023-10-26, func_b)

        # Redefine for clarity
        lfd1 = LogFuncDir("2023-10-27", "z_func", tmp_path)  # Newest date
        # Test sorting with groups
        lfd1 = LogFuncDir("2023-10-27", "group_z", "z_func", tmp_path)  # Newest date
        lfd2 = LogFuncDir(
            "2023-10-26", "a_func", tmp_path
        )  # Older date, alpha first func
            "2023-10-26", "group_a", "a_func", tmp_path
        )  # Older date, alpha first group and func
        lfd3 = LogFuncDir(
            "2023-10-26", "b_func", tmp_path
        )  # Older date, alpha second func
            "2023-10-26", "group_a", "b_func", tmp_path
        )  # Older date, same group, alpha second func
        lfd4 = LogFuncDir(
            "2023-10-26", "group_b", "a_func", tmp_path
        )  # Older date, alpha second group

        items_redefined = [lfd3, lfd1, lfd2]
        items_redefined = [lfd4, lfd3, lfd1, lfd2]
        sorted_items = sorted(items_redefined)
        expected_sorted = [lfd1, lfd2, lfd3]
        # Expected order: newest date first, then by group, then by func_name
        expected_sorted = [lfd1, lfd2, lfd3, lfd4]
        assert sorted_items == expected_sorted

@@ -436,12 +483,19 @@ class TestLogDayDir:
        day_dir_path = ldd.get_dir_path()
        day_dir_path.mkdir(parents=True, exist_ok=True)

        # Create func dirs
        func_a_path = day_dir_path / "func_a"
        # Create group dirs with func dirs inside
        group_a_path = day_dir_path / "group_a"
        group_a_path.mkdir()
        func_a_path = group_a_path / "func_a"
        func_a_path.mkdir()
        func_b_path = day_dir_path / "func_b"
        func_b_path = group_a_path / "func_b"
        func_b_path.mkdir()

        group_b_path = day_dir_path / "group_b"
        group_b_path.mkdir()
        func_c_path = group_b_path / "func_c"
        func_c_path.mkdir()

        # Create a non-dir and a malformed func dir name (if your logic would try to parse it)
        (day_dir_path / "not_a_dir.txt").touch()
        # LogDayDir's get_log_files doesn't try to parse func dir names for validity beyond being a dir
@@ -450,18 +504,28 @@ class TestLogDayDir:
        # So, the warning there is unlikely to trigger from func_dir_path.name issues.

        with caplog.at_level(logging.WARNING):
            log_func_dirs = ldd.get_log_files()
            log_group_dirs = ldd.get_log_files()

        assert len(log_func_dirs) == 2
        assert len(log_group_dirs) == 2  # group_a and group_b
        # No warnings expected from this specific setup for LogDayDir.get_log_files
        # assert not any("Skipping malformed function directory" in record.message for record in caplog.records)
        # assert not any("Skipping malformed group directory" in record.message for record in caplog.records)

        # Expected order: func_name alphabetical (since date_day is the same for all)
        expected_lfd_a = LogFuncDir("2023-10-26", "func_a", base)
        expected_lfd_b = LogFuncDir("2023-10-26", "func_b", base)
        # Expected order: group alphabetical
        expected_lgd_a = LogGroupDir("2023-10-26", "group_a", base)
        expected_lgd_b = LogGroupDir("2023-10-26", "group_b", base)

        assert log_func_dirs[0] == expected_lfd_a
        assert log_func_dirs[1] == expected_lfd_b
        assert log_group_dirs[0] == expected_lgd_a
        assert log_group_dirs[1] == expected_lgd_b

        # Test that each group directory contains the expected function directories
        group_a_funcs = log_group_dirs[0].get_log_files()
        assert len(group_a_funcs) == 2  # func_a and func_b
        assert group_a_funcs[0].func_name == "func_a"
        assert group_a_funcs[1].func_name == "func_b"

        group_b_funcs = log_group_dirs[1].get_log_files()
        assert len(group_b_funcs) == 1  # func_c
        assert group_b_funcs[0].func_name == "func_c"

    def test_equality(self, tmp_path: Path) -> None:
        ldd1 = LogDayDir("2023-10-26", tmp_path)
@@ -486,6 +550,55 @@ class TestLogDayDir:
        expected_sorted = [ldd_new, ldd_old, ldd_ancient]
        assert sorted_items == expected_sorted

    def test_get_log_files_returns_correct_groups(self, tmp_path: Path) -> None:
        """Test that get_log_files returns LogGroupDir objects with correct group names."""
        base = tmp_path
        ldd = LogDayDir("2023-10-26", base)
        day_dir_path = ldd.get_dir_path()
        day_dir_path.mkdir(parents=True, exist_ok=True)

        # Create multiple group directories with different names
        groups_to_create = ["auth", "database", "api", "web_ui"]
        expected_groups = []

        for group_name in groups_to_create:
            group_path = day_dir_path / urllib.parse.quote(group_name, safe="")
            group_path.mkdir()
            # Create at least one function directory to make it valid
            func_path = group_path / "test_func"
            func_path.mkdir()
            expected_groups.append(group_name)

        # Also create a group with special characters that need URL encoding
        special_group = "my group & special!"
        encoded_special_group = urllib.parse.quote(special_group, safe="")
        special_group_path = day_dir_path / encoded_special_group
        special_group_path.mkdir()
        (special_group_path / "test_func").mkdir()
        expected_groups.append(special_group)

        # Get the log group directories
        log_group_dirs = ldd.get_log_files()

        # Verify we get the correct number of groups
        assert len(log_group_dirs) == len(expected_groups)

        # Verify each group has the correct name (should be URL-decoded)
        actual_groups = [lgd.group for lgd in log_group_dirs]

        # Sort both lists for comparison since order might vary
        assert sorted(actual_groups) == sorted(expected_groups)

        # Verify that each LogGroupDir object has the correct properties
        for lgd in log_group_dirs:
            assert lgd.date_day == "2023-10-26"
            assert lgd._base_dir == base
            assert lgd.group in expected_groups
            # Verify the group directory path exists (with URL encoding applied)
            expected_path = day_dir_path / urllib.parse.quote(lgd.group, safe="")
            assert expected_path.exists()
            assert expected_path.is_dir()

# --- Tests for LogManager ---

@@ -525,6 +638,7 @@ class TestLogManager:
        expected_path = (
            base_dir
            / expected_date_day
            / "default"  # Default group
            / sample_func_one.__name__
            / f"{log_file_obj.date_second}_{op_key}.log"  # Use actual created second
        )
@@ -620,3 +734,149 @@ class TestLogManager:
            log_manager.get_log_file("any_op_key", specific_date_day="2023/01/01")
            is None
        )


# --- Tests for URL encoding/decoding of group names ---


class TestGroupURLEncoding:
    def test_group_with_special_characters(self, tmp_path: Path) -> None:
        """Test that group names with special characters are URL encoded/decoded correctly."""

        # Test group name with spaces and special characters
        group_name = "my group with spaces & special chars!"
        encoded_group = urllib.parse.quote(group_name, safe="")

        log_manager = LogManager(base_dir=tmp_path)
        log_file = log_manager.create_log_file(sample_func_one, "test_op", group_name)

        # Check that the group is stored correctly (not encoded in the LogFile object)
        assert log_file.group == group_name

        # Check that the file path uses the encoded version
        file_path = log_file.get_file_path()
        assert encoded_group in str(file_path)
        assert file_path.exists()

        # Test that we can read it back correctly
        read_log_file = LogFile.from_path(file_path)
        assert read_log_file.group == group_name  # Should be decoded back
        assert read_log_file == log_file

    def test_group_with_forward_slash(self, tmp_path: Path) -> None:
        """Test that group names with forward slashes are handled correctly."""

        group_name = "parent/child"
        encoded_group = urllib.parse.quote(group_name, safe="")

        log_manager = LogManager(base_dir=tmp_path)
        log_file = log_manager.create_log_file(sample_func_one, "test_op", group_name)

        file_path = log_file.get_file_path()
        assert encoded_group in str(file_path)
        assert (
            "/" not in file_path.parent.parent.name
        )  # The group directory name should be encoded
        assert file_path.exists()

        # Verify round-trip
        read_log_file = LogFile.from_path(file_path)
        assert read_log_file.group == group_name

    def test_group_unicode_characters(self, tmp_path: Path) -> None:
        """Test that group names with Unicode characters are handled correctly."""

        group_name = "测试组 🚀"
        encoded_group = urllib.parse.quote(group_name, safe="")

        log_manager = LogManager(base_dir=tmp_path)
        log_file = log_manager.create_log_file(sample_func_one, "test_op", group_name)

        file_path = log_file.get_file_path()
        assert encoded_group in str(file_path)
        assert file_path.exists()

        # Verify round-trip
        read_log_file = LogFile.from_path(file_path)
        assert read_log_file.group == group_name


# --- Tests for group directory creation and traversal ---


class TestGroupDirectoryHandling:
    def test_create_log_file_with_custom_group(self, tmp_path: Path) -> None:
        """Test creating log files with custom group names."""
        log_manager = LogManager(base_dir=tmp_path)

        # Create log files with different groups
        lf1 = log_manager.create_log_file(sample_func_one, "op1", "auth")
        lf2 = log_manager.create_log_file(sample_func_two, "op2", "database")
        lf3 = log_manager.create_log_file(sample_func_one, "op3")  # default group

        assert lf1.group == "auth"
        assert lf2.group == "database"
        assert lf3.group == "default"  # Default group

        # Check that the directory structure is correct
        today = lf1.date_day
        assert (tmp_path / today / "auth" / sample_func_one.__name__).exists()
        assert (tmp_path / today / "database" / sample_func_two.__name__).exists()
        assert (tmp_path / today / "default" / sample_func_one.__name__).exists()

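The directory assertions above correspond to this layout (illustrative; date and timestamps abbreviated):

<tmp_path>/<YYYY-MM-DD>/
  auth/sample_func_one/      <HH-MM-SS>_op1.log
  database/sample_func_two/  <HH-MM-SS>_op2.log
  default/sample_func_one/   <HH-MM-SS>_op3.log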
    def test_list_log_days_with_groups(self, tmp_path: Path) -> None:
        """Test that LogDayDir correctly traverses group directories."""
        log_manager = LogManager(base_dir=tmp_path)

        # Create log files with different groups
        log_manager.create_log_file(sample_func_one, "op1", "auth")
        log_manager.create_log_file(sample_func_two, "op2", "database")
        log_manager.create_log_file(sample_func_one, "op3", "auth")  # Same group as lf1

        # Get the day directory and check its contents
        day_dirs = log_manager.list_log_days()
        assert len(day_dirs) == 1

        log_group_dirs = day_dirs[0].get_log_files()
        assert len(log_group_dirs) == 2  # auth and database groups

        # Check that we have the correct groups
        groups = [lgd.group for lgd in log_group_dirs]
        expected_groups = ["auth", "database"]
        # Sort both for comparison since order might vary
        assert sorted(groups) == sorted(expected_groups)

        # Check function directories within each group
        all_funcs = []
        for group_dir in log_group_dirs:
            func_dirs = group_dir.get_log_files()
            for func_dir in func_dirs:
                all_funcs.append((func_dir.group, func_dir.func_name))

        expected_funcs = [
            ("auth", sample_func_one.__name__),
            ("database", sample_func_two.__name__),
        ]
        assert sorted(all_funcs) == sorted(expected_funcs)

    def test_get_log_file_across_groups(self, tmp_path: Path) -> None:
        """Test that get_log_file can find files across different groups."""
        log_manager = LogManager(base_dir=tmp_path)

        # Create log files with same op_key but different groups
        lf1 = log_manager.create_log_file(sample_func_one, "shared_op", "auth")
        lf2 = log_manager.create_log_file(sample_func_two, "shared_op", "database")
        lf3 = log_manager.create_log_file(sample_func_one, "unique_op", "auth")

        # get_log_file should find the first match (implementation detail: depends on sort order)
        found_shared = log_manager.get_log_file("shared_op")
        assert found_shared is not None
        assert found_shared.op_key == "shared_op"
        # Could be either lf1 or lf2 depending on sort order
        assert found_shared in [lf1, lf2]

        found_unique = log_manager.get_log_file("unique_op")
        assert found_unique == lf3

        not_found = log_manager.get_log_file("nonexistent_op")
        assert not_found is None