diff --git a/clanServices/wireguard/ipv6_allocator.py b/clanServices/wireguard/ipv6_allocator.py index 16050efb2..fb4062005 100755 --- a/clanServices/wireguard/ipv6_allocator.py +++ b/clanServices/wireguard/ipv6_allocator.py @@ -1,6 +1,5 @@ #!/usr/bin/env python3 -""" -IPv6 address allocator for WireGuard networks. +"""IPv6 address allocator for WireGuard networks. Network layout: - Base network: /40 ULA prefix (fd00::/8 + 32 bits from hash) @@ -20,8 +19,7 @@ def hash_string(s: str) -> str: def generate_ula_prefix(instance_name: str) -> ipaddress.IPv6Network: - """ - Generate a /40 ULA prefix from instance name. + """Generate a /40 ULA prefix from instance name. Format: fd{32-bit hash}/40 This gives us fd00:0000:0000::/40 through fdff:ffff:ff00::/40 @@ -46,10 +44,10 @@ def generate_ula_prefix(instance_name: str) -> ipaddress.IPv6Network: def generate_controller_subnet( - base_network: ipaddress.IPv6Network, controller_name: str + base_network: ipaddress.IPv6Network, + controller_name: str, ) -> ipaddress.IPv6Network: - """ - Generate a /56 subnet for a controller from the base /40 network. + """Generate a /56 subnet for a controller from the base /40 network. We have 16 bits (40 to 56) to allocate controller subnets. This allows for 65,536 possible controller subnets. @@ -68,8 +66,7 @@ def generate_controller_subnet( def generate_peer_suffix(peer_name: str) -> str: - """ - Generate a unique 64-bit host suffix for a peer. + """Generate a unique 64-bit host suffix for a peer. This suffix will be used in all controller subnets to create unique addresses. Format: :xxxx:xxxx:xxxx:xxxx (64 bits) @@ -86,7 +83,7 @@ def generate_peer_suffix(peer_name: str) -> str: def main() -> None: if len(sys.argv) < 4: print( - "Usage: ipv6_allocator.py " + "Usage: ipv6_allocator.py ", ) sys.exit(1) diff --git a/docs/nix/render_options/__init__.py b/docs/nix/render_options/__init__.py index f5331110c..4537abb78 100644 --- a/docs/nix/render_options/__init__.py +++ b/docs/nix/render_options/__init__.py @@ -66,8 +66,7 @@ def render_option_header(name: str) -> str: def join_lines_with_indentation(lines: list[str], indent: int = 4) -> str: - """ - Joins multiple lines with a specified number of whitespace characters as indentation. + """Joins multiple lines with a specified number of whitespace characters as indentation. Args: lines (list of str): The lines of text to join. @@ -75,6 +74,7 @@ def join_lines_with_indentation(lines: list[str], indent: int = 4) -> str: Returns: str: The indented and concatenated string. + """ # Create the indentation string (e.g., four spaces) indent_str = " " * indent @@ -161,7 +161,10 @@ def render_option( def print_options( - options_file: str, head: str, no_options: str, replace_prefix: str | None = None + options_file: str, + head: str, + no_options: str, + replace_prefix: str | None = None, ) -> str: res = "" with (Path(options_file) / "share/doc/nixos/options.json").open() as f: @@ -235,7 +238,7 @@ def produce_clan_core_docs() -> None: for submodule_name, split_options in split.items(): outfile = f"{module_name}/{submodule_name}.md" print( - f"[clan_core.{submodule_name}] Rendering option of: {submodule_name}... {outfile}" + f"[clan_core.{submodule_name}] Rendering option of: {submodule_name}... 
{outfile}", ) init_level = 1 root = options_to_tree(split_options, debug=True) @@ -271,7 +274,8 @@ def produce_clan_core_docs() -> None: def render_categories( - categories: list[str], categories_info: dict[str, CategoryInfo] + categories: list[str], + categories_info: dict[str, CategoryInfo], ) -> str: res = """
""" for cat in categories: @@ -338,7 +342,8 @@ Learn how to use `clanServices` in practice in the [Using clanServices guide](.. # output += "## Categories\n\n" output += render_categories( - module_info["manifest"]["categories"], ModuleManifest.categories_info() + module_info["manifest"]["categories"], + ModuleManifest.categories_info(), ) output += f"{module_info['manifest']['readme']}\n" @@ -368,8 +373,7 @@ Learn how to use `clanServices` in practice in the [Using clanServices guide](.. def split_options_by_root(options: dict[str, Any]) -> dict[str, dict[str, Any]]: - """ - Split the flat dictionary of options into a dict of which each entry will construct complete option trees. + """Split the flat dictionary of options into a dict of which each entry will construct complete option trees. { "a": { Data } "a.b": { Data } @@ -453,9 +457,7 @@ def option_short_name(option_name: str) -> str: def options_to_tree(options: dict[str, Any], debug: bool = False) -> Option: - """ - Convert the options dictionary to a tree structure. - """ + """Convert the options dictionary to a tree structure.""" # Helper function to create nested structure def add_to_tree(path_parts: list[str], info: Any, current_node: Option) -> None: @@ -507,22 +509,24 @@ def options_to_tree(options: dict[str, Any], debug: bool = False) -> Option: def options_docs_from_tree( - root: Option, init_level: int = 1, prefix: list[str] | None = None + root: Option, + init_level: int = 1, + prefix: list[str] | None = None, ) -> str: - """ - eender the options from the tree structure. + """Eender the options from the tree structure. Args: root (Option): The root option node. init_level (int): The initial level of indentation. prefix (list str): Will be printed as common prefix of all attribute names. + """ def render_tree(option: Option, level: int = init_level) -> str: output = "" should_render = not option.name.startswith("<") and not option.name.startswith( - "_" + "_", ) if should_render: # short_name = option_short_name(option.name) @@ -547,7 +551,7 @@ def options_docs_from_tree( return md -if __name__ == "__main__": # +if __name__ == "__main__": produce_clan_core_docs() produce_clan_service_author_docs() diff --git a/lib/test/container-test-driver/test_driver/__init__.py b/lib/test/container-test-driver/test_driver/__init__.py index 48611614a..67a336a4a 100644 --- a/lib/test/container-test-driver/test_driver/__init__.py +++ b/lib/test/container-test-driver/test_driver/__init__.py @@ -32,11 +32,15 @@ def init_test_environment() -> None: # Set up network bridge subprocess.run( - ["ip", "link", "add", "br0", "type", "bridge"], check=True, text=True + ["ip", "link", "add", "br0", "type", "bridge"], + check=True, + text=True, ) subprocess.run(["ip", "link", "set", "br0", "up"], check=True, text=True) subprocess.run( - ["ip", "addr", "add", "192.168.1.254/24", "dev", "br0"], check=True, text=True + ["ip", "addr", "add", "192.168.1.254/24", "dev", "br0"], + check=True, + text=True, ) # Set up minimal passwd file for unprivileged operations @@ -111,8 +115,7 @@ def mount( mountflags: int = 0, data: str | None = None, ) -> None: - """ - A Python wrapper for the mount system call. + """A Python wrapper for the mount system call. :param source: The source of the file system (e.g., device name, remote filesystem). :param target: The mount point (an existing directory). 
@@ -129,7 +132,11 @@ def mount( # Call the mount system call result = libc.mount( - source_c, target_c, fstype_c, ctypes.c_ulong(mountflags), data_c + source_c, + target_c, + fstype_c, + ctypes.c_ulong(mountflags), + data_c, ) if result != 0: @@ -145,7 +152,7 @@ def prepare_machine_root(machinename: str, root: Path) -> None: root.mkdir(parents=True, exist_ok=True) root.joinpath("etc").mkdir(parents=True, exist_ok=True) root.joinpath(".env").write_text( - "\n".join(f"{k}={v}" for k, v in os.environ.items()) + "\n".join(f"{k}={v}" for k, v in os.environ.items()), ) @@ -157,7 +164,6 @@ def retry(fn: Callable, timeout: int = 900) -> None: """Call the given function repeatedly, with 1 second intervals, until it returns True or a timeout is reached. """ - for _ in range(timeout): if fn(False): return @@ -284,8 +290,7 @@ class Machine: check_output: bool = True, timeout: int | None = 900, ) -> subprocess.CompletedProcess: - """ - Execute a shell command, returning a list `(status, stdout)`. + """Execute a shell command, returning a list `(status, stdout)`. Commands are run with `set -euo pipefail` set: @@ -316,7 +321,6 @@ class Machine: `timeout` parameter, e.g., `execute(cmd, timeout=10)` or `execute(cmd, timeout=None)`. The default is 900 seconds. """ - # Always run command with shell opts command = f"set -eo pipefail; source /etc/profile; set -xu; {command}" @@ -330,7 +334,9 @@ class Machine: return proc def nested( - self, msg: str, attrs: dict[str, str] | None = None + self, + msg: str, + attrs: dict[str, str] | None = None, ) -> _GeneratorContextManager: if attrs is None: attrs = {} @@ -339,8 +345,7 @@ class Machine: return self.logger.nested(msg, my_attrs) def systemctl(self, q: str) -> subprocess.CompletedProcess: - """ - Runs `systemctl` commands with optional support for + """Runs `systemctl` commands with optional support for `systemctl --user` ```py @@ -355,8 +360,7 @@ class Machine: return self.execute(f"systemctl {q}") def wait_until_succeeds(self, command: str, timeout: int = 900) -> str: - """ - Repeat a shell command with 1-second intervals until it succeeds. + """Repeat a shell command with 1-second intervals until it succeeds. Has a default timeout of 900 seconds which can be modified, e.g. `wait_until_succeeds(cmd, timeout=10)`. See `execute` for details on command execution. @@ -374,18 +378,17 @@ class Machine: return output def wait_for_open_port( - self, port: int, addr: str = "localhost", timeout: int = 900 + self, + port: int, + addr: str = "localhost", + timeout: int = 900, ) -> None: - """ - Wait for a port to be open on the given address. - """ + """Wait for a port to be open on the given address.""" command = f"nc -z {shlex.quote(addr)} {port}" self.wait_until_succeeds(command, timeout=timeout) def wait_for_file(self, filename: str, timeout: int = 30) -> None: - """ - Waits until the file exists in the machine's file system. - """ + """Waits until the file exists in the machine's file system.""" def check_file(_last_try: bool) -> bool: result = self.execute(f"test -e {filename}") @@ -395,8 +398,7 @@ class Machine: retry(check_file, timeout) def wait_for_unit(self, unit: str, timeout: int = 900) -> None: - """ - Wait for a systemd unit to get into "active" state. + """Wait for a systemd unit to get into "active" state. Throws exceptions on "failed" and "inactive" states as well as after timing out. """ @@ -441,9 +443,7 @@ class Machine: return res.stdout def shutdown(self) -> None: - """ - Shut down the machine, waiting for the VM to exit. 
- """ + """Shut down the machine, waiting for the VM to exit.""" if self.process: self.process.terminate() self.process.wait() @@ -557,7 +557,7 @@ class Driver: rootdir=tempdir_path / container.name, out_dir=self.out_dir, logger=self.logger, - ) + ), ) def start_all(self) -> None: @@ -581,7 +581,7 @@ class Driver: ) print( - f"To attach to container {machine.name} run on the same machine that runs the test:" + f"To attach to container {machine.name} run on the same machine that runs the test:", ) print( " ".join( @@ -603,8 +603,8 @@ class Driver: "-c", "bash", Style.RESET_ALL, - ] - ) + ], + ), ) def test_symbols(self) -> dict[str, Any]: @@ -623,7 +623,7 @@ class Driver: "additionally exposed symbols:\n " + ", ".join(m.name for m in self.machines) + ",\n " - + ", ".join(list(general_symbols.keys())) + + ", ".join(list(general_symbols.keys())), ) return {**general_symbols, **machine_symbols} diff --git a/lib/test/container-test-driver/test_driver/logger.py b/lib/test/container-test-driver/test_driver/logger.py index 920333149..d82a035b9 100644 --- a/lib/test/container-test-driver/test_driver/logger.py +++ b/lib/test/container-test-driver/test_driver/logger.py @@ -25,14 +25,18 @@ class AbstractLogger(ABC): @abstractmethod @contextmanager def subtest( - self, name: str, attributes: dict[str, str] | None = None + self, + name: str, + attributes: dict[str, str] | None = None, ) -> Iterator[None]: pass @abstractmethod @contextmanager def nested( - self, message: str, attributes: dict[str, str] | None = None + self, + message: str, + attributes: dict[str, str] | None = None, ) -> Iterator[None]: pass @@ -66,7 +70,7 @@ class JunitXMLLogger(AbstractLogger): def __init__(self, outfile: Path) -> None: self.tests: dict[str, JunitXMLLogger.TestCaseState] = { - "main": self.TestCaseState() + "main": self.TestCaseState(), } self.currentSubtest = "main" self.outfile: Path = outfile @@ -78,7 +82,9 @@ class JunitXMLLogger(AbstractLogger): @contextmanager def subtest( - self, name: str, attributes: dict[str, str] | None = None + self, + name: str, + attributes: dict[str, str] | None = None, ) -> Iterator[None]: old_test = self.currentSubtest self.tests.setdefault(name, self.TestCaseState()) @@ -90,7 +96,9 @@ class JunitXMLLogger(AbstractLogger): @contextmanager def nested( - self, message: str, attributes: dict[str, str] | None = None + self, + message: str, + attributes: dict[str, str] | None = None, ) -> Iterator[None]: self.log(message) yield @@ -144,7 +152,9 @@ class CompositeLogger(AbstractLogger): @contextmanager def subtest( - self, name: str, attributes: dict[str, str] | None = None + self, + name: str, + attributes: dict[str, str] | None = None, ) -> Iterator[None]: with ExitStack() as stack: for logger in self.logger_list: @@ -153,7 +163,9 @@ class CompositeLogger(AbstractLogger): @contextmanager def nested( - self, message: str, attributes: dict[str, str] | None = None + self, + message: str, + attributes: dict[str, str] | None = None, ) -> Iterator[None]: with ExitStack() as stack: for logger in self.logger_list: @@ -200,19 +212,24 @@ class TerminalLogger(AbstractLogger): @contextmanager def subtest( - self, name: str, attributes: dict[str, str] | None = None + self, + name: str, + attributes: dict[str, str] | None = None, ) -> Iterator[None]: with self.nested("subtest: " + name, attributes): yield @contextmanager def nested( - self, message: str, attributes: dict[str, str] | None = None + self, + message: str, + attributes: dict[str, str] | None = None, ) -> Iterator[None]: self._eprint( 
self.maybe_prefix( - Style.BRIGHT + Fore.GREEN + message + Style.RESET_ALL, attributes - ) + Style.BRIGHT + Fore.GREEN + message + Style.RESET_ALL, + attributes, + ), ) tic = time.time() @@ -259,7 +276,9 @@ class XMLLogger(AbstractLogger): return "".join(ch for ch in message if unicodedata.category(ch)[0] != "C") def maybe_prefix( - self, message: str, attributes: dict[str, str] | None = None + self, + message: str, + attributes: dict[str, str] | None = None, ) -> str: if attributes and "machine" in attributes: return f"{attributes['machine']}: {message}" @@ -309,14 +328,18 @@ class XMLLogger(AbstractLogger): @contextmanager def subtest( - self, name: str, attributes: dict[str, str] | None = None + self, + name: str, + attributes: dict[str, str] | None = None, ) -> Iterator[None]: with self.nested("subtest: " + name, attributes): yield @contextmanager def nested( - self, message: str, attributes: dict[str, str] | None = None + self, + message: str, + attributes: dict[str, str] | None = None, ) -> Iterator[None]: if attributes is None: attributes = {} diff --git a/nixosModules/clanCore/zerotier/generate.py b/nixosModules/clanCore/zerotier/generate.py index a0c6f6af9..710b70670 100644 --- a/nixosModules/clanCore/zerotier/generate.py +++ b/nixosModules/clanCore/zerotier/generate.py @@ -195,7 +195,7 @@ def compute_zerotier_ip(network_id: str, identity: Identity) -> ipaddress.IPv6Ad (node_id >> 16) & 0xFF, (node_id >> 8) & 0xFF, (node_id) & 0xFF, - ] + ], ) return ipaddress.IPv6Address(bytes(addr_parts)) @@ -203,7 +203,10 @@ def compute_zerotier_ip(network_id: str, identity: Identity) -> ipaddress.IPv6Ad def main() -> None: parser = argparse.ArgumentParser() parser.add_argument( - "--mode", choices=["network", "identity"], required=True, type=str + "--mode", + choices=["network", "identity"], + required=True, + type=str, ) parser.add_argument("--ip", type=Path, required=True) parser.add_argument("--identity-secret", type=Path, required=True) diff --git a/nixosModules/clanCore/zerotier/genmoon.py b/nixosModules/clanCore/zerotier/genmoon.py index 17564d418..2952c3748 100644 --- a/nixosModules/clanCore/zerotier/genmoon.py +++ b/nixosModules/clanCore/zerotier/genmoon.py @@ -17,7 +17,7 @@ def main() -> None: moon_json = json.loads(Path(moon_json_path).read_text()) moon_json["roots"][0]["stableEndpoints"] = json.loads( - Path(endpoint_config).read_text() + Path(endpoint_config).read_text(), ) with NamedTemporaryFile("w") as f: diff --git a/pkgs/agit/agit.py b/pkgs/agit/agit.py index 8c91f7ef3..4ecc6606b 100644 --- a/pkgs/agit/agit.py +++ b/pkgs/agit/agit.py @@ -38,8 +38,7 @@ def get_gitea_api_url(remote: str = "origin") -> str: host_and_path = remote_url.split("@")[1] # git.clan.lol:clan/clan-core.git host = host_and_path.split(":")[0] # git.clan.lol repo_path = host_and_path.split(":")[1] # clan/clan-core.git - if repo_path.endswith(".git"): - repo_path = repo_path[:-4] # clan/clan-core + repo_path = repo_path.removesuffix(".git") # clan/clan-core elif remote_url.startswith("https://"): # HTTPS format: https://git.clan.lol/clan/clan-core.git url_parts = remote_url.replace("https://", "").split("/") @@ -86,7 +85,10 @@ def get_repo_info_from_api_url(api_url: str) -> tuple[str, str]: def fetch_pr_statuses( - repo_owner: str, repo_name: str, commit_sha: str, host: str + repo_owner: str, + repo_name: str, + commit_sha: str, + host: str, ) -> list[dict]: """Fetch CI statuses for a specific commit SHA.""" status_url = ( @@ -183,7 +185,7 @@ def run_git_command(command: list) -> tuple[int, str, str]: def 
get_current_branch_name() -> str: exit_code, branch_name, error = run_git_command( - ["git", "rev-parse", "--abbrev-ref", "HEAD"] + ["git", "rev-parse", "--abbrev-ref", "HEAD"], ) if exit_code != 0: @@ -196,7 +198,7 @@ def get_current_branch_name() -> str: def get_latest_commit_info() -> tuple[str, str]: """Get the title and body of the latest commit.""" exit_code, commit_msg, error = run_git_command( - ["git", "log", "-1", "--pretty=format:%B"] + ["git", "log", "-1", "--pretty=format:%B"], ) if exit_code != 0: @@ -225,7 +227,7 @@ def get_commits_since_main() -> list[tuple[str, str]]: "main..HEAD", "--no-merges", "--pretty=format:%s|%b|---END---", - ] + ], ) if exit_code != 0: @@ -263,7 +265,9 @@ def open_editor_for_pr() -> tuple[str, str]: commits_since_main = get_commits_since_main() with tempfile.NamedTemporaryFile( - mode="w+", suffix="COMMIT_EDITMSG", delete=False + mode="w+", + suffix="COMMIT_EDITMSG", + delete=False, ) as temp_file: temp_file.flush() temp_file_path = temp_file.name @@ -280,7 +284,7 @@ def open_editor_for_pr() -> tuple[str, str]: temp_file.write("# The first line will be used as the PR title.\n") temp_file.write("# Everything else will be used as the PR description.\n") temp_file.write( - "# To abort creation of the PR, close editor with an error code.\n" + "# To abort creation of the PR, close editor with an error code.\n", ) temp_file.write("# In vim for example you can use :cq!\n") temp_file.write("#\n") @@ -373,7 +377,7 @@ def create_agit_push( print( f" Description: {description[:50]}..." if len(description) > 50 - else f" Description: {description}" + else f" Description: {description}", ) print() @@ -530,19 +534,26 @@ Examples: ) create_parser.add_argument( - "-t", "--topic", help="Set PR topic (default: current branch name)" + "-t", + "--topic", + help="Set PR topic (default: current branch name)", ) create_parser.add_argument( - "--title", help="Set the PR title (default: last commit title)" + "--title", + help="Set the PR title (default: last commit title)", ) create_parser.add_argument( - "--description", help="Override the PR description (default: commit body)" + "--description", + help="Override the PR description (default: commit body)", ) create_parser.add_argument( - "-f", "--force", action="store_true", help="Force push the changes" + "-f", + "--force", + action="store_true", + help="Force push the changes", ) create_parser.add_argument( diff --git a/pkgs/clan-app/clan_app/__init__.py b/pkgs/clan-app/clan_app/__init__.py index ff89d1458..f0bddcb15 100644 --- a/pkgs/clan-app/clan_app/__init__.py +++ b/pkgs/clan-app/clan_app/__init__.py @@ -13,7 +13,9 @@ log = logging.getLogger(__name__) def main(argv: list[str] = sys.argv) -> int: parser = argparse.ArgumentParser(description="Clan App") parser.add_argument( - "--content-uri", type=str, help="The URI of the content to display" + "--content-uri", + type=str, + help="The URI of the content to display", ) parser.add_argument("--debug", action="store_true", help="Enable debug mode") parser.add_argument( diff --git a/pkgs/clan-app/clan_app/api/api_bridge.py b/pkgs/clan-app/clan_app/api/api_bridge.py index b1e38a697..4cba4eb20 100644 --- a/pkgs/clan-app/clan_app/api/api_bridge.py +++ b/pkgs/clan-app/clan_app/api/api_bridge.py @@ -56,18 +56,23 @@ class ApiBridge(ABC): for middleware in self.middleware_chain: try: log.debug( - f"{middleware.__class__.__name__} => {request.method_name}" + f"{middleware.__class__.__name__} => {request.method_name}", ) middleware.process(context) except Exception as e: # If 
middleware fails, handle error self.send_api_error_response( - request.op_key or "unknown", str(e), ["middleware_error"] + request.op_key or "unknown", + str(e), + ["middleware_error"], ) return def send_api_error_response( - self, op_key: str, error_message: str, location: list[str] + self, + op_key: str, + error_message: str, + location: list[str], ) -> None: """Send an error response.""" from clan_lib.api import ApiError, ErrorDataClass @@ -80,7 +85,7 @@ class ApiBridge(ABC): message="An internal error occured", description=error_message, location=location, - ) + ), ], ) @@ -107,6 +112,7 @@ class ApiBridge(ABC): thread_name: Name for the thread (for debugging) wait_for_completion: Whether to wait for the thread to complete timeout: Timeout in seconds when waiting for completion + """ op_key = request.op_key or "unknown" @@ -116,7 +122,7 @@ class ApiBridge(ABC): try: log.debug( f"Processing {request.method_name} with args {request.args} " - f"and header {request.header} in thread {thread_name}" + f"and header {request.header} in thread {thread_name}", ) self.process_request(request) finally: @@ -124,7 +130,9 @@ class ApiBridge(ABC): stop_event = threading.Event() thread = threading.Thread( - target=thread_task, args=(stop_event,), name=thread_name + target=thread_task, + args=(stop_event,), + name=thread_name, ) thread.start() @@ -138,5 +146,7 @@ class ApiBridge(ABC): if thread.is_alive(): stop_event.set() # Cancel the thread self.send_api_error_response( - op_key, "Request timeout", ["api_bridge", request.method_name] + op_key, + "Request timeout", + ["api_bridge", request.method_name], ) diff --git a/pkgs/clan-app/clan_app/api/file_gtk.py b/pkgs/clan-app/clan_app/api/file_gtk.py index 9f2b0e097..2da3774da 100644 --- a/pkgs/clan-app/clan_app/api/file_gtk.py +++ b/pkgs/clan-app/clan_app/api/file_gtk.py @@ -26,8 +26,7 @@ RESULT: dict[str, SuccessDataClass[list[str] | None] | ErrorDataClass] = {} def get_clan_folder() -> SuccessDataClass[Flake] | ErrorDataClass: - """ - Opens the clan folder using the GTK file dialog. + """Opens the clan folder using the GTK file dialog. Returns the path to the clan folder or an error if it fails. 
""" file_request = FileRequest( @@ -52,7 +51,7 @@ def get_clan_folder() -> SuccessDataClass[Flake] | ErrorDataClass: message="No folder selected", description="You must select a folder to open.", location=["get_clan_folder"], - ) + ), ], ) @@ -66,7 +65,7 @@ def get_clan_folder() -> SuccessDataClass[Flake] | ErrorDataClass: message="Invalid clan folder", description=f"The selected folder '{clan_folder}' is not a valid clan folder.", location=["get_clan_folder"], - ) + ), ], ) @@ -102,8 +101,10 @@ def gtk_open_file(file_request: FileRequest, op_key: str) -> bool: selected_path = remove_none([gfile.get_path()]) returns( SuccessDataClass( - op_key=op_key, data=selected_path, status="success" - ) + op_key=op_key, + data=selected_path, + status="success", + ), ) except Exception as e: log.exception("Error opening file") @@ -116,9 +117,9 @@ def gtk_open_file(file_request: FileRequest, op_key: str) -> bool: message=e.__class__.__name__, description=str(e), location=["get_system_file"], - ) + ), ], - ) + ), ) def on_file_select_multiple(file_dialog: Gtk.FileDialog, task: Gio.Task) -> None: @@ -128,8 +129,10 @@ def gtk_open_file(file_request: FileRequest, op_key: str) -> bool: selected_paths = remove_none([gfile.get_path() for gfile in gfiles]) returns( SuccessDataClass( - op_key=op_key, data=selected_paths, status="success" - ) + op_key=op_key, + data=selected_paths, + status="success", + ), ) else: returns(SuccessDataClass(op_key=op_key, data=None, status="success")) @@ -144,9 +147,9 @@ def gtk_open_file(file_request: FileRequest, op_key: str) -> bool: message=e.__class__.__name__, description=str(e), location=["get_system_file"], - ) + ), ], - ) + ), ) def on_folder_select(file_dialog: Gtk.FileDialog, task: Gio.Task) -> None: @@ -156,8 +159,10 @@ def gtk_open_file(file_request: FileRequest, op_key: str) -> bool: selected_path = remove_none([gfile.get_path()]) returns( SuccessDataClass( - op_key=op_key, data=selected_path, status="success" - ) + op_key=op_key, + data=selected_path, + status="success", + ), ) else: returns(SuccessDataClass(op_key=op_key, data=None, status="success")) @@ -172,9 +177,9 @@ def gtk_open_file(file_request: FileRequest, op_key: str) -> bool: message=e.__class__.__name__, description=str(e), location=["get_system_file"], - ) + ), ], - ) + ), ) def on_save_finish(file_dialog: Gtk.FileDialog, task: Gio.Task) -> None: @@ -184,8 +189,10 @@ def gtk_open_file(file_request: FileRequest, op_key: str) -> bool: selected_path = remove_none([gfile.get_path()]) returns( SuccessDataClass( - op_key=op_key, data=selected_path, status="success" - ) + op_key=op_key, + data=selected_path, + status="success", + ), ) else: returns(SuccessDataClass(op_key=op_key, data=None, status="success")) @@ -200,9 +207,9 @@ def gtk_open_file(file_request: FileRequest, op_key: str) -> bool: message=e.__class__.__name__, description=str(e), location=["get_system_file"], - ) + ), ], - ) + ), ) dialog = Gtk.FileDialog() diff --git a/pkgs/clan-app/clan_app/api/middleware/argument_parsing.py b/pkgs/clan-app/clan_app/api/middleware/argument_parsing.py index 6a1becbb0..10e97bb62 100644 --- a/pkgs/clan-app/clan_app/api/middleware/argument_parsing.py +++ b/pkgs/clan-app/clan_app/api/middleware/argument_parsing.py @@ -39,7 +39,7 @@ class ArgumentParsingMiddleware(Middleware): except Exception as e: log.exception( - f"Error while parsing arguments for {context.request.method_name}" + f"Error while parsing arguments for {context.request.method_name}", ) context.bridge.send_api_error_response( context.request.op_key or 
"unknown", diff --git a/pkgs/clan-app/clan_app/api/middleware/base.py b/pkgs/clan-app/clan_app/api/middleware/base.py index 1db218f4b..ae7cb0145 100644 --- a/pkgs/clan-app/clan_app/api/middleware/base.py +++ b/pkgs/clan-app/clan_app/api/middleware/base.py @@ -23,7 +23,9 @@ class Middleware(ABC): """Process the request through this middleware.""" def register_context_manager( - self, context: MiddlewareContext, cm: AbstractContextManager[Any] + self, + context: MiddlewareContext, + cm: AbstractContextManager[Any], ) -> Any: """Register a context manager with the exit stack.""" return context.exit_stack.enter_context(cm) diff --git a/pkgs/clan-app/clan_app/api/middleware/logging.py b/pkgs/clan-app/clan_app/api/middleware/logging.py index ae10a0343..b03fa8c82 100644 --- a/pkgs/clan-app/clan_app/api/middleware/logging.py +++ b/pkgs/clan-app/clan_app/api/middleware/logging.py @@ -25,23 +25,26 @@ class LoggingMiddleware(Middleware): try: # Handle log group configuration log_group: list[str] | None = context.request.header.get("logging", {}).get( - "group_path", None + "group_path", + None, ) if log_group is not None: if not isinstance(log_group, list): msg = f"Expected log_group to be a list, got {type(log_group)}" raise TypeError(msg) # noqa: TRY301 log.warning( - f"Using log group {log_group} for {context.request.method_name} with op_key {context.request.op_key}" + f"Using log group {log_group} for {context.request.method_name} with op_key {context.request.op_key}", ) # Create log file log_file = self.log_manager.create_log_file( - method, op_key=context.request.op_key or "unknown", group_path=log_group + method, + op_key=context.request.op_key or "unknown", + group_path=log_group, ).get_file_path() except Exception as e: log.exception( - f"Error while handling request header of {context.request.method_name}" + f"Error while handling request header of {context.request.method_name}", ) context.bridge.send_api_error_response( context.request.op_key or "unknown", @@ -76,7 +79,8 @@ class LoggingMiddleware(Middleware): line_buffering=True, ) self.handler = setup_logging( - log.getEffectiveLevel(), log_file=handler_stream + log.getEffectiveLevel(), + log_file=handler_stream, ) return self diff --git a/pkgs/clan-app/clan_app/api/middleware/method_execution.py b/pkgs/clan-app/clan_app/api/middleware/method_execution.py index a393932b9..e00964f6f 100644 --- a/pkgs/clan-app/clan_app/api/middleware/method_execution.py +++ b/pkgs/clan-app/clan_app/api/middleware/method_execution.py @@ -32,7 +32,7 @@ class MethodExecutionMiddleware(Middleware): except Exception as e: log.exception( - f"Error while handling result of {context.request.method_name}" + f"Error while handling result of {context.request.method_name}", ) context.bridge.send_api_error_response( context.request.op_key or "unknown", diff --git a/pkgs/clan-app/clan_app/app.py b/pkgs/clan-app/clan_app/app.py index b8b87b438..37ce9aba5 100644 --- a/pkgs/clan-app/clan_app/app.py +++ b/pkgs/clan-app/clan_app/app.py @@ -48,7 +48,7 @@ def app_run(app_opts: ClanAppOptions) -> int: # Add a log group ["clans", , "machines", ] log_manager = LogManager(base_dir=user_data_dir() / "clan-app" / "logs") clan_log_group = LogGroupConfig("clans", "Clans").add_child( - LogGroupConfig("machines", "Machines") + LogGroupConfig("machines", "Machines"), ) log_manager = log_manager.add_root_group_config(clan_log_group) # Init LogManager global in log_manager_api module @@ -89,7 +89,7 @@ def app_run(app_opts: ClanAppOptions) -> int: # HTTP-only mode - keep the server running 
log.info("HTTP API server running...") log.info( - f"Swagger: http://{app_opts.http_host}:{app_opts.http_port}/api/swagger" + f"Swagger: http://{app_opts.http_host}:{app_opts.http_port}/api/swagger", ) log.info("Press Ctrl+C to stop the server") diff --git a/pkgs/clan-app/clan_app/deps/http/http_bridge.py b/pkgs/clan-app/clan_app/deps/http/http_bridge.py index 883ce31cd..cb53f9246 100644 --- a/pkgs/clan-app/clan_app/deps/http/http_bridge.py +++ b/pkgs/clan-app/clan_app/deps/http/http_bridge.py @@ -63,7 +63,9 @@ class HttpBridge(ApiBridge, BaseHTTPRequestHandler): self.send_header("Access-Control-Allow-Headers", "Content-Type") def _send_json_response_with_status( - self, data: dict[str, Any], status_code: int = 200 + self, + data: dict[str, Any], + status_code: int = 200, ) -> None: """Send a JSON response with the given status code.""" try: @@ -82,11 +84,13 @@ class HttpBridge(ApiBridge, BaseHTTPRequestHandler): response_dict = dataclass_to_dict(response) self._send_json_response_with_status(response_dict, 200) log.debug( - f"HTTP response for {response._op_key}: {json.dumps(response_dict, indent=2)}" # noqa: SLF001 + f"HTTP response for {response._op_key}: {json.dumps(response_dict, indent=2)}", # noqa: SLF001 ) def _create_success_response( - self, op_key: str, data: dict[str, Any] + self, + op_key: str, + data: dict[str, Any], ) -> BackendResponse: """Create a successful API response.""" return BackendResponse( @@ -98,14 +102,16 @@ class HttpBridge(ApiBridge, BaseHTTPRequestHandler): def _send_info_response(self) -> None: """Send server information response.""" response = self._create_success_response( - "info", {"message": "Clan API Server", "version": "1.0.0"} + "info", + {"message": "Clan API Server", "version": "1.0.0"}, ) self.send_api_response(response) def _send_methods_response(self) -> None: """Send available API methods response.""" response = self._create_success_response( - "methods", {"methods": list(self.api.functions.keys())} + "methods", + {"methods": list(self.api.functions.keys())}, ) self.send_api_response(response) @@ -179,7 +185,7 @@ class HttpBridge(ApiBridge, BaseHTTPRequestHandler): json_data = json.loads(file_data.decode("utf-8")) server_address = getattr(self.server, "server_address", ("localhost", 80)) json_data["servers"] = [ - {"url": f"http://{server_address[0]}:{server_address[1]}/api/v1/"} + {"url": f"http://{server_address[0]}:{server_address[1]}/api/v1/"}, ] file_data = json.dumps(json_data, indent=2).encode("utf-8") @@ -213,7 +219,9 @@ class HttpBridge(ApiBridge, BaseHTTPRequestHandler): # Validate API path if not path.startswith("/api/v1/"): self.send_api_error_response( - "post", f"Path not found: {path}", ["http_bridge", "POST"] + "post", + f"Path not found: {path}", + ["http_bridge", "POST"], ) return @@ -221,7 +229,9 @@ class HttpBridge(ApiBridge, BaseHTTPRequestHandler): method_name = path[len("/api/v1/") :] if not method_name: self.send_api_error_response( - "post", "Method name required", ["http_bridge", "POST"] + "post", + "Method name required", + ["http_bridge", "POST"], ) return @@ -289,19 +299,26 @@ class HttpBridge(ApiBridge, BaseHTTPRequestHandler): # Create API request api_request = BackendRequest( - method_name=method_name, args=body, header=header, op_key=op_key + method_name=method_name, + args=body, + header=header, + op_key=op_key, ) except Exception as e: self.send_api_error_response( - gen_op_key, str(e), ["http_bridge", method_name] + gen_op_key, + str(e), + ["http_bridge", method_name], ) return 
self._process_api_request_in_thread(api_request, method_name) def _parse_request_data( - self, request_data: dict[str, Any], gen_op_key: str + self, + request_data: dict[str, Any], + gen_op_key: str, ) -> tuple[dict[str, Any], dict[str, Any], str]: """Parse and validate request data components.""" header = request_data.get("header", {}) @@ -344,7 +361,9 @@ class HttpBridge(ApiBridge, BaseHTTPRequestHandler): pass def _process_api_request_in_thread( - self, api_request: BackendRequest, method_name: str + self, + api_request: BackendRequest, + method_name: str, ) -> None: """Process the API request in a separate thread.""" stop_event = threading.Event() @@ -358,7 +377,7 @@ class HttpBridge(ApiBridge, BaseHTTPRequestHandler): log.debug( f"Processing {request.method_name} with args {request.args} " - f"and header {request.header}" + f"and header {request.header}", ) self.process_request(request) diff --git a/pkgs/clan-app/clan_app/deps/http/test_http_api.py b/pkgs/clan-app/clan_app/deps/http/test_http_api.py index 75c5c9742..1895ba880 100644 --- a/pkgs/clan-app/clan_app/deps/http/test_http_api.py +++ b/pkgs/clan-app/clan_app/deps/http/test_http_api.py @@ -64,7 +64,8 @@ def mock_log_manager() -> Mock: @pytest.fixture def http_bridge( - mock_api: MethodRegistry, mock_log_manager: Mock + mock_api: MethodRegistry, + mock_log_manager: Mock, ) -> tuple[MethodRegistry, tuple]: """Create HTTP bridge dependencies for testing.""" middleware_chain = ( @@ -256,7 +257,9 @@ class TestIntegration: """Integration tests for HTTP API components.""" def test_full_request_flow( - self, mock_api: MethodRegistry, mock_log_manager: Mock + self, + mock_api: MethodRegistry, + mock_log_manager: Mock, ) -> None: """Test complete request flow from server to bridge to middleware.""" server: HttpApiServer = HttpApiServer( @@ -301,7 +304,9 @@ class TestIntegration: server.stop() def test_blocking_task( - self, mock_api: MethodRegistry, mock_log_manager: Mock + self, + mock_api: MethodRegistry, + mock_log_manager: Mock, ) -> None: shared_threads: dict[str, tasks.WebThread] = {} tasks.BAKEND_THREADS = shared_threads diff --git a/pkgs/clan-app/clan_app/deps/webview/_webview_ffi.py b/pkgs/clan-app/clan_app/deps/webview/_webview_ffi.py index 598d0d69d..8b5e39f3a 100644 --- a/pkgs/clan-app/clan_app/deps/webview/_webview_ffi.py +++ b/pkgs/clan-app/clan_app/deps/webview/_webview_ffi.py @@ -36,7 +36,6 @@ def _get_lib_names() -> list[str]: def _be_sure_libraries() -> list[Path] | None: """Ensure libraries exist and return paths.""" - lib_dir = os.environ.get("WEBVIEW_LIB_DIR") if not lib_dir: msg = "WEBVIEW_LIB_DIR environment variable is not set" diff --git a/pkgs/clan-app/clan_app/deps/webview/webview.py b/pkgs/clan-app/clan_app/deps/webview/webview.py index 23651459b..824313509 100644 --- a/pkgs/clan-app/clan_app/deps/webview/webview.py +++ b/pkgs/clan-app/clan_app/deps/webview/webview.py @@ -144,7 +144,9 @@ class Webview: ) else: bridge = WebviewBridge( - webview=self, middleware_chain=tuple(self._middleware), threads={} + webview=self, + middleware_chain=tuple(self._middleware), + threads={}, ) self._bridge = bridge @@ -154,7 +156,10 @@ class Webview: def set_size(self, value: Size) -> None: """Set the webview size (legacy compatibility).""" _webview_lib.webview_set_size( - self.handle, value.width, value.height, value.hint + self.handle, + value.width, + value.height, + value.hint, ) def set_title(self, value: str) -> None: @@ -194,7 +199,10 @@ class Webview: self._callbacks[name] = c_callback _webview_lib.webview_bind( - 
self.handle, _encode_c_string(name), c_callback, None + self.handle, + _encode_c_string(name), + c_callback, + None, ) def bind(self, name: str, callback: Callable[..., Any]) -> None: @@ -219,7 +227,10 @@ class Webview: def return_(self, seq: str, status: int, result: str) -> None: _webview_lib.webview_return( - self.handle, _encode_c_string(seq), status, _encode_c_string(result) + self.handle, + _encode_c_string(seq), + status, + _encode_c_string(result), ) def eval(self, source: str) -> None: diff --git a/pkgs/clan-app/clan_app/deps/webview/webview_bridge.py b/pkgs/clan-app/clan_app/deps/webview/webview_bridge.py index dd92c468a..83082e58a 100644 --- a/pkgs/clan-app/clan_app/deps/webview/webview_bridge.py +++ b/pkgs/clan-app/clan_app/deps/webview/webview_bridge.py @@ -26,7 +26,9 @@ class WebviewBridge(ApiBridge): def send_api_response(self, response: BackendResponse) -> None: """Send response back to the webview client.""" serialized = json.dumps( - dataclass_to_dict(response), indent=4, ensure_ascii=False + dataclass_to_dict(response), + indent=4, + ensure_ascii=False, ) log.debug(f"Sending response: {serialized}") @@ -40,7 +42,6 @@ class WebviewBridge(ApiBridge): arg: int, ) -> None: """Handle a call from webview's JavaScript bridge.""" - try: op_key = op_key_bytes.decode() raw_args = json.loads(request_data.decode()) @@ -68,7 +69,10 @@ class WebviewBridge(ApiBridge): # Create API request api_request = BackendRequest( - method_name=method_name, args=args, header=header, op_key=op_key + method_name=method_name, + args=args, + header=header, + op_key=op_key, ) except Exception as e: @@ -77,7 +81,9 @@ class WebviewBridge(ApiBridge): ) log.exception(msg) self.send_api_error_response( - op_key, str(e), ["webview_bridge", method_name] + op_key, + str(e), + ["webview_bridge", method_name], ) return diff --git a/pkgs/clan-app/tests/command.py b/pkgs/clan-app/tests/command.py index 8284c7e69..f5a1f4911 100644 --- a/pkgs/clan-app/tests/command.py +++ b/pkgs/clan-app/tests/command.py @@ -54,8 +54,7 @@ class Command: @pytest.fixture def command() -> Iterator[Command]: - """ - Starts a background command. The process is automatically terminated in the end. + """Starts a background command. The process is automatically terminated in the end. 
>>> p = command.run(["some", "daemon"]) >>> print(p.pid) """ diff --git a/pkgs/clan-app/tests/root.py b/pkgs/clan-app/tests/root.py index 8593bd5e6..ceded7e9f 100644 --- a/pkgs/clan-app/tests/root.py +++ b/pkgs/clan-app/tests/root.py @@ -13,23 +13,17 @@ else: @pytest.fixture(scope="session") def project_root() -> Path: - """ - Root directory the clan-cli - """ + """Root directory the clan-cli""" return PROJECT_ROOT @pytest.fixture(scope="session") def test_root() -> Path: - """ - Root directory of the tests - """ + """Root directory of the tests""" return TEST_ROOT @pytest.fixture(scope="session") def clan_core() -> Path: - """ - Directory of the clan-core flake - """ + """Directory of the clan-core flake""" return CLAN_CORE diff --git a/pkgs/clan-app/tests/wayland.py b/pkgs/clan-app/tests/wayland.py index 2c4ee1ee9..672e4499f 100644 --- a/pkgs/clan-app/tests/wayland.py +++ b/pkgs/clan-app/tests/wayland.py @@ -24,7 +24,11 @@ def app() -> Generator[GtkProc]: cmd = [sys.executable, "-m", "clan_app"] print(f"Running: {cmd}") rapp = Popen( - cmd, text=True, stdout=sys.stdout, stderr=sys.stderr, start_new_session=True + cmd, + text=True, + stdout=sys.stdout, + stderr=sys.stderr, + start_new_session=True, ) yield GtkProc(rapp) # Cleanup: Terminate your application diff --git a/pkgs/clan-cli/clan_cli/backups/create.py b/pkgs/clan-cli/clan_cli/backups/create.py index e2c03ae84..ca1a0dd37 100644 --- a/pkgs/clan-cli/clan_cli/backups/create.py +++ b/pkgs/clan-cli/clan_cli/backups/create.py @@ -22,12 +22,16 @@ def create_command(args: argparse.Namespace) -> None: def register_create_parser(parser: argparse.ArgumentParser) -> None: machines_parser = parser.add_argument( - "machine", type=str, help="machine in the flake to create backups of" + "machine", + type=str, + help="machine in the flake to create backups of", ) add_dynamic_completer(machines_parser, complete_machines) provider_action = parser.add_argument( - "--provider", type=str, help="backup provider to use" + "--provider", + type=str, + help="backup provider to use", ) add_dynamic_completer(provider_action, complete_backup_providers_for_machine) parser.set_defaults(func=create_command) diff --git a/pkgs/clan-cli/clan_cli/backups/create_test.py b/pkgs/clan-cli/clan_cli/backups/create_test.py index 690ff9ae8..8d7c49764 100644 --- a/pkgs/clan-cli/clan_cli/backups/create_test.py +++ b/pkgs/clan-cli/clan_cli/backups/create_test.py @@ -7,7 +7,8 @@ from clan_cli.tests.helpers import cli def test_create_command_no_flake( - tmp_path: Path, monkeypatch: pytest.MonkeyPatch + tmp_path: Path, + monkeypatch: pytest.MonkeyPatch, ) -> None: monkeypatch.chdir(tmp_path) diff --git a/pkgs/clan-cli/clan_cli/backups/list.py b/pkgs/clan-cli/clan_cli/backups/list.py index 9b85f35b5..c5a57eccb 100644 --- a/pkgs/clan-cli/clan_cli/backups/list.py +++ b/pkgs/clan-cli/clan_cli/backups/list.py @@ -21,11 +21,15 @@ def list_command(args: argparse.Namespace) -> None: def register_list_parser(parser: argparse.ArgumentParser) -> None: machines_parser = parser.add_argument( - "machine", type=str, help="machine in the flake to show backups of" + "machine", + type=str, + help="machine in the flake to show backups of", ) add_dynamic_completer(machines_parser, complete_machines) provider_action = parser.add_argument( - "--provider", type=str, help="backup provider to filter by" + "--provider", + type=str, + help="backup provider to filter by", ) add_dynamic_completer(provider_action, complete_backup_providers_for_machine) parser.set_defaults(func=list_command) diff --git 
a/pkgs/clan-cli/clan_cli/backups/restore.py b/pkgs/clan-cli/clan_cli/backups/restore.py index da064db2b..1433ed536 100644 --- a/pkgs/clan-cli/clan_cli/backups/restore.py +++ b/pkgs/clan-cli/clan_cli/backups/restore.py @@ -24,11 +24,15 @@ def restore_command(args: argparse.Namespace) -> None: def register_restore_parser(parser: argparse.ArgumentParser) -> None: machine_action = parser.add_argument( - "machine", type=str, help="machine in the flake to create backups of" + "machine", + type=str, + help="machine in the flake to create backups of", ) add_dynamic_completer(machine_action, complete_machines) provider_action = parser.add_argument( - "provider", type=str, help="backup provider to use" + "provider", + type=str, + help="backup provider to use", ) add_dynamic_completer(provider_action, complete_backup_providers_for_machine) parser.add_argument("name", type=str, help="Name of the backup to restore") diff --git a/pkgs/clan-cli/clan_cli/backups/restore_test.py b/pkgs/clan-cli/clan_cli/backups/restore_test.py index 6cec1ce8c..d3ec19a99 100644 --- a/pkgs/clan-cli/clan_cli/backups/restore_test.py +++ b/pkgs/clan-cli/clan_cli/backups/restore_test.py @@ -7,7 +7,8 @@ from clan_cli.tests.helpers import cli def test_restore_command_no_flake( - tmp_path: Path, monkeypatch: pytest.MonkeyPatch + tmp_path: Path, + monkeypatch: pytest.MonkeyPatch, ) -> None: monkeypatch.chdir(tmp_path) diff --git a/pkgs/clan-cli/clan_cli/clan/create.py b/pkgs/clan-cli/clan_cli/clan/create.py index f150a6aa6..a69fa28ed 100644 --- a/pkgs/clan-cli/clan_cli/clan/create.py +++ b/pkgs/clan-cli/clan_cli/clan/create.py @@ -67,7 +67,7 @@ def register_create_parser(parser: argparse.ArgumentParser) -> None: setup_git=not args.no_git, src_flake=args.flake, update_clan=not args.no_update, - ) + ), ) create_secrets_user_auto( flake_dir=Path(args.name).resolve(), diff --git a/pkgs/clan-cli/clan_cli/clan/inspect.py b/pkgs/clan-cli/clan_cli/clan/inspect.py index 830b06128..5257834ec 100644 --- a/pkgs/clan-cli/clan_cli/clan/inspect.py +++ b/pkgs/clan-cli/clan_cli/clan/inspect.py @@ -74,8 +74,8 @@ def inspect_flake(flake_url: str | Path, machine_name: str) -> FlakeConfig: # Get the Clan name cmd = nix_eval( [ - f'{flake_url}#clanInternals.machines."{system}"."{machine_name}".config.clan.core.name' - ] + f'{flake_url}#clanInternals.machines."{system}"."{machine_name}".config.clan.core.name', + ], ) res = run_cmd(cmd) clan_name = res.strip('"') @@ -83,8 +83,8 @@ def inspect_flake(flake_url: str | Path, machine_name: str) -> FlakeConfig: # Get the clan icon path cmd = nix_eval( [ - f'{flake_url}#clanInternals.machines."{system}"."{machine_name}".config.clan.core.icon' - ] + f'{flake_url}#clanInternals.machines."{system}"."{machine_name}".config.clan.core.icon', + ], ) res = run_cmd(cmd) @@ -96,7 +96,7 @@ def inspect_flake(flake_url: str | Path, machine_name: str) -> FlakeConfig: cmd = nix_build( [ - f'{flake_url}#clanInternals.machines."{system}"."{machine_name}".config.clan.core.icon' + f'{flake_url}#clanInternals.machines."{system}"."{machine_name}".config.clan.core.icon', ], machine_gcroot(flake_url=str(flake_url)) / "icon", ) @@ -129,7 +129,8 @@ def inspect_command(args: argparse.Namespace) -> None: flake=args.flake or Flake(str(Path.cwd())), ) res = inspect_flake( - flake_url=str(inspect_options.flake), machine_name=inspect_options.machine + flake_url=str(inspect_options.flake), + machine_name=inspect_options.machine, ) print("Clan name:", res.clan_name) print("Icon:", res.icon) diff --git a/pkgs/clan-cli/clan_cli/clan/show_test.py 
b/pkgs/clan-cli/clan_cli/clan/show_test.py index 614c3d2b2..deab9667d 100644 --- a/pkgs/clan-cli/clan_cli/clan/show_test.py +++ b/pkgs/clan-cli/clan_cli/clan/show_test.py @@ -10,7 +10,8 @@ from clan_cli.tests.stdout import CaptureOutput @pytest.mark.with_core def test_clan_show( - test_flake_with_core: FlakeForTest, capture_output: CaptureOutput + test_flake_with_core: FlakeForTest, + capture_output: CaptureOutput, ) -> None: with capture_output as output: cli.run(["show", "--flake", str(test_flake_with_core.path)]) @@ -20,7 +21,9 @@ def test_clan_show( def test_clan_show_no_flake( - tmp_path: Path, monkeypatch: pytest.MonkeyPatch, capture_output: CaptureOutput + tmp_path: Path, + monkeypatch: pytest.MonkeyPatch, + capture_output: CaptureOutput, ) -> None: monkeypatch.chdir(tmp_path) @@ -28,8 +31,8 @@ def test_clan_show_no_flake( cli.run(["show"]) assert "No clan flake found in the current directory or its parents" in str( - exc_info.value + exc_info.value, ) assert "Use the --flake flag to specify a clan flake path or URL" in str( - exc_info.value + exc_info.value, ) diff --git a/pkgs/clan-cli/clan_cli/cli.py b/pkgs/clan-cli/clan_cli/cli.py index bfd89b3e5..d54a1a21a 100644 --- a/pkgs/clan-cli/clan_cli/cli.py +++ b/pkgs/clan-cli/clan_cli/cli.py @@ -52,8 +52,7 @@ def create_flake_from_args(args: argparse.Namespace) -> Flake: def add_common_flags(parser: argparse.ArgumentParser) -> None: def argument_exists(parser: argparse.ArgumentParser, arg: str) -> bool: - """ - Check if an argparse argument already exists. + """Check if an argparse argument already exists. This is needed because the aliases subcommand doesn't *really* create an alias - it duplicates the actual parser in the tree making duplication inevitable while naively traversing. @@ -410,7 +409,9 @@ For more detailed information, visit: {help_hyperlink("deploy", "https://docs.cl machines.register_parser(parser_machine) parser_vms = subparsers.add_parser( - "vms", help="Manage virtual machines", description="Manage virtual machines" + "vms", + help="Manage virtual machines", + description="Manage virtual machines", ) vms.register_parser(parser_vms) diff --git a/pkgs/clan-cli/clan_cli/completions.py b/pkgs/clan-cli/clan_cli/completions.py index c75600370..2f0781874 100644 --- a/pkgs/clan-cli/clan_cli/completions.py +++ b/pkgs/clan-cli/clan_cli/completions.py @@ -38,11 +38,11 @@ def clan_dir(flake: str | None) -> str | None: def complete_machines( - prefix: str, parsed_args: argparse.Namespace, **kwargs: Any + prefix: str, + parsed_args: argparse.Namespace, + **kwargs: Any, ) -> Iterable[str]: - """ - Provides completion functionality for machine names configured in the clan. - """ + """Provides completion functionality for machine names configured in the clan.""" machines: list[str] = [] def run_cmd() -> None: @@ -72,11 +72,11 @@ def complete_machines( def complete_services_for_machine( - prefix: str, parsed_args: argparse.Namespace, **kwargs: Any + prefix: str, + parsed_args: argparse.Namespace, + **kwargs: Any, ) -> Iterable[str]: - """ - Provides completion functionality for machine facts generation services. 
- """ + """Provides completion functionality for machine facts generation services.""" services: list[str] = [] # TODO: consolidate, if multiple machines are used machines: list[str] = parsed_args.machines @@ -98,7 +98,7 @@ def complete_services_for_machine( "builtins.attrNames", ], ), - ).stdout.strip() + ).stdout.strip(), ) services.extend(services_result) @@ -117,11 +117,11 @@ def complete_services_for_machine( def complete_backup_providers_for_machine( - prefix: str, parsed_args: argparse.Namespace, **kwargs: Any + prefix: str, + parsed_args: argparse.Namespace, + **kwargs: Any, ) -> Iterable[str]: - """ - Provides completion functionality for machine backup providers. - """ + """Provides completion functionality for machine backup providers.""" providers: list[str] = [] machine: str = parsed_args.machine @@ -142,7 +142,7 @@ def complete_backup_providers_for_machine( "builtins.attrNames", ], ), - ).stdout.strip() + ).stdout.strip(), ) providers.extend(providers_result) @@ -161,11 +161,11 @@ def complete_backup_providers_for_machine( def complete_state_services_for_machine( - prefix: str, parsed_args: argparse.Namespace, **kwargs: Any + prefix: str, + parsed_args: argparse.Namespace, + **kwargs: Any, ) -> Iterable[str]: - """ - Provides completion functionality for machine state providers. - """ + """Provides completion functionality for machine state providers.""" providers: list[str] = [] machine: str = parsed_args.machine @@ -186,7 +186,7 @@ def complete_state_services_for_machine( "builtins.attrNames", ], ), - ).stdout.strip() + ).stdout.strip(), ) providers.extend(providers_result) @@ -205,11 +205,11 @@ def complete_state_services_for_machine( def complete_secrets( - prefix: str, parsed_args: argparse.Namespace, **kwargs: Any + prefix: str, + parsed_args: argparse.Namespace, + **kwargs: Any, ) -> Iterable[str]: - """ - Provides completion functionality for clan secrets - """ + """Provides completion functionality for clan secrets""" from clan_lib.flake.flake import Flake from .secrets.secrets import list_secrets @@ -228,11 +228,11 @@ def complete_secrets( def complete_users( - prefix: str, parsed_args: argparse.Namespace, **kwargs: Any + prefix: str, + parsed_args: argparse.Namespace, + **kwargs: Any, ) -> Iterable[str]: - """ - Provides completion functionality for clan users - """ + """Provides completion functionality for clan users""" from pathlib import Path from .secrets.users import list_users @@ -251,11 +251,11 @@ def complete_users( def complete_groups( - prefix: str, parsed_args: argparse.Namespace, **kwargs: Any + prefix: str, + parsed_args: argparse.Namespace, + **kwargs: Any, ) -> Iterable[str]: - """ - Provides completion functionality for clan groups - """ + """Provides completion functionality for clan groups""" from pathlib import Path from .secrets.groups import list_groups @@ -275,12 +275,11 @@ def complete_groups( def complete_templates_disko( - prefix: str, parsed_args: argparse.Namespace, **kwargs: Any + prefix: str, + parsed_args: argparse.Namespace, + **kwargs: Any, ) -> Iterable[str]: - """ - Provides completion functionality for disko templates - """ - + """Provides completion functionality for disko templates""" from clan_lib.templates import list_templates flake = ( @@ -300,12 +299,11 @@ def complete_templates_disko( def complete_templates_clan( - prefix: str, parsed_args: argparse.Namespace, **kwargs: Any + prefix: str, + parsed_args: argparse.Namespace, + **kwargs: Any, ) -> Iterable[str]: - """ - Provides completion functionality for clan templates - 
""" - + """Provides completion functionality for clan templates""" from clan_lib.templates import list_templates flake = ( @@ -325,10 +323,11 @@ def complete_templates_clan( def complete_vars_for_machine( - prefix: str, parsed_args: argparse.Namespace, **kwargs: Any + prefix: str, + parsed_args: argparse.Namespace, + **kwargs: Any, ) -> Iterable[str]: - """ - Provides completion functionality for variable names for a specific machine. + """Provides completion functionality for variable names for a specific machine. Only completes vars that already exist in the vars directory on disk. This is fast as it only scans the filesystem without any evaluation. """ @@ -368,11 +367,11 @@ def complete_vars_for_machine( def complete_target_host( - prefix: str, parsed_args: argparse.Namespace, **kwargs: Any + prefix: str, + parsed_args: argparse.Namespace, + **kwargs: Any, ) -> Iterable[str]: - """ - Provides completion functionality for target_host for a specific machine - """ + """Provides completion functionality for target_host for a specific machine""" target_hosts: list[str] = [] machine: str = parsed_args.machine @@ -391,7 +390,7 @@ def complete_target_host( f"{flake}#nixosConfigurations.{machine}.config.clan.core.networking.targetHost", ], ), - ).stdout.strip() + ).stdout.strip(), ) target_hosts.append(target_host_result) @@ -410,11 +409,11 @@ def complete_target_host( def complete_tags( - prefix: str, parsed_args: argparse.Namespace, **kwargs: Any + prefix: str, + parsed_args: argparse.Namespace, + **kwargs: Any, ) -> Iterable[str]: - """ - Provides completion functionality for tags inside the inventory - """ + """Provides completion functionality for tags inside the inventory""" tags: list[str] = [] threads = [] @@ -483,8 +482,7 @@ def add_dynamic_completer( action: argparse.Action, completer: Callable[..., Iterable[str]], ) -> None: - """ - Add a completion function to an argparse action, this will only be added, + """Add a completion function to an argparse action, this will only be added, if the argcomplete module is loaded. """ if argcomplete: diff --git a/pkgs/clan-cli/clan_cli/facts/check.py b/pkgs/clan-cli/clan_cli/facts/check.py index 8b8232bbf..f6233ea7d 100644 --- a/pkgs/clan-cli/clan_cli/facts/check.py +++ b/pkgs/clan-cli/clan_cli/facts/check.py @@ -21,14 +21,14 @@ def check_secrets(machine: Machine, service: None | str = None) -> bool: secret_name = secret_fact["name"] if not machine.secret_facts_store.exists(service, secret_name): machine.info( - f"Secret fact '{secret_fact}' for service '{service}' is missing." + f"Secret fact '{secret_fact}' for service '{service}' is missing.", ) missing_secret_facts.append((service, secret_name)) for public_fact in machine.facts_data[service]["public"]: if not machine.public_facts_store.exists(service, public_fact): machine.info( - f"Public fact '{public_fact}' for service '{service}' is missing." 
+ f"Public fact '{public_fact}' for service '{service}' is missing.", ) missing_public_facts.append((service, public_fact)) diff --git a/pkgs/clan-cli/clan_cli/facts/check_test.py b/pkgs/clan-cli/clan_cli/facts/check_test.py index 23bc77249..3ac96451f 100644 --- a/pkgs/clan-cli/clan_cli/facts/check_test.py +++ b/pkgs/clan-cli/clan_cli/facts/check_test.py @@ -7,7 +7,8 @@ from clan_cli.tests.helpers import cli def test_check_command_no_flake( - tmp_path: Path, monkeypatch: pytest.MonkeyPatch + tmp_path: Path, + monkeypatch: pytest.MonkeyPatch, ) -> None: monkeypatch.chdir(tmp_path) diff --git a/pkgs/clan-cli/clan_cli/facts/generate.py b/pkgs/clan-cli/clan_cli/facts/generate.py index b817c44c7..bea29285a 100644 --- a/pkgs/clan-cli/clan_cli/facts/generate.py +++ b/pkgs/clan-cli/clan_cli/facts/generate.py @@ -29,9 +29,7 @@ log = logging.getLogger(__name__) def read_multiline_input(prompt: str = "Finish with Ctrl-D") -> str: - """ - Read multi-line input from stdin. - """ + """Read multi-line input from stdin.""" print(prompt, flush=True) proc = run(["cat"], RunOpts(check=False)) log.info("Input received. Processing...") @@ -63,7 +61,7 @@ def bubblewrap_cmd(generator: str, facts_dir: Path, secrets_dir: Path) -> list[s "--uid", "1000", "--gid", "1000", "--", - "bash", "-c", generator + "bash", "-c", generator, ], ) # fmt: on @@ -102,7 +100,8 @@ def generate_service_facts( generator = machine.facts_data[service]["generator"]["finalScript"] if machine.facts_data[service]["generator"]["prompt"]: prompt_value = prompt( - service, machine.facts_data[service]["generator"]["prompt"] + service, + machine.facts_data[service]["generator"]["prompt"], ) env["prompt_value"] = prompt_value from clan_lib import bwrap @@ -126,7 +125,10 @@ def generate_service_facts( msg += generator raise ClanError(msg) secret_path = secret_facts_store.set( - service, secret_name, secret_file.read_bytes(), groups + service, + secret_name, + secret_file.read_bytes(), + groups, ) if secret_path: files_to_commit.append(secret_path) @@ -206,7 +208,11 @@ def generate_facts( errors = 0 try: was_regenerated |= _generate_facts_for_machine( - machine, service, regenerate, tmpdir, prompt + machine, + service, + regenerate, + tmpdir, + prompt, ) except (OSError, ClanError) as e: machine.error(f"Failed to generate facts: {e}") @@ -231,7 +237,7 @@ def generate_command(args: argparse.Namespace) -> None: filter( lambda m: m.name in args.machines, machines, - ) + ), ) generate_facts(machines, args.service, args.regenerate) diff --git a/pkgs/clan-cli/clan_cli/facts/generate_test.py b/pkgs/clan-cli/clan_cli/facts/generate_test.py index 910703084..3fab6cb5a 100644 --- a/pkgs/clan-cli/clan_cli/facts/generate_test.py +++ b/pkgs/clan-cli/clan_cli/facts/generate_test.py @@ -7,7 +7,8 @@ from clan_cli.tests.helpers import cli def test_generate_command_no_flake( - tmp_path: Path, monkeypatch: pytest.MonkeyPatch + tmp_path: Path, + monkeypatch: pytest.MonkeyPatch, ) -> None: monkeypatch.chdir(tmp_path) diff --git a/pkgs/clan-cli/clan_cli/facts/public_modules/__init__.py b/pkgs/clan-cli/clan_cli/facts/public_modules/__init__.py index f39349677..501d69ecb 100644 --- a/pkgs/clan-cli/clan_cli/facts/public_modules/__init__.py +++ b/pkgs/clan-cli/clan_cli/facts/public_modules/__init__.py @@ -3,7 +3,7 @@ from __future__ import annotations from abc import ABC, abstractmethod from pathlib import Path -import clan_lib.machines.machines as machines +from clan_lib.machines import machines class FactStoreBase(ABC): diff --git 
a/pkgs/clan-cli/clan_cli/facts/secret_modules/__init__.py b/pkgs/clan-cli/clan_cli/facts/secret_modules/__init__.py index fe7269ae2..ea9d4c792 100644 --- a/pkgs/clan-cli/clan_cli/facts/secret_modules/__init__.py +++ b/pkgs/clan-cli/clan_cli/facts/secret_modules/__init__.py @@ -3,7 +3,7 @@ from __future__ import annotations from abc import ABC, abstractmethod from pathlib import Path -import clan_lib.machines.machines as machines +from clan_lib.machines import machines from clan_lib.ssh.host import Host @@ -14,7 +14,11 @@ class SecretStoreBase(ABC): @abstractmethod def set( - self, service: str, name: str, value: bytes, groups: list[str] + self, + service: str, + name: str, + value: bytes, + groups: list[str], ) -> Path | None: pass diff --git a/pkgs/clan-cli/clan_cli/facts/secret_modules/password_store.py b/pkgs/clan-cli/clan_cli/facts/secret_modules/password_store.py index 8ac033a55..e4a1c4bc9 100644 --- a/pkgs/clan-cli/clan_cli/facts/secret_modules/password_store.py +++ b/pkgs/clan-cli/clan_cli/facts/secret_modules/password_store.py @@ -16,7 +16,11 @@ class SecretStore(SecretStoreBase): self.machine = machine def set( - self, service: str, name: str, value: bytes, groups: list[str] + self, + service: str, + name: str, + value: bytes, + groups: list[str], ) -> Path | None: subprocess.run( nix_shell( @@ -40,14 +44,16 @@ class SecretStore(SecretStoreBase): def exists(self, service: str, name: str) -> bool: password_store = os.environ.get( - "PASSWORD_STORE_DIR", f"{os.environ['HOME']}/.password-store" + "PASSWORD_STORE_DIR", + f"{os.environ['HOME']}/.password-store", ) secret_path = Path(password_store) / f"machines/{self.machine.name}/{name}.gpg" return secret_path.exists() def generate_hash(self) -> bytes: password_store = os.environ.get( - "PASSWORD_STORE_DIR", f"{os.environ['HOME']}/.password-store" + "PASSWORD_STORE_DIR", + f"{os.environ['HOME']}/.password-store", ) hashes = [] hashes.append( @@ -66,7 +72,7 @@ class SecretStore(SecretStoreBase): ), stdout=subprocess.PIPE, check=False, - ).stdout.strip() + ).stdout.strip(), ) for symlink in Path(password_store).glob(f"machines/{self.machine.name}/**/*"): if symlink.is_symlink(): @@ -86,7 +92,7 @@ class SecretStore(SecretStoreBase): ), stdout=subprocess.PIPE, check=False, - ).stdout.strip() + ).stdout.strip(), ) # we sort the hashes to make sure that the order is always the same diff --git a/pkgs/clan-cli/clan_cli/facts/secret_modules/sops.py b/pkgs/clan-cli/clan_cli/facts/secret_modules/sops.py index 545811aba..80229ecab 100644 --- a/pkgs/clan-cli/clan_cli/facts/secret_modules/sops.py +++ b/pkgs/clan-cli/clan_cli/facts/secret_modules/sops.py @@ -37,7 +37,11 @@ class SecretStore(SecretStoreBase): add_machine(self.machine.flake_dir, self.machine.name, pub_key, False) def set( - self, service: str, name: str, value: bytes, groups: list[str] + self, + service: str, + name: str, + value: bytes, + groups: list[str], ) -> Path | None: path = ( sops_secrets_folder(self.machine.flake_dir) / f"{self.machine.name}-{name}" diff --git a/pkgs/clan-cli/clan_cli/facts/secret_modules/vm.py b/pkgs/clan-cli/clan_cli/facts/secret_modules/vm.py index 09d0e03fd..2a7d11b7d 100644 --- a/pkgs/clan-cli/clan_cli/facts/secret_modules/vm.py +++ b/pkgs/clan-cli/clan_cli/facts/secret_modules/vm.py @@ -15,7 +15,11 @@ class SecretStore(SecretStoreBase): self.dir.mkdir(parents=True, exist_ok=True) def set( - self, service: str, name: str, value: bytes, groups: list[str] + self, + service: str, + name: str, + value: bytes, + groups: list[str], ) -> Path | None: 
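# Editor's note -- the hunks above only reflow the `set()` signature shared by
# the secret store backends (password-store, sops, vm). For orientation, here
# is a minimal directory-backed store with the same shape. It is a sketch, not
# one of the real backends: the class name and on-disk layout are assumptions.
from pathlib import Path


class DirectorySecretStore:
    def __init__(self, base_dir: Path) -> None:
        self.base_dir = base_dir

    def set(
        self,
        service: str,
        name: str,
        value: bytes,
        groups: list[str],
    ) -> Path | None:
        # Groups are ignored here; the sops backend uses them for key sharing.
        secret_file = self.base_dir / service / name
        secret_file.parent.mkdir(parents=True, exist_ok=True)
        secret_file.write_bytes(value)
        return secret_file

    def exists(self, service: str, name: str) -> bool:
        return (self.base_dir / service / name).exists()


store = DirectorySecretStore(Path("/tmp/demo-secrets"))
store.set("wireguard", "private-key", b"dummy", groups=[])
print(store.exists("wireguard", "private-key"))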
secret_file = self.dir / service / name secret_file.parent.mkdir(parents=True, exist_ok=True) diff --git a/pkgs/clan-cli/clan_cli/facts/upload_test.py b/pkgs/clan-cli/clan_cli/facts/upload_test.py index 62afefb8b..d5cdbdee1 100644 --- a/pkgs/clan-cli/clan_cli/facts/upload_test.py +++ b/pkgs/clan-cli/clan_cli/facts/upload_test.py @@ -7,7 +7,8 @@ from clan_cli.tests.helpers import cli def test_upload_command_no_flake( - tmp_path: Path, monkeypatch: pytest.MonkeyPatch + tmp_path: Path, + monkeypatch: pytest.MonkeyPatch, ) -> None: monkeypatch.chdir(tmp_path) diff --git a/pkgs/clan-cli/clan_cli/flash/cli.py b/pkgs/clan-cli/clan_cli/flash/cli.py index e4d093be2..794ce702d 100644 --- a/pkgs/clan-cli/clan_cli/flash/cli.py +++ b/pkgs/clan-cli/clan_cli/flash/cli.py @@ -21,6 +21,7 @@ def register_parser(parser: argparse.ArgumentParser) -> None: register_flash_write_parser(write_parser) list_parser = subparser.add_parser( - "list", help="List possible keymaps or languages" + "list", + help="List possible keymaps or languages", ) register_flash_list_parser(list_parser) diff --git a/pkgs/clan-cli/clan_cli/flash/flash_cmd.py b/pkgs/clan-cli/clan_cli/flash/flash_cmd.py index d67e700ed..66ef968be 100644 --- a/pkgs/clan-cli/clan_cli/flash/flash_cmd.py +++ b/pkgs/clan-cli/clan_cli/flash/flash_cmd.py @@ -121,7 +121,7 @@ def register_flash_write_parser(parser: argparse.ArgumentParser) -> None: Format will format the disk before installing. Mount will mount the disk before installing. Mount is useful for updating an existing system without losing data. - """ + """, ) parser.add_argument( "--mode", @@ -166,7 +166,7 @@ def register_flash_write_parser(parser: argparse.ArgumentParser) -> None: Write EFI boot entries to the NVRAM of the system for the installed system. Specify this option if you plan to boot from this disk on the current machine, but not if you plan to move the disk to another machine. - """ + """, ).strip(), default=False, action="store_true", diff --git a/pkgs/clan-cli/clan_cli/flash/list_test.py b/pkgs/clan-cli/clan_cli/flash/list_test.py index a21a14709..6a62e8c83 100644 --- a/pkgs/clan-cli/clan_cli/flash/list_test.py +++ b/pkgs/clan-cli/clan_cli/flash/list_test.py @@ -8,7 +8,8 @@ from clan_cli.tests.stdout import CaptureOutput @pytest.mark.with_core def test_flash_list_languages( - temporary_home: Path, capture_output: CaptureOutput + temporary_home: Path, + capture_output: CaptureOutput, ) -> None: with capture_output as output: cli.run(["flash", "list", "languages"]) @@ -20,7 +21,8 @@ def test_flash_list_languages( @pytest.mark.with_core def test_flash_list_keymaps( - temporary_home: Path, capture_output: CaptureOutput + temporary_home: Path, + capture_output: CaptureOutput, ) -> None: with capture_output as output: cli.run(["flash", "list", "keymaps"]) diff --git a/pkgs/clan-cli/clan_cli/hyperlink.py b/pkgs/clan-cli/clan_cli/hyperlink.py index 55402a63d..7b7e3ed25 100644 --- a/pkgs/clan-cli/clan_cli/hyperlink.py +++ b/pkgs/clan-cli/clan_cli/hyperlink.py @@ -1,7 +1,6 @@ # Implementation of OSC8 def hyperlink(text: str, url: str) -> str: - """ - Generate OSC8 escape sequence for hyperlinks. + """Generate OSC8 escape sequence for hyperlinks. Args: url (str): The URL to link to. @@ -9,15 +8,14 @@ def hyperlink(text: str, url: str) -> str: Returns: str: The formatted string with an embedded hyperlink. 
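# Editor's note -- the hyperlink docstring above describes OSC 8 terminal
# hyperlinks. A self-contained sketch of the escape sequence it documents
# (ESC ] 8 ; ; URL ESC \ TEXT ESC ] 8 ; ; ESC \); treat it as an illustration
# rather than a copy of clan_cli.hyperlink.
def osc8_link(text: str, url: str) -> str:
    esc = "\033"
    return f"{esc}]8;;{url}{esc}\\{text}{esc}]8;;{esc}\\"


# On terminals with OSC 8 support the word "docs" becomes clickable;
# unsupported terminals simply print "docs".
print(osc8_link("docs", "https://docs.clan.lol"))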
+ """ esc = "\033" return f"{esc}]8;;{url}{esc}\\{text}{esc}]8;;{esc}\\" def hyperlink_same_text_and_url(url: str) -> str: - """ - Keep the description and the link the same to support legacy terminals. - """ + """Keep the description and the link the same to support legacy terminals.""" return hyperlink(url, url) @@ -34,9 +32,7 @@ def help_hyperlink(description: str, url: str) -> str: def docs_hyperlink(description: str, url: str) -> str: - """ - Returns a markdown hyperlink - """ + """Returns a markdown hyperlink""" url = url.replace("https://docs.clan.lol", "../..") url = url.replace("index.html", "index") url += ".md" diff --git a/pkgs/clan-cli/clan_cli/machines/create.py b/pkgs/clan-cli/clan_cli/machines/create.py index 7b59d8b8f..5e1f75af0 100644 --- a/pkgs/clan-cli/clan_cli/machines/create.py +++ b/pkgs/clan-cli/clan_cli/machines/create.py @@ -32,8 +32,7 @@ def create_machine( opts: CreateOptions, commit: bool = True, ) -> None: - """ - Create a new machine in the clan directory. + """Create a new machine in the clan directory. This function will create a new machine based on a template. @@ -41,7 +40,6 @@ def create_machine( :param commit: Whether to commit the changes to the git repository. :param _persist: Temporary workaround for 'morph'. Whether to persist the changes to the inventory store. """ - if not opts.clan_dir.is_local: msg = f"Clan {opts.clan_dir} is not a local clan." description = "Import machine only works on local clans" diff --git a/pkgs/clan-cli/clan_cli/machines/hardware.py b/pkgs/clan-cli/clan_cli/machines/hardware.py index 0c478b519..b84576ba6 100644 --- a/pkgs/clan-cli/clan_cli/machines/hardware.py +++ b/pkgs/clan-cli/clan_cli/machines/hardware.py @@ -33,13 +33,15 @@ def update_hardware_config_command(args: argparse.Namespace) -> None: if args.target_host: target_host = Remote.from_ssh_uri( - machine_name=machine.name, address=args.target_host + machine_name=machine.name, + address=args.target_host, ) else: target_host = machine.target_host() target_host = target_host.override( - host_key_check=args.host_key_check, private_key=args.identity_file + host_key_check=args.host_key_check, + private_key=args.identity_file, ) run_machine_hardware_info(opts, target_host) diff --git a/pkgs/clan-cli/clan_cli/machines/hardware_test.py b/pkgs/clan-cli/clan_cli/machines/hardware_test.py index 41747113c..e92c123b5 100644 --- a/pkgs/clan-cli/clan_cli/machines/hardware_test.py +++ b/pkgs/clan-cli/clan_cli/machines/hardware_test.py @@ -7,7 +7,8 @@ from clan_cli.tests.helpers import cli def test_create_command_no_flake( - tmp_path: Path, monkeypatch: pytest.MonkeyPatch + tmp_path: Path, + monkeypatch: pytest.MonkeyPatch, ) -> None: monkeypatch.chdir(tmp_path) diff --git a/pkgs/clan-cli/clan_cli/machines/install.py b/pkgs/clan-cli/clan_cli/machines/install.py index 4f5e4c7a1..063b1df4d 100644 --- a/pkgs/clan-cli/clan_cli/machines/install.py +++ b/pkgs/clan-cli/clan_cli/machines/install.py @@ -34,7 +34,8 @@ def install_command(args: argparse.Namespace) -> None: if args.target_host: # TODO add network support here with either --network or some url magic remote = Remote.from_ssh_uri( - machine_name=args.machine, address=args.target_host + machine_name=args.machine, + address=args.target_host, ) elif args.png: data = read_qr_image(Path(args.png)) @@ -73,7 +74,7 @@ def install_command(args: argparse.Namespace) -> None: if ask == "n" or ask == "": return None print( - f"Invalid input '{ask}'. Please enter 'y' for yes or 'n' for no." + f"Invalid input '{ask}'. 
Please enter 'y' for yes or 'n' for no.", ) if args.identity_file: diff --git a/pkgs/clan-cli/clan_cli/machines/list.py b/pkgs/clan-cli/clan_cli/machines/list.py index 7035e23b0..eb5348b11 100644 --- a/pkgs/clan-cli/clan_cli/machines/list.py +++ b/pkgs/clan-cli/clan_cli/machines/list.py @@ -13,7 +13,8 @@ def list_command(args: argparse.Namespace) -> None: flake = require_flake(args.flake) for name in list_machines( - flake, opts=ListOptions(filter=MachineFilter(tags=args.tags)) + flake, + opts=ListOptions(filter=MachineFilter(tags=args.tags)), ): print(name) diff --git a/pkgs/clan-cli/clan_cli/machines/list_test.py b/pkgs/clan-cli/clan_cli/machines/list_test.py index 3144ff167..9785fb268 100644 --- a/pkgs/clan-cli/clan_cli/machines/list_test.py +++ b/pkgs/clan-cli/clan_cli/machines/list_test.py @@ -43,7 +43,7 @@ def list_basic( description = "Backup server"; }; }; - }""" + }""", }, ], indirect=True, @@ -62,7 +62,7 @@ def list_with_tags_single_tag( str(test_flake_with_core.path), "--tags", "production", - ] + ], ) assert "web-server" in output.out @@ -94,7 +94,7 @@ def list_with_tags_single_tag( description = "Backup server"; }; }; - }""" + }""", }, ], indirect=True, @@ -114,7 +114,7 @@ def list_with_tags_multiple_tags_intersection( "--tags", "web", "production", - ] + ], ) # Should only include machines that have BOTH tags (intersection) @@ -139,7 +139,7 @@ def test_machines_list_with_tags_no_matches( str(test_flake_with_core.path), "--tags", "nonexistent", - ] + ], ) assert output.out.strip() == "" @@ -162,7 +162,7 @@ def test_machines_list_with_tags_no_matches( }; server4 = { }; }; - }""" + }""", }, ], indirect=True, @@ -180,7 +180,7 @@ def list_with_tags_various_scenarios( str(test_flake_with_core.path), "--tags", "web", - ] + ], ) assert "server1" in output.out @@ -197,7 +197,7 @@ def list_with_tags_various_scenarios( str(test_flake_with_core.path), "--tags", "database", - ] + ], ) assert "server2" in output.out @@ -216,7 +216,7 @@ def list_with_tags_various_scenarios( "--tags", "web", "database", - ] + ], ) assert "server3" in output.out @@ -239,7 +239,7 @@ def created_machine_and_tags( "--tags", "test", "server", - ] + ], ) with capture_output as output: @@ -258,7 +258,7 @@ def created_machine_and_tags( str(test_flake_with_core.path), "--tags", "test", - ] + ], ) assert "test-machine" in output.out @@ -274,7 +274,7 @@ def created_machine_and_tags( str(test_flake_with_core.path), "--tags", "server", - ] + ], ) assert "test-machine" in output.out @@ -291,7 +291,7 @@ def created_machine_and_tags( "--tags", "test", "server", - ] + ], ) assert "test-machine" in output.out @@ -310,7 +310,7 @@ def created_machine_and_tags( }; machine-without-tags = { }; }; - }""" + }""", }, ], indirect=True, @@ -334,7 +334,7 @@ def list_mixed_tagged_untagged( str(test_flake_with_core.path), "--tags", "tag1", - ] + ], ) assert "machine-with-tags" in output.out @@ -349,7 +349,7 @@ def list_mixed_tagged_untagged( str(test_flake_with_core.path), "--tags", "nonexistent", - ] + ], ) assert "machine-with-tags" not in output.out @@ -358,7 +358,8 @@ def list_mixed_tagged_untagged( def test_machines_list_require_flake_error( - temporary_home: Path, monkeypatch: pytest.MonkeyPatch + temporary_home: Path, + monkeypatch: pytest.MonkeyPatch, ) -> None: """Test that machines list command fails when flake is required but not provided.""" monkeypatch.chdir(temporary_home) diff --git a/pkgs/clan-cli/clan_cli/machines/machines_test.py b/pkgs/clan-cli/clan_cli/machines/machines_test.py index 4410a0377..c9d811675 100644 --- 
a/pkgs/clan-cli/clan_cli/machines/machines_test.py +++ b/pkgs/clan-cli/clan_cli/machines/machines_test.py @@ -15,7 +15,7 @@ from clan_cli.tests.fixtures_flakes import FlakeForTest machines.jon1 = { }; machines.jon2 = { machineClass = "nixos"; }; machines.sara = { machineClass = "darwin"; }; - }""" + }""", }, ], # Important! @@ -27,8 +27,7 @@ from clan_cli.tests.fixtures_flakes import FlakeForTest def test_inventory_machine_detect_class( test_flake_with_core: FlakeForTest, ) -> None: - """ - Testing different inventory deserializations + """Testing different inventory deserializations Inventory should always be deserializable to a dict """ machine_jon1 = Machine( diff --git a/pkgs/clan-cli/clan_cli/machines/update.py b/pkgs/clan-cli/clan_cli/machines/update.py index 7a809e6af..811cd5d3d 100644 --- a/pkgs/clan-cli/clan_cli/machines/update.py +++ b/pkgs/clan-cli/clan_cli/machines/update.py @@ -87,7 +87,8 @@ def get_machines_for_update( ) -> list[Machine]: all_machines = list_machines(flake) machines_with_tags = list_machines( - flake, ListOptions(filter=MachineFilter(tags=filter_tags)) + flake, + ListOptions(filter=MachineFilter(tags=filter_tags)), ) if filter_tags and not machines_with_tags: @@ -101,7 +102,7 @@ def get_machines_for_update( filter( requires_explicit_update, instantiate_inventory_to_machines(flake, machines_with_tags).values(), - ) + ), ) # all machines that are in the clan but not included in the update list machine_names_to_update = [m.name for m in machines_to_update] @@ -131,7 +132,7 @@ def get_machines_for_update( raise ClanError(msg) machines_to_update.append( - Machine.from_inventory(name, flake, inventory_machine) + Machine.from_inventory(name, flake, inventory_machine), ) return machines_to_update @@ -163,7 +164,7 @@ def update_command(args: argparse.Namespace) -> None: f"clanInternals.machines.{system}.{{{','.join(machine_names)}}}.config.clan.core.vars.settings.secretModule", f"clanInternals.machines.{system}.{{{','.join(machine_names)}}}.config.clan.deployment.requireExplicitUpdate", f"clanInternals.machines.{system}.{{{','.join(machine_names)}}}.config.system.clan.deployment.nixosMobileWorkaround", - ] + ], ) host_key_check = args.host_key_check diff --git a/pkgs/clan-cli/clan_cli/machines/update_test.py b/pkgs/clan-cli/clan_cli/machines/update_test.py index 0ae2b9a08..c124ddedf 100644 --- a/pkgs/clan-cli/clan_cli/machines/update_test.py +++ b/pkgs/clan-cli/clan_cli/machines/update_test.py @@ -17,12 +17,12 @@ from clan_cli.tests.helpers import cli "inventory_expr": r"""{ machines.jon = { tags = [ "foo" "bar" ]; }; machines.sara = { tags = [ "foo" "baz" ]; }; - }""" + }""", }, ["jon"], # explizit names [], # filter tags ["jon"], # expected - ) + ), ], # Important! # tells pytest to pass these values to the fixture @@ -55,12 +55,12 @@ def test_get_machines_for_update_single_name( "inventory_expr": r"""{ machines.jon = { tags = [ "foo" "bar" ]; }; machines.sara = { tags = [ "foo" "baz" ]; }; - }""" + }""", }, [], # explizit names ["foo"], # filter tags ["jon", "sara"], # expected - ) + ), ], # Important! # tells pytest to pass these values to the fixture @@ -93,12 +93,12 @@ def test_get_machines_for_update_tags( "inventory_expr": r"""{ machines.jon = { tags = [ "foo" "bar" ]; }; machines.sara = { tags = [ "foo" "baz" ]; }; - }""" + }""", }, ["sara"], # explizit names ["foo"], # filter tags ["sara"], # expected - ) + ), ], # Important! 
# tells pytest to pass these values to the fixture @@ -131,7 +131,7 @@ def test_get_machines_for_update_tags_and_name( "inventory_expr": r"""{ machines.jon = { tags = [ "foo" "bar" ]; }; machines.sara = { tags = [ "foo" "baz" ]; }; - }""" + }""", }, [], # no explizit names [], # no filter tags @@ -162,7 +162,8 @@ def test_get_machines_for_update_implicit_all( def test_update_command_no_flake( - tmp_path: Path, monkeypatch: pytest.MonkeyPatch + tmp_path: Path, + monkeypatch: pytest.MonkeyPatch, ) -> None: monkeypatch.chdir(tmp_path) diff --git a/pkgs/clan-cli/clan_cli/network/list.py b/pkgs/clan-cli/clan_cli/network/list.py index 4175271dd..b09a38e50 100644 --- a/pkgs/clan-cli/clan_cli/network/list.py +++ b/pkgs/clan-cli/clan_cli/network/list.py @@ -19,7 +19,7 @@ def list_command(args: argparse.Namespace) -> None: col_network = max(12, max(len(name) for name in networks)) col_priority = 8 col_module = max( - 10, max(len(net.module_name.split(".")[-1]) for net in networks.values()) + 10, max(len(net.module_name.split(".")[-1]) for net in networks.values()), ) col_running = 8 @@ -30,7 +30,8 @@ def list_command(args: argparse.Namespace) -> None: # Print network entries for network_name, network in sorted( - networks.items(), key=lambda network: -network[1].priority + networks.items(), + key=lambda network: -network[1].priority, ): # Extract simple module name from full module path module_name = network.module_name.split(".")[-1] @@ -56,7 +57,7 @@ def list_command(args: argparse.Namespace) -> None: running_status = "Error" print( - f"{network_name:<{col_network}} {network.priority:<{col_priority}} {module_name:<{col_module}} {running_status:<{col_running}} {peers_str}" + f"{network_name:<{col_network}} {network.priority:<{col_priority}} {module_name:<{col_module}} {running_status:<{col_running}} {peers_str}", ) diff --git a/pkgs/clan-cli/clan_cli/profiler.py b/pkgs/clan-cli/clan_cli/profiler.py index 2f6b9203b..972f68e8f 100644 --- a/pkgs/clan-cli/clan_cli/profiler.py +++ b/pkgs/clan-cli/clan_cli/profiler.py @@ -95,8 +95,7 @@ PROFS = ProfilerStore() def profile(func: Callable) -> Callable: - """ - A decorator that profiles the decorated function, printing out the profiling + """A decorator that profiles the decorated function, printing out the profiling results with paths trimmed to three directories deep. 
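# Editor's note -- the `profile` decorator documented above wraps a function in
# a profiler and prints the results. A generic cProfile-based sketch of that
# idea follows; the clan_cli version additionally trims file paths to three
# directories deep, which is omitted here.
import cProfile
import pstats
from collections.abc import Callable
from functools import wraps
from typing import Any


def profile(func: Callable) -> Callable:
    @wraps(func)
    def wrapper(*args: Any, **kwargs: Any) -> Any:
        prof = cProfile.Profile()
        prof.enable()
        try:
            return func(*args, **kwargs)
        finally:
            prof.disable()
            # Show the ten most expensive calls by cumulative time.
            pstats.Stats(prof).sort_stats("cumulative").print_stats(10)

    return wrapper


@profile
def busy() -> int:
    return sum(i * i for i in range(100_000))


busy()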
""" diff --git a/pkgs/clan-cli/clan_cli/qemu/qga.py b/pkgs/clan-cli/clan_cli/qemu/qga.py index 667d293db..1d9024651 100644 --- a/pkgs/clan-cli/clan_cli/qemu/qga.py +++ b/pkgs/clan-cli/clan_cli/qemu/qga.py @@ -39,7 +39,8 @@ class QgaSession: def run_nonblocking(self, cmd: list[str]) -> int: result_pid = self.client.cmd( - "guest-exec", {"path": cmd[0], "arg": cmd[1:], "capture-output": True} + "guest-exec", + {"path": cmd[0], "arg": cmd[1:], "capture-output": True}, ) if result_pid is None: msg = "Could not get PID from QGA" diff --git a/pkgs/clan-cli/clan_cli/qemu/qmp.py b/pkgs/clan-cli/clan_cli/qemu/qmp.py index ef6e0620d..24aa801f1 100644 --- a/pkgs/clan-cli/clan_cli/qemu/qmp.py +++ b/pkgs/clan-cli/clan_cli/qemu/qmp.py @@ -20,32 +20,23 @@ from clan_lib.errors import ClanError class QMPError(Exception): - """ - QMP base exception - """ + """QMP base exception""" class QMPConnectError(QMPError): - """ - QMP connection exception - """ + """QMP connection exception""" class QMPCapabilitiesError(QMPError): - """ - QMP negotiate capabilities exception - """ + """QMP negotiate capabilities exception""" class QMPTimeoutError(QMPError): - """ - QMP timeout exception - """ + """QMP timeout exception""" class QEMUMonitorProtocol: - """ - Provide an API to connect to QEMU via QEMU Monitor Protocol (QMP) and then + """Provide an API to connect to QEMU via QEMU Monitor Protocol (QMP) and then allow to handle commands and events. """ @@ -58,8 +49,7 @@ class QEMUMonitorProtocol: server: bool = False, nickname: str | None = None, ) -> None: - """ - Create a QEMUMonitorProtocol class. + """Create a QEMUMonitorProtocol class. @param address: QEMU address, can be either a unix socket path (string) or a tuple in the form ( address, port ) for a TCP @@ -109,8 +99,7 @@ class QEMUMonitorProtocol: return resp def __get_events(self, wait: bool | float = False) -> None: - """ - Check for new events in the stream and cache them in __events. + """Check for new events in the stream and cache them in __events. @param wait (bool): block until an event is available. @param wait (float): If wait is a float, treat it as a timeout value. @@ -120,7 +109,6 @@ class QEMUMonitorProtocol: @raise QMPConnectError: If wait is True but no events could be retrieved or if some other error occurred. """ - # Check for new events regardless and pull them into the cache: self.__sock.setblocking(0) try: @@ -163,8 +151,7 @@ class QEMUMonitorProtocol: self.close() def connect(self, negotiate: bool = True) -> dict[str, Any] | None: - """ - Connect to the QMP Monitor and perform capabilities negotiation. + """Connect to the QMP Monitor and perform capabilities negotiation. @return QMP greeting dict, or None if negotiate is false @raise OSError on socket connection errors @@ -178,8 +165,7 @@ class QEMUMonitorProtocol: return None def accept(self, timeout: float | None = 15.0) -> dict[str, Any]: - """ - Await connection from QMP Monitor and perform capabilities negotiation. + """Await connection from QMP Monitor and perform capabilities negotiation. @param timeout: timeout in seconds (nonnegative float number, or None). The value passed will set the behavior of the @@ -199,8 +185,7 @@ class QEMUMonitorProtocol: return self.__negotiate_capabilities() def cmd_obj(self, qmp_cmd: dict[str, Any]) -> dict[str, Any] | None: - """ - Send a QMP command to the QMP Monitor. + """Send a QMP command to the QMP Monitor. 
@param qmp_cmd: QMP command to be sent as a Python dict @return QMP response as a Python dict or None if the connection has @@ -223,8 +208,7 @@ class QEMUMonitorProtocol: args: dict[str, Any] | None = None, cmd_id: dict[str, Any] | list[Any] | str | int | None = None, ) -> dict[str, Any] | None: - """ - Build a QMP command and send it to the QMP Monitor. + """Build a QMP command and send it to the QMP Monitor. @param name: command name (string) @param args: command arguments (dict) @@ -238,17 +222,14 @@ class QEMUMonitorProtocol: return self.cmd_obj(qmp_cmd) def command(self, cmd: str, **kwds: Any) -> Any: - """ - Build and send a QMP command to the monitor, report errors if any - """ + """Build and send a QMP command to the monitor, report errors if any""" ret = self.cmd(cmd, kwds) if "error" in ret: raise ClanError(ret["error"]["desc"]) return ret["return"] def pull_event(self, wait: bool | float = False) -> dict[str, Any] | None: - """ - Pulls a single event. + """Pulls a single event. @param wait (bool): block until an event is available. @param wait (float): If wait is a float, treat it as a timeout value. @@ -267,8 +248,7 @@ class QEMUMonitorProtocol: return None def get_events(self, wait: bool | float = False) -> list[dict[str, Any]]: - """ - Get a list of available QMP events. + """Get a list of available QMP events. @param wait (bool): block until an event is available. @param wait (float): If wait is a float, treat it as a timeout value. @@ -284,23 +264,18 @@ class QEMUMonitorProtocol: return self.__events def clear_events(self) -> None: - """ - Clear current list of pending events. - """ + """Clear current list of pending events.""" self.__events = [] def close(self) -> None: - """ - Close the socket and socket file. - """ + """Close the socket and socket file.""" if self.__sock: self.__sock.close() if self.__sockfile: self.__sockfile.close() def settimeout(self, timeout: float | None) -> None: - """ - Set the socket timeout. + """Set the socket timeout. @param timeout (float): timeout in seconds, or None. @note This is a wrap around socket.settimeout @@ -308,16 +283,14 @@ class QEMUMonitorProtocol: self.__sock.settimeout(timeout) def get_sock_fd(self) -> int: - """ - Get the socket file descriptor. + """Get the socket file descriptor. @return The file descriptor number. """ return self.__sock.fileno() def is_scm_available(self) -> bool: - """ - Check if the socket allows for SCM_RIGHTS. + """Check if the socket allows for SCM_RIGHTS. @return True if SCM_RIGHTS is available, otherwise False. 
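# Editor's note -- the QEMUMonitorProtocol docstrings above describe QMP: a
# JSON protocol where the client reads a greeting, negotiates capabilities,
# then sends commands as {"execute": ..., "arguments": ...} objects. A
# bare-bones sketch over a UNIX socket follows; the socket path is an
# assumption and error handling is minimal.
import json
import socket


def qmp_query_status(socket_path: str = "/tmp/qmp.sock") -> dict:
    with socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) as sock:
        sock.connect(socket_path)
        reader = sock.makefile("r")

        def send(cmd: dict) -> dict:
            sock.sendall(json.dumps(cmd).encode() + b"\n")
            # QEMU emits one JSON object per line; skip async events until a
            # response ("return" or "error") arrives.
            while True:
                msg = json.loads(reader.readline())
                if "return" in msg or "error" in msg:
                    return msg

        json.loads(reader.readline())  # greeting: {"QMP": {...}}
        send({"execute": "qmp_capabilities"})  # leave capabilities mode
        return send({"execute": "query-status"})


# Example (requires QEMU started with e.g. -qmp unix:/tmp/qmp.sock,server,nowait):
# print(qmp_query_status())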
""" diff --git a/pkgs/clan-cli/clan_cli/secrets/groups.py b/pkgs/clan-cli/clan_cli/secrets/groups.py index 967b89c14..98ddbf5f1 100644 --- a/pkgs/clan-cli/clan_cli/secrets/groups.py +++ b/pkgs/clan-cli/clan_cli/secrets/groups.py @@ -41,7 +41,11 @@ def users_folder(flake_dir: Path, group: str) -> Path: class Group: def __init__( - self, flake_dir: Path, name: str, machines: list[str], users: list[str] + self, + flake_dir: Path, + name: str, + machines: list[str], + users: list[str], ) -> None: self.name = name self.machines = machines @@ -235,13 +239,18 @@ def remove_machine_command(args: argparse.Namespace) -> None: def add_group_argument(parser: argparse.ArgumentParser) -> None: group_action = parser.add_argument( - "group", help="the name of the secret", type=group_name_type + "group", + help="the name of the secret", + type=group_name_type, ) add_dynamic_completer(group_action, complete_groups) def add_secret( - flake_dir: Path, group: str, name: str, age_plugins: list[str] | None + flake_dir: Path, + group: str, + name: str, + age_plugins: list[str] | None, ) -> None: secrets.allow_member( secrets.groups_folder(sops_secrets_folder(flake_dir) / name), @@ -276,7 +285,10 @@ def add_secret_command(args: argparse.Namespace) -> None: def remove_secret( - flake_dir: Path, group: str, name: str, age_plugins: list[str] + flake_dir: Path, + group: str, + name: str, + age_plugins: list[str], ) -> None: updated_paths = secrets.disallow_member( secrets.groups_folder(sops_secrets_folder(flake_dir) / name), @@ -313,22 +325,28 @@ def register_groups_parser(parser: argparse.ArgumentParser) -> None: # Add user add_machine_parser = subparser.add_parser( - "add-machine", help="add a machine to group" + "add-machine", + help="add a machine to group", ) add_group_argument(add_machine_parser) add_machine_action = add_machine_parser.add_argument( - "machine", help="the name of the machines to add", type=machine_name_type + "machine", + help="the name of the machines to add", + type=machine_name_type, ) add_dynamic_completer(add_machine_action, complete_machines) add_machine_parser.set_defaults(func=add_machine_command) # Remove machine remove_machine_parser = subparser.add_parser( - "remove-machine", help="remove a machine from group" + "remove-machine", + help="remove a machine from group", ) add_group_argument(remove_machine_parser) remove_machine_action = remove_machine_parser.add_argument( - "machine", help="the name of the machines to remove", type=machine_name_type + "machine", + help="the name of the machines to remove", + type=machine_name_type, ) add_dynamic_completer(remove_machine_action, complete_machines) remove_machine_parser.set_defaults(func=remove_machine_command) @@ -337,40 +355,51 @@ def register_groups_parser(parser: argparse.ArgumentParser) -> None: add_user_parser = subparser.add_parser("add-user", help="add a user to group") add_group_argument(add_user_parser) add_user_action = add_user_parser.add_argument( - "user", help="the name of the user to add", type=user_name_type + "user", + help="the name of the user to add", + type=user_name_type, ) add_dynamic_completer(add_user_action, complete_users) add_user_parser.set_defaults(func=add_user_command) # Remove user remove_user_parser = subparser.add_parser( - "remove-user", help="remove a user from a group" + "remove-user", + help="remove a user from a group", ) add_group_argument(remove_user_parser) remove_user_action = remove_user_parser.add_argument( - "user", help="the name of the user to remove", type=user_name_type + "user", + 
help="the name of the user to remove", + type=user_name_type, ) add_dynamic_completer(remove_user_action, complete_users) remove_user_parser.set_defaults(func=remove_user_command) # Add secret add_secret_parser = subparser.add_parser( - "add-secret", help="allow a groups to access a secret" + "add-secret", + help="allow a groups to access a secret", ) add_group_argument(add_secret_parser) add_secret_action = add_secret_parser.add_argument( - "secret", help="the name of the secret", type=secret_name_type + "secret", + help="the name of the secret", + type=secret_name_type, ) add_dynamic_completer(add_secret_action, complete_secrets) add_secret_parser.set_defaults(func=add_secret_command) # Remove secret remove_secret_parser = subparser.add_parser( - "remove-secret", help="remove a group's access to a secret" + "remove-secret", + help="remove a group's access to a secret", ) add_group_argument(remove_secret_parser) remove_secret_action = remove_secret_parser.add_argument( - "secret", help="the name of the secret", type=secret_name_type + "secret", + help="the name of the secret", + type=secret_name_type, ) add_dynamic_completer(remove_secret_action, complete_secrets) remove_secret_parser.set_defaults(func=remove_secret_command) diff --git a/pkgs/clan-cli/clan_cli/secrets/key.py b/pkgs/clan-cli/clan_cli/secrets/key.py index bb2df9a7c..a9491c5cb 100644 --- a/pkgs/clan-cli/clan_cli/secrets/key.py +++ b/pkgs/clan-cli/clan_cli/secrets/key.py @@ -19,8 +19,7 @@ log = logging.getLogger(__name__) def generate_key() -> sops.SopsKey: - """ - Generate a new age key and return it as a SopsKey. + """Generate a new age key and return it as a SopsKey. This function does not check if the key already exists. It will generate a new key every time it is called. @@ -28,14 +27,16 @@ def generate_key() -> sops.SopsKey: Use 'check_key_exists' to check if a key already exists. Before calling this function if you dont want to generate a new key. """ - path = default_admin_private_key_path() _, pub_key = generate_private_key(out_file=path) log.info( - f"Generated age private key at '{path}' for your user.\nPlease back it up on a secure location or you will lose access to your secrets." 
+ f"Generated age private key at '{path}' for your user.\nPlease back it up on a secure location or you will lose access to your secrets.", ) return sops.SopsKey( - pub_key, username="", key_type=sops.KeyType.AGE, source=str(path) + pub_key, + username="", + key_type=sops.KeyType.AGE, + source=str(path), ) @@ -49,7 +50,8 @@ def generate_command(args: argparse.Namespace) -> None: key_type = key.key_type.name.lower() print(f"{key.key_type.name} key {key.pubkey} is already set", file=sys.stderr) print( - f"Add your {key_type} public key to the repository with:", file=sys.stderr + f"Add your {key_type} public key to the repository with:", + file=sys.stderr, ) print( f"clan secrets users add --{key_type}-key {key.pubkey}", diff --git a/pkgs/clan-cli/clan_cli/secrets/machines.py b/pkgs/clan-cli/clan_cli/secrets/machines.py index dca8e7b79..901b37a64 100644 --- a/pkgs/clan-cli/clan_cli/secrets/machines.py +++ b/pkgs/clan-cli/clan_cli/secrets/machines.py @@ -59,16 +59,12 @@ def get_machine_pubkey(flake_dir: Path, name: str) -> str: def has_machine(flake_dir: Path, name: str) -> bool: - """ - Checks if a machine exists in the sops machines folder - """ + """Checks if a machine exists in the sops machines folder""" return (sops_machines_folder(flake_dir) / name / "key.json").exists() def list_sops_machines(flake_dir: Path) -> list[str]: - """ - Lists all machines in the sops machines folder - """ + """Lists all machines in the sops machines folder""" path = sops_machines_folder(flake_dir) def validate(name: str) -> bool: @@ -97,7 +93,10 @@ def add_secret( def remove_secret( - flake_dir: Path, machine: str, secret: str, age_plugins: list[str] | None + flake_dir: Path, + machine: str, + secret: str, + age_plugins: list[str] | None, ) -> None: updated_paths = secrets.disallow_member( secrets.machines_folder(sops_secrets_folder(flake_dir) / secret), @@ -174,7 +173,9 @@ def register_machines_parser(parser: argparse.ArgumentParser) -> None: default=False, ) add_machine_action = add_parser.add_argument( - "machine", help="the name of the machine", type=machine_name_type + "machine", + help="the name of the machine", + type=machine_name_type, ) add_dynamic_completer(add_machine_action, complete_machines) add_parser.add_argument( @@ -187,7 +188,9 @@ def register_machines_parser(parser: argparse.ArgumentParser) -> None: # Parser get_parser = subparser.add_parser("get", help="get a machine public key") get_machine_parser = get_parser.add_argument( - "machine", help="the name of the machine", type=machine_name_type + "machine", + help="the name of the machine", + type=machine_name_type, ) add_dynamic_completer(get_machine_parser, complete_machines) get_parser.set_defaults(func=get_command) @@ -195,35 +198,47 @@ def register_machines_parser(parser: argparse.ArgumentParser) -> None: # Parser remove_parser = subparser.add_parser("remove", help="remove a machine") remove_machine_parser = remove_parser.add_argument( - "machine", help="the name of the machine", type=machine_name_type + "machine", + help="the name of the machine", + type=machine_name_type, ) add_dynamic_completer(remove_machine_parser, complete_machines) remove_parser.set_defaults(func=remove_command) # Parser add_secret_parser = subparser.add_parser( - "add-secret", help="allow a machine to access a secret" + "add-secret", + help="allow a machine to access a secret", ) machine_add_secret_parser = add_secret_parser.add_argument( - "machine", help="the name of the machine", type=machine_name_type + "machine", + help="the name of the machine", + 
type=machine_name_type, ) add_dynamic_completer(machine_add_secret_parser, complete_machines) add_secret_action = add_secret_parser.add_argument( - "secret", help="the name of the secret", type=secret_name_type + "secret", + help="the name of the secret", + type=secret_name_type, ) add_dynamic_completer(add_secret_action, complete_secrets) add_secret_parser.set_defaults(func=add_secret_command) # Parser remove_secret_parser = subparser.add_parser( - "remove-secret", help="remove a group's access to a secret" + "remove-secret", + help="remove a group's access to a secret", ) machine_remove_parser = remove_secret_parser.add_argument( - "machine", help="the name of the machine", type=machine_name_type + "machine", + help="the name of the machine", + type=machine_name_type, ) add_dynamic_completer(machine_remove_parser, complete_machines) remove_secret_action = remove_secret_parser.add_argument( - "secret", help="the name of the secret", type=secret_name_type + "secret", + help="the name of the secret", + type=secret_name_type, ) add_dynamic_completer(remove_secret_action, complete_secrets) remove_secret_parser.set_defaults(func=remove_secret_command) diff --git a/pkgs/clan-cli/clan_cli/secrets/secrets.py b/pkgs/clan-cli/clan_cli/secrets/secrets.py index ed23657f7..c4f136bce 100644 --- a/pkgs/clan-cli/clan_cli/secrets/secrets.py +++ b/pkgs/clan-cli/clan_cli/secrets/secrets.py @@ -50,7 +50,8 @@ def list_generators_secrets(generators_path: Path) -> list[Path]: return has_secret(generator_path / name) for obj in list_objects( - generator_path, functools.partial(validate, generator_path) + generator_path, + functools.partial(validate, generator_path), ): paths.append(generator_path / obj) return paths @@ -89,7 +90,7 @@ def update_secrets( changed_files.extend(cleanup_dangling_symlinks(path / "groups")) changed_files.extend(cleanup_dangling_symlinks(path / "machines")) changed_files.extend( - update_keys(path, collect_keys_for_path(path), age_plugins=age_plugins) + update_keys(path, collect_keys_for_path(path), age_plugins=age_plugins), ) return changed_files @@ -120,7 +121,7 @@ def collect_keys_for_type(folder: Path) -> set[sops.SopsKey]: kind = target.parent.name if folder.name != kind: log.warning( - f"Expected {p} to point to {folder} but points to {target.parent}" + f"Expected {p} to point to {folder} but points to {target.parent}", ) continue keys.update(read_keys(target)) @@ -160,7 +161,7 @@ def encrypt_secret( admin_keys = sops.ensure_admin_public_keys(flake_dir) if not admin_keys: - # todo double check the correct command to run + # TODO double check the correct command to run msg = "No keys found. Please run 'clan secrets add-key' to add a key." 
raise ClanError(msg) @@ -179,7 +180,7 @@ def encrypt_secret( user, do_update_keys, age_plugins=age_plugins, - ) + ), ) for machine in add_machines: @@ -190,7 +191,7 @@ def encrypt_secret( machine, do_update_keys, age_plugins=age_plugins, - ) + ), ) for group in add_groups: @@ -201,7 +202,7 @@ def encrypt_secret( group, do_update_keys, age_plugins=age_plugins, - ) + ), ) recipient_keys = collect_keys_for_path(secret_path) @@ -216,7 +217,7 @@ def encrypt_secret( username, do_update_keys, age_plugins=age_plugins, - ) + ), ) secret_path = secret_path / "secret" @@ -310,13 +311,15 @@ def allow_member( group_folder.parent, collect_keys_for_path(group_folder.parent), age_plugins=age_plugins, - ) + ), ) return changed def disallow_member( - group_folder: Path, name: str, age_plugins: list[str] | None + group_folder: Path, + name: str, + age_plugins: list[str] | None, ) -> list[Path]: target = group_folder / name if not target.exists(): @@ -349,7 +352,8 @@ def has_secret(secret_path: Path) -> bool: def list_secrets( - flake_dir: Path, filter_fn: Callable[[str], bool] | None = None + flake_dir: Path, + filter_fn: Callable[[str], bool] | None = None, ) -> list[str]: path = sops_secrets_folder(flake_dir) diff --git a/pkgs/clan-cli/clan_cli/secrets/sops.py b/pkgs/clan-cli/clan_cli/secrets/sops.py index f428bf459..c66a97ad2 100644 --- a/pkgs/clan-cli/clan_cli/secrets/sops.py +++ b/pkgs/clan-cli/clan_cli/secrets/sops.py @@ -66,7 +66,7 @@ class KeyType(enum.Enum): for public_key in get_public_age_keys(content): log.debug( f"Found age public key from a private key " - f"in {key_path}: {public_key}" + f"in {key_path}: {public_key}", ) keyring.append( @@ -75,7 +75,7 @@ class KeyType(enum.Enum): username="", key_type=self, source=str(key_path), - ) + ), ) except ClanError as e: error_msg = f"Failed to read age keys from {key_path}" @@ -96,7 +96,7 @@ class KeyType(enum.Enum): for public_key in get_public_age_keys(content): log.debug( f"Found age public key from a private key " - f"in the environment (SOPS_AGE_KEY): {public_key}" + f"in the environment (SOPS_AGE_KEY): {public_key}", ) keyring.append( @@ -105,7 +105,7 @@ class KeyType(enum.Enum): username="", key_type=self, source="SOPS_AGE_KEY", - ) + ), ) except ClanError as e: error_msg = "Failed to read age keys from SOPS_AGE_KEY" @@ -126,8 +126,11 @@ class KeyType(enum.Enum): log.debug(msg) keyring.append( SopsKey( - pubkey=fp, username="", key_type=self, source="SOPS_PGP_FP" - ) + pubkey=fp, + username="", + key_type=self, + source="SOPS_PGP_FP", + ), ) return keyring @@ -389,7 +392,7 @@ def get_user_name(flake_dir: Path, user: str) -> str: """Ask the user for their name until a unique one is provided.""" while True: name = input( - f"Your key is not yet added to the repository. Enter your user name for which your sops key will be stored in the repository [default: {user}]: " + f"Your key is not yet added to the repository. 
Enter your user name for which your sops key will be stored in the repository [default: {user}]: ", ) if name: user = name @@ -455,7 +458,9 @@ def ensure_admin_public_keys(flake_dir: Path) -> set[SopsKey]: def update_keys( - secret_path: Path, keys: Iterable[SopsKey], age_plugins: list[str] | None = None + secret_path: Path, + keys: Iterable[SopsKey], + age_plugins: list[str] | None = None, ) -> list[Path]: secret_path = secret_path / "secret" error_msg = f"Could not update keys for {secret_path}" @@ -565,7 +570,7 @@ def get_recipients(secret_path: Path) -> set[SopsKey]: username="", key_type=key_type, source="sops_file", - ) + ), ) return keys diff --git a/pkgs/clan-cli/clan_cli/secrets/users.py b/pkgs/clan-cli/clan_cli/secrets/users.py index 3358ee462..b447b64f7 100644 --- a/pkgs/clan-cli/clan_cli/secrets/users.py +++ b/pkgs/clan-cli/clan_cli/secrets/users.py @@ -66,7 +66,7 @@ def remove_user(flake_dir: Path, name: str) -> None: continue log.info(f"Removing user {name} from group {group}") updated_paths.extend( - groups.remove_member(flake_dir, group.name, groups.users_folder, name) + groups.remove_member(flake_dir, group.name, groups.users_folder, name), ) # Remove the user's key: updated_paths.extend(remove_object(sops_users_folder(flake_dir), name)) @@ -96,7 +96,10 @@ def list_users(flake_dir: Path) -> list[str]: def add_secret( - flake_dir: Path, user: str, secret: str, age_plugins: list[str] | None + flake_dir: Path, + user: str, + secret: str, + age_plugins: list[str] | None, ) -> None: updated_paths = secrets.allow_member( secrets.users_folder(sops_secrets_folder(flake_dir) / secret), @@ -112,10 +115,15 @@ def add_secret( def remove_secret( - flake_dir: Path, user: str, secret: str, age_plugins: list[str] | None + flake_dir: Path, + user: str, + secret: str, + age_plugins: list[str] | None, ) -> None: updated_paths = secrets.disallow_member( - secrets.users_folder(sops_secrets_folder(flake_dir) / secret), user, age_plugins + secrets.users_folder(sops_secrets_folder(flake_dir) / secret), + user, + age_plugins, ) commit_files( updated_paths, @@ -189,7 +197,7 @@ def _key_args(args: argparse.Namespace) -> Iterable[sops.SopsKey]: ] if args.agekey: age_keys.append( - sops.SopsKey(args.agekey, "", sops.KeyType.AGE, source="cmdline") + sops.SopsKey(args.agekey, "", sops.KeyType.AGE, source="cmdline"), ) pgp_keys = [ @@ -260,7 +268,10 @@ def register_users_parser(parser: argparse.ArgumentParser) -> None: add_parser = subparser.add_parser("add", help="add a user") add_parser.add_argument( - "-f", "--force", help="overwrite existing user", action="store_true" + "-f", + "--force", + help="overwrite existing user", + action="store_true", ) add_parser.add_argument("user", help="the name of the user", type=user_name_type) _add_key_flags(add_parser) @@ -268,59 +279,79 @@ def register_users_parser(parser: argparse.ArgumentParser) -> None: get_parser = subparser.add_parser("get", help="get a user public key") get_user_action = get_parser.add_argument( - "user", help="the name of the user", type=user_name_type + "user", + help="the name of the user", + type=user_name_type, ) add_dynamic_completer(get_user_action, complete_users) get_parser.set_defaults(func=get_command) remove_parser = subparser.add_parser("remove", help="remove a user") remove_user_action = remove_parser.add_argument( - "user", help="the name of the user", type=user_name_type + "user", + help="the name of the user", + type=user_name_type, ) add_dynamic_completer(remove_user_action, complete_users) 
remove_parser.set_defaults(func=remove_command) add_secret_parser = subparser.add_parser( - "add-secret", help="allow a user to access a secret" + "add-secret", + help="allow a user to access a secret", ) add_secret_user_action = add_secret_parser.add_argument( - "user", help="the name of the user", type=user_name_type + "user", + help="the name of the user", + type=user_name_type, ) add_dynamic_completer(add_secret_user_action, complete_users) add_secrets_action = add_secret_parser.add_argument( - "secret", help="the name of the secret", type=secret_name_type + "secret", + help="the name of the secret", + type=secret_name_type, ) add_dynamic_completer(add_secrets_action, complete_secrets) add_secret_parser.set_defaults(func=add_secret_command) remove_secret_parser = subparser.add_parser( - "remove-secret", help="remove a user's access to a secret" + "remove-secret", + help="remove a user's access to a secret", ) remove_secret_user_action = remove_secret_parser.add_argument( - "user", help="the name of the group", type=user_name_type + "user", + help="the name of the group", + type=user_name_type, ) add_dynamic_completer(remove_secret_user_action, complete_users) remove_secrets_action = remove_secret_parser.add_argument( - "secret", help="the name of the secret", type=secret_name_type + "secret", + help="the name of the secret", + type=secret_name_type, ) add_dynamic_completer(remove_secrets_action, complete_secrets) remove_secret_parser.set_defaults(func=remove_secret_command) add_key_parser = subparser.add_parser( - "add-key", help="add one or more keys for a user" + "add-key", + help="add one or more keys for a user", ) add_key_user_action = add_key_parser.add_argument( - "user", help="the name of the user", type=user_name_type + "user", + help="the name of the user", + type=user_name_type, ) add_dynamic_completer(add_key_user_action, complete_users) _add_key_flags(add_key_parser) add_key_parser.set_defaults(func=add_key_command) remove_key_parser = subparser.add_parser( - "remove-key", help="remove one or more keys for a user" + "remove-key", + help="remove one or more keys for a user", ) remove_key_user_action = remove_key_parser.add_argument( - "user", help="the name of the user", type=user_name_type + "user", + help="the name of the user", + type=user_name_type, ) add_dynamic_completer(remove_key_user_action, complete_users) _add_key_flags(remove_key_parser) diff --git a/pkgs/clan-cli/clan_cli/ssh/deploy_info.py b/pkgs/clan-cli/clan_cli/ssh/deploy_info.py index 79ecb0758..cb3e0bf8d 100644 --- a/pkgs/clan-cli/clan_cli/ssh/deploy_info.py +++ b/pkgs/clan-cli/clan_cli/ssh/deploy_info.py @@ -64,7 +64,8 @@ def ssh_command(args: argparse.Namespace) -> None: ssh_options[name] = value remote = remote.override( - host_key_check=args.host_key_check, ssh_options=ssh_options + host_key_check=args.host_key_check, + ssh_options=ssh_options, ) if args.remote_command: remote.interactive_ssh(args.remote_command) diff --git a/pkgs/clan-cli/clan_cli/ssh/test_deploy_info.py b/pkgs/clan-cli/clan_cli/ssh/test_deploy_info.py index 7849585d0..add133eed 100644 --- a/pkgs/clan-cli/clan_cli/ssh/test_deploy_info.py +++ b/pkgs/clan-cli/clan_cli/ssh/test_deploy_info.py @@ -147,7 +147,7 @@ def test_ssh_shell_from_deploy( str(success_txt), "&&", "exit 0", - ] + ], ) assert success_txt.exists() diff --git a/pkgs/clan-cli/clan_cli/state/list.py b/pkgs/clan-cli/clan_cli/state/list.py index 387b64f2a..986383d49 100644 --- a/pkgs/clan-cli/clan_cli/state/list.py +++ b/pkgs/clan-cli/clan_cli/state/list.py @@ -25,7 +25,7 
@@ def list_state_folders(machine: Machine, service: None | str = None) -> None: [ f"{flake}#nixosConfigurations.{machine.name}.config.clan.core.state", "--json", - ] + ], ) res = "{}" @@ -80,7 +80,7 @@ def list_state_folders(machine: Machine, service: None | str = None) -> None: if post_restore: print(f" postRestoreCommand: {post_restore}") - print("") + print() def list_command(args: argparse.Namespace) -> None: diff --git a/pkgs/clan-cli/clan_cli/state/list_test.py b/pkgs/clan-cli/clan_cli/state/list_test.py index c8576fe4a..05d49b08e 100644 --- a/pkgs/clan-cli/clan_cli/state/list_test.py +++ b/pkgs/clan-cli/clan_cli/state/list_test.py @@ -7,7 +7,8 @@ from clan_cli.tests.stdout import CaptureOutput @pytest.mark.with_core def test_state_list_vm1( - test_flake_with_core: FlakeForTest, capture_output: CaptureOutput + test_flake_with_core: FlakeForTest, + capture_output: CaptureOutput, ) -> None: with capture_output as output: cli.run(["state", "list", "vm1", "--flake", str(test_flake_with_core.path)]) @@ -19,7 +20,8 @@ def test_state_list_vm1( @pytest.mark.with_core def test_state_list_vm2( - test_flake_with_core: FlakeForTest, capture_output: CaptureOutput + test_flake_with_core: FlakeForTest, + capture_output: CaptureOutput, ) -> None: with capture_output as output: cli.run(["state", "list", "vm2", "--flake", str(test_flake_with_core.path)]) diff --git a/pkgs/clan-cli/clan_cli/templates/__init__.py b/pkgs/clan-cli/clan_cli/templates/__init__.py index 6d23c4f4e..a66a1eebe 100644 --- a/pkgs/clan-cli/clan_cli/templates/__init__.py +++ b/pkgs/clan-cli/clan_cli/templates/__init__.py @@ -15,7 +15,8 @@ def register_parser(parser: argparse.ArgumentParser) -> None: ) list_parser = subparser.add_parser("list", help="List available templates") apply_parser = subparser.add_parser( - "apply", help="Apply a template of the specified type" + "apply", + help="Apply a template of the specified type", ) register_list_parser(list_parser) register_apply_parser(apply_parser) diff --git a/pkgs/clan-cli/clan_cli/templates/list.py b/pkgs/clan-cli/clan_cli/templates/list.py index c93fcdfb7..c3d281f59 100644 --- a/pkgs/clan-cli/clan_cli/templates/list.py +++ b/pkgs/clan-cli/clan_cli/templates/list.py @@ -12,10 +12,11 @@ def list_command(args: argparse.Namespace) -> None: # Display all templates for i, (template_type, _builtin_template_set) in enumerate( - templates.builtins.items() + templates.builtins.items(), ): builtin_template_set: TemplateClanType | None = templates.builtins.get( - template_type, None + template_type, + None, ) # type: ignore if not builtin_template_set: continue @@ -32,7 +33,8 @@ def list_command(args: argparse.Namespace) -> None: for i, (input_name, input_templates) in enumerate(templates.custom.items()): custom_templates: TemplateClanType | None = input_templates.get( - template_type, None + template_type, + None, ) # type: ignore if not custom_templates: continue @@ -48,11 +50,11 @@ def list_command(args: argparse.Namespace) -> None: is_last_template = i == len(custom_templates.items()) - 1 if not is_last_template: print( - f"{prefix} ├── {name}: {template.get('description', 'no description')}" + f"{prefix} ├── {name}: {template.get('description', 'no description')}", ) else: print( - f"{prefix} └── {name}: {template.get('description', 'no description')}" + f"{prefix} └── {name}: {template.get('description', 'no description')}", ) diff --git a/pkgs/clan-cli/clan_cli/templates/list_test.py b/pkgs/clan-cli/clan_cli/templates/list_test.py index 7d9ff8b98..361ba42c8 100644 --- 
a/pkgs/clan-cli/clan_cli/templates/list_test.py +++ b/pkgs/clan-cli/clan_cli/templates/list_test.py @@ -9,7 +9,8 @@ from clan_cli.tests.stdout import CaptureOutput @pytest.mark.with_core def test_templates_list( - test_flake_with_core: FlakeForTest, capture_output: CaptureOutput + test_flake_with_core: FlakeForTest, + capture_output: CaptureOutput, ) -> None: with capture_output as output: cli.run(["templates", "list", "--flake", str(test_flake_with_core.path)]) @@ -26,7 +27,8 @@ def test_templates_list( @pytest.mark.with_core def test_templates_list_outside_clan( - capture_output: CaptureOutput, temp_dir: Path + capture_output: CaptureOutput, + temp_dir: Path, ) -> None: """Test templates list command when run outside a clan directory.""" with capture_output as output: diff --git a/pkgs/clan-cli/clan_cli/tests/age_keys.py b/pkgs/clan-cli/clan_cli/tests/age_keys.py index 5c38ad35a..e52952ef9 100644 --- a/pkgs/clan-cli/clan_cli/tests/age_keys.py +++ b/pkgs/clan-cli/clan_cli/tests/age_keys.py @@ -37,7 +37,7 @@ class SopsSetup: "--user", self.user, "--no-interactive", - ] + ], ) diff --git a/pkgs/clan-cli/clan_cli/tests/command.py b/pkgs/clan-cli/clan_cli/tests/command.py index 8284c7e69..f5a1f4911 100644 --- a/pkgs/clan-cli/clan_cli/tests/command.py +++ b/pkgs/clan-cli/clan_cli/tests/command.py @@ -54,8 +54,7 @@ class Command: @pytest.fixture def command() -> Iterator[Command]: - """ - Starts a background command. The process is automatically terminated in the end. + """Starts a background command. The process is automatically terminated in the end. >>> p = command.run(["some", "daemon"]) >>> print(p.pid) """ diff --git a/pkgs/clan-cli/clan_cli/tests/fixtures_flakes.py b/pkgs/clan-cli/clan_cli/tests/fixtures_flakes.py index e79371074..6198ac25d 100644 --- a/pkgs/clan-cli/clan_cli/tests/fixtures_flakes.py +++ b/pkgs/clan-cli/clan_cli/tests/fixtures_flakes.py @@ -39,8 +39,7 @@ def def_value() -> defaultdict: def nested_dict() -> defaultdict: - """ - Creates a defaultdict that allows for arbitrary levels of nesting. + """Creates a defaultdict that allows for arbitrary levels of nesting. For example: d['a']['b']['c'] = value """ return defaultdict(def_value) @@ -75,7 +74,8 @@ def substitute( if clan_core_replacement: line = line.replace("__CLAN_CORE__", clan_core_replacement) line = line.replace( - "git+https://git.clan.lol/clan/clan-core", clan_core_replacement + "git+https://git.clan.lol/clan/clan-core", + clan_core_replacement, ) line = line.replace( "https://git.clan.lol/clan/clan-core/archive/main.tar.gz", @@ -133,8 +133,7 @@ def init_git(monkeypatch: pytest.MonkeyPatch, flake: Path) -> None: class ClanFlake: - """ - This class holds all attributes for generating a clan flake. + """This class holds all attributes for generating a clan flake. For example, inventory and machine configs can be set via self.inventory and self.machines["my_machine"] = {...}. Whenever a flake's configuration is changed, it needs to be re-generated by calling refresh(). @@ -179,7 +178,7 @@ class ClanFlake: if not suppress_tmp_home_warning: if "/tmp" not in str(os.environ.get("HOME")): log.warning( - f"!! $HOME does not point to a temp directory!! HOME={os.environ['HOME']}" + f"!! $HOME does not point to a temp directory!! 
HOME={os.environ['HOME']}", ) def copy( @@ -236,7 +235,7 @@ class ClanFlake: inventory_path = self.path / "inventory.json" inventory_path.write_text(json.dumps(self.inventory, indent=2)) imports = "\n".join( - [f"clan-core.clanModules.{module}" for module in self.clan_modules] + [f"clan-core.clanModules.{module}" for module in self.clan_modules], ) for machine_name, machine_config in self.machines.items(): configuration_nix = ( @@ -252,7 +251,7 @@ class ClanFlake: {imports} ]; }} - """ + """, ) machine = Machine(name=machine_name, flake=Flake(str(self.path))) set_machine_settings(machine, machine_config) @@ -309,8 +308,7 @@ def create_flake( machine_configs: dict[str, dict] | None = None, inventory_expr: str = r"{}", ) -> Iterator[FlakeForTest]: - """ - Creates a flake with the given name and machines. + """Creates a flake with the given name and machines. The machine names map to the machines in ./test_machines """ if machine_configs is None: @@ -372,7 +370,7 @@ def create_flake( if "/tmp" not in str(os.environ.get("HOME")): log.warning( - f"!! $HOME does not point to a temp directory!! HOME={os.environ['HOME']}" + f"!! $HOME does not point to a temp directory!! HOME={os.environ['HOME']}", ) init_git(monkeypatch, flake) @@ -382,7 +380,8 @@ def create_flake( @pytest.fixture def test_flake( - monkeypatch: pytest.MonkeyPatch, temporary_home: Path + monkeypatch: pytest.MonkeyPatch, + temporary_home: Path, ) -> Iterator[FlakeForTest]: yield from create_flake( temporary_home=temporary_home, @@ -429,8 +428,7 @@ def writable_clan_core( clan_core: Path, tmp_path: Path, ) -> Path: - """ - Creates a writable copy of clan_core in a temporary directory. + """Creates a writable copy of clan_core in a temporary directory. If clan_core is a git repo, copies tracked files and uncommitted changes. Removes vars/ and sops/ directories if they exist. """ @@ -454,7 +452,9 @@ def writable_clan_core( # Copy .git directory to maintain git functionality if (clan_core / ".git").is_dir(): shutil.copytree( - clan_core / ".git", temp_flake / ".git", ignore_dangling_symlinks=True + clan_core / ".git", + temp_flake / ".git", + ignore_dangling_symlinks=True, ) else: # It's a git file (for submodules/worktrees) @@ -478,9 +478,7 @@ def vm_test_flake( clan_core: Path, tmp_path: Path, ) -> Path: - """ - Creates a test flake that imports the VM test nixOS modules from clan-core. 
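# Editor's note -- the writable_clan_core fixture described above copies a
# checked-out flake into a temporary directory so tests can mutate it. A
# simplified sketch of the core step -- copying only git-tracked files -- is
# shown below; the real fixture also preserves .git and uncommitted changes,
# which this sketch skips.
import shutil
import subprocess
from pathlib import Path


def copy_tracked_files(repo: Path, dest: Path) -> Path:
    dest.mkdir(parents=True, exist_ok=True)
    out = subprocess.run(
        ["git", "-C", str(repo), "ls-files", "-z"],
        capture_output=True,
        check=True,
    ).stdout
    for rel in out.decode().split("\0"):
        if not rel:
            continue
        target = dest / rel
        target.parent.mkdir(parents=True, exist_ok=True)
        shutil.copy2(repo / rel, target)
    return dest


# copy_tracked_files(Path("."), Path("/tmp/writable-copy"))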
- """ + """Creates a test flake that imports the VM test nixOS modules from clan-core.""" test_flake_dir = tmp_path / "test-flake" test_flake_dir.mkdir(parents=True) diff --git a/pkgs/clan-cli/clan_cli/tests/hosts.py b/pkgs/clan-cli/clan_cli/tests/hosts.py index 9c64381fa..b4c128c39 100644 --- a/pkgs/clan-cli/clan_cli/tests/hosts.py +++ b/pkgs/clan-cli/clan_cli/tests/hosts.py @@ -18,7 +18,7 @@ def hosts(sshd: Sshd) -> list[Remote]: private_key=Path(sshd.key), host_key_check="none", command_prefix="local_test", - ) + ), ] return group diff --git a/pkgs/clan-cli/clan_cli/tests/root.py b/pkgs/clan-cli/clan_cli/tests/root.py index 4d32eff0a..fa6648476 100644 --- a/pkgs/clan-cli/clan_cli/tests/root.py +++ b/pkgs/clan-cli/clan_cli/tests/root.py @@ -13,31 +13,23 @@ else: @pytest.fixture(scope="session") def project_root() -> Path: - """ - Root directory the clan-cli - """ + """Root directory the clan-cli""" return PROJECT_ROOT @pytest.fixture(scope="session") def test_root() -> Path: - """ - Root directory of the tests - """ + """Root directory of the tests""" return TEST_ROOT @pytest.fixture(scope="session") def test_lib_root() -> Path: - """ - Root directory of the clan-lib tests - """ + """Root directory of the clan-lib tests""" return PROJECT_ROOT.parent / "clan_lib" / "tests" @pytest.fixture(scope="session") def clan_core() -> Path: - """ - Directory of the clan-core flake - """ + """Directory of the clan-core flake""" return CLAN_CORE diff --git a/pkgs/clan-cli/clan_cli/tests/sshd.py b/pkgs/clan-cli/clan_cli/tests/sshd.py index edd84d0c5..9696a7f30 100644 --- a/pkgs/clan-cli/clan_cli/tests/sshd.py +++ b/pkgs/clan-cli/clan_cli/tests/sshd.py @@ -29,7 +29,12 @@ class Sshd: class SshdConfig: def __init__( - self, path: Path, login_shell: Path, key: str, preload_lib: Path, log_file: Path + self, + path: Path, + login_shell: Path, + key: str, + preload_lib: Path, + log_file: Path, ) -> None: self.path = path self.login_shell = login_shell @@ -53,7 +58,7 @@ def sshd_config(test_root: Path) -> Iterator[SshdConfig]: sftp_server = sshdp.parent.parent / "libexec" / "sftp-server" assert sftp_server is not None content = string.Template(template).substitute( - {"host_key": host_key, "sftp_server": sftp_server} + {"host_key": host_key, "sftp_server": sftp_server}, ) config = tmpdir / "sshd_config" config.write_text(content) @@ -74,7 +79,7 @@ if [[ -f /etc/profile ]]; then fi export PATH="{bin_path}:{path}" exec {bash} -l "${{@}}" - """ + """, ) login_shell.chmod(0o755) @@ -82,7 +87,7 @@ exec {bash} -l "${{@}}" f"""#!{bash} shift exec "${{@}}" - """ + """, ) fake_sudo.chmod(0o755) diff --git a/pkgs/clan-cli/clan_cli/tests/test_api_dataclass_compat.py b/pkgs/clan-cli/clan_cli/tests/test_api_dataclass_compat.py index bb13ec460..882140eec 100644 --- a/pkgs/clan-cli/clan_cli/tests/test_api_dataclass_compat.py +++ b/pkgs/clan-cli/clan_cli/tests/test_api_dataclass_compat.py @@ -21,16 +21,17 @@ def should_skip(file_path: Path, excludes: list[Path]) -> bool: def find_dataclasses_in_directory( - directory: Path, exclude_paths: list[str] | None = None + directory: Path, + exclude_paths: list[str] | None = None, ) -> list[tuple[Path, str]]: - """ - Find all dataclass classes in all Python files within a nested directory. + """Find all dataclass classes in all Python files within a nested directory. Args: directory (str): The root directory to start searching from. Returns: List[Tuple[str, str]]: A list of tuples containing the file path and the dataclass name. 
+ """ if exclude_paths is None: exclude_paths = [] @@ -69,10 +70,11 @@ def find_dataclasses_in_directory( def load_dataclass_from_file( - file_path: Path, class_name: str, root_dir: str + file_path: Path, + class_name: str, + root_dir: str, ) -> type | None: - """ - Load a dataclass from a given file path. + """Load a dataclass from a given file path. Args: file_path (str): Path to the file. @@ -80,6 +82,7 @@ def load_dataclass_from_file( Returns: List[Type]: The dataclass type if found, else an empty list. + """ module_name = ( os.path.relpath(file_path, root_dir).replace(os.path.sep, ".").rstrip(".py") @@ -109,15 +112,14 @@ def load_dataclass_from_file( dataclass_type = getattr(module, class_name, None) if dataclass_type and is_dataclass(dataclass_type): - return cast(type, dataclass_type) + return cast("type", dataclass_type) msg = f"Could not load dataclass {class_name} from file: {file_path}" raise ClanError(msg) def test_all_dataclasses() -> None: - """ - This Test ensures that all dataclasses are compatible with the API. + """This Test ensures that all dataclasses are compatible with the API. It will load all dataclasses from the clan_cli directory and generate a JSON schema for each of them. @@ -125,7 +127,6 @@ def test_all_dataclasses() -> None: It will fail if any dataclass cannot be converted to JSON schema. This means the dataclass in its current form is not compatible with the API. """ - # Excludes: # - API includes Type Generic wrappers, that are not known in the init file. excludes = [ diff --git a/pkgs/clan-cli/clan_cli/tests/test_backups.py b/pkgs/clan-cli/clan_cli/tests/test_backups.py index c8be2bc1e..64b688413 100644 --- a/pkgs/clan-cli/clan_cli/tests/test_backups.py +++ b/pkgs/clan-cli/clan_cli/tests/test_backups.py @@ -14,5 +14,5 @@ def test_backups( "--flake", str(test_flake_with_core.path), "vm1", - ] + ], ) diff --git a/pkgs/clan-cli/clan_cli/tests/test_create_flake.py b/pkgs/clan-cli/clan_cli/tests/test_create_flake.py index 4c6a21799..fa0bafa75 100644 --- a/pkgs/clan-cli/clan_cli/tests/test_create_flake.py +++ b/pkgs/clan-cli/clan_cli/tests/test_create_flake.py @@ -139,7 +139,7 @@ def test_create_flake_fallback_from_non_clan_directory( monkeypatch.setenv("LOGNAME", "testuser") cli.run( - ["flakes", "create", str(new_clan_dir), "--template=default", "--no-update"] + ["flakes", "create", str(new_clan_dir), "--template=default", "--no-update"], ) assert (new_clan_dir / "flake.nix").exists() @@ -157,7 +157,7 @@ def test_create_flake_with_local_template_reference( # TODO: should error with: localFlake does not export myLocalTemplate clan template cli.run( - ["flakes", "create", str(new_clan_dir), "--template=.#default", "--no-update"] + ["flakes", "create", str(new_clan_dir), "--template=.#default", "--no-update"], ) assert (new_clan_dir / "flake.nix").exists() diff --git a/pkgs/clan-cli/clan_cli/tests/test_flakes_cli.py b/pkgs/clan-cli/clan_cli/tests/test_flakes_cli.py index 357740a36..36be3ab84 100644 --- a/pkgs/clan-cli/clan_cli/tests/test_flakes_cli.py +++ b/pkgs/clan-cli/clan_cli/tests/test_flakes_cli.py @@ -1,17 +1,13 @@ -from typing import TYPE_CHECKING - import pytest from clan_cli.tests.fixtures_flakes import FlakeForTest from clan_cli.tests.helpers import cli from clan_cli.tests.stdout import CaptureOutput -if TYPE_CHECKING: - pass - @pytest.mark.with_core def test_flakes_inspect( - test_flake_with_core: FlakeForTest, capture_output: CaptureOutput + test_flake_with_core: FlakeForTest, + capture_output: CaptureOutput, ) -> None: with capture_output as output: 
cli.run( @@ -22,6 +18,6 @@ def test_flakes_inspect( str(test_flake_with_core.path), "--machine", "vm1", - ] + ], ) assert "Icon" in output.out diff --git a/pkgs/clan-cli/clan_cli/tests/test_git.py b/pkgs/clan-cli/clan_cli/tests/test_git.py index 760f1883a..063aab1fa 100644 --- a/pkgs/clan-cli/clan_cli/tests/test_git.py +++ b/pkgs/clan-cli/clan_cli/tests/test_git.py @@ -19,7 +19,8 @@ def test_commit_file(git_repo: Path) -> None: # check that the latest commit message is correct assert ( subprocess.check_output( - ["git", "log", "-1", "--pretty=%B"], cwd=git_repo + ["git", "log", "-1", "--pretty=%B"], + cwd=git_repo, ).decode("utf-8") == "test commit\n\n" ) @@ -59,7 +60,8 @@ def test_clan_flake_in_subdir(git_repo: Path, monkeypatch: pytest.MonkeyPatch) - # check that the latest commit message is correct assert ( subprocess.check_output( - ["git", "log", "-1", "--pretty=%B"], cwd=git_repo + ["git", "log", "-1", "--pretty=%B"], + cwd=git_repo, ).decode("utf-8") == "test commit\n\n" ) diff --git a/pkgs/clan-cli/clan_cli/tests/test_import_sops_cli.py b/pkgs/clan-cli/clan_cli/tests/test_import_sops_cli.py index 0ad719be2..4f5d49e08 100644 --- a/pkgs/clan-cli/clan_cli/tests/test_import_sops_cli.py +++ b/pkgs/clan-cli/clan_cli/tests/test_import_sops_cli.py @@ -28,7 +28,7 @@ def test_import_sops( str(test_flake_with_core.path), "machine1", age_keys[0].pubkey, - ] + ], ) cli.run( [ @@ -39,7 +39,7 @@ def test_import_sops( str(test_flake_with_core.path), "user1", age_keys[1].pubkey, - ] + ], ) cli.run( [ @@ -50,7 +50,7 @@ def test_import_sops( str(test_flake_with_core.path), "user2", age_keys[2].pubkey, - ] + ], ) cli.run( [ @@ -61,7 +61,7 @@ def test_import_sops( str(test_flake_with_core.path), "group1", "user1", - ] + ], ) cli.run( [ @@ -72,7 +72,7 @@ def test_import_sops( str(test_flake_with_core.path), "group1", "user2", - ] + ], ) # To edit: @@ -98,6 +98,6 @@ def test_import_sops( with capture_output as output: cli.run( - ["secrets", "get", "--flake", str(test_flake_with_core.path), "secret-key"] + ["secrets", "get", "--flake", str(test_flake_with_core.path), "secret-key"], ) assert output.out == "secret-value" diff --git a/pkgs/clan-cli/clan_cli/tests/test_inventory_serde.py b/pkgs/clan-cli/clan_cli/tests/test_inventory_serde.py index 63648373c..8f60df595 100644 --- a/pkgs/clan-cli/clan_cli/tests/test_inventory_serde.py +++ b/pkgs/clan-cli/clan_cli/tests/test_inventory_serde.py @@ -16,7 +16,7 @@ from clan_lib.persist.inventory_store import InventoryStore "inventory_expr": r"""{ machines.jon = {}; machines.sara = {}; - }""" + }""", }, # TODO: Test # - Function modules @@ -38,14 +38,13 @@ from clan_lib.persist.inventory_store import InventoryStore def test_inventory_deserialize_variants( test_flake_with_core: FlakeForTest, ) -> None: - """ - Testing different inventory deserializations + """Testing different inventory deserializations Inventory should always be deserializable to a dict """ inventory_store = InventoryStore(Flake(str(test_flake_with_core.path))) # Cast the inventory to a dict for the following assertions - inventory = cast(dict[str, Any], inventory_store.read()) + inventory = cast("dict[str, Any]", inventory_store.read()) # Check that the inventory is a dict assert isinstance(inventory, dict) diff --git a/pkgs/clan-cli/clan_cli/tests/test_machines_cli.py b/pkgs/clan-cli/clan_cli/tests/test_machines_cli.py index 952c74056..13031ef55 100644 --- a/pkgs/clan-cli/clan_cli/tests/test_machines_cli.py +++ b/pkgs/clan-cli/clan_cli/tests/test_machines_cli.py @@ -27,7 +27,7 @@ def 
test_machine_subcommands( "machine1", "--tags", "vm", - ] + ], ) # Usually this is done by `inventory.write` but we created a separate flake object in the test that now holds stale data inventory_store._flake.invalidate_cache() @@ -47,7 +47,7 @@ def test_machine_subcommands( assert "vm2" in output.out cli.run( - ["machines", "delete", "--flake", str(test_flake_with_core.path), "machine1"] + ["machines", "delete", "--flake", str(test_flake_with_core.path), "machine1"], ) # See comment above inventory_store._flake.invalidate_cache() @@ -105,7 +105,7 @@ def test_machines_update_nonexistent_machine( "--flake", str(test_flake_with_core.path), "nonexistent-machine", - ] + ], ) error_message = str(exc_info.value) @@ -130,7 +130,7 @@ def test_machines_update_typo_in_machine_name( "--flake", str(test_flake_with_core.path), "v1", # typo of "vm1" - ] + ], ) error_message = str(exc_info.value) diff --git a/pkgs/clan-cli/clan_cli/tests/test_secrets_cli.py b/pkgs/clan-cli/clan_cli/tests/test_secrets_cli.py index 084101b0e..ef80a661a 100644 --- a/pkgs/clan-cli/clan_cli/tests/test_secrets_cli.py +++ b/pkgs/clan-cli/clan_cli/tests/test_secrets_cli.py @@ -51,7 +51,7 @@ def _test_identities( str(test_flake_with_core.path), "foo", age_keys[0].pubkey, - ] + ], ) assert (sops_folder / what / "foo" / "key.json").exists() @@ -64,7 +64,7 @@ def _test_identities( str(test_flake_with_core.path), "admin", admin_age_key.pubkey, - ] + ], ) with pytest.raises(ClanError): # raises "foo already exists" @@ -77,7 +77,7 @@ def _test_identities( str(test_flake_with_core.path), "foo", age_keys[0].pubkey, - ] + ], ) with monkeypatch.context() as m: @@ -93,7 +93,7 @@ def _test_identities( f"--{what_singular}", "foo", test_secret_name, - ] + ], ) assert_secrets_file_recipients( @@ -114,7 +114,7 @@ def _test_identities( "-f", "foo", age_keys[1].privkey, - ] + ], ) assert_secrets_file_recipients( test_flake_with_core.path, @@ -131,7 +131,7 @@ def _test_identities( "--flake", str(test_flake_with_core.path), "foo", - ] + ], ) assert age_keys[1].pubkey in output.out @@ -140,7 +140,7 @@ def _test_identities( assert "foo" in output.out cli.run( - ["secrets", what, "remove", "--flake", str(test_flake_with_core.path), "foo"] + ["secrets", what, "remove", "--flake", str(test_flake_with_core.path), "foo"], ) assert not (sops_folder / what / "foo" / "key.json").exists() @@ -153,7 +153,7 @@ def _test_identities( "--flake", str(test_flake_with_core.path), "foo", - ] + ], ) with capture_output as output: @@ -178,7 +178,11 @@ def test_users( ) -> None: with monkeypatch.context(): _test_identities( - "users", test_flake_with_core, capture_output, age_keys, monkeypatch + "users", + test_flake_with_core, + capture_output, + age_keys, + monkeypatch, ) @@ -208,7 +212,7 @@ def test_multiple_user_keys( str(test_flake_with_core.path), user, *[f"--age-key={key.pubkey}" for key in user_keys], - ] + ], ) assert (sops_folder / "users" / user / "key.json").exists() @@ -222,7 +226,7 @@ def test_multiple_user_keys( "--flake", str(test_flake_with_core.path), user, - ] + ], ) for user_key in user_keys: @@ -249,7 +253,7 @@ def test_multiple_user_keys( "--flake", str(test_flake_with_core.path), secret_name, - ] + ], ) # check the secret has each of our user's keys as a recipient @@ -268,7 +272,7 @@ def test_multiple_user_keys( "--flake", str(test_flake_with_core.path), secret_name, - ] + ], ) assert secret_value in output.out @@ -295,7 +299,7 @@ def test_multiple_user_keys( str(test_flake_with_core.path), user, key_to_remove.pubkey, - ] + ], ) # check the secret 
has been updated @@ -315,7 +319,7 @@ def test_multiple_user_keys( str(test_flake_with_core.path), user, key_to_remove.pubkey, - ] + ], ) # check the secret has been updated @@ -334,7 +338,11 @@ def test_machines( monkeypatch: pytest.MonkeyPatch, ) -> None: _test_identities( - "machines", test_flake_with_core, capture_output, age_keys, monkeypatch + "machines", + test_flake_with_core, + capture_output, + age_keys, + monkeypatch, ) @@ -347,7 +355,7 @@ def test_groups( ) -> None: with capture_output as output: cli.run( - ["secrets", "groups", "list", "--flake", str(test_flake_with_core.path)] + ["secrets", "groups", "list", "--flake", str(test_flake_with_core.path)], ) assert output.out == "" @@ -365,7 +373,7 @@ def test_groups( str(test_flake_with_core.path), "group1", "machine1", - ] + ], ) with pytest.raises(ClanError): # user does not exist yet cli.run( @@ -377,7 +385,7 @@ def test_groups( str(test_flake_with_core.path), "groupb1", "user1", - ] + ], ) cli.run( [ @@ -388,7 +396,7 @@ def test_groups( str(test_flake_with_core.path), "machine1", machine1_age_key.pubkey, - ] + ], ) cli.run( [ @@ -399,7 +407,7 @@ def test_groups( str(test_flake_with_core.path), "group1", "machine1", - ] + ], ) # Should this fail? @@ -412,7 +420,7 @@ def test_groups( str(test_flake_with_core.path), "group1", "machine1", - ] + ], ) cli.run( @@ -424,7 +432,7 @@ def test_groups( str(test_flake_with_core.path), "user1", user1_age_key.pubkey, - ] + ], ) cli.run( [ @@ -435,7 +443,7 @@ def test_groups( str(test_flake_with_core.path), "admin", admin_age_key.pubkey, - ] + ], ) cli.run( [ @@ -446,12 +454,12 @@ def test_groups( str(test_flake_with_core.path), "group1", "user1", - ] + ], ) with capture_output as output: cli.run( - ["secrets", "groups", "list", "--flake", str(test_flake_with_core.path)] + ["secrets", "groups", "list", "--flake", str(test_flake_with_core.path)], ) out = output.out assert "user1" in out @@ -472,7 +480,7 @@ def test_groups( "--group", "group1", secret_name, - ] + ], ) assert_secrets_file_recipients( @@ -498,7 +506,7 @@ def test_groups( str(test_flake_with_core.path), "group1", "user1", - ] + ], ) assert_secrets_file_recipients( test_flake_with_core.path, @@ -520,7 +528,7 @@ def test_groups( str(test_flake_with_core.path), "group1", "user1", - ] + ], ) assert_secrets_file_recipients( test_flake_with_core.path, @@ -541,7 +549,7 @@ def test_groups( "--flake", str(test_flake_with_core.path), "user1", - ] + ], ) assert_secrets_file_recipients( test_flake_with_core.path, @@ -562,7 +570,7 @@ def test_groups( str(test_flake_with_core.path), "group1", "machine1", - ] + ], ) assert_secrets_file_recipients( test_flake_with_core.path, @@ -629,13 +637,15 @@ def test_secrets( # Generate a new key for the clan monkeypatch.setenv( - "SOPS_AGE_KEY_FILE", str(test_flake_with_core.path / ".." / "age.key") + "SOPS_AGE_KEY_FILE", + str(test_flake_with_core.path / ".." 
/ "age.key"), ) with patch( - "clan_cli.secrets.key.generate_private_key", wraps=generate_private_key + "clan_cli.secrets.key.generate_private_key", + wraps=generate_private_key, ) as spy: cli.run( - ["secrets", "key", "generate", "--flake", str(test_flake_with_core.path)] + ["secrets", "key", "generate", "--flake", str(test_flake_with_core.path)], ) assert spy.call_count == 1 @@ -655,18 +665,24 @@ def test_secrets( str(test_flake_with_core.path), "testuser", key["publickey"], - ] + ], ) with pytest.raises(ClanError): # does not exist yet cli.run( - ["secrets", "get", "--flake", str(test_flake_with_core.path), "nonexisting"] + [ + "secrets", + "get", + "--flake", + str(test_flake_with_core.path), + "nonexisting", + ], ) monkeypatch.setenv("SOPS_NIX_SECRET", "foo") cli.run(["secrets", "set", "--flake", str(test_flake_with_core.path), "initialkey"]) with capture_output as output: cli.run( - ["secrets", "get", "--flake", str(test_flake_with_core.path), "initialkey"] + ["secrets", "get", "--flake", str(test_flake_with_core.path), "initialkey"], ) assert output.out == "foo" with capture_output as output: @@ -684,7 +700,7 @@ def test_secrets( "--flake", str(test_flake_with_core.path), "initialkey", - ] + ], ) monkeypatch.delenv("EDITOR") @@ -696,7 +712,7 @@ def test_secrets( str(test_flake_with_core.path), "initialkey", "key", - ] + ], ) with capture_output as output: @@ -711,7 +727,7 @@ def test_secrets( "--flake", str(test_flake_with_core.path), "nonexisting", - ] + ], ) assert output.out == "" @@ -730,7 +746,7 @@ def test_secrets( str(test_flake_with_core.path), "machine1", age_keys[1].pubkey, - ] + ], ) cli.run( [ @@ -741,18 +757,18 @@ def test_secrets( str(test_flake_with_core.path), "machine1", "key", - ] + ], ) with capture_output as output: cli.run( - ["secrets", "machines", "list", "--flake", str(test_flake_with_core.path)] + ["secrets", "machines", "list", "--flake", str(test_flake_with_core.path)], ) assert output.out == "machine1\n" with use_age_key(age_keys[1].privkey, monkeypatch): with capture_output as output: cli.run( - ["secrets", "get", "--flake", str(test_flake_with_core.path), "key"] + ["secrets", "get", "--flake", str(test_flake_with_core.path), "key"], ) assert output.out == "foo" @@ -767,14 +783,14 @@ def test_secrets( "-f", "machine1", age_keys[0].privkey, - ] + ], ) # should also rotate the encrypted secret with use_age_key(age_keys[0].privkey, monkeypatch): with capture_output as output: cli.run( - ["secrets", "get", "--flake", str(test_flake_with_core.path), "key"] + ["secrets", "get", "--flake", str(test_flake_with_core.path), "key"], ) assert output.out == "foo" @@ -787,7 +803,7 @@ def test_secrets( str(test_flake_with_core.path), "machine1", "key", - ] + ], ) cli.run( @@ -799,7 +815,7 @@ def test_secrets( str(test_flake_with_core.path), "user1", age_keys[1].pubkey, - ] + ], ) cli.run( [ @@ -810,7 +826,7 @@ def test_secrets( str(test_flake_with_core.path), "user1", "key", - ] + ], ) with capture_output as output, use_age_key(age_keys[1].privkey, monkeypatch): cli.run(["secrets", "get", "--flake", str(test_flake_with_core.path), "key"]) @@ -824,7 +840,7 @@ def test_secrets( str(test_flake_with_core.path), "user1", "key", - ] + ], ) with pytest.raises(ClanError): # does not exist yet @@ -837,7 +853,7 @@ def test_secrets( str(test_flake_with_core.path), "admin-group", "key", - ] + ], ) cli.run( [ @@ -848,7 +864,7 @@ def test_secrets( str(test_flake_with_core.path), "admin-group", "user1", - ] + ], ) cli.run( [ @@ -859,7 +875,7 @@ def test_secrets( 
str(test_flake_with_core.path), "admin-group", owner, - ] + ], ) cli.run( [ @@ -870,7 +886,7 @@ def test_secrets( str(test_flake_with_core.path), "admin-group", "key", - ] + ], ) cli.run( @@ -882,13 +898,13 @@ def test_secrets( "--group", "admin-group", "key2", - ] + ], ) with use_age_key(age_keys[1].privkey, monkeypatch): with capture_output as output: cli.run( - ["secrets", "get", "--flake", str(test_flake_with_core.path), "key"] + ["secrets", "get", "--flake", str(test_flake_with_core.path), "key"], ) assert output.out == "foo" @@ -903,7 +919,7 @@ def test_secrets( "--pgp-key", gpg_key.fingerprint, "user2", - ] + ], ) # Extend group will update secrets @@ -916,13 +932,13 @@ def test_secrets( str(test_flake_with_core.path), "admin-group", "user2", - ] + ], ) with use_gpg_key(gpg_key, monkeypatch): # user2 with capture_output as output: cli.run( - ["secrets", "get", "--flake", str(test_flake_with_core.path), "key"] + ["secrets", "get", "--flake", str(test_flake_with_core.path), "key"], ) assert output.out == "foo" @@ -935,7 +951,7 @@ def test_secrets( str(test_flake_with_core.path), "admin-group", "user2", - ] + ], ) with ( pytest.raises(ClanError), @@ -955,7 +971,7 @@ def test_secrets( str(test_flake_with_core.path), "admin-group", "key", - ] + ], ) cli.run(["secrets", "remove", "--flake", str(test_flake_with_core.path), "key"]) @@ -979,7 +995,8 @@ def test_secrets_key_generate_gpg( with ( capture_output as output, patch( - "clan_cli.secrets.key.generate_private_key", wraps=generate_private_key + "clan_cli.secrets.key.generate_private_key", + wraps=generate_private_key, ) as spy_sops, ): cli.run( @@ -989,7 +1006,7 @@ def test_secrets_key_generate_gpg( "generate", "--flake", str(test_flake_with_core.path), - ] + ], ) assert spy_sops.call_count == 0 # assert "age private key" not in output.out @@ -1000,7 +1017,7 @@ def test_secrets_key_generate_gpg( with capture_output as output: cli.run( - ["secrets", "key", "show", "--flake", str(test_flake_with_core.path)] + ["secrets", "key", "show", "--flake", str(test_flake_with_core.path)], ) key = json.loads(output.out)[0] assert key["type"] == "pgp" @@ -1017,7 +1034,7 @@ def test_secrets_key_generate_gpg( "--pgp-key", gpg_key.fingerprint, "testuser", - ] + ], ) with capture_output as output: @@ -1029,7 +1046,7 @@ def test_secrets_key_generate_gpg( "--flake", str(test_flake_with_core.path), "testuser", - ] + ], ) keys = json.loads(output.out) assert len(keys) == 1 @@ -1048,7 +1065,7 @@ def test_secrets_key_generate_gpg( "--flake", str(test_flake_with_core.path), "secret-name", - ] + ], ) with capture_output as output: cli.run( @@ -1058,7 +1075,7 @@ def test_secrets_key_generate_gpg( "--flake", str(test_flake_with_core.path), "secret-name", - ] + ], ) assert output.out == "secret-value" @@ -1078,7 +1095,7 @@ def test_secrets_users_add_age_plugin_error( str(test_flake_with_core.path), "testuser", "AGE-PLUGIN-YUBIKEY-18P5XCQVZ5FE4WKCW3NJWP", - ] + ], ) error_msg = str(exc_info.value) diff --git a/pkgs/clan-cli/clan_cli/tests/test_secrets_generate.py b/pkgs/clan-cli/clan_cli/tests/test_secrets_generate.py index bdd410c43..f02d781f0 100644 --- a/pkgs/clan-cli/clan_cli/tests/test_secrets_generate.py +++ b/pkgs/clan-cli/clan_cli/tests/test_secrets_generate.py @@ -31,7 +31,7 @@ def test_generate_secret( str(test_flake_with_core.path), "user1", age_keys[0].pubkey, - ] + ], ) cli.run( [ @@ -42,7 +42,7 @@ def test_generate_secret( str(test_flake_with_core.path), "admins", "user1", - ] + ], ) cmd = [ "vars", @@ -56,7 +56,7 @@ def test_generate_secret( 
cli.run(cmd) store1 = SecretStore( - Machine(name="vm1", flake=Flake(str(test_flake_with_core.path))) + Machine(name="vm1", flake=Flake(str(test_flake_with_core.path))), ) assert store1.exists("", "age.key") @@ -97,13 +97,13 @@ def test_generate_secret( str(test_flake_with_core.path), "--generator", "zerotier", - ] + ], ) assert age_key.lstat().st_mtime_ns == age_key_mtime assert identity_secret.lstat().st_mtime_ns == secret1_mtime store2 = SecretStore( - Machine(name="vm2", flake=Flake(str(test_flake_with_core.path))) + Machine(name="vm2", flake=Flake(str(test_flake_with_core.path))), ) assert store2.exists("", "age.key") diff --git a/pkgs/clan-cli/clan_cli/tests/test_ssh_local.py b/pkgs/clan-cli/clan_cli/tests/test_ssh_local.py index e7b008ce9..ba535696c 100644 --- a/pkgs/clan-cli/clan_cli/tests/test_ssh_local.py +++ b/pkgs/clan-cli/clan_cli/tests/test_ssh_local.py @@ -28,7 +28,10 @@ def test_run_environment(runtime: AsyncRuntime) -> None: def test_run_local(runtime: AsyncRuntime) -> None: p1 = runtime.async_run( - None, host.run_local, ["echo", "hello"], RunOpts(log=Log.STDERR) + None, + host.run_local, + ["echo", "hello"], + RunOpts(log=Log.STDERR), ) assert p1.wait().result.stdout == "hello\n" diff --git a/pkgs/clan-cli/clan_cli/tests/test_vars.py b/pkgs/clan-cli/clan_cli/tests/test_vars.py index d0a592aae..3a5793703 100644 --- a/pkgs/clan-cli/clan_cli/tests/test_vars.py +++ b/pkgs/clan-cli/clan_cli/tests/test_vars.py @@ -189,8 +189,8 @@ def test_generate_public_and_secret_vars( nix_eval( [ f"{flake.path}#nixosConfigurations.my_machine.config.clan.core.vars.generators.my_generator.files.value_with_default.value", - ] - ) + ], + ), ).stdout.strip() assert json.loads(value_non_default) == "default_value" @@ -211,14 +211,17 @@ def test_generate_public_and_secret_vars( public_value = get_machine_var(machine, "my_generator/my_value").printable_value assert public_value.startswith("public") shared_value = get_machine_var( - machine, "my_shared_generator/my_shared_value" + machine, + "my_shared_generator/my_shared_value", ).printable_value assert shared_value.startswith("shared") vars_text = stringify_all_vars(machine) flake_obj = Flake(str(flake.path)) my_generator = Generator("my_generator", machine="my_machine", _flake=flake_obj) dependent_generator = Generator( - "dependent_generator", machine="my_machine", _flake=flake_obj + "dependent_generator", + machine="my_machine", + _flake=flake_obj, ) in_repo_store = in_repo.FactStore(flake=flake_obj) assert not in_repo_store.exists(my_generator, "my_secret") @@ -235,8 +238,8 @@ def test_generate_public_and_secret_vars( nix_eval( [ f"{flake.path}#nixosConfigurations.my_machine.config.clan.core.vars.generators.my_generator.files.my_value.value", - ] - ) + ], + ), ).stdout.strip() assert json.loads(vars_eval).startswith("public") @@ -244,14 +247,14 @@ def test_generate_public_and_secret_vars( nix_eval( [ f"{flake.path}#nixosConfigurations.my_machine.config.clan.core.vars.generators.my_generator.files.value_with_default.value", - ] - ) + ], + ), ).stdout.strip() assert json.loads(value_non_default).startswith("non-default") # test regeneration works cli.run( - ["vars", "generate", "--flake", str(flake.path), "my_machine", "--regenerate"] + ["vars", "generate", "--flake", str(flake.path), "my_machine", "--regenerate"], ) # test regeneration without sandbox cli.run( @@ -263,7 +266,7 @@ def test_generate_public_and_secret_vars( "my_machine", "--regenerate", "--no-sandbox", - ] + ], ) # test stuff actually changed after regeneration public_value_new = 
get_machine_var(machine, "my_generator/my_value").printable_value @@ -273,7 +276,8 @@ def test_generate_public_and_secret_vars( "Secret value should change after regeneration" ) shared_value_new = get_machine_var( - machine, "my_shared_generator/my_shared_value" + machine, + "my_shared_generator/my_shared_value", ).printable_value assert shared_value != shared_value_new, ( "Shared value should change after regeneration" @@ -290,18 +294,20 @@ def test_generate_public_and_secret_vars( "--no-sandbox", "--generator", "my_shared_generator", - ] + ], ) # test that the shared generator is regenerated shared_value_after_regeneration = get_machine_var( - machine, "my_shared_generator/my_shared_value" + machine, + "my_shared_generator/my_shared_value", ).printable_value assert shared_value_after_regeneration != shared_value_new, ( "Shared value should change after regenerating my_shared_generator" ) # test that the dependent generator is also regenerated (because it depends on my_shared_generator) secret_value_after_regeneration = sops_store.get( - dependent_generator, "my_secret" + dependent_generator, + "my_secret", ).decode() assert secret_value_after_regeneration != secret_value_new, ( "Dependent generator's secret should change after regenerating my_shared_generator" @@ -311,7 +317,8 @@ def test_generate_public_and_secret_vars( ) # test that my_generator is NOT regenerated (it doesn't depend on my_shared_generator) public_value_after_regeneration = get_machine_var( - machine, "my_generator/my_value" + machine, + "my_generator/my_value", ).printable_value assert public_value_after_regeneration == public_value_new, ( "my_generator value should NOT change after regenerating only my_shared_generator" @@ -348,10 +355,14 @@ def test_generate_secret_var_sops_with_default_group( cli.run(["vars", "generate", "--flake", str(flake.path), "my_machine"]) flake_obj = Flake(str(flake.path)) first_generator = Generator( - "first_generator", machine="my_machine", _flake=flake_obj + "first_generator", + machine="my_machine", + _flake=flake_obj, ) second_generator = Generator( - "second_generator", machine="my_machine", _flake=flake_obj + "second_generator", + machine="my_machine", + _flake=flake_obj, ) in_repo_store = in_repo.FactStore(flake=flake_obj) assert not in_repo_store.exists(first_generator, "my_secret") @@ -372,16 +383,22 @@ def test_generate_secret_var_sops_with_default_group( str(flake.path), "user2", pubkey_user2.pubkey, - ] + ], ) cli.run(["secrets", "groups", "add-user", "my_group", "user2"]) # check if new user can access the secret monkeypatch.setenv("USER", "user2") first_generator_with_share = Generator( - "first_generator", share=False, machine="my_machine", _flake=flake_obj + "first_generator", + share=False, + machine="my_machine", + _flake=flake_obj, ) second_generator_with_share = Generator( - "second_generator", share=False, machine="my_machine", _flake=flake_obj + "second_generator", + share=False, + machine="my_machine", + _flake=flake_obj, ) assert sops_store.user_has_access("user2", first_generator_with_share, "my_secret") assert sops_store.user_has_access("user2", second_generator_with_share, "my_secret") @@ -398,7 +415,7 @@ def test_generate_secret_var_sops_with_default_group( "--force", "user2", pubkey_user3.pubkey, - ] + ], ) monkeypatch.setenv("USER", "user2") assert sops_store.user_has_access("user2", first_generator_with_share, "my_secret") @@ -438,10 +455,16 @@ def test_generated_shared_secret_sops( m2_sops_store = sops.SecretStore(machine2.flake) # Create generators with 
machine context for testing generator_m1 = Generator( - "my_shared_generator", share=True, machine="machine1", _flake=machine1.flake + "my_shared_generator", + share=True, + machine="machine1", + _flake=machine1.flake, ) generator_m2 = Generator( - "my_shared_generator", share=True, machine="machine2", _flake=machine2.flake + "my_shared_generator", + share=True, + machine="machine2", + _flake=machine2.flake, ) assert m1_sops_store.exists(generator_m1, "my_shared_secret") @@ -492,7 +515,9 @@ def test_generate_secret_var_password_store( check=True, ) subprocess.run( - ["git", "config", "user.name", "Test User"], cwd=password_store_dir, check=True + ["git", "config", "user.name", "Test User"], + cwd=password_store_dir, + check=True, ) flake_obj = Flake(str(flake.path)) @@ -502,10 +527,18 @@ def test_generate_secret_var_password_store( assert check_vars(machine.name, machine.flake) store = password_store.SecretStore(flake=flake_obj) my_generator = Generator( - "my_generator", share=False, files=[], machine="my_machine", _flake=flake_obj + "my_generator", + share=False, + files=[], + machine="my_machine", + _flake=flake_obj, ) my_generator_shared = Generator( - "my_generator", share=True, files=[], machine="my_machine", _flake=flake_obj + "my_generator", + share=True, + files=[], + machine="my_machine", + _flake=flake_obj, ) my_shared_generator = Generator( "my_shared_generator", @@ -538,7 +571,11 @@ def test_generate_secret_var_password_store( assert "my_generator/my_secret" in vars_text my_generator = Generator( - "my_generator", share=False, files=[], machine="my_machine", _flake=flake_obj + "my_generator", + share=False, + files=[], + machine="my_machine", + _flake=flake_obj, ) var_name = "my_secret" store.delete(my_generator, var_name) @@ -547,7 +584,11 @@ def test_generate_secret_var_password_store( store.delete_store("my_machine") store.delete_store("my_machine") # check idempotency my_generator2 = Generator( - "my_generator2", share=False, files=[], machine="my_machine", _flake=flake_obj + "my_generator2", + share=False, + files=[], + machine="my_machine", + _flake=flake_obj, ) var_name = "my_secret2" assert not store.exists(my_generator2, var_name) @@ -686,9 +727,7 @@ def test_shared_vars_must_never_depend_on_machine_specific_vars( monkeypatch: pytest.MonkeyPatch, flake_with_sops: ClanFlake, ) -> None: - """ - Ensure that shared vars never depend on machine specific vars. - """ + """Ensure that shared vars never depend on machine specific vars.""" flake = flake_with_sops config = flake.machines["my_machine"] @@ -719,8 +758,7 @@ def test_multi_machine_shared_vars( monkeypatch: pytest.MonkeyPatch, flake_with_sops: ClanFlake, ) -> None: - """ - Ensure that shared vars are regenerated only when they should, and also can be + """Ensure that shared vars are regenerated only when they should, and also can be accessed by all machines that should have access. 
Specifically: @@ -752,10 +790,16 @@ def test_multi_machine_shared_vars( in_repo_store_2 = in_repo.FactStore(machine2.flake) # Create generators with machine context for testing generator_m1 = Generator( - "shared_generator", share=True, machine="machine1", _flake=machine1.flake + "shared_generator", + share=True, + machine="machine1", + _flake=machine1.flake, ) generator_m2 = Generator( - "shared_generator", share=True, machine="machine2", _flake=machine2.flake + "shared_generator", + share=True, + machine="machine2", + _flake=machine2.flake, ) # generate for machine 1 cli.run(["vars", "generate", "--flake", str(flake.path), "machine1"]) @@ -771,7 +815,7 @@ def test_multi_machine_shared_vars( # ensure shared secret stays available for all machines after regeneration # regenerate for machine 1 cli.run( - ["vars", "generate", "--flake", str(flake.path), "machine1", "--regenerate"] + ["vars", "generate", "--flake", str(flake.path), "machine1", "--regenerate"], ) # ensure values changed new_secret_1 = sops_store_1.get(generator_m1, "my_secret") @@ -806,7 +850,7 @@ def test_api_set_prompts( prompt_values={ "my_generator": { "prompt1": "input1", - } + }, }, ) machine = Machine(name="my_machine", flake=Flake(str(flake.path))) @@ -820,14 +864,16 @@ def test_api_set_prompts( prompt_values={ "my_generator": { "prompt1": "input2", - } + }, }, ) assert store.get(my_generator, "prompt1").decode() == "input2" machine = Machine(name="my_machine", flake=Flake(str(flake.path))) generators = get_generators( - machine=machine, full_closure=True, include_previous_values=True + machine=machine, + full_closure=True, + include_previous_values=True, ) # get_generators should bind the store assert generators[0].files[0]._store is not None @@ -957,7 +1003,9 @@ def test_migration( flake_obj = Flake(str(flake.path)) my_generator = Generator("my_generator", machine="my_machine", _flake=flake_obj) other_generator = Generator( - "other_generator", machine="my_machine", _flake=flake_obj + "other_generator", + machine="my_machine", + _flake=flake_obj, ) in_repo_store = in_repo.FactStore(flake=flake_obj) sops_store = sops.SecretStore(flake=flake_obj) @@ -1023,7 +1071,8 @@ def test_fails_when_files_are_left_from_other_backend( @pytest.mark.with_core def test_create_sops_age_secrets( - monkeypatch: pytest.MonkeyPatch, flake: ClanFlake + monkeypatch: pytest.MonkeyPatch, + flake: ClanFlake, ) -> None: monkeypatch.chdir(flake.path) cli.run(["vars", "keygen", "--flake", str(flake.path), "--user", "user"]) @@ -1111,7 +1160,7 @@ def test_dynamic_invalidation( in { clan.core.vars.generators.dependent_generator.validation = if builtins.pathExists p then builtins.readFile p else null; } - """ + """, ) flake.refresh() diff --git a/pkgs/clan-cli/clan_cli/tests/test_vars_deployment.py b/pkgs/clan-cli/clan_cli/tests/test_vars_deployment.py index a06965d56..fd09ef7a4 100644 --- a/pkgs/clan-cli/clan_cli/tests/test_vars_deployment.py +++ b/pkgs/clan-cli/clan_cli/tests/test_vars_deployment.py @@ -29,30 +29,30 @@ def test_vm_deployment( nix_eval( [ f"{vm_test_flake}#nixosConfigurations.test-vm-deployment.config.sops.secrets", - ] - ) - ).stdout.strip() + ], + ), + ).stdout.strip(), ) assert sops_secrets != {} my_secret_path = run( nix_eval( [ f"{vm_test_flake}#nixosConfigurations.test-vm-deployment.config.clan.core.vars.generators.m1_generator.files.my_secret.path", - ] - ) + ], + ), ).stdout.strip() assert "no-such-path" not in my_secret_path shared_secret_path = run( nix_eval( [ 
f"{vm_test_flake}#nixosConfigurations.test-vm-deployment.config.clan.core.vars.generators.my_shared_generator.files.shared_secret.path", - ] - ) + ], + ), ).stdout.strip() assert "no-such-path" not in shared_secret_path vm1_config = inspect_vm( - machine=Machine("test-vm-deployment", Flake(str(vm_test_flake))) + machine=Machine("test-vm-deployment", Flake(str(vm_test_flake))), ) with ExitStack() as stack: vm1 = stack.enter_context(spawn_vm(vm1_config, stdin=subprocess.DEVNULL)) @@ -64,7 +64,7 @@ def test_vm_deployment( assert result.stdout == "hello\n" # check shared_secret is deployed result = qga_m1.run( - ["cat", "/run/secrets/vars/my_shared_generator/shared_secret"] + ["cat", "/run/secrets/vars/my_shared_generator/shared_secret"], ) assert result.stdout == "hello\n" # check no_deploy_secret is not deployed diff --git a/pkgs/clan-cli/clan_cli/tests/test_vms_cli.py b/pkgs/clan-cli/clan_cli/tests/test_vms_cli.py index 65ca67fe9..04f713ddd 100644 --- a/pkgs/clan-cli/clan_cli/tests/test_vms_cli.py +++ b/pkgs/clan-cli/clan_cli/tests/test_vms_cli.py @@ -17,7 +17,8 @@ no_kvm = not Path("/dev/kvm").exists() @pytest.mark.with_core def test_inspect( - test_flake_with_core: FlakeForTest, capture_output: CaptureOutput + test_flake_with_core: FlakeForTest, + capture_output: CaptureOutput, ) -> None: with capture_output as output: cli.run(["vms", "inspect", "--flake", str(test_flake_with_core.path), "vm1"]) @@ -42,7 +43,7 @@ def test_run( "add", "user1", age_keys[0].pubkey, - ] + ], ) cli.run( [ @@ -51,7 +52,7 @@ def test_run( "add-user", "admins", "user1", - ] + ], ) cli.run( [ @@ -63,7 +64,7 @@ def test_run( "shutdown", "-h", "now", - ] + ], ) @@ -74,7 +75,7 @@ def test_vm_persistence( ) -> None: # Use the pre-built test VM from the test flake vm_config = inspect_vm( - machine=Machine("test-vm-persistence", Flake(str(vm_test_flake))) + machine=Machine("test-vm-persistence", Flake(str(vm_test_flake))), ) with spawn_vm(vm_config) as vm, vm.qga_connect() as qga: diff --git a/pkgs/clan-cli/clan_cli/vars/_types.py b/pkgs/clan-cli/clan_cli/vars/_types.py index 592b0cf13..9d9bc7e2b 100644 --- a/pkgs/clan-cli/clan_cli/vars/_types.py +++ b/pkgs/clan-cli/clan_cli/vars/_types.py @@ -62,9 +62,7 @@ class StoreBase(ABC): var: "Var", value: bytes, ) -> Path | None: - """ - override this method to implement the actual creation of the file - """ + """Override this method to implement the actual creation of the file""" @abstractmethod def exists(self, generator: "Generator", name: str) -> bool: @@ -81,8 +79,7 @@ class StoreBase(ABC): generators: list["Generator"] | None = None, file_name: str | None = None, ) -> str | None: - """ - Check the health of the store for the given machine and generators. + """Check the health of the store for the given machine and generators. This method detects any issues or inconsistencies in the store that may require fixing (e.g., outdated encryption keys, missing permissions). @@ -94,6 +91,7 @@ class StoreBase(ABC): Returns: str | None: An error message describing issues found, or None if everything is healthy + """ return None @@ -103,8 +101,7 @@ class StoreBase(ABC): generators: list["Generator"] | None = None, file_name: str | None = None, ) -> None: - """ - Fix any issues with the store for the given machine and generators. + """Fix any issues with the store for the given machine and generators. This method is intended to repair or update the store when inconsistencies are detected (e.g., re-encrypting secrets with new keys, fixing permissions). 
@@ -116,6 +113,7 @@ class StoreBase(ABC): Returns: None + """ return @@ -164,16 +162,15 @@ class StoreBase(ABC): log_info = machine.info if self.is_secret_store: log.info(f"{action_str} secret var {generator.name}/{var.name}\n") + elif value != old_val: + msg = f"{action_str} var {generator.name}/{var.name}" + if not is_migration: + msg += f"\n old: {old_val_str}\n new: {string_repr(value)}" + log_info(msg) else: - if value != old_val: - msg = f"{action_str} var {generator.name}/{var.name}" - if not is_migration: - msg += f"\n old: {old_val_str}\n new: {string_repr(value)}" - log_info(msg) - else: - log_info( - f"Var {generator.name}/{var.name} remains unchanged: {old_val_str}" - ) + log_info( + f"Var {generator.name}/{var.name} remains unchanged: {old_val_str}", + ) return new_file @abstractmethod @@ -200,8 +197,7 @@ class StoreBase(ABC): """ def get_validation(self, generator: "Generator") -> str | None: - """ - Return the invalidation hash that indicates if a generator needs to be re-run + """Return the invalidation hash that indicates if a generator needs to be re-run due to a change in its definition """ hash_file = self.directory(generator, ".validation-hash") @@ -210,17 +206,14 @@ class StoreBase(ABC): return hash_file.read_text().strip() def set_validation(self, generator: "Generator", hash_str: str) -> Path: - """ - Store the invalidation hash that indicates if a generator needs to be re-run - """ + """Store the invalidation hash that indicates if a generator needs to be re-run""" hash_file = self.directory(generator, ".validation-hash") hash_file.parent.mkdir(parents=True, exist_ok=True) hash_file.write_text(hash_str) return hash_file def hash_is_valid(self, generator: "Generator") -> bool: - """ - Check if the invalidation hash is up to date + """Check if the invalidation hash is up to date If the hash is not set in nix and hasn't been stored before, it is considered valid -> this provides backward and forward compatibility """ diff --git a/pkgs/clan-cli/clan_cli/vars/check.py b/pkgs/clan-cli/clan_cli/vars/check.py index 31945d6d3..2d99a0937 100644 --- a/pkgs/clan-cli/clan_cli/vars/check.py +++ b/pkgs/clan-cli/clan_cli/vars/check.py @@ -28,7 +28,9 @@ class VarStatus: def vars_status( - machine_name: str, flake: Flake, generator_name: None | str = None + machine_name: str, + flake: Flake, + generator_name: None | str = None, ) -> VarStatus: machine = Machine(name=machine_name, flake=flake) missing_secret_vars = [] @@ -53,14 +55,14 @@ def vars_status( for generator in generators: for file in generator.files: file.store( - machine.secret_vars_store if file.secret else machine.public_vars_store + machine.secret_vars_store if file.secret else machine.public_vars_store, ) file.generator(generator) if file.secret: if not machine.secret_vars_store.exists(generator, file.name): machine.info( - f"Secret var '{file.name}' for service '{generator.name}' in machine {machine.name} is missing." 
+ f"Secret var '{file.name}' for service '{generator.name}' in machine {machine.name} is missing.", ) missing_secret_vars.append(file) else: @@ -71,13 +73,13 @@ def vars_status( ) if msg: machine.info( - f"Secret var '{file.name}' for service '{generator.name}' in machine {machine.name} needs update: {msg}" + f"Secret var '{file.name}' for service '{generator.name}' in machine {machine.name} needs update: {msg}", ) unfixed_secret_vars.append(file) elif not machine.public_vars_store.exists(generator, file.name): machine.info( - f"Public var '{file.name}' for service '{generator.name}' in machine {machine.name} is missing." + f"Public var '{file.name}' for service '{generator.name}' in machine {machine.name} is missing.", ) missing_public_vars.append(file) # check if invalidation hash is up to date @@ -87,7 +89,7 @@ def vars_status( ): invalid_generators.append(generator.name) machine.info( - f"Generator '{generator.name}' in machine {machine.name} has outdated invalidation hash." + f"Generator '{generator.name}' in machine {machine.name} has outdated invalidation hash.", ) return VarStatus( missing_secret_vars, @@ -98,7 +100,9 @@ def vars_status( def check_vars( - machine_name: str, flake: Flake, generator_name: None | str = None + machine_name: str, + flake: Flake, + generator_name: None | str = None, ) -> bool: status = vars_status(machine_name, flake, generator_name=generator_name) return not ( diff --git a/pkgs/clan-cli/clan_cli/vars/check_test.py b/pkgs/clan-cli/clan_cli/vars/check_test.py index 66b200b15..38f30858c 100644 --- a/pkgs/clan-cli/clan_cli/vars/check_test.py +++ b/pkgs/clan-cli/clan_cli/vars/check_test.py @@ -6,7 +6,8 @@ from clan_lib.errors import ClanError def test_check_command_no_flake( - tmp_path: Path, monkeypatch: pytest.MonkeyPatch + tmp_path: Path, + monkeypatch: pytest.MonkeyPatch, ) -> None: monkeypatch.chdir(tmp_path) diff --git a/pkgs/clan-cli/clan_cli/vars/generate.py b/pkgs/clan-cli/clan_cli/vars/generate.py index 7fc2d02d9..bcee89f64 100644 --- a/pkgs/clan-cli/clan_cli/vars/generate.py +++ b/pkgs/clan-cli/clan_cli/vars/generate.py @@ -21,7 +21,7 @@ def generate_command(args: argparse.Namespace) -> None: filter( lambda m: m.name in args.machines, machines, - ) + ), ) # prefetch all vars @@ -32,7 +32,7 @@ def generate_command(args: argparse.Namespace) -> None: flake.precache( [ f"clanInternals.machines.{system}.{{{','.join(machine_names)}}}.config.clan.core.vars.generators.*.validationHash", - ] + ], ) run_generators( diff --git a/pkgs/clan-cli/clan_cli/vars/generate_test.py b/pkgs/clan-cli/clan_cli/vars/generate_test.py index 2f59b46eb..6b1f77ceb 100644 --- a/pkgs/clan-cli/clan_cli/vars/generate_test.py +++ b/pkgs/clan-cli/clan_cli/vars/generate_test.py @@ -6,7 +6,8 @@ from clan_lib.errors import ClanError def test_generate_command_no_flake( - tmp_path: Path, monkeypatch: pytest.MonkeyPatch + tmp_path: Path, + monkeypatch: pytest.MonkeyPatch, ) -> None: monkeypatch.chdir(tmp_path) diff --git a/pkgs/clan-cli/clan_cli/vars/generator.py b/pkgs/clan-cli/clan_cli/vars/generator.py index d5e73bfdb..f7d678569 100644 --- a/pkgs/clan-cli/clan_cli/vars/generator.py +++ b/pkgs/clan-cli/clan_cli/vars/generator.py @@ -19,7 +19,8 @@ log = logging.getLogger(__name__) def dependencies_as_dir( - decrypted_dependencies: dict[str, dict[str, bytes]], tmpdir: Path + decrypted_dependencies: dict[str, dict[str, bytes]], + tmpdir: Path, ) -> None: """Helper function to create directory structure from decrypted dependencies.""" for dep_generator, files in 
decrypted_dependencies.items(): @@ -72,13 +73,15 @@ class Generator: flake: "Flake", include_previous_values: bool = False, ) -> list["Generator"]: - """ - Get all generators for a machine from the flake. + """Get all generators for a machine from the flake. + Args: machine_name (str): The name of the machine. flake (Flake): The flake to get the generators from. + Returns: list[Generator]: A list of (unsorted) generators for the machine. + """ # Get all generator metadata in one select (safe fields only) generators_data = flake.select_machine( @@ -146,7 +149,8 @@ class Generator: for generator in generators: for prompt in generator.prompts: prompt.previous_value = generator.get_previous_value( - machine, prompt + machine, + prompt, ) return generators @@ -175,8 +179,8 @@ class Generator: machine = Machine(name=self.machine, flake=self._flake) output = Path( machine.select( - f'config.clan.core.vars.generators."{self.name}".finalScript' - ) + f'config.clan.core.vars.generators."{self.name}".finalScript', + ), ) if tmp_store := nix_test_store(): output = tmp_store.joinpath(*output.parts[1:]) @@ -189,7 +193,7 @@ class Generator: machine = Machine(name=self.machine, flake=self._flake) return machine.select( - f'config.clan.core.vars.generators."{self.name}".validationHash' + f'config.clan.core.vars.generators."{self.name}".validationHash', ) def decrypt_dependencies( @@ -207,6 +211,7 @@ class Generator: Returns: Dictionary mapping generator names to their variable values + """ from clan_lib.errors import ClanError @@ -222,7 +227,8 @@ class Generator: result[dep_key.name] = {} dep_generator = next( - (g for g in generators if g.name == dep_key.name), None + (g for g in generators if g.name == dep_key.name), + None, ) if dep_generator is None: msg = f"Generator {dep_key.name} not found in machine {machine.name}" @@ -237,11 +243,13 @@ class Generator: for file in dep_files: if file.secret: result[dep_key.name][file.name] = secret_vars_store.get( - dep_generator, file.name + dep_generator, + file.name, ) else: result[dep_key.name][file.name] = public_vars_store.get( - dep_generator, file.name + dep_generator, + file.name, ) return result @@ -250,6 +258,7 @@ class Generator: Returns: Dictionary mapping prompt names to their values + """ from .prompt import ask @@ -275,6 +284,7 @@ class Generator: machine: The machine to execute the generator for prompt_values: Optional dictionary of prompt values. If not provided, prompts will be asked interactively. 
no_sandbox: Whether to disable sandboxing when executing the generator + """ import os import sys @@ -333,8 +343,8 @@ class Generator: "--uid", "1000", "--gid", "1000", "--", - str(real_bash_path), "-c", generator - ] + str(real_bash_path), "-c", generator, + ], ) # fmt: on @@ -418,11 +428,11 @@ class Generator: if validation is not None: if public_changed: files_to_commit.append( - machine.public_vars_store.set_validation(self, validation) + machine.public_vars_store.set_validation(self, validation), ) if secret_changed: files_to_commit.append( - machine.secret_vars_store.set_validation(self, validation) + machine.secret_vars_store.set_validation(self, validation), ) commit_files( diff --git a/pkgs/clan-cli/clan_cli/vars/get.py b/pkgs/clan-cli/clan_cli/vars/get.py index 39c677131..c04e9cbe0 100644 --- a/pkgs/clan-cli/clan_cli/vars/get.py +++ b/pkgs/clan-cli/clan_cli/vars/get.py @@ -33,7 +33,7 @@ def get_machine_var(machine: Machine, var_id: str) -> Var: raise ClanError(msg) if len(results) > 1: error = f"Found multiple vars for {var_id}:\n - " + "\n - ".join( - [str(var) for var in results] + [str(var) for var in results], ) raise ClanError(error) # we have exactly one result at this point diff --git a/pkgs/clan-cli/clan_cli/vars/graph.py b/pkgs/clan-cli/clan_cli/vars/graph.py index 968c48092..b32e6af99 100644 --- a/pkgs/clan-cli/clan_cli/vars/graph.py +++ b/pkgs/clan-cli/clan_cli/vars/graph.py @@ -72,7 +72,8 @@ def add_dependents( def toposort_closure( - _closure: Iterable[GeneratorKey], generators: dict[GeneratorKey, Generator] + _closure: Iterable[GeneratorKey], + generators: dict[GeneratorKey, Generator], ) -> list[Generator]: closure = set(_closure) # return the topological sorted list of generators to execute @@ -87,8 +88,7 @@ def toposort_closure( # all generators in topological order def full_closure(generators: dict[GeneratorKey, Generator]) -> list[Generator]: - """ - From a set of generators, return all generators in topological order. + """From a set of generators, return all generators in topological order. This includes all dependencies and dependents of the generators. Returns all generators in topological order. """ @@ -97,8 +97,7 @@ def full_closure(generators: dict[GeneratorKey, Generator]) -> list[Generator]: # just the missing generators including their dependents def all_missing_closure(generators: dict[GeneratorKey, Generator]) -> list[Generator]: - """ - From a set of generators, return all incomplete generators in topological order. + """From a set of generators, return all incomplete generators in topological order. incomplete : A generator is missing if at least one of its files is missing. @@ -111,7 +110,8 @@ def all_missing_closure(generators: dict[GeneratorKey, Generator]) -> list[Gener # only a selected list of generators including their missing dependencies and their dependents def requested_closure( - requested_generators: list[GeneratorKey], generators: dict[GeneratorKey, Generator] + requested_generators: list[GeneratorKey], + generators: dict[GeneratorKey, Generator], ) -> list[Generator]: closure = set(requested_generators) # extend the graph to include all dependencies which are not on disk @@ -123,7 +123,8 @@ def requested_closure( # just enough to ensure that the list of selected generators are in a consistent state. # empty if nothing is missing. 
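The closure helpers in graph.py (full_closure, all_missing_closure, requested_closure, minimal_closure) all reduce to the same two steps: expand a set of generators along the dependency graph, then hand the result to a topological sort. A standalone sketch of that idea with plain strings standing in for Generator/GeneratorKey, covering only the dependency side (the real helpers also pull in dependents and filter on what is already present on disk):

from graphlib import TopologicalSorter


def closure_in_order(requested: set[str], deps: dict[str, set[str]]) -> list[str]:
    # Step 1: expand the requested set with all transitive dependencies.
    todo = list(requested)
    selected: set[str] = set()
    while todo:
        node = todo.pop()
        if node in selected:
            continue
        selected.add(node)
        todo.extend(deps.get(node, set()))
    # Step 2: emit the selected nodes dependencies-first.
    subgraph = {node: deps.get(node, set()) & selected for node in selected}
    return list(TopologicalSorter(subgraph).static_order())


# b depends on a, c depends on b; requesting only "c" yields a, b, c.
assert closure_in_order({"c"}, {"b": {"a"}, "c": {"b"}}) == ["a", "b", "c"]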
def minimal_closure( - requested_generators: list[GeneratorKey], generators: dict[GeneratorKey, Generator] + requested_generators: list[GeneratorKey], + generators: dict[GeneratorKey, Generator], ) -> list[Generator]: closure = set(requested_generators) final_closure = missing_dependency_closure(closure, generators) diff --git a/pkgs/clan-cli/clan_cli/vars/keygen.py b/pkgs/clan-cli/clan_cli/vars/keygen.py index c26813bd1..58bda625e 100644 --- a/pkgs/clan-cli/clan_cli/vars/keygen.py +++ b/pkgs/clan-cli/clan_cli/vars/keygen.py @@ -27,11 +27,11 @@ def _get_user_or_default(user: str | None) -> str: # TODO: Unify with "create clan" should be done automatically @API.register def create_secrets_user( - flake_dir: Path, user: str | None = None, force: bool = False + flake_dir: Path, + user: str | None = None, + force: bool = False, ) -> None: - """ - initialize sops keys for vars - """ + """Initialize sops keys for vars""" user = _get_user_or_default(user) pub_keys = maybe_get_admin_public_keys() if not pub_keys: @@ -51,11 +51,11 @@ def _select_keys_interactive(pub_keys: list[SopsKey]) -> list[SopsKey]: selected_keys: list[SopsKey] = [] for i, key in enumerate(pub_keys): log.info( - f"{i + 1}: type: {key.key_type}\n pubkey: {key.pubkey}\n source: {key.source}" + f"{i + 1}: type: {key.key_type}\n pubkey: {key.pubkey}\n source: {key.source}", ) while not selected_keys: choice = input( - "Select keys to use (comma-separated list of numbers, or leave empty to select all): " + "Select keys to use (comma-separated list of numbers, or leave empty to select all): ", ).strip() if not choice: log.info("No keys selected, using all keys.") @@ -71,11 +71,11 @@ def _select_keys_interactive(pub_keys: list[SopsKey]) -> list[SopsKey]: def create_secrets_user_interactive( - flake_dir: Path, user: str | None = None, force: bool = False + flake_dir: Path, + user: str | None = None, + force: bool = False, ) -> None: - """ - Initialize sops keys for vars interactively. - """ + """Initialize sops keys for vars interactively.""" user = _get_user_or_default(user) pub_keys = maybe_get_admin_public_keys() if pub_keys: @@ -83,13 +83,13 @@ def create_secrets_user_interactive( pub_keys = _select_keys_interactive(pub_keys) else: log.info( - "\nNo admin keys found on this machine, generating a new key for sops." + "\nNo admin keys found on this machine, generating a new key for sops.", ) pub_keys = [generate_key()] # make sure the user backups the generated key log.info("\n⚠️ IMPORTANT: Secret Key Backup ⚠️") log.info( - "The generated key above is CRITICAL for accessing your clan's secrets." + "The generated key above is CRITICAL for accessing your clan's secrets.", ) log.info("Without this key, you will lose access to all encrypted data!") log.info("Please backup the key file immediately to a secure location.") @@ -97,12 +97,12 @@ def create_secrets_user_interactive( confirm = None while not confirm or confirm.lower() != "y": log.info( - "\nI have backed up the key file to a secure location. Confirm [y/N]: " + "\nI have backed up the key file to a secure location. Confirm [y/N]: ", ) confirm = input().strip().lower() if confirm != "y": log.error( - "You must backup the key before proceeding. This is critical for data recovery!" + "You must backup the key before proceeding. 
This is critical for data recovery!", ) # persist the generated or chosen admin pubkey in the repo @@ -115,11 +115,11 @@ def create_secrets_user_interactive( def create_secrets_user_auto( - flake_dir: Path, user: str | None = None, force: bool = False + flake_dir: Path, + user: str | None = None, + force: bool = False, ) -> None: - """ - Detect if the user is in interactive mode or not and choose the appropriate routine. - """ + """Detect if the user is in interactive mode or not and choose the appropriate routine.""" if sys.stdin.isatty(): create_secrets_user_interactive( flake_dir=flake_dir, @@ -159,7 +159,10 @@ def register_keygen_parser(parser: argparse.ArgumentParser) -> None: ) parser.add_argument( - "-f", "--force", help="overwrite existing user", action="store_true" + "-f", + "--force", + help="overwrite existing user", + action="store_true", ) parser.add_argument( diff --git a/pkgs/clan-cli/clan_cli/vars/migration.py b/pkgs/clan-cli/clan_cli/vars/migration.py index ba26d7487..eff2db679 100644 --- a/pkgs/clan-cli/clan_cli/vars/migration.py +++ b/pkgs/clan-cli/clan_cli/vars/migration.py @@ -29,13 +29,13 @@ def _migration_file_exists( if machine.secret_facts_store.exists(generator.name, fact_name): return True machine.debug( - f"Cannot migrate fact {fact_name} for service {generator.name}, as it does not exist in the secret fact store" + f"Cannot migrate fact {fact_name} for service {generator.name}, as it does not exist in the secret fact store", ) if not is_secret: if machine.public_facts_store.exists(generator.name, fact_name): return True machine.debug( - f"Cannot migrate fact {fact_name} for service {generator.name}, as it does not exist in the public fact store" + f"Cannot migrate fact {fact_name} for service {generator.name}, as it does not exist in the public fact store", ) return False @@ -59,14 +59,20 @@ def _migrate_file( if file.secret: old_value = machine.secret_facts_store.get(service_name, fact_name) maybe_path = machine.secret_vars_store.set( - generator, file, old_value, is_migration=True + generator, + file, + old_value, + is_migration=True, ) if maybe_path: paths.append(maybe_path) else: old_value = machine.public_facts_store.get(service_name, fact_name) maybe_path = machine.public_vars_store.set( - generator, file, old_value, is_migration=True + generator, + file, + old_value, + is_migration=True, ) if maybe_path: paths.append(maybe_path) @@ -84,7 +90,11 @@ def migrate_files( if _migration_file_exists(machine, generator, file.name): assert generator.migrate_fact is not None files_to_commit += _migrate_file( - machine, generator, file.name, generator.migrate_fact, file.name + machine, + generator, + file.name, + generator.migrate_fact, + file.name, ) else: not_found.append(file.name) @@ -114,11 +124,10 @@ def check_can_migrate( all_files_missing = False else: all_files_present = False + elif machine.public_vars_store.exists(generator, file.name): + all_files_missing = False else: - if machine.public_vars_store.exists(generator, file.name): - all_files_missing = False - else: - all_files_present = False + all_files_present = False if not all_files_present and not all_files_missing: msg = f"Cannot migrate facts for generator {generator.name} as some files already exist in the store" @@ -132,5 +141,5 @@ def check_can_migrate( all( _migration_file_exists(machine, generator, file.name) for file in generator.files - ) + ), ) diff --git a/pkgs/clan-cli/clan_cli/vars/prompt.py b/pkgs/clan-cli/clan_cli/vars/prompt.py index d27d0ea91..4862bbe05 100644 --- 
a/pkgs/clan-cli/clan_cli/vars/prompt.py +++ b/pkgs/clan-cli/clan_cli/vars/prompt.py @@ -44,8 +44,8 @@ class Prompt: "group": None, "helperText": None, "required": False, - } - ) + }, + ), ) @classmethod @@ -60,13 +60,11 @@ class Prompt: def get_multiline_hidden_input() -> str: - """ - Get multiline input from the user without echoing the input. + """Get multiline input from the user without echoing the input. This function allows the user to enter multiple lines of text, and it will return the concatenated string of all lines entered. The user can finish the input by pressing Ctrl-D (EOF). """ - # Save terminal settings fd = sys.stdin.fileno() old_settings = termios.tcgetattr(fd) @@ -136,7 +134,7 @@ def ask( result = sys.stdin.read() case PromptType.MULTILINE_HIDDEN: print( - "Enter multiple lines (press Ctrl-D to finish or Ctrl-C to cancel):" + "Enter multiple lines (press Ctrl-D to finish or Ctrl-C to cancel):", ) result = get_multiline_hidden_input() case PromptType.HIDDEN: diff --git a/pkgs/clan-cli/clan_cli/vars/secret_modules/password_store.py b/pkgs/clan-cli/clan_cli/vars/secret_modules/password_store.py index 6602c8ea3..e44512e7c 100644 --- a/pkgs/clan-cli/clan_cli/vars/secret_modules/password_store.py +++ b/pkgs/clan-cli/clan_cli/vars/secret_modules/password_store.py @@ -33,7 +33,11 @@ class SecretStore(StoreBase): """Get the password store directory, cached per machine.""" if not self._store_dir: result = self._run_pass( - machine, "git", "rev-parse", "--show-toplevel", check=False + machine, + "git", + "rev-parse", + "--show-toplevel", + check=False, ) if result.returncode != 0: msg = "Password store must be a git repository" @@ -43,7 +47,8 @@ class SecretStore(StoreBase): def _pass_command(self, machine: str) -> str: out_path = self.flake.select_machine( - machine, "config.clan.core.vars.password-store.passPackage.outPath" + machine, + "config.clan.core.vars.password-store.passPackage.outPath", ) main_program = ( self.flake.select_machine( @@ -133,13 +138,24 @@ class SecretStore(StoreBase): result = self._run_pass(machine, "ls", str(machine_dir), check=False) if result.returncode == 0: self._run_pass( - machine, "rm", "--force", "--recursive", str(machine_dir), check=True + machine, + "rm", + "--force", + "--recursive", + str(machine_dir), + check=True, ) return [] def generate_hash(self, machine: str) -> bytes: result = self._run_pass( - machine, "git", "log", "-1", "--format=%H", self.entry_prefix, check=False + machine, + "git", + "log", + "-1", + "--format=%H", + self.entry_prefix, + check=False, ) git_hash = result.stdout.strip() @@ -183,7 +199,8 @@ class SecretStore(StoreBase): vars_generators = Generator.get_machine_generators(machine, self.flake) if "users" in phases: with tarfile.open( - output_dir / "secrets_for_users.tar.gz", "w:gz" + output_dir / "secrets_for_users.tar.gz", + "w:gz", ) as user_tar: for generator in vars_generators: dir_exists = False @@ -255,7 +272,8 @@ class SecretStore(StoreBase): self.populate_dir(machine, pass_dir, phases) upload_dir = Path( self.flake.select_machine( - machine, "config.clan.core.vars.password-store.secretLocation" - ) + machine, + "config.clan.core.vars.password-store.secretLocation", + ), ) upload(host, pass_dir, upload_dir) diff --git a/pkgs/clan-cli/clan_cli/vars/secret_modules/sops.py b/pkgs/clan-cli/clan_cli/vars/secret_modules/sops.py index 6362b0c23..847284bd5 100644 --- a/pkgs/clan-cli/clan_cli/vars/secret_modules/sops.py +++ b/pkgs/clan-cli/clan_cli/vars/secret_modules/sops.py @@ -75,7 +75,8 @@ class 
SecretStore(StoreBase): sops_secrets_folder(self.flake.path) / f"{machine}-age.key", priv_key, add_groups=self.flake.select_machine( - machine, "config.clan.core.sops.defaultGroups" + machine, + "config.clan.core.sops.defaultGroups", ), age_plugins=load_age_plugins(self.flake), ) @@ -86,7 +87,10 @@ class SecretStore(StoreBase): return "sops" def user_has_access( - self, user: str, generator: Generator, secret_name: str + self, + user: str, + generator: Generator, + secret_name: str, ) -> bool: key_dir = sops_users_folder(self.flake.path) / user return self.key_has_access(key_dir, generator, secret_name) @@ -98,7 +102,10 @@ class SecretStore(StoreBase): return self.key_has_access(key_dir, generator, secret_name) def key_has_access( - self, key_dir: Path, generator: Generator, secret_name: str + self, + key_dir: Path, + generator: Generator, + secret_name: str, ) -> bool: secret_path = self.secret_path(generator, secret_name) recipient = sops.SopsKey.load_dir(key_dir) @@ -115,8 +122,7 @@ class SecretStore(StoreBase): generators: list[Generator] | None = None, file_name: str | None = None, ) -> str | None: - """ - Check if SOPS secrets need to be re-encrypted due to recipient changes. + """Check if SOPS secrets need to be re-encrypted due to recipient changes. This method verifies that all secrets are properly encrypted with the current set of recipient keys. It detects when new users or machines have been added @@ -132,8 +138,8 @@ class SecretStore(StoreBase): Raises: ClanError: If the specified file_name is not found - """ + """ if generators is None: from clan_cli.vars.generator import Generator @@ -185,7 +191,8 @@ class SecretStore(StoreBase): value, add_machines=[machine] if var.deploy else [], add_groups=self.flake.select_machine( - machine, "config.clan.core.sops.defaultGroups" + machine, + "config.clan.core.sops.defaultGroups", ), git_commit=False, age_plugins=load_age_plugins(self.flake), @@ -291,17 +298,18 @@ class SecretStore(StoreBase): keys = collect_keys_for_path(path) for group in self.flake.select_machine( - machine, "config.clan.core.sops.defaultGroups" + machine, + "config.clan.core.sops.defaultGroups", ): keys.update( collect_keys_for_type( - self.flake.path / "sops" / "groups" / group / "machines" - ) + self.flake.path / "sops" / "groups" / group / "machines", + ), ) keys.update( collect_keys_for_type( - self.flake.path / "sops" / "groups" / group / "users" - ) + self.flake.path / "sops" / "groups" / group / "users", + ), ) return keys @@ -329,8 +337,7 @@ class SecretStore(StoreBase): generators: list[Generator] | None = None, file_name: str | None = None, ) -> None: - """ - Fix sops secrets by re-encrypting them with the current set of recipient keys. + """Fix sops secrets by re-encrypting them with the current set of recipient keys. This method updates secrets when recipients have changed (e.g., new admin users were added to the clan). 
It ensures all authorized recipients have access to the @@ -343,6 +350,7 @@ class SecretStore(StoreBase): Raises: ClanError: If the specified file_name is not found + """ from clan_cli.secrets.secrets import update_keys @@ -368,7 +376,8 @@ class SecretStore(StoreBase): gen_machine = self.get_machine(generator) for group in self.flake.select_machine( - gen_machine, "config.clan.core.sops.defaultGroups" + gen_machine, + "config.clan.core.sops.defaultGroups", ): allow_member( groups_folder(secret_path), diff --git a/pkgs/clan-cli/clan_cli/vars/upload.py b/pkgs/clan-cli/clan_cli/vars/upload.py index 4d2f62b95..6a9383385 100644 --- a/pkgs/clan-cli/clan_cli/vars/upload.py +++ b/pkgs/clan-cli/clan_cli/vars/upload.py @@ -13,13 +13,17 @@ log = logging.getLogger(__name__) def upload_secret_vars(machine: Machine, host: Host) -> None: machine.secret_vars_store.upload( - machine.name, host, phases=["activation", "users", "services"] + machine.name, + host, + phases=["activation", "users", "services"], ) def populate_secret_vars(machine: Machine, directory: Path) -> None: machine.secret_vars_store.populate_dir( - machine.name, directory, phases=["activation", "users", "services"] + machine.name, + directory, + phases=["activation", "users", "services"], ) diff --git a/pkgs/clan-cli/clan_cli/vars/upload_test.py b/pkgs/clan-cli/clan_cli/vars/upload_test.py index b2ae68885..015902612 100644 --- a/pkgs/clan-cli/clan_cli/vars/upload_test.py +++ b/pkgs/clan-cli/clan_cli/vars/upload_test.py @@ -6,7 +6,8 @@ from clan_lib.errors import ClanError def test_upload_command_no_flake( - tmp_path: Path, monkeypatch: pytest.MonkeyPatch + tmp_path: Path, + monkeypatch: pytest.MonkeyPatch, ) -> None: monkeypatch.chdir(tmp_path) diff --git a/pkgs/clan-cli/clan_cli/vms/__init__.py b/pkgs/clan-cli/clan_cli/vms/__init__.py index f3dab5077..1e20fa01e 100644 --- a/pkgs/clan-cli/clan_cli/vms/__init__.py +++ b/pkgs/clan-cli/clan_cli/vms/__init__.py @@ -13,6 +13,6 @@ def register_parser(parser: argparse.ArgumentParser) -> None: ) register_inspect_parser( - subparser.add_parser("inspect", help="inspect the vm configuration") + subparser.add_parser("inspect", help="inspect the vm configuration"), ) register_run_parser(subparser.add_parser("run", help="run a VM from a machine")) diff --git a/pkgs/clan-cli/clan_cli/vms/qemu.py b/pkgs/clan-cli/clan_cli/vms/qemu.py index 61f83c3f6..f60713ab7 100644 --- a/pkgs/clan-cli/clan_cli/vms/qemu.py +++ b/pkgs/clan-cli/clan_cli/vms/qemu.py @@ -204,7 +204,7 @@ def qemu_command( "chardev=char0,mode=readline", "-device", "virtconsole,chardev=char0,nr=0", - ] + ], ) else: command.extend( @@ -217,7 +217,7 @@ def qemu_command( "virtconsole,chardev=char0,nr=0", "-monitor", "none", - ] + ], ) vsock_cid = None diff --git a/pkgs/clan-cli/clan_cli/vms/run.py b/pkgs/clan-cli/clan_cli/vms/run.py index b492a11a0..f93b0c389 100644 --- a/pkgs/clan-cli/clan_cli/vms/run.py +++ b/pkgs/clan-cli/clan_cli/vms/run.py @@ -43,14 +43,16 @@ def facts_to_nixos_config(facts: dict[str, dict[str, bytes]]) -> dict: nixos_config["clan"]["core"]["secrets"][service]["facts"] = {} for fact, value in service_facts.items(): nixos_config["clan"]["core"]["secrets"][service]["facts"][fact] = { - "value": value.decode() + "value": value.decode(), } return nixos_config # TODO move this to the Machines class def build_vm( - machine: Machine, tmpdir: Path, nix_options: list[str] | None = None + machine: Machine, + tmpdir: Path, + nix_options: list[str] | None = None, ) -> dict[str, str]: # TODO pass prompt here for the GTK gui if nix_options 
is None: @@ -60,7 +62,7 @@ def build_vm( output = Path( machine.select( "config.system.clan.vm.create", - ) + ), ) if tmp_store := nix_test_store(): output = tmp_store.joinpath(*output.parts[1:]) @@ -129,7 +131,11 @@ def start_vm( machine.debug(f"Starting VM with command: {cmd}") with subprocess.Popen( - cmd, env=env, stdout=stdout, stderr=stderr, stdin=stdin + cmd, + env=env, + stdout=stdout, + stderr=stderr, + stdin=stdin, ) as process: try: yield process @@ -222,7 +228,7 @@ def spawn_vm( if cachedir is None: cache_tmp = stack.enter_context( - TemporaryDirectory(prefix="vm-cache-", dir=cache) + TemporaryDirectory(prefix="vm-cache-", dir=cache), ) cachedir = Path(cache_tmp) @@ -403,7 +409,9 @@ def run_command( def register_run_parser(parser: argparse.ArgumentParser) -> None: machine_action = parser.add_argument( - "machine", type=str, help="machine in the flake to run" + "machine", + type=str, + help="machine in the flake to run", ) add_dynamic_completer(machine_action, complete_machines) # option: --publish 2222:22 diff --git a/pkgs/clan-cli/clan_lib/api/__init__.py b/pkgs/clan-cli/clan_lib/api/__init__.py index 57f5f5b6c..637377e34 100644 --- a/pkgs/clan-cli/clan_lib/api/__init__.py +++ b/pkgs/clan-cli/clan_lib/api/__init__.py @@ -33,13 +33,13 @@ ResponseDataType = TypeVar("ResponseDataType") class ProcessMessage(TypedDict): - """ - Represents a message to be sent to the UI. + """Represents a message to be sent to the UI. Attributes: - topic: The topic of the message, used to identify the type of message. - data: The data to be sent with the message. - origin: The API operation that this message is related to, if applicable. + """ topic: str @@ -173,7 +173,7 @@ API.register(get_system_file) message=e.msg, description=e.description, location=[fn.__name__, e.location], - ) + ), ], ) except Exception as e: @@ -186,7 +186,7 @@ API.register(get_system_file) message=str(e), description="An unexpected error occurred", location=[fn.__name__], - ) + ), ], ) @@ -292,7 +292,8 @@ API.register(get_system_file) def import_all_modules_from_package(pkg: ModuleType) -> None: for _loader, module_name, _is_pkg in pkgutil.walk_packages( - pkg.__path__, prefix=f"{pkg.__name__}." + pkg.__path__, + prefix=f"{pkg.__name__}.", ): base_name = module_name.split(".")[-1] @@ -308,8 +309,7 @@ def import_all_modules_from_package(pkg: ModuleType) -> None: def load_in_all_api_functions() -> None: - """ - For the global API object, to have all functions available. + """For the global API object, to have all functions available. We have to make sure python loads every wrapped function at least once. This is done by importing all modules from the clan_lib and clan_cli packages. """ diff --git a/pkgs/clan-cli/clan_lib/api/directory.py b/pkgs/clan-cli/clan_lib/api/directory.py index ecbd92814..c3d10a025 100644 --- a/pkgs/clan-cli/clan_lib/api/directory.py +++ b/pkgs/clan-cli/clan_lib/api/directory.py @@ -32,8 +32,7 @@ class FileRequest: @API.register_abstract def get_system_file(file_request: FileRequest) -> list[str] | None: - """ - Api method to open a file dialog window. + """Api method to open a file dialog window. Implementations is specific to the platform and returns the name of the selected file or None if no file was selected. @@ -44,8 +43,7 @@ def get_system_file(file_request: FileRequest) -> list[str] | None: @API.register_abstract def get_clan_folder() -> Flake: - """ - Api method to open the clan folder. + """Api method to open the clan folder. 
Implementations is specific to the platform and returns the path to the clan folder. """ @@ -85,13 +83,12 @@ def blk_from_dict(data: dict) -> BlkInfo: @API.register def list_system_storage_devices() -> Blockdevices: - """ - List local block devices by running `lsblk`. + """List local block devices by running `lsblk`. Returns: A list of detected block devices with metadata like size, path, type, etc. - """ + """ cmd = nix_shell( ["util-linux"], [ @@ -107,14 +104,13 @@ def list_system_storage_devices() -> Blockdevices: blk_info: dict[str, Any] = json.loads(res) return Blockdevices( - blockdevices=[blk_from_dict(device) for device in blk_info["blockdevices"]] + blockdevices=[blk_from_dict(device) for device in blk_info["blockdevices"]], ) @API.register def get_clan_directory_relative(flake: Flake) -> str: - """ - Get the clan directory path relative to the flake root + """Get the clan directory path relative to the flake root from the clan.directory configuration setting. Args: @@ -125,6 +121,7 @@ def get_clan_directory_relative(flake: Flake) -> str: Raises: ClanError: If the flake evaluation fails or directories cannot be found + """ from clan_lib.dirs import get_clan_directories @@ -133,12 +130,13 @@ def get_clan_directory_relative(flake: Flake) -> str: def get_clan_dir(flake: Flake) -> Path: - """ - Get the effective clan directory, respecting the clan.directory configuration. + """Get the effective clan directory, respecting the clan.directory configuration. + Args: flake: The clan flake Returns: Path to the effective clan directory + """ relative_clan_dir = get_clan_directory_relative(flake) return flake.path / relative_clan_dir if relative_clan_dir else flake.path diff --git a/pkgs/clan-cli/clan_lib/api/directory_test.py b/pkgs/clan-cli/clan_lib/api/directory_test.py index 2e2ecc7b2..a26a74839 100644 --- a/pkgs/clan-cli/clan_lib/api/directory_test.py +++ b/pkgs/clan-cli/clan_lib/api/directory_test.py @@ -29,7 +29,7 @@ def test_get_relative_clan_directory_custom( { directory = ./direct-config; } -""" +""", ) test_subdir = Path(flake.path) / "direct-config" @@ -68,7 +68,7 @@ def test_get_clan_dir_custom( { directory = ./direct-config; } -""" +""", ) test_subdir = Path(flake.path) / "direct-config" diff --git a/pkgs/clan-cli/clan_lib/api/mdns_discovery.py b/pkgs/clan-cli/clan_lib/api/mdns_discovery.py index c121ef7ed..7703093c7 100644 --- a/pkgs/clan-cli/clan_lib/api/mdns_discovery.py +++ b/pkgs/clan-cli/clan_lib/api/mdns_discovery.py @@ -90,8 +90,10 @@ def parse_avahi_output(output: str) -> DNSInfo: @API.register def list_system_services_mdns() -> DNSInfo: """List mDNS/DNS-SD services on the local network. + Returns: DNSInfo: A dictionary containing discovered mDNS/DNS-SD services. + """ cmd = nix_shell( ["avahi"], diff --git a/pkgs/clan-cli/clan_lib/api/serde.py b/pkgs/clan-cli/clan_lib/api/serde.py index dd2e2583c..35b9cae3e 100644 --- a/pkgs/clan-cli/clan_lib/api/serde.py +++ b/pkgs/clan-cli/clan_lib/api/serde.py @@ -1,5 +1,4 @@ -""" -This module provides utility functions for serialization and deserialization of data classes. +"""This module provides utility functions for serialization and deserialization of data classes. Functions: - sanitize_string(s: str) -> str: Ensures a string is properly escaped for json serializing. @@ -56,9 +55,7 @@ def sanitize_string(s: str) -> str: def is_enum(obj: Any) -> bool: - """ - Safely checks if the object or one of its attributes is an Enum. 
- """ + """Safely checks if the object or one of its attributes is an Enum.""" # Check if the object itself is an Enum if isinstance(obj, Enum): return True @@ -69,9 +66,7 @@ def is_enum(obj: Any) -> bool: def get_enum_value(obj: Any) -> Any: - """ - Safely checks if the object or one of its attributes is an Enum. - """ + """Safely checks if the object or one of its attributes is an Enum.""" # Check if the object itself is an Enum value = getattr(obj, "value", None) if value is None and obj.enum: @@ -85,8 +80,7 @@ def get_enum_value(obj: Any) -> Any: def dataclass_to_dict(obj: Any, *, use_alias: bool = True) -> Any: - """ - Converts objects to dictionaries. + """Converts objects to dictionaries. This function is round trip safe. Meaning that if you convert the object to a dict and then back to a dataclass using 'from_dict' @@ -103,8 +97,7 @@ def dataclass_to_dict(obj: Any, *, use_alias: bool = True) -> Any: """ def _to_dict(obj: Any) -> Any: - """ - Utility function to convert dataclasses to dictionaries + """Utility function to convert dataclasses to dictionaries It converts all nested dataclasses, lists, tuples, and dictionaries to dictionaries It does NOT convert member functions. @@ -115,7 +108,9 @@ def dataclass_to_dict(obj: Any, *, use_alias: bool = True) -> Any: return { # Use either the original name or name sanitize_string( - field.metadata.get("alias", field.name) if use_alias else field.name + field.metadata.get("alias", field.name) + if use_alias + else field.name, ): _to_dict(getattr(obj, field.name)) for field in fields(obj) if not field.name.startswith("_") @@ -173,13 +168,11 @@ def is_type_in_union(union_type: type | UnionType, target_type: type) -> bool: def unwrap_none_type(type_hint: type | UnionType) -> type: - """ - Takes a type union and returns the first non-None type. + """Takes a type union and returns the first non-None type. None | str => str """ - if is_union_type(type_hint): # Return the first non-None type return next(t for t in get_args(type_hint) if t is not type(None)) @@ -191,10 +184,11 @@ JsonValue = str | float | dict[str, Any] | list[Any] | None def construct_value( - t: type | UnionType, field_value: JsonValue, loc: list[str] | None = None + t: type | UnionType, + field_value: JsonValue, + loc: list[str] | None = None, ) -> Any: - """ - Construct a field value from a type hint and a field value. + """Construct a field value from a type hint and a field value. The following types are supported and matched in this order: @@ -328,10 +322,11 @@ def construct_value( def construct_dataclass[T: Any]( - t: type[T], data: dict[str, Any], path: list[str] | None = None + t: type[T], + data: dict[str, Any], + path: list[str] | None = None, ) -> T: - """ - type t MUST be a dataclass + """Type t MUST be a dataclass Dynamically instantiate a data class from a dictionary, handling nested data classes. Constructs the field values from the data dictionary using 'construct_value' @@ -383,10 +378,11 @@ def construct_dataclass[T: Any]( def from_dict( - t: type | UnionType, data: dict[str, Any] | Any, path: list[str] | None = None + t: type | UnionType, + data: dict[str, Any] | Any, + path: list[str] | None = None, ) -> Any: - """ - Dynamically instantiate a data class from a dictionary, handling nested data classes. + """Dynamically instantiate a data class from a dictionary, handling nested data classes. 
This function is round trip safe in conjunction with 'dataclass_to_dict' """ diff --git a/pkgs/clan-cli/clan_lib/api/serde_deserialize_test.py b/pkgs/clan-cli/clan_lib/api/serde_deserialize_test.py index b45be43bd..a152b9483 100644 --- a/pkgs/clan-cli/clan_lib/api/serde_deserialize_test.py +++ b/pkgs/clan-cli/clan_lib/api/serde_deserialize_test.py @@ -102,7 +102,9 @@ def test_nested_nullable() -> None: mode="format", disks={"main": "/dev/sda"}, system_config=SystemConfig( - language="en_US.UTF-8", keymap="en", ssh_keys_path=None + language="en_US.UTF-8", + keymap="en", + ssh_keys_path=None, ), dry_run=False, write_efi_boot_entries=False, @@ -182,9 +184,7 @@ def test_alias_field() -> None: def test_alias_field_from_orig_name() -> None: - """ - Field declares an alias. But the data is provided with the field name. - """ + """Field declares an alias. But the data is provided with the field name.""" @dataclass class Person: @@ -197,10 +197,7 @@ def test_alias_field_from_orig_name() -> None: def test_none_or_string() -> None: - """ - Field declares an alias. But the data is provided with the field name. - """ - + """Field declares an alias. But the data is provided with the field name.""" data = None @dataclass @@ -218,8 +215,7 @@ def test_none_or_string() -> None: def test_union_with_none_edge_cases() -> None: - """ - Test various union types with None to ensure issubclass() error is avoided. + """Test various union types with None to ensure issubclass() error is avoided. This specifically tests the fix for the TypeError in is_type_in_union. """ # Test basic types with None diff --git a/pkgs/clan-cli/clan_lib/api/serde_serialize_test.py b/pkgs/clan-cli/clan_lib/api/serde_serialize_test.py index 07697383d..95e2df4f5 100644 --- a/pkgs/clan-cli/clan_lib/api/serde_serialize_test.py +++ b/pkgs/clan-cli/clan_lib/api/serde_serialize_test.py @@ -7,7 +7,6 @@ from clan_lib.api import ( ) -# def test_sanitize_string() -> None: # Simple strings assert sanitize_string("Hello World") == "Hello World" diff --git a/pkgs/clan-cli/clan_lib/api/tasks.py b/pkgs/clan-cli/clan_lib/api/tasks.py index 4d173ecf2..33c7ce144 100644 --- a/pkgs/clan-cli/clan_lib/api/tasks.py +++ b/pkgs/clan-cli/clan_lib/api/tasks.py @@ -44,7 +44,7 @@ def run_task_blocking(somearg: str) -> str: log.debug("Task was cancelled") return "Task was cancelled" log.debug( - f"Processing {i} for {somearg}. ctx.should_cancel={ctx.should_cancel()}" + f"Processing {i} for {somearg}. ctx.should_cancel={ctx.should_cancel()}", ) time.sleep(1) return f"Task completed with argument: {somearg}" diff --git a/pkgs/clan-cli/clan_lib/api/type_to_jsonschema.py b/pkgs/clan-cli/clan_lib/api/type_to_jsonschema.py index b5d4379e3..ba3840b1f 100644 --- a/pkgs/clan-cli/clan_lib/api/type_to_jsonschema.py +++ b/pkgs/clan-cli/clan_lib/api/type_to_jsonschema.py @@ -29,9 +29,7 @@ class JSchemaTypeError(Exception): # Inspect the fields of the parameterized type def inspect_dataclass_fields(t: type) -> dict[TypeVar, type]: - """ - Returns a map of type variables to actual types for a parameterized type. - """ + """Returns a map of type variables to actual types for a parameterized type.""" origin = get_origin(t) type_args = get_args(t) if origin is None: @@ -45,13 +43,12 @@ def inspect_dataclass_fields(t: type) -> dict[TypeVar, type]: def apply_annotations(schema: dict[str, Any], annotations: list[Any]) -> dict[str, Any]: - """ - Add metadata from typing.annotations to the json Schema. + """Add metadata from typing.annotations to the json Schema. 
The annotations can be a dict, a tuple, or a string and is directly applied to the schema as shown below. No further validation is done, the caller is responsible for following json-schema. Examples - + -------- ```python # String annotation Annotated[int, "This is an int"] -> {"type": "integer", "description": "This is an int"} @@ -62,6 +59,7 @@ def apply_annotations(schema: dict[str, Any], annotations: list[Any]) -> dict[st # Tuple annotation Annotated[int, ("minimum", 0)] -> {"type": "integer", "minimum": 0} ``` + """ for annotation in annotations: if isinstance(annotation, dict): @@ -96,8 +94,7 @@ def is_type_in_union(union_type: type | UnionType, target_type: type) -> bool: def is_total(typed_dict_class: type) -> bool: - """ - Check if a TypedDict has total=true + """Check if a TypedDict has total=true https://typing.readthedocs.io/en/latest/spec/typeddict.html#interaction-with-total-false """ return getattr(typed_dict_class, "__total__", True) # Default to True if not set @@ -177,7 +174,9 @@ def type_to_dict( explicit_required.add(field_name) dict_properties[field_name] = type_to_dict( - field_type, f"{scope} {t.__name__}.{field_name}", type_map + field_type, + f"{scope} {t.__name__}.{field_name}", + type_map, ) optional = set(dict_fields) - explicit_optional @@ -195,7 +194,7 @@ def type_to_dict( for arg in get_args(t): try: supported.append( - type_to_dict(arg, scope, type_map, narrow_unsupported_union_types) + type_to_dict(arg, scope, type_map, narrow_unsupported_union_types), ) except JSchemaTypeError: if narrow_unsupported_union_types: diff --git a/pkgs/clan-cli/clan_lib/api/type_to_jsonschema_test.py b/pkgs/clan-cli/clan_lib/api/type_to_jsonschema_test.py index 96d86faaf..e5a3a4fe8 100644 --- a/pkgs/clan-cli/clan_lib/api/type_to_jsonschema_test.py +++ b/pkgs/clan-cli/clan_lib/api/type_to_jsonschema_test.py @@ -85,7 +85,7 @@ def test_simple_union_types() -> None: "oneOf": [ {"type": "integer"}, {"type": "string"}, - ] + ], } assert type_to_dict(int | str | float) == { @@ -93,7 +93,7 @@ def test_simple_union_types() -> None: {"type": "integer"}, {"type": "string"}, {"type": "number"}, - ] + ], } assert type_to_dict(int | str | None) == { @@ -101,7 +101,7 @@ def test_simple_union_types() -> None: {"type": "integer"}, {"type": "string"}, {"type": "null"}, - ] + ], } @@ -133,7 +133,7 @@ def test_complex_union_types() -> None: "required": ["bar"], }, {"type": "null"}, - ] + ], } @@ -187,7 +187,7 @@ def test_dataclasses() -> None: }, "additionalProperties": False, "required": [ - "name" + "name", ], # value is optional because it has a default value of None } diff --git a/pkgs/clan-cli/clan_lib/async_run/__init__.py b/pkgs/clan-cli/clan_lib/async_run/__init__.py index d6a03b681..3642abc6c 100644 --- a/pkgs/clan-cli/clan_lib/async_run/__init__.py +++ b/pkgs/clan-cli/clan_lib/async_run/__init__.py @@ -44,17 +44,14 @@ class AsyncResult[R]: @property def error(self) -> Exception | None: - """ - Returns an error if the callable raised an exception. - """ + """Returns an error if the callable raised an exception.""" if isinstance(self._result, Exception): return self._result return None @property def result(self) -> R: - """ - Unwraps and returns the result if no exception occurred. + """Unwraps and returns the result if no exception occurred. Raises the exception otherwise. """ if isinstance(self._result, Exception): @@ -64,9 +61,7 @@ class AsyncResult[R]: @dataclass class AsyncContext: - """ - This class stores thread-local data. 
- """ + """This class stores thread-local data.""" prefix: str | None = None # prefix for logging stdout: IO[bytes] | None = None # stdout of subprocesses @@ -79,9 +74,7 @@ class AsyncContext: @dataclass class AsyncOpts: - """ - Options for the async_run function. - """ + """Options for the async_run function.""" tid: str | None = None check: bool = True @@ -92,39 +85,29 @@ ASYNC_CTX_THREAD_LOCAL = threading.local() def set_current_thread_opkey(op_key: str) -> None: - """ - Set the current thread's operation key. - """ + """Set the current thread's operation key.""" ctx = get_async_ctx() ctx.op_key = op_key def get_current_thread_opkey() -> str | None: - """ - Get the current thread's operation key. - """ + """Get the current thread's operation key.""" ctx = get_async_ctx() return ctx.op_key def is_async_cancelled() -> bool: - """ - Check if the current task has been cancelled. - """ + """Check if the current task has been cancelled.""" return get_async_ctx().should_cancel() def set_should_cancel(should_cancel: Callable[[], bool]) -> None: - """ - Set the cancellation function for the current task. - """ + """Set the cancellation function for the current task.""" get_async_ctx().should_cancel = should_cancel def get_async_ctx() -> AsyncContext: - """ - Retrieve the current AsyncContext, creating a new one if none exists. - """ + """Retrieve the current AsyncContext, creating a new one if none exists.""" global ASYNC_CTX_THREAD_LOCAL if not hasattr(ASYNC_CTX_THREAD_LOCAL, "async_ctx"): @@ -155,9 +138,7 @@ class AsyncThread[**P, R](threading.Thread): *args: P.args, **kwargs: P.kwargs, ) -> None: - """ - A threaded wrapper for running a function asynchronously. - """ + """A threaded wrapper for running a function asynchronously.""" super().__init__() self.function = function self.args = args @@ -169,9 +150,7 @@ class AsyncThread[**P, R](threading.Thread): self.stop_event = stop_event # Event to signal cancellation def run(self) -> None: - """ - Run the function in a separate thread. - """ + """Run the function in a separate thread.""" try: set_should_cancel(lambda: self.stop_event.is_set()) # Arguments for ParamSpec "P@AsyncThread" are missing @@ -191,9 +170,7 @@ class AsyncFuture[R]: _runtime: "AsyncRuntime" def wait(self) -> AsyncResult[R]: - """ - Wait for the task to finish. - """ + """Wait for the task to finish.""" if self._tid not in self._runtime.tasks: msg = f"No task with the name '{self._tid}' exists." raise ClanError(msg) @@ -207,9 +184,7 @@ class AsyncFuture[R]: return result def get_result(self) -> AsyncResult[R] | None: - """ - Retrieve the result of a finished task and remove it from the task list. - """ + """Retrieve the result of a finished task and remove it from the task list.""" if self._tid not in self._runtime.tasks: msg = f"No task with the name '{self._tid}' exists." raise ClanError(msg) @@ -251,8 +226,7 @@ class AsyncRuntime: *args: P.args, **kwargs: P.kwargs, ) -> AsyncFuture[R]: - """ - Run the given function asynchronously in a thread with a specific name and arguments. + """Run the given function asynchronously in a thread with a specific name and arguments. The function's static typing is preserved. 
""" if opts is None: @@ -268,7 +242,12 @@ class AsyncRuntime: stop_event = threading.Event() # Create and start the new AsyncThread thread = AsyncThread( - opts, self.condition, stop_event, function, *args, **kwargs + opts, + self.condition, + stop_event, + function, + *args, + **kwargs, ) self.tasks[opts.tid] = thread thread.start() @@ -282,17 +261,14 @@ class AsyncRuntime: *args: P.args, **kwargs: P.kwargs, ) -> AsyncFutureRef[R, Q]: - """ - The same as async_run, but with an additional reference to an object. + """The same as async_run, but with an additional reference to an object. This is useful to keep track of the origin of the task. """ future = self.async_run(opts, function, *args, **kwargs) return AsyncFutureRef(_tid=future._tid, _runtime=self, ref=ref) # noqa: SLF001 def join_all(self) -> None: - """ - Wait for all tasks to finish - """ + """Wait for all tasks to finish""" with self.condition: while any( not task.finished for task in self.tasks.values() @@ -300,9 +276,7 @@ class AsyncRuntime: self.condition.wait() # Wait until a thread signals completion def check_all(self) -> None: - """ - Check if there where any errors - """ + """Check if there where any errors""" err_count = 0 for name, task in self.tasks.items(): @@ -328,9 +302,7 @@ class AsyncRuntime: raise ClanError(msg) def __enter__(self) -> "AsyncRuntime": - """ - Enter the runtime context related to this object. - """ + """Enter the runtime context related to this object.""" return self def __exit__( @@ -339,8 +311,7 @@ class AsyncRuntime: exc_value: BaseException | None, traceback: types.TracebackType | None, ) -> None: - """ - Exit the runtime context related to this object. + """Exit the runtime context related to this object. Sets async_ctx.cancel to True to signal cancellation. """ for name, task in self.tasks.items(): diff --git a/pkgs/clan-cli/clan_lib/backups/restore.py b/pkgs/clan-cli/clan_lib/backups/restore.py index fe109415b..81e41141f 100644 --- a/pkgs/clan-cli/clan_lib/backups/restore.py +++ b/pkgs/clan-cli/clan_lib/backups/restore.py @@ -5,7 +5,11 @@ from clan_lib.ssh.remote import Remote def restore_service( - machine: Machine, host: Remote, name: str, provider: str, service: str + machine: Machine, + host: Remote, + name: str, + provider: str, + service: str, ) -> None: backup_metadata = machine.select("config.clan.core.backups") backup_folders = machine.select("config.clan.core.state") @@ -73,5 +77,5 @@ def restore_backup( errors.append(f"{service}: {e}") if errors: raise ClanError( - "Restore failed for the following services:\n" + "\n".join(errors) + "Restore failed for the following services:\n" + "\n".join(errors), ) diff --git a/pkgs/clan-cli/clan_lib/bwrap/__init__.py b/pkgs/clan-cli/clan_lib/bwrap/__init__.py index 02c10cb6b..05f8d5f2c 100644 --- a/pkgs/clan-cli/clan_lib/bwrap/__init__.py +++ b/pkgs/clan-cli/clan_lib/bwrap/__init__.py @@ -39,7 +39,7 @@ def _bubblewrap_works() -> bool: "--gid", "1000", "--", # do nothing, just test if bash executes - str(real_bash_path), "-c", ":" + str(real_bash_path), "-c", ":", ], ) diff --git a/pkgs/clan-cli/clan_lib/clan/check.py b/pkgs/clan-cli/clan_lib/clan/check.py index 26f074054..2f2651570 100644 --- a/pkgs/clan-cli/clan_lib/clan/check.py +++ b/pkgs/clan-cli/clan_lib/clan/check.py @@ -10,10 +10,13 @@ log = logging.getLogger(__name__) @API.register def check_clan_valid(flake: Flake) -> bool: """Check if a clan is valid by verifying if it has the clanInternals attribute. + Args: flake: The Flake instance representing the clan. 
+ Returns: bool: True if the clan exists, False otherwise. + """ try: flake.prefetch() diff --git a/pkgs/clan-cli/clan_lib/clan/check_test.py b/pkgs/clan-cli/clan_lib/clan/check_test.py index 8f60e9a1f..cdca3e575 100644 --- a/pkgs/clan-cli/clan_lib/clan/check_test.py +++ b/pkgs/clan-cli/clan_lib/clan/check_test.py @@ -9,7 +9,9 @@ from clan_lib.flake import Flake @pytest.mark.with_core def test_check_clan_valid( - temporary_home: Path, test_flake_with_core: FlakeForTest, test_flake: FlakeForTest + temporary_home: Path, + test_flake_with_core: FlakeForTest, + test_flake: FlakeForTest, ) -> None: # Test with a valid clan flake = Flake(str(test_flake_with_core.path)) diff --git a/pkgs/clan-cli/clan_lib/clan/create.py b/pkgs/clan-cli/clan_lib/clan/create.py index 20472f251..e6443f036 100644 --- a/pkgs/clan-cli/clan_lib/clan/create.py +++ b/pkgs/clan-cli/clan_lib/clan/create.py @@ -53,12 +53,15 @@ def git_command(directory: Path, *args: str) -> list[str]: @API.register def create_clan(opts: CreateOptions) -> None: """Create a new clan repository with the specified template. + Args: opts: CreateOptions containing the destination path, template name, source flake, and other options. + Raises: ClanError: If the source flake is not a valid flake or if the destination directory already exists. + """ opts.validate() @@ -69,7 +72,7 @@ def create_clan(opts: CreateOptions) -> None: nix_metadata(str(opts.src_flake)) except ClanError: log.error( - f"Found a repository, but it is not a valid flake: {opts.src_flake}" + f"Found a repository, but it is not a valid flake: {opts.src_flake}", ) log.warning("Setting src_flake to None") opts.src_flake = None @@ -92,13 +95,15 @@ def create_clan(opts: CreateOptions) -> None: # check if username is set has_username = run( - git_command(dest, "config", "user.name"), RunOpts(check=False) + git_command(dest, "config", "user.name"), + RunOpts(check=False), ) if has_username.returncode != 0: run(git_command(dest, "config", "user.name", "clan-tool")) has_username = run( - git_command(dest, "config", "user.email"), RunOpts(check=False) + git_command(dest, "config", "user.email"), + RunOpts(check=False), ) if has_username.returncode != 0: run(git_command(dest, "config", "user.email", "clan@example.com")) @@ -125,5 +130,3 @@ def create_clan(opts: CreateOptions) -> None: new_meta = merge_objects(curr_meta, opts.initial) set_value_by_path(inventory, "meta", new_meta) inventory_store.write(inventory, message="Init inventory") - - return diff --git a/pkgs/clan-cli/clan_lib/clan/get.py b/pkgs/clan-cli/clan_lib/clan/get.py index 259088445..d155be99b 100644 --- a/pkgs/clan-cli/clan_lib/clan/get.py +++ b/pkgs/clan-cli/clan_lib/clan/get.py @@ -12,12 +12,16 @@ log = logging.getLogger(__name__) @API.register def get_clan_details(flake: Flake) -> InventoryMeta: """Retrieve the clan details from the inventory of a given flake. + Args: flake: The Flake instance representing the clan. + Returns: InventoryMeta: The meta information from the clan's inventory. + Raises: ClanError: If the flake does not exist, or if the inventory is invalid (missing the meta attribute). 
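A usage sketch for the accessor documented above (the flake location is a hypothetical example, not taken from this patch):

    from clan_lib.clan.get import get_clan_details
    from clan_lib.flake import Flake

    meta = get_clan_details(Flake("/home/user/my-clan"))  # hypothetical path to a clan flake
    print(meta)
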
+ """ inventory_store = InventoryStore(flake) inventory = inventory_store.read() diff --git a/pkgs/clan-cli/clan_lib/clan/test_create.py b/pkgs/clan-cli/clan_lib/clan/test_create.py index ec501b224..bd0111606 100644 --- a/pkgs/clan-cli/clan_lib/clan/test_create.py +++ b/pkgs/clan-cli/clan_lib/clan/test_create.py @@ -11,14 +11,15 @@ from clan_lib.persist.inventory_store import InventoryStore @pytest.mark.with_core def test_create_simple(tmp_path: Path, offline_flake_hook: Any) -> None: - """ - Template = 'default' + """Template = 'default' # All default params """ dest = tmp_path / "test_clan" opts = CreateOptions( - dest=dest, template="default", _postprocess_flake_hook=offline_flake_hook + dest=dest, + template="default", + _postprocess_flake_hook=offline_flake_hook, ) create_clan(opts) @@ -44,15 +45,16 @@ def test_can_handle_path_without_slash( offline_flake_hook: Any, monkeypatch: pytest.MonkeyPatch, ) -> None: - """ - Tests for a regression, where it broke when the path is a single word like `foo`. + """Tests for a regression, where it broke when the path is a single word like `foo`. The flake identifier was interpreted as an external flake. """ monkeypatch.chdir(tmp_path) dest = Path("test_clan") opts = CreateOptions( - dest=dest, template="default", _postprocess_flake_hook=offline_flake_hook + dest=dest, + template="default", + _postprocess_flake_hook=offline_flake_hook, ) create_clan(opts) @@ -63,8 +65,7 @@ def test_can_handle_path_without_slash( @pytest.mark.with_core def test_create_with_name(tmp_path: Path, offline_flake_hook: Any) -> None: - """ - Template = 'default' + """Template = 'default' # All default params """ dest = tmp_path / "test_clan" @@ -107,8 +108,7 @@ def test_create_with_name(tmp_path: Path, offline_flake_hook: Any) -> None: # We might want to change this in the future @pytest.mark.with_core def test_create_cannot_set_name(tmp_path: Path, offline_flake_hook: Any) -> None: - """ - Template = 'default' + """Template = 'default' # All default params """ dest = tmp_path / "test_clan" @@ -132,8 +132,7 @@ def test_create_cannot_set_name(tmp_path: Path, offline_flake_hook: Any) -> None @pytest.mark.with_core def test_create_invalid_name(tmp_path: Path, offline_flake_hook: Any) -> None: - """ - Template = 'default' + """Template = 'default' # All default params """ dest = tmp_path / "test_clan" diff --git a/pkgs/clan-cli/clan_lib/clan/update.py b/pkgs/clan-cli/clan_lib/clan/update.py index 59c13147e..b7fba2c9c 100644 --- a/pkgs/clan-cli/clan_lib/clan/update.py +++ b/pkgs/clan-cli/clan_lib/clan/update.py @@ -16,12 +16,16 @@ class UpdateOptions: @API.register def set_clan_details(options: UpdateOptions) -> InventorySnapshot: """Update the clan metadata in the inventory of a given flake. + Args: options: UpdateOptions containing the flake and the new metadata. + Returns: InventorySnapshot: The updated inventory snapshot after modifying the metadata. + Raises: ClanError: If the flake does not exist or if the inventory is invalid (missing the meta attribute). 
+ """ inventory_store = InventoryStore(options.flake) inventory = inventory_store.read() diff --git a/pkgs/clan-cli/clan_lib/cmd/__init__.py b/pkgs/clan-cli/clan_lib/cmd/__init__.py index 17dc5faff..b7de80cb2 100644 --- a/pkgs/clan-cli/clan_lib/cmd/__init__.py +++ b/pkgs/clan-cli/clan_lib/cmd/__init__.py @@ -229,8 +229,7 @@ def terminate_process(process: subprocess.Popen) -> Iterator[None]: class TimeTable: - """ - This class is used to store the time taken by each command + """This class is used to store the time taken by each command and print it at the end of the program if env CLAN_CLI_PERF=1 is set. """ @@ -245,7 +244,9 @@ class TimeTable: # Sort the table by time in descending order sorted_table = sorted( - self.table.items(), key=lambda item: item[1], reverse=True + self.table.items(), + key=lambda item: item[1], + reverse=True, ) for k, v in sorted_table: @@ -294,8 +295,7 @@ class RunOpts: def cmd_with_root(cmd: list[str], graphical: bool = False) -> list[str]: - """ - This function returns a wrapped command that will be run with root permissions. + """This function returns a wrapped command that will be run with root permissions. It will use sudo if graphical is False, otherwise it will use run0 or pkexec. """ if os.geteuid() == 0: @@ -375,7 +375,7 @@ def run( stderr=subprocess.PIPE, start_new_session=not options.needs_user_terminal, shell=options.shell, - ) + ), ) if options.needs_user_terminal: diff --git a/pkgs/clan-cli/clan_lib/colors/__init__.py b/pkgs/clan-cli/clan_lib/colors/__init__.py index 9d45cde6a..73774830f 100644 --- a/pkgs/clan-cli/clan_lib/colors/__init__.py +++ b/pkgs/clan-cli/clan_lib/colors/__init__.py @@ -7,9 +7,7 @@ DEFAULT_MARKER = 302 class RgbColor(Enum): - """ - A subset of CSS colors with RGB values that work well in Dark and Light mode. - """ + """A subset of CSS colors with RGB values that work well in Dark and Light mode.""" TEAL = (0, 130, 128) OLIVEDRAB = (113, 122, 57) @@ -85,8 +83,7 @@ class ColorType(Enum): def _join(*values: int | str) -> str: - """ - Join a series of values with semicolons. The values + """Join a series of values with semicolons. The values are either integers or strings, so stringify each for good measure. Worth breaking out as its own function because semicolon-joined lists are core to ANSI coding. @@ -95,8 +92,7 @@ def _join(*values: int | str) -> str: def color_code(spec: tuple[int, int, int], base: ColorType) -> str: - """ - Workhorse of encoding a color. Give preference to named colors from + """Workhorse of encoding a color. Give preference to named colors from ANSI, then to specific numeric or tuple specs. If those don't work, try looking up CSS color names or parsing CSS color specifications (hex or rgb). @@ -143,9 +139,7 @@ def color( fg: Color = AnsiColor.DEFAULT, bg: Color = AnsiColor.DEFAULT, ) -> str: - """ - Add ANSI colors and styles to a string. 
- """ + """Add ANSI colors and styles to a string.""" return color_by_tuple(message, fg.value, bg.value) diff --git a/pkgs/clan-cli/clan_lib/custom_logger/__init__.py b/pkgs/clan-cli/clan_lib/custom_logger/__init__.py index 6a284e810..a48342bf4 100644 --- a/pkgs/clan-cli/clan_lib/custom_logger/__init__.py +++ b/pkgs/clan-cli/clan_lib/custom_logger/__init__.py @@ -22,9 +22,7 @@ def _get_filepath(record: logging.LogRecord) -> Path: class PrefixFormatter(logging.Formatter): - """ - print errors in red and warnings in yellow - """ + """print errors in red and warnings in yellow""" def __init__(self, trace_prints: bool = False) -> None: super().__init__() @@ -90,14 +88,12 @@ class PrefixFormatter(logging.Formatter): def get_callers(start: int = 2, end: int = 2) -> list[str]: - """ - Get a list of caller information for a given range in the call stack. + """Get a list of caller information for a given range in the call stack. :param start: The starting position in the call stack (1 being directly above in the call stack). :param end: The end position in the call stack. :return: A list of strings, each containing the file, line number, and function of the caller. """ - frame = inspect.currentframe() if frame is None: return ["unknown"] diff --git a/pkgs/clan-cli/clan_lib/dirs/__init__.py b/pkgs/clan-cli/clan_lib/dirs/__init__.py index 22df7530f..76a0d1ef8 100644 --- a/pkgs/clan-cli/clan_lib/dirs/__init__.py +++ b/pkgs/clan-cli/clan_lib/dirs/__init__.py @@ -35,9 +35,7 @@ def get_clan_flake_toplevel() -> Path | None: def clan_key_safe(flake_url: str) -> str: - """ - only embed the url in the path, not the clan name, as it would involve eval. - """ + """Only embed the url in the path, not the clan name, as it would involve eval.""" quoted_url = urllib.parse.quote_plus(flake_url) return f"{quoted_url}" @@ -55,9 +53,7 @@ def find_toplevel(top_level_files: list[str]) -> Path | None: def clan_core_flake() -> Path: - """ - Returns the path to the clan core flake. - """ + """Returns the path to the clan core flake.""" return module_root().parent.parent.parent @@ -102,7 +98,7 @@ def user_config_dir() -> Path: def user_data_dir() -> Path: if sys.platform == "win32": return Path( - Path(os.getenv("LOCALAPPDATA", Path("~\\AppData\\Local\\").expanduser())) + Path(os.getenv("LOCALAPPDATA", Path("~\\AppData\\Local\\").expanduser())), ) xdg_data = os.getenv("XDG_DATA_HOME") if xdg_data: @@ -115,7 +111,7 @@ def user_data_dir() -> Path: def user_cache_dir() -> Path: if sys.platform == "win32": return Path( - Path(os.getenv("LOCALAPPDATA", Path("~\\AppData\\Local\\").expanduser())) + Path(os.getenv("LOCALAPPDATA", Path("~\\AppData\\Local\\").expanduser())), ) xdg_cache = os.getenv("XDG_CACHE_HOME") if xdg_cache: @@ -184,8 +180,7 @@ def select_source() -> Path: def get_clan_directories(flake: "Flake") -> tuple[str, str]: - """ - Get the clan source directory and computed clan directory paths. + """Get the clan source directory and computed clan directory paths. 
Args: flake: The clan flake to get directories from @@ -197,6 +192,7 @@ def get_clan_directories(flake: "Flake") -> tuple[str, str]: Raises: ClanError: If the flake evaluation fails or directories cannot be found + """ import json from pathlib import Path @@ -213,8 +209,8 @@ def get_clan_directories(flake: "Flake") -> tuple[str, str]: nix_eval( flags=[ f"{flake.identifier}#clanInternals.inventoryClass.directory", - ] - ) + ], + ), ) directory = json.loads(directory_result.stdout.strip()) diff --git a/pkgs/clan-cli/clan_lib/dirs/dirs_test.py b/pkgs/clan-cli/clan_lib/dirs/dirs_test.py index 422a37656..14f53e110 100644 --- a/pkgs/clan-cli/clan_lib/dirs/dirs_test.py +++ b/pkgs/clan-cli/clan_lib/dirs/dirs_test.py @@ -40,7 +40,7 @@ def test_get_clan_directories_with_direct_directory_config( { directory = ./direct-config; } -""" +""", ) test_subdir = Path(flake.path) / "direct-config" diff --git a/pkgs/clan-cli/clan_lib/errors/__init__.py b/pkgs/clan-cli/clan_lib/errors/__init__.py index 1c22a0c4c..57650fe79 100644 --- a/pkgs/clan-cli/clan_lib/errors/__init__.py +++ b/pkgs/clan-cli/clan_lib/errors/__init__.py @@ -42,8 +42,7 @@ class DictDiff: def diff_dicts(dict1: dict[str, str], dict2: dict[str, str]) -> DictDiff: - """ - Compare two dictionaries and report additions, deletions, and changes. + """Compare two dictionaries and report additions, deletions, and changes. :param dict1: The first dictionary (baseline). :param dict2: The second dictionary (to compare). @@ -117,14 +116,14 @@ class CmdOut: {optional_text("Stdout", self.stdout)} {optional_text("Stderr", self.stderr)} {"Return Code:":<{label_width}} {self.returncode} -""" +""", ] if self.msg: error_msg += [f"{'Error Msg:':<{label_width}} {self.msg.capitalize()}"] if DEBUG_COMMANDS: diffed_dict = ( - diff_dicts(cast(dict[str, str], os.environ), self.env) + diff_dicts(cast("dict[str, str]", os.environ), self.env) if self.env else None ) @@ -136,7 +135,7 @@ class CmdOut: {optional_text("Environment", diffed_dict_str)} {text_heading(heading="Metadata")} {"Work Dir:":<{label_width}} '{self.cwd}' -""" +""", ] return "\n".join(error_msg) diff --git a/pkgs/clan-cli/clan_lib/fixtures/flakes/flakes.py b/pkgs/clan-cli/clan_lib/fixtures/flakes/flakes.py index da9ac306f..94c178c5d 100644 --- a/pkgs/clan-cli/clan_lib/fixtures/flakes/flakes.py +++ b/pkgs/clan-cli/clan_lib/fixtures/flakes/flakes.py @@ -53,7 +53,8 @@ def patch_clan_template(monkeypatch: Any, offline_template: Path) -> None: @pytest.fixture() def clan_flake( - tmp_path: Path, patch_clan_template: Any + tmp_path: Path, + patch_clan_template: Any, ) -> Callable[[Clan | None, str | None], Flake]: def factory(clan: Clan | None = None, raw: str | None = None) -> Flake: # TODO: Make more options configurable diff --git a/pkgs/clan-cli/clan_lib/flake/flake.py b/pkgs/clan-cli/clan_lib/flake/flake.py index 1f7546990..2c0569933 100644 --- a/pkgs/clan-cli/clan_lib/flake/flake.py +++ b/pkgs/clan-cli/clan_lib/flake/flake.py @@ -89,8 +89,7 @@ def is_pure_store_path(path: str) -> bool: class SetSelectorType(str, Enum): - """ - enum for the type of selector in a set. + """enum for the type of selector in a set. For now this is either a string or a maybe selector. """ @@ -100,8 +99,7 @@ class SetSelectorType(str, Enum): @dataclass class SetSelector: - """ - This class represents a selector used in a set. + """This class represents a selector used in a set. 
type: SetSelectorType = SetSelectorType.STR value: str = "" @@ -114,8 +112,7 @@ class SetSelector: class SelectorType(str, Enum): - """ - enum for the type of a selector + """enum for the type of a selector this can be all, string, set or maybe """ @@ -127,8 +124,7 @@ class SelectorType(str, Enum): @dataclass class Selector: - """ - A class to represent a selector, which selects nix elements one level down. + """A class to represent a selector, which selects nix elements one level down. consists of a SelectorType and a value. if the type is all, no value is needed, since it selects all elements. @@ -209,8 +205,7 @@ def selectors_as_json(selectors: list[Selector]) -> str: def parse_selector(selector: str) -> list[Selector]: - """ - takes a string and returns a list of selectors. + """Takes a string and returns a list of selectors. a selector can be: - a string, which is a key in a dict @@ -286,7 +281,7 @@ def parse_selector(selector: str) -> list[Selector]: else: set_select_type = SetSelectorType.STR acc_selectors.append( - SetSelector(type=set_select_type, value=acc_str) + SetSelector(type=set_select_type, value=acc_str), ) # Check for invalid multiselect patterns with outPath for subselector in acc_selectors: @@ -355,8 +350,7 @@ def parse_selector(selector: str) -> list[Selector]: @dataclass class FlakeCacheEntry: - """ - a recursive structure to store the cache. + """a recursive structure to store the cache. consists of a dict with the keys being the selectors and the values being FlakeCacheEntry objects. is_list is used to check if the value is a list. @@ -565,8 +559,8 @@ class FlakeCacheEntry: if self.value[selector.value].exists: return { selector.value: self.value[selector.value].select( - selectors[1:] - ) + selectors[1:], + ), } return {} # Key not found, return empty dict for MAYBE selector @@ -684,7 +678,10 @@ class FlakeCacheEntry: fetched_all = json_data.get("fetched_all", False) entry = FlakeCacheEntry( - value=value, is_list=is_list, exists=exists, fetched_all=fetched_all + value=value, + is_list=is_list, + exists=exists, + fetched_all=fetched_all, ) return entry @@ -696,9 +693,7 @@ class FlakeCacheEntry: @dataclass class FlakeCache: - """ - an in-memory cache for flake outputs, uses a recursive FLakeCacheEntry structure - """ + """an in-memory cache for flake outputs, uses a recursive FLakeCacheEntry structure""" def __init__(self) -> None: self.cache: FlakeCacheEntry = FlakeCacheEntry() @@ -736,8 +731,7 @@ class FlakeCache: @dataclass class Flake: - """ - This class represents a flake, and is used to interact with it. + """This class represents a flake, and is used to interact with it. values can be accessed using the select method, which will fetch the value from the cache if it is present. """ @@ -800,9 +794,7 @@ class Flake: log.warning(f"Failed load eval cache: {e}. Continue without cache") def prefetch(self) -> None: - """ - Loads the flake into the store and populates self.store_path and self.hash such that the flake can evaluate locally and offline - """ + """Loads the flake into the store and populates self.store_path and self.hash such that the flake can evaluate locally and offline""" from clan_lib.cmd import RunOpts, run from clan_lib.nix import ( nix_command, @@ -843,8 +835,7 @@ class Flake: self.flake_metadata = flake_metadata def invalidate_cache(self) -> None: - """ - Invalidate the cache and reload it. + """Invalidate the cache and reload it. This method is used to refresh the cache by reloading it from the flake. 
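To make the selector grammar described for parse_selector above concrete, a hedged example (module path taken from this file; the exact shape of the returned Selector objects is as declared in this hunk):

    from clan_lib.flake.flake import SelectorType, parse_selector

    selectors = parse_selector("nixosConfigurations.*.config.networking.{hostName,hostId}")
    # roughly: STR("nixosConfigurations"), the ALL selector produced by "*",
    # STR("config"), STR("networking"), and a SET selector carrying hostName and hostId
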
""" @@ -883,8 +874,7 @@ class Flake: self, selectors: list[str], ) -> None: - """ - Retrieves specific attributes from a Nix flake using the provided selectors. + """Retrieves specific attributes from a Nix flake using the provided selectors. This function interacts with the Nix build system to fetch and process attributes from a flake. It uses the provided selectors to determine which @@ -899,6 +889,7 @@ class Flake: Raises: ClanError: If the number of outputs does not match the number of selectors. AssertionError: If the cache or flake cache path is not properly initialized. + """ from clan_lib.cmd import Log, RunOpts, run from clan_lib.dirs import select_source @@ -972,7 +963,7 @@ class Flake: run( nix_build(["--expr", nix_code, *nix_options]), RunOpts(log=Log.NONE, trace=trace), - ).stdout.strip() + ).stdout.strip(), ) except ClanCmdError as e: if "error: attribute 'clan' missing" in str(e): @@ -1008,8 +999,7 @@ class Flake: self._cache.save_to_file(self.flake_cache_path) def precache(self, selectors: list[str]) -> None: - """ - Ensures that the specified selectors are cached locally. + """Ensures that the specified selectors are cached locally. This function checks if the given selectors are already cached. If not, it fetches them using the Nix build system and stores them in the local cache. @@ -1017,8 +1007,8 @@ class Flake: Args: selectors (list[str]): A list of attribute selectors to check and cache. - """ + """ if self._cache is None: self.invalidate_cache() assert self._cache is not None @@ -1035,12 +1025,12 @@ class Flake: self, selector: str, ) -> Any: - """ - Selects a value from the cache based on the provided selector string. + """Selects a value from the cache based on the provided selector string. Fetches it via nix_build if it is not already cached. Args: selector (str): The attribute selector string to fetch the value for. + """ if self._cache is None: self.invalidate_cache() @@ -1058,13 +1048,13 @@ class Flake: return value def select_machine(self, machine_name: str, selector: str) -> Any: - """ - Select a nix attribute for a specific machine. + """Select a nix attribute for a specific machine. Args: machine_name: The name of the machine selector: The attribute selector string relative to the machine config apply: Optional function to apply to the result + """ from clan_lib.nix import nix_config @@ -1076,8 +1066,7 @@ class Flake: def require_flake(flake: Flake | None) -> Flake: - """ - Require that a flake argument is provided, if not in a clan flake. + """Require that a flake argument is provided, if not in a clan flake. This should be called by commands that require a flake but don't have a sensible default when no clan flake is found locally. 
@@ -1090,6 +1079,7 @@ def require_flake(flake: Flake | None) -> Flake: Raises: ClanError: If the flake is None + """ if flake is None: msg = "No clan flake found in the current directory or its parents" diff --git a/pkgs/clan-cli/clan_lib/flake/flake_cache_test.py b/pkgs/clan-cli/clan_lib/flake/flake_cache_test.py index aa91a555f..fe0a86bef 100644 --- a/pkgs/clan-cli/clan_lib/flake/flake_cache_test.py +++ b/pkgs/clan-cli/clan_lib/flake/flake_cache_test.py @@ -48,13 +48,13 @@ def test_cache_persistance(flake: ClanFlake) -> None: assert isinstance(flake1._cache, FlakeCache) # noqa: SLF001 assert isinstance(flake2._cache, FlakeCache) # noqa: SLF001 assert not flake1._cache.is_cached( # noqa: SLF001 - "nixosConfigurations.*.config.networking.hostName" + "nixosConfigurations.*.config.networking.hostName", ) flake1.select("nixosConfigurations.*.config.networking.hostName") flake1.select("nixosConfigurations.*.config.networking.{hostName,hostId}") flake2.invalidate_cache() assert flake2._cache.is_cached( # noqa: SLF001 - "nixosConfigurations.*.config.networking.{hostName,hostId}" + "nixosConfigurations.*.config.networking.{hostName,hostId}", ) @@ -171,7 +171,8 @@ def test_insert_and_iscached() -> None: def test_cache_is_cached_with_clan_test_store( - tmp_path: Path, monkeypatch: pytest.MonkeyPatch + tmp_path: Path, + monkeypatch: pytest.MonkeyPatch, ) -> None: """Test that is_cached correctly handles CLAN_TEST_STORE paths. @@ -237,7 +238,9 @@ def test_caching_works(flake: ClanFlake) -> None: my_flake = Flake(str(flake.path)) with patch.object( - my_flake, "get_from_nix", wraps=my_flake.get_from_nix + my_flake, + "get_from_nix", + wraps=my_flake.get_from_nix, ) as tracked_build: assert tracked_build.call_count == 0 my_flake.select("clanInternals.inventoryClass.inventory.meta") @@ -247,7 +250,8 @@ def test_caching_works(flake: ClanFlake) -> None: def test_cache_is_cached_with_nix_store_dir( - tmp_path: Path, monkeypatch: pytest.MonkeyPatch + tmp_path: Path, + monkeypatch: pytest.MonkeyPatch, ) -> None: """Test that is_cached works correctly when NIX_STORE_DIR is set to match CLAN_TEST_STORE.""" # Create a temporary store @@ -364,10 +368,10 @@ def test_store_reference_helpers() -> None: # Test find_store_references assert find_store_references("/nix/store/abc123-pkg") == ["/nix/store/abc123-pkg"] assert find_store_references("/nix/store/abc123-file.nix:42") == [ - "/nix/store/abc123-file.nix" + "/nix/store/abc123-file.nix", ] assert find_store_references("/nix/store/abc123-src/lib/file.nix:42:10") == [ - "/nix/store/abc123-src" + "/nix/store/abc123-src", ] # Multiple references @@ -385,10 +389,10 @@ def test_store_reference_helpers() -> None: assert is_pure_store_path("/nix/store/abc123def456ghi789jkl012mno345pqr-package") assert is_pure_store_path("/nix/store/abc123def456ghi789jkl012mno345pqr-source") assert not is_pure_store_path( - "/nix/store/abc123def456ghi789jkl012mno345pqr-file.nix:42" + "/nix/store/abc123def456ghi789jkl012mno345pqr-file.nix:42", ) assert not is_pure_store_path( - "/nix/store/abc123def456ghi789jkl012mno345pqr-src/subdir/file.nix" + "/nix/store/abc123def456ghi789jkl012mno345pqr-src/subdir/file.nix", ) assert not is_pure_store_path("/home/user/file") @@ -412,30 +416,30 @@ def test_store_references_with_custom_store_dir( monkeypatch: pytest.MonkeyPatch, ) -> None: """Test store reference detection with custom NIX_STORE_DIR.""" - # Set custom store directory monkeypatch.setenv("NIX_STORE_DIR", "/custom/store") # Test find_store_references with custom dir assert 
find_store_references("/custom/store/abc123-pkg") == [ - "/custom/store/abc123-pkg" + "/custom/store/abc123-pkg", ] assert find_store_references("/custom/store/abc123-file.nix") == [ - "/custom/store/abc123-file.nix" + "/custom/store/abc123-file.nix", ] # Test is_pure_store_path with custom dir assert is_pure_store_path("/custom/store/abc123def456ghi789jkl012mno345pqr-package") assert not is_pure_store_path( - "/custom/store/abc123def456ghi789jkl012mno345pqr-file.nix:42" + "/custom/store/abc123def456ghi789jkl012mno345pqr-file.nix:42", ) assert not is_pure_store_path( - "/nix/store/abc123def456ghi789jkl012mno345pqr-package" + "/nix/store/abc123def456ghi789jkl012mno345pqr-package", ) def test_cache_path_with_line_numbers( - tmp_path: Path, monkeypatch: pytest.MonkeyPatch + tmp_path: Path, + monkeypatch: pytest.MonkeyPatch, ) -> None: """Test that is_cached correctly handles store paths with line numbers appended. diff --git a/pkgs/clan-cli/clan_lib/flash/automount.py b/pkgs/clan-cli/clan_lib/flash/automount.py index 953f9123f..9d0c44a8a 100644 --- a/pkgs/clan-cli/clan_lib/flash/automount.py +++ b/pkgs/clan-cli/clan_lib/flash/automount.py @@ -13,13 +13,13 @@ log = logging.getLogger(__name__) @contextmanager def pause_automounting( - devices: list[Path], machine: Machine, request_graphical: bool = False + devices: list[Path], + machine: Machine, + request_graphical: bool = False, ) -> Generator[None]: - """ - Pause automounting on the device for the duration of this context + """Pause automounting on the device for the duration of this context manager """ - if shutil.which("udevadm") is None: msg = "udev is required to disable automounting" log.warning(msg) diff --git a/pkgs/clan-cli/clan_lib/flash/flash.py b/pkgs/clan-cli/clan_lib/flash/flash.py index a5dda9c91..ec82fddb6 100644 --- a/pkgs/clan-cli/clan_lib/flash/flash.py +++ b/pkgs/clan-cli/clan_lib/flash/flash.py @@ -28,7 +28,7 @@ log = logging.getLogger(__name__) class SystemConfig: keymap: str = field(default="en") language: str = field( - default="en_US.UTF-8" + default="en_US.UTF-8", ) # Leave this default, or implement virtual scrolling for the 400+ options in the UI. ssh_keys_path: list[str] | None = field(default=None) @@ -57,6 +57,7 @@ def run_machine_flash( graphical: bool = False, ) -> None: """Flash a machine with the given configuration. + Args: machine: The Machine instance to flash. mode: The mode to use for flashing (e.g., "install", "reinstall @@ -67,9 +68,11 @@ def run_machine_flash( debug: If True, enable debug mode. extra_args: Additional arguments to pass to the disko-install command. graphical: If True, run the command in graphical mode. + Raises: ClanError: If the language or keymap is invalid, or if there are issues with reading SSH keys, or if disko-install fails. 
+ """ devices = [Path(disk.device) for disk in disks] with pause_automounting(devices, machine, request_graphical=graphical): @@ -98,7 +101,7 @@ def run_machine_flash( raise ClanError(msg) system_config_nix["console"] = {"keyMap": system_config.keymap} system_config_nix["services"] = { - "xserver": {"xkb": {"layout": system_config.keymap}} + "xserver": {"xkb": {"layout": system_config.keymap}}, } if system_config.ssh_keys_path: @@ -110,7 +113,7 @@ def run_machine_flash( msg = f"Cannot read SSH public key file: {key_path}: {e}" raise ClanError(msg) from e system_config_nix["users"] = { - "users": {"root": {"openssh": {"authorizedKeys": {"keys": root_keys}}}} + "users": {"root": {"openssh": {"authorizedKeys": {"keys": root_keys}}}}, } from clan_cli.vars.generator import Generator @@ -141,7 +144,7 @@ def run_machine_flash( "disko_install=$(command -v disko-install);", "exec", *cmd_with_root(['"$disko_install" "$@"'], graphical=graphical), - ] + ], ) disko_install.extend(["bash", "-c", wrapper]) @@ -164,7 +167,7 @@ def run_machine_flash( [ "--system-config", json.dumps(system_config_nix), - ] + ], ) disko_install.extend(["--option", "dry-run", "true"]) disko_install.extend(extra_args) diff --git a/pkgs/clan-cli/clan_lib/flash/list.py b/pkgs/clan-cli/clan_lib/flash/list.py index 5d2fdaf86..c70a561db 100644 --- a/pkgs/clan-cli/clan_lib/flash/list.py +++ b/pkgs/clan-cli/clan_lib/flash/list.py @@ -20,10 +20,13 @@ class FlashOptions(TypedDict): @API.register def get_machine_flash_options() -> FlashOptions: """Retrieve available languages and keymaps for flash configuration. + Returns: FlashOptions: A dictionary containing lists of available languages and keymaps. + Raises: ClanError: If the locale file or keymaps directory does not exist. + """ return {"languages": list_languages(), "keymaps": list_keymaps()} diff --git a/pkgs/clan-cli/clan_lib/git/__init__.py b/pkgs/clan-cli/clan_lib/git/__init__.py index 354d0a19c..ae0bb476d 100644 --- a/pkgs/clan-cli/clan_lib/git/__init__.py +++ b/pkgs/clan-cli/clan_lib/git/__init__.py @@ -51,7 +51,9 @@ def commit_files( def _commit_file_to_git( - repo_dir: Path, file_paths: list[Path], commit_message: str + repo_dir: Path, + file_paths: list[Path], + commit_message: str, ) -> None: """Commit a file to a git repository. @@ -115,6 +117,6 @@ def _commit_file_to_git( run( cmd, RunOpts( - error_msg=f"Failed to commit {file_paths} to git repository {repo_dir}" + error_msg=f"Failed to commit {file_paths} to git repository {repo_dir}", ), ) diff --git a/pkgs/clan-cli/clan_lib/import_utils/__init__.py b/pkgs/clan-cli/clan_lib/import_utils/__init__.py index 1c7724af7..fdebd2774 100644 --- a/pkgs/clan-cli/clan_lib/import_utils/__init__.py +++ b/pkgs/clan-cli/clan_lib/import_utils/__init__.py @@ -37,8 +37,7 @@ def import_with_source[T]( *args: Any, **kwargs: Any, ) -> T: - """ - Import a class from a module and instantiate it with source information. + """Import a class from a module and instantiate it with source information. This function dynamically imports a class and adds source location metadata that can be used for debugging. The instantiated object will have VSCode-clickable @@ -62,6 +61,7 @@ def import_with_source[T]( ... NetworkTechnologyBase ... 
) >>> print(tech) # Outputs: ~/Projects/clan-core/.../tor.py:7 + """ # Import the module module = importlib.import_module(module_name) @@ -95,4 +95,4 @@ def import_with_source[T]( ) # Instantiate the class with source information - return cast(T, cls(source, *args, **kwargs)) + return cast("T", cls(source, *args, **kwargs)) diff --git a/pkgs/clan-cli/clan_lib/import_utils/import_utils_test.py b/pkgs/clan-cli/clan_lib/import_utils/import_utils_test.py index 7a0579212..4c88e1557 100644 --- a/pkgs/clan-cli/clan_lib/import_utils/import_utils_test.py +++ b/pkgs/clan-cli/clan_lib/import_utils/import_utils_test.py @@ -48,7 +48,7 @@ def test_import_with_source(tmp_path: Path) -> None: @contextmanager def connection(self, network: Network) -> Iterator[Network]: yield network - """) + """), ) # Add the temp directory to sys.path @@ -61,7 +61,7 @@ def test_import_with_source(tmp_path: Path) -> None: instance = import_with_source( "test_module.test_tech", "NetworkTechnology", - cast(Any, NetworkTechnologyBase), + cast("Any", NetworkTechnologyBase), ) # Verify the instance is created correctly @@ -125,7 +125,7 @@ def test_import_with_source_with_args() -> None: @contextmanager def connection(self, network: Network) -> Iterator[Network]: yield network - """) + """), ) temp_file = Path(f.name) @@ -145,7 +145,7 @@ def test_import_with_source_with_args() -> None: instance = import_with_source( "temp_module", "NetworkTechnology", - cast(Any, NetworkTechnologyBase), + cast("Any", NetworkTechnologyBase), "extra_value", keyword_arg="keyword_value", ) @@ -165,7 +165,9 @@ def test_import_with_source_module_not_found() -> None: """Test error handling when module is not found.""" with pytest.raises(ModuleNotFoundError): import_with_source( - "non_existent_module", "SomeClass", cast(Any, NetworkTechnologyBase) + "non_existent_module", + "SomeClass", + cast("Any", NetworkTechnologyBase), ) @@ -175,5 +177,5 @@ def test_import_with_source_class_not_found() -> None: import_with_source( "clan_lib.network.network", "NonExistentClass", - cast(Any, NetworkTechnologyBase), + cast("Any", NetworkTechnologyBase), ) diff --git a/pkgs/clan-cli/clan_lib/locked_open/__init__.py b/pkgs/clan-cli/clan_lib/locked_open/__init__.py index 34b1ba44d..afc5459a7 100644 --- a/pkgs/clan-cli/clan_lib/locked_open/__init__.py +++ b/pkgs/clan-cli/clan_lib/locked_open/__init__.py @@ -11,9 +11,7 @@ from clan_lib.jsonrpc import ClanJSONEncoder @contextmanager def locked_open(filename: Path, mode: str = "r") -> Generator: - """ - This is a context manager that provides an advisory write lock on the file specified by `filename` when entering the context, and releases the lock when leaving the context. The lock is acquired using the `fcntl` module's `LOCK_EX` flag, which applies an exclusive write lock to the file. - """ + """This is a context manager that provides an advisory write lock on the file specified by `filename` when entering the context, and releases the lock when leaving the context. 
The lock is acquired using the `fcntl` module's `LOCK_EX` flag, which applies an exclusive write lock to the file.""" with filename.open(mode) as fd: fcntl.flock(fd, fcntl.LOCK_EX) yield fd diff --git a/pkgs/clan-cli/clan_lib/log_manager/__init__.py b/pkgs/clan-cli/clan_lib/log_manager/__init__.py index 9aed55efa..735e60ace 100644 --- a/pkgs/clan-cli/clan_lib/log_manager/__init__.py +++ b/pkgs/clan-cli/clan_lib/log_manager/__init__.py @@ -16,7 +16,7 @@ class LogGroupConfig: name: str # The name of this group level (single directory name) nickname: str | None = None # Optional display name for easier visibility children: dict[str, "LogGroupConfig"] = field( - default_factory=dict + default_factory=dict, ) # Nested child groups def get_display_name(self) -> str: @@ -24,6 +24,7 @@ class LogGroupConfig: Returns: The nickname if available, otherwise the group name. + """ return self.nickname if self.nickname else self.name @@ -35,10 +36,13 @@ class LogGroupConfig: Returns: A new LogGroupConfig instance with the child added. + """ new_children = {**self.children, child.name: child} return LogGroupConfig( - name=self.name, nickname=self.nickname, children=new_children + name=self.name, + nickname=self.nickname, + children=new_children, ) def get_child(self, name: str) -> "LogGroupConfig | None": @@ -49,6 +53,7 @@ class LogGroupConfig: Returns: The child LogGroupConfig if found, None otherwise. + """ return self.children.get(name) @@ -62,6 +67,7 @@ def is_correct_day_format(date_day: str) -> bool: Returns: True if the date_day matches YYYY-MM-DD format, False otherwise. + """ try: datetime.datetime.strptime(date_day, "%Y-%m-%d").replace(tzinfo=datetime.UTC) @@ -85,6 +91,7 @@ class LogFile: Raises: ValueError: If date_day or date_second are not in the correct format. + """ # Validate formats upon initialization. if not is_correct_day_format(self.date_day): @@ -92,7 +99,7 @@ class LogFile: raise ValueError(msg) try: datetime.datetime.strptime(self.date_second, "%H-%M-%S").replace( - tzinfo=datetime.UTC + tzinfo=datetime.UTC, ) except ValueError as ex: msg = f"LogFile.date_second '{self.date_second}' is not in HH-MM-SS format." @@ -104,10 +111,12 @@ class LogFile: Returns: A datetime object constructed from date_day and date_second. + """ # Formats are pre-validated by __post_init__. return datetime.datetime.strptime( - f"{self.date_day} {self.date_second}", "%Y-%m-%d %H-%M-%S" + f"{self.date_day} {self.date_second}", + "%Y-%m-%d %H-%M-%S", ).replace(tzinfo=datetime.UTC) def get_file_path(self) -> Path: @@ -115,6 +124,7 @@ class LogFile: Returns: The complete Path object for this log file including nested directory structure. + """ # Create nested directory structure for hierarchical groups path = self._base_dir / self.date_day @@ -135,6 +145,7 @@ class LogFile: Returns: True if all significant fields are equal, False otherwise. + """ if not isinstance(other, LogFile): return NotImplemented @@ -157,6 +168,7 @@ class LogFile: Returns: True if this instance should be sorted before the other. + """ if not isinstance(other, LogFile): return NotImplemented @@ -186,6 +198,7 @@ class LogDayDir: Raises: ValueError: If date_day is not in YYYY-MM-DD format. + """ if not is_correct_day_format(self.date_day): msg = f"LogDayDir.date_day '{self.date_day}' is not in YYYY-MM-DD format." @@ -197,6 +210,7 @@ class LogDayDir: Returns: A date object constructed from date_day. 
+ """ return ( datetime.datetime.strptime(self.date_day, "%Y-%m-%d") @@ -209,6 +223,7 @@ class LogDayDir: Returns: The Path object for this day's log directory. + """ return self._base_dir / self.date_day @@ -220,6 +235,7 @@ class LogDayDir: Returns: True if date_day and base_dir are equal, False otherwise. + """ if not isinstance(other, LogDayDir): return NotImplemented @@ -235,6 +251,7 @@ class LogDayDir: Returns: True if this instance should be sorted before the other. + """ if not isinstance(other, LogDayDir): return NotImplemented @@ -252,6 +269,7 @@ class LogManager: Attributes: base_dir: The base directory where all log files are stored. root_group_configs: Dictionary of root-level group configurations. + """ base_dir: Path @@ -265,6 +283,7 @@ class LogManager: Returns: A new LogManager instance with the group configuration added. + """ new_configs = {**self.root_group_configs, group_config.name: group_config} return LogManager(base_dir=self.base_dir, root_group_configs=new_configs) @@ -279,6 +298,7 @@ class LogManager: Returns: The LogGroupConfig if found, None otherwise. + """ if not group_path: return None @@ -301,7 +321,10 @@ class LogManager: return current_config def create_log_file( - self, func: Callable | str, op_key: str, group_path: list[str] | None = None + self, + func: Callable | str, + op_key: str, + group_path: list[str] | None = None, ) -> LogFile: """Create a new log file for the given function and operation. @@ -316,6 +339,7 @@ class LogManager: Raises: ValueError: If the group structure is not registered. FileExistsError: If the log file already exists. + """ now_utc = datetime.datetime.now(tz=datetime.UTC) @@ -372,6 +396,7 @@ class LogManager: Returns: True if the group structure is registered, False otherwise. + """ # Special case: allow "default" group without registration if group_path == ["default"]: @@ -397,6 +422,7 @@ class LogManager: Returns: True if the group structure is valid, False otherwise. + """ if not group_path: return False @@ -429,6 +455,7 @@ class LogManager: Returns: A sorted list of LogDayDir instances (newest first). Returns empty list if base directory doesn't exist. + """ if not self.base_dir.exists() or not self.base_dir.is_dir(): return [] @@ -436,18 +463,18 @@ class LogManager: log_day_dirs_list: list[LogDayDir] = [] for day_dir_candidate_path in self.base_dir.iterdir(): if day_dir_candidate_path.is_dir() and is_correct_day_format( - day_dir_candidate_path.name + day_dir_candidate_path.name, ): try: log_day_dirs_list.append( LogDayDir( date_day=day_dir_candidate_path.name, _base_dir=self.base_dir, - ) + ), ) except ValueError: log.warning( - f"Skipping directory with invalid date format '{day_dir_candidate_path.name}'." + f"Skipping directory with invalid date format '{day_dir_candidate_path.name}'.", ) return sorted(log_day_dirs_list) # Sorts using LogDayDir.__lt__ (newest first) @@ -468,6 +495,7 @@ class LogManager: Returns: The LogFile if found, None otherwise. + """ days_to_search: list[LogDayDir] @@ -495,7 +523,10 @@ class LogManager: return None def _find_log_file_in_day( - self, day_dir: LogDayDir, op_key: str, selector: list[str] | None = None + self, + day_dir: LogDayDir, + op_key: str, + selector: list[str] | None = None, ) -> LogFile | None: """Find a log file in a specific day directory. @@ -506,6 +537,7 @@ class LogManager: Returns: The LogFile if found, None otherwise. 
+ """ base_path = day_dir.get_dir_path() @@ -520,15 +552,17 @@ class LogManager: if search_path.exists() and search_path.is_dir(): return self._search_in_path(search_path, op_key, selector) - else: - # Search all groups in this day - if base_path.exists() and base_path.is_dir(): - return self._search_in_path(base_path, op_key, None) + # Search all groups in this day + elif base_path.exists() and base_path.is_dir(): + return self._search_in_path(base_path, op_key, None) return None def _search_in_path( - self, search_path: Path, op_key: str, group_path: list[str] | None + self, + search_path: Path, + op_key: str, + group_path: list[str] | None, ) -> LogFile | None: """Search for log files in a given path. @@ -539,6 +573,7 @@ class LogManager: Returns: The LogFile if found, None otherwise. + """ log_files: list[LogFile] = [] @@ -601,7 +636,9 @@ class LogManager: return None def filter( - self, selector: list[str] | None = None, date_day: str | None = None + self, + selector: list[str] | None = None, + date_day: str | None = None, ) -> list[str]: """Filter and list folders at the specified hierarchical path. @@ -615,6 +652,7 @@ class LogManager: Returns: List of folder names (decoded) at the specified path level. + """ if selector is None: selector = [] diff --git a/pkgs/clan-cli/clan_lib/log_manager/api.py b/pkgs/clan-cli/clan_lib/log_manager/api.py index db51d3b3d..eb2608f36 100644 --- a/pkgs/clan-cli/clan_lib/log_manager/api.py +++ b/pkgs/clan-cli/clan_lib/log_manager/api.py @@ -14,6 +14,7 @@ def list_log_days() -> list[str]: Raises: AssertionError: If LOG_MANAGER_INSTANCE is not initialized. + """ assert LOG_MANAGER_INSTANCE is not None return [day.date_day for day in LOG_MANAGER_INSTANCE.list_log_days()] @@ -21,7 +22,8 @@ def list_log_days() -> list[str]: @API.register def list_log_groups( - selector: list[str] | None, date_day: str | None = None + selector: list[str] | None, + date_day: str | None = None, ) -> list[str]: """List all log groups at the specified hierarchical path. @@ -34,6 +36,7 @@ def list_log_groups( Raises: AssertionError: If LOG_MANAGER_INSTANCE is not initialized. + """ assert LOG_MANAGER_INSTANCE is not None return LOG_MANAGER_INSTANCE.filter(selector, date_day=date_day) @@ -41,7 +44,9 @@ def list_log_groups( @API.register def get_log_file( - id_key: str, selector: list[str] | None = None, date_day: str | None = None + id_key: str, + selector: list[str] | None = None, + date_day: str | None = None, ) -> str: """Get the contents of a specific log file by operation key. @@ -56,11 +61,14 @@ def get_log_file( Raises: ClanError: If the log file is not found. AssertionError: If LOG_MANAGER_INSTANCE is not initialized. + """ assert LOG_MANAGER_INSTANCE is not None log_file = LOG_MANAGER_INSTANCE.get_log_file( - op_key=id_key, selector=selector, date_day=date_day + op_key=id_key, + selector=selector, + date_day=date_day, ) if log_file is None: msg = f"Log file with op_key '{id_key}' not found in selector '{selector}' and date_day '{date_day}'." diff --git a/pkgs/clan-cli/clan_lib/log_manager/example_usage.py b/pkgs/clan-cli/clan_lib/log_manager/example_usage.py index c1aebda3c..aac933c58 100755 --- a/pkgs/clan-cli/clan_lib/log_manager/example_usage.py +++ b/pkgs/clan-cli/clan_lib/log_manager/example_usage.py @@ -1,6 +1,5 @@ #!/usr/bin/env python3 -""" -Simple LogManager example with filter function. +"""Simple LogManager example with filter function. 
This demonstrates: - Dynamic group names with URL encoding @@ -80,7 +79,7 @@ def main() -> None: ) if specific_log: print( - f"5. Found specific log: {specific_log.op_key} in {specific_log.func_name}" + f"5. Found specific log: {specific_log.op_key} in {specific_log.func_name}", ) else: print("5. Specific log not found") diff --git a/pkgs/clan-cli/clan_lib/log_manager/test_log_manager.py b/pkgs/clan-cli/clan_lib/log_manager/test_log_manager.py index e68e9385f..4c2762485 100644 --- a/pkgs/clan-cli/clan_lib/log_manager/test_log_manager.py +++ b/pkgs/clan-cli/clan_lib/log_manager/test_log_manager.py @@ -1,5 +1,4 @@ -""" -Simplified tests for the log manager focusing only on features used by the API. +"""Simplified tests for the log manager focusing only on features used by the API. Tests are based on actual usage patterns from example_usage.py and api.py. """ @@ -150,14 +149,15 @@ class TestLogManagerGroupConfiguration: """Test finding nested group configuration.""" # ["clans", "dynamic_name", "machines"] - should find machines config config = configured_log_manager.find_group_config( - ["clans", "repo1", "machines"] + ["clans", "repo1", "machines"], ) assert config is not None assert config.name == "machines" assert config.nickname == "Machines" def test_find_group_config_nonexistent( - self, configured_log_manager: LogManager + self, + configured_log_manager: LogManager, ) -> None: """Test finding non-existent group configuration.""" config = configured_log_manager.find_group_config(["nonexistent"]) @@ -171,7 +171,8 @@ class TestLogFileCreation: """Test log file creation features used in example_usage.py.""" def test_create_log_file_default_group( - self, configured_log_manager: LogManager + self, + configured_log_manager: LogManager, ) -> None: """Test creating log file with default group.""" log_file = configured_log_manager.create_log_file(example_function, "test_op") @@ -185,7 +186,8 @@ class TestLogFileCreation: assert log_file.get_file_path().exists() def test_create_log_file_with_nested_groups( - self, configured_log_manager: LogManager + self, + configured_log_manager: LogManager, ) -> None: """Test creating log file with nested groups like example_usage.py.""" repos = ["/home/user/Projects/qubasas_clan", "https://github.com/qubasa/myclan"] @@ -210,7 +212,8 @@ class TestLogFileCreation: # Dynamic elements should be URL encoded if they contain special chars def test_create_log_file_unregistered_group_fails( - self, configured_log_manager: LogManager + self, + configured_log_manager: LogManager, ) -> None: """Test that creating log file with unregistered group fails.""" with pytest.raises(ValueError, match="Group structure.*is not valid"): @@ -221,7 +224,8 @@ class TestLogFileCreation: ) def test_create_log_file_invalid_structure_fails( - self, configured_log_manager: LogManager + self, + configured_log_manager: LogManager, ) -> None: """Test that invalid nested structure fails.""" with pytest.raises(ValueError, match="Group structure.*is not valid"): @@ -236,19 +240,23 @@ class TestFilterFunction: """Test filter functionality used in example_usage.py and api.py.""" def test_filter_empty_returns_top_level_groups( - self, configured_log_manager: LogManager + self, + configured_log_manager: LogManager, ) -> None: """Test that empty filter returns top-level groups.""" # Create some log files first configured_log_manager.create_log_file( - run_machine_update, "test_op", ["clans", "repo1", "machines", "machine1"] + run_machine_update, + "test_op", + ["clans", "repo1", "machines", 
"machine1"], ) top_level = configured_log_manager.filter([]) assert "clans" in top_level def test_filter_lists_dynamic_names( - self, configured_log_manager: LogManager + self, + configured_log_manager: LogManager, ) -> None: """Test filtering lists dynamic names like example_usage.py.""" repos = ["repo1", "repo2"] @@ -271,17 +279,20 @@ class TestFilterFunction: if clans_repos: first_repo = clans_repos[0] repo_machines = configured_log_manager.filter( - ["clans", first_repo, "machines"] + ["clans", first_repo, "machines"], ) assert set(repo_machines) == set(machines) def test_filter_with_specific_date( - self, configured_log_manager: LogManager + self, + configured_log_manager: LogManager, ) -> None: """Test filtering with specific date.""" # Create log file log_file = configured_log_manager.create_log_file( - run_machine_update, "test_op", ["clans", "repo1", "machines", "machine1"] + run_machine_update, + "test_op", + ["clans", "repo1", "machines", "machine1"], ) # Filter with the specific date @@ -320,7 +331,8 @@ class TestGetLogFile: assert found_log_file.func_name == "run_machine_update" def test_get_log_file_with_selector( - self, configured_log_manager: LogManager + self, + configured_log_manager: LogManager, ) -> None: """Test getting log file with specific selector like example_usage.py.""" # Create log files in different locations @@ -347,12 +359,15 @@ class TestGetLogFile: """Test getting log file with specific date.""" # Create log file log_file = configured_log_manager.create_log_file( - run_machine_update, "deploy_demo", ["clans", "repo1", "machines", "demo"] + run_machine_update, + "deploy_demo", + ["clans", "repo1", "machines", "demo"], ) # Find it by op_key and date found_log_file = configured_log_manager.get_log_file( - "deploy_demo", date_day=log_file.date_day + "deploy_demo", + date_day=log_file.date_day, ) assert found_log_file is not None assert found_log_file.op_key == "deploy_demo" @@ -362,7 +377,8 @@ class TestGetLogFile: datetime.datetime.now(datetime.UTC) + datetime.timedelta(days=1) ).strftime("%Y-%m-%d") not_found = configured_log_manager.get_log_file( - "deploy_demo", date_day=tomorrow + "deploy_demo", + date_day=tomorrow, ) assert not_found is None @@ -384,10 +400,14 @@ class TestListLogDays: """Test listing log days when logs exist.""" # Create log files configured_log_manager.create_log_file( - run_machine_update, "op1", ["clans", "repo1", "machines", "machine1"] + run_machine_update, + "op1", + ["clans", "repo1", "machines", "machine1"], ) configured_log_manager.create_log_file( - run_machine_update, "op2", ["clans", "repo2", "machines", "machine2"] + run_machine_update, + "op2", + ["clans", "repo2", "machines", "machine2"], ) days = configured_log_manager.list_log_days() @@ -402,7 +422,8 @@ class TestApiCompatibility: """Test that the log manager works with the API functions.""" def test_api_workflow_like_example_usage( - self, configured_log_manager: LogManager + self, + configured_log_manager: LogManager, ) -> None: """Test a complete workflow like example_usage.py and api.py.""" repos = ["/home/user/Projects/qubasas_clan", "https://github.com/qubasa/myclan"] @@ -447,7 +468,8 @@ class TestLogFileSorting: """Test LogFile sorting functionality - newest first is a key feature.""" def test_logfile_comparison_by_datetime( - self, configured_log_manager: LogManager + self, + configured_log_manager: LogManager, ) -> None: """Test that LogFiles are sorted by datetime (newest first).""" from clan_lib.log_manager import LogFile @@ -482,7 +504,8 @@ class 
TestLogFileSorting: assert sorted_files[1] == older_file def test_logfile_comparison_by_date( - self, configured_log_manager: LogManager + self, + configured_log_manager: LogManager, ) -> None: """Test that LogFiles are sorted by date (newer dates first).""" from clan_lib.log_manager import LogFile @@ -516,7 +539,8 @@ class TestLogFileSorting: assert sorted_files[1] == older_date_file def test_logfile_secondary_sort_by_group( - self, configured_log_manager: LogManager + self, + configured_log_manager: LogManager, ) -> None: """Test that LogFiles with same datetime are sorted by group name (alphabetical).""" from clan_lib.log_manager import LogFile @@ -551,7 +575,8 @@ class TestLogFileSorting: assert sorted_files[1] == group_b_file def test_logfile_tertiary_sort_by_func_name( - self, configured_log_manager: LogManager + self, + configured_log_manager: LogManager, ) -> None: """Test that LogFiles with same datetime and group are sorted by func_name (alphabetical).""" from clan_lib.log_manager import LogFile @@ -585,7 +610,8 @@ class TestLogFileSorting: assert sorted_files[1] == func_b_file def test_logfile_quaternary_sort_by_op_key( - self, configured_log_manager: LogManager + self, + configured_log_manager: LogManager, ) -> None: """Test that LogFiles with same datetime, group, and func_name are sorted by op_key (alphabetical).""" from clan_lib.log_manager import LogFile @@ -619,10 +645,10 @@ class TestLogFileSorting: assert sorted_files[1] == op_b_file def test_logfile_complex_sorting_scenario( - self, configured_log_manager: LogManager + self, + configured_log_manager: LogManager, ) -> None: """Test complex sorting with multiple LogFiles demonstrating full sort order.""" - from clan_lib.log_manager import LogFile # Create multiple files with different characteristics @@ -720,7 +746,7 @@ class TestLogFileSorting: ] for i, (exp_op, exp_date, exp_group, exp_func, exp_time) in enumerate( - expected_order + expected_order, ): actual = sorted_files[i] assert actual.op_key == exp_op, ( @@ -740,7 +766,8 @@ class TestLogFileSorting: ) def test_get_log_file_returns_newest_when_multiple_exist( - self, configured_log_manager: LogManager + self, + configured_log_manager: LogManager, ) -> None: """Test that get_log_file returns the newest file when multiple files with same op_key exist in different locations.""" # Create log files with same op_key in different locations (different groups/machines) @@ -771,7 +798,8 @@ class TestLogFileSorting: # When searching with specific selector, should find the specific one specific_log = configured_log_manager.get_log_file( - "deploy_operation", selector=["clans", "repo1", "machines", "machine1"] + "deploy_operation", + selector=["clans", "repo1", "machines", "machine1"], ) assert specific_log is not None assert specific_log.op_key == "deploy_operation" @@ -779,7 +807,8 @@ class TestLogFileSorting: assert "machine1" in specific_log.group def test_list_log_days_sorted_newest_first( - self, configured_log_manager: LogManager + self, + configured_log_manager: LogManager, ) -> None: """Test that list_log_days returns days sorted newest first.""" # Create log files on different days by manipulating the date @@ -817,7 +846,8 @@ class TestURLEncoding: """Test URL encoding for dynamic group names with special characters.""" def test_special_characters_in_dynamic_names( - self, configured_log_manager: LogManager + self, + configured_log_manager: LogManager, ) -> None: """Test that special characters in dynamic names are handled correctly.""" special_repo = 
"/home/user/Projects/my clan" # Contains space diff --git a/pkgs/clan-cli/clan_lib/log_manager/test_url_encoding.py b/pkgs/clan-cli/clan_lib/log_manager/test_url_encoding.py index 033ffcc02..678bd0c60 100644 --- a/pkgs/clan-cli/clan_lib/log_manager/test_url_encoding.py +++ b/pkgs/clan-cli/clan_lib/log_manager/test_url_encoding.py @@ -68,7 +68,9 @@ class TestURLEncoding: group_path = ["clans", dynamic_name, "machines", f"machine-{dynamic_name}"] log_file = log_manager.create_log_file( - sample_function, f"test_{dynamic_name}", group_path + sample_function, + f"test_{dynamic_name}", + group_path, ) # Check that the file was created and encoded names appear in path @@ -78,7 +80,8 @@ class TestURLEncoding: # Verify encoding for both dynamic elements (indices 1 and 3) expected_encoded_repo = urllib.parse.quote(dynamic_name, safe="") expected_encoded_machine = urllib.parse.quote( - f"machine-{dynamic_name}", safe="" + f"machine-{dynamic_name}", + safe="", ) assert expected_encoded_repo in str(file_path) @@ -126,7 +129,9 @@ class TestURLEncoding: group_path = ["clans", dynamic_name, "default"] log_file = log_manager.create_log_file( - sample_function, "unicode_test", group_path + sample_function, + "unicode_test", + group_path, ) file_path = log_file.get_file_path() @@ -153,7 +158,9 @@ class TestURLEncoding: group_path = ["default"] log_file = log_manager.create_log_file( - sample_function, "simple_test", group_path + sample_function, + "simple_test", + group_path, ) file_path = log_file.get_file_path() @@ -177,7 +184,9 @@ class TestURLEncoding: group_path = ["clans", "", "default"] log_file = log_manager.create_log_file( - sample_function, "empty_test", group_path + sample_function, + "empty_test", + group_path, ) file_path = log_file.get_file_path() diff --git a/pkgs/clan-cli/clan_lib/machines/actions.py b/pkgs/clan-cli/clan_lib/machines/actions.py index be95ba0fd..bbc6e7219 100644 --- a/pkgs/clan-cli/clan_lib/machines/actions.py +++ b/pkgs/clan-cli/clan_lib/machines/actions.py @@ -43,11 +43,10 @@ class MachineState(TypedDict): @API.register def list_machines( - flake: Flake, opts: ListOptions | None = None + flake: Flake, + opts: ListOptions | None = None, ) -> dict[str, InventoryMachine]: - """ - List machines of a clan - """ + """List machines of a clan""" inventory_store = InventoryStore(flake=flake) inventory = inventory_store.read() @@ -68,8 +67,7 @@ def list_machines( @API.register def get_machine(flake: Flake, name: str) -> InventoryMachine: - """ - Retrieve a machine's inventory details by name from the given flake. + """Retrieve a machine's inventory details by name from the given flake. Args: flake (Flake): The flake object representing the configuration source. @@ -80,6 +78,7 @@ def get_machine(flake: Flake, name: str) -> InventoryMachine: Raises: ClanError: If the machine with the specified name is not found in the clan + """ inventory_store = InventoryStore(flake=flake) inventory = inventory_store.read() @@ -94,9 +93,7 @@ def get_machine(flake: Flake, name: str) -> InventoryMachine: @API.register def set_machine(machine: Machine, update: InventoryMachine) -> None: - """ - Update the machine information in the inventory. 
- """ + """Update the machine information in the inventory.""" assert machine.name == update.get("name", machine.name), "Machine name mismatch" inventory_store = InventoryStore(flake=machine.flake) @@ -104,7 +101,8 @@ def set_machine(machine: Machine, update: InventoryMachine) -> None: set_value_by_path(inventory, f"machines.{machine.name}", update) inventory_store.write( - inventory, message=f"Update information about machine {machine.name}" + inventory, + message=f"Update information about machine {machine.name}", ) @@ -116,8 +114,7 @@ class FieldSchema(TypedDict): @API.register def get_machine_fields_schema(machine: Machine) -> dict[str, FieldSchema]: - """ - Get attributes for each field of the machine. + """Get attributes for each field of the machine. This function checks which fields of the 'machine' resource are readonly and provides a reason if so. @@ -126,8 +123,8 @@ def get_machine_fields_schema(machine: Machine) -> dict[str, FieldSchema]: Returns: dict[str, FieldSchema]: A map from field-names to { 'readonly' (bool) and 'reason' (str or None ) } - """ + """ inventory_store = InventoryStore(machine.flake) write_info = inventory_store.get_writeability_of(f"machines.{machine.name}") @@ -140,10 +137,12 @@ def get_machine_fields_schema(machine: Machine) -> dict[str, FieldSchema]: # TODO: handle this more generically. I.e via json schema persisted_data = inventory_store._get_persisted() # noqa: SLF001 - inventory = inventory_store.read() # + inventory = inventory_store.read() all_tags = get_value_by_path(inventory, f"machines.{machine.name}.tags", []) persisted_tags = get_value_by_path( - persisted_data, f"machines.{machine.name}.tags", [] + persisted_data, + f"machines.{machine.name}.tags", + [], ) nix_tags = list_difference(all_tags, persisted_tags) @@ -153,7 +152,8 @@ def get_machine_fields_schema(machine: Machine) -> dict[str, FieldSchema]: True if field in protected_fields else not is_writeable_key( - f"machines.{machine.name}.{field}", write_info + f"machines.{machine.name}.{field}", + write_info, ) ), # TODO: Provide a meaningful reason @@ -166,11 +166,11 @@ def get_machine_fields_schema(machine: Machine) -> dict[str, FieldSchema]: @API.register def list_machine_state(flake: Flake) -> dict[str, MachineState]: - """ - Retrieve the current state of all machines in the clan. + """Retrieve the current state of all machines in the clan. Args: flake (Flake): The flake object representing the configuration source. + """ inventory_store = InventoryStore(flake=flake) inventory = inventory_store.read() @@ -182,7 +182,7 @@ def list_machine_state(flake: Flake) -> dict[str, MachineState]: machine_name: MachineState( status=MachineStatus.OFFLINE if get_value_by_path(machine, "installedAt", None) - else MachineStatus.NOT_INSTALLED + else MachineStatus.NOT_INSTALLED, ) for machine_name, machine in machines.items() } @@ -190,11 +190,11 @@ def list_machine_state(flake: Flake) -> dict[str, MachineState]: @API.register def get_machine_state(machine: Machine) -> MachineState: - """ - Retrieve the current state of the machine. + """Retrieve the current state of the machine. Args: machine (Machine): The machine object for which we want to retrieve the latest state. 
+ """ inventory_store = InventoryStore(flake=machine.flake) inventory = inventory_store.read() @@ -204,5 +204,5 @@ def get_machine_state(machine: Machine) -> MachineState: return MachineState( status=MachineStatus.OFFLINE if get_value_by_path(inventory, f"machines.{machine.name}.installedAt", None) - else MachineStatus.NOT_INSTALLED + else MachineStatus.NOT_INSTALLED, ) diff --git a/pkgs/clan-cli/clan_lib/machines/actions_test.py b/pkgs/clan-cli/clan_lib/machines/actions_test.py index 9e049df16..31def6fcc 100644 --- a/pkgs/clan-cli/clan_lib/machines/actions_test.py +++ b/pkgs/clan-cli/clan_lib/machines/actions_test.py @@ -29,9 +29,9 @@ from .actions import ( def test_list_nixos_machines(clan_flake: Callable[..., Flake]) -> None: clan_config: Clan = { "machines": { - "jon": cast(Unknown, {}), # Nixos Modules are not type checkable - "sara": cast(Unknown, {}), # Nixos Modules are not type checkable - } + "jon": cast("Unknown", {}), # Nixos Modules are not type checkable + "sara": cast("Unknown", {}), # Nixos Modules are not type checkable + }, } flake = clan_flake(clan_config) @@ -49,7 +49,7 @@ def test_list_inventory_machines(clan_flake: Callable[..., Flake]) -> None: "jon": {}, "sara": {}, }, - } + }, }, # Attention: This is a raw Nix expression, which is not type-checked in python # Use with care! @@ -77,7 +77,7 @@ def test_set_machine_no_op(clan_flake: Callable[..., Flake]) -> None: "jon": {}, "sara": {}, }, - } + }, }, ) @@ -97,7 +97,8 @@ def test_set_machine_no_op(clan_flake: Callable[..., Flake]) -> None: # This is a bit internal - we want to make sure the write is called # with only the changed value, so we don't persist the whole machine mock_write.assert_called_once_with( - {"machines": {"jon": {"machineClass": "darwin"}}}, post_write=ANY + {"machines": {"jon": {"machineClass": "darwin"}}}, + post_write=ANY, ) @@ -119,7 +120,7 @@ def test_set_machine_fully_defined_in_nix(clan_flake: Callable[..., Flake]) -> N "tags": ["server", "backup"], }, }, - } + }, }, ) @@ -143,14 +144,13 @@ def test_set_machine_fully_defined_in_nix(clan_flake: Callable[..., Flake]) -> N @pytest.mark.with_core def test_set_machine_manage_tags(clan_flake: Callable[..., Flake]) -> None: """Test adding/removing tags on a machine with validation of immutable base tags.""" - flake = clan_flake( clan={ "inventory": { "machines": { "jon": {"tags": ["nix1", "nix2"]}, }, - } + }, }, ) @@ -182,7 +182,7 @@ def test_set_machine_manage_tags(clan_flake: Callable[..., Flake]) -> None: set_jon(invalid_tags) assert "Key 'machines.jon.tags' doesn't contain items ['nix1', 'nix2']" in str( - exc_info.value + exc_info.value, ) @@ -199,7 +199,7 @@ def test_get_machine_writeability(clan_flake: Callable[..., Flake]) -> None: "tags": ["nix1"], # Static list is not partially writeable }, }, - } + }, }, ) @@ -251,7 +251,7 @@ def test_machine_state(clan_flake: Callable[..., Flake]) -> None: "sara": {"installedAt": yesterday}, "bob": {"installedAt": last_week}, }, - } + }, }, ) @@ -262,11 +262,11 @@ def test_machine_state(clan_flake: Callable[..., Flake]) -> None: } assert get_machine_state(Machine("jon", flake)) == MachineState( - status=MachineStatus.NOT_INSTALLED + status=MachineStatus.NOT_INSTALLED, ) assert get_machine_state(Machine("sara", flake)) == MachineState( - status=MachineStatus.OFFLINE + status=MachineStatus.OFFLINE, ) assert get_machine_state(Machine("bob", flake)) == MachineState( - status=MachineStatus.OFFLINE + status=MachineStatus.OFFLINE, ) diff --git a/pkgs/clan-cli/clan_lib/machines/delete.py 
b/pkgs/clan-cli/clan_lib/machines/delete.py index 14d40a925..86eb30266 100644 --- a/pkgs/clan-cli/clan_lib/machines/delete.py +++ b/pkgs/clan-cli/clan_lib/machines/delete.py @@ -20,11 +20,14 @@ log = logging.getLogger(__name__) @API.register def delete_machine(machine: Machine) -> None: """Delete a machine from the clan's inventory and remove its associated files. + Args: machine: The Machine instance to be deleted. + Raises: ClanError: If the machine does not exist in the inventory or if there are issues with removing its files. + """ inventory_store = InventoryStore(machine.flake) try: @@ -37,7 +40,7 @@ def delete_machine(machine: Machine) -> None: # personal clan ended up in the inventory for some reason, so I think # it makes sense to eat the exception here. log.warning( - f"{machine.name} was missing or already deleted from the machines inventory: {exc}" + f"{machine.name} was missing or already deleted from the machines inventory: {exc}", ) changed_paths: list[Path] = [] diff --git a/pkgs/clan-cli/clan_lib/machines/hardware.py b/pkgs/clan-cli/clan_lib/machines/hardware.py index 93e8bb363..61f6cc632 100644 --- a/pkgs/clan-cli/clan_lib/machines/hardware.py +++ b/pkgs/clan-cli/clan_lib/machines/hardware.py @@ -50,7 +50,7 @@ def get_machine_target_platform(machine: Machine) -> str | None: "--apply", "machine: { inherit (machine.pkgs) system; }", "--json", - ] + ], ) proc = run(cmd, RunOpts(prefix=machine.name)) res = proc.stdout.strip() @@ -68,13 +68,12 @@ class HardwareGenerateOptions: @API.register def run_machine_hardware_info( - opts: HardwareGenerateOptions, target_host: Remote + opts: HardwareGenerateOptions, + target_host: Remote, ) -> HardwareConfig: - """ - Generate hardware information for a machine + """Generate hardware information for a machine and place the resulting *.nix file in the machine's directory. """ - machine = opts.machine hw_file = opts.backend.config_path(opts.machine) @@ -142,11 +141,11 @@ def run_machine_hardware_info( def get_machine_hardware_config(machine: Machine) -> HardwareConfig: - """ - Detect and return the full hardware configuration for the given machine. + """Detect and return the full hardware configuration for the given machine. Returns: HardwareConfig: Structured hardware information, or None if unavailable. + """ return HardwareConfig.detect_type(machine) @@ -158,9 +157,7 @@ class MachineHardwareBrief(TypedDict): @API.register def get_machine_hardware_summary(machine: Machine) -> MachineHardwareBrief: - """ - Return a high-level summary of hardware config and platform type. - """ + """Return a high-level summary of hardware config and platform type.""" return { "hardware_config": get_machine_hardware_config(machine), "platform": get_machine_target_platform(machine), diff --git a/pkgs/clan-cli/clan_lib/machines/install.py b/pkgs/clan-cli/clan_lib/machines/install.py index 7b3d90ebc..b67014bb0 100644 --- a/pkgs/clan-cli/clan_lib/machines/install.py +++ b/pkgs/clan-cli/clan_lib/machines/install.py @@ -42,7 +42,7 @@ def notify_install_step(current: Step) -> None: "data": None, # MUST be set the to api function name, while technically you can set any origin, this is a bad idea. "origin": "run_machine_install", - } + }, ) @@ -62,14 +62,17 @@ class InstallOptions: @API.register def run_machine_install(opts: InstallOptions, target_host: Remote) -> None: """Install a machine using nixos-anywhere. 
+ Args: opts: InstallOptions containing the machine to install, kexec option, debug mode, no-reboot option, phases, build-on option, hardware config update, password, identity file, and use_tor flag. target_host: Remote object representing the target host for installation. + Raises: ClanError: If the machine is not found in the inventory or if there are issues with generating facts or variables. + """ machine = opts.machine @@ -86,7 +89,7 @@ def run_machine_install(opts: InstallOptions, target_host: Remote) -> None: f"clanInternals.machines.{system}.{machine_name}.config.clan.core.vars.generators.*.files.*.{{secret,deploy,owner,group,mode,neededFor}}", f"clanInternals.machines.{system}.{machine_name}.config.clan.core.vars.settings.secretModule", f"clanInternals.machines.{system}.{machine_name}.config.clan.core.vars.settings.publicModule", - ] + ], ) # Notify the UI about what we are doing @@ -106,13 +109,17 @@ def run_machine_install(opts: InstallOptions, target_host: Remote) -> None: notify_install_step("upload-secrets") machine.secret_facts_store.upload(upload_dir) machine.secret_vars_store.populate_dir( - machine.name, upload_dir, phases=["activation", "users", "services"] + machine.name, + upload_dir, + phases=["activation", "users", "services"], ) partitioning_secrets = base_directory / "partitioning_secrets" partitioning_secrets.mkdir(parents=True) machine.secret_vars_store.populate_dir( - machine.name, partitioning_secrets, phases=["partitioning"] + machine.name, + partitioning_secrets, + phases=["partitioning"], ) if target_host.password: @@ -133,10 +140,10 @@ def run_machine_install(opts: InstallOptions, target_host: Remote) -> None: "--disk-encryption-keys", str( "/run/partitioning-secrets" - / path.relative_to(partitioning_secrets) + / path.relative_to(partitioning_secrets), ), str(path), - ] + ], ) if opts.no_reboot: @@ -151,7 +158,7 @@ def run_machine_install(opts: InstallOptions, target_host: Remote) -> None: "--generate-hardware-config", str(opts.update_hardware_config.value), str(opts.update_hardware_config.config_path(machine)), - ] + ], ) if target_host.password: @@ -239,5 +246,6 @@ def run_machine_install(opts: InstallOptions, target_host: Remote) -> None: int(time()), ) inventory_store.write( - inventory, f"Installed {machine.name} at {target_host.target}" + inventory, + f"Installed {machine.name} at {target_host.target}", ) diff --git a/pkgs/clan-cli/clan_lib/machines/list.py b/pkgs/clan-cli/clan_lib/machines/list.py index 333b71141..b16768d6c 100644 --- a/pkgs/clan-cli/clan_lib/machines/list.py +++ b/pkgs/clan-cli/clan_lib/machines/list.py @@ -17,7 +17,8 @@ log = logging.getLogger(__name__) def instantiate_inventory_to_machines( - flake: Flake, machines: dict[str, InventoryMachine] + flake: Flake, + machines: dict[str, InventoryMachine], ) -> dict[str, Machine]: return { name: Machine.from_inventory(name, flake, _inventory_machine) @@ -26,9 +27,7 @@ def instantiate_inventory_to_machines( def list_full_machines(flake: Flake) -> dict[str, Machine]: - """ - Like `list_machines`, but returns a full 'machine' instance for each machine. - """ + """Like `list_machines`, but returns a full 'machine' instance for each machine.""" machines = list_machines(flake) return instantiate_inventory_to_machines(flake, machines) @@ -59,14 +58,18 @@ def extract_header(c: str) -> str: def get_machine_details(machine: Machine) -> MachineDetails: """Retrieve detailed information about a machine, including its inventory, hardware configuration, and disk schema if available. 
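A small sketch of the hardware helpers above (illustrative, not part of this patch; the machine name and flake path are placeholders):

    from clan_lib.flake import Flake
    from clan_lib.machines.hardware import get_machine_hardware_summary
    from clan_lib.machines.machines import Machine

    brief = get_machine_hardware_summary(Machine("jon", Flake("/path/to/clan")))
    print(brief["hardware_config"])  # result of HardwareConfig.detect_type(...)
    print(brief["platform"])         # e.g. "x86_64-linux", or None if undetectable
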
+ Args: machine (Machine): The machine instance for which details are to be retrieved. + Returns: MachineDetails: An instance containing the machine's inventory, hardware configuration, and disk schema. + Raises: ClanError: If the machine's inventory cannot be found or if there are issues with the hardware configuration or disk schema extraction. + """ machine_inv = get_machine(machine.flake, machine.name) hw_config = HardwareConfig.detect_type(machine) diff --git a/pkgs/clan-cli/clan_lib/machines/machines.py b/pkgs/clan-cli/clan_lib/machines/machines.py index 57b40e811..d6f90f11b 100644 --- a/pkgs/clan-cli/clan_lib/machines/machines.py +++ b/pkgs/clan-cli/clan_lib/machines/machines.py @@ -70,7 +70,7 @@ class Machine: def _class_(self) -> str: try: return self.flake.select( - f'clanInternals.inventoryClass.inventory.machines."{self.name}".machineClass' + f'clanInternals.inventoryClass.inventory.machines."{self.name}".machineClass', ) except ClanSelectError: return "nixos" @@ -78,7 +78,7 @@ class Machine: @property def system(self) -> str: return self.flake.select( - f'{self._class_}Configurations."{self.name}".pkgs.hostPlatform.system' + f'{self._class_}Configurations."{self.name}".pkgs.hostPlatform.system', ) @cached_property @@ -127,8 +127,7 @@ class Machine: return remote def build_host(self) -> Remote | None: - """ - The host where the machine is built and deployed from. + """The host where the machine is built and deployed from. Can be the same as the target host. """ remote = get_machine_host(self.name, self.flake, field="buildHost") @@ -143,8 +142,7 @@ class Machine: self, attr: str, ) -> Any: - """ - Select a nix attribute of the machine + """Select a nix attribute of the machine @attr: the attribute to get """ return self.flake.select_machine(self.name, attr) @@ -158,11 +156,11 @@ class RemoteSource: @API.register def get_machine_host( - name: str, flake: Flake, field: Literal["targetHost", "buildHost"] + name: str, + flake: Flake, + field: Literal["targetHost", "buildHost"], ) -> RemoteSource | None: - """ - Get the build or target host for a machine. - """ + """Get the build or target host for a machine.""" machine = Machine(name=name, flake=flake) inv_machine = machine.get_inv_machine() @@ -171,7 +169,7 @@ def get_machine_host( if host_str is None: machine.debug( - f"`inventory.machines.{machine.name}.deploy.{field}` is not set — falling back to `clan.core.networking.{field}`. See: https://docs.clan.lol/guides/target-host" + f"`inventory.machines.{machine.name}.deploy.{field}` is not set — falling back to `clan.core.networking.{field}`. 
See: https://docs.clan.lol/guides/target-host", ) host_str = machine.select(f'config.clan.core.networking."{field}"') diff --git a/pkgs/clan-cli/clan_lib/machines/morph.py b/pkgs/clan-cli/clan_lib/machines/morph.py index c093437ef..009b0afb5 100644 --- a/pkgs/clan-cli/clan_lib/machines/morph.py +++ b/pkgs/clan-cli/clan_lib/machines/morph.py @@ -40,7 +40,10 @@ def random_hostname() -> str: def morph_machine( - flake: Flake, template: str, ask_confirmation: bool, name: str | None = None + flake: Flake, + template: str, + ask_confirmation: bool, + name: str | None = None, ) -> None: cmd = nix_command( [ @@ -48,11 +51,12 @@ def morph_machine( "archive", "--json", f"{flake}", - ] + ], ) archive_json = run( - cmd, RunOpts(error_msg="Failed to archive flake for morphing") + cmd, + RunOpts(error_msg="Failed to archive flake for morphing"), ).stdout.rstrip() archive_path = json.loads(archive_json)["path"] @@ -96,8 +100,8 @@ def morph_machine( Path(f"{machine_dir}/facter.json").write_text('{"system": "x86_64-linux"}') result_path = run( nix_build( - [f"{flakedir}#nixosConfigurations.{name}.config.system.build.toplevel"] - ) + [f"{flakedir}#nixosConfigurations.{name}.config.system.build.toplevel"], + ), ).stdout.rstrip() ropts = RunOpts(log=Log.BOTH) @@ -115,11 +119,11 @@ def morph_machine( if ask_confirmation: log.warning("ARE YOU SURE YOU WANT TO DO THIS?") log.warning( - "You should have read and understood all of the above and know what you are doing." + "You should have read and understood all of the above and know what you are doing.", ) ask = input( - f"Do you really want convert this machine into {name}? If to continue, type in the new machine name: " + f"Do you really want convert this machine into {name}? If to continue, type in the new machine name: ", ) if ask != name: return diff --git a/pkgs/clan-cli/clan_lib/machines/suggestions.py b/pkgs/clan-cli/clan_lib/machines/suggestions.py index 81be38156..a5197b9b0 100644 --- a/pkgs/clan-cli/clan_lib/machines/suggestions.py +++ b/pkgs/clan-cli/clan_lib/machines/suggestions.py @@ -3,9 +3,7 @@ from clan_lib.flake import Flake def _levenshtein_distance(s1: str, s2: str) -> int: - """ - Calculate the Levenshtein distance between two strings. 
- """ + """Calculate the Levenshtein distance between two strings.""" if len(s1) < len(s2): return _levenshtein_distance(s2, s1) @@ -26,7 +24,9 @@ def _levenshtein_distance(s1: str, s2: str) -> int: def _suggest_similar_names( - target: str, candidates: list[str], max_suggestions: int = 3 + target: str, + candidates: list[str], + max_suggestions: int = 3, ) -> list[str]: if not candidates: return [] @@ -49,8 +49,7 @@ def get_available_machines(flake: Flake) -> list[str]: def validate_machine_names(machine_names: list[str], flake: Flake) -> list[str]: - """ - Returns a list of valid machine names + """Returns a list of valid machine names that are guaranteed to exist in the referenced clan """ if not machine_names: diff --git a/pkgs/clan-cli/clan_lib/machines/update.py b/pkgs/clan-cli/clan_lib/machines/update.py index 099ddf749..a73cbf6c8 100644 --- a/pkgs/clan-cli/clan_lib/machines/update.py +++ b/pkgs/clan-cli/clan_lib/machines/update.py @@ -68,7 +68,7 @@ def upload_sources(machine: Machine, ssh: Host, upload_inputs: bool) -> str: remote_url, "--no-check-sigs", path, - ] + ], ) run( cmd, @@ -98,7 +98,7 @@ def upload_sources(machine: Machine, ssh: Host, upload_inputs: bool) -> str: remote_url, "--json", flake_url, - ] + ], ) proc = run( cmd, @@ -125,16 +125,18 @@ def run_machine_update( upload_inputs: bool = False, ) -> None: """Update an existing machine using nixos-rebuild or darwin-rebuild. + Args: machine: The Machine instance to deploy. target_host: Remote object representing the target host for deployment. build_host: Optional Remote object representing the build host. upload_inputs: Whether to upload flake inputs from the local. + Raises: ClanError: If the machine is not found in the inventory or if there are issues with generating facts or variables. - """ + """ with ExitStack() as stack: _target_host: Host = stack.enter_context(target_host.host_connection()) # type: ignore _build_host: Host @@ -227,14 +229,14 @@ def run_machine_update( raise ClanError(msg) try: is_mobile = machine.select( - "config.system.clan.deployment.nixosMobileWorkaround" + "config.system.clan.deployment.nixosMobileWorkaround", ) except Exception: is_mobile = False # if the machine is mobile, we retry to deploy with the mobile workaround method if is_mobile: machine.info( - "Mobile machine detected, applying workaround deployment method" + "Mobile machine detected, applying workaround deployment method", ) ret = _build_host.run( ["nixos--rebuild", "test", *nix_options] if is_mobile else switch_cmd, diff --git a/pkgs/clan-cli/clan_lib/metrics/telegraf.py b/pkgs/clan-cli/clan_lib/metrics/telegraf.py index 97f59b301..4272752e0 100644 --- a/pkgs/clan-cli/clan_lib/metrics/telegraf.py +++ b/pkgs/clan-cli/clan_lib/metrics/telegraf.py @@ -26,13 +26,15 @@ def get_metrics( target_host: Host, ) -> Iterator[MetricSample]: """Fetch Prometheus metrics from telegraf and return them as streaming metrics. + Args: machine: The Machine instance to check. target_host: Remote instance representing the target host. + Returns: Iterator[dict[str, Any]]: An iterator yielding parsed metric dictionaries line by line. 
- """ + """ # Example: fetch Prometheus metrics with basic auth url = f"http://{target_host.address}:9990" username = "prometheus" @@ -59,7 +61,7 @@ def get_metrics( line_str = line.decode("utf-8").strip() if line_str: try: - yield cast(MetricSample, json.loads(line_str)) + yield cast("MetricSample", json.loads(line_str)) except json.JSONDecodeError: log.warning(f"Skipping invalid JSON line: {line_str}") continue diff --git a/pkgs/clan-cli/clan_lib/metrics/version.py b/pkgs/clan-cli/clan_lib/metrics/version.py index 151a53087..bc39747f0 100644 --- a/pkgs/clan-cli/clan_lib/metrics/version.py +++ b/pkgs/clan-cli/clan_lib/metrics/version.py @@ -21,10 +21,10 @@ class NixOSSystems: def get_nixos_systems( - machine: Machine, target_host: Remote | LocalHost + machine: Machine, + target_host: Remote | LocalHost, ) -> NixOSSystems | None: """Get the nixos systems from the target host.""" - parsed_metrics = get_metrics(machine, target_host) for metric in parsed_metrics: @@ -44,13 +44,15 @@ def check_machine_up_to_date( target_host: Remote | LocalHost, ) -> bool: """Check if a machine needs an update. + Args: machine: The Machine instance to check. target_host: Optional Remote or LocalHost instance representing the target host. + Returns: bool: True if the machine needs an update, False otherwise. - """ + """ nixos_systems = get_nixos_systems(machine, target_host) if nixos_systems is None: @@ -61,14 +63,14 @@ def check_machine_up_to_date( git_out_path = nix_eval( [ - f"{machine.flake}#nixosConfigurations.'{machine.name}'.config.system.build.toplevel.outPath" - ] + f"{machine.flake}#nixosConfigurations.'{machine.name}'.config.system.build.toplevel.outPath", + ], ) log.debug( f"Checking if {machine.name} needs an update:\n" f"Machine outPath: {nixos_systems.current_system}\n" - f"Git outPath : {git_out_path}\n" + f"Git outPath : {git_out_path}\n", ) return git_out_path != nixos_systems.current_system diff --git a/pkgs/clan-cli/clan_lib/network/check.py b/pkgs/clan-cli/clan_lib/network/check.py index 768a21b0a..3b7bf40db 100644 --- a/pkgs/clan-cli/clan_lib/network/check.py +++ b/pkgs/clan-cli/clan_lib/network/check.py @@ -16,9 +16,11 @@ class ConnectionOptions: @API.register def check_machine_ssh_login( - remote: Remote, opts: ConnectionOptions | None = None + remote: Remote, + opts: ConnectionOptions | None = None, ) -> None: """Checks if a remote machine is reachable via SSH by attempting to run a simple command. + Args: remote (Remote): The remote host to check for SSH login. opts (ConnectionOptions, optional): Connection options such as timeout. @@ -29,8 +31,10 @@ def check_machine_ssh_login( print("SSH login successful") else: print(f"SSH login failed: {result.reason}") + Raises: ClanError: If the SSH login fails. + """ if opts is None: opts = ConnectionOptions() @@ -54,15 +58,17 @@ def check_machine_ssh_login( @API.register def check_machine_ssh_reachable( - remote: Remote, opts: ConnectionOptions | None = None + remote: Remote, + opts: ConnectionOptions | None = None, ) -> None: - """ - Checks if a remote machine is reachable via SSH by attempting to open a TCP connection + """Checks if a remote machine is reachable via SSH by attempting to open a TCP connection to the specified address and port. + Args: remote (Remote): The remote host to check for SSH reachability. opts (ConnectionOptions, optional): Connection options such as timeout. If not provided, default values are used. 
+ Returns: CheckResult: An object indicating whether the SSH port is reachable (`ok=True`) or not (`ok=False`), and a reason if the check failed. @@ -71,6 +77,7 @@ def check_machine_ssh_reachable( if result.ok: print("SSH port is reachable") print(f"SSH port is not reachable: {result.reason}") + """ if opts is None: opts = ConnectionOptions() @@ -90,7 +97,7 @@ def check_machine_ssh_reachable( [ "-o", f"ProxyCommand=nc -X 5 -x localhost:{remote.socks_port} %h %p", - ] + ], ) cmd.extend( @@ -109,7 +116,7 @@ def check_machine_ssh_reachable( str(remote.port or 22), f"dummy@{remote.address.strip()}", "true", - ] + ], ) try: diff --git a/pkgs/clan-cli/clan_lib/network/network.py b/pkgs/clan-cli/clan_lib/network/network.py index cd685033f..ca1acdaba 100644 --- a/pkgs/clan-cli/clan_lib/network/network.py +++ b/pkgs/clan-cli/clan_lib/network/network.py @@ -108,7 +108,7 @@ def networks_from_flake(flake: Flake) -> dict[str, Network]: flake.precache( [ "clan.?exports.instances.*.networking", - ] + ], ) networks: dict[str, Network] = {} networks_ = flake.select("clan.?exports.instances.*.networking") @@ -123,7 +123,9 @@ def networks_from_flake(flake: Flake) -> dict[str, Network]: peers: dict[str, Peer] = {} for _peer in network["peers"].values(): peers[_peer["name"]] = Peer( - name=_peer["name"], _host=_peer["host"], flake=flake + name=_peer["name"], + _host=_peer["host"], + flake=flake, ) networks[network_name] = Network( peers=peers, @@ -135,8 +137,7 @@ def networks_from_flake(flake: Flake) -> dict[str, Network]: @contextmanager def get_best_remote(machine: "Machine") -> Iterator["Remote"]: - """ - Context manager that yields the best remote connection for a machine following this priority: + """Context manager that yields the best remote connection for a machine following this priority: 1. If machine has targetHost in inventory, return a direct connection 2. Return the highest priority network where machine is reachable 3. 
If no network works, try to get targetHost from machine nixos config @@ -149,8 +150,8 @@ def get_best_remote(machine: "Machine") -> Iterator["Remote"]: Raises: ClanError: If no connection method works - """ + """ # Step 1: Check if targetHost is set in inventory inv_machine = machine.get_inv_machine() target_host = inv_machine.get("deploy", {}).get("targetHost") @@ -182,7 +183,7 @@ def get_best_remote(machine: "Machine") -> Iterator["Remote"]: ping_time = network.ping(machine.name) if ping_time is not None: log.info( - f"Machine {machine.name} reachable via {network_name} network" + f"Machine {machine.name} reachable via {network_name} network", ) yield network.remote(machine.name) return @@ -195,13 +196,13 @@ def get_best_remote(machine: "Machine") -> Iterator["Remote"]: ping_time = connected_network.ping(machine.name) if ping_time is not None: log.info( - f"Machine {machine.name} reachable via {network_name} network after connection" + f"Machine {machine.name} reachable via {network_name} network after connection", ) yield connected_network.remote(machine.name) return except Exception as e: log.debug( - f"Failed to establish connection to {machine.name} via {network_name}: {e}" + f"Failed to establish connection to {machine.name} via {network_name}: {e}", ) except Exception as e: log.debug(f"Failed to use networking modules to determine machines remote: {e}") @@ -211,18 +212,19 @@ def get_best_remote(machine: "Machine") -> Iterator["Remote"]: target_host = machine.select('config.clan.core.networking."targetHost"') if target_host: log.debug( - f"Using targetHost from machine config for {machine.name}: {target_host}" + f"Using targetHost from machine config for {machine.name}: {target_host}", ) # Check if reachable try: remote = Remote.from_ssh_uri( - machine_name=machine.name, address=target_host + machine_name=machine.name, + address=target_host, ) yield remote return except Exception as e: log.debug( - f"Machine config targetHost not reachable for {machine.name}: {e}" + f"Machine config targetHost not reachable for {machine.name}: {e}", ) except Exception as e: log.debug(f"Could not get targetHost from machine config: {e}") @@ -247,10 +249,10 @@ def get_network_overview(networks: dict[str, Network]) -> dict: for peer_name in network.peers: try: result[network_name]["peers"][peer_name] = network.ping( - peer_name + peer_name, ) except ClanError: log.warning( - f"getting host for machine: {peer_name} in network: {network_name} failed" + f"getting host for machine: {peer_name} in network: {network_name} failed", ) return result diff --git a/pkgs/clan-cli/clan_lib/network/network_test.py b/pkgs/clan-cli/clan_lib/network/network_test.py index e9cc4834e..057aea62a 100644 --- a/pkgs/clan-cli/clan_lib/network/network_test.py +++ b/pkgs/clan-cli/clan_lib/network/network_test.py @@ -37,7 +37,7 @@ def test_networks_from_flake(mock_get_machine_var: MagicMock) -> None: "machine": "machine1", "generator": "wireguard", "file": "address", - } + }, }, }, "machine2": { @@ -47,7 +47,7 @@ def test_networks_from_flake(mock_get_machine_var: MagicMock) -> None: "machine": "machine2", "generator": "wireguard", "file": "address", - } + }, }, }, }, @@ -68,7 +68,7 @@ def test_networks_from_flake(mock_get_machine_var: MagicMock) -> None: "module": "clan_lib.network.direct", "priority": 500, }, - } + }, } # Mock the select method diff --git a/pkgs/clan-cli/clan_lib/network/qr_code.py b/pkgs/clan-cli/clan_lib/network/qr_code.py index c8b77fa84..44fa71150 100644 --- a/pkgs/clan-cli/clan_lib/network/qr_code.py +++ 
b/pkgs/clan-cli/clan_lib/network/qr_code.py @@ -33,7 +33,7 @@ class QRCodeData: try: log.debug(f"Establishing connection via {address}") with address.network.module.connection( - address.network + address.network, ) as connected_network: ping_time = connected_network.module.ping(address.remote) if ping_time is not None: @@ -44,8 +44,7 @@ class QRCodeData: def read_qr_json(qr_data: dict[str, Any], flake: Flake) -> QRCodeData: - """ - Parse QR code JSON contents and output a dict of networks with remotes. + """Parse QR code JSON contents and output a dict of networks with remotes. Args: qr_data: JSON data from QR code containing network information @@ -72,6 +71,7 @@ def read_qr_json(qr_data: dict[str, Any], flake: Flake) -> QRCodeData: "remote": Remote(...) } } + """ addresses: list[RemoteWithNetwork] = [] @@ -123,8 +123,7 @@ def read_qr_json(qr_data: dict[str, Any], flake: Flake) -> QRCodeData: def read_qr_image(image_path: Path) -> dict[str, Any]: - """ - Parse a QR code image and extract the JSON data. + """Parse a QR code image and extract the JSON data. Args: image_path: Path to the QR code image file @@ -134,6 +133,7 @@ def read_qr_image(image_path: Path) -> dict[str, Any]: Raises: ClanError: If the QR code cannot be read or contains invalid JSON + """ if not image_path.exists(): msg = f"QR code image file not found: {image_path}" diff --git a/pkgs/clan-cli/clan_lib/network/tor/__init__.py b/pkgs/clan-cli/clan_lib/network/tor/__init__.py index 9dd2af1ed..801cc3a1c 100644 --- a/pkgs/clan-cli/clan_lib/network/tor/__init__.py +++ b/pkgs/clan-cli/clan_lib/network/tor/__init__.py @@ -3,7 +3,6 @@ import time from collections.abc import Iterator from contextlib import contextmanager from dataclasses import dataclass -from typing import TYPE_CHECKING from clan_lib.errors import ClanError from clan_lib.network import Network, NetworkTechnologyBase, Peer diff --git a/pkgs/clan-cli/clan_lib/network/tor/lib.py b/pkgs/clan-cli/clan_lib/network/tor/lib.py index 6b0ea0df7..c885ba660 100755 --- a/pkgs/clan-cli/clan_lib/network/tor/lib.py +++ b/pkgs/clan-cli/clan_lib/network/tor/lib.py @@ -35,10 +35,7 @@ def is_tor_running(proxy_port: int | None = None) -> bool: # TODO: Move this to network technology tor module @contextmanager def spawn_tor() -> Iterator[None]: - """ - Spawns a Tor process using `nix-shell` if Tor is not already running. - """ - + """Spawns a Tor process using `nix-shell` if Tor is not already running.""" # Check if Tor is already running if is_tor_running(): log.info("Tor is running") @@ -68,9 +65,7 @@ class TorCheck: def tor_online_test(proxy_port: int) -> None: - """ - Tests if Tor is online by checking if we can establish a SOCKS5 connection. 
- """ + """Tests if Tor is online by checking if we can establish a SOCKS5 connection.""" import socket # Try to establish a SOCKS5 handshake with the Tor proxy diff --git a/pkgs/clan-cli/clan_lib/nix/__init__.py b/pkgs/clan-cli/clan_lib/nix/__init__.py index 3d9b7ec8a..da079d514 100644 --- a/pkgs/clan-cli/clan_lib/nix/__init__.py +++ b/pkgs/clan-cli/clan_lib/nix/__init__.py @@ -30,7 +30,7 @@ def nix_flake_show(flake_url: str | Path) -> list[str]: "--json", *(["--show-trace"] if log.isEnabledFor(logging.DEBUG) else []), str(flake_url), - ] + ], ) @@ -43,7 +43,7 @@ def nix_build(flags: list[str], gcroot: Path | None = None) -> list[str]: *(["--show-trace"] if log.isEnabledFor(logging.DEBUG) else []), *(["--out-root", str(gcroot)] if gcroot is not None else ["--no-link"]), *flags, - ] + ], ) @@ -86,7 +86,7 @@ def nix_eval(flags: list[str]) -> list[str]: *(["--show-trace"] if log.isEnabledFor(logging.DEBUG) else []), "--json", "--print-build-logs", - ] + ], ) if os.environ.get("IN_NIX_SANDBOX"): return [ @@ -132,12 +132,10 @@ class Packages: @classmethod def is_provided(cls: type["Packages"], program: str) -> bool: - """ - Determines if a program is shipped with the clan package. - """ + """Determines if a program is shipped with the clan package.""" if cls.static_packages is None: cls.static_packages = set( - os.environ.get("CLAN_PROVIDED_PACKAGES", "").split(":") + os.environ.get("CLAN_PROVIDED_PACKAGES", "").split(":"), ) if "#" in program: diff --git a/pkgs/clan-cli/clan_lib/persist/inventory_store.py b/pkgs/clan-cli/clan_lib/persist/inventory_store.py index 91cbe0a60..23bf0e999 100644 --- a/pkgs/clan-cli/clan_lib/persist/inventory_store.py +++ b/pkgs/clan-cli/clan_lib/persist/inventory_store.py @@ -24,8 +24,7 @@ from .util import ( def unwrap_known_unknown(value: Any) -> Any: - """ - Helper untility to unwrap our custom deferred module. (uniqueDeferredSerializableModule) + """Helper untility to unwrap our custom deferred module. (uniqueDeferredSerializableModule) This works because we control ClanLib.type.uniqueDeferredSerializableModule @@ -57,8 +56,7 @@ def unwrap_known_unknown(value: Any) -> Any: def sanitize(data: Any, whitelist_paths: list[str], current_path: list[str]) -> Any: - """ - Recursively walks dicts only, unwraps matching values only on whitelisted paths. + """Recursively walks dicts only, unwraps matching values only on whitelisted paths. Throws error if a value would be transformed on non-whitelisted path. """ if isinstance(data, dict): @@ -96,8 +94,7 @@ class FlakeInterface(Protocol): class InventorySnapshot(TypedDict): - """ - Restricted view of an Inventory. + """Restricted view of an Inventory. It contains only the keys that are convertible to python types and can be serialized to JSON. """ @@ -116,8 +113,7 @@ class InventoryStore: _allowed_path_transforms: list[str] | None = None, _keys: list[str] | None = None, ) -> None: - """ - InventoryStore constructor + """InventoryStore constructor :param flake: The flake to use :param inventory_file_name: The name of the inventory file @@ -138,8 +134,7 @@ class InventoryStore: self._keys = _keys def _load_merged_inventory(self) -> InventorySnapshot: - """ - Loads the evaluated inventory. + """Loads the evaluated inventory. After all merge operations with eventual nix code in buildClan. Evaluates clanInternals.inventoryClass.inventory with nix. Which is performant. 
@@ -151,11 +146,11 @@ class InventoryStore: raw_value = self.get_readonly_raw() if self._keys: filtered = cast( - InventorySnapshot, + "InventorySnapshot", {k: v for k, v in raw_value.items() if k in self._keys}, ) else: - filtered = cast(InventorySnapshot, raw_value) + filtered = cast("InventorySnapshot", raw_value) sanitized = sanitize(filtered, self._allowed_path_transforms, []) return sanitized @@ -165,11 +160,9 @@ class InventoryStore: return self._flake.select(f"clanInternals.inventoryClass.inventory.{attrs}") def _get_persisted(self) -> InventorySnapshot: - """ - Load the inventory FILE from the flake directory + """Load the inventory FILE from the flake directory If no file is found, returns an empty dictionary """ - # TODO: make this configurable if not self.inventory_file.exists(): return {} @@ -185,8 +178,7 @@ class InventoryStore: return inventory def _get_inventory_current_priority(self) -> dict: - """ - Returns the current priority of the inventory values + """Returns the current priority of the inventory values machines = { __prio = 100; @@ -205,8 +197,7 @@ class InventoryStore: return self._flake.select("clanInternals.inventoryClass.introspection") def _write_info(self) -> WriteInfo: - """ - Get the paths of the writeable keys in the inventory + """Get the paths of the writeable keys in the inventory Load the inventory and determine the writeable keys Performs 2 nix evaluations to get the current priority and the inventory @@ -217,14 +208,15 @@ class InventoryStore: data_disk: InventorySnapshot = self._get_persisted() writeables = determine_writeability( - current_priority, dict(data_eval), dict(data_disk) + current_priority, + dict(data_eval), + dict(data_disk), ) return WriteInfo(writeables, data_eval, data_disk) def get_writeability_of(self, path: str) -> Any: - """ - Get the writeability of a path in the inventory + """Get the writeability of a path in the inventory :param path: The path to check :return: A dictionary with the writeability of the path @@ -233,8 +225,7 @@ class InventoryStore: return write_info.writeables def read(self) -> InventorySnapshot: - """ - Accessor to the merged inventory + """Accessor to the merged inventory Side Effects: Runs 'nix eval' through the '_flake' member of this class @@ -242,9 +233,7 @@ class InventoryStore: return self._load_merged_inventory() def delete(self, delete_set: set[str], commit: bool = True) -> None: - """ - Delete keys from the inventory - """ + """Delete keys from the inventory""" data_disk = dict(self._get_persisted()) for delete_path in delete_set: @@ -261,13 +250,14 @@ class InventoryStore: ) def write( - self, update: InventorySnapshot, message: str, commit: bool = True + self, + update: InventorySnapshot, + message: str, + commit: bool = True, ) -> None: - """ - Write the inventory to the flake directory + """Write the inventory to the flake directory and commit it to git with the given message """ - write_info = self._write_info() patchset, delete_set = calc_patches( dict(write_info.data_disk), @@ -300,11 +290,11 @@ class InventoryStore: self._write(persisted, post_write=post_write) def _write( - self, content: Any, post_write: Callable[[], None] | None = None + self, + content: Any, + post_write: Callable[[], None] | None = None, ) -> None: - """ - Write the content to the inventory file and run post_write callback - """ + """Write the content to the inventory file and run post_write callback""" with self.inventory_file.open("w") as f: json.dump(content, f, indent=2) diff --git 
a/pkgs/clan-cli/clan_lib/persist/inventory_store_test.py b/pkgs/clan-cli/clan_lib/persist/inventory_store_test.py index b053a1d45..bb54bc35d 100644 --- a/pkgs/clan-cli/clan_lib/persist/inventory_store_test.py +++ b/pkgs/clan-cli/clan_lib/persist/inventory_store_test.py @@ -54,6 +54,7 @@ class MockFlake: ) output = subprocess.run( cmd, + check=False, capture_output=True, ) res_str = output.stdout.decode() diff --git a/pkgs/clan-cli/clan_lib/persist/util.py b/pkgs/clan-cli/clan_lib/persist/util.py index 5f66a7684..3fc31c2ce 100644 --- a/pkgs/clan-cli/clan_lib/persist/util.py +++ b/pkgs/clan-cli/clan_lib/persist/util.py @@ -1,5 +1,4 @@ -""" -Utilities for working with nested dictionaries, particularly for +"""Utilities for working with nested dictionaries, particularly for flattening, unmerging lists, finding duplicates, and calculating patches. """ @@ -15,18 +14,20 @@ empty: list[str] = [] def merge_objects( - curr: T, update: T, merge_lists: bool = True, path: list[str] = empty + curr: T, + update: T, + merge_lists: bool = True, + path: list[str] = empty, ) -> T: - """ - Updates values in curr by values of update + """Updates values in curr by values of update The output contains values for all keys of curr and update together. Lists are deduplicated and appended almost like in the nix module system. Example: - merge_objects({"a": 1}, {"a": null }) -> {"a": null} merge_objects({"a": null}, {"a": 1 }) -> {"a": 1} + """ result = {} msg = f"cannot update non-dictionary values: {curr} by {update}" @@ -43,7 +44,10 @@ def merge_objects( if isinstance(update_val, dict) and isinstance(curr_val, dict): result[key] = merge_objects( - curr_val, update_val, merge_lists=merge_lists, path=[*path, key] + curr_val, + update_val, + merge_lists=merge_lists, + path=[*path, key], ) elif isinstance(update_val, list) and isinstance(curr_val, list): if merge_lists: @@ -62,12 +66,11 @@ def merge_objects( elif key in curr: result[key] = curr_val # type: ignore - return cast(T, result) + return cast("T", result) def path_match(path: list[str], whitelist_paths: list[list[str]]) -> bool: - """ - Returns True if path matches any whitelist path with "*" wildcards. + """Returns True if path matches any whitelist path with "*" wildcards. I.e.: whitelist_paths = [["a.b.*"]] @@ -93,8 +96,7 @@ def path_match(path: list[str], whitelist_paths: list[list[str]]) -> bool: def flatten_data(data: dict, parent_key: str = "", separator: str = ".") -> dict: - """ - Recursively flattens a nested dictionary structure where keys are joined by the separator. + """Recursively flattens a nested dictionary structure where keys are joined by the separator. Args: data (dict): The nested dictionary structure. @@ -103,6 +105,7 @@ def flatten_data(data: dict, parent_key: str = "", separator: str = ".") -> dict Returns: dict: A flattened dictionary with all values. Directly in the root. + """ flattened = {} @@ -123,11 +126,11 @@ def flatten_data(data: dict, parent_key: str = "", separator: str = ".") -> dict def list_difference(all_items: list, filter_items: list) -> list: - """ - Unmerge the current list. Given a previous list. + """Unmerge the current list. Given a previous list. Returns: The other list. 
+ """ # Unmerge the lists res = [] @@ -145,10 +148,11 @@ def find_duplicates(string_list: list[str]) -> list[str]: def find_deleted_paths( - curr: dict[str, Any], update: dict[str, Any], parent_key: str = "" + curr: dict[str, Any], + update: dict[str, Any], + parent_key: str = "", ) -> set[str]: - """ - Recursively find keys (at any nesting level) that exist in persisted but do not + """Recursively find keys (at any nesting level) that exist in persisted but do not exist in update. If a nested dictionary is completely removed, return that dictionary key. :param persisted: The original (persisted) nested dictionary. @@ -180,7 +184,9 @@ def find_deleted_paths( else: # Both are dicts, recurse deeper deleted_paths |= find_deleted_paths( - p_value, u_value, current_path + p_value, + u_value, + current_path, ) else: # Persisted was a dict, update is not a dict -> entire branch changed @@ -204,8 +210,7 @@ def is_writeable_key( key: str, writeables: dict[str, set[str]], ) -> bool: - """ - Recursively check if a key is writeable. + """Recursively check if a key is writeable. key "machines.machine1.deploy.targetHost" is specified but writeability is only defined for "machines" We pop the last key and check if the parent key is writeable/non-writeable. """ @@ -228,8 +233,7 @@ def calc_patches( all_values: dict[str, Any], writeables: dict[str, set[str]], ) -> tuple[dict[str, Any], set[str]]: - """ - Calculate the patches to apply to the inventory. + """Calculate the patches to apply to the inventory. Given its current state and the update to apply. @@ -296,7 +300,7 @@ After: {new} # every item in nix_list MUST be in new nix_items_to_remove = list( - filter(lambda item: item not in new, nix_list) + filter(lambda item: item not in new, nix_list), ) if nix_items_to_remove: @@ -403,8 +407,7 @@ def determine_writeability( def delete_by_path(d: dict[str, Any], path: str) -> Any: - """ - Deletes the nested entry specified by a dot-separated path from the dictionary using pop(). + """Deletes the nested entry specified by a dot-separated path from the dictionary using pop(). :param data: The dictionary to modify. :param path: A dot-separated string indicating the nested key to delete. @@ -442,8 +445,7 @@ type DictLike = dict[str, Any] | Any def get_value_by_path(d: DictLike, path: str, fallback: Any = None) -> Any: - """ - Get the value at a specific dot-separated path in a nested dictionary. + """Get the value at a specific dot-separated path in a nested dictionary. If the path does not exist, it returns fallback. @@ -462,8 +464,7 @@ def get_value_by_path(d: DictLike, path: str, fallback: Any = None) -> Any: def set_value_by_path(d: DictLike, path: str, content: Any) -> None: - """ - Update the value at a specific dot-separated path in a nested dictionary. + """Update the value at a specific dot-separated path in a nested dictionary. If the value didn't exist before, it will be created recursively. diff --git a/pkgs/clan-cli/clan_lib/persist/util_test.py b/pkgs/clan-cli/clan_lib/persist/util_test.py index 2fd23a665..6ed3e403b 100644 --- a/pkgs/clan-cli/clan_lib/persist/util_test.py +++ b/pkgs/clan-cli/clan_lib/persist/util_test.py @@ -58,7 +58,9 @@ from clan_lib.persist.util import ( ], ) def test_path_match( - path: list[str], whitelist: list[list[str]], expected: bool + path: list[str], + whitelist: list[list[str]], + expected: bool, ) -> None: assert path_match(path, whitelist) == expected @@ -167,7 +169,7 @@ def test_write_list() -> None: "foo": [ "a", "b", - ] # <- writeable: because lists are merged. 
Filtering out nix-values comes later + ], # <- writeable: because lists are merged. Filtering out nix-values comes later } res = determine_writeability(prios, default, data) assert res == { @@ -199,9 +201,9 @@ def test_write_because_written() -> None: data: dict = { "foo": { "bar": { - "baz": "foo" # <- written. Since we created the data, we know we can write to it - } - } + "baz": "foo", # <- written. Since we created the data, we know we can write to it + }, + }, } res = determine_writeability(prios, {}, data) assert res == { @@ -246,10 +248,13 @@ def test_update_simple() -> None: "bar": "new value", # <- user sets this value "nix": "this is set in nix", # <- user didnt touch this value # If the user would have set this value, it would trigger an error - } + }, } patchset, _ = calc_patches( - data_disk, update, all_values=data_eval, writeables=writeables + data_disk, + update, + all_values=data_eval, + writeables=writeables, ) assert patchset == {"foo.bar": "new value"} @@ -274,7 +279,10 @@ def test_update_add_empty_dict() -> None: set_value_by_path(update, "foo.mimi", {}) patchset, _ = calc_patches( - data_disk, update, all_values=data_eval, writeables=writeables + data_disk, + update, + all_values=data_eval, + writeables=writeables, ) assert patchset == {"foo.mimi": {}} # this is what gets persisted @@ -299,7 +307,7 @@ def test_update_many() -> None: "bar": "baz", "nix": "this is set in nix", "nested": {"x": "x", "y": "y"}, - } + }, } data_disk = {"foo": {"bar": "baz", "nested": {"x": "x"}}} @@ -319,10 +327,13 @@ def test_update_many() -> None: "x": "new value for x", # <- user sets this value "y": "y", # <- user cannot set this value }, - } + }, } patchset, _ = calc_patches( - data_disk, update, all_values=data_eval, writeables=writeables + data_disk, + update, + all_values=data_eval, + writeables=writeables, ) assert patchset == { @@ -342,13 +353,13 @@ def test_update_parent_non_writeable() -> None: data_eval = { "foo": { "bar": "baz", - } + }, } data_disk = { "foo": { "bar": "baz", - } + }, } writeables = determine_writeability(prios, data_eval, data_disk) @@ -358,7 +369,7 @@ def test_update_parent_non_writeable() -> None: update = { "foo": { "bar": "new value", # <- user sets this value - } + }, } with pytest.raises(ClanError) as error: calc_patches(data_disk, update, all_values=data_eval, writeables=writeables) @@ -375,7 +386,7 @@ def test_update_list() -> None: data_eval = { # [ "A" ] is defined in nix. - "foo": ["A", "B"] + "foo": ["A", "B"], } data_disk = {"foo": ["B"]} @@ -388,7 +399,10 @@ def test_update_list() -> None: update = {"foo": ["A", "B", "C"]} # User wants to add "C" patchset, _ = calc_patches( - data_disk, update, all_values=data_eval, writeables=writeables + data_disk, + update, + all_values=data_eval, + writeables=writeables, ) assert patchset == {"foo": ["B", "C"]} @@ -399,7 +413,10 @@ def test_update_list() -> None: update = {"foo": ["A"]} # User wants to remove "B" patchset, _ = calc_patches( - data_disk, update, all_values=data_eval, writeables=writeables + data_disk, + update, + all_values=data_eval, + writeables=writeables, ) assert patchset == {"foo": []} @@ -414,7 +431,7 @@ def test_update_list_duplicates() -> None: data_eval = { # [ "A" ] is defined in nix. - "foo": ["A", "B"] + "foo": ["A", "B"], } data_disk = {"foo": ["B"]} @@ -433,10 +450,7 @@ def test_update_list_duplicates() -> None: def test_dont_persist_defaults() -> None: - """ - Default values should not be persisted to disk if not explicitly requested by the user. 
- """ - + """Default values should not be persisted to disk if not explicitly requested by the user.""" prios = { "enabled": {"__prio": 1500}, "config": {"__prio": 100}, @@ -453,7 +467,10 @@ def test_dont_persist_defaults() -> None: set_value_by_path(update, "config.foo", "foo") patchset, delete_set = calc_patches( - data_disk, update, all_values=data_eval, writeables=writeables + data_disk, + update, + all_values=data_eval, + writeables=writeables, ) assert patchset == {"config.foo": "foo"} assert delete_set == set() @@ -480,7 +497,10 @@ def test_machine_delete() -> None: delete_by_path(update, "machines.bar") patchset, delete_set = calc_patches( - data_disk, update, all_values=data_eval, writeables=writeables + data_disk, + update, + all_values=data_eval, + writeables=writeables, ) assert patchset == {} @@ -533,7 +553,10 @@ def test_delete_key() -> None: update: dict = {"foo": {}} patchset, delete_set = calc_patches( - data_disk, update, all_values=data_eval, writeables=writeables + data_disk, + update, + all_values=data_eval, + writeables=writeables, ) assert patchset == {"foo": {}} @@ -553,10 +576,10 @@ def test_delete_key_intermediate() -> None: "bar": {"name": "bar", "info": "info", "other": ["a", "b"]}, # Leave the key "other" "other": {"name": "other", "info": "info", "other": ["a", "b"]}, - } + }, } update: dict = { - "foo": {"other": {"name": "other", "info": "info", "other": ["a", "b"]}} + "foo": {"other": {"name": "other", "info": "info", "other": ["a", "b"]}}, } data_disk = data_eval @@ -568,7 +591,10 @@ def test_delete_key_intermediate() -> None: # remove all keys from foo patchset, delete_set = calc_patches( - data_disk, update, all_values=data_eval, writeables=writeables + data_disk, + update, + all_values=data_eval, + writeables=writeables, ) assert patchset == {} @@ -586,7 +612,7 @@ def test_delete_key_non_writeable() -> None: "foo": { # Remove the key "bar" "bar": {"name": "bar", "info": "info", "other": ["a", "b"]}, - } + }, } update: dict = {"foo": {}} diff --git a/pkgs/clan-cli/clan_lib/sandbox_exec/__init__.py b/pkgs/clan-cli/clan_lib/sandbox_exec/__init__.py index b55a026ed..e739d7a33 100644 --- a/pkgs/clan-cli/clan_lib/sandbox_exec/__init__.py +++ b/pkgs/clan-cli/clan_lib/sandbox_exec/__init__.py @@ -9,7 +9,6 @@ from tempfile import NamedTemporaryFile def create_sandbox_profile() -> str: """Create a sandbox profile that allows access to tmpdir and nix store, based on Nix's sandbox-defaults.sb.""" - # Based on Nix's sandbox-defaults.sb implementation with TMPDIR parameter profile_content = """(version 1) @@ -102,6 +101,7 @@ def sandbox_exec_cmd(generator: str, tmpdir: Path) -> Iterator[list[str]]: Yields: list[str]: The command to execute + """ profile_content = create_sandbox_profile() diff --git a/pkgs/clan-cli/clan_lib/sandbox_exec/tests/test_sandbox_exec.py b/pkgs/clan-cli/clan_lib/sandbox_exec/tests/test_sandbox_exec.py index 49a67bffe..b9b17a28f 100644 --- a/pkgs/clan-cli/clan_lib/sandbox_exec/tests/test_sandbox_exec.py +++ b/pkgs/clan-cli/clan_lib/sandbox_exec/tests/test_sandbox_exec.py @@ -19,7 +19,7 @@ def test_sandbox_allows_write_to_tmpdir() -> None: script = f'echo "test content" > "{test_file}"' with sandbox_exec_cmd(script, tmpdir_path) as cmd: - result = subprocess.run(cmd, capture_output=True, text=True) + result = subprocess.run(cmd, check=False, capture_output=True, text=True) assert result.returncode == 0, f"Command failed: {result.stderr}" assert test_file.exists(), "File was not created in tmpdir" assert test_file.read_text().strip() == "test 
content" @@ -42,7 +42,9 @@ def test_sandbox_denies_write_to_home() -> None: forbidden_file.unlink() with sandbox_exec_cmd(script, tmpdir_path) as cmd: - result = subprocess.run(cmd, capture_output=True, text=True) + result = subprocess.run( + cmd, check=False, capture_output=True, text=True + ) # Check that either the write was denied or the file wasn't created # macOS sandbox-exec with (allow default) has limitations @@ -50,7 +52,7 @@ def test_sandbox_denies_write_to_home() -> None: # If file was created, clean it up and note the limitation forbidden_file.unlink() pytest.skip( - "macOS sandbox-exec with (allow default) has limited deny capabilities" + "macOS sandbox-exec with (allow default) has limited deny capabilities", ) else: # Good - file was not created @@ -76,7 +78,7 @@ def test_sandbox_allows_nix_store_read() -> None: script = f'ls /nix/store >/dev/null 2>&1 && echo "success" > "{success_file}"' with sandbox_exec_cmd(script, tmpdir_path) as cmd: - result = subprocess.run(cmd, capture_output=True, text=True) + result = subprocess.run(cmd, check=False, capture_output=True, text=True) assert result.returncode == 0, f"Command failed: {result.stderr}" assert success_file.exists(), "Success file was not created" assert success_file.read_text().strip() == "success" diff --git a/pkgs/clan-cli/clan_lib/services/instances.py b/pkgs/clan-cli/clan_lib/services/instances.py index c5fe63c58..e453cb047 100644 --- a/pkgs/clan-cli/clan_lib/services/instances.py +++ b/pkgs/clan-cli/clan_lib/services/instances.py @@ -21,10 +21,7 @@ from clan_lib.services.modules import ( @API.register def list_service_instances(flake: Flake) -> InventoryInstancesType: - """ - Returns all currently present service instances including their full configuration - """ - + """Returns all currently present service instances including their full configuration""" inventory_store = InventoryStore(flake) inventory = inventory_store.read() instances = inventory.get("instances", {}) @@ -87,7 +84,7 @@ def create_service_instance( msg = f"Role: '{role_name}' - " if machine_refs: unavailable_machines = list( - filter(lambda m: m not in available_machine_refs, machine_refs) + filter(lambda m: m not in available_machine_refs, machine_refs), ) if unavailable_machines: msg += f"Unknown machine reference: {unavailable_machines}. Use one of {available_machine_refs}" @@ -96,7 +93,7 @@ def create_service_instance( tag_refs = role_members.get("tags") if tag_refs: unavailable_tags = list( - filter(lambda m: m not in available_tag_refs, tag_refs) + filter(lambda m: m not in available_tag_refs, tag_refs), ) if unavailable_tags: @@ -111,5 +108,6 @@ def create_service_instance( set_value_by_path(inventory, f"instances.{instance_name}", instance_config) set_value_by_path(inventory, f"instances.{instance_name}.module", module_ref) inventory_store.write( - inventory, message=f"services: instance '{instance_name}' init" + inventory, + message=f"services: instance '{instance_name}' init", ) diff --git a/pkgs/clan-cli/clan_lib/services/modules.py b/pkgs/clan-cli/clan_lib/services/modules.py index 1989ef0a3..e6b31cadf 100644 --- a/pkgs/clan-cli/clan_lib/services/modules.py +++ b/pkgs/clan-cli/clan_lib/services/modules.py @@ -60,8 +60,7 @@ class ModuleManifest: @classmethod def from_dict(cls, data: dict) -> "ModuleManifest": - """ - Create an instance of this class from a dictionary. + """Create an instance of this class from a dictionary. Drops any keys that are not defined in the dataclass. 
""" valid = {f.name for f in fields(cls)} @@ -69,11 +68,11 @@ class ModuleManifest: def parse_frontmatter(readme_content: str) -> tuple[dict[str, Any] | None, str]: - """ - Extracts TOML frontmatter from a string + """Extracts TOML frontmatter from a string Raises: - ClanError: If the toml frontmatter is invalid + """ # Pattern to match YAML frontmatter enclosed by triple-dashed lines frontmatter_pattern = r"^---\s+(.*?)\s+---\s?+(.*)$" @@ -106,20 +105,25 @@ T = TypeVar("T") def extract_frontmatter[T]( - readme_content: str, err_scope: str, fm_class: type[T] + readme_content: str, + err_scope: str, + fm_class: type[T], ) -> tuple[T, str]: - """ - Extracts TOML frontmatter from a README file content. + """Extracts TOML frontmatter from a README file content. - Parameters: + Parameters + ---------- - readme_content (str): The content of the README file as a string. - Returns: + Returns + ------- - str: The extracted frontmatter as a string. - str: The content of the README file without the frontmatter. - Raises: + Raises + ------ - ValueError: If the README does not contain valid frontmatter. + """ frontmatter_raw, remaining_content = parse_frontmatter(readme_content) @@ -147,9 +151,7 @@ class ModuleList(TypedDict): @API.register def list_service_modules(flake: Flake) -> ModuleList: - """ - Show information about a module - """ + """Show information about a module""" modules = flake.select("clanInternals.inventoryClass.modulesPerSource") res: dict[str, dict[str, ModuleInfo]] = {} @@ -168,16 +170,15 @@ def list_service_modules(flake: Flake) -> ModuleList: @API.register def get_service_module( - flake: Flake, module_ref: InventoryInstanceModuleType + flake: Flake, + module_ref: InventoryInstanceModuleType, ) -> ModuleInfo: - """ - Returns the module information for a given module reference + """Returns the module information for a given module reference :param module_ref: The module reference to get the information for :return: Dict of module information :raises ClanError: If the module_ref is invalid or missing required fields """ - input_name, module_name = check_service_module_ref(flake, module_ref) avilable_modules = list_service_modules(flake) @@ -196,8 +197,7 @@ def check_service_module_ref( flake: Flake, module_ref: InventoryInstanceModuleType, ) -> tuple[str, str]: - """ - Checks if the module reference is valid + """Checks if the module reference is valid :param module_ref: The module reference to check :raises ClanError: If the module_ref is invalid or missing required fields @@ -228,10 +228,10 @@ def check_service_module_ref( @API.register def get_service_module_schema( - flake: Flake, module_ref: InventoryInstanceModuleType + flake: Flake, + module_ref: InventoryInstanceModuleType, ) -> dict[str, Any]: - """ - Returns the schema for a service module + """Returns the schema for a service module :param module_ref: The module reference to get the schema for :return: Dict of schemas for the service module roles @@ -240,7 +240,7 @@ def get_service_module_schema( input_name, module_name = check_service_module_ref(flake, module_ref) return flake.select( - f"clanInternals.inventoryClass.moduleSchemas.{input_name}.{module_name}" + f"clanInternals.inventoryClass.moduleSchemas.{input_name}.{module_name}", ) @@ -250,9 +250,7 @@ def create_service_instance( module_ref: InventoryInstanceModuleType, roles: InventoryInstanceRolesType, ) -> None: - """ - Show information about a module - """ + """Show information about a module""" input_name, module_name = check_service_module_ref(flake, 
module_ref) inventory_store = InventoryStore(flake) @@ -291,5 +289,3 @@ def create_service_instance( message=f"Add service instance '{instance_name}' with module '{module_name} from {input_name}'", commit=True, ) - - return diff --git a/pkgs/clan-cli/clan_lib/ssh/create.py b/pkgs/clan-cli/clan_lib/ssh/create.py index 1239d5ee2..c2873880f 100644 --- a/pkgs/clan-cli/clan_lib/ssh/create.py +++ b/pkgs/clan-cli/clan_lib/ssh/create.py @@ -15,8 +15,7 @@ class SSHKeyPair: def create_secret_key_nixos_anywhere() -> SSHKeyPair: - """ - Create a new SSH key pair for NixOS Anywhere. + """Create a new SSH key pair for NixOS Anywhere. The keys are stored in ~/.config/clan/nixos-anywhere/keys/id_ed25519 and id_ed25519.pub. """ private_key_dir = user_nixos_anywhere_dir() @@ -27,8 +26,7 @@ def create_secret_key_nixos_anywhere() -> SSHKeyPair: def generate_ssh_key(root_dir: Path) -> SSHKeyPair: - """ - Generate a new SSH key pair at root_dir/keys/id_ed25519 and id_ed25519.pub. + """Generate a new SSH key pair at root_dir/keys/id_ed25519 and id_ed25519.pub. If the key already exists, it will not be regenerated. """ key_dir = root_dir / "keys" diff --git a/pkgs/clan-cli/clan_lib/ssh/host.py b/pkgs/clan-cli/clan_lib/ssh/host.py index 63cbb4f61..86b5f7ba0 100644 --- a/pkgs/clan-cli/clan_lib/ssh/host.py +++ b/pkgs/clan-cli/clan_lib/ssh/host.py @@ -12,8 +12,7 @@ cmdlog = logging.getLogger(__name__) class Host(Protocol): - """ - Abstract base class for host command execution. + """Abstract base class for host command execution. This provides a common interface for both local and remote hosts. """ @@ -47,8 +46,7 @@ class Host(Protocol): quiet: bool = False, control_master: bool = True, ) -> CmdOut: - """ - Run a command on the host. + """Run a command on the host. Args: cmd: Command to execute @@ -61,20 +59,18 @@ class Host(Protocol): Returns: Command output + """ @contextmanager @abstractmethod def become_root(self) -> Iterator["Host"]: - """ - Context manager to execute commands as root. - """ + """Context manager to execute commands as root.""" @contextmanager @abstractmethod def host_connection(self) -> Iterator["Host"]: - """ - Context manager to manage host connections. + """Context manager to manage host connections. For remote hosts, this manages SSH ControlMaster connections. For local hosts, this is a no-op that returns self. """ @@ -85,7 +81,6 @@ class Host(Protocol): env: dict[str, str] | None = None, control_master: bool = True, ) -> dict[str, str]: - """ - Get environment variables for Nix operations. + """Get environment variables for Nix operations. Remote hosts will add NIX_SSHOPTS, local hosts won't. """ diff --git a/pkgs/clan-cli/clan_lib/ssh/host_key.py b/pkgs/clan-cli/clan_lib/ssh/host_key.py index e6529bcec..34684c2c9 100644 --- a/pkgs/clan-cli/clan_lib/ssh/host_key.py +++ b/pkgs/clan-cli/clan_lib/ssh/host_key.py @@ -13,9 +13,7 @@ HostKeyCheck = Literal[ def hostkey_to_ssh_opts(host_key_check: HostKeyCheck) -> list[str]: - """ - Convert a HostKeyCheck value to SSH options. - """ + """Convert a HostKeyCheck value to SSH options.""" match host_key_check: case "strict": return ["-o", "StrictHostKeyChecking=yes"] diff --git a/pkgs/clan-cli/clan_lib/ssh/localhost.py b/pkgs/clan-cli/clan_lib/ssh/localhost.py index f62a12856..3d4572ada 100644 --- a/pkgs/clan-cli/clan_lib/ssh/localhost.py +++ b/pkgs/clan-cli/clan_lib/ssh/localhost.py @@ -12,9 +12,7 @@ cmdlog = logging.getLogger(__name__) @dataclass(frozen=True) class LocalHost: - """ - A Host implementation that executes commands locally without SSH. 
- """ + """A Host implementation that executes commands locally without SSH.""" command_prefix: str = "localhost" _user: str = field(default_factory=lambda: os.environ.get("USER", "root")) @@ -45,9 +43,7 @@ class LocalHost: quiet: bool = False, control_master: bool = True, ) -> CmdOut: - """ - Run a command locally. - """ + """Run a command locally.""" if opts is None: opts = RunOpts() @@ -83,9 +79,7 @@ class LocalHost: @contextmanager def become_root(self) -> Iterator["LocalHost"]: - """ - Context manager to execute commands as root. - """ + """Context manager to execute commands as root.""" if self._user == "root": yield self return @@ -96,9 +90,7 @@ class LocalHost: @contextmanager def host_connection(self) -> Iterator["LocalHost"]: - """ - For LocalHost, this is a no-op that just returns self. - """ + """For LocalHost, this is a no-op that just returns self.""" yield self def nix_ssh_env( @@ -106,9 +98,7 @@ class LocalHost: env: dict[str, str] | None = None, control_master: bool = True, ) -> dict[str, str]: - """ - LocalHost doesn't need SSH environment variables. - """ + """LocalHost doesn't need SSH environment variables.""" if env is None: env = {} # Don't set NIX_SSHOPTS for localhost diff --git a/pkgs/clan-cli/clan_lib/ssh/remote.py b/pkgs/clan-cli/clan_lib/ssh/remote.py index 6ed24f1fd..d7cec5d68 100644 --- a/pkgs/clan-cli/clan_lib/ssh/remote.py +++ b/pkgs/clan-cli/clan_lib/ssh/remote.py @@ -70,9 +70,7 @@ class Remote: port: int | None = None, ssh_options: dict[str, str] | None = None, ) -> "Remote": - """ - Returns a new Remote instance with the same data but with a different host_key_check. - """ + """Returns a new Remote instance with the same data but with a different host_key_check.""" return Remote( address=self.address, user=self.user, @@ -105,10 +103,7 @@ class Remote: machine_name: str, address: str, ) -> "Remote": - """ - Parse a deployment address and return a Remote object. - """ - + """Parse a deployment address and return a Remote object.""" return _parse_ssh_uri(machine_name=machine_name, address=address) def run_local( @@ -117,9 +112,7 @@ class Remote: opts: RunOpts | None = None, extra_env: dict[str, str] | None = None, ) -> CmdOut: - """ - Command to run locally for the host - """ + """Command to run locally for the host""" if opts is None: opts = RunOpts() env = opts.env or os.environ.copy() @@ -139,13 +132,12 @@ class Remote: @contextmanager def host_connection(self) -> Iterator["Remote"]: - """ - Context manager to manage SSH ControlMaster connections. + """Context manager to manage SSH ControlMaster connections. This will create a temporary directory for the control socket. """ directory = None if sys.platform == "darwin" and os.environ.get("TMPDIR", "").startswith( - "/var/folders/" + "/var/folders/", ): directory = "/tmp/" with TemporaryDirectory(prefix="clan-ssh", dir=directory) as temp_dir: @@ -179,15 +171,16 @@ class Remote: "exit", ] exit_cmd.append(remote.target) - subprocess.run(exit_cmd, capture_output=True, timeout=5) + subprocess.run( + exit_cmd, check=False, capture_output=True, timeout=5 + ) except (subprocess.TimeoutExpired, subprocess.CalledProcessError): # If exit fails still try to stop the master connection pass @contextmanager def become_root(self) -> Iterator["Remote"]: - """ - Context manager to set up sudo askpass proxy. + """Context manager to set up sudo askpass proxy. This will set up a proxy for sudo password prompts. 
""" if self.user == "root": @@ -245,8 +238,7 @@ class Remote: quiet: bool = False, control_master: bool = True, ) -> CmdOut: - """ - Internal method to run a command on the host via ssh. + """Internal method to run a command on the host via ssh. `control_path_dir`: If provided, SSH ControlMaster options will be used. """ if extra_env is None: @@ -299,7 +291,9 @@ class Remote: ssh_cmd = [ *self.ssh_cmd( - verbose_ssh=verbose_ssh, tty=tty, control_master=control_master + verbose_ssh=verbose_ssh, + tty=tty, + control_master=control_master, ), "--", *sudo, @@ -320,7 +314,7 @@ class Remote: if env is None: env = {} env["NIX_SSHOPTS"] = " ".join( - self._ssh_cmd_opts(control_master=control_master) # Renamed + self._ssh_cmd_opts(control_master=control_master), # Renamed ) return env @@ -350,14 +344,12 @@ class Remote: "ControlPersist=1m", "-o", f"ControlPath={socket_path}", - ] + ], ) return ssh_opts def ssh_url(self) -> str: - """ - Generates a standard SSH URL (ssh://[user@]host[:port]). - """ + """Generates a standard SSH URL (ssh://[user@]host[:port]).""" url = "ssh://" if self.user: url += f"{self.user}@" @@ -367,7 +359,10 @@ class Remote: return url def ssh_cmd( - self, verbose_ssh: bool = False, tty: bool = False, control_master: bool = True + self, + verbose_ssh: bool = False, + tty: bool = False, + control_master: bool = True, ) -> list[str]: packages = [] password_args = [] @@ -387,7 +382,7 @@ class Remote: [ "-o", f"ProxyCommand=nc -x localhost:{self.socks_port} -X 5 %h %p", - ] + ], ) cmd = [ @@ -399,8 +394,7 @@ class Remote: return nix_shell(packages, cmd) def _check_sshpass_errorcode(self, res: subprocess.CompletedProcess) -> None: - """ - Check the return code of the sshpass command and raise an error if it indicates a failure. + """Check the return code of the sshpass command and raise an error if it indicates a failure. Error codes are based on man sshpass(1) and may vary by version. """ if res.returncode == 0: @@ -459,7 +453,8 @@ class Remote: self._check_sshpass_errorcode(res) def check_machine_ssh_reachable( - self, opts: "ConnectionOptions | None" = None + self, + opts: "ConnectionOptions | None" = None, ) -> None: from clan_lib.network.check import check_machine_ssh_reachable @@ -476,16 +471,13 @@ def _parse_ssh_uri( machine_name: str, address: str, ) -> "Remote": - """ - Parses an SSH URI into a Remote object. + """Parses an SSH URI into a Remote object. 
The address can be in the form of: - `ssh://[user@]hostname[:port]?option=value&option2=value2` - `[user@]hostname[:port]` The specification can be found here: https://www.ietf.org/archive/id/draft-salowey-secsh-uri-00.html """ - if address.startswith("ssh://"): - # Strip the `ssh://` prefix if it exists - address = address[len("ssh://") :] + address = address.removeprefix("ssh://") parts = address.split("?", maxsplit=1) endpoint, maybe_options = parts if len(parts) == 2 else (parts[0], "") diff --git a/pkgs/clan-cli/clan_lib/ssh/remote_test.py b/pkgs/clan-cli/clan_lib/ssh/remote_test.py index b2e3ef6ad..40d93205b 100644 --- a/pkgs/clan-cli/clan_lib/ssh/remote_test.py +++ b/pkgs/clan-cli/clan_lib/ssh/remote_test.py @@ -131,7 +131,7 @@ def test_parse_deployment_address( def test_parse_ssh_options() -> None: addr = "root@example.com:2222?IdentityFile=/path/to/private/key&StrictRemoteKeyChecking=yes" host = Remote.from_ssh_uri(machine_name="foo", address=addr).override( - host_key_check="strict" + host_key_check="strict", ) assert host.address == "example.com" assert host.port == 2222 @@ -143,7 +143,10 @@ def test_parse_ssh_options() -> None: def test_run(hosts: list[Remote], runtime: AsyncRuntime) -> None: for host in hosts: proc = runtime.async_run( - None, host.run_local, ["echo", "hello"], RunOpts(log=Log.STDERR) + None, + host.run_local, + ["echo", "hello"], + RunOpts(log=Log.STDERR), ) assert proc.wait().result.stdout == "hello\n" @@ -173,7 +176,10 @@ def test_run_environment(hosts: list[Remote], runtime: AsyncRuntime) -> None: def test_run_no_shell(hosts: list[Remote], runtime: AsyncRuntime) -> None: for host in hosts: proc = runtime.async_run( - None, host.run_local, ["echo", "hello"], RunOpts(log=Log.STDERR) + None, + host.run_local, + ["echo", "hello"], + RunOpts(log=Log.STDERR), ) assert proc.wait().result.stdout == "hello\n" @@ -209,7 +215,10 @@ def test_run_function(hosts: list[Remote], runtime: AsyncRuntime) -> None: def test_timeout(hosts: list[Remote], runtime: AsyncRuntime) -> None: for host in hosts: proc = runtime.async_run( - None, host.run_local, ["sleep", "10"], RunOpts(timeout=0.01) + None, + host.run_local, + ["sleep", "10"], + RunOpts(timeout=0.01), ) error = proc.wait().error assert isinstance(error, ClanCmdTimeoutError) @@ -218,7 +227,10 @@ def test_timeout(hosts: list[Remote], runtime: AsyncRuntime) -> None: def test_run_exception(hosts: list[Remote], runtime: AsyncRuntime) -> None: for host in hosts: proc = runtime.async_run( - None, host.run_local, ["exit 1"], RunOpts(shell=True, check=False) + None, + host.run_local, + ["exit 1"], + RunOpts(shell=True, check=False), ) assert proc.wait().result.returncode == 1 diff --git a/pkgs/clan-cli/clan_lib/ssh/sudo_askpass_proxy.py b/pkgs/clan-cli/clan_lib/ssh/sudo_askpass_proxy.py index a3459a6d7..5e7235276 100644 --- a/pkgs/clan-cli/clan_lib/ssh/sudo_askpass_proxy.py +++ b/pkgs/clan-cli/clan_lib/ssh/sudo_askpass_proxy.py @@ -48,15 +48,20 @@ class SudoAskpassProxy: old_settings = termios.tcgetattr(sys.stdin.fileno()) try: logger.debug( - f"Running password prompt command: {' '.join(password_command)}" + f"Running password prompt command: {' '.join(password_command)}", ) password_process = subprocess.run( - password_command, text=True, check=False, stdout=subprocess.PIPE + password_command, + text=True, + check=False, + stdout=subprocess.PIPE, ) finally: # dialog messes with the terminal settings, so we need to restore them if old_settings is not None: termios.tcsetattr( - sys.stdin.fileno(), termios.TCSADRAIN, old_settings 
+ sys.stdin.fileno(), + termios.TCSADRAIN, + old_settings, ) if password_process.returncode != 0: @@ -68,7 +73,6 @@ class SudoAskpassProxy: def _process(self, ssh_process: subprocess.Popen) -> None: """Execute the remote command with password proxying""" - # Monitor SSH output for password requests assert ssh_process.stdout is not None, "SSH process stdout is None" try: @@ -115,7 +119,9 @@ class SudoAskpassProxy: raise ClanError(msg) self.thread = threading.Thread( - target=self._process, name="SudoAskpassProxy", args=(self.ssh_process,) + target=self._process, + name="SudoAskpassProxy", + args=(self.ssh_process,), ) self.thread.start() return askpass_script diff --git a/pkgs/clan-cli/clan_lib/templates/__init__.py b/pkgs/clan-cli/clan_lib/templates/__init__.py index b75530a1f..2f6251939 100644 --- a/pkgs/clan-cli/clan_lib/templates/__init__.py +++ b/pkgs/clan-cli/clan_lib/templates/__init__.py @@ -15,9 +15,7 @@ class TemplateList: def get_builtin_template_list() -> TemplateList: - """ - Fallback to get only builtin clan templates with no custom templates. - """ + """Fallback to get only builtin clan templates with no custom templates.""" builtin_flake = Flake(str(clan_templates())) builtin_templates = builtin_flake.select("clanInternals.templates") custom_templates: dict[str, ClanTemplatesType] = {} @@ -25,16 +23,14 @@ def get_builtin_template_list() -> TemplateList: def list_templates(flake: Flake | None) -> TemplateList: - """ - Show information about a module - """ + """Show information about a module""" if flake is None: log.debug("No flake provided, falling back to clan-core builtin templates") return get_builtin_template_list() try: custom_templates = flake.select( - "clanInternals.inventoryClass.templatesPerSource" + "clanInternals.inventoryClass.templatesPerSource", ) builtin_templates = flake.select("clanInternals.templates") @@ -43,6 +39,6 @@ def list_templates(flake: Flake | None) -> TemplateList: except (AttributeError, KeyError, Exception): log.debug( "Failed to get templates from clan inputs, " - "falling back to clan-core builtin templates" + "falling back to clan-core builtin templates", ) return get_builtin_template_list() diff --git a/pkgs/clan-cli/clan_lib/templates/disk.py b/pkgs/clan-cli/clan_lib/templates/disk.py index 84b538eec..027c475e0 100644 --- a/pkgs/clan-cli/clan_lib/templates/disk.py +++ b/pkgs/clan-cli/clan_lib/templates/disk.py @@ -30,9 +30,8 @@ def get_best_unix_device_name(unix_device_names: list[str]) -> str: for device_name in unix_device_names: if "disk/by-id" in device_name: return device_name - else: - # if no by-id found, use the first device name - return unix_device_names[0] + # if no by-id found, use the first device name + return unix_device_names[0] def hw_main_disk_options(hw_report: dict) -> list[str] | None: @@ -71,9 +70,11 @@ templates: dict[str, dict[str, Callable[[dict[str, Any]], Placeholder]]] = { "single-disk": { # Placeholders "mainDisk": lambda hw_report: Placeholder( - label="Main disk", options=hw_main_disk_options(hw_report), required=True + label="Main disk", + options=hw_main_disk_options(hw_report), + required=True, ), - } + }, } @@ -87,10 +88,10 @@ def get_empty_placeholder(label: str) -> Placeholder: @API.register def get_machine_disk_schemas( - machine: Machine, check_hw: bool = True + machine: Machine, + check_hw: bool = True, ) -> dict[str, DiskSchema]: - """ - Get the available disk schemas. + """Get the available disk schemas. 
This function reads the disk schemas from the templates directory and returns them as a dictionary. Offering options based on the hardware report of the machine. @@ -161,9 +162,7 @@ def set_machine_disk_schema( force: bool = False, check_hw: bool = True, ) -> None: - """ - Set the disk placeholders of the template - """ + """Set the disk placeholders of the template""" # Ensure the machine exists machine.get_inv_machine() @@ -221,7 +220,7 @@ def set_machine_disk_schema( ) placeholders_toml = "\n".join( - [f"""# {k} = "{v}" """ for k, v in placeholders.items() if v is not None] + [f"""# {k} = "{v}" """ for k, v in placeholders.items() if v is not None], ) header = f"""# --- # schema = "{schema_name}" @@ -235,7 +234,8 @@ def set_machine_disk_schema( config_str = disk_template.read() for placeholder_name, placeholder_value in placeholders.items(): config_str = config_str.replace( - r"{{" + placeholder_name + r"}}", placeholder_value + r"{{" + placeholder_name + r"}}", + placeholder_value, ) # Custom replacements diff --git a/pkgs/clan-cli/clan_lib/templates/filesystem.py b/pkgs/clan-cli/clan_lib/templates/filesystem.py index 1b3187cbc..c6772729d 100644 --- a/pkgs/clan-cli/clan_lib/templates/filesystem.py +++ b/pkgs/clan-cli/clan_lib/templates/filesystem.py @@ -9,10 +9,7 @@ from clan_lib.nix import ( def realize_nix_path(clan_dir: Flake, nix_path: str) -> None: - """ - Downloads / realizes a nix path into the nix store - """ - + """Downloads / realizes a nix path into the nix store""" if Path(nix_path).exists(): return @@ -31,12 +28,11 @@ def realize_nix_path(clan_dir: Flake, nix_path: str) -> None: def copy_from_nixstore(src: Path, dest: Path) -> None: - """ - Copy a directory from the nix store to a destination path. + """Copy a directory from the nix store to a destination path. Uses `cp -r` to recursively copy the directory. Ensures the destination directory is writable by the user. """ shutil.copytree(src, dest, dirs_exist_ok=True, symlinks=True) run( - ["chmod", "-R", "u+w", str(dest)] + ["chmod", "-R", "u+w", str(dest)], ) # Ensure the destination is writable by the user diff --git a/pkgs/clan-cli/clan_lib/templates/handler.py b/pkgs/clan-cli/clan_lib/templates/handler.py index b379b4336..0347d6ddb 100644 --- a/pkgs/clan-cli/clan_lib/templates/handler.py +++ b/pkgs/clan-cli/clan_lib/templates/handler.py @@ -17,10 +17,11 @@ log = logging.getLogger(__name__) @contextmanager def machine_template( - flake: Flake, template_ident: str, dst_machine_name: str + flake: Flake, + template_ident: str, + dst_machine_name: str, ) -> Iterator[Path]: - """ - Create a machine from a template. + """Create a machine from a template. This function will copy the template files to the machine specific directory of the specified flake. :param flake: The flake to create the machine in. @@ -37,7 +38,6 @@ def machine_template( ... The machine directory is removed if the context raised any errors. ... Only if the context is exited without errors, the machine directory is kept. 
""" - # Check for duplicates if dst_machine_name in list_machines(flake): msg = f"Machine '{dst_machine_name}' already exists" @@ -48,7 +48,9 @@ def machine_template( # Get the clan template from the specifier [flake_ref, template_selector] = transform_url( - "machine", template_ident, flake=flake + "machine", + template_ident, + flake=flake, ) # For pretty error messages printable_template_ref = f"{flake_ref}#{template_selector}" @@ -108,8 +110,7 @@ def clan_template( dst_dir: Path, post_process: Callable[[Path], None] | None = None, ) -> Iterator[Path]: - """ - Create a clan from a template. + """Create a clan from a template. This function will copy the template files to a new clan directory :param flake: The flake to create the machine in. @@ -127,7 +128,6 @@ def clan_template( ... The directory is removed if the context raised any errors. ... Only if the context is exited without errors, it is kept. """ - # Get the clan template from the specifier [flake_ref, template_selector] = transform_url("clan", template_ident, flake=flake) # For pretty error messages @@ -140,11 +140,13 @@ def clan_template( except ClanError as e: try: log.info( - f"Template '{template_ident}' not found in {flake_ref}, trying builtin template" + f"Template '{template_ident}' not found in {flake_ref}, trying builtin template", ) builtin_flake = Flake(str(clan_templates())) [_, builtin_selector] = transform_url( - "clan", template_ident, flake=builtin_flake + "clan", + template_ident, + flake=builtin_flake, ) template = builtin_flake.select(builtin_selector) template_flake = builtin_flake diff --git a/pkgs/clan-cli/clan_lib/templates/template_url.py b/pkgs/clan-cli/clan_lib/templates/template_url.py index bbb2237f9..24d520bcf 100644 --- a/pkgs/clan-cli/clan_lib/templates/template_url.py +++ b/pkgs/clan-cli/clan_lib/templates/template_url.py @@ -8,8 +8,7 @@ log = logging.getLogger(__name__) class Flake(Protocol): - """ - Protocol for a local flake, which has a path attribute. + """Protocol for a local flake, which has a path attribute. Pass clan_lib.flake.Flake or any other object that implements this protocol. """ @@ -20,8 +19,7 @@ class Flake(Protocol): def transform_url(template_type: str, identifier: str, flake: Flake) -> tuple[str, str]: - """ - Transform a template flake ref by injecting the context (clan|machine|disko) into the url. + """Transform a template flake ref by injecting the context (clan|machine|disko) into the url. We do this for shorthand notation of URLs. If the attribute selector path is explicitly selecting an attribute, we don't transform it. @@ -30,7 +28,6 @@ def transform_url(template_type: str, identifier: str, flake: Flake) -> tuple[st :param local_path: The local flake path, which is used to resolve to a local flake reference, i.e. ".#" shorthand. Examples: - 1. injects "machine" as context clan machines create --template .#new-machine or @@ -106,7 +103,7 @@ def transform_url(template_type: str, identifier: str, flake: Flake) -> tuple[st # If the tail contains a dot, or is quoted we assume its a path and don't transform it. if '"' in selector or "'" in selector: log.warning( - "Quotes in template paths are not yet supported. Please use unquoted paths." + "Quotes in template paths are not yet supported. 
Please use unquoted paths.", ) return (flake_ref, input_prefix + selector) diff --git a/pkgs/clan-cli/clan_lib/templates/template_url_test.py b/pkgs/clan-cli/clan_lib/templates/template_url_test.py index 5b85ae7c2..7c3b83b1d 100644 --- a/pkgs/clan-cli/clan_lib/templates/template_url_test.py +++ b/pkgs/clan-cli/clan_lib/templates/template_url_test.py @@ -24,7 +24,9 @@ def test_transform_url_self_explizit_dot() -> None: expected_selector = 'clan.templates.machine."new-machine"' flake_ref, selector = transform_url( - machine_template_type, user_input, flake=local_path + machine_template_type, + user_input, + flake=local_path, ) assert flake_ref == str(local_path.path) assert selector == expected_selector @@ -44,7 +46,9 @@ def test_transform_url_self_no_dot() -> None: expected_selector = 'clan.templates.machine."new-machine"' flake_ref, selector = transform_url( - machine_template_type, user_input, flake=local_path + machine_template_type, + user_input, + flake=local_path, ) assert flake_ref == str(local_path.path) assert selector == expected_selector @@ -55,7 +59,9 @@ def test_transform_url_builtin_template() -> None: expected_selector = 'clanInternals.templates.machine."new-machine"' flake_ref, selector = transform_url( - machine_template_type, user_input, flake=local_path + machine_template_type, + user_input, + flake=local_path, ) assert flake_ref == str(local_path.path) assert selector == expected_selector @@ -66,7 +72,9 @@ def test_transform_url_remote_template() -> None: expected_selector = 'clan.templates.machine."new-machine"' flake_ref, selector = transform_url( - machine_template_type, user_input, flake=local_path + machine_template_type, + user_input, + flake=local_path, ) assert flake_ref == "github:/org/repo" @@ -78,7 +86,9 @@ def test_transform_url_explicit_path() -> None: expected_selector = "clan.templates.machine.new-machine" flake_ref, selector = transform_url( - machine_template_type, user_input, flake=local_path + machine_template_type, + user_input, + flake=local_path, ) assert flake_ref == str(local_path.path) assert selector == expected_selector @@ -89,7 +99,9 @@ def test_transform_url_quoted_selector() -> None: user_input = '.#"new.machine"' expected_selector = '"new.machine"' flake_ref, selector = transform_url( - machine_template_type, user_input, flake=local_path + machine_template_type, + user_input, + flake=local_path, ) assert flake_ref == str(local_path.path) assert selector == expected_selector @@ -99,7 +111,9 @@ def test_single_quote_selector() -> None: user_input = ".#'new.machine'" expected_selector = "'new.machine'" flake_ref, selector = transform_url( - machine_template_type, user_input, flake=local_path + machine_template_type, + user_input, + flake=local_path, ) assert flake_ref == str(local_path.path) assert selector == expected_selector @@ -110,7 +124,9 @@ def test_custom_template_path() -> None: expected_selector = "my.templates.custom.machine" flake_ref, selector = transform_url( - machine_template_type, user_input, flake=local_path + machine_template_type, + user_input, + flake=local_path, ) assert flake_ref == "github:/org/repo" assert selector == expected_selector @@ -122,7 +138,9 @@ def test_full_url_query_and_fragment() -> None: expected_selector = "my.templates.custom.machine" flake_ref, selector = transform_url( - machine_template_type, user_input, flake=local_path + machine_template_type, + user_input, + flake=local_path, ) assert flake_ref == expected_flake_ref assert selector == expected_selector @@ -141,7 +159,9 @@ def 
test_malformed_identifier() -> None: user_input = "github:/org/repo#my.templates.custom.machine#extra" with pytest.raises(ClanError) as exc_info: _flake_ref, _selector = transform_url( - machine_template_type, user_input, flake=local_path + machine_template_type, + user_input, + flake=local_path, ) assert isinstance(exc_info.value, ClanError) @@ -156,7 +176,9 @@ def test_locked_input_template() -> None: expected_selector = 'inputs.locked-input.clan.templates.machine."new-machine"' flake_ref, selector = transform_url( - machine_template_type, user_input, flake=local_path + machine_template_type, + user_input, + flake=local_path, ) assert flake_ref == str(local_path.path) assert selector == expected_selector @@ -167,7 +189,9 @@ def test_locked_input_template_no_quotes() -> None: expected_selector = 'inputs.locked-input."new.machine"' flake_ref, selector = transform_url( - machine_template_type, user_input, flake=local_path + machine_template_type, + user_input, + flake=local_path, ) assert selector == expected_selector assert flake_ref == str(local_path.path) @@ -178,7 +202,9 @@ def test_locked_input_template_no_dot() -> None: expected_selector = "inputs.locked-input.new.machine" flake_ref, selector = transform_url( - machine_template_type, user_input, flake=local_path + machine_template_type, + user_input, + flake=local_path, ) assert selector == expected_selector assert flake_ref == str(local_path.path) @@ -189,7 +215,9 @@ def test_explizit_path_default_minimal_rel_1() -> None: expected_selector = "clan.templates.machine.default" flake_ref, selector = transform_url( - machine_template_type, user_input, flake=local_path + machine_template_type, + user_input, + flake=local_path, ) assert selector == expected_selector assert flake_ref == user_input @@ -200,7 +228,9 @@ def test_explizit_path_default_minimal_rel_2() -> None: expected_selector = "clan.templates.machine.default" flake_ref, selector = transform_url( - machine_template_type, user_input, flake=local_path + machine_template_type, + user_input, + flake=local_path, ) assert selector == expected_selector assert flake_ref == user_input @@ -211,7 +241,9 @@ def test_explizit_path_default_minimal_parent_1() -> None: expected_selector = "clan.templates.machine.default" flake_ref, selector = transform_url( - machine_template_type, user_input, flake=local_path + machine_template_type, + user_input, + flake=local_path, ) assert selector == expected_selector assert flake_ref == user_input @@ -222,7 +254,9 @@ def test_explizit_path_default_minimal_parent_2() -> None: expected_selector = "clan.templates.machine.default" flake_ref, selector = transform_url( - machine_template_type, user_input, flake=local_path + machine_template_type, + user_input, + flake=local_path, ) assert selector == expected_selector assert flake_ref == user_input @@ -233,7 +267,9 @@ def test_internal_dot_template() -> None: expected_selector = 'clanInternals.templates.machine.".internal"' flake_ref, selector = transform_url( - machine_template_type, user_input, flake=local_path + machine_template_type, + user_input, + flake=local_path, ) assert selector == expected_selector assert flake_ref == str(local_path.path) @@ -244,7 +280,9 @@ def test_explizit_rel_path_default() -> None: expected_selector = "clan.templates.machine.default" flake_ref, selector = transform_url( - machine_template_type, user_input, flake=local_path + machine_template_type, + user_input, + flake=local_path, ) assert selector == expected_selector assert flake_ref == user_input @@ -255,7 +293,9 @@ def 
test_explizit_abs_path_default() -> None: expected_selector = "clan.templates.machine.default" flake_ref, selector = transform_url( - machine_template_type, user_input, flake=local_path + machine_template_type, + user_input, + flake=local_path, ) assert selector == expected_selector assert flake_ref == user_input @@ -266,7 +306,9 @@ def test_explizit_home_path_default() -> None: expected_selector = "clan.templates.machine.default" flake_ref, selector = transform_url( - machine_template_type, user_input, flake=local_path + machine_template_type, + user_input, + flake=local_path, ) assert selector == expected_selector assert flake_ref == user_input diff --git a/pkgs/clan-cli/clan_lib/tests/test_create.py b/pkgs/clan-cli/clan_lib/tests/test_create.py index 13e8af74b..228fe921c 100644 --- a/pkgs/clan-cli/clan_lib/tests/test_create.py +++ b/pkgs/clan-cli/clan_lib/tests/test_create.py @@ -71,17 +71,17 @@ def create_base_inventory(ssh_keys_pairs: list[SSHKeyPair]) -> InventoryWrapper: "default": { "tags": {"all": {}}, "settings": cast( - Unknown, + "Unknown", { "allowedKeys": { key.username: key.ssh_pubkey_txt for key in ssh_keys - } + }, }, ), }, }, - } - } + }, + }, ) return InventoryWrapper(instances=instances) @@ -107,7 +107,10 @@ def fix_flake_inputs(clan_dir: Path, clan_core_dir: Path) -> None: @pytest.mark.with_core @pytest.mark.skipif(sys.platform == "darwin", reason="sshd fails to start on darwin") def test_clan_create_api( - temporary_home: Path, test_lib_root: Path, clan_core: Path, hosts: list[Remote] + temporary_home: Path, + test_lib_root: Path, + clan_core: Path, + hosts: list[Remote], ) -> None: host_ip = hosts[0].address host_user = hosts[0].user @@ -134,8 +137,10 @@ def test_clan_create_api( # TODO: We need to generate a lock file for the templates clan_cli.clan.create.create_clan( clan_cli.clan.create.CreateOptions( - template="minimal", dest=dest_clan_dir, update_clan=False - ) + template="minimal", + dest=dest_clan_dir, + update_clan=False, + ), ) assert dest_clan_dir.is_dir() assert (dest_clan_dir / "flake.nix").is_file() @@ -165,16 +170,22 @@ def test_clan_create_api( machines: list[Machine] = [] host = Remote( - user=host_user, address=host_ip, port=int(ssh_port_var), command_prefix=vm_name + user=host_user, + address=host_ip, + port=int(ssh_port_var), + command_prefix=vm_name, ) # TODO: We need to merge Host and Machine class these duplicate targetHost stuff is a nightmare inv_machine = InventoryMachine( - name=vm_name, deploy=MachineDeploy(targetHost=f"{host.target}:{ssh_port_var}") + name=vm_name, + deploy=MachineDeploy(targetHost=f"{host.target}:{ssh_port_var}"), ) create_machine( ClanCreateOptions( - clan_dir_flake, inv_machine, target_host=f"{host.target}:{ssh_port_var}" - ) + clan_dir_flake, + inv_machine, + target_host=f"{host.target}:{ssh_port_var}", + ), ) machine = Machine(name=vm_name, flake=clan_dir_flake) machines.append(machine) @@ -184,7 +195,8 @@ def test_clan_create_api( clan_dir_flake.invalidate_cache() target_host = machine.target_host().override( - private_key=private_key, host_key_check="none" + private_key=private_key, + host_key_check="none", ) target_host.check_machine_ssh_reachable() @@ -194,7 +206,7 @@ def test_clan_create_api( SSHKeyPair( private=private_key, public=public_key, - ) + ), ] # ===== CREATE BASE INVENTORY ====== diff --git a/pkgs/clan-cli/clan_lib/validator/hostname.py b/pkgs/clan-cli/clan_lib/validator/hostname.py index ec65641df..513bf6c57 100644 --- a/pkgs/clan-cli/clan_lib/validator/hostname.py +++ 
b/pkgs/clan-cli/clan_lib/validator/hostname.py @@ -4,8 +4,7 @@ from clan_lib.errors import ClanError def hostname(host: str) -> str: - """ - Validates a hostname according to the expected format in NixOS. + """Validates a hostname according to the expected format in NixOS. Usage Example @@ -20,7 +19,6 @@ def hostname(host: str) -> str: except ValueError as e: raise ClanError(str(e), location="name") """ - # TODO: Generate from nix schema hostname_regex = r"^(?!-)[A-Za-z0-9-]{1,63}(? list[Generator]: - """ - Get generators for a machine, with optional closure computation. + """Get generators for a machine, with optional closure computation. Args: machine: The machine to get generators for. @@ -30,6 +29,7 @@ def get_generators( Returns: List of generators based on the specified selection and closure mode. + """ from clan_cli.vars import graph @@ -62,18 +62,19 @@ def _ensure_healthy( machine: "Machine", generators: list[Generator] | None = None, ) -> None: - """ - Run health checks on the provided generators. + """Run health checks on the provided generators. Fails if any of the generators' health checks fail. """ if generators is None: generators = Generator.get_machine_generators(machine.name, machine.flake) pub_healtcheck_msg = machine.public_vars_store.health_check( - machine.name, generators + machine.name, + generators, ) sec_healtcheck_msg = machine.secret_vars_store.health_check( - machine.name, generators + machine.name, + generators, ) if pub_healtcheck_msg or sec_healtcheck_msg: @@ -103,6 +104,7 @@ def run_generators( no_sandbox: bool = False, ) -> None: """Run the specified generators for machines. + Args: machines: The machines to run generators for. generators: Can be: @@ -116,9 +118,11 @@ def run_generators( prompt_values: A dictionary mapping generator names to their prompt values, or a function that returns prompt values for a generator. no_sandbox: Whether to disable sandboxing when executing the generator. + Raises: ClanError: If the machine or generator is not found, or if there are issues with executing the generator. 
+ """ for machine in machines: if isinstance(generators, list): @@ -134,7 +138,9 @@ def run_generators( else: # None or single string - use get_generators with closure parameter generator_objects = get_generators( - machine, full_closure=full_closure, generator_name=generators + machine, + full_closure=full_closure, + generator_name=generators, ) # If prompt function provided, ask all prompts diff --git a/pkgs/clan-cli/docs.py b/pkgs/clan-cli/docs.py index b17db9d88..f0913d9d4 100644 --- a/pkgs/clan-cli/docs.py +++ b/pkgs/clan-cli/docs.py @@ -23,7 +23,7 @@ class Option: md_li += f"`<{self.metavar}>` " if self.metavar else "" md_li += f"(Default: `{self.default}`) " if self.default else "" md_li += indent_next( - f"\n{self.description.strip()}" if self.description else "" + f"\n{self.description.strip()}" if self.description else "", ) # md_li += indent_next(f"\n{self.epilog.strip()}" if self.epilog else "") @@ -83,16 +83,15 @@ class Category: md_li += f"""- **[{icon}{self.title}](./{"-".join(self.title.split(" "))}.md)**\n\n""" md_li += f"""{indent_all("---", 4)}\n\n""" md_li += indent_all( - f"{self.description.strip()}\n" if self.description else "", 4 + f"{self.description.strip()}\n" if self.description else "", + 4, ) return md_li def epilog_to_md(text: str) -> str: - """ - Convert the epilog to md - """ + """Convert the epilog to md""" after_examples = False md = "" # md += text @@ -101,21 +100,20 @@ def epilog_to_md(text: str) -> str: after_examples = True md += "### Examples" md += "\n" - else: - if after_examples: - if line.strip().startswith("$"): - md += f"`{line}`" - md += "\n" - md += "\n" - else: - # TODO: check, if the link is already a markdown link, only convert if not - # if contains_https_link(line): - # line = convert_to_markdown_link(line) - md += line - md += "\n" + elif after_examples: + if line.strip().startswith("$"): + md += f"`{line}`" + md += "\n" + md += "\n" else: + # TODO: check, if the link is already a markdown link, only convert if not + # if contains_https_link(line): + # line = convert_to_markdown_link(line) md += line md += "\n" + else: + md += line + md += "\n" return md @@ -132,8 +130,7 @@ def convert_to_markdown_link(line: str) -> str: def indent_next(text: str, indent_size: int = 4) -> str: - """ - Indent all lines in a string except the first line. + """Indent all lines in a string except the first line. This is useful for adding multiline texts a lists in Markdown. """ indent = " " * indent_size @@ -143,9 +140,7 @@ def indent_next(text: str, indent_size: int = 4) -> str: def indent_all(text: str, indent_size: int = 4) -> str: - """ - Indent all lines in a string. - """ + """Indent all lines in a string.""" indent = " " * indent_size lines = text.split("\n") indented_text = indent + ("\n" + indent).join(lines) @@ -158,14 +153,12 @@ def get_subcommands( level: int = 0, prefix: list[str] | None = None, ) -> tuple[list[Option], list[Option], list[Subcommand]]: - """ - Generate Markdown documentation for an argparse.ArgumentParser instance including its subcommands. + """Generate Markdown documentation for an argparse.ArgumentParser instance including its subcommands. :param parser: The argparse.ArgumentParser instance. :param level: Current depth of subcommand. :return: Markdown formatted documentation as a string. """ - # Document each argument # --flake --option --debug, etc. 
if prefix is None: @@ -190,7 +183,7 @@ def get_subcommands( description=action.help if action.help else "", default=action.default if action.default is not None else "", metavar=f"{action.metavar}" if action.metavar else "", - ) + ), ) if not option_strings: @@ -201,7 +194,7 @@ def get_subcommands( description=action.help if action.help else "", default=action.default if action.default is not None else "", metavar=f"{action.metavar}" if action.metavar else "", - ) + ), ) for action in parser._actions: # noqa: SLF001 @@ -215,7 +208,10 @@ def get_subcommands( subcommands.append(sub_command) (_options, _positionals, _subcommands) = get_subcommands( - parser=subparser, to=to, level=level + 1, prefix=[*prefix, name] + parser=subparser, + to=to, + level=level + 1, + prefix=[*prefix, name], ) to.append( @@ -227,15 +223,14 @@ def get_subcommands( options=_options, positionals=_positionals, subcommands=_subcommands, - ) + ), ) return (flag_options, positional_options, subcommands) def collect_commands() -> list[Category]: - """ - Returns a sorted list of all available commands. + """Returns a sorted list of all available commands. i.e. a... @@ -261,12 +256,15 @@ def collect_commands() -> list[Category]: continue if str(name) in hidden_subcommands: print( - f"Excluded {name} from documentation as it is a hidden subcommand." + f"Excluded {name} from documentation as it is a hidden subcommand.", ) continue (_options, _positionals, _subcommands) = get_subcommands( - subparser, to=result, level=2, prefix=[name] + subparser, + to=result, + level=2, + prefix=[name], ) result.append( Category( @@ -277,7 +275,7 @@ def collect_commands() -> list[Category]: subcommands=_subcommands, epilog=subparser.epilog, level=1, - ) + ), ) def weight_cmd_groups(c: Category) -> tuple[str, str, int]: @@ -297,8 +295,7 @@ def collect_commands() -> list[Category]: def build_command_reference() -> None: - """ - Function that will build the reference + """Function that will build the reference and write it to the out path. """ cmds = collect_commands() diff --git a/pkgs/clan-cli/openapi.py b/pkgs/clan-cli/openapi.py index 68c007023..726448df7 100644 --- a/pkgs/clan-cli/openapi.py +++ b/pkgs/clan-cli/openapi.py @@ -84,14 +84,14 @@ def check_operation_name(op_name: str, normalized: list[str]) -> list[str]: warnings.append( f"""Verb '{verb}' of API operation {op_name} is not allowed. Use one of: {", ".join(COMMON_VERBS)} -""" +""", ) top_level_noun = nouns[0] if nouns else None if top_level_noun is None or top_level_noun.lower() not in TOP_LEVEL_RESOURCES: warnings.append( f"""Top-level resource '{top_level_noun}' of API operation {op_name} is not allowed. Use one of: {", ".join(TOP_LEVEL_RESOURCES)} -""" +""", ) return warnings @@ -136,11 +136,9 @@ def fix_nullables(schema: dict) -> dict: def fix_empty_required(schema: dict) -> dict: - """ - Recursively remove "required: []" from schemas + """Recursively remove "required: []" from schemas This is valid in json schema, but leads to errors in some OpenAPI 3.0 renderers. """ - if isinstance(schema, dict): if "required" in schema and schema["required"] == []: # Remove empty required list @@ -243,7 +241,7 @@ def main() -> None: if not func_schema.get("description"): errors.append( - f"{func_name} doesn't have a description. Python docstring is required for an API function." + f"{func_name} doesn't have a description. 
Python docstring is required for an API function.", ) if warnings: @@ -279,8 +277,8 @@ def main() -> None: "required": True, "content": { "application/json": { - "schema": {"$ref": f"#/components/schemas/{args_name}"} - } + "schema": {"$ref": f"#/components/schemas/{args_name}"}, + }, }, }, "responses": { @@ -289,13 +287,13 @@ def main() -> None: "content": { "application/json": { "schema": { - "$ref": f"#/components/schemas/{return_name}" - } - } + "$ref": f"#/components/schemas/{return_name}", + }, + }, }, - } + }, }, - } + }, } sort_openapi_paths_by_tag_tree(openapi) diff --git a/pkgs/clan-vm-manager/clan_vm_manager/app.py b/pkgs/clan-vm-manager/clan_vm_manager/app.py index beb57df5f..4a3781420 100644 --- a/pkgs/clan-vm-manager/clan_vm_manager/app.py +++ b/pkgs/clan-vm-manager/clan_vm_manager/app.py @@ -22,8 +22,7 @@ log = logging.getLogger(__name__) class MainApplication(Adw.Application): - """ - This class is initialized every time the app is started + """This class is initialized every time the app is started Only the Adw.ApplicationWindow is a singleton. So don't use any singletons in the Adw.Application class. """ @@ -78,7 +77,8 @@ class MainApplication(Adw.Application): if "debug" in options: ToastOverlay.use().add_toast_unique( - InfoToast("Debug logging enabled").toast, "info.debugging.enabled" + InfoToast("Debug logging enabled").toast, + "info.debugging.enabled", ) args = command_line.get_arguments() diff --git a/pkgs/clan-vm-manager/clan_vm_manager/clan_uri.py b/pkgs/clan-vm-manager/clan_vm_manager/clan_uri.py index 14f2c5549..a4a609d1b 100644 --- a/pkgs/clan-vm-manager/clan_vm_manager/clan_uri.py +++ b/pkgs/clan-vm-manager/clan_vm_manager/clan_uri.py @@ -33,8 +33,7 @@ class ClanURI: # Check if the URI starts with clan:// # If it does, remove the clan:// prefix prefix = "clan://" - if uri.startswith(prefix): - uri = uri[len(prefix) :] + uri = uri.removeprefix(prefix) # Fix missing colon (caused by browsers like Firefox) if "//" in uri and ":" not in uri.split("//", 1)[0]: diff --git a/pkgs/clan-vm-manager/clan_vm_manager/components/gkvstore.py b/pkgs/clan-vm-manager/clan_vm_manager/components/gkvstore.py index f830b1073..942305acd 100644 --- a/pkgs/clan-vm-manager/clan_vm_manager/components/gkvstore.py +++ b/pkgs/clan-vm-manager/clan_vm_manager/components/gkvstore.py @@ -14,7 +14,8 @@ log = logging.getLogger(__name__) # Define type variables for key and value types K = TypeVar("K") # Key type V = TypeVar( - "V", bound=GObject.Object + "V", + bound=GObject.Object, ) # Value type, bound to GObject.GObject or its subclasses @@ -23,8 +24,7 @@ V = TypeVar( # clan_vm_manager/components/gkvstore.py:21: error: Definition of "install_properties" in base class "Object" is incompatible with definition in base class "GInterface" [misc] # clan_vm_manager/components/gkvstore.py:21: error: Definition of "getv" in base class "Object" is incompatible with definition in base class "GInterface" [misc] class GKVStore[K, V: GObject.Object](GObject.GObject, Gio.ListModel): # type: ignore[misc] - """ - A simple key-value store that implements the Gio.ListModel interface, with generic types for keys and values. + """A simple key-value store that implements the Gio.ListModel interface, with generic types for keys and values. Only use self[key] and del self[key] for accessing the items for better performance. This class could be optimized by having the objects remember their position in the list. 
""" @@ -57,7 +57,9 @@ class GKVStore[K, V: GObject.Object](GObject.GObject, Gio.ListModel): # type: i return False, -1 def find_with_equal_func( - self, item: V, equal_func: Callable[[V, V], bool] + self, + item: V, + equal_func: Callable[[V, V], bool], ) -> tuple[bool, int]: log.warning("Finding is O(n) in GKVStore. Better use indexing") for i, v in enumerate(self.values()): @@ -66,7 +68,10 @@ class GKVStore[K, V: GObject.Object](GObject.GObject, Gio.ListModel): # type: i return False, -1 def find_with_equal_func_full( - self, item: V, equal_func: Callable[[V, V, Any], bool], user_data: Any + self, + item: V, + equal_func: Callable[[V, V, Any], bool], + user_data: Any, ) -> tuple[bool, int]: log.warning("Finding is O(n) in GKVStore. Better use indexing") for i, v in enumerate(self.values()): @@ -77,7 +82,7 @@ class GKVStore[K, V: GObject.Object](GObject.GObject, Gio.ListModel): # type: i def insert(self, position: int, item: V) -> None: log.warning("Inserting is O(n) in GKVStore. Better use append") log.warning( - "This functions may have incorrect items_changed signal behavior. Please test it" + "This functions may have incorrect items_changed signal behavior. Please test it", ) key = self.key_gen(item) if key in self._items: @@ -105,7 +110,10 @@ class GKVStore[K, V: GObject.Object](GObject.GObject, Gio.ListModel): # type: i self.items_changed(position, 0, 1) def insert_sorted( - self, item: V, compare_func: Callable[[V, V, Any], int], user_data: Any + self, + item: V, + compare_func: Callable[[V, V, Any], int], + user_data: Any, ) -> None: msg = "insert_sorted is not implemented in GKVStore" raise NotImplementedError(msg) @@ -225,6 +233,7 @@ class GKVStore[K, V: GObject.Object](GObject.GObject, Gio.ListModel): # type: i return self.values()[-1] def register_on_change( - self, callback: Callable[["GKVStore[K,V]", int, int, int], None] + self, + callback: Callable[["GKVStore[K,V]", int, int, int], None], ) -> None: self.connect("items-changed", callback) diff --git a/pkgs/clan-vm-manager/clan_vm_manager/components/list_splash.py b/pkgs/clan-vm-manager/clan_vm_manager/components/list_splash.py index 1b672d0be..c0aadbe42 100644 --- a/pkgs/clan-vm-manager/clan_vm_manager/components/list_splash.py +++ b/pkgs/clan-vm-manager/clan_vm_manager/components/list_splash.py @@ -67,8 +67,7 @@ class EmptySplash(Gtk.Box): return pixbuf def _on_join(self, button: Gtk.Button, entry: Gtk.Entry) -> None: - """ - Callback for the join button + """Callback for the join button Extracts the text from the entry and calls the on_join callback """ log.info(f"Splash screen: Joining {entry.get_text()}") diff --git a/pkgs/clan-vm-manager/clan_vm_manager/components/trayicon.py b/pkgs/clan-vm-manager/clan_vm_manager/components/trayicon.py index e126faf73..8999acdda 100644 --- a/pkgs/clan-vm-manager/clan_vm_manager/components/trayicon.py +++ b/pkgs/clan-vm-manager/clan_vm_manager/components/trayicon.py @@ -128,7 +128,6 @@ def encode_path(path: str, prefix: bool = True) -> bytes: On Windows, also append prefix to enable extended-length path. 
""" - if sys.platform == "win32" and prefix: path = path.replace("/", "\\") @@ -142,10 +141,12 @@ def encode_path(path: str, prefix: bool = True) -> bytes: # from pynicotine.utils import truncate_string_byte def truncate_string_byte( - string: str, byte_limit: int, encoding: str = "utf-8", ellipsize: bool = False + string: str, + byte_limit: int, + encoding: str = "utf-8", + ellipsize: bool = False, ) -> str: """Truncates a string to fit inside a byte limit.""" - string_bytes = string.encode(encoding) if len(string_bytes) <= byte_limit: @@ -217,7 +218,8 @@ class BaseImplementation: def create_menu(self) -> None: self.show_hide_item = self.create_item( - "default", self.application.on_window_hide_unhide + "default", + self.application.on_window_hide_unhide, ) # self.create_item() @@ -365,17 +367,26 @@ class StatusNotifierImplementation(BaseImplementation): def add_property(self, name: str, signature: Any, value: Any) -> None: self.properties[name] = StatusNotifierImplementation.DBusProperty( - name, signature, value + name, + signature, + value, ) def add_signal(self, name: str, args: Any) -> None: self.signals[name] = StatusNotifierImplementation.DBusSignal(name, args) def add_method( - self, name: str, in_args: Any, out_args: Any, callback: Any + self, + name: str, + in_args: Any, + out_args: Any, + callback: Any, ) -> None: self.methods[name] = StatusNotifierImplementation.DBusMethod( - name, in_args, out_args, callback + name, + in_args, + out_args, + callback, ) def emit_signal(self, name: str, *args: Any) -> None: @@ -411,7 +422,12 @@ class StatusNotifierImplementation(BaseImplementation): invocation.return_value(return_value) def on_get_property( - self, _connection, _sender, _path, _interface_name, property_name + self, + _connection, + _sender, + _path, + _interface_name, + property_name, ): prop = self.properties[property_name] return GLib.Variant(prop.signature, prop.value) @@ -483,7 +499,8 @@ class StatusNotifierImplementation(BaseImplementation): for idx, item in self._items.items(): serialized_item = GLib.Variant( - "(ia{sv}av)", (idx, self._serialize_item(item), []) + "(ia{sv}av)", + (idx, self._serialize_item(item), []), ) serialized_items.append(serialized_item) @@ -558,7 +575,7 @@ class StatusNotifierImplementation(BaseImplementation): try: self.bus = Gio.bus_get_sync(bus_type=Gio.BusType.SESSION) self.tray_icon = self.StatusNotifierItemService( - activate_callback=self.activate_callback + activate_callback=self.activate_callback, ) self.tray_icon.register() @@ -573,7 +590,8 @@ class StatusNotifierImplementation(BaseImplementation): interface_name="org.kde.StatusNotifierWatcher", method_name="RegisterStatusNotifierItem", parameters=GLib.Variant( - "(s)", ("/org/ayatana/NotificationItem/Nicotine",) + "(s)", + ("/org/ayatana/NotificationItem/Nicotine",), ), reply_type=None, flags=Gio.DBusCallFlags.NONE, @@ -590,7 +608,6 @@ class StatusNotifierImplementation(BaseImplementation): @staticmethod def check_icon_path(icon_name, icon_path) -> bool: """Check if tray icons exist in the specified icon path.""" - if not icon_path: return False @@ -600,7 +617,8 @@ class StatusNotifierImplementation(BaseImplementation): with os.scandir(encode_path(icon_path)) as entries: for entry in entries: if entry.is_file() and entry.name.decode( - "utf-8", "replace" + "utf-8", + "replace", ).startswith(icon_scheme): return True @@ -611,8 +629,8 @@ class StatusNotifierImplementation(BaseImplementation): def get_icon_path(self): """Returns an icon path to use for tray icons, or None to fall back to - 
system-wide icons.""" - + system-wide icons. + """ # icon_path = self.application.get_application_icon_path() return "" @@ -811,7 +829,8 @@ class Win32Implementation(BaseImplementation): from ctypes import windll windll.user32.UnregisterClassW( - self.WINDOW_CLASS_NAME, self._window_class.h_instance + self.WINDOW_CLASS_NAME, + self._window_class.h_instance, ) self._window_class = None @@ -850,7 +869,12 @@ class Win32Implementation(BaseImplementation): if GTK_API_VERSION >= 4: icon = ICON_THEME.lookup_icon( - icon_name, fallbacks=None, size=icon_size, scale=1, direction=0, flags=0 + icon_name, + fallbacks=None, + size=icon_size, + scale=1, + direction=0, + flags=0, ) icon_path = icon.get_file().get_path() @@ -858,7 +882,9 @@ class Win32Implementation(BaseImplementation): return ico_buffer pixbuf = GdkPixbuf.Pixbuf.new_from_file_at_size( - icon_path, icon_size, icon_size + icon_path, + icon_size, + icon_size, ) else: icon = ICON_THEME.lookup_icon(icon_name, size=icon_size, flags=0) @@ -931,7 +957,8 @@ class Win32Implementation(BaseImplementation): ), u_callback_message=self.WM_TRAYICON, sz_tip=truncate_string_byte( - pynicotine.__application_name__, byte_limit=127 + pynicotine.__application_name__, + byte_limit=127, ), ) action = self.NIM_ADD @@ -943,10 +970,14 @@ class Win32Implementation(BaseImplementation): self._notify_id.h_icon = self._h_icon self._notify_id.sz_info_title = truncate_string_byte( - title, byte_limit=63, ellipsize=True + title, + byte_limit=63, + ellipsize=True, ) self._notify_id.sz_info = truncate_string_byte( - message, byte_limit=255, ellipsize=True + message, + byte_limit=255, + ellipsize=True, ) windll.shell32.Shell_NotifyIconW(action, byref(self._notify_id)) @@ -1019,10 +1050,16 @@ class Win32Implementation(BaseImplementation): item_info = self._serialize_menu_item(item) if not windll.user32.SetMenuItemInfoW( - self._menu, item_id, False, byref(item_info) + self._menu, + item_id, + False, + byref(item_info), ): windll.user32.InsertMenuItemW( - self._menu, item_id, False, byref(item_info) + self._menu, + item_id, + False, + byref(item_info), ) def set_icon(self, icon_name): diff --git a/pkgs/clan-vm-manager/clan_vm_manager/components/vmobj.py b/pkgs/clan-vm-manager/clan_vm_manager/components/vmobj.py index 1c4c60f75..cee2337f8 100644 --- a/pkgs/clan-vm-manager/clan_vm_manager/components/vmobj.py +++ b/pkgs/clan-vm-manager/clan_vm_manager/components/vmobj.py @@ -56,10 +56,14 @@ class VMObject(GObject.Object): # Create a process object to store the VM process self.vm_process: MPProcess = MPProcess( - "vm_dummy", mp.Process(), Path("./dummy") + "vm_dummy", + mp.Process(), + Path("./dummy"), ) self.build_process: MPProcess = MPProcess( - "build_dummy", mp.Process(), Path("./dummy") + "build_dummy", + mp.Process(), + Path("./dummy"), ) self._start_thread: threading.Thread = threading.Thread() self.machine: Machine | None = None @@ -77,7 +81,8 @@ class VMObject(GObject.Object): # Create a temporary directory to store the logs self.log_dir: tempfile.TemporaryDirectory = tempfile.TemporaryDirectory( - prefix="clan_vm-", suffix=f"-{self.data.flake.flake_attr}" + prefix="clan_vm-", + suffix=f"-{self.data.flake.flake_attr}", ) self._logs_id: int = 0 self._log_file: IO[str] | None = None @@ -87,7 +92,8 @@ class VMObject(GObject.Object): # and block the signal while we change the state. This is cursed. 
self.switch: Gtk.Switch = Gtk.Switch() self.switch_handler_id: int = self.switch.connect( - "notify::active", self._on_switch_toggle + "notify::active", + self._on_switch_toggle, ) self.connect("vm_status_changed", self._on_vm_status_changed) @@ -145,7 +151,8 @@ class VMObject(GObject.Object): @contextmanager def _create_machine(self) -> Generator[Machine]: uri = ClanURI.from_str( - url=str(self.data.flake.flake_url), machine_name=self.data.flake.flake_attr + url=str(self.data.flake.flake_url), + machine_name=self.data.flake.flake_attr, ) self.machine = Machine( name=self.data.flake.flake_attr, @@ -154,7 +161,8 @@ class VMObject(GObject.Object): assert self.machine is not None state_dir = vm_state_dir( - flake_url=self.machine.flake.identifier, vm_name=self.machine.name + flake_url=self.machine.flake.identifier, + vm_name=self.machine.name, ) self.qmp_wrap = QMPWrapper(state_dir) assert self.machine is not None @@ -194,7 +202,9 @@ class VMObject(GObject.Object): self.switch.set_sensitive(True) # Start the logs watcher self._logs_id = GLib.timeout_add( - 50, self._get_logs_task, self.build_process + 50, + self._get_logs_task, + self.build_process, ) if self._logs_id == 0: log.error("Failed to start VM log watcher") @@ -307,7 +317,7 @@ class VMObject(GObject.Object): diff = datetime.datetime.now(tz=datetime.UTC) - start_time if diff.seconds > self.KILL_TIMEOUT: log.error( - f"VM {self.get_id()} has not stopped after {self.KILL_TIMEOUT}s. Killing it" + f"VM {self.get_id()} has not stopped after {self.KILL_TIMEOUT}s. Killing it", ) self.vm_process.kill_group() break @@ -334,7 +344,8 @@ class VMObject(GObject.Object): log.debug(f"VM {self.get_id()} has stopped") ToastOverlay.use().add_toast_unique( - InfoToast(f"Stopped {self.get_id()}").toast, "info.vm.exit" + InfoToast(f"Stopped {self.get_id()}").toast, + "info.vm.exit", ) def shutdown(self) -> None: diff --git a/pkgs/clan-vm-manager/clan_vm_manager/history.py b/pkgs/clan-vm-manager/clan_vm_manager/history.py index 1594e8fcd..0284cccb2 100644 --- a/pkgs/clan-vm-manager/clan_vm_manager/history.py +++ b/pkgs/clan-vm-manager/clan_vm_manager/history.py @@ -92,7 +92,9 @@ def add_history(uri: ClanURI) -> HistoryEntry: def _add_maschine_to_history_list( - uri_path: str, uri_machine: str, entries: list[HistoryEntry] + uri_path: str, + uri_machine: str, + entries: list[HistoryEntry], ) -> HistoryEntry: for new_entry in entries: if ( @@ -143,10 +145,16 @@ def parse_args() -> argparse.Namespace: ) add_parser = subparser.add_parser("add", help="Add a clan flake") add_parser.add_argument( - "uri", type=ClanURI.from_str, help="Path to the flake", default="." 
+ "uri", + type=ClanURI.from_str, + help="Path to the flake", + default=".", ) add_parser.add_argument( - "--all", help="Add all machines", default=False, action="store_true" + "--all", + help="Add all machines", + default=False, + action="store_true", ) add_parser.set_defaults(func=add_history_command) list_parser = subparser.add_parser("list", help="List recently used flakes") diff --git a/pkgs/clan-vm-manager/clan_vm_manager/singletons/toast.py b/pkgs/clan-vm-manager/clan_vm_manager/singletons/toast.py index be334f01e..73ce5a203 100644 --- a/pkgs/clan-vm-manager/clan_vm_manager/singletons/toast.py +++ b/pkgs/clan-vm-manager/clan_vm_manager/singletons/toast.py @@ -16,8 +16,7 @@ log = logging.getLogger(__name__) class ToastOverlay: - """ - The ToastOverlay is a class that manages the display of toasts + """The ToastOverlay is a class that manages the display of toasts It should be used as a singleton in your application to prevent duplicate toasts Usage """ @@ -53,11 +52,14 @@ class ErrorToast: toast: Adw.Toast def __init__( - self, message: str, persistent: bool = False, details: str = "" + self, + message: str, + persistent: bool = False, + details: str = "", ) -> None: super().__init__() self.toast = Adw.Toast.new( - f"""❌ Error {message}""" + f"""❌ Error {message}""", ) self.toast.set_use_markup(True) @@ -85,7 +87,7 @@ class WarningToast: def __init__(self, message: str, persistent: bool = False) -> None: super().__init__() self.toast = Adw.Toast.new( - f"⚠ Warning {message}" + f"⚠ Warning {message}", ) self.toast.set_use_markup(True) @@ -135,7 +137,7 @@ class LogToast: ) -> None: super().__init__() self.toast = Adw.Toast.new( - f"""Logs are available {message}""" + f"""Logs are available {message}""", ) self.toast.set_use_markup(True) diff --git a/pkgs/clan-vm-manager/clan_vm_manager/singletons/use_join.py b/pkgs/clan-vm-manager/clan_vm_manager/singletons/use_join.py index 03826def5..637ac9c30 100644 --- a/pkgs/clan-vm-manager/clan_vm_manager/singletons/use_join.py +++ b/pkgs/clan-vm-manager/clan_vm_manager/singletons/use_join.py @@ -45,8 +45,7 @@ class JoinValue(GObject.Object): class JoinList: - """ - This is a singleton. + """This is a singleton. It is initialized with the first call of use() """ @@ -69,28 +68,32 @@ class JoinList: return cls._instance def rerender_join_list( - self, source: GKVStore, position: int, removed: int, added: int + self, + source: GKVStore, + position: int, + removed: int, + added: int, ) -> None: self.list_store.items_changed( - 0, self.list_store.get_n_items(), self.list_store.get_n_items() + 0, + self.list_store.get_n_items(), + self.list_store.get_n_items(), ) def is_empty(self) -> bool: return self.list_store.get_n_items() == 0 def push(self, uri: ClanURI, after_join: Callable[[JoinValue], None]) -> None: - """ - Add a join request. + """Add a join request. This method can add multiple join requests if called subsequently for each request. 
""" - value = JoinValue(uri) machine_id = Machine(uri.machine_name, uri.flake) machine_id_list = [] for machine_obj in self.list_store: - mvalue: ClanURI = cast(JoinValue, machine_obj).url + mvalue: ClanURI = cast("JoinValue", machine_obj).url machine = Machine(mvalue.machine_name, mvalue.flake) machine_id_list.append(machine.get_id()) diff --git a/pkgs/clan-vm-manager/clan_vm_manager/singletons/use_views.py b/pkgs/clan-vm-manager/clan_vm_manager/singletons/use_views.py index 36e9fa2d9..886e6b00c 100644 --- a/pkgs/clan-vm-manager/clan_vm_manager/singletons/use_views.py +++ b/pkgs/clan-vm-manager/clan_vm_manager/singletons/use_views.py @@ -8,8 +8,7 @@ from gi.repository import Adw class ViewStack: - """ - This is a singleton. + """This is a singleton. It is initialized with the first call of use() Usage: diff --git a/pkgs/clan-vm-manager/clan_vm_manager/singletons/use_vms.py b/pkgs/clan-vm-manager/clan_vm_manager/singletons/use_vms.py index 6135dafdb..b10d7891c 100644 --- a/pkgs/clan-vm-manager/clan_vm_manager/singletons/use_vms.py +++ b/pkgs/clan-vm-manager/clan_vm_manager/singletons/use_vms.py @@ -53,7 +53,8 @@ class ClanStore: if cls._instance is None: cls._instance = cls.__new__(cls) cls._clan_store = GKVStore( - VMStore, lambda store: store.first().data.flake.flake_url + VMStore, + lambda store: store.first().data.flake.flake_url, ) cls._emitter = Emitter() @@ -74,19 +75,24 @@ class ClanStore: return self._logging_vm def register_on_deep_change( - self, callback: Callable[[GKVStore, int, int, int], None] + self, + callback: Callable[[GKVStore, int, int, int], None], ) -> None: - """ - Register a callback that is called when a clan_store or one of the included VMStores changes - """ + """Register a callback that is called when a clan_store or one of the included VMStores changes""" def on_vmstore_change( - store: VMStore, position: int, removed: int, added: int + store: VMStore, + position: int, + removed: int, + added: int, ) -> None: callback(store, position, removed, added) def on_clanstore_change( - store: "GKVStore", position: int, removed: int, added: int + store: "GKVStore", + position: int, + removed: int, + added: int, ) -> None: if added > 0: store.values()[position].register_on_change(on_vmstore_change) @@ -120,7 +126,9 @@ class ClanStore: logs_view: Logs = views.get_child_by_name("logs") # type: ignore def file_read_callback( - source_object: Gio.File, result: Gio.AsyncResult, _user_data: Any + source_object: Gio.File, + result: Gio.AsyncResult, + _user_data: Any, ) -> None: try: # Finish the asynchronous read operation @@ -155,7 +163,7 @@ class ClanStore: if old_vm: log.info( - f"VM {vm.data.flake.flake_attr} already exists in store. Updating data field." + f"VM {vm.data.flake.flake_attr} already exists in store. 
Updating data field.", ) old_vm.update(vm.data) else: diff --git a/pkgs/clan-vm-manager/clan_vm_manager/views/details.py b/pkgs/clan-vm-manager/clan_vm_manager/views/details.py index df2d8fa17..6e26aeb7c 100644 --- a/pkgs/clan-vm-manager/clan_vm_manager/views/details.py +++ b/pkgs/clan-vm-manager/clan_vm_manager/views/details.py @@ -13,7 +13,8 @@ ListItem = TypeVar("ListItem", bound=GObject.Object) def create_details_list[ListItem: GObject.Object]( - model: Gio.ListStore, render_row: Callable[[Gtk.ListBox, ListItem], Gtk.Widget] + model: Gio.ListStore, + render_row: Callable[[Gtk.ListBox, ListItem], Gtk.Widget], ) -> Gtk.ListBox: boxed_list = Gtk.ListBox() boxed_list.set_selection_mode(Gtk.SelectionMode.NONE) @@ -28,7 +29,10 @@ class PreferencesValue(GObject.Object): data: Any def __init__( - self, variant: Literal["CPU", "MEMORY"], editable: bool, data: Any + self, + variant: Literal["CPU", "MEMORY"], + editable: bool, + data: Any, ) -> None: super().__init__() self.variant = variant @@ -44,13 +48,16 @@ class Details(Gtk.Box): preferences_store.append(PreferencesValue("CPU", True, 1)) self.details_list = create_details_list( - model=preferences_store, render_row=self.render_entry_row + model=preferences_store, + render_row=self.render_entry_row, ) self.append(self.details_list) def render_entry_row( - self, boxed_list: Gtk.ListBox, item: PreferencesValue + self, + boxed_list: Gtk.ListBox, + item: PreferencesValue, ) -> Gtk.Widget: cores: int | None = os.cpu_count() fcores = float(cores) if cores else 1.0 diff --git a/pkgs/clan-vm-manager/clan_vm_manager/views/list.py b/pkgs/clan-vm-manager/clan_vm_manager/views/list.py index 5f104989b..d149e6a48 100644 --- a/pkgs/clan-vm-manager/clan_vm_manager/views/list.py +++ b/pkgs/clan-vm-manager/clan_vm_manager/views/list.py @@ -46,8 +46,7 @@ def create_boxed_list[CustomStore: Gio.ListModel, ListItem: GObject.Object]( class ClanList(Gtk.Box): - """ - The ClanList + """The ClanList Is the composition of the ClanListToolbar the clanListView @@ -70,7 +69,8 @@ class ClanList(Gtk.Box): # Add join list self.join_boxed_list = create_boxed_list( - model=JoinList.use().list_store, render_row=self.render_join_row + model=JoinList.use().list_store, + render_row=self.render_join_row, ) self.join_boxed_list.add_css_class("join-list") self.append(self.join_boxed_list) @@ -79,7 +79,8 @@ class ClanList(Gtk.Box): clan_store.connect("is_ready", self.display_splash) self.group_list = create_boxed_list( - model=clan_store.clan_store, render_row=self.render_group_row + model=clan_store.clan_store, + render_row=self.render_group_row, ) self.group_list.add_css_class("group-list") self.append(self.group_list) @@ -95,7 +96,9 @@ class ClanList(Gtk.Box): self.append(self.splash) def render_group_row( - self, boxed_list: Gtk.ListBox, vm_store: VMStore + self, + boxed_list: Gtk.ListBox, + vm_store: VMStore, ) -> Gtk.Widget: self.remove(self.splash) @@ -199,7 +202,8 @@ class ClanList(Gtk.Box): action_id = base64.b64encode(vm.get_id().encode("utf-8")).decode("utf-8") build_logs_action = Gio.SimpleAction.new( - f"logs.{action_id}", GLib.VariantType.new("s") + f"logs.{action_id}", + GLib.VariantType.new("s"), ) build_logs_action.connect("activate", self.on_show_build_logs) @@ -213,7 +217,9 @@ class ClanList(Gtk.Box): # set a callback function for conditionally enabling the build_logs action def on_vm_build_notify( - vm: VMObject, is_building: bool, is_running: bool + vm: VMObject, + is_building: bool, + is_running: bool, ) -> None: build_logs_action.set_enabled(is_building or 
is_running) app.add_action(build_logs_action) @@ -279,7 +285,9 @@ class ClanList(Gtk.Box): views.set_visible_child_name("logs") def render_join_row( - self, boxed_list: Gtk.ListBox, join_val: JoinValue + self, + boxed_list: Gtk.ListBox, + join_val: JoinValue, ) -> Gtk.Widget: if boxed_list.has_css_class("no-shadow"): boxed_list.remove_css_class("no-shadow") @@ -300,13 +308,13 @@ class ClanList(Gtk.Box): ToastOverlay.use().add_toast_unique( WarningToast( - f"""{join_val.url.machine_name!s} Already exists. Joining again will update it""" + f"""{join_val.url.machine_name!s} Already exists. Joining again will update it""", ).toast, "warning.duplicate.join", ) row.set_subtitle( - sub + "\nClan already exists. Joining again will update it" + sub + "\nClan already exists. Joining again will update it", ) avatar = Adw.Avatar() diff --git a/pkgs/clan-vm-manager/clan_vm_manager/views/logs.py b/pkgs/clan-vm-manager/clan_vm_manager/views/logs.py index f7fb804f5..4c6b7c6d3 100644 --- a/pkgs/clan-vm-manager/clan_vm_manager/views/logs.py +++ b/pkgs/clan-vm-manager/clan_vm_manager/views/logs.py @@ -11,8 +11,7 @@ log = logging.getLogger(__name__) class Logs(Gtk.Box): - """ - Simple log view + """Simple log view This includes a banner and a text view and a button to close the log and navigate back to the overview """ @@ -44,9 +43,7 @@ class Logs(Gtk.Box): self.banner.set_title(title) def set_message(self, message: str) -> None: - """ - Set the log message. This will delete any previous message - """ + """Set the log message. This will delete any previous message""" buffer = self.text_view.get_buffer() buffer.set_text(message) @@ -54,9 +51,7 @@ class Logs(Gtk.Box): self.text_view.scroll_to_mark(mark, 0.05, True, 0.0, 1.0) def append_message(self, message: str) -> None: - """ - Append to the end of a potentially existent log message - """ + """Append to the end of a potentially existent log message""" buffer = self.text_view.get_buffer() end_iter = buffer.get_end_iter() buffer.insert(end_iter, message) # type: ignore diff --git a/pkgs/clan-vm-manager/tests/command.py b/pkgs/clan-vm-manager/tests/command.py index 608ae439f..6195877d8 100644 --- a/pkgs/clan-vm-manager/tests/command.py +++ b/pkgs/clan-vm-manager/tests/command.py @@ -53,8 +53,7 @@ class Command: @pytest.fixture def command() -> Iterator[Command]: - """ - Starts a background command. The process is automatically terminated in the end. + """Starts a background command. The process is automatically terminated in the end. 
>>> p = command.run(["some", "daemon"]) >>> print(p.pid) """ diff --git a/pkgs/clan-vm-manager/tests/conftest.py b/pkgs/clan-vm-manager/tests/conftest.py index 8252cfc1a..7439cc3d6 100644 --- a/pkgs/clan-vm-manager/tests/conftest.py +++ b/pkgs/clan-vm-manager/tests/conftest.py @@ -9,7 +9,7 @@ from clan_lib.nix import nix_shell sys.path.append(str(Path(__file__).parent / "helpers")) sys.path.append( - str(Path(__file__).parent.parent) + str(Path(__file__).parent.parent), ) # Also add clan vm manager to PYTHONPATH pytest_plugins = [ diff --git a/pkgs/clan-vm-manager/tests/root.py b/pkgs/clan-vm-manager/tests/root.py index 8593bd5e6..ceded7e9f 100644 --- a/pkgs/clan-vm-manager/tests/root.py +++ b/pkgs/clan-vm-manager/tests/root.py @@ -13,23 +13,17 @@ else: @pytest.fixture(scope="session") def project_root() -> Path: - """ - Root directory the clan-cli - """ + """Root directory the clan-cli""" return PROJECT_ROOT @pytest.fixture(scope="session") def test_root() -> Path: - """ - Root directory of the tests - """ + """Root directory of the tests""" return TEST_ROOT @pytest.fixture(scope="session") def clan_core() -> Path: - """ - Directory of the clan-core flake - """ + """Directory of the clan-core flake""" return CLAN_CORE diff --git a/pkgs/classgen/main.py b/pkgs/classgen/main.py index 46cc22880..2e8c26152 100644 --- a/pkgs/classgen/main.py +++ b/pkgs/classgen/main.py @@ -28,7 +28,9 @@ def sort_types(items: Iterable[str]) -> list[str]: # Function to map JSON schemas and types to Python types def map_json_type( - json_type: Any, nested_types: list[str] | None = None, parent: Any = None + json_type: Any, + nested_types: list[str] | None = None, + parent: Any = None, ) -> list[str]: if nested_types is None: nested_types = ["Any"] @@ -270,7 +272,9 @@ def generate_dataclass( if isinstance(item_schema, dict): field_types = map_json_type( - prop_type, map_json_type(item_schema), field_name + prop_type, + map_json_type(item_schema), + field_name, ) elif enum := prop_info.get("enum"): literals = ", ".join([f'"{string}"' for string in enum]) @@ -285,8 +289,10 @@ def generate_dataclass( if nested_class_name not in known_classes: nested_classes.append( generate_dataclass( - inner_type, [*attr_path, prop], nested_class_name - ) + inner_type, + [*attr_path, prop], + nested_class_name, + ), ) known_classes.add(nested_class_name) @@ -294,7 +300,7 @@ def generate_dataclass( # Trivial type: # dict[str, inner_type] field_types = [ - f"""dict[str, {" | ".join(map_json_type(inner_type))}]""" + f"""dict[str, {" | ".join(map_json_type(inner_type))}]""", ] elif not inner_type: @@ -303,8 +309,10 @@ def generate_dataclass( if nested_class_name not in known_classes: nested_classes.append( generate_dataclass( - prop_info, [*attr_path, prop], nested_class_name - ) + prop_info, + [*attr_path, prop], + nested_class_name, + ), ) known_classes.add(nested_class_name) elif prop_type == "Unknown": @@ -384,7 +392,7 @@ def generate_dataclass( [ f"{class_name}{n.capitalize()}Type = {x}" for n, x in (required_fields + fields_with_default) - ] + ], ) fields_str = "\n ".join(all_field_declarations) nested_classes_str = "\n\n".join(nested_classes) @@ -423,7 +431,7 @@ from typing import Any, Literal, NotRequired, TypedDict\n # This forces the user to use type-narrowing or casting in the code class Unknown: pass -""" +""", ) f.write(dataclass_code) f.write("\n") diff --git a/pkgs/generate-test-vars/generate_test_vars/cli.py b/pkgs/generate-test-vars/generate_test_vars/cli.py index 80d8476ce..93d762417 100755 --- 
a/pkgs/generate-test-vars/generate_test_vars/cli.py +++ b/pkgs/generate-test-vars/generate_test_vars/cli.py @@ -29,9 +29,7 @@ sops_pub_key = "age1qm0p4vf9jvcnn43s6l4prk8zn6cx0ep9gzvevxecv729xz540v8qa742eg" def get_machine_names(repo_root: Path, check_attr: str, system: str) -> list[str]: - """ - Get the machine names from the test flake - """ + """Get the machine names from the test flake""" nix_options = [] if tmp_store := nix_test_store(): nix_options += ["--store", str(tmp_store)] @@ -41,34 +39,31 @@ def get_machine_names(repo_root: Path, check_attr: str, system: str) -> list[str "--apply", "builtins.attrNames", *nix_options, - ] + ], ) out = subprocess.run(cmd, check=True, text=True, stdout=subprocess.PIPE) return json.loads(out.stdout.strip()) class TestFlake(Flake): - """ - Flake class which is able to deal with not having an actual flake. + """Flake class which is able to deal with not having an actual flake. All nix build and eval calls will be forwarded to: clan-core#checks.. """ def __init__(self, check_attr: str, *args: Any, **kwargs: Any) -> None: - """ - Initialize the TestFlake with the check attribute. - """ + """Initialize the TestFlake with the check attribute.""" super().__init__(*args, **kwargs) self.check_attr = check_attr def select_machine(self, machine_name: str, selector: str) -> Any: - """ - Select a nix attribute for a specific machine. + """Select a nix attribute for a specific machine. Args: machine_name: The name of the machine selector: The attribute selector string relative to the machine config apply: Optional function to apply to the result + """ from clan_lib.nix import nix_config @@ -83,15 +78,18 @@ class TestFlake(Flake): class TestMachine(Machine): - """ - Machine class which is able to deal with not having an actual flake. + """Machine class which is able to deal with not having an actual flake. All nix build and eval calls will be forwarded to: clan-core#checks...nodes.. 
""" @override def __init__( - self, name: str, flake: Flake, test_dir: Path, check_attr: str + self, + name: str, + flake: Flake, + test_dir: Path, + check_attr: str, ) -> None: super().__init__(name, flake) self.check_attr = check_attr @@ -102,11 +100,9 @@ class TestMachine(Machine): return self.test_dir def select(self, attr: str) -> Any: - """ - Build the machine and return the path to the result + """Build the machine and return the path to the result accepts a secret store and a facts store # TODO """ - config = nix_config() system = config["system"] test_system = system @@ -114,7 +110,7 @@ class TestMachine(Machine): test_system = system.rstrip("darwin") + "linux" return self.flake.select( - f'checks."{test_system}".{self.check_attr}.machinesCross.{system}.{self.name}.{attr}' + f'checks."{test_system}".{self.check_attr}.machinesCross.{system}.{self.name}.{attr}', ) @@ -199,7 +195,7 @@ def main() -> None: flake.precache( [ f"checks.{test_system}.{opts.check_attr}.machinesCross.{system}.{{{','.join(machine_names)}}}.config.clan.core.vars.generators.*.validationHash", - ] + ], ) # This hack is necessary because the sops store uses flake.path to find the machine keys @@ -219,7 +215,7 @@ def main() -> None: }, indent=2, ) - + "\n" + + "\n", ) def mocked_prompts( diff --git a/pkgs/scripts/select-shell.py b/pkgs/scripts/select-shell.py index 91314acd8..772f340bd 100644 --- a/pkgs/scripts/select-shell.py +++ b/pkgs/scripts/select-shell.py @@ -13,7 +13,9 @@ def parse_args() -> argparse.Namespace: parser.add_argument("shell", help="the name of the devshell to select", nargs="?") parser.add_argument("--list", action="store_true", help="list available devshells") parser.add_argument( - "--show", action="store_true", help="show the currently selected devshell" + "--show", + action="store_true", + help="show the currently selected devshell", ) return parser.parse_args() diff --git a/pkgs/zerotier-members/zerotier-members.py b/pkgs/zerotier-members/zerotier-members.py index a132c0bec..78393288f 100755 --- a/pkgs/zerotier-members/zerotier-members.py +++ b/pkgs/zerotier-members/zerotier-members.py @@ -37,7 +37,7 @@ def compute_zerotier_ip(network_id: str, identity: str) -> ipaddress.IPv6Address (node_id >> 16) & 0xFF, (node_id >> 8) & 0xFF, (node_id) & 0xFF, - ] + ], ) return ipaddress.IPv6Address(bytes(addr_parts)) @@ -119,7 +119,9 @@ def main() -> None: parser_list = subparser.add_parser("list", help="List members") parser_list.add_argument( - "--no-headers", action="store_true", help="Do not print headers" + "--no-headers", + action="store_true", + help="Do not print headers", ) parser_list.set_defaults(func=list_members)