Merge pull request 'Adding state directory, rearchitecturing API endpoints' (#425) from Qubasa-main into main

Reviewed-on: https://git.clan.lol/clan/clan-core/pulls/425
This commit is contained in:
Luis Hebendanz
2023-10-29 18:54:34 +00:00
96 changed files with 4897 additions and 3665 deletions

2
.gitignore vendored
View File

@@ -1,4 +1,6 @@
.direnv .direnv
.coverage.*
**/qubeclan
**/testdir **/testdir
democlan democlan
result* result*

View File

@@ -11,9 +11,7 @@ $ nix shell git+https://git.clan.lol/clan/clan-core
2. Then use the following commands to initialize a new clan-flake: 2. Then use the following commands to initialize a new clan-flake:
```shellSession ```shellSession
$ mkdir ./my-flake $ clan flake create my-clan
$ cd ./my-flake
$ clan flake create .
``` ```
This action will generate two primary files: `flake.nix` and `.clan-flake`. This action will generate two primary files: `flake.nix` and `.clan-flake`.
@@ -93,6 +91,7 @@ Absolutely, let's break down the migration step by step, explaining each action
# this needs to point at the repository root # this needs to point at the repository root
directory = self; directory = self;
specialArgs = {}; specialArgs = {};
clanName = "NEEDS_TO_BE_UNIQUE"; # TODO: Changeme
machines = { machines = {
example-desktop = { example-desktop = {
nixpkgs.hostPlatform = "x86_64-linux"; nixpkgs.hostPlatform = "x86_64-linux";
@@ -109,6 +108,7 @@ Absolutely, let's break down the migration step by step, explaining each action
- Inside `machines`, a new machine configuration is defined (in this case, `example-desktop`). - Inside `machines`, a new machine configuration is defined (in this case, `example-desktop`).
- Inside `example-desktop` which is the target machine hostname, `nixpkgs.hostPlatform` specifies the host platform as `x86_64-linux`. - Inside `example-desktop` which is the target machine hostname, `nixpkgs.hostPlatform` specifies the host platform as `x86_64-linux`.
- `clanInternals`: Is required to enable evaluation of the secret generation/upload script on every architecture - `clanInternals`: Is required to enable evaluation of the secret generation/upload script on every architecture
- `clanName`: Is required and needs to be globally unique, as else we have a cLAN name clash
4. **Rebuild and Switch**: Rebuild your NixOS configuration using the updated flake: 4. **Rebuild and Switch**: Rebuild your NixOS configuration using the updated flake:

View File

@@ -2,6 +2,7 @@
{ directory # The directory containing the machines subdirectory { directory # The directory containing the machines subdirectory
, specialArgs ? { } # Extra arguments to pass to nixosSystem i.e. useful to make self available , specialArgs ? { } # Extra arguments to pass to nixosSystem i.e. useful to make self available
, machines ? { } # allows to include machine-specific modules i.e. machines.${name} = { ... } , machines ? { } # allows to include machine-specific modules i.e. machines.${name} = { ... }
, clanName # Needs to be (globally) unique, as this determines the folder name where the flake gets downloaded to.
}: }:
let let
machinesDirs = lib.optionalAttrs (builtins.pathExists "${directory}/machines") (builtins.readDir (directory + /machines)); machinesDirs = lib.optionalAttrs (builtins.pathExists "${directory}/machines") (builtins.readDir (directory + /machines));
@@ -73,6 +74,7 @@ in
clanInternals = { clanInternals = {
machines = configsPerSystem; machines = configsPerSystem;
clanName = clanName;
all-machines-json = lib.mapAttrs all-machines-json = lib.mapAttrs
(system: configs: nixpkgs.legacyPackages.${system}.writers.writeJSON "machines.json" (lib.mapAttrs (_: m: m.config.system.clan.deployment.data) configs)) (system: configs: nixpkgs.legacyPackages.${system}.writers.writeJSON "machines.json" (lib.mapAttrs (_: m: m.config.system.clan.deployment.data) configs))
configsPerSystem; configsPerSystem;

View File

@@ -30,16 +30,20 @@ in
generateSecrets = pkgs.writeScript "generate-secrets" '' generateSecrets = pkgs.writeScript "generate-secrets" ''
#!${pkgs.python3}/bin/python #!${pkgs.python3}/bin/python
import json import json
import sys
from clan_cli.secrets.sops_generate import generate_secrets_from_nix from clan_cli.secrets.sops_generate import generate_secrets_from_nix
args = json.loads(${builtins.toJSON (builtins.toJSON { machine_name = config.clanCore.machineName; secret_submodules = config.clanCore.secrets; })}) args = json.loads(${builtins.toJSON (builtins.toJSON { machine_name = config.clanCore.machineName; secret_submodules = config.clanCore.secrets; })})
args["flake_name"] = sys.argv[1]
generate_secrets_from_nix(**args) generate_secrets_from_nix(**args)
''; '';
uploadSecrets = pkgs.writeScript "upload-secrets" '' uploadSecrets = pkgs.writeScript "upload-secrets" ''
#!${pkgs.python3}/bin/python #!${pkgs.python3}/bin/python
import json import json
import sys
from clan_cli.secrets.sops_generate import upload_age_key_from_nix from clan_cli.secrets.sops_generate import upload_age_key_from_nix
# the second toJSON is needed to escape the string for the python # the second toJSON is needed to escape the string for the python
args = json.loads(${builtins.toJSON (builtins.toJSON { machine_name = config.clanCore.machineName; })}) args = json.loads(${builtins.toJSON (builtins.toJSON { machine_name = config.clanCore.machineName; })})
args["flake_name"] = sys.argv[1]
upload_age_key_from_nix(**args) upload_age_key_from_nix(**args)
''; '';
}; };

View File

@@ -2,6 +2,7 @@
source_up source_up
if type nix_direnv_watch_file &>/dev/null; then if type nix_direnv_watch_file &>/dev/null; then
nix_direnv_watch_file flake-module.nix nix_direnv_watch_file flake-module.nix
nix_direnv_watch_file default.nix nix_direnv_watch_file default.nix

View File

@@ -15,4 +15,8 @@
"search.exclude": { "search.exclude": {
"**/.direnv": true "**/.direnv": true
}, },
"python.linting.mypyPath": "mypy",
"python.linting.mypyEnabled": true,
"python.linting.enabled": true,
"python.defaultInterpreterPath": "python"
} }

View File

@@ -60,11 +60,31 @@ By default tests run in parallel using pytest-parallel.
pytest-parallel however breaks `breakpoint()`. To disable it, use this: pytest-parallel however breaks `breakpoint()`. To disable it, use this:
```console ```console
pytest --workers "" -s pytest -n0 -s
``` ```
You can also run a single test like this: You can also run a single test like this:
```console ```console
pytest --workers "" -s tests/test_secrets_cli.py::test_users pytest -n0 -s tests/test_secrets_cli.py::test_users
``` ```
## Run tests in nix container
Run all impure checks
```console
nix run .#impure-checks
```
Run all checks
```console
nix flake check
```
## Debugging functions
Debugging functions can be found under `src/debug.py`
quite interesting is the function breakpoint_shell() which drops you into a shell
with the test environment loaded.

View File

@@ -1,11 +1,15 @@
import argparse import argparse
import logging
import sys import sys
from types import ModuleType from types import ModuleType
from typing import Optional from typing import Optional
from . import config, flake, join, machines, secrets, vms, webui from . import config, flakes, join, machines, secrets, vms, webui
from .custom_logger import setup_logging
from .ssh import cli as ssh_cli from .ssh import cli as ssh_cli
log = logging.getLogger(__name__)
argcomplete: Optional[ModuleType] = None argcomplete: Optional[ModuleType] = None
try: try:
import argcomplete # type: ignore[no-redef] import argcomplete # type: ignore[no-redef]
@@ -25,9 +29,9 @@ def create_parser(prog: Optional[str] = None) -> argparse.ArgumentParser:
subparsers = parser.add_subparsers() subparsers = parser.add_subparsers()
parser_flake = subparsers.add_parser( parser_flake = subparsers.add_parser(
"flake", help="create a clan flake inside the current directory" "flakes", help="create a clan flake inside the current directory"
) )
flake.register_parser(parser_flake) flakes.register_parser(parser_flake)
parser_join = subparsers.add_parser("join", help="join a remote clan") parser_join = subparsers.add_parser("join", help="join a remote clan")
join.register_parser(parser_join) join.register_parser(parser_join)
@@ -65,6 +69,10 @@ def main() -> None:
parser = create_parser() parser = create_parser()
args = parser.parse_args() args = parser.parse_args()
if args.debug:
setup_logging(logging.DEBUG)
log.debug("Debug log activated")
if not hasattr(args, "func"): if not hasattr(args, "func"):
return return

View File

@@ -2,15 +2,21 @@ import asyncio
import logging import logging
import shlex import shlex
from pathlib import Path from pathlib import Path
from typing import Optional, Tuple from typing import Any, Callable, Coroutine, Dict, NamedTuple, Optional
from .custom_logger import get_caller
from .errors import ClanError from .errors import ClanError
log = logging.getLogger(__name__) log = logging.getLogger(__name__)
async def run(cmd: list[str], cwd: Optional[Path] = None) -> Tuple[bytes, bytes]: class CmdOut(NamedTuple):
log.debug(f"$: {shlex.join(cmd)}") stdout: str
stderr: str
cwd: Optional[Path] = None
async def run(cmd: list[str], cwd: Optional[Path] = None) -> CmdOut:
cwd_res = None cwd_res = None
if cwd is not None: if cwd is not None:
if not cwd.exists(): if not cwd.exists():
@@ -18,7 +24,9 @@ async def run(cmd: list[str], cwd: Optional[Path] = None) -> Tuple[bytes, bytes]
if not cwd.is_dir(): if not cwd.is_dir():
raise ClanError(f"Working directory {cwd} is not a directory") raise ClanError(f"Working directory {cwd} is not a directory")
cwd_res = cwd.resolve() cwd_res = cwd.resolve()
log.debug(f"Working directory: {cwd_res}") log.debug(
f"Command: {shlex.join(cmd)}\nWorking directory: {cwd_res}\nCaller : {get_caller()}"
)
proc = await asyncio.create_subprocess_exec( proc = await asyncio.create_subprocess_exec(
*cmd, *cmd,
stdout=asyncio.subprocess.PIPE, stdout=asyncio.subprocess.PIPE,
@@ -31,9 +39,30 @@ async def run(cmd: list[str], cwd: Optional[Path] = None) -> Tuple[bytes, bytes]
raise ClanError( raise ClanError(
f""" f"""
command: {shlex.join(cmd)} command: {shlex.join(cmd)}
working directory: {cwd_res}
exit code: {proc.returncode} exit code: {proc.returncode}
command output: stderr:
{stderr.decode("utf-8")} {stderr.decode("utf-8")}
stdout:
{stdout.decode("utf-8")}
""" """
) )
return stdout, stderr
return CmdOut(stdout.decode("utf-8"), stderr.decode("utf-8"), cwd=cwd)
def runforcli(
func: Callable[..., Coroutine[Any, Any, Dict[str, CmdOut]]], *args: Any
) -> None:
try:
res = asyncio.run(func(*args))
for i in res.items():
name, out = i
if out.stderr:
print(f"{name}: {out.stderr}", end="")
if out.stdout:
print(f"{name}: {out.stdout}", end="")
except ClanError as e:
print(e)
exit(1)

View File

@@ -1,20 +1,20 @@
import json import json
import subprocess import subprocess
from pathlib import Path
from typing import Optional from typing import Optional
from clan_cli.dirs import get_clan_flake_toplevel
from clan_cli.nix import nix_eval from clan_cli.nix import nix_eval
from .dirs import specific_flake_dir
from .types import FlakeName
def get_clan_module_names( def get_clan_module_names(
flake: Optional[Path] = None, flake_name: FlakeName,
) -> tuple[list[str], Optional[str]]: ) -> tuple[list[str], Optional[str]]:
""" """
Get the list of clan modules from the clan-core flake input Get the list of clan modules from the clan-core flake input
""" """
if flake is None: flake = specific_flake_dir(flake_name)
flake = get_clan_flake_toplevel()
proc = subprocess.run( proc = subprocess.run(
nix_eval( nix_eval(
[ [

View File

@@ -1,6 +1,7 @@
# !/usr/bin/env python3 # !/usr/bin/env python3
import argparse import argparse
import json import json
import logging
import os import os
import re import re
import shlex import shlex
@@ -9,14 +10,16 @@ import sys
from pathlib import Path from pathlib import Path
from typing import Any, Optional, Tuple, get_origin from typing import Any, Optional, Tuple, get_origin
from clan_cli.dirs import get_clan_flake_toplevel from clan_cli.dirs import machine_settings_file, specific_flake_dir
from clan_cli.errors import ClanError from clan_cli.errors import ClanError
from clan_cli.git import commit_file from clan_cli.git import commit_file
from clan_cli.machines.folders import machine_settings_file
from clan_cli.nix import nix_eval from clan_cli.nix import nix_eval
from clan_cli.types import FlakeName
script_dir = Path(__file__).parent script_dir = Path(__file__).parent
log = logging.getLogger(__name__)
# nixos option type description to python type # nixos option type description to python type
def map_type(type: str) -> Any: def map_type(type: str) -> Any:
@@ -104,8 +107,10 @@ def cast(value: Any, type: Any, opt_description: str) -> Any:
) )
def options_for_machine(machine_name: str, show_trace: bool = False) -> dict: def options_for_machine(
clan_dir = get_clan_flake_toplevel() flake_name: FlakeName, machine_name: str, show_trace: bool = False
) -> dict:
clan_dir = specific_flake_dir(flake_name)
flags = [] flags = []
if show_trace: if show_trace:
flags.append("--show-trace") flags.append("--show-trace")
@@ -126,9 +131,9 @@ def options_for_machine(machine_name: str, show_trace: bool = False) -> dict:
def read_machine_option_value( def read_machine_option_value(
machine_name: str, option: str, show_trace: bool = False flake_name: FlakeName, machine_name: str, option: str, show_trace: bool = False
) -> str: ) -> str:
clan_dir = get_clan_flake_toplevel() clan_dir = specific_flake_dir(flake_name)
# use nix eval to read from .#nixosConfigurations.default.config.{option} # use nix eval to read from .#nixosConfigurations.default.config.{option}
# this will give us the evaluated config with the options attribute # this will give us the evaluated config with the options attribute
cmd = nix_eval( cmd = nix_eval(
@@ -154,6 +159,43 @@ def read_machine_option_value(
return out return out
def get_or_set_option(args: argparse.Namespace) -> None:
if args.value == []:
print(
read_machine_option_value(
args.flake, args.machine, args.option, args.show_trace
)
)
else:
# load options
if args.options_file is None:
options = options_for_machine(
args.flake, machine_name=args.machine, show_trace=args.show_trace
)
else:
with open(args.options_file) as f:
options = json.load(f)
# compute settings json file location
if args.settings_file is None:
settings_file = machine_settings_file(args.flake, args.machine)
else:
settings_file = args.settings_file
# set the option with the given value
set_option(
flake_name=args.flake,
option=args.option,
value=args.value,
options=options,
settings_file=settings_file,
option_description=args.option,
show_trace=args.show_trace,
)
if not args.quiet:
new_value = read_machine_option_value(args.flake, args.machine, args.option)
print(f"New Value for {args.option}:")
print(new_value)
def find_option( def find_option(
option: str, value: Any, options: dict, option_description: Optional[str] = None option: str, value: Any, options: dict, option_description: Optional[str] = None
) -> Tuple[str, Any]: ) -> Tuple[str, Any]:
@@ -206,6 +248,7 @@ def find_option(
def set_option( def set_option(
flake_name: FlakeName,
option: str, option: str,
value: Any, value: Any,
options: dict, options: dict,
@@ -247,6 +290,7 @@ def set_option(
current_config = json.load(f) current_config = json.load(f)
else: else:
current_config = {} current_config = {}
# merge and save the new config file # merge and save the new config file
new_config = merge(current_config, result) new_config = merge(current_config, result)
settings_file.parent.mkdir(parents=True, exist_ok=True) settings_file.parent.mkdir(parents=True, exist_ok=True)
@@ -254,41 +298,12 @@ def set_option(
json.dump(new_config, f, indent=2) json.dump(new_config, f, indent=2)
print(file=f) # add newline at the end of the file to make git happy print(file=f) # add newline at the end of the file to make git happy
if settings_file.resolve().is_relative_to(get_clan_flake_toplevel()): if settings_file.resolve().is_relative_to(specific_flake_dir(flake_name)):
commit_file(settings_file, commit_message=f"Set option {option_description}") commit_file(
settings_file,
repo_dir=specific_flake_dir(flake_name),
def get_or_set_option(args: argparse.Namespace) -> None: commit_message=f"Set option {option_description}",
if args.value == []:
print(read_machine_option_value(args.machine, args.option, args.show_trace))
else:
# load options
if args.options_file is None:
options = options_for_machine(
machine_name=args.machine, show_trace=args.show_trace
)
else:
with open(args.options_file) as f:
options = json.load(f)
# compute settings json file location
if args.settings_file is None:
get_clan_flake_toplevel()
settings_file = machine_settings_file(args.machine)
else:
settings_file = args.settings_file
# set the option with the given value
set_option(
option=args.option,
value=args.value,
options=options,
settings_file=settings_file,
option_description=args.option,
show_trace=args.show_trace,
) )
if not args.quiet:
new_value = read_machine_option_value(args.machine, args.option)
print(f"New Value for {args.option}:")
print(new_value)
# takes a (sub)parser and configures it # takes a (sub)parser and configures it
@@ -302,7 +317,6 @@ def register_parser(
# inject callback function to process the input later # inject callback function to process the input later
parser.set_defaults(func=get_or_set_option) parser.set_defaults(func=get_or_set_option)
parser.add_argument( parser.add_argument(
"--machine", "--machine",
"-m", "-m",
@@ -346,6 +360,11 @@ def register_parser(
nargs="*", nargs="*",
help="option value to set (if omitted, the current value is printed)", help="option value to set (if omitted, the current value is printed)",
) )
parser.add_argument(
"flake",
type=str,
help="name of the flake to set machine options for",
)
def main(argv: Optional[list[str]] = None) -> None: def main(argv: Optional[list[str]] = None) -> None:

View File

@@ -8,38 +8,48 @@ from typing import Optional
from fastapi import HTTPException from fastapi import HTTPException
from clan_cli.dirs import get_clan_flake_toplevel, nixpkgs_source from clan_cli.dirs import (
from clan_cli.git import commit_file, find_git_repo_root machine_settings_file,
from clan_cli.machines.folders import machine_folder, machine_settings_file nixpkgs_source,
specific_flake_dir,
specific_machine_dir,
)
from clan_cli.git import commit_file
from clan_cli.nix import nix_eval from clan_cli.nix import nix_eval
from ..types import FlakeName
def verify_machine_config( def verify_machine_config(
machine_name: str, config: Optional[dict] = None, flake: Optional[Path] = None flake_name: FlakeName,
machine_name: str,
config: Optional[dict] = None,
flake: Optional[Path] = None,
) -> Optional[str]: ) -> Optional[str]:
""" """
Verify that the machine evaluates successfully Verify that the machine evaluates successfully
Returns a tuple of (success, error_message) Returns a tuple of (success, error_message)
""" """
if config is None: if config is None:
config = config_for_machine(machine_name) config = config_for_machine(flake_name, machine_name)
if flake is None: flake = specific_flake_dir(flake_name)
flake = get_clan_flake_toplevel() with NamedTemporaryFile(mode="w", dir=flake) as clan_machine_settings_file:
with NamedTemporaryFile(mode="w") as clan_machine_settings_file:
json.dump(config, clan_machine_settings_file, indent=2) json.dump(config, clan_machine_settings_file, indent=2)
clan_machine_settings_file.seek(0) clan_machine_settings_file.seek(0)
env = os.environ.copy() env = os.environ.copy()
env["CLAN_MACHINE_SETTINGS_FILE"] = clan_machine_settings_file.name env["CLAN_MACHINE_SETTINGS_FILE"] = clan_machine_settings_file.name
cmd = nix_eval(
flags=[
"--impure",
"--show-trace",
"--show-trace",
"--impure", # needed to access CLAN_MACHINE_SETTINGS_FILE
f".#nixosConfigurations.{machine_name}.config.system.build.toplevel.outPath",
],
)
# repro_env_break(work_dir=flake, env=env, cmd=cmd)
proc = subprocess.run( proc = subprocess.run(
nix_eval( cmd,
flags=[
"--impure",
"--show-trace",
"--show-trace",
"--impure", # needed to access CLAN_MACHINE_SETTINGS_FILE
f".#nixosConfigurations.{machine_name}.config.system.build.toplevel.outPath",
],
),
capture_output=True, capture_output=True,
text=True, text=True,
cwd=flake, cwd=flake,
@@ -50,44 +60,45 @@ def verify_machine_config(
return None return None
def config_for_machine(machine_name: str) -> dict: def config_for_machine(flake_name: FlakeName, machine_name: str) -> dict:
# read the config from a json file located at {flake}/machines/{machine_name}/settings.json # read the config from a json file located at {flake}/machines/{machine_name}/settings.json
if not machine_folder(machine_name).exists(): if not specific_machine_dir(flake_name, machine_name).exists():
raise HTTPException( raise HTTPException(
status_code=404, status_code=404,
detail=f"Machine {machine_name} not found. Create the machine first`", detail=f"Machine {machine_name} not found. Create the machine first`",
) )
settings_path = machine_settings_file(machine_name) settings_path = machine_settings_file(flake_name, machine_name)
if not settings_path.exists(): if not settings_path.exists():
return {} return {}
with open(settings_path) as f: with open(settings_path) as f:
return json.load(f) return json.load(f)
def set_config_for_machine(machine_name: str, config: dict) -> None: def set_config_for_machine(
flake_name: FlakeName, machine_name: str, config: dict
) -> None:
# write the config to a json file located at {flake}/machines/{machine_name}/settings.json # write the config to a json file located at {flake}/machines/{machine_name}/settings.json
if not machine_folder(machine_name).exists(): if not specific_machine_dir(flake_name, machine_name).exists():
raise HTTPException( raise HTTPException(
status_code=404, status_code=404,
detail=f"Machine {machine_name} not found. Create the machine first`", detail=f"Machine {machine_name} not found. Create the machine first`",
) )
settings_path = machine_settings_file(machine_name) settings_path = machine_settings_file(flake_name, machine_name)
settings_path.parent.mkdir(parents=True, exist_ok=True) settings_path.parent.mkdir(parents=True, exist_ok=True)
with open(settings_path, "w") as f: with open(settings_path, "w") as f:
json.dump(config, f) json.dump(config, f)
repo_dir = find_git_repo_root() repo_dir = specific_flake_dir(flake_name)
if repo_dir is not None: if repo_dir is not None:
commit_file(settings_path, repo_dir) commit_file(settings_path, repo_dir)
def schema_for_machine( def schema_for_machine(
machine_name: str, config: Optional[dict] = None, flake: Optional[Path] = None flake_name: FlakeName, machine_name: str, config: Optional[dict] = None
) -> dict: ) -> dict:
if flake is None: flake = specific_flake_dir(flake_name)
flake = get_clan_flake_toplevel()
# use nix eval to lib.evalModules .#nixosConfigurations.<machine_name>.options.clan # use nix eval to lib.evalModules .#nixosConfigurations.<machine_name>.options.clan
with NamedTemporaryFile(mode="w") as clan_machine_settings_file: with NamedTemporaryFile(mode="w", dir=flake) as clan_machine_settings_file:
env = os.environ.copy() env = os.environ.copy()
inject_config_flags = [] inject_config_flags = []
if config is not None: if config is not None:

View File

@@ -1,5 +1,7 @@
import inspect
import logging import logging
from typing import Any from pathlib import Path
from typing import Any, Callable
grey = "\x1b[38;20m" grey = "\x1b[38;20m"
yellow = "\x1b[33;20m" yellow = "\x1b[33;20m"
@@ -9,11 +11,20 @@ green = "\u001b[32m"
blue = "\u001b[34m" blue = "\u001b[34m"
def get_formatter(color: str) -> logging.Formatter: def get_formatter(color: str) -> Callable[[logging.LogRecord, bool], logging.Formatter]:
reset = "\x1b[0m" def myformatter(
return logging.Formatter( record: logging.LogRecord, with_location: bool
f"{color}%(levelname)s{reset}:(%(filename)s:%(lineno)d): %(message)s" ) -> logging.Formatter:
) reset = "\x1b[0m"
filepath = Path(record.pathname).resolve()
if not with_location:
return logging.Formatter(f"{color}%(levelname)s{reset}: %(message)s")
return logging.Formatter(
f"{color}%(levelname)s{reset}: %(message)s\n {filepath}:%(lineno)d::%(funcName)s\n"
)
return myformatter
FORMATTER = { FORMATTER = {
@@ -26,12 +37,36 @@ FORMATTER = {
class CustomFormatter(logging.Formatter): class CustomFormatter(logging.Formatter):
def format(self, record: Any) -> str: def format(self, record: logging.LogRecord) -> str:
return FORMATTER[record.levelno].format(record) return FORMATTER[record.levelno](record, True).format(record)
def register(level: Any) -> None: class ThreadFormatter(logging.Formatter):
ch = logging.StreamHandler() def format(self, record: logging.LogRecord) -> str:
ch.setLevel(level) return FORMATTER[record.levelno](record, False).format(record)
ch.setFormatter(CustomFormatter())
logging.basicConfig(level=level, handlers=[ch])
def get_caller() -> str:
frame = inspect.currentframe()
if frame is None:
return "unknown"
caller_frame = frame.f_back
if caller_frame is None:
return "unknown"
caller_frame = caller_frame.f_back
if caller_frame is None:
return "unknown"
frame_info = inspect.getframeinfo(caller_frame)
ret = f"{frame_info.filename}:{frame_info.lineno}::{frame_info.function}"
return ret
def setup_logging(level: Any) -> None:
handler = logging.StreamHandler()
handler.setLevel(level)
handler.setFormatter(CustomFormatter())
logger = logging.getLogger("registerHandler")
logging.getLogger("asyncio").setLevel(logging.INFO)
logging.getLogger("httpx").setLevel(level=logging.WARNING)
logger.addHandler(handler)
# logging.basicConfig(level=level, handlers=[handler])

View File

@@ -0,0 +1,104 @@
import logging
import multiprocessing as mp
import os
import shlex
import stat
import subprocess
import sys
import time
from pathlib import Path
from typing import Any, Callable, Dict, List, Optional
import ipdb
log = logging.getLogger(__name__)
def command_exec(cmd: List[str], work_dir: Path, env: Dict[str, str]) -> None:
subprocess.run(cmd, check=True, env=env, cwd=work_dir.resolve())
def block_for_input() -> None:
log = logging.getLogger(__name__)
procid = os.getpid()
f"echo 'continue' > /sys/proc/{procid}/fd/{sys.stdin.fileno()}"
while True:
log.warning("Use sudo cntr attach <pid> to attach to the container.")
# log.warning("Resume execution by executing '%s' in cntr shell", command)
time.sleep(1)
log.info("Resuming execution.")
def breakpoint_container(
work_dir: Path,
env: Optional[Dict[str, str]] = None,
cmd: Optional[List[str]] = None,
) -> None:
if env is None:
env = os.environ.copy()
else:
env = env.copy()
dump_env(env, work_dir / "env.sh")
if cmd is not None:
log.debug("Command: %s", shlex.join(cmd))
mycommand = shlex.join(cmd)
write_command(mycommand, work_dir / "cmd.sh")
block_for_input()
def breakpoint_shell(
work_dir: Path,
env: Optional[Dict[str, str]] = None,
cmd: Optional[List[str]] = None,
) -> None:
if env is None:
env = os.environ.copy()
else:
env = env.copy()
# Cmd appending
args = ["xterm", "-e", "zsh", "-df"]
if cmd is not None:
mycommand = shlex.join(cmd)
write_command(mycommand, work_dir / "cmd.sh")
proc = spawn_process(func=command_exec, cmd=args, work_dir=work_dir, env=env)
try:
ipdb.set_trace()
finally:
proc.terminate()
def write_command(command: str, loc: Path) -> None:
log.info("Dumping command to %s", loc)
with open(loc, "w") as f:
f.write("#!/usr/bin/env bash\n")
f.write(command)
st = os.stat(loc)
os.chmod(loc, st.st_mode | stat.S_IEXEC)
def spawn_process(func: Callable, **kwargs: Any) -> mp.Process:
if mp.get_start_method(allow_none=True) is None:
mp.set_start_method(method="spawn")
proc = mp.Process(target=func, name="python-debug-process", kwargs=kwargs)
proc.start()
return proc
def dump_env(env: Dict[str, str], loc: Path) -> None:
cenv = env.copy()
log.info("Dumping environment variables to %s", loc)
with open(loc, "w") as f:
f.write("#!/usr/bin/env bash\n")
for k, v in cenv.items():
if v.count("\n") > 0 or v.count('"') > 0 or v.count("'") > 0:
continue
f.write(f"export {k}='{v}'\n")
st = os.stat(loc)
os.chmod(loc, st.st_mode | stat.S_IEXEC)

View File

@@ -1,32 +1,35 @@
import logging
import os import os
import sys import sys
from pathlib import Path from pathlib import Path
from typing import Optional
from .errors import ClanError from .errors import ClanError
from .types import FlakeName
log = logging.getLogger(__name__)
def get_clan_flake_toplevel() -> Path: # def _get_clan_flake_toplevel() -> Path:
return find_toplevel([".clan-flake", ".git", ".hg", ".svn", "flake.nix"]) # return find_toplevel([".clan-flake", ".git", ".hg", ".svn", "flake.nix"])
def find_git_repo_root() -> Optional[Path]: # def find_git_repo_root() -> Optional[Path]:
try: # try:
return find_toplevel([".git"]) # return find_toplevel([".git"])
except ClanError: # except ClanError:
return None # return None
def find_toplevel(top_level_files: list[str]) -> Path: # def find_toplevel(top_level_files: list[str]) -> Path:
"""Returns the path to the toplevel of the clan flake""" # """Returns the path to the toplevel of the clan flake"""
for project_file in top_level_files: # for project_file in top_level_files:
initial_path = Path(os.getcwd()) # initial_path = Path(os.getcwd())
path = Path(initial_path) # path = Path(initial_path)
while path.parent != path: # while path.parent != path:
if (path / project_file).exists(): # if (path / project_file).exists():
return path # return path
path = path.parent # path = path.parent
raise ClanError("Could not find clan flake toplevel directory") # raise ClanError("Could not find clan flake toplevel directory")
def user_config_dir() -> Path: def user_config_dir() -> Path:
@@ -38,6 +41,58 @@ def user_config_dir() -> Path:
return Path(os.getenv("XDG_CONFIG_HOME", os.path.expanduser("~/.config"))) return Path(os.getenv("XDG_CONFIG_HOME", os.path.expanduser("~/.config")))
def user_data_dir() -> Path:
if sys.platform == "win32":
return Path(os.getenv("APPDATA", os.path.expanduser("~\\AppData\\Roaming\\")))
elif sys.platform == "darwin":
return Path(os.path.expanduser("~/Library/Application Support/"))
else:
return Path(os.getenv("XDG_DATA_HOME", os.path.expanduser("~/.local/state")))
def clan_data_dir() -> Path:
path = user_data_dir() / "clan"
if not path.exists():
log.debug(f"Creating path with parents {path}")
path.mkdir(parents=True)
return path.resolve()
def clan_config_dir() -> Path:
path = user_config_dir() / "clan"
if not path.exists():
log.debug(f"Creating path with parents {path}")
path.mkdir(parents=True)
return path.resolve()
def clan_flakes_dir() -> Path:
path = clan_data_dir() / "flake"
if not path.exists():
log.debug(f"Creating path with parents {path}")
path.mkdir(parents=True)
return path.resolve()
def specific_flake_dir(flake_name: FlakeName) -> Path:
flake_dir = clan_flakes_dir() / flake_name
if not flake_dir.exists():
raise ClanError(f"Flake '{flake_name}' does not exist")
return flake_dir
def machines_dir(flake_name: FlakeName) -> Path:
return specific_flake_dir(flake_name) / "machines"
def specific_machine_dir(flake_name: FlakeName, machine: str) -> Path:
return machines_dir(flake_name) / machine
def machine_settings_file(flake_name: FlakeName, machine: str) -> Path:
return specific_machine_dir(flake_name, machine) / "settings.json"
def module_root() -> Path: def module_root() -> Path:
return Path(__file__).parent return Path(__file__).parent

View File

@@ -1,47 +0,0 @@
# !/usr/bin/env python3
import argparse
import asyncio
from pathlib import Path
from typing import Tuple
from ..async_cmd import run
from ..errors import ClanError
from ..nix import nix_command
DEFAULT_URL = "git+https://git.clan.lol/clan/clan-core#new-clan"
async def create_flake(directory: Path, url: str) -> Tuple[bytes, bytes]:
if not directory.exists():
directory.mkdir()
flake_command = nix_command(
[
"flake",
"init",
"-t",
url,
]
)
stdout, stderr = await run(flake_command, directory)
return stdout, stderr
def create_flake_command(args: argparse.Namespace) -> None:
    """CLI entry point: create a flake at ``args.directory`` from the default template.

    Prints the command output on success; on ClanError, reports the message
    on stderr and exits with status 1.
    """
    import sys

    try:
        stdout, stderr = asyncio.run(create_flake(args.directory, DEFAULT_URL))
    except ClanError as e:
        # Report on stderr and use sys.exit(); the `exit()` builtin is only
        # meant for interactive sessions.
        print(e, file=sys.stderr)
        sys.exit(1)
    print(stderr.decode("utf-8"), end="")
    print(stdout.decode("utf-8"), end="")
# takes a (sub)parser and configures it
def register_create_parser(parser: argparse.ArgumentParser) -> None:
    """Attach the positional argument and handler for the `create` subcommand."""
    parser.set_defaults(func=create_flake_command)
    parser.add_argument(
        "directory",
        type=Path,
        help="output directory for the flake",
    )

View File

@@ -2,6 +2,7 @@
import argparse import argparse
from .create import register_create_parser from .create import register_create_parser
from .list import register_list_parser
# takes a (sub)parser and configures it # takes a (sub)parser and configures it
@@ -12,5 +13,8 @@ def register_parser(parser: argparse.ArgumentParser) -> None:
help="the command to run", help="the command to run",
required=True, required=True,
) )
update_parser = subparser.add_parser("create", help="Create a clan flake") create_parser = subparser.add_parser("create", help="Create a clan flake")
register_create_parser(update_parser) register_create_parser(create_parser)
list_parser = subparser.add_parser("list", help="List clan flakes")
register_list_parser(list_parser)

View File

@@ -0,0 +1,84 @@
# !/usr/bin/env python3
import argparse
from pathlib import Path
from typing import Dict
from pydantic import AnyUrl
from pydantic.tools import parse_obj_as
from ..async_cmd import CmdOut, run, runforcli
from ..dirs import clan_flakes_dir
from ..errors import ClanError
from ..nix import nix_command, nix_shell
DEFAULT_URL: AnyUrl = parse_obj_as(
AnyUrl,
"git+https://git.clan.lol/clan/clan-core?ref=Qubasa-main#new-clan", # TODO: Change me back to main branch
)
async def create_flake(directory: Path, url: AnyUrl) -> Dict[str, CmdOut]:
    """Instantiate a new clan flake from a template and put it under git.

    Args:
        directory: target directory; must not exist yet.
        url: flake template URL passed to ``nix flake init -t``.

    Returns:
        Mapping of step name -> command output, in execution order.

    Raises:
        ClanError: if ``directory`` already exists.
    """
    if directory.exists():
        raise ClanError(f"Flake at '{directory}' already exists")
    directory.mkdir()

    response: Dict[str, CmdOut] = {}

    command = nix_command(
        [
            "flake",
            "init",
            "-t",
            url,
        ]
    )
    response["flake init"] = await run(command, cwd=directory)

    # Each git step gets a distinct response key; previously both
    # `git config` invocations shared the key "git config", so the
    # first command's output was silently overwritten.
    git_steps = [
        ("git init", ["git", "init"]),
        ("git add", ["git", "add", "."]),
        ("git config user.name", ["git", "config", "user.name", "clan-tool"]),
        ("git config user.email", ["git", "config", "user.email", "clan@example.com"]),
    ]
    for step_name, argv in git_steps:
        out = await run(nix_shell(["git"], argv), cwd=directory)
        response[step_name] = out

    # TODO: Find out why this fails on Johannes machine
    # command = nix_shell(["git"], ["git", "commit", "-a", "-m", "Initial commit"])
    # out = await run(command, cwd=directory)
    # response["git commit"] = out

    return response
def create_flake_command(args: argparse.Namespace) -> None:
    """CLI handler: create the named flake under the clan flakes directory."""
    target = clan_flakes_dir() / args.name
    runforcli(create_flake, target, args.url)
# takes a (sub)parser and configures it
def register_create_parser(parser: argparse.ArgumentParser) -> None:
parser.add_argument(
"name",
type=str,
help="name for the flake",
)
parser.add_argument(
"--url",
type=str,
help="url for the flake",
default=DEFAULT_URL,
)
# parser.add_argument("name", type=str, help="name of the flake")
parser.set_defaults(func=create_flake_command)

View File

@@ -0,0 +1,27 @@
import argparse
import logging
import os
from ..dirs import clan_flakes_dir
log = logging.getLogger(__name__)
def list_flakes() -> list[str]:
    """Return the names of all entries under the clan flakes directory.

    Returns:
        One name per entry; an empty list when the directory does not exist yet.
    """
    path = clan_flakes_dir()
    # Fixed copy-pasted message: this lists flakes, not machines.
    log.debug(f"Listing flakes in {path}")
    if not path.exists():
        return []
    return os.listdir(path)
def list_command(args: argparse.Namespace) -> None:
    """CLI handler: print one flake name per line."""
    for flake_name in list_flakes():
        print(flake_name)
def register_list_parser(parser: argparse.ArgumentParser) -> None:
    """Wire the `list` subcommand to its handler; the command takes no arguments."""
    parser.set_defaults(func=list_command)

View File

@@ -3,7 +3,7 @@ import subprocess
from pathlib import Path from pathlib import Path
from typing import Optional from typing import Optional
from clan_cli.dirs import find_git_repo_root # from clan_cli.dirs import find_git_repo_root
from clan_cli.errors import ClanError from clan_cli.errors import ClanError
from clan_cli.nix import nix_shell from clan_cli.nix import nix_shell
@@ -11,13 +11,9 @@ from clan_cli.nix import nix_shell
# generic vcs agnostic commit function # generic vcs agnostic commit function
def commit_file( def commit_file(
file_path: Path, file_path: Path,
repo_dir: Optional[Path] = None, repo_dir: Path,
commit_message: Optional[str] = None, commit_message: Optional[str] = None,
) -> None: ) -> None:
if repo_dir is None:
repo_dir = find_git_repo_root()
if repo_dir is None:
return
# check that the file is in the git repository and exists # check that the file is in the git repository and exists
if not Path(file_path).resolve().is_relative_to(repo_dir.resolve()): if not Path(file_path).resolve().is_relative_to(repo_dir.resolve()):
raise ClanError(f"File {file_path} is not in the git repository {repo_dir}") raise ClanError(f"File {file_path} is not in the git repository {repo_dir}")

View File

@@ -23,8 +23,8 @@ def register_parser(parser: argparse.ArgumentParser) -> None:
create_parser = subparser.add_parser("create", help="Create a machine") create_parser = subparser.add_parser("create", help="Create a machine")
register_create_parser(create_parser) register_create_parser(create_parser)
remove_parser = subparser.add_parser("remove", help="Remove a machine") delete_parser = subparser.add_parser("delete", help="Delete a machine")
register_delete_parser(remove_parser) register_delete_parser(delete_parser)
list_parser = subparser.add_parser("list", help="List machines") list_parser = subparser.add_parser("list", help="List machines")
register_list_parser(list_parser) register_list_parser(list_parser)

View File

@@ -1,20 +1,53 @@
import argparse import argparse
import logging
from typing import Dict
from .folders import machine_folder from ..async_cmd import CmdOut, run, runforcli
from ..dirs import specific_flake_dir, specific_machine_dir
from ..errors import ClanError
from ..nix import nix_shell
from ..types import FlakeName
log = logging.getLogger(__name__)
def create_machine(name: str) -> None: async def create_machine(flake_name: FlakeName, machine_name: str) -> Dict[str, CmdOut]:
folder = machine_folder(name) folder = specific_machine_dir(flake_name, machine_name)
if folder.exists():
raise ClanError(f"Machine '{machine_name}' already exists")
folder.mkdir(parents=True, exist_ok=True) folder.mkdir(parents=True, exist_ok=True)
# create empty settings.json file inside the folder # create empty settings.json file inside the folder
with open(folder / "settings.json", "w") as f: with open(folder / "settings.json", "w") as f:
f.write("{}") f.write("{}")
response = {}
out = await run(nix_shell(["git"], ["git", "add", str(folder)]), cwd=folder)
response["git add"] = out
out = await run(
nix_shell(
["git"],
["git", "commit", "-m", f"Added machine {machine_name}", str(folder)],
),
cwd=folder,
)
response["git commit"] = out
return response
def create_command(args: argparse.Namespace) -> None: def create_command(args: argparse.Namespace) -> None:
create_machine(args.host) try:
flake_dir = specific_flake_dir(args.flake)
runforcli(create_machine, flake_dir, args.machine)
except ClanError as e:
print(e)
def register_create_parser(parser: argparse.ArgumentParser) -> None: def register_create_parser(parser: argparse.ArgumentParser) -> None:
parser.add_argument("host", type=str) parser.add_argument("machine", type=str)
parser.add_argument(
"flake",
type=str,
help="name of the flake to create machine for",
)
parser.set_defaults(func=create_command) parser.set_defaults(func=create_command)

View File

@@ -1,12 +1,12 @@
import argparse import argparse
import shutil import shutil
from ..dirs import specific_machine_dir
from ..errors import ClanError from ..errors import ClanError
from .folders import machine_folder
def delete_command(args: argparse.Namespace) -> None: def delete_command(args: argparse.Namespace) -> None:
folder = machine_folder(args.host) folder = specific_machine_dir(args.flake, args.host)
if folder.exists(): if folder.exists():
shutil.rmtree(folder) shutil.rmtree(folder)
else: else:
@@ -15,4 +15,9 @@ def delete_command(args: argparse.Namespace) -> None:
def register_delete_parser(parser: argparse.ArgumentParser) -> None: def register_delete_parser(parser: argparse.ArgumentParser) -> None:
parser.add_argument("host", type=str) parser.add_argument("host", type=str)
parser.add_argument(
"flake",
type=str,
help="name of the flake to create machine for",
)
parser.set_defaults(func=delete_command) parser.set_defaults(func=delete_command)

View File

@@ -1,9 +1,10 @@
from .folders import machine_folder from ..dirs import specific_machine_dir
from ..types import FlakeName
def machine_has_fact(machine: str, fact: str) -> bool: def machine_has_fact(flake_name: FlakeName, machine: str, fact: str) -> bool:
return (machine_folder(machine) / "facts" / fact).exists() return (specific_machine_dir(flake_name, machine) / "facts" / fact).exists()
def machine_get_fact(machine: str, fact: str) -> str: def machine_get_fact(flake_name: FlakeName, machine: str, fact: str) -> str:
return (machine_folder(machine) / "facts" / fact).read_text() return (specific_machine_dir(flake_name, machine) / "facts" / fact).read_text()

View File

@@ -1,15 +0,0 @@
from pathlib import Path
from ..dirs import get_clan_flake_toplevel
def machines_folder() -> Path:
return get_clan_flake_toplevel() / "machines"
def machine_folder(machine: str) -> Path:
return machines_folder() / machine
def machine_settings_file(machine: str) -> Path:
return machine_folder(machine) / "settings.json"

View File

@@ -3,18 +3,20 @@ import subprocess
from pathlib import Path from pathlib import Path
from tempfile import TemporaryDirectory from tempfile import TemporaryDirectory
from ..dirs import specific_flake_dir
from ..machines.machines import Machine from ..machines.machines import Machine
from ..nix import nix_shell from ..nix import nix_shell
from ..secrets.generate import generate_secrets from ..secrets.generate import generate_secrets
from ..types import FlakeName
def install_nixos(machine: Machine) -> None: def install_nixos(machine: Machine, flake_name: FlakeName) -> None:
h = machine.host h = machine.host
target_host = f"{h.user or 'root'}@{h.host}" target_host = f"{h.user or 'root'}@{h.host}"
flake_attr = h.meta.get("flake_attr", "") flake_attr = h.meta.get("flake_attr", "")
generate_secrets(machine) generate_secrets(machine, flake_name)
with TemporaryDirectory() as tmpdir_: with TemporaryDirectory() as tmpdir_:
tmpdir = Path(tmpdir_) tmpdir = Path(tmpdir_)
@@ -26,7 +28,7 @@ def install_nixos(machine: Machine) -> None:
[ [
"nixos-anywhere", "nixos-anywhere",
"-f", "-f",
f"{machine.clan_dir}#{flake_attr}", f"{machine.flake_dir}#{flake_attr}",
"-t", "-t",
"--no-reboot", "--no-reboot",
"--extra-files", "--extra-files",
@@ -39,10 +41,10 @@ def install_nixos(machine: Machine) -> None:
def install_command(args: argparse.Namespace) -> None: def install_command(args: argparse.Namespace) -> None:
machine = Machine(args.machine) machine = Machine(args.machine, flake_dir=specific_flake_dir(args.flake))
machine.deployment_address = args.target_host machine.deployment_address = args.target_host
install_nixos(machine) install_nixos(machine, args.flake)
def register_install_parser(parser: argparse.ArgumentParser) -> None: def register_install_parser(parser: argparse.ArgumentParser) -> None:
@@ -56,5 +58,9 @@ def register_install_parser(parser: argparse.ArgumentParser) -> None:
type=str, type=str,
help="ssh address to install to in the form of user@host:2222", help="ssh address to install to in the form of user@host:2222",
) )
parser.add_argument(
"flake",
type=str,
help="name of the flake to install machine from",
)
parser.set_defaults(func=install_command) parser.set_defaults(func=install_command)

View File

@@ -2,14 +2,15 @@ import argparse
import logging import logging
import os import os
from .folders import machines_folder from ..dirs import machines_dir
from ..types import FlakeName
from .types import validate_hostname from .types import validate_hostname
log = logging.getLogger(__name__) log = logging.getLogger(__name__)
def list_machines() -> list[str]: def list_machines(flake_name: FlakeName) -> list[str]:
path = machines_folder() path = machines_dir(flake_name)
log.debug(f"Listing machines in {path}") log.debug(f"Listing machines in {path}")
if not path.exists(): if not path.exists():
return [] return []
@@ -21,9 +22,14 @@ def list_machines() -> list[str]:
def list_command(args: argparse.Namespace) -> None: def list_command(args: argparse.Namespace) -> None:
for machine in list_machines(): for machine in list_machines(args.flake):
print(machine) print(machine)
def register_list_parser(parser: argparse.ArgumentParser) -> None: def register_list_parser(parser: argparse.ArgumentParser) -> None:
parser.add_argument(
"flake",
type=str,
help="name of the flake to create machine for",
)
parser.set_defaults(func=list_command) parser.set_defaults(func=list_command)

View File

@@ -5,7 +5,6 @@ import sys
from pathlib import Path from pathlib import Path
from typing import Optional from typing import Optional
from ..dirs import get_clan_flake_toplevel
from ..nix import nix_build, nix_config, nix_eval from ..nix import nix_build, nix_config, nix_eval
from ..ssh import Host, parse_deployment_address from ..ssh import Host, parse_deployment_address
@@ -31,7 +30,7 @@ class Machine:
def __init__( def __init__(
self, self,
name: str, name: str,
clan_dir: Optional[Path] = None, flake_dir: Path,
machine_data: Optional[dict] = None, machine_data: Optional[dict] = None,
) -> None: ) -> None:
""" """
@@ -41,13 +40,10 @@ class Machine:
@machine_json: can be optionally used to skip evaluation of the machine, location of the json file with machine data @machine_json: can be optionally used to skip evaluation of the machine, location of the json file with machine data
""" """
self.name = name self.name = name
if clan_dir is None: self.flake_dir = flake_dir
self.clan_dir = get_clan_flake_toplevel()
else:
self.clan_dir = clan_dir
if machine_data is None: if machine_data is None:
self.machine_data = build_machine_data(name, self.clan_dir) self.machine_data = build_machine_data(name, self.flake_dir)
else: else:
self.machine_data = machine_data self.machine_data = machine_data
@@ -68,14 +64,14 @@ class Machine:
@secrets_dir: the directory to store the secrets in @secrets_dir: the directory to store the secrets in
""" """
env = os.environ.copy() env = os.environ.copy()
env["CLAN_DIR"] = str(self.clan_dir) env["CLAN_DIR"] = str(self.flake_dir)
env["PYTHONPATH"] = str( env["PYTHONPATH"] = str(
":".join(sys.path) ":".join(sys.path)
) # TODO do this in the clanCore module ) # TODO do this in the clanCore module
env["SECRETS_DIR"] = str(secrets_dir) env["SECRETS_DIR"] = str(secrets_dir)
print(f"uploading secrets... {self.upload_secrets}") print(f"uploading secrets... {self.upload_secrets}")
proc = subprocess.run( proc = subprocess.run(
[self.upload_secrets], [self.upload_secrets, self.flake_dir.name],
env=env, env=env,
stdout=subprocess.PIPE, stdout=subprocess.PIPE,
text=True, text=True,
@@ -95,7 +91,7 @@ class Machine:
@attr: the attribute to get @attr: the attribute to get
""" """
output = subprocess.run( output = subprocess.run(
nix_eval([f"path:{self.clan_dir}#{attr}"]), nix_eval([f"path:{self.flake_dir}#{attr}"]),
stdout=subprocess.PIPE, stdout=subprocess.PIPE,
check=True, check=True,
text=True, text=True,
@@ -108,7 +104,7 @@ class Machine:
@attr: the attribute to get @attr: the attribute to get
""" """
outpath = subprocess.run( outpath = subprocess.run(
nix_build([f"path:{self.clan_dir}#{attr}"]), nix_build([f"path:{self.flake_dir}#{attr}"]),
stdout=subprocess.PIPE, stdout=subprocess.PIPE,
check=True, check=True,
text=True, text=True,

View File

@@ -4,12 +4,13 @@ import os
import subprocess import subprocess
from pathlib import Path from pathlib import Path
from ..dirs import get_clan_flake_toplevel from ..dirs import specific_flake_dir
from ..machines.machines import Machine from ..machines.machines import Machine
from ..nix import nix_build, nix_command, nix_config from ..nix import nix_build, nix_command, nix_config
from ..secrets.generate import generate_secrets from ..secrets.generate import generate_secrets
from ..secrets.upload import upload_secrets from ..secrets.upload import upload_secrets
from ..ssh import Host, HostGroup, HostKeyCheck, parse_deployment_address from ..ssh import Host, HostGroup, HostKeyCheck, parse_deployment_address
from ..types import FlakeName
def deploy_nixos(hosts: HostGroup, clan_dir: Path) -> None: def deploy_nixos(hosts: HostGroup, clan_dir: Path) -> None:
@@ -40,7 +41,7 @@ def deploy_nixos(hosts: HostGroup, clan_dir: Path) -> None:
flake_attr = h.meta.get("flake_attr", "") flake_attr = h.meta.get("flake_attr", "")
generate_secrets(h.meta["machine"]) generate_secrets(h.meta["machine"], FlakeName(clan_dir.name))
upload_secrets(h.meta["machine"]) upload_secrets(h.meta["machine"])
target_host = h.meta.get("target_host") target_host = h.meta.get("target_host")
@@ -95,25 +96,29 @@ def get_all_machines(clan_dir: Path) -> HostGroup:
host = parse_deployment_address( host = parse_deployment_address(
name, name,
machine_data["deploymentAddress"], machine_data["deploymentAddress"],
meta={"machine": Machine(name=name, machine_data=machine_data)}, meta={
"machine": Machine(
name=name, flake_dir=clan_dir, machine_data=machine_data
)
},
) )
hosts.append(host) hosts.append(host)
return HostGroup(hosts) return HostGroup(hosts)
def get_selected_machines(machine_names: list[str], clan_dir: Path) -> HostGroup: def get_selected_machines(machine_names: list[str], flake_dir: Path) -> HostGroup:
hosts = [] hosts = []
for name in machine_names: for name in machine_names:
machine = Machine(name=name, clan_dir=clan_dir) machine = Machine(name=name, flake_dir=flake_dir)
hosts.append(machine.host) hosts.append(machine.host)
return HostGroup(hosts) return HostGroup(hosts)
# FIXME: we want some kind of inventory here. # FIXME: we want some kind of inventory here.
def update(args: argparse.Namespace) -> None: def update(args: argparse.Namespace) -> None:
clan_dir = get_clan_flake_toplevel() flake_dir = specific_flake_dir(args.flake)
if len(args.machines) == 1 and args.target_host is not None: if len(args.machines) == 1 and args.target_host is not None:
machine = Machine(name=args.machines[0], clan_dir=clan_dir) machine = Machine(name=args.machines[0], flake_dir=flake_dir)
machine.deployment_address = args.target_host machine.deployment_address = args.target_host
host = parse_deployment_address( host = parse_deployment_address(
args.machines[0], args.machines[0],
@@ -127,11 +132,11 @@ def update(args: argparse.Namespace) -> None:
exit(1) exit(1)
else: else:
if len(args.machines) == 0: if len(args.machines) == 0:
machines = get_all_machines(clan_dir) machines = get_all_machines(flake_dir)
else: else:
machines = get_selected_machines(args.machines, clan_dir) machines = get_selected_machines(args.machines, flake_dir)
deploy_nixos(machines, clan_dir) deploy_nixos(machines, flake_dir)
def register_update_parser(parser: argparse.ArgumentParser) -> None: def register_update_parser(parser: argparse.ArgumentParser) -> None:
@@ -142,6 +147,11 @@ def register_update_parser(parser: argparse.ArgumentParser) -> None:
nargs="*", nargs="*",
default=[], default=[],
) )
parser.add_argument(
"flake",
type=str,
help="name of the flake to update machine for",
)
parser.add_argument( parser.add_argument(
"--target-host", "--target-host",
type=str, type=str,

View File

@@ -2,8 +2,11 @@ import json
import os import os
import subprocess import subprocess
import tempfile import tempfile
from pathlib import Path
from typing import Any from typing import Any
from pydantic import AnyUrl
from .dirs import nixpkgs_flake, nixpkgs_source from .dirs import nixpkgs_flake, nixpkgs_source
@@ -11,7 +14,7 @@ def nix_command(flags: list[str]) -> list[str]:
return ["nix", "--extra-experimental-features", "nix-command flakes"] + flags return ["nix", "--extra-experimental-features", "nix-command flakes"] + flags
def nix_flake_show(flake_url: str) -> list[str]: def nix_flake_show(flake_url: AnyUrl | Path) -> list[str]:
return nix_command( return nix_command(
[ [
"flake", "flake",

View File

@@ -3,17 +3,18 @@ import shutil
from pathlib import Path from pathlib import Path
from typing import Callable from typing import Callable
from ..dirs import get_clan_flake_toplevel from ..dirs import specific_flake_dir
from ..errors import ClanError from ..errors import ClanError
from ..types import FlakeName
def get_sops_folder() -> Path: def get_sops_folder(flake_name: FlakeName) -> Path:
return get_clan_flake_toplevel() / "sops" return specific_flake_dir(flake_name) / "sops"
def gen_sops_subfolder(subdir: str) -> Callable[[], Path]: def gen_sops_subfolder(subdir: str) -> Callable[[FlakeName], Path]:
def folder() -> Path: def folder(flake_name: FlakeName) -> Path:
return get_clan_flake_toplevel() / "sops" / subdir return specific_flake_dir(flake_name) / "sops" / subdir
return folder return folder

View File

@@ -6,19 +6,21 @@ import sys
from clan_cli.errors import ClanError from clan_cli.errors import ClanError
from ..dirs import specific_flake_dir
from ..machines.machines import Machine from ..machines.machines import Machine
from ..types import FlakeName
log = logging.getLogger(__name__) log = logging.getLogger(__name__)
def generate_secrets(machine: Machine) -> None: def generate_secrets(machine: Machine, flake_name: FlakeName) -> None:
env = os.environ.copy() env = os.environ.copy()
env["CLAN_DIR"] = str(machine.clan_dir) env["CLAN_DIR"] = str(machine.flake_dir)
env["PYTHONPATH"] = ":".join(sys.path) # TODO do this in the clanCore module env["PYTHONPATH"] = ":".join(sys.path) # TODO do this in the clanCore module
print(f"generating secrets... {machine.generate_secrets}") print(f"generating secrets... {machine.generate_secrets}")
proc = subprocess.run( proc = subprocess.run(
[machine.generate_secrets], [machine.generate_secrets, flake_name],
env=env, env=env,
) )
@@ -29,8 +31,8 @@ def generate_secrets(machine: Machine) -> None:
def generate_command(args: argparse.Namespace) -> None: def generate_command(args: argparse.Namespace) -> None:
machine = Machine(args.machine) machine = Machine(name=args.machine, flake_dir=specific_flake_dir(args.flake))
generate_secrets(machine) generate_secrets(machine, args.flake)
def register_generate_parser(parser: argparse.ArgumentParser) -> None: def register_generate_parser(parser: argparse.ArgumentParser) -> None:
@@ -38,4 +40,9 @@ def register_generate_parser(parser: argparse.ArgumentParser) -> None:
"machine", "machine",
help="The machine to generate secrets for", help="The machine to generate secrets for",
) )
parser.add_argument(
"flake",
type=str,
help="name of the flake to create machine for",
)
parser.set_defaults(func=generate_command) parser.set_defaults(func=generate_command)

View File

@@ -4,6 +4,7 @@ from pathlib import Path
from ..errors import ClanError from ..errors import ClanError
from ..machines.types import machine_name_type, validate_hostname from ..machines.types import machine_name_type, validate_hostname
from ..types import FlakeName
from . import secrets from . import secrets
from .folders import ( from .folders import (
sops_groups_folder, sops_groups_folder,
@@ -20,24 +21,27 @@ from .types import (
) )
def machines_folder(group: str) -> Path: def machines_folder(flake_name: FlakeName, group: str) -> Path:
return sops_groups_folder() / group / "machines" return sops_groups_folder(flake_name) / group / "machines"
def users_folder(group: str) -> Path: def users_folder(flake_name: FlakeName, group: str) -> Path:
return sops_groups_folder() / group / "users" return sops_groups_folder(flake_name) / group / "users"
class Group: class Group:
def __init__(self, name: str, machines: list[str], users: list[str]) -> None: def __init__(
self, flake_name: FlakeName, name: str, machines: list[str], users: list[str]
) -> None:
self.name = name self.name = name
self.machines = machines self.machines = machines
self.users = users self.users = users
self.flake_name = flake_name
def list_groups() -> list[Group]: def list_groups(flake_name: FlakeName) -> list[Group]:
groups: list[Group] = [] groups: list[Group] = []
folder = sops_groups_folder() folder = sops_groups_folder(flake_name)
if not folder.exists(): if not folder.exists():
return groups return groups
@@ -45,24 +49,24 @@ def list_groups() -> list[Group]:
group_folder = folder / name group_folder = folder / name
if not group_folder.is_dir(): if not group_folder.is_dir():
continue continue
machines_path = machines_folder(name) machines_path = machines_folder(flake_name, name)
machines = [] machines = []
if machines_path.is_dir(): if machines_path.is_dir():
for f in machines_path.iterdir(): for f in machines_path.iterdir():
if validate_hostname(f.name): if validate_hostname(f.name):
machines.append(f.name) machines.append(f.name)
users_path = users_folder(name) users_path = users_folder(flake_name, name)
users = [] users = []
if users_path.is_dir(): if users_path.is_dir():
for f in users_path.iterdir(): for f in users_path.iterdir():
if VALID_USER_NAME.match(f.name): if VALID_USER_NAME.match(f.name):
users.append(f.name) users.append(f.name)
groups.append(Group(name, machines, users)) groups.append(Group(flake_name, name, machines, users))
return groups return groups
def list_command(args: argparse.Namespace) -> None: def list_command(args: argparse.Namespace) -> None:
for group in list_groups(): for group in list_groups(args.flake):
print(group.name) print(group.name)
if group.machines: if group.machines:
print("machines:") print("machines:")
@@ -84,9 +88,9 @@ def list_directory(directory: Path) -> str:
return msg return msg
def update_group_keys(group: str) -> None: def update_group_keys(flake_name: FlakeName, group: str) -> None:
for secret_ in secrets.list_secrets(): for secret_ in secrets.list_secrets(flake_name):
secret = sops_secrets_folder() / secret_ secret = sops_secrets_folder(flake_name) / secret_
if (secret / "groups" / group).is_symlink(): if (secret / "groups" / group).is_symlink():
update_keys( update_keys(
secret, secret,
@@ -94,7 +98,9 @@ def update_group_keys(group: str) -> None:
) )
def add_member(group_folder: Path, source_folder: Path, name: str) -> None: def add_member(
flake_name: FlakeName, group_folder: Path, source_folder: Path, name: str
) -> None:
source = source_folder / name source = source_folder / name
if not source.exists(): if not source.exists():
msg = f"{name} does not exist in {source_folder}: " msg = f"{name} does not exist in {source_folder}: "
@@ -109,10 +115,10 @@ def add_member(group_folder: Path, source_folder: Path, name: str) -> None:
) )
os.remove(user_target) os.remove(user_target)
user_target.symlink_to(os.path.relpath(source, user_target.parent)) user_target.symlink_to(os.path.relpath(source, user_target.parent))
update_group_keys(group_folder.parent.name) update_group_keys(flake_name, group_folder.parent.name)
def remove_member(group_folder: Path, name: str) -> None: def remove_member(flake_name: FlakeName, group_folder: Path, name: str) -> None:
target = group_folder / name target = group_folder / name
if not target.exists(): if not target.exists():
msg = f"{name} does not exist in group in {group_folder}: " msg = f"{name} does not exist in group in {group_folder}: "
@@ -121,7 +127,7 @@ def remove_member(group_folder: Path, name: str) -> None:
os.remove(target) os.remove(target)
if len(os.listdir(group_folder)) > 0: if len(os.listdir(group_folder)) > 0:
update_group_keys(group_folder.parent.name) update_group_keys(flake_name, group_folder.parent.name)
if len(os.listdir(group_folder)) == 0: if len(os.listdir(group_folder)) == 0:
os.rmdir(group_folder) os.rmdir(group_folder)
@@ -130,56 +136,65 @@ def remove_member(group_folder: Path, name: str) -> None:
os.rmdir(group_folder.parent) os.rmdir(group_folder.parent)
def add_user(group: str, name: str) -> None: def add_user(flake_name: FlakeName, group: str, name: str) -> None:
add_member(users_folder(group), sops_users_folder(), name) add_member(
flake_name, users_folder(flake_name, group), sops_users_folder(flake_name), name
)
def add_user_command(args: argparse.Namespace) -> None: def add_user_command(args: argparse.Namespace) -> None:
add_user(args.group, args.user) add_user(args.flake, args.group, args.user)
def remove_user(group: str, name: str) -> None: def remove_user(flake_name: FlakeName, group: str, name: str) -> None:
remove_member(users_folder(group), name) remove_member(flake_name, users_folder(flake_name, group), name)
def remove_user_command(args: argparse.Namespace) -> None: def remove_user_command(args: argparse.Namespace) -> None:
remove_user(args.group, args.user) remove_user(args.flake, args.group, args.user)
def add_machine(group: str, name: str) -> None: def add_machine(flake_name: FlakeName, group: str, name: str) -> None:
add_member(machines_folder(group), sops_machines_folder(), name) add_member(
flake_name,
machines_folder(flake_name, group),
sops_machines_folder(flake_name),
name,
)
def add_machine_command(args: argparse.Namespace) -> None: def add_machine_command(args: argparse.Namespace) -> None:
add_machine(args.group, args.machine) add_machine(args.flake, args.group, args.machine)
def remove_machine(group: str, name: str) -> None: def remove_machine(flake_name: FlakeName, group: str, name: str) -> None:
remove_member(machines_folder(group), name) remove_member(flake_name, machines_folder(flake_name, group), name)
def remove_machine_command(args: argparse.Namespace) -> None: def remove_machine_command(args: argparse.Namespace) -> None:
remove_machine(args.group, args.machine) remove_machine(args.flake, args.group, args.machine)
def add_group_argument(parser: argparse.ArgumentParser) -> None: def add_group_argument(parser: argparse.ArgumentParser) -> None:
parser.add_argument("group", help="the name of the secret", type=group_name_type) parser.add_argument("group", help="the name of the secret", type=group_name_type)
def add_secret(group: str, name: str) -> None: def add_secret(flake_name: FlakeName, group: str, name: str) -> None:
secrets.allow_member(secrets.groups_folder(name), sops_groups_folder(), group) secrets.allow_member(
secrets.groups_folder(flake_name, name), sops_groups_folder(flake_name), group
)
def add_secret_command(args: argparse.Namespace) -> None: def add_secret_command(args: argparse.Namespace) -> None:
add_secret(args.group, args.secret) add_secret(args.flake, args.group, args.secret)
def remove_secret(group: str, name: str) -> None: def remove_secret(flake_name: FlakeName, group: str, name: str) -> None:
secrets.disallow_member(secrets.groups_folder(name), group) secrets.disallow_member(secrets.groups_folder(flake_name, name), group)
def remove_secret_command(args: argparse.Namespace) -> None: def remove_secret_command(args: argparse.Namespace) -> None:
remove_secret(args.group, args.secret) remove_secret(args.flake, args.group, args.secret)
def register_groups_parser(parser: argparse.ArgumentParser) -> None: def register_groups_parser(parser: argparse.ArgumentParser) -> None:
@@ -189,9 +204,17 @@ def register_groups_parser(parser: argparse.ArgumentParser) -> None:
help="the command to run", help="the command to run",
required=True, required=True,
) )
# List groups
list_parser = subparser.add_parser("list", help="list groups") list_parser = subparser.add_parser("list", help="list groups")
list_parser.add_argument(
"flake",
type=str,
help="name of the flake to create machine for",
)
list_parser.set_defaults(func=list_command) list_parser.set_defaults(func=list_command)
# Add user
add_machine_parser = subparser.add_parser( add_machine_parser = subparser.add_parser(
"add-machine", help="add a machine to group" "add-machine", help="add a machine to group"
) )
@@ -199,8 +222,14 @@ def register_groups_parser(parser: argparse.ArgumentParser) -> None:
add_machine_parser.add_argument( add_machine_parser.add_argument(
"machine", help="the name of the machines to add", type=machine_name_type "machine", help="the name of the machines to add", type=machine_name_type
) )
add_machine_parser.add_argument(
"flake",
type=str,
help="name of the flake to create machine for",
)
add_machine_parser.set_defaults(func=add_machine_command) add_machine_parser.set_defaults(func=add_machine_command)
# Remove machine
remove_machine_parser = subparser.add_parser( remove_machine_parser = subparser.add_parser(
"remove-machine", help="remove a machine from group" "remove-machine", help="remove a machine from group"
) )
@@ -208,15 +237,27 @@ def register_groups_parser(parser: argparse.ArgumentParser) -> None:
remove_machine_parser.add_argument( remove_machine_parser.add_argument(
"machine", help="the name of the machines to remove", type=machine_name_type "machine", help="the name of the machines to remove", type=machine_name_type
) )
remove_machine_parser.add_argument(
"flake",
type=str,
help="name of the flake to create machine for",
)
remove_machine_parser.set_defaults(func=remove_machine_command) remove_machine_parser.set_defaults(func=remove_machine_command)
# Add user
add_user_parser = subparser.add_parser("add-user", help="add a user to group") add_user_parser = subparser.add_parser("add-user", help="add a user to group")
add_group_argument(add_user_parser) add_group_argument(add_user_parser)
add_user_parser.add_argument( add_user_parser.add_argument(
"user", help="the name of the user to add", type=user_name_type "user", help="the name of the user to add", type=user_name_type
) )
add_user_parser.add_argument(
"flake",
type=str,
help="name of the flake to create machine for",
)
add_user_parser.set_defaults(func=add_user_command) add_user_parser.set_defaults(func=add_user_command)
# Remove user
remove_user_parser = subparser.add_parser( remove_user_parser = subparser.add_parser(
"remove-user", help="remove a user from group" "remove-user", help="remove a user from group"
) )
@@ -224,8 +265,14 @@ def register_groups_parser(parser: argparse.ArgumentParser) -> None:
remove_user_parser.add_argument( remove_user_parser.add_argument(
"user", help="the name of the user to remove", type=user_name_type "user", help="the name of the user to remove", type=user_name_type
) )
remove_user_parser.add_argument(
"flake",
type=str,
help="name of the flake to create machine for",
)
remove_user_parser.set_defaults(func=remove_user_command) remove_user_parser.set_defaults(func=remove_user_command)
# Add secret
add_secret_parser = subparser.add_parser( add_secret_parser = subparser.add_parser(
"add-secret", help="allow a user to access a secret" "add-secret", help="allow a user to access a secret"
) )
@@ -235,8 +282,14 @@ def register_groups_parser(parser: argparse.ArgumentParser) -> None:
add_secret_parser.add_argument( add_secret_parser.add_argument(
"secret", help="the name of the secret", type=secret_name_type "secret", help="the name of the secret", type=secret_name_type
) )
add_secret_parser.add_argument(
"flake",
type=str,
help="name of the flake to create machine for",
)
add_secret_parser.set_defaults(func=add_secret_command) add_secret_parser.set_defaults(func=add_secret_command)
# Remove secret
remove_secret_parser = subparser.add_parser( remove_secret_parser = subparser.add_parser(
"remove-secret", help="remove a group's access to a secret" "remove-secret", help="remove a group's access to a secret"
) )
@@ -246,4 +299,9 @@ def register_groups_parser(parser: argparse.ArgumentParser) -> None:
remove_secret_parser.add_argument( remove_secret_parser.add_argument(
"secret", help="the name of the secret", type=secret_name_type "secret", help="the name of the secret", type=secret_name_type
) )
remove_secret_parser.add_argument(
"flake",
type=str,
help="name of the flake to create machine for",
)
remove_secret_parser.set_defaults(func=remove_secret_command) remove_secret_parser.set_defaults(func=remove_secret_command)

View File

@@ -36,14 +36,15 @@ def import_sops(args: argparse.Namespace) -> None:
file=sys.stderr, file=sys.stderr,
) )
continue continue
if (sops_secrets_folder() / k / "secret").exists(): if (sops_secrets_folder(args.flake) / k / "secret").exists():
print( print(
f"WARNING: {k} already exists, skipping", f"WARNING: {k} already exists, skipping",
file=sys.stderr, file=sys.stderr,
) )
continue continue
encrypt_secret( encrypt_secret(
sops_secrets_folder() / k, args.flake,
sops_secrets_folder(args.flake) / k,
v, v,
add_groups=args.group, add_groups=args.group,
add_machines=args.machine, add_machines=args.machine,
@@ -90,4 +91,10 @@ def register_import_sops_parser(parser: argparse.ArgumentParser) -> None:
type=str, type=str,
help="the sops file to import (- for stdin)", help="the sops file to import (- for stdin)",
) )
parser.add_argument(
"flake",
type=str,
help="name of the flake",
)
parser.set_defaults(func=import_sops) parser.set_defaults(func=import_sops)

View File

@@ -1,71 +1,74 @@
import argparse import argparse
from ..machines.types import machine_name_type, validate_hostname from ..machines.types import machine_name_type, validate_hostname
from ..types import FlakeName
from . import secrets from . import secrets
from .folders import list_objects, remove_object, sops_machines_folder from .folders import list_objects, remove_object, sops_machines_folder
from .sops import read_key, write_key from .sops import read_key, write_key
from .types import public_or_private_age_key_type, secret_name_type from .types import public_or_private_age_key_type, secret_name_type
def add_machine(name: str, key: str, force: bool) -> None: def add_machine(flake_name: FlakeName, name: str, key: str, force: bool) -> None:
write_key(sops_machines_folder() / name, key, force) write_key(sops_machines_folder(flake_name) / name, key, force)
def remove_machine(name: str) -> None: def remove_machine(flake_name: FlakeName, name: str) -> None:
remove_object(sops_machines_folder(), name) remove_object(sops_machines_folder(flake_name), name)
def get_machine(name: str) -> str: def get_machine(flake_name: FlakeName, name: str) -> str:
return read_key(sops_machines_folder() / name) return read_key(sops_machines_folder(flake_name) / name)
def has_machine(name: str) -> bool: def has_machine(flake_name: FlakeName, name: str) -> bool:
return (sops_machines_folder() / name / "key.json").exists() return (sops_machines_folder(flake_name) / name / "key.json").exists()
def list_machines() -> list[str]: def list_machines(flake_name: FlakeName) -> list[str]:
path = sops_machines_folder() path = sops_machines_folder(flake_name)
def validate(name: str) -> bool: def validate(name: str) -> bool:
return validate_hostname(name) and has_machine(name) return validate_hostname(name) and has_machine(flake_name, name)
return list_objects(path, validate) return list_objects(path, validate)
def add_secret(machine: str, secret: str) -> None: def add_secret(flake_name: FlakeName, machine: str, secret: str) -> None:
secrets.allow_member( secrets.allow_member(
secrets.machines_folder(secret), sops_machines_folder(), machine secrets.machines_folder(flake_name, secret),
sops_machines_folder(flake_name),
machine,
) )
def remove_secret(machine: str, secret: str) -> None: def remove_secret(flake_name: FlakeName, machine: str, secret: str) -> None:
secrets.disallow_member(secrets.machines_folder(secret), machine) secrets.disallow_member(secrets.machines_folder(flake_name, secret), machine)
def list_command(args: argparse.Namespace) -> None: def list_command(args: argparse.Namespace) -> None:
lst = list_machines() lst = list_machines(args.flake)
if len(lst) > 0: if len(lst) > 0:
print("\n".join(lst)) print("\n".join(lst))
def add_command(args: argparse.Namespace) -> None: def add_command(args: argparse.Namespace) -> None:
add_machine(args.machine, args.key, args.force) add_machine(args.flake, args.machine, args.key, args.force)
def get_command(args: argparse.Namespace) -> None: def get_command(args: argparse.Namespace) -> None:
print(get_machine(args.machine)) print(get_machine(args.flake, args.machine))
def remove_command(args: argparse.Namespace) -> None: def remove_command(args: argparse.Namespace) -> None:
remove_machine(args.machine) remove_machine(args.flake, args.machine)
def add_secret_command(args: argparse.Namespace) -> None: def add_secret_command(args: argparse.Namespace) -> None:
add_secret(args.machine, args.secret) add_secret(args.flake, args.machine, args.secret)
def remove_secret_command(args: argparse.Namespace) -> None: def remove_secret_command(args: argparse.Namespace) -> None:
remove_secret(args.machine, args.secret) remove_secret(args.flake, args.machine, args.secret)
def register_machines_parser(parser: argparse.ArgumentParser) -> None: def register_machines_parser(parser: argparse.ArgumentParser) -> None:
@@ -75,9 +78,16 @@ def register_machines_parser(parser: argparse.ArgumentParser) -> None:
help="the command to run", help="the command to run",
required=True, required=True,
) )
# Parser
list_parser = subparser.add_parser("list", help="list machines") list_parser = subparser.add_parser("list", help="list machines")
list_parser.add_argument(
"flake",
type=str,
help="name of the flake to create machine for",
)
list_parser.set_defaults(func=list_command) list_parser.set_defaults(func=list_command)
# Parser
add_parser = subparser.add_parser("add", help="add a machine") add_parser = subparser.add_parser("add", help="add a machine")
add_parser.add_argument( add_parser.add_argument(
"-f", "-f",
@@ -94,20 +104,38 @@ def register_machines_parser(parser: argparse.ArgumentParser) -> None:
help="public key or private key of the user", help="public key or private key of the user",
type=public_or_private_age_key_type, type=public_or_private_age_key_type,
) )
add_parser.add_argument(
"flake",
type=str,
help="name of the flake to create machine for",
)
add_parser.set_defaults(func=add_command) add_parser.set_defaults(func=add_command)
# Parser
get_parser = subparser.add_parser("get", help="get a machine public key") get_parser = subparser.add_parser("get", help="get a machine public key")
get_parser.add_argument( get_parser.add_argument(
"machine", help="the name of the machine", type=machine_name_type "machine", help="the name of the machine", type=machine_name_type
) )
get_parser.add_argument(
"flake",
type=str,
help="name of the flake to create machine for",
)
get_parser.set_defaults(func=get_command) get_parser.set_defaults(func=get_command)
# Parser
remove_parser = subparser.add_parser("remove", help="remove a machine") remove_parser = subparser.add_parser("remove", help="remove a machine")
remove_parser.add_argument( remove_parser.add_argument(
"machine", help="the name of the machine", type=machine_name_type "machine", help="the name of the machine", type=machine_name_type
) )
remove_parser.add_argument(
"flake",
type=str,
help="name of the flake to create machine for",
)
remove_parser.set_defaults(func=remove_command) remove_parser.set_defaults(func=remove_command)
# Parser
add_secret_parser = subparser.add_parser( add_secret_parser = subparser.add_parser(
"add-secret", help="allow a machine to access a secret" "add-secret", help="allow a machine to access a secret"
) )
@@ -117,8 +145,14 @@ def register_machines_parser(parser: argparse.ArgumentParser) -> None:
add_secret_parser.add_argument( add_secret_parser.add_argument(
"secret", help="the name of the secret", type=secret_name_type "secret", help="the name of the secret", type=secret_name_type
) )
add_secret_parser.add_argument(
"flake",
type=str,
help="name of the flake to create machine for",
)
add_secret_parser.set_defaults(func=add_secret_command) add_secret_parser.set_defaults(func=add_secret_command)
# Parser
remove_secret_parser = subparser.add_parser( remove_secret_parser = subparser.add_parser(
"remove-secret", help="remove a group's access to a secret" "remove-secret", help="remove a group's access to a secret"
) )
@@ -128,4 +162,9 @@ def register_machines_parser(parser: argparse.ArgumentParser) -> None:
remove_secret_parser.add_argument( remove_secret_parser.add_argument(
"secret", help="the name of the secret", type=secret_name_type "secret", help="the name of the secret", type=secret_name_type
) )
remove_secret_parser.add_argument(
"flake",
type=str,
help="name of the flake to create machine for",
)
remove_secret_parser.set_defaults(func=remove_secret_command) remove_secret_parser.set_defaults(func=remove_secret_command)

View File

@@ -8,6 +8,7 @@ from typing import IO
from .. import tty from .. import tty
from ..errors import ClanError from ..errors import ClanError
from ..types import FlakeName
from .folders import ( from .folders import (
list_objects, list_objects,
sops_groups_folder, sops_groups_folder,
@@ -53,62 +54,79 @@ def collect_keys_for_path(path: Path) -> set[str]:
def encrypt_secret( def encrypt_secret(
flake_name: FlakeName,
secret: Path, secret: Path,
value: IO[str] | str | None, value: IO[str] | str | None,
add_users: list[str] = [], add_users: list[str] = [],
add_machines: list[str] = [], add_machines: list[str] = [],
add_groups: list[str] = [], add_groups: list[str] = [],
) -> None: ) -> None:
key = ensure_sops_key() key = ensure_sops_key(flake_name)
keys = set([]) keys = set([])
for user in add_users: for user in add_users:
allow_member(users_folder(secret.name), sops_users_folder(), user, False) allow_member(
users_folder(flake_name, secret.name),
sops_users_folder(flake_name),
user,
False,
)
for machine in add_machines: for machine in add_machines:
allow_member( allow_member(
machines_folder(secret.name), sops_machines_folder(), machine, False machines_folder(flake_name, secret.name),
sops_machines_folder(flake_name),
machine,
False,
) )
for group in add_groups: for group in add_groups:
allow_member(groups_folder(secret.name), sops_groups_folder(), group, False) allow_member(
groups_folder(flake_name, secret.name),
sops_groups_folder(flake_name),
group,
False,
)
keys = collect_keys_for_path(secret) keys = collect_keys_for_path(secret)
if key.pubkey not in keys: if key.pubkey not in keys:
keys.add(key.pubkey) keys.add(key.pubkey)
allow_member( allow_member(
users_folder(secret.name), sops_users_folder(), key.username, False users_folder(flake_name, secret.name),
sops_users_folder(flake_name),
key.username,
False,
) )
encrypt_file(secret / "secret", value, list(sorted(keys))) encrypt_file(secret / "secret", value, list(sorted(keys)))
def remove_secret(secret: str) -> None: def remove_secret(flake_name: FlakeName, secret: str) -> None:
path = sops_secrets_folder() / secret path = sops_secrets_folder(flake_name) / secret
if not path.exists(): if not path.exists():
raise ClanError(f"Secret '{secret}' does not exist") raise ClanError(f"Secret '{secret}' does not exist")
shutil.rmtree(path) shutil.rmtree(path)
def remove_command(args: argparse.Namespace) -> None: def remove_command(args: argparse.Namespace) -> None:
remove_secret(args.secret) remove_secret(args.flake, args.secret)
def add_secret_argument(parser: argparse.ArgumentParser) -> None: def add_secret_argument(parser: argparse.ArgumentParser) -> None:
parser.add_argument("secret", help="the name of the secret", type=secret_name_type) parser.add_argument("secret", help="the name of the secret", type=secret_name_type)
def machines_folder(group: str) -> Path: def machines_folder(flake_name: FlakeName, group: str) -> Path:
return sops_secrets_folder() / group / "machines" return sops_secrets_folder(flake_name) / group / "machines"
def users_folder(group: str) -> Path: def users_folder(flake_name: FlakeName, group: str) -> Path:
return sops_secrets_folder() / group / "users" return sops_secrets_folder(flake_name) / group / "users"
def groups_folder(group: str) -> Path: def groups_folder(flake_name: FlakeName, group: str) -> Path:
return sops_secrets_folder() / group / "groups" return sops_secrets_folder(flake_name) / group / "groups"
def list_directory(directory: Path) -> str: def list_directory(directory: Path) -> str:
@@ -171,35 +189,37 @@ def disallow_member(group_folder: Path, name: str) -> None:
) )
def has_secret(secret: str) -> bool: def has_secret(flake_name: FlakeName, secret: str) -> bool:
return (sops_secrets_folder() / secret / "secret").exists() return (sops_secrets_folder(flake_name) / secret / "secret").exists()
def list_secrets() -> list[str]: def list_secrets(flake_name: FlakeName) -> list[str]:
path = sops_secrets_folder() path = sops_secrets_folder(flake_name)
def validate(name: str) -> bool: def validate(name: str) -> bool:
return VALID_SECRET_NAME.match(name) is not None and has_secret(name) return VALID_SECRET_NAME.match(name) is not None and has_secret(
flake_name, name
)
return list_objects(path, validate) return list_objects(path, validate)
def list_command(args: argparse.Namespace) -> None: def list_command(args: argparse.Namespace) -> None:
lst = list_secrets() lst = list_secrets(args.flake)
if len(lst) > 0: if len(lst) > 0:
print("\n".join(lst)) print("\n".join(lst))
def decrypt_secret(secret: str) -> str: def decrypt_secret(flake_name: FlakeName, secret: str) -> str:
ensure_sops_key() ensure_sops_key(flake_name)
secret_path = sops_secrets_folder() / secret / "secret" secret_path = sops_secrets_folder(flake_name) / secret / "secret"
if not secret_path.exists(): if not secret_path.exists():
raise ClanError(f"Secret '{secret}' does not exist") raise ClanError(f"Secret '{secret}' does not exist")
return decrypt_file(secret_path) return decrypt_file(secret_path)
def get_command(args: argparse.Namespace) -> None: def get_command(args: argparse.Namespace) -> None:
print(decrypt_secret(args.secret), end="") print(decrypt_secret(args.flake, args.secret), end="")
def set_command(args: argparse.Namespace) -> None: def set_command(args: argparse.Namespace) -> None:
@@ -212,7 +232,8 @@ def set_command(args: argparse.Namespace) -> None:
elif tty.is_interactive(): elif tty.is_interactive():
secret_value = getpass.getpass(prompt="Paste your secret: ") secret_value = getpass.getpass(prompt="Paste your secret: ")
encrypt_secret( encrypt_secret(
sops_secrets_folder() / args.secret, args.flake,
sops_secrets_folder(args.flake) / args.secret,
secret_value, secret_value,
args.user, args.user,
args.machine, args.machine,
@@ -221,8 +242,8 @@ def set_command(args: argparse.Namespace) -> None:
def rename_command(args: argparse.Namespace) -> None: def rename_command(args: argparse.Namespace) -> None:
old_path = sops_secrets_folder() / args.secret old_path = sops_secrets_folder(args.flake) / args.secret
new_path = sops_secrets_folder() / args.new_name new_path = sops_secrets_folder(args.flake) / args.new_name
if not old_path.exists(): if not old_path.exists():
raise ClanError(f"Secret '{args.secret}' does not exist") raise ClanError(f"Secret '{args.secret}' does not exist")
if new_path.exists(): if new_path.exists():
@@ -232,10 +253,20 @@ def rename_command(args: argparse.Namespace) -> None:
def register_secrets_parser(subparser: argparse._SubParsersAction) -> None: def register_secrets_parser(subparser: argparse._SubParsersAction) -> None:
parser_list = subparser.add_parser("list", help="list secrets") parser_list = subparser.add_parser("list", help="list secrets")
parser_list.add_argument(
"flake",
type=str,
help="name of the flake to create machine for",
)
parser_list.set_defaults(func=list_command) parser_list.set_defaults(func=list_command)
parser_get = subparser.add_parser("get", help="get a secret") parser_get = subparser.add_parser("get", help="get a secret")
add_secret_argument(parser_get) add_secret_argument(parser_get)
parser_get.add_argument(
"flake",
type=str,
help="name of the flake to create machine for",
)
parser_get.set_defaults(func=get_command) parser_get.set_defaults(func=get_command)
parser_set = subparser.add_parser("set", help="set a secret") parser_set = subparser.add_parser("set", help="set a secret")
@@ -268,13 +299,28 @@ def register_secrets_parser(subparser: argparse._SubParsersAction) -> None:
default=False, default=False,
help="edit the secret with $EDITOR instead of pasting it", help="edit the secret with $EDITOR instead of pasting it",
) )
parser_set.add_argument(
"flake",
type=str,
help="name of the flake to create machine for",
)
parser_set.set_defaults(func=set_command) parser_set.set_defaults(func=set_command)
parser_rename = subparser.add_parser("rename", help="rename a secret") parser_rename = subparser.add_parser("rename", help="rename a secret")
add_secret_argument(parser_rename) add_secret_argument(parser_rename)
parser_rename.add_argument("new_name", type=str, help="the new name of the secret") parser_rename.add_argument("new_name", type=str, help="the new name of the secret")
parser_rename.add_argument(
"flake",
type=str,
help="name of the flake to create machine for",
)
parser_rename.set_defaults(func=rename_command) parser_rename.set_defaults(func=rename_command)
parser_remove = subparser.add_parser("remove", help="remove a secret") parser_remove = subparser.add_parser("remove", help="remove a secret")
add_secret_argument(parser_remove) add_secret_argument(parser_remove)
parser_remove.add_argument(
"flake",
type=str,
help="name of the flake to create machine for",
)
parser_remove.set_defaults(func=remove_command) parser_remove.set_defaults(func=remove_command)

View File

@@ -10,6 +10,7 @@ from typing import IO, Iterator
from ..dirs import user_config_dir from ..dirs import user_config_dir
from ..errors import ClanError from ..errors import ClanError
from ..nix import nix_shell from ..nix import nix_shell
from ..types import FlakeName
from .folders import sops_machines_folder, sops_users_folder from .folders import sops_machines_folder, sops_users_folder
@@ -51,7 +52,7 @@ def generate_private_key() -> tuple[str, str]:
raise ClanError("Failed to generate private sops key") from e raise ClanError("Failed to generate private sops key") from e
def get_user_name(user: str) -> str: def get_user_name(flake_name: FlakeName, user: str) -> str:
"""Ask the user for their name until a unique one is provided.""" """Ask the user for their name until a unique one is provided."""
while True: while True:
name = input( name = input(
@@ -59,14 +60,14 @@ def get_user_name(user: str) -> str:
) )
if name: if name:
user = name user = name
if not (sops_users_folder() / user).exists(): if not (sops_users_folder(flake_name) / user).exists():
return user return user
print(f"{sops_users_folder() / user} already exists") print(f"{sops_users_folder(flake_name) / user} already exists")
def ensure_user_or_machine(pub_key: str) -> SopsKey: def ensure_user_or_machine(flake_name: FlakeName, pub_key: str) -> SopsKey:
key = SopsKey(pub_key, username="") key = SopsKey(pub_key, username="")
folders = [sops_users_folder(), sops_machines_folder()] folders = [sops_users_folder(flake_name), sops_machines_folder(flake_name)]
for folder in folders: for folder in folders:
if folder.exists(): if folder.exists():
for user in folder.iterdir(): for user in folder.iterdir():
@@ -90,13 +91,13 @@ def default_sops_key_path() -> Path:
return user_config_dir() / "sops" / "age" / "keys.txt" return user_config_dir() / "sops" / "age" / "keys.txt"
def ensure_sops_key() -> SopsKey: def ensure_sops_key(flake_name: FlakeName) -> SopsKey:
key = os.environ.get("SOPS_AGE_KEY") key = os.environ.get("SOPS_AGE_KEY")
if key: if key:
return ensure_user_or_machine(get_public_key(key)) return ensure_user_or_machine(flake_name, get_public_key(key))
path = default_sops_key_path() path = default_sops_key_path()
if path.exists(): if path.exists():
return ensure_user_or_machine(get_public_key(path.read_text())) return ensure_user_or_machine(flake_name, get_public_key(path.read_text()))
else: else:
raise ClanError( raise ClanError(
"No sops key found. Please generate one with 'clan secrets key generate'." "No sops key found. Please generate one with 'clan secrets key generate'."

View File

@@ -1,3 +1,4 @@
import logging
import os import os
import shlex import shlex
import shutil import shutil
@@ -9,29 +10,40 @@ from typing import Any
from clan_cli.nix import nix_shell from clan_cli.nix import nix_shell
from ..dirs import get_clan_flake_toplevel from ..dirs import specific_flake_dir
from ..errors import ClanError from ..errors import ClanError
from ..types import FlakeName
from .folders import sops_secrets_folder from .folders import sops_secrets_folder
from .machines import add_machine, has_machine from .machines import add_machine, has_machine
from .secrets import decrypt_secret, encrypt_secret, has_secret from .secrets import decrypt_secret, encrypt_secret, has_secret
from .sops import generate_private_key from .sops import generate_private_key
log = logging.getLogger(__name__)
def generate_host_key(machine_name: str) -> None:
if has_machine(machine_name): def generate_host_key(flake_name: FlakeName, machine_name: str) -> None:
if has_machine(flake_name, machine_name):
return return
priv_key, pub_key = generate_private_key() priv_key, pub_key = generate_private_key()
encrypt_secret(sops_secrets_folder() / f"{machine_name}-age.key", priv_key) encrypt_secret(
add_machine(machine_name, pub_key, False) flake_name,
sops_secrets_folder(flake_name) / f"{machine_name}-age.key",
priv_key,
)
add_machine(flake_name, machine_name, pub_key, False)
def generate_secrets_group( def generate_secrets_group(
secret_group: str, machine_name: str, tempdir: Path, secret_options: dict[str, Any] flake_name: FlakeName,
secret_group: str,
machine_name: str,
tempdir: Path,
secret_options: dict[str, Any],
) -> None: ) -> None:
clan_dir = get_clan_flake_toplevel() clan_dir = specific_flake_dir(flake_name)
secrets = secret_options["secrets"] secrets = secret_options["secrets"]
needs_regeneration = any( needs_regeneration = any(
not has_secret(f"{machine_name}-{secret['name']}") not has_secret(flake_name, f"{machine_name}-{secret['name']}")
for secret in secrets.values() for secret in secrets.values()
) )
generator = secret_options["generator"] generator = secret_options["generator"]
@@ -62,7 +74,8 @@ export secrets={shlex.quote(str(secrets_dir))}
msg += text msg += text
raise ClanError(msg) raise ClanError(msg)
encrypt_secret( encrypt_secret(
sops_secrets_folder() / f"{machine_name}-{secret['name']}", flake_name,
sops_secrets_folder(flake_name) / f"{machine_name}-{secret['name']}",
secret_file.read_text(), secret_file.read_text(),
add_machines=[machine_name], add_machines=[machine_name],
) )
@@ -79,17 +92,21 @@ export secrets={shlex.quote(str(secrets_dir))}
# this is called by the sops.nix clan core module # this is called by the sops.nix clan core module
def generate_secrets_from_nix( def generate_secrets_from_nix(
flake_name: FlakeName,
machine_name: str, machine_name: str,
secret_submodules: dict[str, Any], secret_submodules: dict[str, Any],
) -> None: ) -> None:
generate_host_key(machine_name) generate_host_key(flake_name, machine_name)
errors = {} errors = {}
log.debug(
"Generating secrets for machine %s and flake %s", machine_name, flake_name
)
with TemporaryDirectory() as d: with TemporaryDirectory() as d:
# if any of the secrets are missing, we regenerate all connected facts/secrets # if any of the secrets are missing, we regenerate all connected facts/secrets
for secret_group, secret_options in secret_submodules.items(): for secret_group, secret_options in secret_submodules.items():
try: try:
generate_secrets_group( generate_secrets_group(
secret_group, machine_name, Path(d), secret_options flake_name, secret_group, machine_name, Path(d), secret_options
) )
except ClanError as e: except ClanError as e:
errors[secret_group] = e errors[secret_group] = e
@@ -102,12 +119,16 @@ def generate_secrets_from_nix(
# this is called by the sops.nix clan core module # this is called by the sops.nix clan core module
def upload_age_key_from_nix( def upload_age_key_from_nix(
flake_name: FlakeName,
machine_name: str, machine_name: str,
) -> None: ) -> None:
log.debug("Uploading secrets for machine %s and flake %s", machine_name, flake_name)
secret_name = f"{machine_name}-age.key" secret_name = f"{machine_name}-age.key"
if not has_secret(secret_name): # skip uploading the secret, not managed by us if not has_secret(
flake_name, secret_name
): # skip uploading the secret, not managed by us
return return
secret = decrypt_secret(secret_name) secret = decrypt_secret(flake_name, secret_name)
secrets_dir = Path(os.environ["SECRETS_DIR"]) secrets_dir = Path(os.environ["SECRETS_DIR"])
(secrets_dir / "key.txt").write_text(secret) (secrets_dir / "key.txt").write_text(secret)

View File

@@ -4,6 +4,7 @@ import subprocess
from pathlib import Path from pathlib import Path
from tempfile import TemporaryDirectory from tempfile import TemporaryDirectory
from ..dirs import specific_flake_dir
from ..machines.machines import Machine from ..machines.machines import Machine
from ..nix import nix_shell from ..nix import nix_shell
@@ -37,7 +38,7 @@ def upload_secrets(machine: Machine) -> None:
def upload_command(args: argparse.Namespace) -> None: def upload_command(args: argparse.Namespace) -> None:
machine = Machine(args.machine) machine = Machine(name=args.machine, flake_dir=specific_flake_dir(args.flake))
upload_secrets(machine) upload_secrets(machine)
@@ -46,4 +47,9 @@ def register_upload_parser(parser: argparse.ArgumentParser) -> None:
"machine", "machine",
help="The machine to upload secrets to", help="The machine to upload secrets to",
) )
parser.add_argument(
"flake",
type=str,
help="name of the flake to create machine for",
)
parser.set_defaults(func=upload_command) parser.set_defaults(func=upload_command)

View File

@@ -1,5 +1,6 @@
import argparse import argparse
from ..types import FlakeName
from . import secrets from . import secrets
from .folders import list_objects, remove_object, sops_users_folder from .folders import list_objects, remove_object, sops_users_folder
from .sops import read_key, write_key from .sops import read_key, write_key
@@ -11,20 +12,20 @@ from .types import (
) )
def add_user(name: str, key: str, force: bool) -> None: def add_user(flake_name: FlakeName, name: str, key: str, force: bool) -> None:
write_key(sops_users_folder() / name, key, force) write_key(sops_users_folder(flake_name) / name, key, force)
def remove_user(name: str) -> None: def remove_user(flake_name: FlakeName, name: str) -> None:
remove_object(sops_users_folder(), name) remove_object(sops_users_folder(flake_name), name)
def get_user(name: str) -> str: def get_user(flake_name: FlakeName, name: str) -> str:
return read_key(sops_users_folder() / name) return read_key(sops_users_folder(flake_name) / name)
def list_users() -> list[str]: def list_users(flake_name: FlakeName) -> list[str]:
path = sops_users_folder() path = sops_users_folder(flake_name)
def validate(name: str) -> bool: def validate(name: str) -> bool:
return ( return (
@@ -35,38 +36,40 @@ def list_users() -> list[str]:
return list_objects(path, validate) return list_objects(path, validate)
def add_secret(user: str, secret: str) -> None: def add_secret(flake_name: FlakeName, user: str, secret: str) -> None:
secrets.allow_member(secrets.users_folder(secret), sops_users_folder(), user) secrets.allow_member(
secrets.users_folder(flake_name, secret), sops_users_folder(flake_name), user
)
def remove_secret(user: str, secret: str) -> None: def remove_secret(flake_name: FlakeName, user: str, secret: str) -> None:
secrets.disallow_member(secrets.users_folder(secret), user) secrets.disallow_member(secrets.users_folder(flake_name, secret), user)
def list_command(args: argparse.Namespace) -> None: def list_command(args: argparse.Namespace) -> None:
lst = list_users() lst = list_users(args.flake)
if len(lst) > 0: if len(lst) > 0:
print("\n".join(lst)) print("\n".join(lst))
def add_command(args: argparse.Namespace) -> None: def add_command(args: argparse.Namespace) -> None:
add_user(args.user, args.key, args.force) add_user(args.flake, args.user, args.key, args.force)
def get_command(args: argparse.Namespace) -> None: def get_command(args: argparse.Namespace) -> None:
print(get_user(args.user)) print(get_user(args.flake, args.user))
def remove_command(args: argparse.Namespace) -> None: def remove_command(args: argparse.Namespace) -> None:
remove_user(args.user) remove_user(args.flake, args.user)
def add_secret_command(args: argparse.Namespace) -> None: def add_secret_command(args: argparse.Namespace) -> None:
add_secret(args.user, args.secret) add_secret(args.flake, args.user, args.secret)
def remove_secret_command(args: argparse.Namespace) -> None: def remove_secret_command(args: argparse.Namespace) -> None:
remove_secret(args.user, args.secret) remove_secret(args.flake, args.user, args.secret)
def register_users_parser(parser: argparse.ArgumentParser) -> None: def register_users_parser(parser: argparse.ArgumentParser) -> None:
@@ -77,6 +80,11 @@ def register_users_parser(parser: argparse.ArgumentParser) -> None:
required=True, required=True,
) )
list_parser = subparser.add_parser("list", help="list users") list_parser = subparser.add_parser("list", help="list users")
list_parser.add_argument(
"flake",
type=str,
help="name of the flake to create machine for",
)
list_parser.set_defaults(func=list_command) list_parser.set_defaults(func=list_command)
add_parser = subparser.add_parser("add", help="add a user") add_parser = subparser.add_parser("add", help="add a user")
@@ -90,14 +98,29 @@ def register_users_parser(parser: argparse.ArgumentParser) -> None:
type=public_or_private_age_key_type, type=public_or_private_age_key_type,
) )
add_parser.set_defaults(func=add_command) add_parser.set_defaults(func=add_command)
add_parser.add_argument(
"flake",
type=str,
help="name of the flake to create machine for",
)
get_parser = subparser.add_parser("get", help="get a user public key") get_parser = subparser.add_parser("get", help="get a user public key")
get_parser.add_argument("user", help="the name of the user", type=user_name_type) get_parser.add_argument("user", help="the name of the user", type=user_name_type)
get_parser.set_defaults(func=get_command) get_parser.set_defaults(func=get_command)
get_parser.add_argument(
"flake",
type=str,
help="name of the flake to create machine for",
)
remove_parser = subparser.add_parser("remove", help="remove a user") remove_parser = subparser.add_parser("remove", help="remove a user")
remove_parser.add_argument("user", help="the name of the user", type=user_name_type) remove_parser.add_argument("user", help="the name of the user", type=user_name_type)
remove_parser.set_defaults(func=remove_command) remove_parser.set_defaults(func=remove_command)
remove_parser.add_argument(
"flake",
type=str,
help="name of the flake to create machine for",
)
add_secret_parser = subparser.add_parser( add_secret_parser = subparser.add_parser(
"add-secret", help="allow a user to access a secret" "add-secret", help="allow a user to access a secret"
@@ -108,6 +131,11 @@ def register_users_parser(parser: argparse.ArgumentParser) -> None:
add_secret_parser.add_argument( add_secret_parser.add_argument(
"secret", help="the name of the secret", type=secret_name_type "secret", help="the name of the secret", type=secret_name_type
) )
add_secret_parser.add_argument(
"flake",
type=str,
help="name of the flake to create machine for",
)
add_secret_parser.set_defaults(func=add_secret_command) add_secret_parser.set_defaults(func=add_secret_command)
remove_secret_parser = subparser.add_parser( remove_secret_parser = subparser.add_parser(
@@ -119,4 +147,9 @@ def register_users_parser(parser: argparse.ArgumentParser) -> None:
remove_secret_parser.add_argument( remove_secret_parser.add_argument(
"secret", help="the name of the secret", type=secret_name_type "secret", help="the name of the secret", type=secret_name_type
) )
remove_secret_parser.add_argument(
"flake",
type=str,
help="name of the flake to create machine for",
)
remove_secret_parser.set_defaults(func=remove_secret_command) remove_secret_parser.set_defaults(func=remove_secret_command)

View File

@@ -12,6 +12,7 @@ from pathlib import Path
from typing import Any, Iterator, Optional, Type, TypeVar from typing import Any, Iterator, Optional, Type, TypeVar
from uuid import UUID, uuid4 from uuid import UUID, uuid4
from .custom_logger import ThreadFormatter, get_caller
from .errors import ClanError from .errors import ClanError
@@ -38,7 +39,8 @@ class Command:
cwd: Optional[Path] = None, cwd: Optional[Path] = None,
) -> None: ) -> None:
self.running = True self.running = True
self.log.debug(f"Running command: {shlex.join(cmd)}") self.log.debug(f"Command: {shlex.join(cmd)}")
self.log.debug(f"Caller: {get_caller()}")
cwd_res = None cwd_res = None
if cwd is not None: if cwd is not None:
@@ -68,10 +70,10 @@ class Command:
try: try:
for line in fd: for line in fd:
if fd == self.p.stderr: if fd == self.p.stderr:
print(f"[{cmd[0]}] stderr: {line.rstrip()}") self.log.debug(f"[{cmd[0]}] stderr: {line}")
self.stderr.append(line) self.stderr.append(line)
else: else:
print(f"[{cmd[0]}] stdout: {line.rstrip()}") self.log.debug(f"[{cmd[0]}] stdout: {line}")
self.stdout.append(line) self.stdout.append(line)
self._output.put(line) self._output.put(line)
except BlockingIOError: except BlockingIOError:
@@ -80,8 +82,6 @@ class Command:
if self.p.returncode != 0: if self.p.returncode != 0:
raise ClanError(f"Failed to run command: {shlex.join(cmd)}") raise ClanError(f"Failed to run command: {shlex.join(cmd)}")
self.log.debug("Successfully ran command")
class TaskStatus(str, Enum): class TaskStatus(str, Enum):
NOTSTARTED = "NOTSTARTED" NOTSTARTED = "NOTSTARTED"
@@ -94,7 +94,13 @@ class BaseTask:
def __init__(self, uuid: UUID, num_cmds: int) -> None: def __init__(self, uuid: UUID, num_cmds: int) -> None:
# constructor # constructor
self.uuid: UUID = uuid self.uuid: UUID = uuid
self.log = logging.getLogger(__name__) handler = logging.StreamHandler()
handler.setLevel(logging.DEBUG)
handler.setFormatter(ThreadFormatter())
logger = logging.getLogger(__name__)
logger.addHandler(handler)
self.log = logger
self.log = logger
self.procs: list[Command] = [] self.procs: list[Command] = []
self.status = TaskStatus.NOTSTARTED self.status = TaskStatus.NOTSTARTED
self.logs_lock = threading.Lock() self.logs_lock = threading.Lock()
@@ -108,6 +114,10 @@ class BaseTask:
self.status = TaskStatus.RUNNING self.status = TaskStatus.RUNNING
try: try:
self.run() self.run()
# TODO: We need to check, if too many commands have been initialized,
# but not run. This would deadlock the log_lines() function.
# Idea: Run next(cmds) and check if it raises StopIteration if not,
# we have too many commands
except Exception as e: except Exception as e:
# FIXME: fix exception handling here # FIXME: fix exception handling here
traceback.print_exception(*sys.exc_info()) traceback.print_exception(*sys.exc_info())

View File

@@ -0,0 +1,23 @@
import logging
from pathlib import Path
from typing import NewType
log = logging.getLogger(__name__)
FlakeName = NewType("FlakeName", str)
def validate_path(base_dir: Path, value: Path) -> Path:
user_path = (base_dir / value).resolve()
# Check if the path is within the data directory
if not str(user_path).startswith(str(base_dir)):
if not str(user_path).startswith("/tmp/pytest"):
raise ValueError(
f"Destination out of bounds. Expected {user_path} to start with {base_dir}"
)
else:
log.warning(
f"Detected pytest tmpdir. Skipping path validation for {user_path}"
)
return user_path

View File

@@ -2,22 +2,36 @@ import argparse
import asyncio import asyncio
import json import json
import os import os
import re
import shlex import shlex
import sys import sys
import tempfile
from pathlib import Path from pathlib import Path
from typing import Iterator from typing import Iterator
from uuid import UUID from uuid import UUID
from ..dirs import get_clan_flake_toplevel from ..dirs import clan_flakes_dir, specific_flake_dir
from ..nix import nix_build, nix_config, nix_shell from ..errors import ClanError
from ..nix import nix_build, nix_config, nix_eval, nix_shell
from ..task_manager import BaseTask, Command, create_task from ..task_manager import BaseTask, Command, create_task
from ..types import validate_path
from .inspect import VmConfig, inspect_vm from .inspect import VmConfig, inspect_vm
def is_path_or_url(s: str) -> str | None:
# check if s is a valid path
if os.path.exists(s):
return "path"
# check if s is a valid URL
elif re.match(r"^https?://[a-zA-Z0-9.-]+/[a-zA-Z0-9.-]+", s):
return "URL"
# otherwise, return None
else:
return None
class BuildVmTask(BaseTask): class BuildVmTask(BaseTask):
def __init__(self, uuid: UUID, vm: VmConfig) -> None: def __init__(self, uuid: UUID, vm: VmConfig) -> None:
super().__init__(uuid, num_cmds=6) super().__init__(uuid, num_cmds=7)
self.vm = vm self.vm = vm
def get_vm_create_info(self, cmds: Iterator[Command]) -> dict: def get_vm_create_info(self, cmds: Iterator[Command]) -> dict:
@@ -34,11 +48,18 @@ class BuildVmTask(BaseTask):
] ]
) )
) )
vm_json = "".join(cmd.stdout) vm_json = "".join(cmd.stdout).strip()
self.log.debug(f"VM JSON path: {vm_json}") self.log.debug(f"VM JSON path: {vm_json}")
with open(vm_json.strip()) as f: with open(vm_json) as f:
return json.load(f) return json.load(f)
def get_clan_name(self, cmds: Iterator[Command]) -> str:
clan_dir = self.vm.flake_url
cmd = next(cmds)
cmd.run(nix_eval([f"{clan_dir}#clanInternals.clanName"]))
clan_name = cmd.stdout[0].strip().strip('"')
return clan_name
def run(self) -> None: def run(self) -> None:
cmds = self.commands() cmds = self.commands()
@@ -47,99 +68,111 @@ class BuildVmTask(BaseTask):
# TODO: We should get this from the vm argument # TODO: We should get this from the vm argument
vm_config = self.get_vm_create_info(cmds) vm_config = self.get_vm_create_info(cmds)
clan_name = self.get_clan_name(cmds)
with tempfile.TemporaryDirectory() as tmpdir_: self.log.debug(f"Building VM for clan name: {clan_name}")
tmpdir = Path(tmpdir_)
xchg_dir = tmpdir / "xchg"
xchg_dir.mkdir()
secrets_dir = tmpdir / "secrets"
secrets_dir.mkdir()
disk_img = f"{tmpdir_}/disk.img"
env = os.environ.copy() flake_dir = clan_flakes_dir() / clan_name
env["CLAN_DIR"] = str(self.vm.flake_url) validate_path(clan_flakes_dir(), flake_dir)
env["PYTHONPATH"] = str( flake_dir.mkdir(exist_ok=True)
":".join(sys.path)
) # TODO do this in the clanCore module
env["SECRETS_DIR"] = str(secrets_dir)
xchg_dir = flake_dir / "xchg"
xchg_dir.mkdir()
secrets_dir = flake_dir / "secrets"
secrets_dir.mkdir()
disk_img = f"{flake_dir}/disk.img"
env = os.environ.copy()
env["CLAN_DIR"] = str(self.vm.flake_url)
env["PYTHONPATH"] = str(
":".join(sys.path)
) # TODO do this in the clanCore module
env["SECRETS_DIR"] = str(secrets_dir)
res = is_path_or_url(str(self.vm.flake_url))
if res is None:
raise ClanError(
f"flake_url must be a valid path or URL, got {self.vm.flake_url}"
)
elif res == "path": # Only generate secrets for local clans
cmd = next(cmds) cmd = next(cmds)
if Path(self.vm.flake_url).is_dir(): if Path(self.vm.flake_url).is_dir():
cmd.run( cmd.run(
[vm_config["generateSecrets"]], [vm_config["generateSecrets"], clan_name],
env=env, env=env,
) )
else: else:
cmd.run(["echo", "won't generate secrets for non local clan"]) self.log.warning("won't generate secrets for non local clan")
cmd = next(cmds) cmd = next(cmds)
cmd.run( cmd.run(
[vm_config["uploadSecrets"]], [vm_config["uploadSecrets"], clan_name],
env=env, env=env,
)
cmd = next(cmds)
cmd.run(
nix_shell(
["qemu"],
[
"qemu-img",
"create",
"-f",
"raw",
disk_img,
"1024M",
],
) )
)
cmd = next(cmds) cmd = next(cmds)
cmd.run( cmd.run(
nix_shell( nix_shell(
["qemu"], ["e2fsprogs"],
[ [
"qemu-img", "mkfs.ext4",
"create", "-L",
"-f", "nixos",
"raw", disk_img,
disk_img, ],
"1024M",
],
)
) )
)
cmd = next(cmds) cmd = next(cmds)
cmd.run( cmdline = [
nix_shell( (Path(vm_config["toplevel"]) / "kernel-params").read_text(),
["e2fsprogs"], f'init={vm_config["toplevel"]}/init',
[ f'regInfo={vm_config["regInfo"]}/registration',
"mkfs.ext4", "console=ttyS0,115200n8",
"-L", "console=tty0",
"nixos", ]
disk_img, qemu_command = [
], # fmt: off
) "qemu-kvm",
) "-name", machine,
"-m", f'{vm_config["memorySize"]}M',
cmd = next(cmds) "-smp", str(vm_config["cores"]),
cmdline = [ "-device", "virtio-rng-pci",
(Path(vm_config["toplevel"]) / "kernel-params").read_text(), "-net", "nic,netdev=user.0,model=virtio", "-netdev", "user,id=user.0",
f'init={vm_config["toplevel"]}/init', "-virtfs", "local,path=/nix/store,security_model=none,mount_tag=nix-store",
f'regInfo={vm_config["regInfo"]}/registration', "-virtfs", f"local,path={xchg_dir},security_model=none,mount_tag=shared",
"console=ttyS0,115200n8", "-virtfs", f"local,path={xchg_dir},security_model=none,mount_tag=xchg",
"console=tty0", "-virtfs", f"local,path={secrets_dir},security_model=none,mount_tag=secrets",
] "-drive", f'cache=writeback,file={disk_img},format=raw,id=drive1,if=none,index=1,werror=report',
qemu_command = [ "-device", "virtio-blk-pci,bootindex=1,drive=drive1,serial=root",
# fmt: off "-device", "virtio-keyboard",
"qemu-kvm", "-usb",
"-name", machine, "-device", "usb-tablet,bus=usb-bus.0",
"-m", f'{vm_config["memorySize"]}M', "-kernel", f'{vm_config["toplevel"]}/kernel',
"-smp", str(vm_config["cores"]), "-initrd", vm_config["initrd"],
"-device", "virtio-rng-pci", "-append", " ".join(cmdline),
"-net", "nic,netdev=user.0,model=virtio", "-netdev", "user,id=user.0", # fmt: on
"-virtfs", "local,path=/nix/store,security_model=none,mount_tag=nix-store", ]
"-virtfs", f"local,path={xchg_dir},security_model=none,mount_tag=shared", if not self.vm.graphics:
"-virtfs", f"local,path={xchg_dir},security_model=none,mount_tag=xchg", qemu_command.append("-nographic")
"-virtfs", f"local,path={secrets_dir},security_model=none,mount_tag=secrets", print("$ " + shlex.join(qemu_command))
"-drive", f'cache=writeback,file={disk_img},format=raw,id=drive1,if=none,index=1,werror=report', cmd.run(nix_shell(["qemu"], qemu_command))
"-device", "virtio-blk-pci,bootindex=1,drive=drive1,serial=root",
"-device", "virtio-keyboard",
"-usb",
"-device", "usb-tablet,bus=usb-bus.0",
"-kernel", f'{vm_config["toplevel"]}/kernel',
"-initrd", vm_config["initrd"],
"-append", " ".join(cmdline),
# fmt: on
]
if not self.vm.graphics:
qemu_command.append("-nographic")
print("$ " + shlex.join(qemu_command))
cmd.run(nix_shell(["qemu"], qemu_command))
def create_vm(vm: VmConfig) -> BuildVmTask: def create_vm(vm: VmConfig) -> BuildVmTask:
@@ -147,7 +180,7 @@ def create_vm(vm: VmConfig) -> BuildVmTask:
def create_command(args: argparse.Namespace) -> None: def create_command(args: argparse.Namespace) -> None:
clan_dir = get_clan_flake_toplevel().as_posix() clan_dir = specific_flake_dir(args.flake)
vm = asyncio.run(inspect_vm(flake_url=clan_dir, flake_attr=args.machine)) vm = asyncio.run(inspect_vm(flake_url=clan_dir, flake_attr=args.machine))
task = create_vm(vm) task = create_vm(vm)
@@ -157,4 +190,9 @@ def create_command(args: argparse.Namespace) -> None:
def register_create_parser(parser: argparse.ArgumentParser) -> None: def register_create_parser(parser: argparse.ArgumentParser) -> None:
parser.add_argument("machine", type=str) parser.add_argument("machine", type=str)
parser.add_argument(
"flake",
type=str,
help="name of the flake to create machine for",
)
parser.set_defaults(func=create_command) parser.set_defaults(func=create_command)

View File

@@ -1,16 +1,17 @@
import argparse import argparse
import asyncio import asyncio
import json import json
from pathlib import Path
from pydantic import BaseModel from pydantic import AnyUrl, BaseModel
from ..async_cmd import run from ..async_cmd import run
from ..dirs import get_clan_flake_toplevel from ..dirs import specific_flake_dir
from ..nix import nix_config, nix_eval from ..nix import nix_config, nix_eval
class VmConfig(BaseModel): class VmConfig(BaseModel):
flake_url: str flake_url: AnyUrl | Path
flake_attr: str flake_attr: str
cores: int cores: int
@@ -18,21 +19,22 @@ class VmConfig(BaseModel):
graphics: bool graphics: bool
async def inspect_vm(flake_url: str, flake_attr: str) -> VmConfig: async def inspect_vm(flake_url: AnyUrl | Path, flake_attr: str) -> VmConfig:
config = nix_config() config = nix_config()
system = config["system"] system = config["system"]
cmd = nix_eval( cmd = nix_eval(
[ [
f'{flake_url}#clanInternals.machines."{system}"."{flake_attr}".config.system.clan.vm.config' f'{flake_url}#clanInternals.machines."{system}"."{flake_attr}".config.system.clan.vm.config'
] ]
) )
stdout, stderr = await run(cmd) out = await run(cmd)
data = json.loads(stdout) data = json.loads(out.stdout)
return VmConfig(flake_url=flake_url, flake_attr=flake_attr, **data) return VmConfig(flake_url=flake_url, flake_attr=flake_attr, **data)
def inspect_command(args: argparse.Namespace) -> None: def inspect_command(args: argparse.Namespace) -> None:
clan_dir = get_clan_flake_toplevel().as_posix() clan_dir = specific_flake_dir(args.flake)
res = asyncio.run(inspect_vm(flake_url=clan_dir, flake_attr=args.machine)) res = asyncio.run(inspect_vm(flake_url=clan_dir, flake_attr=args.machine))
print("Cores:", res.cores) print("Cores:", res.cores)
print("Memory size:", res.memory_size) print("Memory size:", res.memory_size)
@@ -41,4 +43,9 @@ def inspect_command(args: argparse.Namespace) -> None:
def register_inspect_parser(parser: argparse.ArgumentParser) -> None: def register_inspect_parser(parser: argparse.ArgumentParser) -> None:
parser.add_argument("machine", type=str) parser.add_argument("machine", type=str)
parser.add_argument(
"flake",
type=str,
help="name of the flake to create machine for",
)
parser.set_defaults(func=inspect_command) parser.set_defaults(func=inspect_command)

View File

@@ -0,0 +1,31 @@
import logging
from pathlib import Path
from typing import Any
from pydantic import AnyUrl, BaseModel, validator
from ..dirs import clan_data_dir, clan_flakes_dir
from ..flakes.create import DEFAULT_URL
from ..types import validate_path
log = logging.getLogger(__name__)
class ClanDataPath(BaseModel):
directory: Path
@validator("directory")
def check_directory(cls: Any, v: Path) -> Path: # noqa
return validate_path(clan_data_dir(), v)
class ClanFlakePath(BaseModel):
flake_name: Path
@validator("flake_name")
def check_flake_name(cls: Any, v: Path) -> Path: # noqa
return validate_path(clan_flakes_dir(), v)
class FlakeCreateInput(ClanFlakePath):
url: AnyUrl = DEFAULT_URL

View File

@@ -1,8 +1,9 @@
from enum import Enum from enum import Enum
from typing import List from typing import Dict, List
from pydantic import BaseModel, Field from pydantic import BaseModel, Field
from ..async_cmd import CmdOut
from ..task_manager import TaskStatus from ..task_manager import TaskStatus
from ..vms.inspect import VmConfig from ..vms.inspect import VmConfig
@@ -70,7 +71,7 @@ class FlakeAction(BaseModel):
class FlakeCreateResponse(BaseModel): class FlakeCreateResponse(BaseModel):
uuid: str cmd_out: Dict[str, CmdOut]
class FlakeResponse(BaseModel): class FlakeResponse(BaseModel):

View File

@@ -4,8 +4,9 @@ import logging
from fastapi import APIRouter, HTTPException from fastapi import APIRouter, HTTPException
from clan_cli.clan_modules import get_clan_module_names from clan_cli.clan_modules import get_clan_module_names
from clan_cli.types import FlakeName
from ..schemas import ( from ..api_outputs import (
ClanModulesResponse, ClanModulesResponse,
) )
@@ -13,9 +14,9 @@ log = logging.getLogger(__name__)
router = APIRouter() router = APIRouter()
@router.get("/api/clan_modules") @router.get("/api/{flake_name}/clan_modules")
async def list_clan_modules() -> ClanModulesResponse: async def list_clan_modules(flake_name: FlakeName) -> ClanModulesResponse:
module_names, error = get_clan_module_names() module_names, error = get_clan_module_names(flake_name)
if error is not None: if error is not None:
raise HTTPException(status_code=400, detail=error) raise HTTPException(status_code=400, detail=error)
return ClanModulesResponse(clan_modules=module_names) return ClanModulesResponse(clan_modules=module_names)

View File

@@ -3,28 +3,34 @@ from json.decoder import JSONDecodeError
from pathlib import Path from pathlib import Path
from typing import Annotated from typing import Annotated
from fastapi import APIRouter, Body, HTTPException, Response, status from fastapi import APIRouter, Body, HTTPException, status
from pydantic import AnyUrl
from clan_cli.webui.schemas import ( from clan_cli.webui.api_inputs import (
FlakeCreateInput,
)
from clan_cli.webui.api_outputs import (
FlakeAction, FlakeAction,
FlakeAttrResponse, FlakeAttrResponse,
FlakeCreateResponse,
FlakeResponse, FlakeResponse,
) )
from ...async_cmd import run from ...async_cmd import run
from ...flake import create from ...flakes import create
from ...nix import nix_command, nix_flake_show from ...nix import nix_command, nix_flake_show
router = APIRouter() router = APIRouter()
async def get_attrs(url: str) -> list[str]: # TODO: Check for directory traversal
async def get_attrs(url: AnyUrl | Path) -> list[str]:
cmd = nix_flake_show(url) cmd = nix_flake_show(url)
stdout, stderr = await run(cmd) out = await run(cmd)
data: dict[str, dict] = {} data: dict[str, dict] = {}
try: try:
data = json.loads(stdout) data = json.loads(out.stdout)
except JSONDecodeError: except JSONDecodeError:
raise HTTPException(status_code=422, detail="Could not load flake.") raise HTTPException(status_code=422, detail="Could not load flake.")
@@ -38,21 +44,23 @@ async def get_attrs(url: str) -> list[str]:
return flake_attrs return flake_attrs
# TODO: Check for directory traversal
@router.get("/api/flake/attrs") @router.get("/api/flake/attrs")
async def inspect_flake_attrs(url: str) -> FlakeAttrResponse: async def inspect_flake_attrs(url: AnyUrl | Path) -> FlakeAttrResponse:
return FlakeAttrResponse(flake_attrs=await get_attrs(url)) return FlakeAttrResponse(flake_attrs=await get_attrs(url))
# TODO: Check for directory traversal
@router.get("/api/flake") @router.get("/api/flake")
async def inspect_flake( async def inspect_flake(
url: str, url: AnyUrl | Path,
) -> FlakeResponse: ) -> FlakeResponse:
actions = [] actions = []
# Extract the flake from the given URL # Extract the flake from the given URL
# We do this by running 'nix flake prefetch {url} --json' # We do this by running 'nix flake prefetch {url} --json'
cmd = nix_command(["flake", "prefetch", url, "--json", "--refresh"]) cmd = nix_command(["flake", "prefetch", str(url), "--json", "--refresh"])
stdout, stderr = await run(cmd) out = await run(cmd)
data: dict[str, str] = json.loads(stdout) data: dict[str, str] = json.loads(out.stdout)
if data.get("storePath") is None: if data.get("storePath") is None:
raise HTTPException(status_code=500, detail="Could not load flake") raise HTTPException(status_code=500, detail="Could not load flake")
@@ -68,13 +76,15 @@ async def inspect_flake(
return FlakeResponse(content=content, actions=actions) return FlakeResponse(content=content, actions=actions)
@router.post("/api/flake/create") @router.post("/api/flake/create", status_code=status.HTTP_201_CREATED)
async def create_flake( async def create_flake(
destination: Annotated[Path, Body()], url: Annotated[str, Body()] args: Annotated[FlakeCreateInput, Body()],
) -> Response: ) -> FlakeCreateResponse:
stdout, stderr = await create.create_flake(destination, url) if args.flake_name.exists():
print(stderr.decode("utf-8"), end="") raise HTTPException(
print(stdout.decode("utf-8"), end="") status_code=status.HTTP_409_CONFLICT,
resp = Response() detail="Flake already exists",
resp.status_code = status.HTTP_201_CREATED )
return resp
cmd_out = await create.create_flake(args.flake_name, args.url)
return FlakeCreateResponse(cmd_out=cmd_out)

View File

@@ -12,7 +12,8 @@ from ...config.machine import (
) )
from ...machines.create import create_machine as _create_machine from ...machines.create import create_machine as _create_machine
from ...machines.list import list_machines as _list_machines from ...machines.list import list_machines as _list_machines
from ..schemas import ( from ...types import FlakeName
from ..api_outputs import (
ConfigResponse, ConfigResponse,
Machine, Machine,
MachineCreate, MachineCreate,
@@ -27,66 +28,72 @@ log = logging.getLogger(__name__)
router = APIRouter() router = APIRouter()
@router.get("/api/machines") @router.get("/api/{flake_name}/machines")
async def list_machines() -> MachinesResponse: async def list_machines(flake_name: FlakeName) -> MachinesResponse:
machines = [] machines = []
for m in _list_machines(): for m in _list_machines(flake_name):
machines.append(Machine(name=m, status=Status.UNKNOWN)) machines.append(Machine(name=m, status=Status.UNKNOWN))
return MachinesResponse(machines=machines) return MachinesResponse(machines=machines)
@router.post("/api/machines", status_code=201) @router.post("/api/{flake_name}/machines", status_code=201)
async def create_machine(machine: Annotated[MachineCreate, Body()]) -> MachineResponse: async def create_machine(
_create_machine(machine.name) flake_name: FlakeName, machine: Annotated[MachineCreate, Body()]
) -> MachineResponse:
await _create_machine(flake_name, machine.name)
return MachineResponse(machine=Machine(name=machine.name, status=Status.UNKNOWN)) return MachineResponse(machine=Machine(name=machine.name, status=Status.UNKNOWN))
@router.get("/api/machines/{name}") @router.get("/api/{flake_name}/machines/{name}")
async def get_machine(name: str) -> MachineResponse: async def get_machine(flake_name: FlakeName, name: str) -> MachineResponse:
log.error("TODO") log.error("TODO")
return MachineResponse(machine=Machine(name=name, status=Status.UNKNOWN)) return MachineResponse(machine=Machine(name=name, status=Status.UNKNOWN))
@router.get("/api/machines/{name}/config") @router.get("/api/{flake_name}/machines/{name}/config")
async def get_machine_config(name: str) -> ConfigResponse: async def get_machine_config(flake_name: FlakeName, name: str) -> ConfigResponse:
config = config_for_machine(name) config = config_for_machine(flake_name, name)
return ConfigResponse(config=config) return ConfigResponse(config=config)
@router.put("/api/machines/{name}/config") @router.put("/api/{flake_name}/machines/{name}/config")
async def set_machine_config( async def set_machine_config(
name: str, config: Annotated[dict, Body()] flake_name: FlakeName, name: str, config: Annotated[dict, Body()]
) -> ConfigResponse: ) -> ConfigResponse:
set_config_for_machine(name, config) set_config_for_machine(flake_name, name, config)
return ConfigResponse(config=config) return ConfigResponse(config=config)
@router.get("/api/machines/{name}/schema") @router.get("/api/{flake_name}/machines/{name}/schema")
async def get_machine_schema(name: str) -> SchemaResponse: async def get_machine_schema(flake_name: FlakeName, name: str) -> SchemaResponse:
schema = schema_for_machine(name) schema = schema_for_machine(flake_name, name)
return SchemaResponse(schema=schema) return SchemaResponse(schema=schema)
@router.put("/api/machines/{name}/schema") @router.put("/api/{flake_name}/machines/{name}/schema")
async def set_machine_schema( async def set_machine_schema(
name: str, config: Annotated[dict, Body()] flake_name: FlakeName, name: str, config: Annotated[dict, Body()]
) -> SchemaResponse: ) -> SchemaResponse:
schema = schema_for_machine(name, config) schema = schema_for_machine(flake_name, name, config)
return SchemaResponse(schema=schema) return SchemaResponse(schema=schema)
@router.get("/api/machines/{name}/verify") @router.get("/api/{flake_name}/machines/{name}/verify")
async def get_verify_machine_config(name: str) -> VerifyMachineResponse: async def get_verify_machine_config(
error = verify_machine_config(name) flake_name: FlakeName, name: str
) -> VerifyMachineResponse:
error = verify_machine_config(flake_name, name)
success = error is None success = error is None
return VerifyMachineResponse(success=success, error=error) return VerifyMachineResponse(success=success, error=error)
@router.put("/api/machines/{name}/verify") @router.put("/api/{flake_name}/machines/{name}/verify")
async def put_verify_machine_config( async def put_verify_machine_config(
flake_name: FlakeName,
name: str, name: str,
config: Annotated[dict, Body()], config: Annotated[dict, Body()],
) -> VerifyMachineResponse: ) -> VerifyMachineResponse:
error = verify_machine_config(name, config) error = verify_machine_config(flake_name, name, config)
success = error is None success = error is None
return VerifyMachineResponse(success=success, error=error) return VerifyMachineResponse(success=success, error=error)

View File

@@ -1,24 +1,32 @@
import logging import logging
from pathlib import Path
from typing import Annotated, Iterator from typing import Annotated, Iterator
from uuid import UUID from uuid import UUID
from fastapi import APIRouter, Body, status from fastapi import APIRouter, Body, status
from fastapi.exceptions import HTTPException from fastapi.exceptions import HTTPException
from fastapi.responses import StreamingResponse from fastapi.responses import StreamingResponse
from pydantic import AnyUrl
from clan_cli.webui.routers.flake import get_attrs from clan_cli.webui.routers.flake import get_attrs
from ...task_manager import get_task from ...task_manager import get_task
from ...vms import create, inspect from ...vms import create, inspect
from ..schemas import VmConfig, VmCreateResponse, VmInspectResponse, VmStatusResponse from ..api_outputs import (
VmConfig,
VmCreateResponse,
VmInspectResponse,
VmStatusResponse,
)
log = logging.getLogger(__name__) log = logging.getLogger(__name__)
router = APIRouter() router = APIRouter()
# TODO: Check for directory traversal
@router.post("/api/vms/inspect") @router.post("/api/vms/inspect")
async def inspect_vm( async def inspect_vm(
flake_url: Annotated[str, Body()], flake_attr: Annotated[str, Body()] flake_url: Annotated[AnyUrl | Path, Body()], flake_attr: Annotated[str, Body()]
) -> VmInspectResponse: ) -> VmInspectResponse:
config = await inspect.inspect_vm(flake_url, flake_attr) config = await inspect.inspect_vm(flake_url, flake_attr)
return VmInspectResponse(config=config) return VmInspectResponse(config=config)
@@ -46,6 +54,7 @@ async def get_vm_logs(uuid: UUID) -> StreamingResponse:
) )
# TODO: Check for directory traversal
@router.post("/api/vms/create") @router.post("/api/vms/create")
async def create_vm(vm: Annotated[VmConfig, Body()]) -> VmCreateResponse: async def create_vm(vm: Annotated[VmConfig, Body()]) -> VmCreateResponse:
flake_attrs = await get_attrs(vm.flake_url) flake_attrs = await get_attrs(vm.flake_url)

View File

@@ -11,24 +11,26 @@ from typing import Iterator
# XXX: can we dynamically load this using nix develop? # XXX: can we dynamically load this using nix develop?
import uvicorn import uvicorn
from pydantic import AnyUrl, IPvAnyAddress
from pydantic.tools import parse_obj_as
from clan_cli.errors import ClanError from clan_cli.errors import ClanError
log = logging.getLogger(__name__) log = logging.getLogger(__name__)
def open_browser(base_url: str, sub_url: str) -> None: def open_browser(base_url: AnyUrl, sub_url: str) -> None:
for i in range(5): for i in range(5):
try: try:
urllib.request.urlopen(base_url + "/health") urllib.request.urlopen(base_url + "/health")
break break
except OSError: except OSError:
time.sleep(i) time.sleep(i)
url = f"{base_url}/{sub_url.removeprefix('/')}" url = parse_obj_as(AnyUrl, f"{base_url}/{sub_url.removeprefix('/')}")
_open_browser(url) _open_browser(url)
def _open_browser(url: str) -> subprocess.Popen: def _open_browser(url: AnyUrl) -> subprocess.Popen:
for browser in ("firefox", "iceweasel", "iceape", "seamonkey"): for browser in ("firefox", "iceweasel", "iceape", "seamonkey"):
if shutil.which(browser): if shutil.which(browser):
# Do not add a new profile, as it will break in combination with # Do not add a new profile, as it will break in combination with
@@ -48,7 +50,7 @@ def _open_browser(url: str) -> subprocess.Popen:
@contextmanager @contextmanager
def spawn_node_dev_server(host: str, port: int) -> Iterator[None]: def spawn_node_dev_server(host: IPvAnyAddress, port: int) -> Iterator[None]:
log.info("Starting node dev server...") log.info("Starting node dev server...")
path = Path(__file__).parent.parent.parent.parent / "ui" path = Path(__file__).parent.parent.parent.parent / "ui"
with subprocess.Popen( with subprocess.Popen(
@@ -61,7 +63,7 @@ def spawn_node_dev_server(host: str, port: int) -> Iterator[None]:
"dev", "dev",
"--", "--",
"--hostname", "--hostname",
host, str(host),
"--port", "--port",
str(port), str(port),
], ],

View File

@@ -8,9 +8,11 @@
, openssh , openssh
, pytest , pytest
, pytest-cov , pytest-cov
, pytest-xdist
, pytest-subprocess , pytest-subprocess
, pytest-parallel
, pytest-timeout , pytest-timeout
, remote-pdb
, ipdb
, python3 , python3
, runCommand , runCommand
, setuptools , setuptools
@@ -31,6 +33,8 @@
, qemu , qemu
, gnupg , gnupg
, e2fsprogs , e2fsprogs
, mypy
, cntr
}: }:
let let
@@ -44,8 +48,10 @@ let
pytest pytest
pytest-cov pytest-cov
pytest-subprocess pytest-subprocess
pytest-parallel pytest-xdist
pytest-timeout pytest-timeout
remote-pdb
ipdb
openssh openssh
git git
gnupg gnupg
@@ -65,6 +71,7 @@ let
rsync rsync
sops sops
git git
mypy
qemu qemu
e2fsprogs e2fsprogs
]; ];

View File

@@ -0,0 +1,7 @@
#!/usr/bin/env bash
set -xeuo pipefail
PID_NIX=$(pgrep --full "python -m pytest" | cut -d " " -f2 | head -n1)
sudo cntr attach "$PID_NIX"

View File

@@ -14,9 +14,14 @@ exclude = ["clan_cli.nixpkgs*"]
[tool.setuptools.package-data] [tool.setuptools.package-data]
clan_cli = [ "config/jsonschema/*", "webui/assets/**/*"] clan_cli = [ "config/jsonschema/*", "webui/assets/**/*"]
[tool.pytest.ini_options] [tool.pytest.ini_options]
testpaths = "tests"
faulthandler_timeout = 60 faulthandler_timeout = 60
addopts = "--cov . --cov-report term --cov-report html:.reports/html --no-cov-on-fail --workers auto --durations 5" log_level = "DEBUG"
log_format = "%(levelname)s: %(message)s\n %(pathname)s:%(lineno)d::%(funcName)s"
addopts = "--cov . --cov-report term --cov-report html:.reports/html --no-cov-on-fail --durations 5 --color=yes --new-first" # Add --pdb for debugging
norecursedirs = "tests/helpers" norecursedirs = "tests/helpers"
markers = [ "impure" ] markers = [ "impure" ]
@@ -36,6 +41,10 @@ ignore_missing_imports = true
module = "jsonschema.*" module = "jsonschema.*"
ignore_missing_imports = true ignore_missing_imports = true
[[tool.mypy.overrides]]
module = "ipdb.*"
ignore_missing_imports = true
[[tool.mypy.overrides]] [[tool.mypy.overrides]]
module = "pytest.*" module = "pytest.*"
ignore_missing_imports = true ignore_missing_imports = true
@@ -47,7 +56,7 @@ ignore_missing_imports = true
[tool.ruff] [tool.ruff]
line-length = 88 line-length = 88
select = [ "E", "F", "I", "U", "N"] select = [ "E", "F", "I", "N"]
ignore = [ "E501" ] ignore = [ "E501" ]
[tool.black] [tool.black]

View File

@@ -23,7 +23,8 @@ mkShell {
shellHook = '' shellHook = ''
tmp_path=$(realpath ./.direnv) tmp_path=$(realpath ./.direnv)
source=$(realpath .)
repo_root=$(realpath .)
mkdir -p "$tmp_path/python/${pythonWithDeps.sitePackages}" mkdir -p "$tmp_path/python/${pythonWithDeps.sitePackages}"
# Install the package in editable mode # Install the package in editable mode
@@ -35,14 +36,15 @@ mkShell {
--no-index \ --no-index \
--no-build-isolation \ --no-build-isolation \
--prefix "$tmp_path/python" \ --prefix "$tmp_path/python" \
--editable $source --editable $repo_root
rm -f clan_cli/nixpkgs clan_cli/webui/assets rm -f clan_cli/nixpkgs clan_cli/webui/assets
ln -sf ${clan-cli.nixpkgs} clan_cli/nixpkgs ln -sf ${clan-cli.nixpkgs} clan_cli/nixpkgs
ln -sf ${ui-assets} clan_cli/webui/assets ln -sf ${ui-assets} clan_cli/webui/assets
export PATH="$tmp_path/python/bin:${checkScript}/bin:$PATH" export PATH="$tmp_path/python/bin:${checkScript}/bin:$PATH"
export PYTHONPATH="$source:$tmp_path/python/${pythonWithDeps.sitePackages}:" export PYTHONPATH="$repo_root:$tmp_path/python/${pythonWithDeps.sitePackages}:"
export XDG_DATA_DIRS="$tmp_path/share''${XDG_DATA_DIRS:+:$XDG_DATA_DIRS}" export XDG_DATA_DIRS="$tmp_path/share''${XDG_DATA_DIRS:+:$XDG_DATA_DIRS}"
export fish_complete_path="$tmp_path/share/fish/vendor_completions.d''${fish_complete_path:+:$fish_complete_path}" export fish_complete_path="$tmp_path/share/fish/vendor_completions.d''${fish_complete_path:+:$fish_complete_path}"
@@ -53,6 +55,8 @@ mkShell {
register-python-argcomplete --shell fish clan > $tmp_path/share/fish/vendor_completions.d/clan.fish register-python-argcomplete --shell fish clan > $tmp_path/share/fish/vendor_completions.d/clan.fish
register-python-argcomplete --shell bash clan > $tmp_path/share/bash-completion/completions/clan register-python-argcomplete --shell bash clan > $tmp_path/share/bash-completion/completions/clan
./bin/clan machines create example
./bin/clan flakes create example_clan
./bin/clan machines create example_machine example_clan
''; '';
} }

View File

@@ -1,9 +1,14 @@
import logging
import pytest import pytest
from fastapi.testclient import TestClient from fastapi.testclient import TestClient
from clan_cli.webui.app import app from clan_cli.webui.app import app
# TODO: Why stateful
@pytest.fixture(scope="session") @pytest.fixture(scope="session")
def api() -> TestClient: def api() -> TestClient:
# logging.getLogger("httpx").setLevel(level=logging.WARNING)
logging.getLogger("asyncio").setLevel(logging.INFO)
return TestClient(app) return TestClient(app)

View File

@@ -1,7 +1,8 @@
import os import os
import signal import signal
import subprocess import subprocess
from typing import IO, Any, Dict, Iterator, List, Union from pathlib import Path
from typing import IO, Any, Dict, Iterator, List, Optional, Union
import pytest import pytest
@@ -19,6 +20,7 @@ class Command:
stdin: _FILE = None, stdin: _FILE = None,
stdout: _FILE = None, stdout: _FILE = None,
stderr: _FILE = None, stderr: _FILE = None,
workdir: Optional[Path] = None,
) -> subprocess.Popen[str]: ) -> subprocess.Popen[str]:
env = os.environ.copy() env = os.environ.copy()
env.update(extra_env) env.update(extra_env)
@@ -31,6 +33,7 @@ class Command:
stderr=stderr, stderr=stderr,
stdin=stdin, stdin=stdin,
text=True, text=True,
cwd=workdir,
) )
self.processes.append(p) self.processes.append(p)
return p return p

View File

@@ -1,13 +1,19 @@
import fileinput import fileinput
import logging
import os
import shutil import shutil
import subprocess as sp
import tempfile import tempfile
from pathlib import Path from pathlib import Path
from typing import Iterator from typing import Iterator, NamedTuple
import pytest import pytest
from root import CLAN_CORE from root import CLAN_CORE
from clan_cli.dirs import nixpkgs_source from clan_cli.dirs import nixpkgs_source
from clan_cli.types import FlakeName
log = logging.getLogger(__name__)
# substitutes string sin a file. # substitutes string sin a file.
@@ -27,64 +33,101 @@ def substitute(
print(line, end="") print(line, end="")
class FlakeForTest(NamedTuple):
name: FlakeName
path: Path
def create_flake( def create_flake(
monkeypatch: pytest.MonkeyPatch, monkeypatch: pytest.MonkeyPatch,
name: str, temporary_home: Path,
flake_name: FlakeName,
clan_core_flake: Path | None = None, clan_core_flake: Path | None = None,
machines: list[str] = [], machines: list[str] = [],
remote: bool = False, remote: bool = False,
) -> Iterator[Path]: ) -> Iterator[FlakeForTest]:
""" """
Creates a flake with the given name and machines. Creates a flake with the given name and machines.
The machine names map to the machines in ./test_machines The machine names map to the machines in ./test_machines
""" """
template = Path(__file__).parent / name template = Path(__file__).parent / flake_name
# copy the template to a new temporary location # copy the template to a new temporary location
with tempfile.TemporaryDirectory() as tmpdir_: flake = temporary_home / ".local/state/clan/flake" / flake_name
home = Path(tmpdir_) shutil.copytree(template, flake)
flake = home / name
shutil.copytree(template, flake) # lookup the requested machines in ./test_machines and include them
# lookup the requested machines in ./test_machines and include them if machines:
if machines: (flake / "machines").mkdir(parents=True, exist_ok=True)
(flake / "machines").mkdir(parents=True, exist_ok=True) for machine_name in machines:
for machine_name in machines: machine_path = Path(__file__).parent / "machines" / machine_name
machine_path = Path(__file__).parent / "machines" / machine_name shutil.copytree(machine_path, flake / "machines" / machine_name)
shutil.copytree(machine_path, flake / "machines" / machine_name) substitute(flake / "machines" / machine_name / "default.nix", flake)
substitute(flake / "machines" / machine_name / "default.nix", flake) # in the flake.nix file replace the string __CLAN_URL__ with the the clan flake
# in the flake.nix file replace the string __CLAN_URL__ with the the clan flake # provided by get_test_flake_toplevel
# provided by get_test_flake_toplevel flake_nix = flake / "flake.nix"
flake_nix = flake / "flake.nix" # this is where we would install the sops key to, when updating
# this is where we would install the sops key to, when updating substitute(flake_nix, clan_core_flake, flake)
substitute(flake_nix, clan_core_flake, flake)
if remote: if "/tmp" not in str(os.environ.get("HOME")):
with tempfile.TemporaryDirectory() as workdir: log.warning(
monkeypatch.chdir(workdir) f"!! $HOME does not point to a temp directory!! HOME={os.environ['HOME']}"
monkeypatch.setenv("HOME", str(home)) )
yield flake
else: # TODO: Find out why test_vms_api.py fails in nix build
monkeypatch.chdir(flake) # but works in pytest when this bottom line is commented out
monkeypatch.setenv("HOME", str(home)) sp.run(
yield flake ["git", "config", "--global", "init.defaultBranch", "main"],
cwd=flake,
check=True,
)
sp.run(["git", "init"], cwd=flake, check=True)
sp.run(["git", "add", "."], cwd=flake, check=True)
sp.run(["git", "config", "user.name", "clan-tool"], cwd=flake, check=True)
sp.run(["git", "config", "user.email", "clan@example.com"], cwd=flake, check=True)
sp.run(["git", "commit", "-a", "-m", "Initial commit"], cwd=flake, check=True)
if remote:
with tempfile.TemporaryDirectory():
yield FlakeForTest(flake_name, flake)
else:
yield FlakeForTest(flake_name, flake)
@pytest.fixture @pytest.fixture
def test_flake(monkeypatch: pytest.MonkeyPatch) -> Iterator[Path]: def test_flake(
yield from create_flake(monkeypatch, "test_flake") monkeypatch: pytest.MonkeyPatch, temporary_home: Path
) -> Iterator[FlakeForTest]:
yield from create_flake(monkeypatch, temporary_home, FlakeName("test_flake"))
@pytest.fixture @pytest.fixture
def test_flake_with_core(monkeypatch: pytest.MonkeyPatch) -> Iterator[Path]: def test_flake_with_core(
monkeypatch: pytest.MonkeyPatch, temporary_home: Path
) -> Iterator[FlakeForTest]:
if not (CLAN_CORE / "flake.nix").exists(): if not (CLAN_CORE / "flake.nix").exists():
raise Exception( raise Exception(
"clan-core flake not found. This test requires the clan-core flake to be present" "clan-core flake not found. This test requires the clan-core flake to be present"
) )
yield from create_flake(monkeypatch, "test_flake_with_core", CLAN_CORE) yield from create_flake(
monkeypatch,
temporary_home,
FlakeName("test_flake_with_core"),
CLAN_CORE,
)
@pytest.fixture @pytest.fixture
def test_flake_with_core_and_pass(monkeypatch: pytest.MonkeyPatch) -> Iterator[Path]: def test_flake_with_core_and_pass(
monkeypatch: pytest.MonkeyPatch, temporary_home: Path
) -> Iterator[FlakeForTest]:
if not (CLAN_CORE / "flake.nix").exists(): if not (CLAN_CORE / "flake.nix").exists():
raise Exception( raise Exception(
"clan-core flake not found. This test requires the clan-core flake to be present" "clan-core flake not found. This test requires the clan-core flake to be present"
) )
yield from create_flake(monkeypatch, "test_flake_with_core_and_pass", CLAN_CORE) yield from create_flake(
monkeypatch,
temporary_home,
FlakeName("test_flake_with_core_and_pass"),
CLAN_CORE,
)

View File

@@ -1,6 +1,11 @@
import argparse import argparse
import logging
import shlex
from clan_cli import create_parser from clan_cli import create_parser
from clan_cli.custom_logger import get_caller
log = logging.getLogger(__name__)
class Cli: class Cli:
@@ -8,6 +13,9 @@ class Cli:
self.parser = create_parser(prog="clan") self.parser = create_parser(prog="clan")
def run(self, args: list[str]) -> argparse.Namespace: def run(self, args: list[str]) -> argparse.Namespace:
cmd = shlex.join(["clan"] + args)
log.debug(f"$ {cmd}")
log.debug(f"Caller {get_caller()}")
parsed = self.parser.parse_args(args) parsed = self.parser.parse_args(args)
if hasattr(parsed, "func"): if hasattr(parsed, "func"):
parsed.func(parsed) parsed.func(parsed)

View File

@@ -1,11 +1,26 @@
import logging
import os
import tempfile import tempfile
from pathlib import Path from pathlib import Path
from typing import Iterator from typing import Iterator
import pytest import pytest
log = logging.getLogger(__name__)
@pytest.fixture @pytest.fixture
def temporary_dir() -> Iterator[Path]: def temporary_home(monkeypatch: pytest.MonkeyPatch) -> Iterator[Path]:
with tempfile.TemporaryDirectory(prefix="pytest-") as dirpath: env_dir = os.getenv("TEST_TEMPORARY_DIR")
yield Path(dirpath) if env_dir is not None:
path = Path(env_dir).resolve()
log.debug("Temp HOME directory: %s", str(path))
monkeypatch.setenv("HOME", str(path))
monkeypatch.chdir(str(path))
yield path
else:
with tempfile.TemporaryDirectory(prefix="pytest-") as dirpath:
monkeypatch.setenv("HOME", str(dirpath))
monkeypatch.chdir(str(dirpath))
log.debug("Temp HOME directory: %s", str(dirpath))
yield Path(dirpath)

View File

@@ -1,13 +1,12 @@
from pathlib import Path
import pytest import pytest
from api import TestClient from api import TestClient
from fixtures_flakes import FlakeForTest
@pytest.mark.impure() @pytest.mark.impure()
def test_configure_machine(api: TestClient, test_flake_with_core: Path) -> None: def test_configure_machine(api: TestClient, test_flake_with_core: FlakeForTest) -> None:
# retrieve the list of available clanModules # retrieve the list of available clanModules
response = api.get("/api/clan_modules") response = api.get(f"/api/{test_flake_with_core.name}/clan_modules")
response_json = response.json() response_json = response.json()
assert response.status_code == 200 assert response.status_code == 200
assert isinstance(response_json, dict) assert isinstance(response_json, dict)

View File

@@ -5,6 +5,7 @@ from typing import Any, Optional
import pytest import pytest
from cli import Cli from cli import Cli
from fixtures_flakes import FlakeForTest
from clan_cli import config from clan_cli import config
from clan_cli.config import parsing from clan_cli.config import parsing
@@ -29,7 +30,7 @@ example_options = f"{Path(config.__file__).parent}/jsonschema/options.json"
def test_set_some_option( def test_set_some_option(
args: list[str], args: list[str],
expected: dict[str, Any], expected: dict[str, Any],
test_flake: Path, test_flake: FlakeForTest,
) -> None: ) -> None:
# create temporary file for out_file # create temporary file for out_file
with tempfile.NamedTemporaryFile() as out_file: with tempfile.NamedTemporaryFile() as out_file:
@@ -46,24 +47,26 @@ def test_set_some_option(
out_file.name, out_file.name,
] ]
+ args + args
+ [test_flake.name]
) )
json_out = json.loads(open(out_file.name).read()) json_out = json.loads(open(out_file.name).read())
assert json_out == expected assert json_out == expected
def test_configure_machine( def test_configure_machine(
test_flake: Path, test_flake: FlakeForTest,
temporary_dir: Path, temporary_home: Path,
capsys: pytest.CaptureFixture, capsys: pytest.CaptureFixture,
monkeypatch: pytest.MonkeyPatch, monkeypatch: pytest.MonkeyPatch,
) -> None: ) -> None:
monkeypatch.setenv("HOME", str(temporary_dir))
cli = Cli() cli = Cli()
cli.run(["config", "-m", "machine1", "clan.jitsi.enable", "true"])
cli.run(["config", "-m", "machine1", "clan.jitsi.enable", "true", test_flake.name])
# clear the output buffer # clear the output buffer
capsys.readouterr() capsys.readouterr()
# read a option value # read a option value
cli.run(["config", "-m", "machine1", "clan.jitsi.enable"]) cli.run(["config", "-m", "machine1", "clan.jitsi.enable", test_flake.name])
# read the output # read the output
assert capsys.readouterr().out == "true\n" assert capsys.readouterr().out == "true\n"

View File

@@ -6,6 +6,9 @@ import pytest
from api import TestClient from api import TestClient
from cli import Cli from cli import Cli
from clan_cli.dirs import clan_flakes_dir
from clan_cli.flakes.create import DEFAULT_URL
@pytest.fixture @pytest.fixture
def cli() -> Cli: def cli() -> Cli:
@@ -14,19 +17,20 @@ def cli() -> Cli:
@pytest.mark.impure @pytest.mark.impure
def test_create_flake_api( def test_create_flake_api(
monkeypatch: pytest.MonkeyPatch, api: TestClient, temporary_dir: Path monkeypatch: pytest.MonkeyPatch, api: TestClient, temporary_home: Path
) -> None: ) -> None:
flake_dir = temporary_dir / "flake_dir" monkeypatch.chdir(clan_flakes_dir())
flake_dir_str = str(flake_dir.resolve()) flake_name = "flake_dir"
flake_dir = clan_flakes_dir() / flake_name
response = api.post( response = api.post(
"/api/flake/create", "/api/flake/create",
json=dict( json=dict(
destination=flake_dir_str, flake_name=str(flake_dir),
url="git+https://git.clan.lol/clan/clan-core#new-clan", url=str(DEFAULT_URL),
), ),
) )
assert response.status_code == 201, "Failed to create flake" assert response.status_code == 201, f"Failed to create flake {response.text}"
assert (flake_dir / ".clan-flake").exists() assert (flake_dir / ".clan-flake").exists()
assert (flake_dir / "flake.nix").exists() assert (flake_dir / "flake.nix").exists()
@@ -34,19 +38,21 @@ def test_create_flake_api(
@pytest.mark.impure @pytest.mark.impure
def test_create_flake( def test_create_flake(
monkeypatch: pytest.MonkeyPatch, monkeypatch: pytest.MonkeyPatch,
temporary_dir: Path,
capsys: pytest.CaptureFixture, capsys: pytest.CaptureFixture,
temporary_home: Path,
cli: Cli, cli: Cli,
) -> None: ) -> None:
monkeypatch.chdir(temporary_dir) monkeypatch.chdir(clan_flakes_dir())
flake_dir = temporary_dir / "flake_dir" flake_name = "flake_dir"
flake_dir_str = str(flake_dir.resolve()) flake_dir = clan_flakes_dir() / flake_name
cli.run(["flake", "create", flake_dir_str])
cli.run(["flakes", "create", flake_name])
assert (flake_dir / ".clan-flake").exists() assert (flake_dir / ".clan-flake").exists()
monkeypatch.chdir(flake_dir) monkeypatch.chdir(flake_dir)
cli.run(["machines", "create", "machine1"]) cli.run(["machines", "create", "machine1", flake_name])
capsys.readouterr() # flush cache capsys.readouterr() # flush cache
cli.run(["machines", "list"])
cli.run(["machines", "list", flake_name])
assert "machine1" in capsys.readouterr().out assert "machine1" in capsys.readouterr().out
flake_show = subprocess.run( flake_show = subprocess.run(
["nix", "flake", "show", "--json"], ["nix", "flake", "show", "--json"],
@@ -61,6 +67,17 @@ def test_create_flake(
pytest.fail("nixosConfigurations.machine1 not found in flake outputs") pytest.fail("nixosConfigurations.machine1 not found in flake outputs")
# configure machine1 # configure machine1
capsys.readouterr() capsys.readouterr()
cli.run(["config", "--machine", "machine1", "services.openssh.enable"]) cli.run(
["config", "--machine", "machine1", "services.openssh.enable", "", flake_name]
)
capsys.readouterr() capsys.readouterr()
cli.run(["config", "--machine", "machine1", "services.openssh.enable", "true"]) cli.run(
[
"config",
"--machine",
"machine1",
"services.openssh.enable",
"true",
flake_name,
]
)

View File

@@ -1,22 +1,17 @@
from pathlib import Path # from clan_cli.dirs import _get_clan_flake_toplevel
import pytest # TODO: Reimplement test?
# def test_get_clan_flake_toplevel(
# monkeypatch: pytest.MonkeyPatch, temporary_home: Path
# ) -> None:
# monkeypatch.chdir(temporary_home)
# with pytest.raises(ClanError):
# print(_get_clan_flake_toplevel())
# (temporary_home / ".git").touch()
# assert _get_clan_flake_toplevel() == temporary_home
from clan_cli.dirs import get_clan_flake_toplevel # subdir = temporary_home / "subdir"
from clan_cli.errors import ClanError # subdir.mkdir()
# monkeypatch.chdir(subdir)
# (subdir / ".clan-flake").touch()
def test_get_clan_flake_toplevel( # assert _get_clan_flake_toplevel() == subdir
monkeypatch: pytest.MonkeyPatch, temporary_dir: Path
) -> None:
monkeypatch.chdir(temporary_dir)
with pytest.raises(ClanError):
print(get_clan_flake_toplevel())
(temporary_dir / ".git").touch()
assert get_clan_flake_toplevel() == temporary_dir
subdir = temporary_dir / "subdir"
subdir.mkdir()
monkeypatch.chdir(subdir)
(subdir / ".clan-flake").touch()
assert get_clan_flake_toplevel() == subdir

View File

@@ -1,13 +1,16 @@
import json import json
from pathlib import Path import logging
import pytest import pytest
from api import TestClient from api import TestClient
from fixtures_flakes import FlakeForTest
log = logging.getLogger(__name__)
@pytest.mark.impure @pytest.mark.impure
def test_inspect_ok(api: TestClient, test_flake_with_core: Path) -> None: def test_inspect_ok(api: TestClient, test_flake_with_core: FlakeForTest) -> None:
params = {"url": str(test_flake_with_core)} params = {"url": str(test_flake_with_core.path)}
response = api.get( response = api.get(
"/api/flake/attrs", "/api/flake/attrs",
params=params, params=params,
@@ -32,8 +35,8 @@ def test_inspect_err(api: TestClient) -> None:
@pytest.mark.impure @pytest.mark.impure
def test_inspect_flake(api: TestClient, test_flake_with_core: Path) -> None: def test_inspect_flake(api: TestClient, test_flake_with_core: FlakeForTest) -> None:
params = {"url": str(test_flake_with_core)} params = {"url": str(test_flake_with_core.path)}
response = api.get( response = api.get(
"/api/flake", "/api/flake",
params=params, params=params,

View File

@@ -9,6 +9,7 @@
let let
clan = clan-core.lib.buildClan { clan = clan-core.lib.buildClan {
directory = self; directory = self;
clanName = "test_flake_with_core";
machines = { machines = {
vm1 = { lib, ... }: { vm1 = { lib, ... }: {
clan.networking.deploymentAddress = "__CLAN_DEPLOYMENT_ADDRESS__"; clan.networking.deploymentAddress = "__CLAN_DEPLOYMENT_ADDRESS__";

View File

@@ -9,6 +9,7 @@
let let
clan = clan-core.lib.buildClan { clan = clan-core.lib.buildClan {
directory = self; directory = self;
clanName = "test_flake_with_core_and_pass";
machines = { machines = {
vm1 = { lib, ... }: { vm1 = { lib, ... }: {
clan.networking.deploymentAddress = "__CLAN_DEPLOYMENT_ADDRESS__"; clan.networking.deploymentAddress = "__CLAN_DEPLOYMENT_ADDRESS__";

View File

@@ -9,6 +9,7 @@
let let
clan = clan-core.lib.buildClan { clan = clan-core.lib.buildClan {
directory = self; directory = self;
clanName = "test_flake_with_core_dynamic_machines";
machines = machines =
let let
machineModules = builtins.readDir (self + "/machines"); machineModules = builtins.readDir (self + "/machines");

View File

@@ -3,6 +3,7 @@ from typing import TYPE_CHECKING
import pytest import pytest
from cli import Cli from cli import Cli
from fixtures_flakes import FlakeForTest
if TYPE_CHECKING: if TYPE_CHECKING:
from age_keys import KeyPair from age_keys import KeyPair
@@ -10,7 +11,7 @@ if TYPE_CHECKING:
def test_import_sops( def test_import_sops(
test_root: Path, test_root: Path,
test_flake: Path, test_flake: FlakeForTest,
capsys: pytest.CaptureFixture, capsys: pytest.CaptureFixture,
monkeypatch: pytest.MonkeyPatch, monkeypatch: pytest.MonkeyPatch,
age_keys: list["KeyPair"], age_keys: list["KeyPair"],
@@ -18,30 +19,33 @@ def test_import_sops(
cli = Cli() cli = Cli()
monkeypatch.setenv("SOPS_AGE_KEY", age_keys[1].privkey) monkeypatch.setenv("SOPS_AGE_KEY", age_keys[1].privkey)
cli.run(["secrets", "machines", "add", "machine1", age_keys[0].pubkey]) cli.run(
cli.run(["secrets", "users", "add", "user1", age_keys[1].pubkey]) ["secrets", "machines", "add", "machine1", age_keys[0].pubkey, test_flake.name]
cli.run(["secrets", "users", "add", "user2", age_keys[2].pubkey]) )
cli.run(["secrets", "groups", "add-user", "group1", "user1"]) cli.run(["secrets", "users", "add", "user1", age_keys[1].pubkey, test_flake.name])
cli.run(["secrets", "groups", "add-user", "group1", "user2"]) cli.run(["secrets", "users", "add", "user2", age_keys[2].pubkey, test_flake.name])
cli.run(["secrets", "groups", "add-user", "group1", "user1", test_flake.name])
cli.run(["secrets", "groups", "add-user", "group1", "user2", test_flake.name])
# To edit: # To edit:
# SOPS_AGE_KEY=AGE-SECRET-KEY-1U5ENXZQAY62NC78Y2WC0SEGRRMAEEKH79EYY5TH4GPFWJKEAY0USZ6X7YQ sops --age age14tva0txcrl0zes05x7gkx56qd6wd9q3nwecjac74xxzz4l47r44sv3fz62 ./data/secrets.yaml # SOPS_AGE_KEY=AGE-SECRET-KEY-1U5ENXZQAY62NC78Y2WC0SEGRRMAEEKH79EYY5TH4GPFWJKEAY0USZ6X7YQ sops --age age14tva0txcrl0zes05x7gkx56qd6wd9q3nwecjac74xxzz4l47r44sv3fz62 ./data/secrets.yaml
cli.run( cmd = [
[ "secrets",
"secrets", "import-sops",
"import-sops", "--group",
"--group", "group1",
"group1", "--machine",
"--machine", "machine1",
"machine1", str(test_root.joinpath("data", "secrets.yaml")),
str(test_root.joinpath("data", "secrets.yaml")), test_flake.name,
] ]
)
cli.run(cmd)
capsys.readouterr() capsys.readouterr()
cli.run(["secrets", "users", "list"]) cli.run(["secrets", "users", "list", test_flake.name])
users = sorted(capsys.readouterr().out.rstrip().split()) users = sorted(capsys.readouterr().out.rstrip().split())
assert users == ["user1", "user2"] assert users == ["user1", "user2"]
capsys.readouterr() capsys.readouterr()
cli.run(["secrets", "get", "secret-key"]) cli.run(["secrets", "get", "secret-key", test_flake.name])
assert capsys.readouterr().out == "secret-value" assert capsys.readouterr().out == "secret-value"

View File

@@ -1,46 +1,46 @@
from pathlib import Path
from api import TestClient from api import TestClient
from fixtures_flakes import FlakeForTest
def test_machines(api: TestClient, test_flake: Path) -> None: def test_machines(api: TestClient, test_flake: FlakeForTest) -> None:
response = api.get("/api/machines") response = api.get(f"/api/{test_flake.name}/machines")
assert response.status_code == 200 assert response.status_code == 200
assert response.json() == {"machines": []} assert response.json() == {"machines": []}
response = api.post("/api/machines", json={"name": "test"}) response = api.post(f"/api/{test_flake.name}/machines", json={"name": "test"})
assert response.status_code == 201 assert response.status_code == 201
assert response.json() == {"machine": {"name": "test", "status": "unknown"}} assert response.json() == {"machine": {"name": "test", "status": "unknown"}}
response = api.get("/api/machines/test") response = api.get(f"/api/{test_flake.name}/machines/test")
assert response.status_code == 200 assert response.status_code == 200
assert response.json() == {"machine": {"name": "test", "status": "unknown"}} assert response.json() == {"machine": {"name": "test", "status": "unknown"}}
response = api.get("/api/machines") response = api.get(f"/api/{test_flake.name}/machines")
assert response.status_code == 200 assert response.status_code == 200
assert response.json() == {"machines": [{"name": "test", "status": "unknown"}]} assert response.json() == {"machines": [{"name": "test", "status": "unknown"}]}
def test_configure_machine(api: TestClient, test_flake: Path) -> None: def test_configure_machine(api: TestClient, test_flake: FlakeForTest) -> None:
# ensure error 404 if machine does not exist when accessing the config # ensure error 404 if machine does not exist when accessing the config
response = api.get("/api/machines/machine1/config") response = api.get(f"/api/{test_flake.name}/machines/machine1/config")
assert response.status_code == 404 assert response.status_code == 404
# ensure error 404 if machine does not exist when writing to the config # ensure error 404 if machine does not exist when writing to the config
response = api.put("/api/machines/machine1/config", json={}) response = api.put(f"/api/{test_flake.name}/machines/machine1/config", json={})
assert response.status_code == 404 assert response.status_code == 404
# create the machine # create the machine
response = api.post("/api/machines", json={"name": "machine1"}) response = api.post(f"/api/{test_flake.name}/machines", json={"name": "machine1"})
assert response.status_code == 201 assert response.status_code == 201
# ensure an empty config is returned by default for a new machine # ensure an empty config is returned by default for a new machine
response = api.get("/api/machines/machine1/config") response = api.get(f"/api/{test_flake.name}/machines/machine1/config")
assert response.status_code == 200 assert response.status_code == 200
assert response.json() == {"config": {}} assert response.json() == {"config": {}}
# get jsonschema for machine # get jsonschema for machine
response = api.get("/api/machines/machine1/schema") response = api.get(f"/api/{test_flake.name}/machines/machine1/schema")
assert response.status_code == 200 assert response.status_code == 200
json_response = response.json() json_response = response.json()
assert "schema" in json_response and "properties" in json_response["schema"] assert "schema" in json_response and "properties" in json_response["schema"]
@@ -56,7 +56,7 @@ def test_configure_machine(api: TestClient, test_flake: Path) -> None:
# verify an invalid config (fileSystems missing) fails # verify an invalid config (fileSystems missing) fails
response = api.put( response = api.put(
"/api/machines/machine1/verify", f"/api/{test_flake.name}/machines/machine1/verify",
json=invalid_config, json=invalid_config,
) )
assert response.status_code == 200 assert response.status_code == 200
@@ -67,13 +67,13 @@ def test_configure_machine(api: TestClient, test_flake: Path) -> None:
# set come invalid config (fileSystems missing) # set come invalid config (fileSystems missing)
response = api.put( response = api.put(
"/api/machines/machine1/config", f"/api/{test_flake.name}/machines/machine1/config",
json=invalid_config, json=invalid_config,
) )
assert response.status_code == 200 assert response.status_code == 200
# ensure the config has actually been updated # ensure the config has actually been updated
response = api.get("/api/machines/machine1/config") response = api.get(f"/api/{test_flake.name}/machines/machine1/config")
assert response.status_code == 200 assert response.status_code == 200
assert response.json() == {"config": invalid_config} assert response.json() == {"config": invalid_config}
@@ -103,15 +103,16 @@ def test_configure_machine(api: TestClient, test_flake: Path) -> None:
), ),
**fs_config, **fs_config,
) )
response = api.put( response = api.put(
"/api/machines/machine1/config", f"/api/{test_flake.name}/machines/machine1/config",
json=config2, json=config2,
) )
assert response.status_code == 200 assert response.status_code == 200
assert response.json() == {"config": config2} assert response.json() == {"config": config2}
# ensure that the config has actually been updated # get the config again
response = api.get("/api/machines/machine1/config") response = api.get(f"/api/{test_flake.name}/machines/machine1/config")
assert response.status_code == 200 assert response.status_code == 200
assert response.json() == {"config": config2} assert response.json() == {"config": config2}
@@ -119,20 +120,21 @@ def test_configure_machine(api: TestClient, test_flake: Path) -> None:
# For example, this should not result in the boot.loader.grub.devices being # For example, this should not result in the boot.loader.grub.devices being
# set twice (eg. merged) # set twice (eg. merged)
response = api.put( response = api.put(
"/api/machines/machine1/config", f"/api/{test_flake.name}/machines/machine1/config",
json=config2, json=config2,
) )
assert response.status_code == 200 assert response.status_code == 200
assert response.json() == {"config": config2} assert response.json() == {"config": config2}
# verify the machine config evaluates # verify the machine config evaluates
response = api.get("/api/machines/machine1/verify") response = api.get(f"/api/{test_flake.name}/machines/machine1/verify")
assert response.status_code == 200 assert response.status_code == 200
assert response.json() == {"success": True, "error": None} assert response.json() == {"success": True, "error": None}
# get the schema with an extra module imported # get the schema with an extra module imported
response = api.put( response = api.put(
"/api/machines/machine1/schema", f"/api/{test_flake.name}/machines/machine1/schema",
json={"clanImports": ["fake-module"]}, json={"clanImports": ["fake-module"]},
) )
# expect the result schema to contain the fake-module.fake-flag option # expect the result schema to contain the fake-module.fake-flag option
@@ -157,7 +159,7 @@ def test_configure_machine(api: TestClient, test_flake: Path) -> None:
# set the fake-module.fake-flag option to true # set the fake-module.fake-flag option to true
response = api.put( response = api.put(
"/api/machines/machine1/config", f"/api/{test_flake.name}/machines/machine1/config",
json=config_with_imports, json=config_with_imports,
) )
assert response.status_code == 200 assert response.status_code == 200
@@ -179,7 +181,7 @@ def test_configure_machine(api: TestClient, test_flake: Path) -> None:
**fs_config, **fs_config,
) )
response = api.put( response = api.put(
"/api/machines/machine1/config", f"/api/{test_flake.name}/machines/machine1/config",
json=config_with_empty_imports, json=config_with_empty_imports,
) )
assert response.status_code == 200 assert response.status_code == 200

View File

@@ -1,21 +1,22 @@
from pathlib import Path
import pytest import pytest
from cli import Cli from cli import Cli
from fixtures_flakes import FlakeForTest
def test_machine_subcommands(test_flake: Path, capsys: pytest.CaptureFixture) -> None: def test_machine_subcommands(
test_flake: FlakeForTest, capsys: pytest.CaptureFixture
) -> None:
cli = Cli() cli = Cli()
cli.run(["machines", "create", "machine1"]) cli.run(["machines", "create", "machine1", test_flake.name])
capsys.readouterr() capsys.readouterr()
cli.run(["machines", "list"]) cli.run(["machines", "list", test_flake.name])
out = capsys.readouterr() out = capsys.readouterr()
assert "machine1\n" == out.out assert "machine1\n" == out.out
cli.run(["machines", "remove", "machine1"]) cli.run(["machines", "delete", "machine1", test_flake.name])
capsys.readouterr() capsys.readouterr()
cli.run(["machines", "list"]) cli.run(["machines", "list", test_flake.name])
out = capsys.readouterr() out = capsys.readouterr()
assert "" == out.out assert "" == out.out

View File

@@ -1,8 +1,8 @@
from pathlib import Path from fixtures_flakes import FlakeForTest
from clan_cli.config import machine from clan_cli.config import machine
def test_schema_for_machine(test_flake: Path) -> None: def test_schema_for_machine(test_flake: FlakeForTest) -> None:
schema = machine.schema_for_machine("machine1", flake=test_flake) schema = machine.schema_for_machine(test_flake.name, "machine1")
assert "properties" in schema assert "properties" in schema

View File

@@ -1,30 +1,33 @@
import logging
import os import os
from contextlib import contextmanager from contextlib import contextmanager
from pathlib import Path
from typing import TYPE_CHECKING, Iterator from typing import TYPE_CHECKING, Iterator
import pytest import pytest
from cli import Cli from cli import Cli
from fixtures_flakes import FlakeForTest
from clan_cli.errors import ClanError from clan_cli.errors import ClanError
if TYPE_CHECKING: if TYPE_CHECKING:
from age_keys import KeyPair from age_keys import KeyPair
log = logging.getLogger(__name__)
def _test_identities( def _test_identities(
what: str, what: str,
test_flake: Path, test_flake: FlakeForTest,
capsys: pytest.CaptureFixture, capsys: pytest.CaptureFixture,
age_keys: list["KeyPair"], age_keys: list["KeyPair"],
) -> None: ) -> None:
cli = Cli() cli = Cli()
sops_folder = test_flake / "sops" sops_folder = test_flake.path / "sops"
cli.run(["secrets", what, "add", "foo", age_keys[0].pubkey]) cli.run(["secrets", what, "add", "foo", age_keys[0].pubkey, test_flake.name])
assert (sops_folder / what / "foo" / "key.json").exists() assert (sops_folder / what / "foo" / "key.json").exists()
with pytest.raises(ClanError): with pytest.raises(ClanError):
cli.run(["secrets", what, "add", "foo", age_keys[0].pubkey]) cli.run(["secrets", what, "add", "foo", age_keys[0].pubkey, test_flake.name])
cli.run( cli.run(
[ [
@@ -34,73 +37,80 @@ def _test_identities(
"-f", "-f",
"foo", "foo",
age_keys[0].privkey, age_keys[0].privkey,
test_flake.name,
] ]
) )
capsys.readouterr() # empty the buffer capsys.readouterr() # empty the buffer
cli.run(["secrets", what, "get", "foo"]) cli.run(["secrets", what, "get", "foo", test_flake.name])
out = capsys.readouterr() # empty the buffer out = capsys.readouterr() # empty the buffer
assert age_keys[0].pubkey in out.out assert age_keys[0].pubkey in out.out
capsys.readouterr() # empty the buffer capsys.readouterr() # empty the buffer
cli.run(["secrets", what, "list"]) cli.run(["secrets", what, "list", test_flake.name])
out = capsys.readouterr() # empty the buffer out = capsys.readouterr() # empty the buffer
assert "foo" in out.out assert "foo" in out.out
cli.run(["secrets", what, "remove", "foo"]) cli.run(["secrets", what, "remove", "foo", test_flake.name])
assert not (sops_folder / what / "foo" / "key.json").exists() assert not (sops_folder / what / "foo" / "key.json").exists()
with pytest.raises(ClanError): # already removed with pytest.raises(ClanError): # already removed
cli.run(["secrets", what, "remove", "foo"]) cli.run(["secrets", what, "remove", "foo", test_flake.name])
capsys.readouterr() capsys.readouterr()
cli.run(["secrets", what, "list"]) cli.run(["secrets", what, "list", test_flake.name])
out = capsys.readouterr() out = capsys.readouterr()
assert "foo" not in out.out assert "foo" not in out.out
def test_users( def test_users(
test_flake: Path, capsys: pytest.CaptureFixture, age_keys: list["KeyPair"] test_flake: FlakeForTest, capsys: pytest.CaptureFixture, age_keys: list["KeyPair"]
) -> None: ) -> None:
_test_identities("users", test_flake, capsys, age_keys) _test_identities("users", test_flake, capsys, age_keys)
def test_machines( def test_machines(
test_flake: Path, capsys: pytest.CaptureFixture, age_keys: list["KeyPair"] test_flake: FlakeForTest, capsys: pytest.CaptureFixture, age_keys: list["KeyPair"]
) -> None: ) -> None:
_test_identities("machines", test_flake, capsys, age_keys) _test_identities("machines", test_flake, capsys, age_keys)
def test_groups( def test_groups(
test_flake: Path, capsys: pytest.CaptureFixture, age_keys: list["KeyPair"] test_flake: FlakeForTest, capsys: pytest.CaptureFixture, age_keys: list["KeyPair"]
) -> None: ) -> None:
cli = Cli() cli = Cli()
capsys.readouterr() # empty the buffer capsys.readouterr() # empty the buffer
cli.run(["secrets", "groups", "list"]) cli.run(["secrets", "groups", "list", test_flake.name])
assert capsys.readouterr().out == "" assert capsys.readouterr().out == ""
with pytest.raises(ClanError): # machine does not exist yet with pytest.raises(ClanError): # machine does not exist yet
cli.run(["secrets", "groups", "add-machine", "group1", "machine1"]) cli.run(
["secrets", "groups", "add-machine", "group1", "machine1", test_flake.name]
)
with pytest.raises(ClanError): # user does not exist yet with pytest.raises(ClanError): # user does not exist yet
cli.run(["secrets", "groups", "add-user", "groupb1", "user1"]) cli.run(["secrets", "groups", "add-user", "groupb1", "user1", test_flake.name])
cli.run(["secrets", "machines", "add", "machine1", age_keys[0].pubkey]) cli.run(
cli.run(["secrets", "groups", "add-machine", "group1", "machine1"]) ["secrets", "machines", "add", "machine1", age_keys[0].pubkey, test_flake.name]
)
cli.run(["secrets", "groups", "add-machine", "group1", "machine1", test_flake.name])
# Should this fail? # Should this fail?
cli.run(["secrets", "groups", "add-machine", "group1", "machine1"]) cli.run(["secrets", "groups", "add-machine", "group1", "machine1", test_flake.name])
cli.run(["secrets", "users", "add", "user1", age_keys[0].pubkey]) cli.run(["secrets", "users", "add", "user1", age_keys[0].pubkey, test_flake.name])
cli.run(["secrets", "groups", "add-user", "group1", "user1"]) cli.run(["secrets", "groups", "add-user", "group1", "user1", test_flake.name])
capsys.readouterr() # empty the buffer capsys.readouterr() # empty the buffer
cli.run(["secrets", "groups", "list"]) cli.run(["secrets", "groups", "list", test_flake.name])
out = capsys.readouterr().out out = capsys.readouterr().out
assert "user1" in out assert "user1" in out
assert "machine1" in out assert "machine1" in out
cli.run(["secrets", "groups", "remove-user", "group1", "user1"]) cli.run(["secrets", "groups", "remove-user", "group1", "user1", test_flake.name])
cli.run(["secrets", "groups", "remove-machine", "group1", "machine1"]) cli.run(
groups = os.listdir(test_flake / "sops" / "groups") ["secrets", "groups", "remove-machine", "group1", "machine1", test_flake.name]
)
groups = os.listdir(test_flake.path / "sops" / "groups")
assert len(groups) == 0 assert len(groups) == 0
@@ -117,104 +127,114 @@ def use_key(key: str, monkeypatch: pytest.MonkeyPatch) -> Iterator[None]:
def test_secrets( def test_secrets(
test_flake: Path, test_flake: FlakeForTest,
capsys: pytest.CaptureFixture, capsys: pytest.CaptureFixture,
monkeypatch: pytest.MonkeyPatch, monkeypatch: pytest.MonkeyPatch,
age_keys: list["KeyPair"], age_keys: list["KeyPair"],
) -> None: ) -> None:
cli = Cli() cli = Cli()
capsys.readouterr() # empty the buffer capsys.readouterr() # empty the buffer
cli.run(["secrets", "list"]) cli.run(["secrets", "list", test_flake.name])
assert capsys.readouterr().out == "" assert capsys.readouterr().out == ""
monkeypatch.setenv("SOPS_NIX_SECRET", "foo") monkeypatch.setenv("SOPS_NIX_SECRET", "foo")
monkeypatch.setenv("SOPS_AGE_KEY_FILE", str(test_flake / ".." / "age.key")) monkeypatch.setenv("SOPS_AGE_KEY_FILE", str(test_flake.path / ".." / "age.key"))
cli.run(["secrets", "key", "generate"]) cli.run(["secrets", "key", "generate"])
capsys.readouterr() # empty the buffer capsys.readouterr() # empty the buffer
cli.run(["secrets", "key", "show"]) cli.run(["secrets", "key", "show"])
key = capsys.readouterr().out key = capsys.readouterr().out
assert key.startswith("age1") assert key.startswith("age1")
cli.run(["secrets", "users", "add", "testuser", key]) cli.run(["secrets", "users", "add", "testuser", key, test_flake.name])
with pytest.raises(ClanError): # does not exist yet with pytest.raises(ClanError): # does not exist yet
cli.run(["secrets", "get", "nonexisting"]) cli.run(["secrets", "get", "nonexisting", test_flake.name])
cli.run(["secrets", "set", "initialkey"]) cli.run(["secrets", "set", "initialkey", test_flake.name])
capsys.readouterr() capsys.readouterr()
cli.run(["secrets", "get", "initialkey"]) cli.run(["secrets", "get", "initialkey", test_flake.name])
assert capsys.readouterr().out == "foo" assert capsys.readouterr().out == "foo"
capsys.readouterr() capsys.readouterr()
cli.run(["secrets", "users", "list"]) cli.run(["secrets", "users", "list", test_flake.name])
users = capsys.readouterr().out.rstrip().split("\n") users = capsys.readouterr().out.rstrip().split("\n")
assert len(users) == 1, f"users: {users}" assert len(users) == 1, f"users: {users}"
owner = users[0] owner = users[0]
monkeypatch.setenv("EDITOR", "cat") monkeypatch.setenv("EDITOR", "cat")
cli.run(["secrets", "set", "--edit", "initialkey"]) cli.run(["secrets", "set", "--edit", "initialkey", test_flake.name])
monkeypatch.delenv("EDITOR") monkeypatch.delenv("EDITOR")
cli.run(["secrets", "rename", "initialkey", "key"]) cli.run(["secrets", "rename", "initialkey", "key", test_flake.name])
capsys.readouterr() # empty the buffer capsys.readouterr() # empty the buffer
cli.run(["secrets", "list"]) cli.run(["secrets", "list", test_flake.name])
assert capsys.readouterr().out == "key\n" assert capsys.readouterr().out == "key\n"
cli.run(["secrets", "machines", "add", "machine1", age_keys[0].pubkey]) cli.run(
cli.run(["secrets", "machines", "add-secret", "machine1", "key"]) ["secrets", "machines", "add", "machine1", age_keys[0].pubkey, test_flake.name]
)
cli.run(["secrets", "machines", "add-secret", "machine1", "key", test_flake.name])
capsys.readouterr() capsys.readouterr()
cli.run(["secrets", "machines", "list"]) cli.run(["secrets", "machines", "list", test_flake.name])
assert capsys.readouterr().out == "machine1\n" assert capsys.readouterr().out == "machine1\n"
with use_key(age_keys[0].privkey, monkeypatch): with use_key(age_keys[0].privkey, monkeypatch):
capsys.readouterr() capsys.readouterr()
cli.run(["secrets", "get", "key"]) cli.run(["secrets", "get", "key", test_flake.name])
assert capsys.readouterr().out == "foo" assert capsys.readouterr().out == "foo"
cli.run(["secrets", "machines", "remove-secret", "machine1", "key"]) cli.run(
["secrets", "machines", "remove-secret", "machine1", "key", test_flake.name]
)
cli.run(["secrets", "users", "add", "user1", age_keys[1].pubkey]) cli.run(["secrets", "users", "add", "user1", age_keys[1].pubkey, test_flake.name])
cli.run(["secrets", "users", "add-secret", "user1", "key"]) cli.run(["secrets", "users", "add-secret", "user1", "key", test_flake.name])
capsys.readouterr() capsys.readouterr()
with use_key(age_keys[1].privkey, monkeypatch): with use_key(age_keys[1].privkey, monkeypatch):
cli.run(["secrets", "get", "key"]) cli.run(["secrets", "get", "key", test_flake.name])
assert capsys.readouterr().out == "foo" assert capsys.readouterr().out == "foo"
cli.run(["secrets", "users", "remove-secret", "user1", "key"]) cli.run(["secrets", "users", "remove-secret", "user1", "key", test_flake.name])
with pytest.raises(ClanError): # does not exist yet with pytest.raises(ClanError): # does not exist yet
cli.run(["secrets", "groups", "add-secret", "admin-group", "key"]) cli.run(
cli.run(["secrets", "groups", "add-user", "admin-group", "user1"]) ["secrets", "groups", "add-secret", "admin-group", "key", test_flake.name]
cli.run(["secrets", "groups", "add-user", "admin-group", owner]) )
cli.run(["secrets", "groups", "add-secret", "admin-group", "key"]) cli.run(["secrets", "groups", "add-user", "admin-group", "user1", test_flake.name])
cli.run(["secrets", "groups", "add-user", "admin-group", owner, test_flake.name])
cli.run(["secrets", "groups", "add-secret", "admin-group", "key", test_flake.name])
capsys.readouterr() # empty the buffer capsys.readouterr() # empty the buffer
cli.run(["secrets", "set", "--group", "admin-group", "key2"]) cli.run(["secrets", "set", "--group", "admin-group", "key2", test_flake.name])
with use_key(age_keys[1].privkey, monkeypatch): with use_key(age_keys[1].privkey, monkeypatch):
capsys.readouterr() capsys.readouterr()
cli.run(["secrets", "get", "key"]) cli.run(["secrets", "get", "key", test_flake.name])
assert capsys.readouterr().out == "foo" assert capsys.readouterr().out == "foo"
# extend group will update secrets # extend group will update secrets
cli.run(["secrets", "users", "add", "user2", age_keys[2].pubkey]) cli.run(["secrets", "users", "add", "user2", age_keys[2].pubkey, test_flake.name])
cli.run(["secrets", "groups", "add-user", "admin-group", "user2"]) cli.run(["secrets", "groups", "add-user", "admin-group", "user2", test_flake.name])
with use_key(age_keys[2].privkey, monkeypatch): # user2 with use_key(age_keys[2].privkey, monkeypatch): # user2
capsys.readouterr() capsys.readouterr()
cli.run(["secrets", "get", "key"]) cli.run(["secrets", "get", "key", test_flake.name])
assert capsys.readouterr().out == "foo" assert capsys.readouterr().out == "foo"
cli.run(["secrets", "groups", "remove-user", "admin-group", "user2"]) cli.run(
["secrets", "groups", "remove-user", "admin-group", "user2", test_flake.name]
)
with pytest.raises(ClanError), use_key(age_keys[2].privkey, monkeypatch): with pytest.raises(ClanError), use_key(age_keys[2].privkey, monkeypatch):
# user2 is not in the group anymore # user2 is not in the group anymore
capsys.readouterr() capsys.readouterr()
cli.run(["secrets", "get", "key"]) cli.run(["secrets", "get", "key", test_flake.name])
print(capsys.readouterr().out) print(capsys.readouterr().out)
cli.run(["secrets", "groups", "remove-secret", "admin-group", "key"]) cli.run(
["secrets", "groups", "remove-secret", "admin-group", "key", test_flake.name]
)
cli.run(["secrets", "remove", "key"]) cli.run(["secrets", "remove", "key", test_flake.name])
cli.run(["secrets", "remove", "key2"]) cli.run(["secrets", "remove", "key2", test_flake.name])
capsys.readouterr() # empty the buffer capsys.readouterr() # empty the buffer
cli.run(["secrets", "list"]) cli.run(["secrets", "list", test_flake.name])
assert capsys.readouterr().out == "" assert capsys.readouterr().out == ""

View File

@@ -1,8 +1,8 @@
from pathlib import Path
from typing import TYPE_CHECKING from typing import TYPE_CHECKING
import pytest import pytest
from cli import Cli from cli import Cli
from fixtures_flakes import FlakeForTest
from clan_cli.machines.facts import machine_get_fact from clan_cli.machines.facts import machine_get_fact
from clan_cli.secrets.folders import sops_secrets_folder from clan_cli.secrets.folders import sops_secrets_folder
@@ -15,21 +15,36 @@ if TYPE_CHECKING:
@pytest.mark.impure @pytest.mark.impure
def test_generate_secret( def test_generate_secret(
monkeypatch: pytest.MonkeyPatch, monkeypatch: pytest.MonkeyPatch,
test_flake_with_core: Path, test_flake_with_core: FlakeForTest,
age_keys: list["KeyPair"], age_keys: list["KeyPair"],
) -> None: ) -> None:
monkeypatch.chdir(test_flake_with_core) monkeypatch.chdir(test_flake_with_core.path)
monkeypatch.setenv("SOPS_AGE_KEY", age_keys[0].privkey) monkeypatch.setenv("SOPS_AGE_KEY", age_keys[0].privkey)
cli = Cli() cli = Cli()
cli.run(["secrets", "users", "add", "user1", age_keys[0].pubkey]) cli.run(
cli.run(["secrets", "generate", "vm1"]) [
has_secret("vm1-age.key") "secrets",
has_secret("vm1-zerotier-identity-secret") "users",
network_id = machine_get_fact("vm1", "zerotier-network-id") "add",
"user1",
age_keys[0].pubkey,
test_flake_with_core.name,
]
)
cli.run(["secrets", "generate", "vm1", test_flake_with_core.name])
has_secret(test_flake_with_core.name, "vm1-age.key")
has_secret(test_flake_with_core.name, "vm1-zerotier-identity-secret")
network_id = machine_get_fact(
test_flake_with_core.name, "vm1", "zerotier-network-id"
)
assert len(network_id) == 16 assert len(network_id) == 16
age_key = sops_secrets_folder().joinpath("vm1-age.key").joinpath("secret") age_key = (
sops_secrets_folder(test_flake_with_core.name)
.joinpath("vm1-age.key")
.joinpath("secret")
)
identity_secret = ( identity_secret = (
sops_secrets_folder() sops_secrets_folder(test_flake_with_core.name)
.joinpath("vm1-zerotier-identity-secret") .joinpath("vm1-zerotier-identity-secret")
.joinpath("secret") .joinpath("secret")
) )
@@ -37,12 +52,12 @@ def test_generate_secret(
secret1_mtime = identity_secret.lstat().st_mtime_ns secret1_mtime = identity_secret.lstat().st_mtime_ns
# test idempotency # test idempotency
cli.run(["secrets", "generate", "vm1"]) cli.run(["secrets", "generate", "vm1", test_flake_with_core.name])
assert age_key.lstat().st_mtime_ns == age_key_mtime assert age_key.lstat().st_mtime_ns == age_key_mtime
assert identity_secret.lstat().st_mtime_ns == secret1_mtime assert identity_secret.lstat().st_mtime_ns == secret1_mtime
machine_path = ( machine_path = (
sops_secrets_folder() sops_secrets_folder(test_flake_with_core.name)
.joinpath("vm1-zerotier-identity-secret") .joinpath("vm1-zerotier-identity-secret")
.joinpath("machines") .joinpath("machines")
.joinpath("vm1") .joinpath("vm1")

View File

@@ -3,6 +3,7 @@ from pathlib import Path
import pytest import pytest
from cli import Cli from cli import Cli
from fixtures_flakes import FlakeForTest
from clan_cli.machines.facts import machine_get_fact from clan_cli.machines.facts import machine_get_fact
from clan_cli.nix import nix_shell from clan_cli.nix import nix_shell
@@ -12,16 +13,16 @@ from clan_cli.ssh import HostGroup
@pytest.mark.impure @pytest.mark.impure
def test_upload_secret( def test_upload_secret(
monkeypatch: pytest.MonkeyPatch, monkeypatch: pytest.MonkeyPatch,
test_flake_with_core_and_pass: Path, test_flake_with_core_and_pass: FlakeForTest,
temporary_dir: Path, temporary_home: Path,
host_group: HostGroup, host_group: HostGroup,
) -> None: ) -> None:
monkeypatch.chdir(test_flake_with_core_and_pass) monkeypatch.chdir(test_flake_with_core_and_pass.path)
gnupghome = temporary_dir / "gpg" gnupghome = temporary_home / "gpg"
gnupghome.mkdir(mode=0o700) gnupghome.mkdir(mode=0o700)
monkeypatch.setenv("GNUPGHOME", str(gnupghome)) monkeypatch.setenv("GNUPGHOME", str(gnupghome))
monkeypatch.setenv("PASSWORD_STORE_DIR", str(temporary_dir / "pass")) monkeypatch.setenv("PASSWORD_STORE_DIR", str(temporary_home / "pass"))
gpg_key_spec = temporary_dir / "gpg_key_spec" gpg_key_spec = temporary_home / "gpg_key_spec"
gpg_key_spec.write_text( gpg_key_spec.write_text(
""" """
Key-Type: 1 Key-Type: 1
@@ -38,25 +39,27 @@ def test_upload_secret(
check=True, check=True,
) )
subprocess.run(nix_shell(["pass"], ["pass", "init", "test@local"]), check=True) subprocess.run(nix_shell(["pass"], ["pass", "init", "test@local"]), check=True)
cli.run(["secrets", "generate", "vm1"]) cli.run(["secrets", "generate", "vm1", test_flake_with_core_and_pass.name])
network_id = machine_get_fact("vm1", "zerotier-network-id") network_id = machine_get_fact(
test_flake_with_core_and_pass.name, "vm1", "zerotier-network-id"
)
assert len(network_id) == 16 assert len(network_id) == 16
identity_secret = ( identity_secret = (
temporary_dir / "pass" / "machines" / "vm1" / "zerotier-identity-secret.gpg" temporary_home / "pass" / "machines" / "vm1" / "zerotier-identity-secret.gpg"
) )
secret1_mtime = identity_secret.lstat().st_mtime_ns secret1_mtime = identity_secret.lstat().st_mtime_ns
# test idempotency # test idempotency
cli.run(["secrets", "generate", "vm1"]) cli.run(["secrets", "generate", "vm1", test_flake_with_core_and_pass.name])
assert identity_secret.lstat().st_mtime_ns == secret1_mtime assert identity_secret.lstat().st_mtime_ns == secret1_mtime
flake = test_flake_with_core_and_pass.joinpath("flake.nix") flake = test_flake_with_core_and_pass.path.joinpath("flake.nix")
host = host_group.hosts[0] host = host_group.hosts[0]
addr = f"{host.user}@{host.host}:{host.port}?StrictHostKeyChecking=no&UserKnownHostsFile=/dev/null&IdentityFile={host.key}" addr = f"{host.user}@{host.host}:{host.port}?StrictHostKeyChecking=no&UserKnownHostsFile=/dev/null&IdentityFile={host.key}"
new_text = flake.read_text().replace("__CLAN_DEPLOYMENT_ADDRESS__", addr) new_text = flake.read_text().replace("__CLAN_DEPLOYMENT_ADDRESS__", addr)
flake.write_text(new_text) flake.write_text(new_text)
cli.run(["secrets", "upload", "vm1"]) cli.run(["secrets", "upload", "vm1", test_flake_with_core_and_pass.name])
zerotier_identity_secret = ( zerotier_identity_secret = (
test_flake_with_core_and_pass / "secrets" / "zerotier-identity-secret" test_flake_with_core_and_pass.path / "secrets" / "zerotier-identity-secret"
) )
assert zerotier_identity_secret.exists() assert zerotier_identity_secret.exists()

View File

@@ -1,8 +1,8 @@
from pathlib import Path
from typing import TYPE_CHECKING from typing import TYPE_CHECKING
import pytest import pytest
from cli import Cli from cli import Cli
from fixtures_flakes import FlakeForTest
from clan_cli.ssh import HostGroup from clan_cli.ssh import HostGroup
@@ -13,29 +13,47 @@ if TYPE_CHECKING:
@pytest.mark.impure @pytest.mark.impure
def test_secrets_upload( def test_secrets_upload(
monkeypatch: pytest.MonkeyPatch, monkeypatch: pytest.MonkeyPatch,
test_flake_with_core: Path, test_flake_with_core: FlakeForTest,
host_group: HostGroup, host_group: HostGroup,
age_keys: list["KeyPair"], age_keys: list["KeyPair"],
) -> None: ) -> None:
monkeypatch.chdir(test_flake_with_core) monkeypatch.chdir(test_flake_with_core.path)
monkeypatch.setenv("SOPS_AGE_KEY", age_keys[0].privkey) monkeypatch.setenv("SOPS_AGE_KEY", age_keys[0].privkey)
cli = Cli() cli = Cli()
cli.run(["secrets", "users", "add", "user1", age_keys[0].pubkey]) cli.run(
[
"secrets",
"users",
"add",
"user1",
age_keys[0].pubkey,
test_flake_with_core.name,
]
)
cli.run(["secrets", "machines", "add", "vm1", age_keys[1].pubkey]) cli.run(
[
"secrets",
"machines",
"add",
"vm1",
age_keys[1].pubkey,
test_flake_with_core.name,
]
)
monkeypatch.setenv("SOPS_NIX_SECRET", age_keys[0].privkey) monkeypatch.setenv("SOPS_NIX_SECRET", age_keys[0].privkey)
cli.run(["secrets", "set", "vm1-age.key"]) cli.run(["secrets", "set", "vm1-age.key", test_flake_with_core.name])
flake = test_flake_with_core.joinpath("flake.nix") flake = test_flake_with_core.path.joinpath("flake.nix")
host = host_group.hosts[0] host = host_group.hosts[0]
addr = f"{host.user}@{host.host}:{host.port}?StrictHostKeyChecking=no&UserKnownHostsFile=/dev/null&IdentityFile={host.key}" addr = f"{host.user}@{host.host}:{host.port}?StrictHostKeyChecking=no&UserKnownHostsFile=/dev/null&IdentityFile={host.key}"
new_text = flake.read_text().replace("__CLAN_DEPLOYMENT_ADDRESS__", addr) new_text = flake.read_text().replace("__CLAN_DEPLOYMENT_ADDRESS__", addr)
flake.write_text(new_text) flake.write_text(new_text)
cli.run(["secrets", "upload", "vm1"]) cli.run(["secrets", "upload", "vm1", test_flake_with_core.name])
# the flake defines this path as the location where the sops key should be installed # the flake defines this path as the location where the sops key should be installed
sops_key = test_flake_with_core.joinpath("key.txt") sops_key = test_flake_with_core.path.joinpath("key.txt")
assert sops_key.exists() assert sops_key.exists()
assert sops_key.read_text() == age_keys[0].privkey assert sops_key.read_text() == age_keys[0].privkey

View File

@@ -1,16 +1,19 @@
from pathlib import Path
import pytest import pytest
from api import TestClient from api import TestClient
from fixtures_flakes import FlakeForTest
@pytest.mark.impure @pytest.mark.impure
def test_inspect(api: TestClient, test_flake_with_core: Path) -> None: def test_inspect(api: TestClient, test_flake_with_core: FlakeForTest) -> None:
response = api.post( response = api.post(
"/api/vms/inspect", "/api/vms/inspect",
json=dict(flake_url=str(test_flake_with_core), flake_attr="vm1"), json=dict(flake_url=str(test_flake_with_core.path), flake_attr="vm1"),
) )
assert response.status_code == 200, "Failed to inspect vm"
# print(f"SLEEPING FOR EVER: {99999}", file=sys.stderr)
# time.sleep(99999)
assert response.status_code == 200, f"Failed to inspect vm: {response.text}"
config = response.json()["config"] config = response.json()["config"]
assert config.get("flake_attr") == "vm1" assert config.get("flake_attr") == "vm1"
assert config.get("cores") == 1 assert config.get("cores") == 1
@@ -26,4 +29,4 @@ def test_incorrect_uuid(api: TestClient) -> None:
for endpoint in uuid_endpoints: for endpoint in uuid_endpoints:
response = api.get(endpoint.format("1234")) response = api.get(endpoint.format("1234"))
assert response.status_code == 422, "Failed to get vm status" assert response.status_code == 422, f"Failed to get vm status: {response.text}"

View File

@@ -5,19 +5,24 @@ from typing import TYPE_CHECKING, Iterator
import pytest import pytest
from api import TestClient from api import TestClient
from cli import Cli from cli import Cli
from fixtures_flakes import create_flake from fixtures_flakes import FlakeForTest, create_flake
from httpx import SyncByteStream from httpx import SyncByteStream
from root import CLAN_CORE from root import CLAN_CORE
from clan_cli.types import FlakeName
if TYPE_CHECKING: if TYPE_CHECKING:
from age_keys import KeyPair from age_keys import KeyPair
@pytest.fixture @pytest.fixture
def flake_with_vm_with_secrets(monkeypatch: pytest.MonkeyPatch) -> Iterator[Path]: def flake_with_vm_with_secrets(
monkeypatch: pytest.MonkeyPatch, temporary_home: Path
) -> Iterator[FlakeForTest]:
yield from create_flake( yield from create_flake(
monkeypatch, monkeypatch,
"test_flake_with_core_dynamic_machines", temporary_home,
FlakeName("test_flake_with_core_dynamic_machines"),
CLAN_CORE, CLAN_CORE,
machines=["vm_with_secrets"], machines=["vm_with_secrets"],
) )
@@ -25,27 +30,18 @@ def flake_with_vm_with_secrets(monkeypatch: pytest.MonkeyPatch) -> Iterator[Path
@pytest.fixture @pytest.fixture
def remote_flake_with_vm_without_secrets( def remote_flake_with_vm_without_secrets(
monkeypatch: pytest.MonkeyPatch, monkeypatch: pytest.MonkeyPatch, temporary_home: Path
) -> Iterator[Path]: ) -> Iterator[FlakeForTest]:
yield from create_flake( yield from create_flake(
monkeypatch, monkeypatch,
"test_flake_with_core_dynamic_machines", temporary_home,
FlakeName("test_flake_with_core_dynamic_machines"),
CLAN_CORE, CLAN_CORE,
machines=["vm_without_secrets"], machines=["vm_without_secrets"],
remote=True, remote=True,
) )
@pytest.fixture
def create_user_with_age_key(
monkeypatch: pytest.MonkeyPatch,
age_keys: list["KeyPair"],
) -> None:
monkeypatch.setenv("SOPS_AGE_KEY", age_keys[0].privkey)
cli = Cli()
cli.run(["secrets", "users", "add", "user1", age_keys[0].pubkey])
def generic_create_vm_test(api: TestClient, flake: Path, vm: str) -> None: def generic_create_vm_test(api: TestClient, flake: Path, vm: str) -> None:
print(f"flake_url: {flake} ") print(f"flake_url: {flake} ")
response = api.post( response = api.post(
@@ -74,8 +70,9 @@ def generic_create_vm_test(api: TestClient, flake: Path, vm: str) -> None:
print(line.decode("utf-8")) print(line.decode("utf-8"))
print("=========END LOGS==========") print("=========END LOGS==========")
assert response.status_code == 200, "Failed to get vm logs" assert response.status_code == 200, "Failed to get vm logs"
print("Get /api/vms/{uuid}/status")
response = api.get(f"/api/vms/{uuid}/status") response = api.get(f"/api/vms/{uuid}/status")
print("Finished Get /api/vms/{uuid}/status")
assert response.status_code == 200, "Failed to get vm status" assert response.status_code == 200, "Failed to get vm status"
data = response.json() data = response.json()
assert ( assert (
@@ -88,10 +85,22 @@ def generic_create_vm_test(api: TestClient, flake: Path, vm: str) -> None:
def test_create_local( def test_create_local(
api: TestClient, api: TestClient,
monkeypatch: pytest.MonkeyPatch, monkeypatch: pytest.MonkeyPatch,
flake_with_vm_with_secrets: Path, flake_with_vm_with_secrets: FlakeForTest,
create_user_with_age_key: None, age_keys: list["KeyPair"],
) -> None: ) -> None:
generic_create_vm_test(api, flake_with_vm_with_secrets, "vm_with_secrets") monkeypatch.setenv("SOPS_AGE_KEY", age_keys[0].privkey)
cli = Cli()
cmd = [
"secrets",
"users",
"add",
"user1",
age_keys[0].pubkey,
flake_with_vm_with_secrets.name,
]
cli.run(cmd)
generic_create_vm_test(api, flake_with_vm_with_secrets.path, "vm_with_secrets")
@pytest.mark.skipif(not os.path.exists("/dev/kvm"), reason="Requires KVM") @pytest.mark.skipif(not os.path.exists("/dev/kvm"), reason="Requires KVM")
@@ -99,8 +108,8 @@ def test_create_local(
def test_create_remote( def test_create_remote(
api: TestClient, api: TestClient,
monkeypatch: pytest.MonkeyPatch, monkeypatch: pytest.MonkeyPatch,
remote_flake_with_vm_without_secrets: Path, remote_flake_with_vm_without_secrets: FlakeForTest,
) -> None: ) -> None:
generic_create_vm_test( generic_create_vm_test(
api, remote_flake_with_vm_without_secrets, "vm_without_secrets" api, remote_flake_with_vm_without_secrets.path, "vm_without_secrets"
) )

View File

@@ -1,9 +1,9 @@
import os import os
from pathlib import Path
from typing import TYPE_CHECKING from typing import TYPE_CHECKING
import pytest import pytest
from cli import Cli from cli import Cli
from fixtures_flakes import FlakeForTest
if TYPE_CHECKING: if TYPE_CHECKING:
from age_keys import KeyPair from age_keys import KeyPair
@@ -12,9 +12,11 @@ no_kvm = not os.path.exists("/dev/kvm")
@pytest.mark.impure @pytest.mark.impure
def test_inspect(test_flake_with_core: Path, capsys: pytest.CaptureFixture) -> None: def test_inspect(
test_flake_with_core: FlakeForTest, capsys: pytest.CaptureFixture
) -> None:
cli = Cli() cli = Cli()
cli.run(["vms", "inspect", "vm1"]) cli.run(["vms", "inspect", "vm1", test_flake_with_core.name])
out = capsys.readouterr() # empty the buffer out = capsys.readouterr() # empty the buffer
assert "Cores" in out.out assert "Cores" in out.out
@@ -23,11 +25,20 @@ def test_inspect(test_flake_with_core: Path, capsys: pytest.CaptureFixture) -> N
@pytest.mark.impure @pytest.mark.impure
def test_create( def test_create(
monkeypatch: pytest.MonkeyPatch, monkeypatch: pytest.MonkeyPatch,
test_flake_with_core: Path, test_flake_with_core: FlakeForTest,
age_keys: list["KeyPair"], age_keys: list["KeyPair"],
) -> None: ) -> None:
monkeypatch.chdir(test_flake_with_core) monkeypatch.chdir(test_flake_with_core.path)
monkeypatch.setenv("SOPS_AGE_KEY", age_keys[0].privkey) monkeypatch.setenv("SOPS_AGE_KEY", age_keys[0].privkey)
cli = Cli() cli = Cli()
cli.run(["secrets", "users", "add", "user1", age_keys[0].pubkey]) cli.run(
cli.run(["vms", "create", "vm1"]) [
"secrets",
"users",
"add",
"user1",
age_keys[0].pubkey,
test_flake_with_core.name,
]
)
cli.run(["vms", "create", "vm1", test_flake_with_core.name])

View File

@@ -6,16 +6,22 @@ import sys
from pathlib import Path from pathlib import Path
import pytest import pytest
from cli import Cli
from ports import PortFunction from ports import PortFunction
@pytest.mark.timeout(10) @pytest.mark.timeout(10)
def test_start_server(unused_tcp_port: PortFunction, temporary_dir: Path) -> None: def test_start_server(unused_tcp_port: PortFunction, temporary_home: Path) -> None:
Cli()
port = unused_tcp_port() port = unused_tcp_port()
fifo = temporary_dir / "fifo" fifo = temporary_home / "fifo"
os.mkfifo(fifo) os.mkfifo(fifo)
notify_script = temporary_dir / "firefox"
# Create a script called "firefox" in the temporary home directory that
# writes "1" to the fifo. This is used to notify the test that the firefox has been
# started.
notify_script = temporary_home / "firefox"
bash = shutil.which("bash") bash = shutil.which("bash")
assert bash is not None assert bash is not None
notify_script.write_text( notify_script.write_text(
@@ -26,11 +32,28 @@ echo "1" > {fifo}
) )
notify_script.chmod(0o700) notify_script.chmod(0o700)
# Add the temporary home directory to the PATH so that the script is found
env = os.environ.copy() env = os.environ.copy()
print(str(temporary_dir.absolute())) env["PATH"] = f"{temporary_home}:{env['PATH']}"
env["PATH"] = ":".join([str(temporary_dir.absolute())] + env["PATH"].split(":"))
# Add build/src to PYTHONPATH so that the webui module is found in nix sandbox
# TODO: We need a way to make sure things which work in the devshell also work in the sandbox
python_path = env.get("PYTHONPATH")
if python_path:
env["PYTHONPATH"] = f"/build/src:{python_path}"
# breakpoint_container(
# cmd=[sys.executable, "-m", "clan_cli.webui", "--port", str(port)],
# env=env,
# work_dir=temporary_home,
# )
with subprocess.Popen( with subprocess.Popen(
[sys.executable, "-m", "clan_cli.webui", "--port", str(port)], env=env [sys.executable, "-m", "clan_cli.webui", "--port", str(port)],
env=env,
stdout=sys.stderr,
stderr=sys.stderr,
text=True,
) as p: ) as p:
try: try:
with open(fifo) as f: with open(fifo) as f:

View File

@@ -1,5 +1,10 @@
{ {
"root": true, "root": true,
"extends": ["next/core-web-vitals", "plugin:tailwindcss/recommended"], "extends": ["next/core-web-vitals", "plugin:tailwindcss/recommended", "plugin:@typescript-eslint/recommended"],
"ignorePatterns": ["**/src/api/*"] "parser": "@typescript-eslint/parser",
"plugins": ["@typescript-eslint"],
"ignorePatterns": ["**/src/api/*"],
"rules": {
"@typescript-eslint/no-explicit-any": "off"
}
} }

File diff suppressed because it is too large Load Diff

View File

@@ -16,6 +16,7 @@
"@rjsf/mui": "^5.12.1", "@rjsf/mui": "^5.12.1",
"@rjsf/validator-ajv8": "^5.12.1", "@rjsf/validator-ajv8": "^5.12.1",
"@types/json-schema": "^7.0.12", "@types/json-schema": "^7.0.12",
"@typescript-eslint/eslint-plugin": "^5.62.0",
"autoprefixer": "10.4.14", "autoprefixer": "10.4.14",
"axios": "^1.4.0", "axios": "^1.4.0",
"classnames": "^2.3.2", "classnames": "^2.3.2",
@@ -50,7 +51,6 @@
"version": "1.2.6", "version": "1.2.6",
"resolved": "https://registry.npmjs.org/@aashutoshrathi/word-wrap/-/word-wrap-1.2.6.tgz", "resolved": "https://registry.npmjs.org/@aashutoshrathi/word-wrap/-/word-wrap-1.2.6.tgz",
"integrity": "sha512-1Yjs2SvM8TflER/OD3cOjhWWOZb58A2t7wpE2S9XfBYTiIl+XFhQG2bjy4Pu1I+EAlCNUzRDYDdFwFYUKvXcIA==", "integrity": "sha512-1Yjs2SvM8TflER/OD3cOjhWWOZb58A2t7wpE2S9XfBYTiIl+XFhQG2bjy4Pu1I+EAlCNUzRDYDdFwFYUKvXcIA==",
"dev": true,
"engines": { "engines": {
"node": ">=0.10.0" "node": ">=0.10.0"
} }
@@ -550,7 +550,6 @@
"version": "4.4.0", "version": "4.4.0",
"resolved": "https://registry.npmjs.org/@eslint-community/eslint-utils/-/eslint-utils-4.4.0.tgz", "resolved": "https://registry.npmjs.org/@eslint-community/eslint-utils/-/eslint-utils-4.4.0.tgz",
"integrity": "sha512-1/sA4dwrzBAyeUoQ6oxahHKmrZvsnLCg4RfxW3ZFGGmQkSNQPFNLV9CUEFQP1x9EYXHTo5p6xdhZM1Ne9p/AfA==", "integrity": "sha512-1/sA4dwrzBAyeUoQ6oxahHKmrZvsnLCg4RfxW3ZFGGmQkSNQPFNLV9CUEFQP1x9EYXHTo5p6xdhZM1Ne9p/AfA==",
"dev": true,
"dependencies": { "dependencies": {
"eslint-visitor-keys": "^3.3.0" "eslint-visitor-keys": "^3.3.0"
}, },
@@ -565,7 +564,6 @@
"version": "4.6.2", "version": "4.6.2",
"resolved": "https://registry.npmjs.org/@eslint-community/regexpp/-/regexpp-4.6.2.tgz", "resolved": "https://registry.npmjs.org/@eslint-community/regexpp/-/regexpp-4.6.2.tgz",
"integrity": "sha512-pPTNuaAG3QMH+buKyBIGJs3g/S5y0caxw0ygM3YyE6yJFySwiGGSzA+mM3KJ8QQvzeLh3blwgSonkFjgQdxzMw==", "integrity": "sha512-pPTNuaAG3QMH+buKyBIGJs3g/S5y0caxw0ygM3YyE6yJFySwiGGSzA+mM3KJ8QQvzeLh3blwgSonkFjgQdxzMw==",
"dev": true,
"engines": { "engines": {
"node": "^12.0.0 || ^14.0.0 || >=16.0.0" "node": "^12.0.0 || ^14.0.0 || >=16.0.0"
} }
@@ -574,7 +572,6 @@
"version": "2.1.2", "version": "2.1.2",
"resolved": "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-2.1.2.tgz", "resolved": "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-2.1.2.tgz",
"integrity": "sha512-+wvgpDsrB1YqAMdEUCcnTlpfVBH7Vqn6A/NT3D8WVXFIaKMlErPIZT3oCIAVCOtarRpMtelZLqJeU3t7WY6X6g==", "integrity": "sha512-+wvgpDsrB1YqAMdEUCcnTlpfVBH7Vqn6A/NT3D8WVXFIaKMlErPIZT3oCIAVCOtarRpMtelZLqJeU3t7WY6X6g==",
"dev": true,
"dependencies": { "dependencies": {
"ajv": "^6.12.4", "ajv": "^6.12.4",
"debug": "^4.3.2", "debug": "^4.3.2",
@@ -597,7 +594,6 @@
"version": "8.47.0", "version": "8.47.0",
"resolved": "https://registry.npmjs.org/@eslint/js/-/js-8.47.0.tgz", "resolved": "https://registry.npmjs.org/@eslint/js/-/js-8.47.0.tgz",
"integrity": "sha512-P6omY1zv5MItm93kLM8s2vr1HICJH8v0dvddDhysbIuZ+vcjOHg5Zbkf1mTkcmi2JA9oBG2anOkRnW8WJTS8Og==", "integrity": "sha512-P6omY1zv5MItm93kLM8s2vr1HICJH8v0dvddDhysbIuZ+vcjOHg5Zbkf1mTkcmi2JA9oBG2anOkRnW8WJTS8Og==",
"dev": true,
"engines": { "engines": {
"node": "^12.22.0 || ^14.17.0 || >=16.0.0" "node": "^12.22.0 || ^14.17.0 || >=16.0.0"
} }
@@ -612,7 +608,6 @@
"version": "0.11.10", "version": "0.11.10",
"resolved": "https://registry.npmjs.org/@humanwhocodes/config-array/-/config-array-0.11.10.tgz", "resolved": "https://registry.npmjs.org/@humanwhocodes/config-array/-/config-array-0.11.10.tgz",
"integrity": "sha512-KVVjQmNUepDVGXNuoRRdmmEjruj0KfiGSbS8LVc12LMsWDQzRXJ0qdhN8L8uUigKpfEHRhlaQFY0ib1tnUbNeQ==", "integrity": "sha512-KVVjQmNUepDVGXNuoRRdmmEjruj0KfiGSbS8LVc12LMsWDQzRXJ0qdhN8L8uUigKpfEHRhlaQFY0ib1tnUbNeQ==",
"dev": true,
"dependencies": { "dependencies": {
"@humanwhocodes/object-schema": "^1.2.1", "@humanwhocodes/object-schema": "^1.2.1",
"debug": "^4.1.1", "debug": "^4.1.1",
@@ -626,7 +621,6 @@
"version": "1.0.1", "version": "1.0.1",
"resolved": "https://registry.npmjs.org/@humanwhocodes/module-importer/-/module-importer-1.0.1.tgz", "resolved": "https://registry.npmjs.org/@humanwhocodes/module-importer/-/module-importer-1.0.1.tgz",
"integrity": "sha512-bxveV4V8v5Yb4ncFTT3rPSgZBOpCkjfK0y4oVVVJwIuDVBRMDXrPyXRL988i5ap9m9bnyEEjWfm5WkBmtffLfA==", "integrity": "sha512-bxveV4V8v5Yb4ncFTT3rPSgZBOpCkjfK0y4oVVVJwIuDVBRMDXrPyXRL988i5ap9m9bnyEEjWfm5WkBmtffLfA==",
"dev": true,
"engines": { "engines": {
"node": ">=12.22" "node": ">=12.22"
}, },
@@ -638,8 +632,7 @@
"node_modules/@humanwhocodes/object-schema": { "node_modules/@humanwhocodes/object-schema": {
"version": "1.2.1", "version": "1.2.1",
"resolved": "https://registry.npmjs.org/@humanwhocodes/object-schema/-/object-schema-1.2.1.tgz", "resolved": "https://registry.npmjs.org/@humanwhocodes/object-schema/-/object-schema-1.2.1.tgz",
"integrity": "sha512-ZnQMnLV4e7hDlUvw8H+U8ASL02SS2Gn6+9Ac3wGGLIe7+je2AeAOxPY+izIPJDfFDb7eDjev0Us8MO1iFRN8hA==", "integrity": "sha512-ZnQMnLV4e7hDlUvw8H+U8ASL02SS2Gn6+9Ac3wGGLIe7+je2AeAOxPY+izIPJDfFDb7eDjev0Us8MO1iFRN8hA=="
"dev": true
}, },
"node_modules/@ibm-cloud/openapi-ruleset": { "node_modules/@ibm-cloud/openapi-ruleset": {
"version": "0.45.5", "version": "0.45.5",
@@ -2148,6 +2141,11 @@
"resolved": "https://registry.npmjs.org/@types/scheduler/-/scheduler-0.16.3.tgz", "resolved": "https://registry.npmjs.org/@types/scheduler/-/scheduler-0.16.3.tgz",
"integrity": "sha512-5cJ8CB4yAx7BH1oMvdU0Jh9lrEXyPkar6F9G/ERswkCuvP4KQZfZkSjcMbAICCpQTN4OuZn8tz0HiKv9TGZgrQ==" "integrity": "sha512-5cJ8CB4yAx7BH1oMvdU0Jh9lrEXyPkar6F9G/ERswkCuvP4KQZfZkSjcMbAICCpQTN4OuZn8tz0HiKv9TGZgrQ=="
}, },
"node_modules/@types/semver": {
"version": "7.5.4",
"resolved": "https://registry.npmjs.org/@types/semver/-/semver-7.5.4.tgz",
"integrity": "sha512-MMzuxN3GdFwskAnb6fz0orFvhfqi752yjaXylr0Rp4oDg5H0Zn1IuyRhDVvYOwAXoJirx2xuS16I3WjxnAIHiQ=="
},
"node_modules/@types/urijs": { "node_modules/@types/urijs": {
"version": "1.19.19", "version": "1.19.19",
"resolved": "https://registry.npmjs.org/@types/urijs/-/urijs-1.19.19.tgz", "resolved": "https://registry.npmjs.org/@types/urijs/-/urijs-1.19.19.tgz",
@@ -2160,11 +2158,43 @@
"integrity": "sha512-cSjhgrr8g4KbPnnijAr/KJDNKa/bBa+ixYkywFRvrhvi9n1WEl7yYbtRyzE6jqNQiSxxJxoAW3STaOQwJHndaw==", "integrity": "sha512-cSjhgrr8g4KbPnnijAr/KJDNKa/bBa+ixYkywFRvrhvi9n1WEl7yYbtRyzE6jqNQiSxxJxoAW3STaOQwJHndaw==",
"dev": true "dev": true
}, },
"node_modules/@typescript-eslint/eslint-plugin": {
"version": "5.62.0",
"resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-5.62.0.tgz",
"integrity": "sha512-TiZzBSJja/LbhNPvk6yc0JrX9XqhQ0hdh6M2svYfsHGejaKFIAGd9MQ+ERIMzLGlN/kZoYIgdxFV0PuljTKXag==",
"dependencies": {
"@eslint-community/regexpp": "^4.4.0",
"@typescript-eslint/scope-manager": "5.62.0",
"@typescript-eslint/type-utils": "5.62.0",
"@typescript-eslint/utils": "5.62.0",
"debug": "^4.3.4",
"graphemer": "^1.4.0",
"ignore": "^5.2.0",
"natural-compare-lite": "^1.4.0",
"semver": "^7.3.7",
"tsutils": "^3.21.0"
},
"engines": {
"node": "^12.22.0 || ^14.17.0 || >=16.0.0"
},
"funding": {
"type": "opencollective",
"url": "https://opencollective.com/typescript-eslint"
},
"peerDependencies": {
"@typescript-eslint/parser": "^5.0.0",
"eslint": "^6.0.0 || ^7.0.0 || ^8.0.0"
},
"peerDependenciesMeta": {
"typescript": {
"optional": true
}
}
},
"node_modules/@typescript-eslint/parser": { "node_modules/@typescript-eslint/parser": {
"version": "5.62.0", "version": "5.62.0",
"resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-5.62.0.tgz", "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-5.62.0.tgz",
"integrity": "sha512-VlJEV0fOQ7BExOsHYAGrgbEiZoi8D+Bl2+f6V2RrXerRSylnp+ZBHmPvaIa8cz0Ajx7WO7Z5RqfgYg7ED1nRhA==", "integrity": "sha512-VlJEV0fOQ7BExOsHYAGrgbEiZoi8D+Bl2+f6V2RrXerRSylnp+ZBHmPvaIa8cz0Ajx7WO7Z5RqfgYg7ED1nRhA==",
"dev": true,
"dependencies": { "dependencies": {
"@typescript-eslint/scope-manager": "5.62.0", "@typescript-eslint/scope-manager": "5.62.0",
"@typescript-eslint/types": "5.62.0", "@typescript-eslint/types": "5.62.0",
@@ -2191,7 +2221,6 @@
"version": "5.62.0", "version": "5.62.0",
"resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-5.62.0.tgz", "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-5.62.0.tgz",
"integrity": "sha512-VXuvVvZeQCQb5Zgf4HAxc04q5j+WrNAtNh9OwCsCgpKqESMTu3tF/jhZ3xG6T4NZwWl65Bg8KuS2uEvhSfLl0w==", "integrity": "sha512-VXuvVvZeQCQb5Zgf4HAxc04q5j+WrNAtNh9OwCsCgpKqESMTu3tF/jhZ3xG6T4NZwWl65Bg8KuS2uEvhSfLl0w==",
"dev": true,
"dependencies": { "dependencies": {
"@typescript-eslint/types": "5.62.0", "@typescript-eslint/types": "5.62.0",
"@typescript-eslint/visitor-keys": "5.62.0" "@typescript-eslint/visitor-keys": "5.62.0"
@@ -2204,11 +2233,36 @@
"url": "https://opencollective.com/typescript-eslint" "url": "https://opencollective.com/typescript-eslint"
} }
}, },
"node_modules/@typescript-eslint/type-utils": {
"version": "5.62.0",
"resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-5.62.0.tgz",
"integrity": "sha512-xsSQreu+VnfbqQpW5vnCJdq1Z3Q0U31qiWmRhr98ONQmcp/yhiPJFPq8MXiJVLiksmOKSjIldZzkebzHuCGzew==",
"dependencies": {
"@typescript-eslint/typescript-estree": "5.62.0",
"@typescript-eslint/utils": "5.62.0",
"debug": "^4.3.4",
"tsutils": "^3.21.0"
},
"engines": {
"node": "^12.22.0 || ^14.17.0 || >=16.0.0"
},
"funding": {
"type": "opencollective",
"url": "https://opencollective.com/typescript-eslint"
},
"peerDependencies": {
"eslint": "*"
},
"peerDependenciesMeta": {
"typescript": {
"optional": true
}
}
},
"node_modules/@typescript-eslint/types": { "node_modules/@typescript-eslint/types": {
"version": "5.62.0", "version": "5.62.0",
"resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-5.62.0.tgz", "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-5.62.0.tgz",
"integrity": "sha512-87NVngcbVXUahrRTqIK27gD2t5Cu1yuCXxbLcFtCzZGlfyVWWh8mLHkoxzjsB6DDNnvdL+fW8MiwPEJyGJQDgQ==", "integrity": "sha512-87NVngcbVXUahrRTqIK27gD2t5Cu1yuCXxbLcFtCzZGlfyVWWh8mLHkoxzjsB6DDNnvdL+fW8MiwPEJyGJQDgQ==",
"dev": true,
"engines": { "engines": {
"node": "^12.22.0 || ^14.17.0 || >=16.0.0" "node": "^12.22.0 || ^14.17.0 || >=16.0.0"
}, },
@@ -2221,7 +2275,6 @@
"version": "5.62.0", "version": "5.62.0",
"resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-5.62.0.tgz", "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-5.62.0.tgz",
"integrity": "sha512-CmcQ6uY7b9y694lKdRB8FEel7JbU/40iSAPomu++SjLMntB+2Leay2LO6i8VnJk58MtE9/nQSFIH6jpyRWyYzA==", "integrity": "sha512-CmcQ6uY7b9y694lKdRB8FEel7JbU/40iSAPomu++SjLMntB+2Leay2LO6i8VnJk58MtE9/nQSFIH6jpyRWyYzA==",
"dev": true,
"dependencies": { "dependencies": {
"@typescript-eslint/types": "5.62.0", "@typescript-eslint/types": "5.62.0",
"@typescript-eslint/visitor-keys": "5.62.0", "@typescript-eslint/visitor-keys": "5.62.0",
@@ -2244,11 +2297,55 @@
} }
} }
}, },
"node_modules/@typescript-eslint/utils": {
"version": "5.62.0",
"resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-5.62.0.tgz",
"integrity": "sha512-n8oxjeb5aIbPFEtmQxQYOLI0i9n5ySBEY/ZEHHZqKQSFnxio1rv6dthascc9dLuwrL0RC5mPCxB7vnAVGAYWAQ==",
"dependencies": {
"@eslint-community/eslint-utils": "^4.2.0",
"@types/json-schema": "^7.0.9",
"@types/semver": "^7.3.12",
"@typescript-eslint/scope-manager": "5.62.0",
"@typescript-eslint/types": "5.62.0",
"@typescript-eslint/typescript-estree": "5.62.0",
"eslint-scope": "^5.1.1",
"semver": "^7.3.7"
},
"engines": {
"node": "^12.22.0 || ^14.17.0 || >=16.0.0"
},
"funding": {
"type": "opencollective",
"url": "https://opencollective.com/typescript-eslint"
},
"peerDependencies": {
"eslint": "^6.0.0 || ^7.0.0 || ^8.0.0"
}
},
"node_modules/@typescript-eslint/utils/node_modules/eslint-scope": {
"version": "5.1.1",
"resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-5.1.1.tgz",
"integrity": "sha512-2NxwbF/hZ0KpepYN0cNbo+FN6XoK7GaHlQhgx/hIZl6Va0bF45RQOOwhLIy8lQDbuCiadSLCBnH2CFYquit5bw==",
"dependencies": {
"esrecurse": "^4.3.0",
"estraverse": "^4.1.1"
},
"engines": {
"node": ">=8.0.0"
}
},
"node_modules/@typescript-eslint/utils/node_modules/estraverse": {
"version": "4.3.0",
"resolved": "https://registry.npmjs.org/estraverse/-/estraverse-4.3.0.tgz",
"integrity": "sha512-39nnKffWz8xN1BU/2c79n9nB9HDzo0niYUqx6xyqUnyoAnQyyWpOTdZEeiCch8BBu515t4wp9ZmgVfVhn9EBpw==",
"engines": {
"node": ">=4.0"
}
},
"node_modules/@typescript-eslint/visitor-keys": { "node_modules/@typescript-eslint/visitor-keys": {
"version": "5.62.0", "version": "5.62.0",
"resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-5.62.0.tgz", "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-5.62.0.tgz",
"integrity": "sha512-07ny+LHRzQXepkGg6w0mFY41fVUNBrL2Roj/++7V1txKugfjm/Ci/qSND03r2RhlJhJYMcTn9AhhSSqQp0Ysyw==", "integrity": "sha512-07ny+LHRzQXepkGg6w0mFY41fVUNBrL2Roj/++7V1txKugfjm/Ci/qSND03r2RhlJhJYMcTn9AhhSSqQp0Ysyw==",
"dev": true,
"dependencies": { "dependencies": {
"@typescript-eslint/types": "5.62.0", "@typescript-eslint/types": "5.62.0",
"eslint-visitor-keys": "^3.3.0" "eslint-visitor-keys": "^3.3.0"
@@ -2277,7 +2374,6 @@
"version": "8.10.0", "version": "8.10.0",
"resolved": "https://registry.npmjs.org/acorn/-/acorn-8.10.0.tgz", "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.10.0.tgz",
"integrity": "sha512-F0SAmZ8iUtS//m8DmCTA0jlh6TDKkHQyK6xc6V4KDTyZKA9dnvX9/3sRTVQrWm79glUAZbnmmNcdYwUIHWVybw==", "integrity": "sha512-F0SAmZ8iUtS//m8DmCTA0jlh6TDKkHQyK6xc6V4KDTyZKA9dnvX9/3sRTVQrWm79glUAZbnmmNcdYwUIHWVybw==",
"dev": true,
"bin": { "bin": {
"acorn": "bin/acorn" "acorn": "bin/acorn"
}, },
@@ -2289,7 +2385,6 @@
"version": "5.3.2", "version": "5.3.2",
"resolved": "https://registry.npmjs.org/acorn-jsx/-/acorn-jsx-5.3.2.tgz", "resolved": "https://registry.npmjs.org/acorn-jsx/-/acorn-jsx-5.3.2.tgz",
"integrity": "sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ==", "integrity": "sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ==",
"dev": true,
"peerDependencies": { "peerDependencies": {
"acorn": "^6.0.0 || ^7.0.0 || ^8.0.0" "acorn": "^6.0.0 || ^7.0.0 || ^8.0.0"
} }
@@ -2298,7 +2393,6 @@
"version": "6.12.6", "version": "6.12.6",
"resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz",
"integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==", "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==",
"dev": true,
"dependencies": { "dependencies": {
"fast-deep-equal": "^3.1.1", "fast-deep-equal": "^3.1.1",
"fast-json-stable-stringify": "^2.0.0", "fast-json-stable-stringify": "^2.0.0",
@@ -2359,7 +2453,6 @@
"version": "5.0.1", "version": "5.0.1",
"resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz",
"integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==",
"dev": true,
"engines": { "engines": {
"node": ">=8" "node": ">=8"
} }
@@ -2368,7 +2461,6 @@
"version": "4.3.0", "version": "4.3.0",
"resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz",
"integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==",
"dev": true,
"dependencies": { "dependencies": {
"color-convert": "^2.0.1" "color-convert": "^2.0.1"
}, },
@@ -2404,8 +2496,7 @@
"node_modules/argparse": { "node_modules/argparse": {
"version": "2.0.1", "version": "2.0.1",
"resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz", "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz",
"integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==", "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q=="
"dev": true
}, },
"node_modules/aria-query": { "node_modules/aria-query": {
"version": "5.3.0", "version": "5.3.0",
@@ -2452,7 +2543,6 @@
"version": "2.1.0", "version": "2.1.0",
"resolved": "https://registry.npmjs.org/array-union/-/array-union-2.1.0.tgz", "resolved": "https://registry.npmjs.org/array-union/-/array-union-2.1.0.tgz",
"integrity": "sha512-HGyxoOTYUyCM6stUe6EJgnd4EoewAI7zMdfqO+kGjnlZmBDz/cR5pf8r/cR4Wq60sL/p0IkcjUEEPwS3GFrIyw==", "integrity": "sha512-HGyxoOTYUyCM6stUe6EJgnd4EoewAI7zMdfqO+kGjnlZmBDz/cR5pf8r/cR4Wq60sL/p0IkcjUEEPwS3GFrIyw==",
"dev": true,
"engines": { "engines": {
"node": ">=8" "node": ">=8"
} }
@@ -2826,7 +2916,6 @@
"version": "4.1.2", "version": "4.1.2",
"resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz",
"integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==",
"dev": true,
"dependencies": { "dependencies": {
"ansi-styles": "^4.1.0", "ansi-styles": "^4.1.0",
"supports-color": "^7.1.0" "supports-color": "^7.1.0"
@@ -2917,7 +3006,6 @@
"version": "2.0.1", "version": "2.0.1",
"resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz",
"integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==",
"dev": true,
"dependencies": { "dependencies": {
"color-name": "~1.1.4" "color-name": "~1.1.4"
}, },
@@ -2928,8 +3016,7 @@
"node_modules/color-name": { "node_modules/color-name": {
"version": "1.1.4", "version": "1.1.4",
"resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz",
"integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA=="
"dev": true
}, },
"node_modules/combined-stream": { "node_modules/combined-stream": {
"version": "1.0.8", "version": "1.0.8",
@@ -3014,7 +3101,6 @@
"version": "7.0.3", "version": "7.0.3",
"resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz", "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz",
"integrity": "sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==", "integrity": "sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==",
"dev": true,
"dependencies": { "dependencies": {
"path-key": "^3.1.0", "path-key": "^3.1.0",
"shebang-command": "^2.0.0", "shebang-command": "^2.0.0",
@@ -3178,7 +3264,6 @@
"version": "4.3.4", "version": "4.3.4",
"resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz",
"integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==", "integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==",
"dev": true,
"dependencies": { "dependencies": {
"ms": "2.1.2" "ms": "2.1.2"
}, },
@@ -3199,8 +3284,7 @@
"node_modules/deep-is": { "node_modules/deep-is": {
"version": "0.1.4", "version": "0.1.4",
"resolved": "https://registry.npmjs.org/deep-is/-/deep-is-0.1.4.tgz", "resolved": "https://registry.npmjs.org/deep-is/-/deep-is-0.1.4.tgz",
"integrity": "sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ==", "integrity": "sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ=="
"dev": true
}, },
"node_modules/deepmerge": { "node_modules/deepmerge": {
"version": "2.2.1", "version": "2.2.1",
@@ -3274,7 +3358,6 @@
"version": "3.0.1", "version": "3.0.1",
"resolved": "https://registry.npmjs.org/dir-glob/-/dir-glob-3.0.1.tgz", "resolved": "https://registry.npmjs.org/dir-glob/-/dir-glob-3.0.1.tgz",
"integrity": "sha512-WkrWp9GR4KXfKGYzOLmTuGVi1UWFfws377n9cc55/tb6DuqyF6pcQ5AbiHEshaDpY9v6oaSr2XCDidGmMwdzIA==", "integrity": "sha512-WkrWp9GR4KXfKGYzOLmTuGVi1UWFfws377n9cc55/tb6DuqyF6pcQ5AbiHEshaDpY9v6oaSr2XCDidGmMwdzIA==",
"dev": true,
"dependencies": { "dependencies": {
"path-type": "^4.0.0" "path-type": "^4.0.0"
}, },
@@ -3291,7 +3374,6 @@
"version": "3.0.0", "version": "3.0.0",
"resolved": "https://registry.npmjs.org/doctrine/-/doctrine-3.0.0.tgz", "resolved": "https://registry.npmjs.org/doctrine/-/doctrine-3.0.0.tgz",
"integrity": "sha512-yS+Q5i3hBf7GBkd4KG8a7eBNNWNGLTaEwwYWUijIYM7zrlYDM0BFXHjjPWlWZ1Rg7UaddZeIDmi9jF3HmqiQ2w==", "integrity": "sha512-yS+Q5i3hBf7GBkd4KG8a7eBNNWNGLTaEwwYWUijIYM7zrlYDM0BFXHjjPWlWZ1Rg7UaddZeIDmi9jF3HmqiQ2w==",
"dev": true,
"dependencies": { "dependencies": {
"esutils": "^2.0.2" "esutils": "^2.0.2"
}, },
@@ -3853,7 +3935,6 @@
"version": "8.46.0", "version": "8.46.0",
"resolved": "https://registry.npmjs.org/eslint/-/eslint-8.46.0.tgz", "resolved": "https://registry.npmjs.org/eslint/-/eslint-8.46.0.tgz",
"integrity": "sha512-cIO74PvbW0qU8e0mIvk5IV3ToWdCq5FYG6gWPHHkx6gNdjlbAYvtfHmlCMXxjcoVaIdwy/IAt3+mDkZkfvb2Dg==", "integrity": "sha512-cIO74PvbW0qU8e0mIvk5IV3ToWdCq5FYG6gWPHHkx6gNdjlbAYvtfHmlCMXxjcoVaIdwy/IAt3+mDkZkfvb2Dg==",
"dev": true,
"dependencies": { "dependencies": {
"@eslint-community/eslint-utils": "^4.2.0", "@eslint-community/eslint-utils": "^4.2.0",
"@eslint-community/regexpp": "^4.6.1", "@eslint-community/regexpp": "^4.6.1",
@@ -4200,7 +4281,6 @@
"version": "7.2.2", "version": "7.2.2",
"resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-7.2.2.tgz", "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-7.2.2.tgz",
"integrity": "sha512-dOt21O7lTMhDM+X9mB4GX+DZrZtCUJPL/wlcTqxyrx5IvO0IYtILdtrQGQp+8n5S0gwSVmOf9NQrjMOgfQZlIg==", "integrity": "sha512-dOt21O7lTMhDM+X9mB4GX+DZrZtCUJPL/wlcTqxyrx5IvO0IYtILdtrQGQp+8n5S0gwSVmOf9NQrjMOgfQZlIg==",
"dev": true,
"dependencies": { "dependencies": {
"esrecurse": "^4.3.0", "esrecurse": "^4.3.0",
"estraverse": "^5.2.0" "estraverse": "^5.2.0"
@@ -4216,7 +4296,6 @@
"version": "3.4.3", "version": "3.4.3",
"resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-3.4.3.tgz", "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-3.4.3.tgz",
"integrity": "sha512-wpc+LXeiyiisxPlEkUzU6svyS1frIO3Mgxj1fdy7Pm8Ygzguax2N3Fa/D/ag1WqbOprdI+uY6wMUl8/a2G+iag==", "integrity": "sha512-wpc+LXeiyiisxPlEkUzU6svyS1frIO3Mgxj1fdy7Pm8Ygzguax2N3Fa/D/ag1WqbOprdI+uY6wMUl8/a2G+iag==",
"dev": true,
"engines": { "engines": {
"node": "^12.22.0 || ^14.17.0 || >=16.0.0" "node": "^12.22.0 || ^14.17.0 || >=16.0.0"
}, },
@@ -4228,7 +4307,6 @@
"version": "9.6.1", "version": "9.6.1",
"resolved": "https://registry.npmjs.org/espree/-/espree-9.6.1.tgz", "resolved": "https://registry.npmjs.org/espree/-/espree-9.6.1.tgz",
"integrity": "sha512-oruZaFkjorTpF32kDSI5/75ViwGeZginGGy2NoOSg3Q9bnwlnmDm4HLnkl0RE3n+njDXR037aY1+x58Z/zFdwQ==", "integrity": "sha512-oruZaFkjorTpF32kDSI5/75ViwGeZginGGy2NoOSg3Q9bnwlnmDm4HLnkl0RE3n+njDXR037aY1+x58Z/zFdwQ==",
"dev": true,
"dependencies": { "dependencies": {
"acorn": "^8.9.0", "acorn": "^8.9.0",
"acorn-jsx": "^5.3.2", "acorn-jsx": "^5.3.2",
@@ -4258,7 +4336,6 @@
"version": "1.5.0", "version": "1.5.0",
"resolved": "https://registry.npmjs.org/esquery/-/esquery-1.5.0.tgz", "resolved": "https://registry.npmjs.org/esquery/-/esquery-1.5.0.tgz",
"integrity": "sha512-YQLXUplAwJgCydQ78IMJywZCceoqk1oH01OERdSAJc/7U2AylwjhSCLDEtqwg811idIS/9fIU5GjG73IgjKMVg==", "integrity": "sha512-YQLXUplAwJgCydQ78IMJywZCceoqk1oH01OERdSAJc/7U2AylwjhSCLDEtqwg811idIS/9fIU5GjG73IgjKMVg==",
"dev": true,
"dependencies": { "dependencies": {
"estraverse": "^5.1.0" "estraverse": "^5.1.0"
}, },
@@ -4270,7 +4347,6 @@
"version": "4.3.0", "version": "4.3.0",
"resolved": "https://registry.npmjs.org/esrecurse/-/esrecurse-4.3.0.tgz", "resolved": "https://registry.npmjs.org/esrecurse/-/esrecurse-4.3.0.tgz",
"integrity": "sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag==", "integrity": "sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag==",
"dev": true,
"dependencies": { "dependencies": {
"estraverse": "^5.2.0" "estraverse": "^5.2.0"
}, },
@@ -4282,7 +4358,6 @@
"version": "5.3.0", "version": "5.3.0",
"resolved": "https://registry.npmjs.org/estraverse/-/estraverse-5.3.0.tgz", "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-5.3.0.tgz",
"integrity": "sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==", "integrity": "sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==",
"dev": true,
"engines": { "engines": {
"node": ">=4.0" "node": ">=4.0"
} }
@@ -4297,7 +4372,6 @@
"version": "2.0.3", "version": "2.0.3",
"resolved": "https://registry.npmjs.org/esutils/-/esutils-2.0.3.tgz", "resolved": "https://registry.npmjs.org/esutils/-/esutils-2.0.3.tgz",
"integrity": "sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==", "integrity": "sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==",
"dev": true,
"engines": { "engines": {
"node": ">=0.10.0" "node": ">=0.10.0"
} }
@@ -4381,14 +4455,12 @@
"node_modules/fast-json-stable-stringify": { "node_modules/fast-json-stable-stringify": {
"version": "2.1.0", "version": "2.1.0",
"resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz", "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz",
"integrity": "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==", "integrity": "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw=="
"dev": true
}, },
"node_modules/fast-levenshtein": { "node_modules/fast-levenshtein": {
"version": "2.0.6", "version": "2.0.6",
"resolved": "https://registry.npmjs.org/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz", "resolved": "https://registry.npmjs.org/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz",
"integrity": "sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw==", "integrity": "sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw=="
"dev": true
}, },
"node_modules/fast-memoize": { "node_modules/fast-memoize": {
"version": "2.5.2", "version": "2.5.2",
@@ -4414,7 +4486,6 @@
"version": "6.0.1", "version": "6.0.1",
"resolved": "https://registry.npmjs.org/file-entry-cache/-/file-entry-cache-6.0.1.tgz", "resolved": "https://registry.npmjs.org/file-entry-cache/-/file-entry-cache-6.0.1.tgz",
"integrity": "sha512-7Gps/XWymbLk2QLYK4NzpMOrYjMhdIxXuIvy2QBsLE6ljuodKvdkWs/cpyJJ3CVIVpH0Oi1Hvg1ovbMzLdFBBg==", "integrity": "sha512-7Gps/XWymbLk2QLYK4NzpMOrYjMhdIxXuIvy2QBsLE6ljuodKvdkWs/cpyJJ3CVIVpH0Oi1Hvg1ovbMzLdFBBg==",
"dev": true,
"dependencies": { "dependencies": {
"flat-cache": "^3.0.4" "flat-cache": "^3.0.4"
}, },
@@ -4442,7 +4513,6 @@
"version": "5.0.0", "version": "5.0.0",
"resolved": "https://registry.npmjs.org/find-up/-/find-up-5.0.0.tgz", "resolved": "https://registry.npmjs.org/find-up/-/find-up-5.0.0.tgz",
"integrity": "sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng==", "integrity": "sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng==",
"dev": true,
"dependencies": { "dependencies": {
"locate-path": "^6.0.0", "locate-path": "^6.0.0",
"path-exists": "^4.0.0" "path-exists": "^4.0.0"
@@ -4458,7 +4528,6 @@
"version": "3.0.4", "version": "3.0.4",
"resolved": "https://registry.npmjs.org/flat-cache/-/flat-cache-3.0.4.tgz", "resolved": "https://registry.npmjs.org/flat-cache/-/flat-cache-3.0.4.tgz",
"integrity": "sha512-dm9s5Pw7Jc0GvMYbshN6zchCA9RgQlzzEZX3vylR9IqFfS8XciblUXOKfW6SiuJ0e13eDYZoZV5wdrev7P3Nwg==", "integrity": "sha512-dm9s5Pw7Jc0GvMYbshN6zchCA9RgQlzzEZX3vylR9IqFfS8XciblUXOKfW6SiuJ0e13eDYZoZV5wdrev7P3Nwg==",
"dev": true,
"dependencies": { "dependencies": {
"flatted": "^3.1.0", "flatted": "^3.1.0",
"rimraf": "^3.0.2" "rimraf": "^3.0.2"
@@ -4470,8 +4539,7 @@
"node_modules/flatted": { "node_modules/flatted": {
"version": "3.2.7", "version": "3.2.7",
"resolved": "https://registry.npmjs.org/flatted/-/flatted-3.2.7.tgz", "resolved": "https://registry.npmjs.org/flatted/-/flatted-3.2.7.tgz",
"integrity": "sha512-5nqDSxl8nn5BSNxyR3n4I6eDmbolI6WT+QqR547RwxQapgjQBmtktdP+HTBb/a/zLsbzERTONyUB5pefh5TtjQ==", "integrity": "sha512-5nqDSxl8nn5BSNxyR3n4I6eDmbolI6WT+QqR547RwxQapgjQBmtktdP+HTBb/a/zLsbzERTONyUB5pefh5TtjQ=="
"dev": true
}, },
"node_modules/follow-redirects": { "node_modules/follow-redirects": {
"version": "1.15.2", "version": "1.15.2",
@@ -4683,7 +4751,6 @@
"version": "7.1.7", "version": "7.1.7",
"resolved": "https://registry.npmjs.org/glob/-/glob-7.1.7.tgz", "resolved": "https://registry.npmjs.org/glob/-/glob-7.1.7.tgz",
"integrity": "sha512-OvD9ENzPLbegENnYP5UUfJIirTg4+XwMWGaQfQTY0JenxNvvIKP3U3/tAQSPIu/lHxXYSZmpXlUHeqAIdKzBLQ==", "integrity": "sha512-OvD9ENzPLbegENnYP5UUfJIirTg4+XwMWGaQfQTY0JenxNvvIKP3U3/tAQSPIu/lHxXYSZmpXlUHeqAIdKzBLQ==",
"dev": true,
"dependencies": { "dependencies": {
"fs.realpath": "^1.0.0", "fs.realpath": "^1.0.0",
"inflight": "^1.0.4", "inflight": "^1.0.4",
@@ -4719,7 +4786,6 @@
"version": "13.21.0", "version": "13.21.0",
"resolved": "https://registry.npmjs.org/globals/-/globals-13.21.0.tgz", "resolved": "https://registry.npmjs.org/globals/-/globals-13.21.0.tgz",
"integrity": "sha512-ybyme3s4yy/t/3s35bewwXKOf7cvzfreG2lH0lZl0JB7I4GxRP2ghxOK/Nb9EkRXdbBXZLfq/p/0W2JUONB/Gg==", "integrity": "sha512-ybyme3s4yy/t/3s35bewwXKOf7cvzfreG2lH0lZl0JB7I4GxRP2ghxOK/Nb9EkRXdbBXZLfq/p/0W2JUONB/Gg==",
"dev": true,
"dependencies": { "dependencies": {
"type-fest": "^0.20.2" "type-fest": "^0.20.2"
}, },
@@ -4749,7 +4815,6 @@
"version": "11.1.0", "version": "11.1.0",
"resolved": "https://registry.npmjs.org/globby/-/globby-11.1.0.tgz", "resolved": "https://registry.npmjs.org/globby/-/globby-11.1.0.tgz",
"integrity": "sha512-jhIXaOzy1sb8IyocaruWSn1TjmnBVs8Ayhcy83rmxNJ8q2uWKCAj3CnJY+KpGSXCueAPc0i05kVvVKtP1t9S3g==", "integrity": "sha512-jhIXaOzy1sb8IyocaruWSn1TjmnBVs8Ayhcy83rmxNJ8q2uWKCAj3CnJY+KpGSXCueAPc0i05kVvVKtP1t9S3g==",
"dev": true,
"dependencies": { "dependencies": {
"array-union": "^2.1.0", "array-union": "^2.1.0",
"dir-glob": "^3.0.1", "dir-glob": "^3.0.1",
@@ -4793,8 +4858,7 @@
"node_modules/graphemer": { "node_modules/graphemer": {
"version": "1.4.0", "version": "1.4.0",
"resolved": "https://registry.npmjs.org/graphemer/-/graphemer-1.4.0.tgz", "resolved": "https://registry.npmjs.org/graphemer/-/graphemer-1.4.0.tgz",
"integrity": "sha512-EtKwoO6kxCL9WO5xipiHTZlSzBm7WLT627TqC/uVRd0HKmq8NXyebnNYxDoBi7wt8eTWrUrKXCOVaFq9x1kgag==", "integrity": "sha512-EtKwoO6kxCL9WO5xipiHTZlSzBm7WLT627TqC/uVRd0HKmq8NXyebnNYxDoBi7wt8eTWrUrKXCOVaFq9x1kgag=="
"dev": true
}, },
"node_modules/has": { "node_modules/has": {
"version": "1.0.3", "version": "1.0.3",
@@ -4820,7 +4884,6 @@
"version": "4.0.0", "version": "4.0.0",
"resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz",
"integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==",
"dev": true,
"engines": { "engines": {
"node": ">=8" "node": ">=8"
} }
@@ -5059,7 +5122,6 @@
"version": "5.2.4", "version": "5.2.4",
"resolved": "https://registry.npmjs.org/ignore/-/ignore-5.2.4.tgz", "resolved": "https://registry.npmjs.org/ignore/-/ignore-5.2.4.tgz",
"integrity": "sha512-MAb38BcSbH0eHNBxn7ql2NH/kX33OkB3lZ1BNdh7ENeRChHTYsTvWrMubiIAMNS2llXEEgZ1MUOBtXChP3kaFQ==", "integrity": "sha512-MAb38BcSbH0eHNBxn7ql2NH/kX33OkB3lZ1BNdh7ENeRChHTYsTvWrMubiIAMNS2llXEEgZ1MUOBtXChP3kaFQ==",
"dev": true,
"engines": { "engines": {
"node": ">= 4" "node": ">= 4"
} }
@@ -5093,7 +5155,6 @@
"version": "0.1.4", "version": "0.1.4",
"resolved": "https://registry.npmjs.org/imurmurhash/-/imurmurhash-0.1.4.tgz", "resolved": "https://registry.npmjs.org/imurmurhash/-/imurmurhash-0.1.4.tgz",
"integrity": "sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA==", "integrity": "sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA==",
"dev": true,
"engines": { "engines": {
"node": ">=0.8.19" "node": ">=0.8.19"
} }
@@ -5297,7 +5358,6 @@
"version": "3.0.3", "version": "3.0.3",
"resolved": "https://registry.npmjs.org/is-path-inside/-/is-path-inside-3.0.3.tgz", "resolved": "https://registry.npmjs.org/is-path-inside/-/is-path-inside-3.0.3.tgz",
"integrity": "sha512-Fd4gABb+ycGAmKou8eMftCupSir5lRxqf4aD/vd0cD2qc4HL07OjCeuHMr8Ro4CoMaeCKDB0/ECBOVWjTwUvPQ==", "integrity": "sha512-Fd4gABb+ycGAmKou8eMftCupSir5lRxqf4aD/vd0cD2qc4HL07OjCeuHMr8Ro4CoMaeCKDB0/ECBOVWjTwUvPQ==",
"dev": true,
"engines": { "engines": {
"node": ">=8" "node": ">=8"
} }
@@ -5417,8 +5477,7 @@
"node_modules/isexe": { "node_modules/isexe": {
"version": "2.0.0", "version": "2.0.0",
"resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz",
"integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==", "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw=="
"dev": true
}, },
"node_modules/jiti": { "node_modules/jiti": {
"version": "1.19.1", "version": "1.19.1",
@@ -5437,7 +5496,6 @@
"version": "4.1.0", "version": "4.1.0",
"resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz", "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz",
"integrity": "sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==", "integrity": "sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==",
"dev": true,
"dependencies": { "dependencies": {
"argparse": "^2.0.1" "argparse": "^2.0.1"
}, },
@@ -5538,14 +5596,12 @@
"node_modules/json-schema-traverse": { "node_modules/json-schema-traverse": {
"version": "0.4.1", "version": "0.4.1",
"resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz",
"integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==", "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg=="
"dev": true
}, },
"node_modules/json-stable-stringify-without-jsonify": { "node_modules/json-stable-stringify-without-jsonify": {
"version": "1.0.1", "version": "1.0.1",
"resolved": "https://registry.npmjs.org/json-stable-stringify-without-jsonify/-/json-stable-stringify-without-jsonify-1.0.1.tgz", "resolved": "https://registry.npmjs.org/json-stable-stringify-without-jsonify/-/json-stable-stringify-without-jsonify-1.0.1.tgz",
"integrity": "sha512-Bdboy+l7tA3OGW6FjyFHWkP5LuByj1Tk33Ljyq0axyzdk9//JSi2u3fP1QSmd1KNwq6VOKYGlAu87CisVir6Pw==", "integrity": "sha512-Bdboy+l7tA3OGW6FjyFHWkP5LuByj1Tk33Ljyq0axyzdk9//JSi2u3fP1QSmd1KNwq6VOKYGlAu87CisVir6Pw=="
"dev": true
}, },
"node_modules/json5": { "node_modules/json5": {
"version": "1.0.2", "version": "1.0.2",
@@ -5646,7 +5702,6 @@
"version": "0.4.1", "version": "0.4.1",
"resolved": "https://registry.npmjs.org/levn/-/levn-0.4.1.tgz", "resolved": "https://registry.npmjs.org/levn/-/levn-0.4.1.tgz",
"integrity": "sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ==", "integrity": "sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ==",
"dev": true,
"dependencies": { "dependencies": {
"prelude-ls": "^1.2.1", "prelude-ls": "^1.2.1",
"type-check": "~0.4.0" "type-check": "~0.4.0"
@@ -5672,7 +5727,6 @@
"version": "6.0.0", "version": "6.0.0",
"resolved": "https://registry.npmjs.org/locate-path/-/locate-path-6.0.0.tgz", "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-6.0.0.tgz",
"integrity": "sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw==", "integrity": "sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw==",
"dev": true,
"dependencies": { "dependencies": {
"p-locate": "^5.0.0" "p-locate": "^5.0.0"
}, },
@@ -5708,8 +5762,7 @@
"node_modules/lodash.merge": { "node_modules/lodash.merge": {
"version": "4.6.2", "version": "4.6.2",
"resolved": "https://registry.npmjs.org/lodash.merge/-/lodash.merge-4.6.2.tgz", "resolved": "https://registry.npmjs.org/lodash.merge/-/lodash.merge-4.6.2.tgz",
"integrity": "sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==", "integrity": "sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ=="
"dev": true
}, },
"node_modules/lodash.omit": { "node_modules/lodash.omit": {
"version": "4.5.0", "version": "4.5.0",
@@ -5762,7 +5815,6 @@
"version": "6.0.0", "version": "6.0.0",
"resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz",
"integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==",
"dev": true,
"dependencies": { "dependencies": {
"yallist": "^4.0.0" "yallist": "^4.0.0"
}, },
@@ -5888,8 +5940,7 @@
"node_modules/ms": { "node_modules/ms": {
"version": "2.1.2", "version": "2.1.2",
"resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz",
"integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w=="
"dev": true
}, },
"node_modules/mz": { "node_modules/mz": {
"version": "2.7.0", "version": "2.7.0",
@@ -5921,8 +5972,12 @@
"node_modules/natural-compare": { "node_modules/natural-compare": {
"version": "1.4.0", "version": "1.4.0",
"resolved": "https://registry.npmjs.org/natural-compare/-/natural-compare-1.4.0.tgz", "resolved": "https://registry.npmjs.org/natural-compare/-/natural-compare-1.4.0.tgz",
"integrity": "sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw==", "integrity": "sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw=="
"dev": true },
"node_modules/natural-compare-lite": {
"version": "1.4.0",
"resolved": "https://registry.npmjs.org/natural-compare-lite/-/natural-compare-lite-1.4.0.tgz",
"integrity": "sha512-Tj+HTDSJJKaZnfiuw+iaF9skdPpTo2GtEly5JHnWV/hfv2Qj/9RKsGISQtLh2ox3l5EAGw487hnBee0sIJ6v2g=="
}, },
"node_modules/next": { "node_modules/next": {
"version": "13.4.12", "version": "13.4.12",
@@ -6356,7 +6411,6 @@
"version": "0.9.3", "version": "0.9.3",
"resolved": "https://registry.npmjs.org/optionator/-/optionator-0.9.3.tgz", "resolved": "https://registry.npmjs.org/optionator/-/optionator-0.9.3.tgz",
"integrity": "sha512-JjCoypp+jKn1ttEFExxhetCKeJt9zhAgAve5FXHixTvFDW/5aEktX9bufBKLRRMdU7bNtpLfcGu94B3cdEJgjg==", "integrity": "sha512-JjCoypp+jKn1ttEFExxhetCKeJt9zhAgAve5FXHixTvFDW/5aEktX9bufBKLRRMdU7bNtpLfcGu94B3cdEJgjg==",
"dev": true,
"dependencies": { "dependencies": {
"@aashutoshrathi/word-wrap": "^1.2.3", "@aashutoshrathi/word-wrap": "^1.2.3",
"deep-is": "^0.1.3", "deep-is": "^0.1.3",
@@ -6426,7 +6480,6 @@
"version": "3.1.0", "version": "3.1.0",
"resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz", "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz",
"integrity": "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==", "integrity": "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==",
"dev": true,
"dependencies": { "dependencies": {
"yocto-queue": "^0.1.0" "yocto-queue": "^0.1.0"
}, },
@@ -6441,7 +6494,6 @@
"version": "5.0.0", "version": "5.0.0",
"resolved": "https://registry.npmjs.org/p-locate/-/p-locate-5.0.0.tgz", "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-5.0.0.tgz",
"integrity": "sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw==", "integrity": "sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw==",
"dev": true,
"dependencies": { "dependencies": {
"p-limit": "^3.0.2" "p-limit": "^3.0.2"
}, },
@@ -6505,7 +6557,6 @@
"version": "4.0.0", "version": "4.0.0",
"resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz", "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz",
"integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==", "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==",
"dev": true,
"engines": { "engines": {
"node": ">=8" "node": ">=8"
} }
@@ -6522,7 +6573,6 @@
"version": "3.1.1", "version": "3.1.1",
"resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz",
"integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==",
"dev": true,
"engines": { "engines": {
"node": ">=8" "node": ">=8"
} }
@@ -6717,7 +6767,6 @@
"version": "1.2.1", "version": "1.2.1",
"resolved": "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.2.1.tgz", "resolved": "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.2.1.tgz",
"integrity": "sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g==", "integrity": "sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g==",
"dev": true,
"engines": { "engines": {
"node": ">= 0.8.0" "node": ">= 0.8.0"
} }
@@ -7184,7 +7233,6 @@
"version": "3.0.2", "version": "3.0.2",
"resolved": "https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz", "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz",
"integrity": "sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==", "integrity": "sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==",
"dev": true,
"dependencies": { "dependencies": {
"glob": "^7.1.3" "glob": "^7.1.3"
}, },
@@ -7282,7 +7330,6 @@
"version": "7.5.4", "version": "7.5.4",
"resolved": "https://registry.npmjs.org/semver/-/semver-7.5.4.tgz", "resolved": "https://registry.npmjs.org/semver/-/semver-7.5.4.tgz",
"integrity": "sha512-1bCSESV6Pv+i21Hvpxp3Dx+pSD8lIPt8uVjRrxAUt/nbswYc+tK6Y2btiULjd4+fnq15PX+nqQDC7Oft7WkwcA==", "integrity": "sha512-1bCSESV6Pv+i21Hvpxp3Dx+pSD8lIPt8uVjRrxAUt/nbswYc+tK6Y2btiULjd4+fnq15PX+nqQDC7Oft7WkwcA==",
"dev": true,
"dependencies": { "dependencies": {
"lru-cache": "^6.0.0" "lru-cache": "^6.0.0"
}, },
@@ -7297,7 +7344,6 @@
"version": "2.0.0", "version": "2.0.0",
"resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz",
"integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==",
"dev": true,
"dependencies": { "dependencies": {
"shebang-regex": "^3.0.0" "shebang-regex": "^3.0.0"
}, },
@@ -7309,7 +7355,6 @@
"version": "3.0.0", "version": "3.0.0",
"resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz",
"integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==",
"dev": true,
"engines": { "engines": {
"node": ">=8" "node": ">=8"
} }
@@ -7404,7 +7449,6 @@
"version": "3.0.0", "version": "3.0.0",
"resolved": "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz", "resolved": "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz",
"integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==", "integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==",
"dev": true,
"engines": { "engines": {
"node": ">=8" "node": ">=8"
} }
@@ -7553,7 +7597,6 @@
"version": "6.0.1", "version": "6.0.1",
"resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz",
"integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==",
"dev": true,
"dependencies": { "dependencies": {
"ansi-regex": "^5.0.1" "ansi-regex": "^5.0.1"
}, },
@@ -7583,7 +7626,6 @@
"version": "3.1.1", "version": "3.1.1",
"resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.1.1.tgz", "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.1.1.tgz",
"integrity": "sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==", "integrity": "sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==",
"dev": true,
"engines": { "engines": {
"node": ">=8" "node": ">=8"
}, },
@@ -7662,7 +7704,6 @@
"version": "7.2.0", "version": "7.2.0",
"resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz",
"integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==",
"dev": true,
"dependencies": { "dependencies": {
"has-flag": "^4.0.0" "has-flag": "^4.0.0"
}, },
@@ -7768,8 +7809,7 @@
"node_modules/text-table": { "node_modules/text-table": {
"version": "0.2.0", "version": "0.2.0",
"resolved": "https://registry.npmjs.org/text-table/-/text-table-0.2.0.tgz", "resolved": "https://registry.npmjs.org/text-table/-/text-table-0.2.0.tgz",
"integrity": "sha512-N+8UisAXDGk8PFXP4HAzVR9nbfmVJ3zYLAWiTIoqC5v5isinhr+r5uaO8+7r3BMfuNIufIsA7RdpVgacC2cSpw==", "integrity": "sha512-N+8UisAXDGk8PFXP4HAzVR9nbfmVJ3zYLAWiTIoqC5v5isinhr+r5uaO8+7r3BMfuNIufIsA7RdpVgacC2cSpw=="
"dev": true
}, },
"node_modules/thenify": { "node_modules/thenify": {
"version": "3.3.1", "version": "3.3.1",
@@ -7861,7 +7901,6 @@
"version": "3.21.0", "version": "3.21.0",
"resolved": "https://registry.npmjs.org/tsutils/-/tsutils-3.21.0.tgz", "resolved": "https://registry.npmjs.org/tsutils/-/tsutils-3.21.0.tgz",
"integrity": "sha512-mHKK3iUXL+3UF6xL5k0PEhKRUBKPBCv/+RkEOpjRWxxx27KKRBmmA60A9pgOUvMi8GKhRMPEmjBRPzs2W7O1OA==", "integrity": "sha512-mHKK3iUXL+3UF6xL5k0PEhKRUBKPBCv/+RkEOpjRWxxx27KKRBmmA60A9pgOUvMi8GKhRMPEmjBRPzs2W7O1OA==",
"dev": true,
"dependencies": { "dependencies": {
"tslib": "^1.8.1" "tslib": "^1.8.1"
}, },
@@ -7875,14 +7914,12 @@
"node_modules/tsutils/node_modules/tslib": { "node_modules/tsutils/node_modules/tslib": {
"version": "1.14.1", "version": "1.14.1",
"resolved": "https://registry.npmjs.org/tslib/-/tslib-1.14.1.tgz", "resolved": "https://registry.npmjs.org/tslib/-/tslib-1.14.1.tgz",
"integrity": "sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg==", "integrity": "sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg=="
"dev": true
}, },
"node_modules/type-check": { "node_modules/type-check": {
"version": "0.4.0", "version": "0.4.0",
"resolved": "https://registry.npmjs.org/type-check/-/type-check-0.4.0.tgz", "resolved": "https://registry.npmjs.org/type-check/-/type-check-0.4.0.tgz",
"integrity": "sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew==", "integrity": "sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew==",
"dev": true,
"dependencies": { "dependencies": {
"prelude-ls": "^1.2.1" "prelude-ls": "^1.2.1"
}, },
@@ -7894,7 +7931,6 @@
"version": "0.20.2", "version": "0.20.2",
"resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.20.2.tgz", "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.20.2.tgz",
"integrity": "sha512-Ne+eE4r0/iWnpAxD852z3A+N0Bt5RN//NjJwRd2VFHEmrywxf5vsZlh4R6lixl6B+wz/8d+maTSAkN1FIkI3LQ==", "integrity": "sha512-Ne+eE4r0/iWnpAxD852z3A+N0Bt5RN//NjJwRd2VFHEmrywxf5vsZlh4R6lixl6B+wz/8d+maTSAkN1FIkI3LQ==",
"dev": true,
"engines": { "engines": {
"node": ">=10" "node": ">=10"
}, },
@@ -7971,7 +8007,6 @@
"version": "5.1.6", "version": "5.1.6",
"resolved": "https://registry.npmjs.org/typescript/-/typescript-5.1.6.tgz", "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.1.6.tgz",
"integrity": "sha512-zaWCozRZ6DLEWAWFrVDz1H6FVXzUSfTy5FUMWsQlU8Ym5JP9eO4xkTIROFCQvhQf61z6O/G6ugw3SgAnvvm+HA==", "integrity": "sha512-zaWCozRZ6DLEWAWFrVDz1H6FVXzUSfTy5FUMWsQlU8Ym5JP9eO4xkTIROFCQvhQf61z6O/G6ugw3SgAnvvm+HA==",
"dev": true,
"bin": { "bin": {
"tsc": "bin/tsc", "tsc": "bin/tsc",
"tsserver": "bin/tsserver" "tsserver": "bin/tsserver"
@@ -8186,7 +8221,6 @@
"version": "2.0.2", "version": "2.0.2",
"resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz",
"integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==",
"dev": true,
"dependencies": { "dependencies": {
"isexe": "^2.0.0" "isexe": "^2.0.0"
}, },
@@ -8266,8 +8300,7 @@
"node_modules/yallist": { "node_modules/yallist": {
"version": "4.0.0", "version": "4.0.0",
"resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz",
"integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A=="
"dev": true
}, },
"node_modules/yaml": { "node_modules/yaml": {
"version": "1.10.2", "version": "1.10.2",
@@ -8314,7 +8347,6 @@
"version": "0.1.0", "version": "0.1.0",
"resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz", "resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz",
"integrity": "sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==", "integrity": "sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==",
"dev": true,
"engines": { "engines": {
"node": ">=10" "node": ">=10"
}, },

View File

@@ -20,6 +20,7 @@
"@rjsf/mui": "^5.12.1", "@rjsf/mui": "^5.12.1",
"@rjsf/validator-ajv8": "^5.12.1", "@rjsf/validator-ajv8": "^5.12.1",
"@types/json-schema": "^7.0.12", "@types/json-schema": "^7.0.12",
"@typescript-eslint/eslint-plugin": "^5.62.0",
"autoprefixer": "10.4.14", "autoprefixer": "10.4.14",
"axios": "^1.4.0", "axios": "^1.4.0",
"classnames": "^2.3.2", "classnames": "^2.3.2",

View File

@@ -1,5 +1,10 @@
import { MachineContextProvider } from "@/components/hooks/useMachines"; import { MachineContextProvider } from "@/components/hooks/useMachines";
export default function Layout({ children }: { children: React.ReactNode }) { export default function Layout({ children }: { children: React.ReactNode }) {
return <MachineContextProvider>{children}</MachineContextProvider>; return (
// TODO: select flake?
<MachineContextProvider flakeName="defaultFlake">
{children}
</MachineContextProvider>
);
} }

View File

@@ -1,12 +1,7 @@
"use client"; "use client";
import { NodeTable } from "@/components/table"; import { NodeTable } from "@/components/table";
import { StrictMode } from "react";
export default function Page() { export default function Page() {
return ( return <NodeTable />;
<StrictMode>
<NodeTable />
</StrictMode>
);
} }

View File

@@ -33,7 +33,10 @@ interface PureCustomConfigProps extends FormStepContentProps {
} }
export function CustomConfig(props: FormStepContentProps) { export function CustomConfig(props: FormStepContentProps) {
const { formHooks } = props; const { formHooks } = props;
const { data, isLoading, error } = useGetMachineSchema("mama"); const { data, isLoading, error } = useGetMachineSchema(
"defaultFlake",
"mama",
);
// const { data, isLoading, error } = { data: {data:{schema: { // const { data, isLoading, error } = { data: {data:{schema: {
// title: 'Test form', // title: 'Test form',
// type: 'object', // type: 'object',
@@ -53,11 +56,11 @@ export function CustomConfig(props: FormStepContentProps) {
return {}; return {};
}, [data, isLoading, error]); }, [data, isLoading, error]);
type ValueType = { default: any };
const initialValues = useMemo( const initialValues = useMemo(
() => () =>
Object.entries(schema?.properties || {}).reduce((acc, [key, value]) => { Object.entries(schema?.properties || {}).reduce((acc, [key, value]) => {
/*@ts-ignore*/ const init: any = (value as ValueType)?.default;
const init: any = value?.default;
if (init) { if (init) {
return { return {
...acc, ...acc,
@@ -157,7 +160,7 @@ function PureCustomConfig(props: PureCustomConfigProps) {
// ObjectFieldTemplate: // ObjectFieldTemplate:
ErrorListTemplate: ErrorList, ErrorListTemplate: ErrorList,
ButtonTemplates: { ButtonTemplates: {
SubmitButton: (props) => ( SubmitButton: () => (
<div className="flex w-full items-center justify-center"> <div className="flex w-full items-center justify-center">
<Button <Button
onClick={validate} onClick={validate}

View File

@@ -113,7 +113,7 @@ export function CreateMachineForm() {
)} )}
{!isMobile && ( {!isMobile && (
<Stepper activeStep={activeStep} color="secondary"> <Stepper activeStep={activeStep} color="secondary">
{steps.map(({ label }, index) => { {steps.map(({ label }) => {
const stepProps: { completed?: boolean } = {}; const stepProps: { completed?: boolean } = {};
const labelProps: { const labelProps: {
optional?: React.ReactNode; optional?: React.ReactNode;

View File

@@ -2,10 +2,10 @@ import { useListMachines } from "@/api/default/default";
import { MachinesResponse } from "@/api/model"; import { MachinesResponse } from "@/api/model";
import { AxiosError, AxiosResponse } from "axios"; import { AxiosError, AxiosResponse } from "axios";
import React, { import React, {
createContext,
Dispatch, Dispatch,
ReactNode, ReactNode,
SetStateAction, SetStateAction,
createContext,
useState, useState,
} from "react"; } from "react";
import { KeyedMutator } from "swr"; import { KeyedMutator } from "swr";
@@ -38,7 +38,7 @@ interface AppContextProviderProps {
} }
export const WithAppState = (props: AppContextProviderProps) => { export const WithAppState = (props: AppContextProviderProps) => {
const { children } = props; const { children } = props;
const { isLoading, error, mutate, swrKey } = useListMachines(); const { isLoading, error, mutate, swrKey } = useListMachines("defaultFlake");
const [data, setAppState] = useState<AppState>({ isJoined: false }); const [data, setAppState] = useState<AppState>({ isJoined: false });

View File

@@ -1,7 +1,7 @@
import { useState, useEffect } from "react"; import { useEffect, useState } from "react";
export function useDebounce(value: any, delay: number) { export function useDebounce<T>(value: T, delay: number) {
const [debouncedValue, setDebouncedValue] = useState(value); const [debouncedValue, setDebouncedValue] = useState<T>(value);
useEffect(() => { useEffect(() => {
const handler = setTimeout(() => { const handler = setTimeout(() => {

View File

@@ -24,6 +24,7 @@ type MachineContextType =
rawData: AxiosResponse<MachinesResponse, any> | undefined; rawData: AxiosResponse<MachinesResponse, any> | undefined;
data: Machine[]; data: Machine[];
isLoading: boolean; isLoading: boolean;
flakeName: string;
error: AxiosError<any> | undefined; error: AxiosError<any> | undefined;
isValidating: boolean; isValidating: boolean;
@@ -33,6 +34,7 @@ type MachineContextType =
swrKey: string | false | Record<any, any>; swrKey: string | false | Record<any, any>;
} }
| { | {
flakeName: string;
isLoading: true; isLoading: true;
data: readonly []; data: readonly [];
}; };
@@ -42,14 +44,22 @@ const initialState = {
data: [], data: [],
} as const; } as const;
export const MachineContext = createContext<MachineContextType>(initialState); export function CreateMachineContext(flakeName: string) {
return useMemo(() => {
return createContext<MachineContextType>({
...initialState,
flakeName,
});
}, [flakeName]);
}
interface MachineContextProviderProps { interface MachineContextProviderProps {
children: ReactNode; children: ReactNode;
flakeName: string;
} }
export const MachineContextProvider = (props: MachineContextProviderProps) => { export const MachineContextProvider = (props: MachineContextProviderProps) => {
const { children } = props; const { children, flakeName } = props;
const { const {
data: rawData, data: rawData,
isLoading, isLoading,
@@ -57,7 +67,7 @@ export const MachineContextProvider = (props: MachineContextProviderProps) => {
isValidating, isValidating,
mutate, mutate,
swrKey, swrKey,
} = useListMachines(); } = useListMachines(flakeName);
const [filters, setFilters] = useState<Filters>([]); const [filters, setFilters] = useState<Filters>([]);
const data = useMemo(() => { const data = useMemo(() => {
@@ -70,6 +80,8 @@ export const MachineContextProvider = (props: MachineContextProviderProps) => {
return []; return [];
}, [isLoading, error, isValidating, rawData, filters]); }, [isLoading, error, isValidating, rawData, filters]);
const MachineContext = CreateMachineContext(flakeName);
return ( return (
<MachineContext.Provider <MachineContext.Provider
value={{ value={{
@@ -77,6 +89,7 @@ export const MachineContextProvider = (props: MachineContextProviderProps) => {
data, data,
isLoading, isLoading,
flakeName,
error, error,
isValidating, isValidating,
@@ -92,4 +105,5 @@ export const MachineContextProvider = (props: MachineContextProviderProps) => {
); );
}; };
export const useMachines = () => React.useContext(MachineContext); export const useMachines = (flakeName: string) =>
React.useContext(CreateMachineContext(flakeName));

View File

@@ -16,7 +16,7 @@ import { SearchBar } from "./searchBar";
import { StickySpeedDial } from "./stickySpeedDial"; import { StickySpeedDial } from "./stickySpeedDial";
export function NodeTable() { export function NodeTable() {
const machines = useMachines(); const machines = useMachines("defaultFlake");
const theme = useTheme(); const theme = useTheme();
const is_xs = useMediaQuery(theme.breakpoints.only("xs")); const is_xs = useMediaQuery(theme.breakpoints.only("xs"));

View File

@@ -1,11 +1,11 @@
"use client"; "use client";
import { SetStateAction, Dispatch, useState, useEffect, useMemo } from "react";
import IconButton from "@mui/material/IconButton";
import SearchIcon from "@mui/icons-material/Search";
import { useDebounce } from "../hooks/useDebounce";
import { Autocomplete, InputAdornment, TextField } from "@mui/material";
import { Machine } from "@/api/model/machine"; import { Machine } from "@/api/model/machine";
import SearchIcon from "@mui/icons-material/Search";
import { Autocomplete, InputAdornment, TextField } from "@mui/material";
import IconButton from "@mui/material/IconButton";
import { Dispatch, SetStateAction, useEffect, useMemo, useState } from "react";
import { useDebounce } from "../hooks/useDebounce";
export interface SearchBarProps { export interface SearchBarProps {
tableData: readonly Machine[]; tableData: readonly Machine[];
@@ -13,7 +13,7 @@ export interface SearchBarProps {
} }
export function SearchBar(props: SearchBarProps) { export function SearchBar(props: SearchBarProps) {
let { tableData, setFilteredList } = props; const { tableData, setFilteredList } = props;
const [search, setSearch] = useState<string>(""); const [search, setSearch] = useState<string>("");
const debouncedSearch = useDebounce(search, 250); const debouncedSearch = useDebounce(search, 250);
const [open, setOpen] = useState(false); const [open, setOpen] = useState(false);

View File

@@ -28,11 +28,11 @@ function createData(
}; };
} }
var nameNumber = 0; let nameNumber = 0;
// A function to generate random names // A function to generate random names
function getRandomName(): string { function getRandomName(): string {
let names = [ const names = [
"Alice", "Alice",
"Bob", "Bob",
"Charlie", "Charlie",
@@ -53,7 +53,7 @@ function getRandomName(): string {
"Wendy", "Wendy",
"Zoe", "Zoe",
]; ];
let index = Math.floor(Math.random() * names.length); const index = Math.floor(Math.random() * names.length);
return names[index] + nameNumber++; return names[index] + nameNumber++;
} }
@@ -75,8 +75,12 @@ function getRandomName(): string {
// A function to generate random status keys // A function to generate random status keys
function getRandomStatus(): NodeStatusKeys { function getRandomStatus(): NodeStatusKeys {
let statusKeys = [NodeStatus.Online, NodeStatus.Offline, NodeStatus.Pending]; const statusKeys = [
let index = Math.floor(Math.random() * statusKeys.length); NodeStatus.Online,
NodeStatus.Offline,
NodeStatus.Pending,
];
const index = Math.floor(Math.random() * statusKeys.length);
return statusKeys[index]; return statusKeys[index];
} }
@@ -85,8 +89,8 @@ function getRandomLastSeen(status: NodeStatusKeys): number {
if (status === "online") { if (status === "online") {
return 0; return 0;
} else { } else {
let min = 1; // One day ago const min = 1; // One day ago
let max = 360; // One year ago const max = 360; // One year ago
return Math.floor(Math.random() * (max - min + 1) + min); return Math.floor(Math.random() * (max - min + 1) + min);
} }
} }
@@ -164,12 +168,12 @@ export const tableData = [
// A function to execute the createData function with dummy data in a loop 100 times and return an array // A function to execute the createData function with dummy data in a loop 100 times and return an array
export function executeCreateData(): TableData[] { export function executeCreateData(): TableData[] {
let result: TableData[] = []; const result: TableData[] = [];
for (let i = 0; i < 100; i++) { for (let i = 0; i < 100; i++) {
// Generate dummy data // Generate dummy data
let name = getRandomName(); const name = getRandomName();
let status = getRandomStatus(); const status = getRandomStatus();
let last_seen = getRandomLastSeen(status); const last_seen = getRandomLastSeen(status);
// Call the createData function and push the result to the array // Call the createData function and push the result to the array
result.push(createData(name, status, last_seen)); result.push(createData(name, status, last_seen));

View File

@@ -16,15 +16,16 @@ export default function JoinPrequel() {
const queryParams = useSearchParams(); const queryParams = useSearchParams();
const flakeUrl = queryParams.get("flake") || ""; const flakeUrl = queryParams.get("flake") || "";
const flakeAttr = queryParams.get("attr") || "default"; const flakeAttr = queryParams.get("attr") || "default";
const { handleSubmit, control, formState, getValues, reset } = const { control, formState, getValues, reset } = useForm<FormValues>({
useForm<FormValues>({ defaultValues: { flakeUrl: "" } }); defaultValues: { flakeUrl: "" },
});
return ( return (
<Layout> <Layout>
<Suspense fallback="Loading"> <Suspense fallback="Loading">
{!formState.isSubmitted && !flakeUrl && ( {!formState.isSubmitted && !flakeUrl && (
<form <form
onSubmit={handleSubmit(() => {})} // onSubmit={handleSubmit(() => {})}
className="w-full max-w-2xl justify-self-center" className="w-full max-w-2xl justify-self-center"
> >
<Controller <Controller

View File

@@ -1,7 +1,7 @@
{ {
description = "<Put your description here>"; description = "<Put your description here>";
inputs.clan-core.url = "git+https://git.clan.lol/clan/clan-core"; inputs.clan-core.url = "git+https://git.clan.lol/clan/clan-core?ref=Qubasa-main";
outputs = { self, clan-core, ... }: outputs = { self, clan-core, ... }:
let let
@@ -9,6 +9,7 @@
pkgs = clan-core.inputs.nixpkgs.legacyPackages.${system}; pkgs = clan-core.inputs.nixpkgs.legacyPackages.${system};
clan = clan-core.lib.buildClan { clan = clan-core.lib.buildClan {
directory = self; directory = self;
clanName = "__CHANGE_ME__";
}; };
in in
{ {