Compare commits
10 Commits
enable-mor
...
Qubasa-mai
- 7b49653aaf
- 1beb9a8ca0
- d1a79653fe
- 351ce1414a
- e2ccd979ed
- f5f3f96809
- 59253a9c71
- aa03adc581
- ffd84d50f7
- 679387e4ba
@@ -30,6 +30,7 @@
# this disables dynamic dependency loading in clan-cli
export CLAN_NO_DYNAMIC_DEPS=1
export IN_PYTEST=1
nix develop "$ROOT#clan-cli" -c bash -c "TMPDIR=/tmp python -m pytest -m impure ./tests $@"
'';
};
@@ -165,7 +165,6 @@
(modulesPath + "/../tests/common/auto-format-root-device.nix")
];
services.openssh.enable = true;
users.users.root.openssh.authorizedKeys.keyFiles = [ ../lib/ssh/pubkey ];
system.nixos.variant_id = "installer";
environment.systemPackages = [ pkgs.nixos-facter ];
virtualisation.emptyDiskImages = [ 512 ];
@@ -184,6 +183,12 @@
"flakes"
];
};
users.users.nonrootuser = {
isNormalUser = true;
openssh.authorizedKeys.keyFiles = [ ../lib/ssh/pubkey ];
extraGroups = [ "wheel" ];
};
security.sudo.wheelNeedsPassword = false;
system.extraDependencies = dependencies;
};
nodes.client = {
@@ -211,14 +216,14 @@
installer.start()

client.succeed("${pkgs.coreutils}/bin/install -Dm 600 ${../lib/ssh/privkey} /root/.ssh/id_ed25519")
client.wait_until_succeeds("timeout 2 ssh -o StrictHostKeyChecking=accept-new -v root@installer hostname")
client.wait_until_succeeds("timeout 2 ssh -o StrictHostKeyChecking=accept-new -v nonrootuser@installer hostname")
client.succeed("cp -r ${../..} test-flake && chmod -R +w test-flake")
client.fail("test -f test-flake/machines/test-install-machine-without-system/hardware-configuration.nix")
client.fail("test -f test-flake/machines/test-install-machine-without-system/facter.json")
client.succeed("clan machines update-hardware-config --flake test-flake test-install-machine-without-system root@installer >&2")
client.succeed("clan machines update-hardware-config --flake test-flake test-install-machine-without-system nonrootuser@installer >&2")
client.succeed("test -f test-flake/machines/test-install-machine-without-system/facter.json")
client.succeed("rm test-flake/machines/test-install-machine-without-system/facter.json")
client.succeed("clan machines install --debug --flake test-flake --yes test-install-machine-without-system --target-host root@installer --update-hardware-config nixos-facter >&2")
client.succeed("clan machines install --debug --flake test-flake --yes test-install-machine-without-system --target-host nonrootuser@installer --update-hardware-config nixos-facter >&2")
try:
installer.shutdown()
except BrokenPipeError:
decisions/02-clan-api.md — new file, 116 lines
@@ -0,0 +1,116 @@
# Clan as library

## Status

Accepted

## Context

In the long term we envision the clan application to consist of the following user-facing tools:

- `CLI`
- `TUI`
- `Desktop Application`
- `REST-API`
- `Mobile Application`

Not all of these may end up existing, but the architecture should be generic enough that they remain possible without major changes to the underlying system.
## Decision
This leads to the conclusion that we should do `library`-centric development,
with the current `clan` Python code being a library that can be imported to create various tools on top of it.
All **CLI** or **UI** related parts should be moved out of the main library.

*Note: The next person who wants to implement any new frontend should do this first. Currently it looks like the TUI is the next one.*

Imagine roughly the following architecture:
```mermaid
graph TD
    %% Define styles
    classDef frontend fill:#f9f,stroke:#333,stroke-width:2px;
    classDef backend fill:#bbf,stroke:#333,stroke-width:2px;
    classDef storage fill:#ff9,stroke:#333,stroke-width:2px;
    classDef testing fill:#cfc,stroke:#333,stroke-width:2px;

    %% Define nodes
    user(["User"]) -->|Interacts with| Frontends

    subgraph "Frontends"
        CLI["CLI"]:::frontend
        APP["Desktop App"]:::frontend
        TUI["TUI"]:::frontend
        REST["REST API"]:::frontend
    end

    subgraph "Python"
        API["Library <br>for interacting with clan"]:::backend
        BusinessLogic["Business Logic<br>Implements actions like 'machine create'"]:::backend
        STORAGE[("Persistence")]:::storage
        NIX["Nix Eval & Build"]:::backend
    end

    subgraph "CI/CD & Tests"
        TEST["Feature Testing"]:::testing
    end

    %% Define connections
    CLI --> API
    APP --> API
    TUI --> API
    REST --> API

    TEST --> API

    API --> BusinessLogic
    BusinessLogic --> STORAGE
    BusinessLogic --> NIX
```
With this very simple design it is ensured that all the basic features remain stable across all frontends.
In the end it is straightforward to drive Python library function calls from a testing framework to ensure that kind of stability.

Integration tests and smaller unit tests should both be utilized to ensure the stability of the library.
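For illustration, such a test exercises the library directly, with no CLI or serialization layer in between. The sketch below assumes hypothetical names (`clan.machines`, `create_machine`, `list_machines`); they are placeholders, not the existing API.

```python
from pathlib import Path

# Hypothetical imports: the real module layout and function names may differ.
from clan.machines import create_machine, list_machines


def test_create_machine_roundtrip(tmp_path: Path) -> None:
    flake_dir = tmp_path / "test-flake"
    flake_dir.mkdir()

    # Call the library function directly; no argparse, no JSON adapter.
    create_machine(flake_dir, name="demo-machine")

    # Every frontend built on the same call must observe the same result.
    assert "demo-machine" in list_machines(flake_dir)
```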
Note: Library functions don't have to be JSON-serializable in general.

Persistence includes but is not limited to: creating git commits, writing to inventory.json, and reading and writing vars to/from disk in general.
## Benefits / Drawbacks
- (+) Less tight coupling of frontend and backend teams
- (+) Consistency and inherent behavior
- (+) Performance & Scalability
- (+) Different frontends for different user groups
- (+) Documentation per library function makes it convenient to interact with the clan resources.
- (+) Testing the library ensures stability of the underlying functionality for all layers above.
- (-) Complexity overhead
- (-) The library needs to be designed / documented
- (+) The library can be well documented since it is a finite set of functions.
- (-) Error handling might be harder.
- (+) Common error reporting
- (-) Different frontends need different features; the library must include them all.
- (+) All those core features must be implemented anyway.
- (+) VPN benchmarking already uses the existing library and works relatively well.
## Implementation considerations
Not all details that will need to change over time can be pointed out ahead of time.
The goal of this document is to create a common understanding of how we want our project to be structured.
Any future commits should contribute to this goal.

Some ideas of what might need to change:

- Having separate locations or packages for the library and the CLI.
- Rename the `clan_cli` package to `clan` and move the `cli` frontend into a subfolder or a separate package.
- Python argparse or other CLI-related code should not exist in the `clan` Python library (see the sketch after this list).
- `__init__.py` should be very minimal: only init the business logic models and resources. Note that all `__init__.py` files all the way up the module tree are always executed as part of the Python module import logic and thus should be as small as possible.
  i.e. `from clan_cli.vars.generators import ...` executes both `clan_cli/__init__.py` and `clan_cli/vars/__init__.py` if they exist.
- An `api` folder doesn't make sense since the Python library `clan` is the API.
- Logic needed for the webui that performs JSON serialization and deserialization will live in some `json-adapter` folder or package.
- Code for serializing dataclasses and typed dictionaries is needed for the persistence layer (i.e. for reading and writing inventory.json).
- The inventory.json is a backend resource that is internal. Its logic includes merging, unmerging and partial updates while considering Nix values and their priorities. Nobody should try to read or write it directly.
  Instead there will be library methods, e.g. to add a `service` or to update/read/delete information from it.
- Library functions should be carefully designed with suitable conventions for writing good APIs in mind (e.g. https://swagger.io/resources/articles/best-practices-in-api-design/).
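To make the intended split concrete, here is a rough sketch under assumed names (`Machine`, `create_machine`, and the exact package boundaries are illustrative, not the existing code): the library layer exposes plain Python functions, and argparse appears only in the CLI frontend that wraps them.

```python
import argparse
from dataclasses import dataclass
from pathlib import Path


# --- library layer (would live in the `clan` package; no argparse, no printing) ---
@dataclass
class Machine:
    name: str
    flake_dir: Path


def create_machine(flake_dir: Path, name: str) -> Machine:
    """Create a machine entry; persistence (inventory.json, git commit) stays internal."""
    return Machine(name=name, flake_dir=flake_dir)


# --- CLI frontend (would live in a separate `cli` package) ---
def main() -> None:
    parser = argparse.ArgumentParser(prog="clan machines create")
    parser.add_argument("name")
    parser.add_argument("--flake", type=Path, default=Path("."))
    args = parser.parse_args()

    machine = create_machine(args.flake, args.name)
    print(f"created machine {machine.name} in {machine.flake_dir}")


if __name__ == "__main__":
    main()
```

The same `create_machine` call could back the TUI, REST API, or desktop app without any CLI-specific code leaking into the library.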
flake.lock — generated, 6 lines changed
@@ -121,11 +121,11 @@
]
},
"locked": {
"lastModified": 1742370146,
"narHash": "sha256-XRE8hL4vKIQyVMDXykFh4ceo3KSpuJF3ts8GKwh5bIU=",
"lastModified": 1742982148,
"narHash": "sha256-aRA6LSxjlbMI6MmMzi/M5WH/ynd8pK+vACD9za3MKLQ=",
"owner": "numtide",
"repo": "treefmt-nix",
"rev": "adc195eef5da3606891cedf80c0d9ce2d3190808",
"rev": "61c88349bf6dff49fa52d7dfc39b21026c2a8881",
"type": "github"
},
"original": {
@@ -135,6 +135,11 @@ def generate_machine_hardware_info(opts: HardwareGenerateOptions) -> HardwareCon
]

host = machine.target_host

# HACK: to make non-root user work
if host.user != "root":
config_command.insert(0, "sudo")

cmd = nix_shell(
[
"nixpkgs#openssh",
@@ -185,6 +185,7 @@ def deploy_machines(machines: list[Machine]) -> None:
test_cmd,
RunOpts(msg_color=MsgColor(stderr=AnsiColor.DEFAULT)),
extra_env=env,
become_root=True,
)

# retry nixos-rebuild switch if the first attempt failed
@@ -193,6 +194,7 @@ def deploy_machines(machines: list[Machine]) -> None:
switch_cmd,
RunOpts(msg_color=MsgColor(stderr=AnsiColor.DEFAULT)),
extra_env=env,
become_root=True,
)

with AsyncRuntime() as runtime:
@@ -1,21 +1,28 @@
import tarfile
from pathlib import Path
from shlex import quote
from tempfile import TemporaryDirectory

from clan_cli.cmd import Log, RunOpts
from clan_cli.cmd import run as run_local
from clan_cli.errors import ClanError
from clan_cli.ssh.host import Host


def upload(
host: Host,
local_src: Path,  # must be a directory
local_src: Path,
remote_dest: Path,  # must be a directory
file_user: str = "root",
file_group: str = "root",
dir_mode: int = 0o700,
file_mode: int = 0o400,
) -> None:
# Check if the remote destination is at least 3 directories deep
if len(remote_dest.parts) < 3:
msg = f"The remote destination must be at least 3 directories deep. Got: {remote_dest}. Reason: The directory will be deleted with 'rm -rf'."
raise ClanError(msg)

# Create the tarball from the temporary directory
with TemporaryDirectory(prefix="facts-upload-") as tardir:
tar_path = Path(tardir) / "upload.tar.gz"
@@ -55,50 +62,22 @@ def upload(
with local_src.open("rb") as f:
tar.addfile(tarinfo, f)

if local_src.is_dir():
cmd = [
*host.ssh_cmd(),
"rm",
"-r",
str(remote_dest),
";",
"mkdir",
"-m",
f"{dir_mode:o}",
"-p",
str(remote_dest),
"&&",
"tar",
"-C",
str(remote_dest),
"-xzf",
"-",
]
else:
# For single file, extract to parent directory and ensure correct name
cmd = [
*host.ssh_cmd(),
"rm",
"-f",
str(remote_dest),
";",
"mkdir",
"-m",
f"{dir_mode:o}",
"-p",
str(remote_dest.parent),
"&&",
"tar",
"-C",
str(remote_dest.parent),
"-xzf",
"-",
]
sudo = ""
if host.user != "root":
sudo = "sudo -- "

cmd = "rm -rf $0 && mkdir -m $1 -p $0 && tar -C $0 -xzf -"

# TODO accept `input` to be an IO object instead of bytes so that we don't have to read the tarfile into memory.
with tar_path.open("rb") as f:
run_local(
cmd,
[
*host.ssh_cmd(),
"--",
f"{sudo}bash -c {quote(cmd)}",
str(remote_dest),
f"{dir_mode:o}",
],
RunOpts(
input=f.read(),
log=Log.BOTH,
@@ -45,7 +45,7 @@ def ask(
text = f"Enter the value for {ident}:"
if label:
text = f"{label}"

log.info(f"Prompting value for {ident}")
if MOCK_PROMPT_RESPONSE:
return next(MOCK_PROMPT_RESPONSE)
match input_type:
@@ -57,7 +57,10 @@ def sshd_config(test_root: Path) -> Iterator[SshdConfig]:
)
config = tmpdir / "sshd_config"
config.write_text(content)
login_shell = tmpdir / "shell"
bin_path = tmpdir / "bin"
login_shell = bin_path / "shell"
fake_sudo = bin_path / "sudo"
login_shell.parent.mkdir(parents=True)

bash = shutil.which("bash")
path = os.environ["PATH"]
@@ -65,19 +68,23 @@ def sshd_config(test_root: Path) -> Iterator[SshdConfig]:

login_shell.write_text(
f"""#!{bash}
set -x
if [[ -f /etc/profile ]]; then
source /etc/profile
fi
if [[ -n "$REALPATH" ]]; then
export PATH="$REALPATH:${path}"
else
export PATH="${path}"
fi
export PATH="{bin_path}:{path}"
exec {bash} -l "${{@}}"
"""
)
login_shell.chmod(0o755)

fake_sudo.write_text(
f"""#!{bash}
exec "${{@}}"
"""
)
fake_sudo.chmod(0o755)

lib_path = None

extension = ".so"
@@ -26,6 +26,17 @@ def test_secrets_upload(
monkeypatch.chdir(str(flake.path))
monkeypatch.setenv("SOPS_AGE_KEY", age_keys[0].privkey)

sops_dir = flake.path / "facts"

# the flake defines this path as the location where the sops key should be installed
sops_key = sops_dir / "key.txt"
sops_key2 = sops_dir / "key2.txt"

# Create old state, which should be cleaned up
sops_dir.mkdir()
sops_key.write_text("OLD STATE")
sops_key2.write_text("OLD STATE2")

cli.run(
[
"secrets",
@@ -56,8 +67,6 @@ def test_secrets_upload(

cli.run(["facts", "upload", "--flake", str(flake_path), "vm1"])

# the flake defines this path as the location where the sops key should be installed
sops_key = flake.path / "facts" / "key.txt"

assert sops_key.exists()
assert sops_key.read_text() == age_keys[0].privkey
assert not sops_key2.exists()
@@ -34,6 +34,10 @@ def map_json_type(
return {"int"}
if json_type == "boolean":
return {"bool"}
# In Python, "number" is analogous to the float type.
# https://json-schema.org/understanding-json-schema/reference/numeric#number
if json_type == "number":
return {"float"}
if json_type == "array":
assert nested_types, f"Array type not found for {parent}"
return {f"""list[{" | ".join(nested_types)}]"""}