pkgs/cli: Move the test folder inside the python module

Move the `tests` folder to `clan_cli/tests`.

Since we now want some of our tests to live next to the functions they
test, tests that are outside the `/tests` module also need access to the
configured test fixtures that are exposed by the `pytest_plugins`
declaration.

The following folder structure doesn't support this model:

```
├── clan_cli
│   ├── api
│   │    └── api_init_test.py
├── tests/
│   ├── conftest.py
│   └── ...
```

Here `api_init_test.py`, even when importing the test functions, will not
have the fixtures configured.

There is a way to configure python to import the fixtures from another
[`project/module`](https://docs.pytest.org/en/stable/how-to/fixtures.html#using-fixtures-from-other-projects), but this seems to *generally* be discouraged.

So moving the `conftest.py` to the toplevel and the `/tests` folder into
the toplevel seems to be a sensible choice.
This commit is contained in:
a-kenji
2025-02-28 11:56:38 +07:00
committed by Johannes Kirschbauer
parent a503c92c3e
commit 309c132b63
72 changed files with 117 additions and 100 deletions

View File

@@ -0,0 +1,31 @@
import pytest
from clan_cli.custom_logger import setup_logging
# collect_ignore = ["./nixpkgs"]

# Fixture modules shared by the whole suite.  Registering them as plugins
# here makes their fixtures available to every test file, including tests
# that live next to the code under clan_cli/ rather than inside
# clan_cli/tests/.
pytest_plugins = [
    "clan_cli.tests.temporary_dir",
    "clan_cli.tests.root",
    "clan_cli.tests.age_keys",
    "clan_cli.tests.gpg_keys",
    "clan_cli.tests.git_repo",
    "clan_cli.tests.sshd",
    "clan_cli.tests.command",
    "clan_cli.tests.ports",
    "clan_cli.tests.hosts",
    "clan_cli.tests.runtime",
    "clan_cli.tests.fixtures_flakes",
    "clan_cli.tests.stdout",
    "clan_cli.tests.nix_config",
]
# Executed on pytest session start
def pytest_sessionstart(session: pytest.Session) -> None:
    """Announce the session and switch logging to DEBUG.

    Called by pytest exactly once, before any tests are collected or run.
    """
    # This function will be called once at the beginning of the test session
    print("Starting pytest session")
    # You can access the session config, items, testsfailed, etc.
    print(f"Session config: {session.config}")
    setup_logging(level="DEBUG")

View File

@@ -0,0 +1,96 @@
import json
import os
from pathlib import Path
import pytest
from clan_cli.secrets.folders import sops_secrets_folder
from clan_cli.tests.helpers import cli
class KeyPair:
    """A matching age public/private key pair used by the test suite."""

    def __init__(self, pubkey: str, privkey: str) -> None:
        self.pubkey, self.privkey = pubkey, privkey


class SopsSetup:
    """Hold a list of three key pairs and create an "admin" user in the clan.

    The first key in the list is used as the admin key and the private part
    of the key is exposed in the `SOPS_AGE_KEY` environment variable; the two
    others can be used to add machines or other users.
    """

    def __init__(self, keys: list[KeyPair]) -> None:
        self.keys = keys
        self.user = os.environ.get("USER", "admin")

    def init(self, flake_path: Path) -> None:
        """Generate vars keys for the admin user of the flake at `flake_path`."""
        args = ["vars", "keygen", "--flake", str(flake_path), "--user", self.user]
        cli.run(args)
# Three deterministic age key pairs shared across the test suite.
# The first pair doubles as the admin key in SopsSetup.
# NOTE: test-only keys — never use them outside of tests.
KEYS = [
    KeyPair(
        "age1dhwqzkah943xzc34tc3dlmfayyevcmdmxzjezdgdy33euxwf59vsp3vk3c",
        "AGE-SECRET-KEY-1KF8E3SR3TTGL6M476SKF7EEMR4H9NF7ZWYSLJUAK8JX276JC7KUSSURKFK",
    ),
    KeyPair(
        "age14tva0txcrl0zes05x7gkx56qd6wd9q3nwecjac74xxzz4l47r44sv3fz62",
        "AGE-SECRET-KEY-1U5ENXZQAY62NC78Y2WC0SEGRRMAEEKH79EYY5TH4GPFWJKEAY0USZ6X7YQ",
    ),
    KeyPair(
        "age1dhuh9xtefhgpr2sjjf7gmp9q2pr37z92rv4wsadxuqdx48989g7qj552qp",
        "AGE-SECRET-KEY-169N3FT32VNYQ9WYJMLUSVTMA0TTZGVJF7YZWS8AHTWJ5RR9VGR7QCD8SKF",
    ),
]
@pytest.fixture
def age_keys() -> list[KeyPair]:
    """Provide the static test key pairs."""
    return KEYS


@pytest.fixture
def sops_setup(
    monkeypatch: pytest.MonkeyPatch,
) -> SopsSetup:
    """Expose the admin private key via SOPS_AGE_KEY and return a SopsSetup."""
    monkeypatch.setenv("SOPS_AGE_KEY", KEYS[0].privkey)
    return SopsSetup(KEYS)
# louis@(2025-03-10): right now this is specific to the `sops/secrets` folder,
# but we could make it generic to any sops file if the need arises.
def assert_secrets_file_recipients(
    flake_path: Path,
    secret_name: str,
    expected_age_recipients_keypairs: list["KeyPair"],
    err_msg: str | None = None,
) -> None:
    """Checks that the recipients of a secrets file matches expectations.

    This looks up the `secret` file for `secret_name` in the `sops` directory
    under `flake_path` and compares its age recipients against the public
    keys of `expected_age_recipients_keypairs`.

    :param err_msg: in case of failure, if you gave an error message then it
        will be displayed, otherwise pytest will display the two different sets
        of recipients.
    """
    sops_file = sops_secrets_folder(flake_path) / secret_name / "secret"
    with sops_file.open("rb") as fp:
        sops_data = json.load(fp)
    age_recipients = {each["recipient"] for each in sops_data["sops"]["age"]}
    expected_age_recipients = {pair.pubkey for pair in expected_age_recipients_keypairs}
    if not err_msg:
        # Bare assert on purpose: pytest's set diff is more informative
        # than a fixed message.
        assert age_recipients == expected_age_recipients
        return
    assert age_recipients == expected_age_recipients, err_msg

View File

@@ -0,0 +1,66 @@
import contextlib
import os
import signal
import subprocess
from collections.abc import Iterator
from pathlib import Path
from typing import IO, Any
import pytest
_FILE = None | int | IO[Any]
class Command:
def __init__(self) -> None:
self.processes: list[subprocess.Popen[str]] = []
def run(
self,
command: list[str],
extra_env: dict[str, str] | None = None,
stdin: _FILE = None,
stdout: _FILE = None,
stderr: _FILE = None,
workdir: Path | None = None,
) -> subprocess.Popen[str]:
if extra_env is None:
extra_env = {}
env = os.environ.copy()
env.update(extra_env)
# We start a new session here so that we can than more reliably kill all children as well
p = subprocess.Popen(
command,
env=env,
start_new_session=True,
stdout=stdout,
stderr=stderr,
stdin=stdin,
text=True,
cwd=workdir,
)
self.processes.append(p)
return p
def terminate(self) -> None:
# Stop in reverse order in case there are dependencies.
# We just kill all processes as quickly as possible because we don't
# care about corrupted state and want to make tests fasts.
for p in reversed(self.processes):
with contextlib.suppress(OSError):
os.killpg(os.getpgid(p.pid), signal.SIGKILL)
p.wait()
@pytest.fixture
def command() -> Iterator[Command]:
    """
    Starts a background command. The process is automatically terminated in the end.
    >>> p = command.run(["some", "daemon"])
    >>> print(p.pid)
    """
    c = Command()
    try:
        yield c
    finally:
        # Always reap spawned processes, even if the test failed.
        c.terminate()

View File

@@ -0,0 +1,29 @@
This is a revocation certificate for the OpenPGP key:
pub rsa1024 2024-09-29 [SCEAR]
9A9B2741C8062D3D3DF1302D8B049E262A5CA255
uid Root Superuser <test@local>
A revocation certificate is a kind of "kill switch" to publicly
declare that a key shall not anymore be used. It is not possible
to retract such a revocation certificate once it has been published.
Use it to revoke this key in case of a compromise or loss of
the secret key. However, if the secret key is still accessible,
it is better to generate a new revocation certificate and give
a reason for the revocation. For details see the description of
of the gpg command "--generate-revocation" in the GnuPG manual.
To avoid an accidental use of this file, a colon has been inserted
before the 5 dashes below. Remove this colon with a text editor
before importing and publishing this revocation certificate.
:-----BEGIN PGP PUBLIC KEY BLOCK-----
Comment: This is a revocation certificate
iLYEIAEIACAWIQSamydByAYtPT3xMC2LBJ4mKlyiVQUCZvl/cAIdAAAKCRCLBJ4m
KlyiVUWOA/9rDw6tSSw7Gh3vlaLZXSQvkftO3x9cJwePn6JPmM2nWLDcaOj+/Cd0
guyakYt7Fsxa6fqcv5sYV50bPRqAnfOWbR7jRl4DF6pSYNCHPlkWuLghdYsBOBo2
1MG/J+155aclsB8JQez1eGMe8KcpcJBcrYuZTAMekMGPrfyr9SwDUg==
=V2Jo
-----END PGP PUBLIC KEY BLOCK-----

View File

@@ -0,0 +1,15 @@
Created: 20240929T162520
Key: (private-key (rsa (n #00B1BF3E8A8CEA6A68439F67CDCAF5616B50D99A9F88
6D9E879D3FE990854E9ADFC35D7D26DBC5BC1800B3FF7B814F4623C1DFC34CAB4D326C
3E269C6059D567B5144659B3C895B52B428BA7B74CC2FA130D06C689C45B8FF8DA1D7C
7A578C99C0F221189D6BE045AE2EC8D2389423994BA0D650A2EDD2B7664642BFBF9691
495F#)(e #010001#)(d #57605C65AE94F39EF293136BB23842DE06DE19A90FDF573D
723B3F5D5872C626767AE831687B0116498E326AABABE51E61C9564FC3ABCCBC322737
DB137E191EB3B012B9C142290050EBD8ADD40BC68CCB577521E3A76DFD668BC6E584C7
0DD3B6CE545CC392B1D893EFB959BE3BD0EB7DF73A1F7AFBD9693353BA4FD3C05AED#)
(p #00C169E9E1DF8F39E7B2140FD52723FC5D10CCFC62D8A0876D39641AB00441345C
FC239EF8551B5F39CE850EF2DD79B98D70D57AD933648C86B7DD536B1B3AD6CB#)(q
#00EB43872BDDA397AC02A32E7CB0061ACB26A30497031D24FA793DE9EE4EFBACB1A4
6BF1444DE47CB63A6E254F2E4928BB0BB1F5C51C5247EEA8FF2D84BE25F13D#)(u
#00CEBE9717B5F7B59393065F884ACCA692F64545F492E50DF9070ACA9FBDA8A1EC03
906FDB9C112A97FADBB273E69548C6B17E6BE3BB664B9D02FB2100EF19AF7D#)))

View File

@@ -0,0 +1,6 @@
Key-Type: 1
Key-Length: 1024
Name-Real: Root Superuser
Name-Email: test@local
Expire-Date: 0
%no-protection

View File

@@ -0,0 +1 @@
test@local

View File

@@ -0,0 +1,23 @@
secret-key: ENC[AES256_GCM,data:gjX4OmCUdd3TlA4p,iv:3yZVpyd6FqkITQY0nU2M1iubmzvkR6PfkK2m/s6nQh8=,tag:Abgp9xkiFFylZIyAlap6Ew==,type:str]
nested:
secret-key: ENC[AES256_GCM,data:iUMgDhhIjwvd7wL4,iv:jiJIrh12dSu/sXX+z9ITVoEMNDMjwIlFBnyv40oN4LE=,tag:G9VmAa66Km1sc7JEhW5AvA==,type:str]
sops:
kms: []
gcp_kms: []
azure_kv: []
hc_vault: []
age:
- recipient: age14tva0txcrl0zes05x7gkx56qd6wd9q3nwecjac74xxzz4l47r44sv3fz62
enc: |
-----BEGIN AGE ENCRYPTED FILE-----
YWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSA0eWdRVjlydXlXOVZFQ3lO
bzU1eG9Iam5Ka29Sdlo0cHJ4b1R6bjdNSzBjCkgwRndCbWZQWHlDU0x1cWRmaGVt
N29lbjR6UjN0L2RhaXEzSG9zQmRsZGsKLS0tIEdsdWgxSmZwU3BWUDVxVWRSSC9M
eVZ6bjgwZnR2TTM5MkRYZWNFSFplQWsKmSzv12/dftL9jx2y35UZUGVK6xWdatE8
BGJiCvMlp0BQNrh2s/+YaEaBa48w8LL79U/XJnEZ+ZUwxmlbSTn6Hg==
-----END AGE ENCRYPTED FILE-----
lastmodified: "2023-08-08T14:27:20Z"
mac: ENC[AES256_GCM,data:iRWWX+L5Q5nKn3fBCLaWoz/mvqGnNnRd93gJmYXDZbRjFoHa9IFJZst5QDIDa1ZRYUe6G0/+lV5SBi+vwRm1pHysJ3c0ZWYjBP+e1jw3jLXxLV5gACsDC8by+6rFUCho0Xgu+Nqu2ehhNenjQQnCvDH5ivWbW70KFT5ynNgR9Tw=,iv:RYnnbLMC/hNfMwWPreMq9uvY0khajwQTZENO/P34ckY=,tag:Xi1PS5vM1c+sRkroHkPn1Q==,type:str]
pgp: []
unencrypted_suffix: _unencrypted
version: 3.7.3

View File

@@ -0,0 +1,7 @@
-----BEGIN OPENSSH PRIVATE KEY-----
b3BlbnNzaC1rZXktdjEAAAAABG5vbmUAAAAEbm9uZQAAAAAAAAABAAAAMwAAAAtzc2gtZW
QyNTUxOQAAACDonlRWMYxHTtnOeeiurKA1j26EfVZWeozuqSrtCYScFwAAAJje9J1V3vSd
VQAAAAtzc2gtZWQyNTUxOQAAACDonlRWMYxHTtnOeeiurKA1j26EfVZWeozuqSrtCYScFw
AAAEBxDpEXwhlJB/f6ZJOT9BbSqXeLy9S6qeuc25hXu5kpbuieVFYxjEdO2c556K6soDWP
boR9VlZ6jO6pKu0JhJwXAAAAE2pvZXJnQHR1cmluZ21hY2hpbmUBAg==
-----END OPENSSH PRIVATE KEY-----

View File

@@ -0,0 +1 @@
ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIOieVFYxjEdO2c556K6soDWPboR9VlZ6jO6pKu0JhJwX joerg@turingmachine

View File

@@ -0,0 +1,8 @@
HostKey $host_key
LogLevel DEBUG3
# In the nix build sandbox we don't get any meaningful PATH after login
MaxStartups 64:30:256
AuthorizedKeysFile $host_key.pub
AcceptEnv REALPATH
PasswordAuthentication no
Subsystem sftp $sftp_server

View File

@@ -0,0 +1,2 @@
class FixtureError(Exception):
    """Raised when a test fixture cannot be set up or fails a sanity check."""

    pass

View File

@@ -0,0 +1,359 @@
import json
import logging
import os
import shutil
import subprocess as sp
import tempfile
from collections import defaultdict
from collections.abc import Callable, Iterator
from pathlib import Path
from typing import Any, NamedTuple
import age_keys
import pytest
from clan_cli.dirs import TemplateType, clan_templates, nixpkgs_source
from clan_cli.locked_open import locked_open
from clan_cli.nix import nix_test_store
from clan_cli.tests.temporary_dir import TEMPDIR
from clan_cli.tests.fixture_error import FixtureError
from clan_cli.tests.root import CLAN_CORE
log = logging.getLogger(__name__)

# Lock file path used to serialize concurrent `nix flake lock` invocations
# across test processes.  LOCK_NIX may pin a shared path; otherwise a fresh
# throwaway temp file name is used per process.
lock_nix = os.environ.get("LOCK_NIX", "")
if not lock_nix:
    lock_nix = tempfile.NamedTemporaryFile().name  # NOQA: SIM115


# allows defining nested dictionary in a single line
def def_value() -> defaultdict:
    """Recursive default factory producing arbitrarily nested defaultdicts."""
    return defaultdict(def_value)


nested_dict: Callable[[], dict[str, Any]] = lambda: defaultdict(def_value)
# Substitutes strings in a file.
# This can be used on the flake.nix or default.nix of a machine
def substitute(
    file: Path,
    clan_core_flake: Path | None = None,
    flake: Path = Path(__file__).parent,
) -> None:
    """Rewrite the well-known __…__ placeholders in `file` in place."""
    sops_key = str(flake.joinpath("sops.key"))
    rewritten: list[str] = []
    with file.open() as src:
        for line in src:
            line = line.replace("__NIXPKGS__", str(nixpkgs_source()))
            if clan_core_flake:
                # Point every clan-core reference at the local checkout so
                # tests never hit the network.
                line = line.replace("__CLAN_CORE__", f"path:{clan_core_flake}")
                line = line.replace(
                    "git+https://git.clan.lol/clan/clan-core", f"path:{clan_core_flake}"
                )
                line = line.replace(
                    "https://git.clan.lol/clan/clan-core/archive/main.tar.gz",
                    f"path:{clan_core_flake}",
                )
            line = line.replace("__CLAN_SOPS_KEY_PATH__", sops_key)
            line = line.replace("__CLAN_SOPS_KEY_DIR__", str(flake / "facts"))
            rewritten.append(line)
    print(f"file: {file}")
    print(f"clan_core: {clan_core_flake}")
    print(f"flake: {flake}")
    file.write_text("".join(rewritten))
class FlakeForTest(NamedTuple):
    # Path to the generated test flake on disk.
    path: Path
def set_machine_settings(
    flake: Path,
    machine_name: str,
    machine_settings: dict,
) -> None:
    """Write `machine_settings` as pretty-printed configuration.json for the machine."""
    target = flake / "machines" / machine_name / "configuration.json"
    target.write_text(json.dumps(machine_settings, indent=2))
def set_git_credentials(
    monkeypatch: pytest.MonkeyPatch,
) -> None:
    """Point git's author and committer identity at a fixed test identity."""
    identity = {
        "GIT_AUTHOR_NAME": "clan-tool",
        "GIT_AUTHOR_EMAIL": "clan@example.com",
        "GIT_COMMITTER_NAME": "clan-tool",
        "GIT_COMMITTER_EMAIL": "clan@example.com",
    }
    for var, value in identity.items():
        monkeypatch.setenv(var, value)
def init_git(monkeypatch: pytest.MonkeyPatch, flake: Path) -> None:
    """Initialize a git repo in `flake` and commit everything as clan-tool."""
    set_git_credentials(monkeypatch)
    sp.run(["git", "init", "-b", "main"], cwd=flake, check=True)
    # TODO: Find out why test_vms_api.py fails in nix build
    # but works in pytest when this bottom line is commented out
    sp.run(["git", "add", "."], cwd=flake, check=True)
    sp.run(["git", "commit", "-a", "-m", "Initial commit"], cwd=flake, check=True)
class ClanFlake:
    """
    This class holds all attributes for generating a clan flake.
    For example, inventory and machine configs can be set via self.inventory and self.machines["my_machine"] = {...}.
    Whenever a flake's configuration is changed, it needs to be re-generated by calling refresh().
    This class can also be used for managing templates.
    Once initialized, all its settings including all generated files, if any, can be copied using the copy() method.
    This avoids expensive re-computation, like for example creating the flake.lock over and over again.
    """

    def __init__(
        self,
        temporary_home: Path,
        flake_template: Path,
        suppress_tmp_home_warning: bool = False,
    ) -> None:
        self._flake_template = flake_template
        self.inventory = nested_dict()
        self.machines = nested_dict()
        # Rewrite clan-core URLs to the local checkout so tests stay offline.
        self.substitutions: dict[str, str] = {
            "git+https://git.clan.lol/clan/clan-core": "path://" + str(CLAN_CORE),
            "https://git.clan.lol/clan/clan-core/archive/main.tar.gz": "path://"
            + str(CLAN_CORE),
        }
        self.clan_modules: list[str] = []
        self.temporary_home = temporary_home
        self.path = temporary_home / "flake"
        if not suppress_tmp_home_warning:
            # Guard against tests accidentally writing into the real $HOME.
            if "/tmp" not in str(os.environ.get("HOME")):
                log.warning(
                    f"!! $HOME does not point to a temp directory!! HOME={os.environ['HOME']}"
                )

    def copy(
        self,
        temporary_home: Path,
        monkeypatch: pytest.MonkeyPatch,
    ) -> "ClanFlake":
        """Clone this flake (including generated files) into a new temp home."""
        # copy the files to the new location
        shutil.copytree(self.path, temporary_home / "flake")
        set_git_credentials(monkeypatch)
        return ClanFlake(
            temporary_home=temporary_home,
            flake_template=self._flake_template,
        )

    def substitute(self) -> None:
        """Apply self.substitutions to every non-git file in the flake."""
        for file in self.path.rglob("*"):
            if ".git" in file.parts:
                continue
            if file.is_file():
                buf = ""
                with file.open() as f:
                    for line in f:
                        for key, value in self.substitutions.items():
                            line = line.replace(key, value)
                        buf += line
                file.write_text(buf)

    def init_from_template(self) -> None:
        """Materialize the template, lock the flake, and commit the initial state."""
        shutil.copytree(self._flake_template, self.path)
        # Template files come from the (read-only) nix store.
        sp.run(["chmod", "+w", "-R", str(self.path)], check=True)
        self.substitute()
        if not (self.path / ".git").exists():
            # Serialize `nix flake lock` across parallel test processes.
            with locked_open(Path(lock_nix), "w"):
                sp.run(
                    [
                        "nix",
                        "flake",
                        "lock",
                        "--extra-experimental-features",
                        "flakes nix-command",
                    ],
                    cwd=self.path,
                    check=True,
                )
            with pytest.MonkeyPatch.context() as mp:
                init_git(mp, self.path)

    def refresh(self) -> None:
        """Re-generate inventory and machine files, then commit the result."""
        if not self.path.exists():
            self.init_from_template()
        self.substitute()
        if self.inventory:
            inventory_path = self.path / "inventory.json"
            inventory_path.write_text(json.dumps(self.inventory, indent=2))
        imports = "\n".join(
            [f"clan-core.clanModules.{module}" for module in self.clan_modules]
        )
        for machine_name, machine_config in self.machines.items():
            configuration_nix = (
                self.path / "machines" / machine_name / "configuration.nix"
            )
            configuration_nix.parent.mkdir(parents=True, exist_ok=True)
            # configuration.nix just pulls in the JSON settings plus any
            # requested clan modules.
            configuration_nix.write_text(
                f"""
                {{clan-core, ...}}:
                {{
                    imports = [
                        (builtins.fromJSON (builtins.readFile ./configuration.json))
                        {imports}
                    ];
                }}
                """
            )
            set_machine_settings(self.path, machine_name, machine_config)
        sp.run(["git", "add", "."], cwd=self.path, check=True)
        sp.run(
            ["git", "commit", "-a", "-m", "Update by flake generator"],
            cwd=self.path,
            check=True,
        )
@pytest.fixture(scope="session")
def minimal_flake_template() -> Iterator[ClanFlake]:
    """Session-wide flake built once from the minimal clan template.

    Tests should not mutate it directly; use the `flake` fixture, which
    hands out a per-test copy.
    """
    with (
        tempfile.TemporaryDirectory(prefix="minimal-flake-", dir=TEMPDIR) as _dirpath,
        pytest.MonkeyPatch.context() as mp,
    ):
        temporary_home = Path(_dirpath).resolve()
        mp.setenv("HOME", str(temporary_home))
        flake = ClanFlake(
            temporary_home=temporary_home,
            flake_template=clan_templates(TemplateType.CLAN) / "minimal",
        )
        flake.init_from_template()
        yield flake
@pytest.fixture
def flake(
    temporary_home: Path,
    minimal_flake_template: ClanFlake,
    monkeypatch: pytest.MonkeyPatch,
) -> ClanFlake:
    """Per-test copy of the session-wide minimal flake (avoids re-locking)."""
    return minimal_flake_template.copy(temporary_home, monkeypatch)
# NOTE(review): the `age_keys` name used below comes from a module-level
# `import age_keys`; after the move into clan_cli/tests this may need to be
# `from clan_cli.tests import age_keys` — verify the import still resolves.
@pytest.fixture
def flake_with_sops(
    flake: ClanFlake,
    sops_setup: age_keys.SopsSetup,
) -> ClanFlake:
    """The `flake` fixture with sops keys generated for the admin user."""
    sops_setup.init(flake.path)
    return flake
def create_flake(
    temporary_home: Path,
    flake_template: str | Path,
    monkeypatch: pytest.MonkeyPatch,
    clan_core_flake: Path | None = None,
    # names referring to pre-defined machines from ../machines
    machines: list[str] | None = None,
    # alternatively specify the machines directly including their config
    machine_configs: dict[str, dict] | None = None,
) -> Iterator[FlakeForTest]:
    """
    Creates a flake with the given name and machines.
    The machine names map to the machines in ./test_machines

    Copies `flake_template` into `temporary_home`, adds the requested
    machines, substitutes the __…__ placeholders, locks the flake, and
    initializes a git repo.  Yields a FlakeForTest for the generated flake.
    """
    if machine_configs is None:
        machine_configs = {}
    if machines is None:
        machines = []
    # A string template is resolved relative to this tests directory.
    if isinstance(flake_template, Path):
        template_path = flake_template
    else:
        template_path = Path(__file__).parent / flake_template
    flake_template_name = template_path.name
    # copy the template to a new temporary location
    flake = temporary_home / flake_template_name
    shutil.copytree(template_path, flake)
    # Template files may come from the read-only nix store.
    sp.run(["chmod", "+w", "-R", str(flake)], check=True)
    # add the requested machines to the flake
    if machines:
        (flake / "machines").mkdir(parents=True, exist_ok=True)
    for machine_name in machines:
        machine_path = Path(__file__).parent / "machines" / machine_name
        shutil.copytree(machine_path, flake / "machines" / machine_name)
        substitute(flake / "machines" / machine_name / "default.nix", flake)
    # generate machines from machineConfigs
    for machine_name, machine_config in machine_configs.items():
        settings_path = flake / "machines" / machine_name / "settings.json"
        settings_path.parent.mkdir(parents=True, exist_ok=True)
        settings_path.write_text(json.dumps(machine_config, indent=2))
    # in the flake.nix file replace the string __CLAN_URL__ with the clan flake
    # provided by get_test_flake_toplevel
    flake_nix = flake / "flake.nix"
    # this is where we would install the sops key to, when updating
    substitute(flake_nix, clan_core_flake, flake)
    nix_options = []
    if tmp_store := nix_test_store():
        nix_options += ["--store", str(tmp_store)]
    # Serialize `nix flake lock` across parallel test processes.
    with locked_open(Path(lock_nix), "w"):
        sp.run(
            [
                "nix",
                "flake",
                "lock",
                flake,
                "--extra-experimental-features",
                "nix-command flakes",
                *nix_options,
            ],
            check=True,
        )
    # Guard against tests accidentally writing into the real $HOME.
    if "/tmp" not in str(os.environ.get("HOME")):
        log.warning(
            f"!! $HOME does not point to a temp directory!! HOME={os.environ['HOME']}"
        )
    init_git(monkeypatch, flake)
    yield FlakeForTest(flake)
@pytest.fixture
def test_flake(
    monkeypatch: pytest.MonkeyPatch, temporary_home: Path
) -> Iterator[FlakeForTest]:
    """Flake from the plain `test_flake` template.

    After the test, verifies that no uncommitted changes were left under
    ./sops and raises FixtureError otherwise.
    """
    yield from create_flake(
        temporary_home=temporary_home,
        flake_template="test_flake",
        monkeypatch=monkeypatch,
    )
    # check that git diff on ./sops is empty
    if (temporary_home / "test_flake" / "sops").exists():
        git_proc = sp.run(
            ["git", "diff", "--exit-code", "./sops"],
            cwd=temporary_home / "test_flake",
            stderr=sp.PIPE,
            check=False,
        )
        if git_proc.returncode != 0:
            log.error(git_proc.stderr.decode())
            msg = "git diff on ./sops is not empty. This should not happen as all changes should be committed"
            raise FixtureError(msg)
@pytest.fixture
def test_flake_with_core(
    monkeypatch: pytest.MonkeyPatch, temporary_home: Path
) -> Iterator[FlakeForTest]:
    """Flake from the `test_flake_with_core` template wired to the local clan-core."""
    if not (CLAN_CORE / "flake.nix").exists():
        msg = "clan-core flake not found. This test requires the clan-core flake to be present"
        raise FixtureError(msg)
    yield from create_flake(
        temporary_home=temporary_home,
        flake_template="test_flake_with_core",
        clan_core_flake=CLAN_CORE,
        monkeypatch=monkeypatch,
    )

View File

@@ -0,0 +1,69 @@
#define _GNU_SOURCE
#include <dlfcn.h>
#include <pwd.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <sys/types.h>
#ifdef __APPLE__
#include <sandbox.h>
#include <unistd.h>
#endif
#ifdef __APPLE__
/*
 * macOS has no LD_PRELOAD.  Instead, dyld interposing is used: a
 * (replacement, replacee) pair placed in the __DATA,__interpose section
 * makes dyld call `replacement` wherever `replacee` was referenced
 * (activated via DYLD_INSERT_LIBRARIES).
 */
struct dyld_interpose {
  const void *replacement;
  const void *replacee;
};
/* Declare the wrapper under a private name and register the interpose pair. */
#define WRAPPER(ret, name) static ret _fakeroot_wrapper_##name
#define WRAPPER_DEF(name) \
  __attribute__(( \
      used)) static struct dyld_interpose _fakeroot_interpose_##name \
      __attribute__((section("__DATA,__interpose"))) = { \
          &_fakeroot_wrapper_##name, &name};
#else
/* On ELF platforms the wrapper simply shadows the libc symbol via LD_PRELOAD. */
#define WRAPPER(ret, name) ret name
#define WRAPPER_DEF(name)
#endif

typedef struct passwd *(*getpwnam_type)(const char *name);

/*
 * Wrap getpwnam(3) so every looked-up user appears to have the shell named
 * by the LOGIN_SHELL environment variable.  This lets the test sshd start
 * our instrumented login shell regardless of the real passwd entry.
 */
WRAPPER(struct passwd *, getpwnam)(const char *name) {
  struct passwd *pw;
#ifdef __APPLE__
#define orig_getpwnam(name) getpwnam(name)
#else
  /* Lazily resolve the real getpwnam from the next object in the chain. */
  static getpwnam_type orig_getpwnam = NULL;
  if (!orig_getpwnam) {
    orig_getpwnam = (getpwnam_type)dlsym(RTLD_NEXT, "getpwnam");
    if (!orig_getpwnam) {
      fprintf(stderr, "dlsym error: %s\n", dlerror());
      exit(1);
    }
  }
#endif
  pw = orig_getpwnam(name);
  if (pw) {
    const char *shell = getenv("LOGIN_SHELL");
    if (!shell) {
      fprintf(stderr, "no LOGIN_SHELL set\n");
      exit(1);
    }
    /* NOTE(review): strdup result is never freed — acceptable for a
       short-lived test preload library. */
    pw->pw_shell = strdup(shell);
    fprintf(stderr, "getpwnam: %s -> %s\n", name, pw->pw_shell);
  }
  return pw;
}
WRAPPER_DEF(getpwnam)

#ifdef __APPLE__
// sandbox_init(3) doesn't work in nix build sandbox
WRAPPER(int, sandbox_init)(const char *profile, uint64_t flags, void *handle) {
  return 0;
}
WRAPPER_DEF(sandbox_init)
#else
#endif

View File

@@ -0,0 +1,20 @@
import subprocess
from pathlib import Path
import pytest
from clan_cli.nix import nix_shell
# fixture for git_repo
@pytest.fixture
def git_repo(temp_dir: Path) -> Path:
    """Initialize a git repository (git from nix-shell) inside `temp_dir`."""
    # initialize a git repository
    cmd = nix_shell(["nixpkgs#git"], ["git", "init"])
    subprocess.run(cmd, cwd=temp_dir, check=True)
    # set user.name and user.email
    cmd = nix_shell(["nixpkgs#git"], ["git", "config", "user.name", "test"])
    subprocess.run(cmd, cwd=temp_dir, check=True)
    cmd = nix_shell(["nixpkgs#git"], ["git", "config", "user.email", "test@test.test"])
    subprocess.run(cmd, cwd=temp_dir, check=True)
    # return the path to the git repository
    return temp_dir

View File

@@ -0,0 +1,25 @@
import shutil
from dataclasses import dataclass
from pathlib import Path
import pytest
@dataclass
class GpgKey:
    """Fingerprint plus GNUPGHOME directory of a test GPG key."""

    fingerprint: str
    gpg_home: Path


@pytest.fixture
def gpg_key(
    temp_dir: Path,
    monkeypatch: pytest.MonkeyPatch,
    test_root: Path,
) -> GpgKey:
    """Copy the checked-in gnupg home into a temp dir and point GNUPGHOME at it."""
    gpg_home = temp_dir / "gnupghome"
    shutil.copytree(test_root / "data" / "gnupg-home", gpg_home)
    monkeypatch.setenv("GNUPGHOME", str(gpg_home))
    # Fingerprint of the pre-generated "Root Superuser <test@local>" key.
    return GpgKey("9A9B2741C8062D3D3DF1302D8B049E262A5CA255", gpg_home)

View File

@@ -0,0 +1,19 @@
import argparse
import logging
import shlex
from clan_cli import create_parser
from clan_cli.custom_logger import print_trace
log = logging.getLogger(__name__)
def run(args: list[str]) -> argparse.Namespace:
    """Parse `args` as a `clan` CLI invocation, trace it, and execute it."""
    parser = create_parser(prog="clan")
    namespace = parser.parse_args(args)
    print_trace(f"$ {shlex.join(['clan', *args])}", log, "localhost")
    # Subcommands attach their handler as `func`; bare `clan` has none.
    if hasattr(namespace, "func"):
        namespace.func(namespace)
    return namespace

View File

@@ -0,0 +1,11 @@
from collections import defaultdict
from collections.abc import Callable
from typing import Any
def def_value() -> defaultdict:
    """Default factory that recursively yields further nested defaultdicts."""
    return defaultdict(def_value)


# allows defining nested dictionary in a single line
def nested_dict() -> dict[str, Any]:
    """Return a dict whose missing keys transparently create nested dicts.

    Replaces the previous lambda assignment (PEP 8 E731) with a plain def;
    the callable interface is unchanged.
    """
    return defaultdict(def_value)

View File

@@ -0,0 +1,21 @@
import subprocess
class Error(Exception):
    """Base error for age key validation failures."""

    pass


def is_valid_age_key(secret_key: str) -> bool:
    """Validate `secret_key` by asking `age-keygen -y` to derive its public key.

    Returns True for a valid key; raises Error for an invalid one.
    """
    # Run the age-keygen command with the -y flag to check the key format
    proc = subprocess.run(
        ["age-keygen", "-y"],
        input=secret_key,
        capture_output=True,
        text=True,
        check=False,
    )
    if proc.returncode != 0:
        msg = f"Invalid age key: {secret_key}"
        raise Error(msg)
    return True

View File

@@ -0,0 +1,23 @@
import os
import pwd
import pytest
from clan_cli.ssh.host import Host
from clan_cli.ssh.host_key import HostKeyCheck
from clan_cli.tests.sshd import Sshd
@pytest.fixture
def hosts(sshd: Sshd) -> list[Host]:
    """Single Host pointing at the local test sshd, logged in as the current user."""
    login = pwd.getpwuid(os.getuid()).pw_name
    group = [
        Host(
            "127.0.0.1",
            port=sshd.port,
            user=login,
            key=sshd.key,
            # Host key checking is pointless against a throwaway test daemon.
            host_key_check=HostKeyCheck.NONE,
        )
    ]
    return group

View File

@@ -0,0 +1,21 @@
# Test machine configuration; the __…__ placeholders are rewritten by the
# test fixtures' substitute() before evaluation.
{ config, ... }:
{
  clan.core.networking.targetHost = "__CLAN_TARGET_ADDRESS__";
  system.stateVersion = config.system.nixos.release;
  sops.age.keyFile = "__CLAN_SOPS_KEY_PATH__";
  clan.core.facts.secretUploadDirectory = "__CLAN_SOPS_KEY_DIR__";
  clan.virtualisation.graphics = false;
  # NOTE(review): a sibling fixture uses
  # `clan.core.networking.zerotier.controller.enable`; the extra `facts.`
  # segment here looks suspicious — confirm this option path exists.
  clan.core.facts.networking.zerotier.controller.enable = true;
  networking.useDHCP = false;
  # Power off right after boot so VM-based tests terminate on their own.
  systemd.services.shutdown-after-boot = {
    enable = true;
    wantedBy = [ "multi-user.target" ];
    after = [ "multi-user.target" ];
    script = ''
      #!/usr/bin/env bash
      shutdown -h now
    '';
  };
}

View File

@@ -0,0 +1,21 @@
# Test machine configuration with a zerotier controller; the __…__
# placeholders are rewritten by the test fixtures' substitute().
{ config, ... }:
{
  clan.core.networking.targetHost = "__CLAN_TARGET_ADDRESS__";
  system.stateVersion = config.system.nixos.release;
  sops.age.keyFile = "__CLAN_SOPS_KEY_PATH__";
  clan.core.facts.secretUploadDirectory = "__CLAN_SOPS_KEY_DIR__";
  clan.virtualisation.graphics = false;
  clan.core.networking.zerotier.controller.enable = true;
  networking.useDHCP = false;
  # Power off right after boot so VM-based tests terminate on their own.
  systemd.services.shutdown-after-boot = {
    enable = true;
    wantedBy = [ "multi-user.target" ];
    after = [ "multi-user.target" ];
    script = ''
      #!/usr/bin/env bash
      shutdown -h now
    '';
  };
}

View File

@@ -0,0 +1,18 @@
# Minimal test machine configuration (no sops/zerotier); __CLAN_TARGET_ADDRESS__
# is rewritten by the test fixtures' substitute().
{ config, ... }:
{
  clan.core.networking.targetHost = "__CLAN_TARGET_ADDRESS__";
  system.stateVersion = config.system.nixos.release;
  clan.virtualisation.graphics = false;
  networking.useDHCP = false;
  # Power off right after boot so VM-based tests terminate on their own.
  systemd.services.shutdown-after-boot = {
    enable = true;
    wantedBy = [ "multi-user.target" ];
    after = [ "multi-user.target" ];
    script = ''
      #!/usr/bin/env bash
      shutdown -h now
    '';
  };
}

View File

@@ -0,0 +1,24 @@
import json
import subprocess
from dataclasses import dataclass
import pytest
@dataclass
class ConfigItem:
    """One entry of `nix config show --json` output (field names match nix)."""

    aliases: list[str]
    defaultValue: bool  # noqa: N815
    description: str
    documentDefault: bool  # noqa: N815
    experimentalFeature: str  # noqa: N815
    value: str | bool | list[str] | dict[str, str]


@pytest.fixture(scope="session")
def nix_config() -> dict[str, ConfigItem]:
    """Return the local nix configuration, queried once per test session."""
    proc = subprocess.run(
        ["nix", "config", "show", "--json"], check=True, stdout=subprocess.PIPE
    )
    data = json.loads(proc.stdout)
    return {name: ConfigItem(**c) for name, c in data.items()}

View File

@@ -0,0 +1,55 @@
#!/usr/bin/env python3
import contextlib
import socket
from collections.abc import Callable
import pytest
def _unused_port(socket_type: int) -> int:
"""Find an unused localhost port from 1024-65535 and return it."""
with contextlib.closing(socket.socket(type=socket_type)) as sock:
sock.bind(("127.0.0.1", 0))
return sock.getsockname()[1]
PortFunction = Callable[[], int]


def _port_factory(socket_type: int) -> PortFunction:
    """Build a function returning unused ports, never repeating a port.

    Shared implementation for the TCP and UDP fixtures below, which were
    previously two copy-pasted bodies.
    """
    produced: set[int] = set()

    def factory() -> int:
        """Return an unused port not previously handed out by this factory."""
        port = _unused_port(socket_type)
        while port in produced:
            port = _unused_port(socket_type)
        produced.add(port)
        return port

    return factory


@pytest.fixture(scope="session")
def unused_tcp_port() -> PortFunction:
    """A function, producing different unused TCP ports."""
    return _port_factory(socket.SOCK_STREAM)


@pytest.fixture(scope="session")
def unused_udp_port() -> PortFunction:
    """A function, producing different unused UDP ports."""
    return _port_factory(socket.SOCK_DGRAM)

View File

@@ -0,0 +1,35 @@
import os
from pathlib import Path
import pytest
# Directory containing this file, i.e. the tests package root.
TEST_ROOT = Path(__file__).parent.resolve()
# The package directory one level above the tests.
PROJECT_ROOT = TEST_ROOT.parent
# Location of the clan-core flake: taken from $CLAN_CORE_PATH when set,
# otherwise derived from the in-repo layout (three levels above the package).
if CLAN_CORE_ := os.environ.get("CLAN_CORE_PATH"):
    CLAN_CORE = Path(CLAN_CORE_)
else:
    CLAN_CORE = PROJECT_ROOT.parent.parent.parent
@pytest.fixture(scope="session")
def project_root() -> Path:
    """
    Root directory of the clan-cli package
    """
    return PROJECT_ROOT


@pytest.fixture(scope="session")
def test_root() -> Path:
    """
    Root directory of the tests
    """
    return TEST_ROOT


@pytest.fixture(scope="session")
def clan_core() -> Path:
    """
    Directory of the clan-core flake
    """
    return CLAN_CORE

View File

@@ -0,0 +1,7 @@
import pytest
from clan_cli.async_run import AsyncRuntime
@pytest.fixture
def runtime() -> AsyncRuntime:
    """Provide a fresh AsyncRuntime per test."""
    return AsyncRuntime()

View File

@@ -0,0 +1,183 @@
import os
import shutil
import string
import subprocess
import time
from collections.abc import Iterator
from pathlib import Path
from sys import platform
from tempfile import TemporaryDirectory
from typing import TYPE_CHECKING
import pytest
if TYPE_CHECKING:
from .command import Command
from .ports import PortFunction
class SshdError(Exception):
    """Raised when the test sshd dies or does not become ready in time."""

    pass
class Sshd:
    """Handle for a running test sshd: listening port, process, host key path."""

    def __init__(self, port: int, proc: subprocess.Popen[str], key: str) -> None:
        self.port = port
        self.proc = proc
        self.key = key
class SshdConfig:
    """Paths making up a generated sshd setup (config, shell, key, preload lib, log)."""

    def __init__(
        self, path: Path, login_shell: Path, key: str, preload_lib: Path, log_file: Path
    ) -> None:
        self.path = path
        self.login_shell = login_shell
        self.key = key
        self.preload_lib = preload_lib
        self.log_file = log_file
@pytest.fixture(scope="session")
def sshd_config(test_root: Path) -> Iterator[SshdConfig]:
    """Build a self-contained sshd setup in a temp directory.

    Produces the sshd_config file, a forced login shell, a fake `sudo`, and a
    compiled getpwnam() preload library that enforces the login shell.
    Yields an SshdConfig with all the resulting paths.
    """
    # FIXME: if any parent of the sshd directory is world-writable then sshd
    # will refuse it.  We use .direnv instead since it's already in .gitignore.
    with TemporaryDirectory(prefix="sshd-") as _dir:
        tmpdir = Path(_dir)
        host_key = test_root / "data" / "ssh_host_ed25519_key"
        # sshd rejects group/world-readable host keys.
        host_key.chmod(0o600)
        template = (test_root / "data" / "sshd_config").read_text()
        sshd = shutil.which("sshd")
        assert sshd is not None
        sshdp = Path(sshd)
        sftp_server = sshdp.parent.parent / "libexec" / "sftp-server"
        assert sftp_server is not None
        content = string.Template(template).substitute(
            {"host_key": host_key, "sftp_server": sftp_server}
        )
        config = tmpdir / "sshd_config"
        config.write_text(content)
        bin_path = tmpdir / "bin"
        login_shell = bin_path / "shell"
        fake_sudo = bin_path / "sudo"
        login_shell.parent.mkdir(parents=True)
        bash = shutil.which("bash")
        path = os.environ["PATH"]
        assert bash is not None
        login_shell.write_text(
            f"""#!{bash}
set -x
if [[ -f /etc/profile ]]; then
source /etc/profile
fi
export PATH="{bin_path}:{path}"
exec {bash} -l "${{@}}"
"""
        )
        login_shell.chmod(0o755)
        fake_sudo.write_text(
            f"""#!{bash}
exec "${{@}}"
"""
        )
        fake_sudo.chmod(0o755)
        # Pick the platform's shared-library extension and linker flag once.
        if platform == "darwin":
            extension = ".dylib"
            link_lib_flag = "-dynamiclib"
        else:
            extension = ".so"
            link_lib_flag = "-shared"
        # This enforces a login shell by overriding the login shell of `getpwnam(3)`.
        # BUG FIX: the previous f-string used `.${extension}`, producing a literal
        # "$" and a double dot in the filename; `extension` already includes the dot.
        lib_path = tmpdir / f"libgetpwnam-preload{extension}"
        subprocess.run(
            [
                os.environ.get("CC", "cc"),
                link_lib_flag,
                "-o",
                lib_path,
                str(test_root / "getpwnam-preload.c"),
            ],
            check=True,
        )
        log_file = tmpdir / "sshd.log"
        yield SshdConfig(config, login_shell, str(host_key), lib_path, log_file)
@pytest.fixture
def sshd(
    sshd_config: SshdConfig,
    command: "Command",
    unused_tcp_port: "PortFunction",
    monkeypatch: pytest.MonkeyPatch,
) -> Iterator[Sshd]:
    """Start sshd on a free port and wait until it accepts logins.

    Yields an Sshd handle once a probe ssh login succeeds; raises SshdError
    if the daemon exits early or does not become ready within the timeout.
    (Removed the redundant function-local `import subprocess`; it is already
    imported at module level.)
    """
    port = unused_tcp_port()
    sshd = shutil.which("sshd")
    assert sshd is not None, "no sshd binary found"
    preload_env_name = "LD_PRELOAD"
    if platform == "darwin":
        preload_env_name = "DYLD_INSERT_LIBRARIES"
    # Preload the getpwnam() shim so our login shell is enforced.
    env = {
        preload_env_name: str(sshd_config.preload_lib),
        "LOGIN_SHELL": str(sshd_config.login_shell),
    }
    proc = command.run(
        [
            sshd,
            "-E",
            str(sshd_config.log_file),
            "-f",
            str(sshd_config.path),
            "-D",
            "-p",
            str(port),
        ],
        extra_env=env,
    )
    # Make sure the probe client below doesn't pick up agent keys.
    monkeypatch.delenv("SSH_AUTH_SOCK", raising=False)
    timeout = 5
    start_time = time.time()
    while True:
        print(sshd_config.path)
        # Probe with a real ssh login until the daemon is ready.
        if (
            subprocess.run(
                [
                    "ssh",
                    "-o",
                    "StrictHostKeyChecking=no",
                    "-o",
                    "UserKnownHostsFile=/dev/null",
                    "-i",
                    sshd_config.key,
                    "localhost",
                    "-p",
                    str(port),
                    "true",
                ],
                check=False,
            ).returncode
            == 0
        ):
            yield Sshd(port, proc, sshd_config.key)
            return
        rc = proc.poll()
        if rc is not None:
            msg = f"sshd processes was terminated with {rc}"
            raise SshdError(msg)
        if time.time() - start_time > timeout:
            msg = "Timeout while waiting for sshd to be ready"
            raise SshdError(msg)
        time.sleep(0.1)

View File

@@ -0,0 +1,34 @@
import types
import pytest
class CaptureOutput:
    """Capture stdout/stderr via pytest's capsys only inside a `with` block.

    While the object exists outside the `with` block, capturing is
    disabled so output goes straight to the terminal. On exit, the text
    captured inside the block is stored in `self.out` / `self.err`.
    """
    def __init__(self, capsys: pytest.CaptureFixture) -> None:
        self.capsys = capsys
        # Start with capturing disabled until __enter__ is called.
        self.capsys_disabled = capsys.disabled()
        self.capsys_disabled.__enter__()
    def __enter__(self) -> "CaptureOutput":
        # Re-enable capturing and discard anything captured so far.
        self.capsys_disabled.__exit__(None, None, None)
        self.capsys.readouterr()
        return self
    def __exit__(
        self,
        exc_type: type[BaseException] | None,
        exc_value: BaseException | None,
        traceback: types.TracebackType | None,
    ) -> None:
        # Snapshot what was printed inside the `with` block.
        res = self.capsys.readouterr()
        self.out = res.out
        self.err = res.err
        # Disable capsys again
        self.capsys_disabled = self.capsys.disabled()
        self.capsys_disabled.__enter__()
@pytest.fixture
def capture_output(capsys: pytest.CaptureFixture) -> CaptureOutput:
    """Provide a CaptureOutput helper bound to this test's capsys."""
    return CaptureOutput(capsys)

View File

@@ -0,0 +1,48 @@
import logging
import os
import tempfile
from collections.abc import Iterator
from pathlib import Path
from sys import platform
import pytest
log = logging.getLogger(__name__)
# Base directory passed to TemporaryDirectory (None = system default).
TEMPDIR = None
# macOS' default temporary directory is too long for unix sockets
# This can break applications such as gpg-agent
if platform == "darwin":
    TEMPDIR = Path("/tmp")
@pytest.fixture
def temporary_home(temp_dir: Path, monkeypatch: pytest.MonkeyPatch) -> Path:
    """Point HOME/XDG environment variables into an isolated temp dir.

    Creates a private XDG_RUNTIME_DIR (with a gpg-agent subdir) and
    rewrites any environment variable that referenced the old runtime
    dir. Also chdirs into the temp dir. Returns the temp dir path.
    """
    xdg_runtime_dir = os.getenv("XDG_RUNTIME_DIR")
    monkeypatch.setenv("HOME", str(temp_dir))
    monkeypatch.setenv("XDG_CONFIG_HOME", str(temp_dir / ".config"))
    runtime_dir = temp_dir / "xdg-runtime-dir"
    runtime_dir.mkdir()
    runtime_dir.chmod(0o700)
    gpgdir = runtime_dir / "gpgagent"
    gpgdir.mkdir()
    gpgdir.chmod(0o700)
    monkeypatch.setenv("GPG_AGENT_INFO", str(gpgdir))
    # Rewrite every env var that points into the old XDG_RUNTIME_DIR.
    # BUG FIX: snapshot os.environ first — monkeypatch.setenv mutates it,
    # and mutating a mapping while iterating its items view is an error.
    for key, value in list(os.environ.items()):
        if xdg_runtime_dir and value.startswith(xdg_runtime_dir):
            monkeypatch.setenv(key, value.replace(xdg_runtime_dir, str(runtime_dir)))
    monkeypatch.setenv("XDG_RUNTIME_DIR", str(runtime_dir))
    monkeypatch.chdir(str(temp_dir))
    return temp_dir
@pytest.fixture
def temp_dir() -> Iterator[Path]:
    """Yield a fresh temporary directory that is removed after the test."""
    tmp = tempfile.TemporaryDirectory(prefix="pytest-", dir=TEMPDIR)
    try:
        yield Path(tmp.name).resolve()
    finally:
        tmp.cleanup()

View File

@@ -0,0 +1,157 @@
import ast
import importlib.util
import os
import sys
from dataclasses import is_dataclass
from pathlib import Path
from clan_cli.api import API
from clan_cli.api.util import JSchemaTypeError, type_to_dict
from clan_cli.errors import ClanError
def find_dataclasses_in_directory(
directory: Path, exclude_paths: list[str] | None = None
) -> list[tuple[Path, str]]:
"""
Find all dataclass classes in all Python files within a nested directory.
Args:
directory (str): The root directory to start searching from.
Returns:
List[Tuple[str, str]]: A list of tuples containing the file path and the dataclass name.
"""
if exclude_paths is None:
exclude_paths = []
dataclass_files = []
excludes = [directory / d for d in exclude_paths]
for root, _, files in os.walk(directory, topdown=False):
for file in files:
if not file.endswith(".py"):
continue
file_path = Path(root) / file
if file_path in excludes:
print(f"Skipping dataclass check for file: {file_path}")
continue
python_code = file_path.read_text()
try:
tree = ast.parse(python_code, filename=file_path)
for node in ast.walk(tree):
if isinstance(node, ast.ClassDef):
for deco in node.decorator_list:
if (
isinstance(deco, ast.Name) and deco.id == "dataclass"
) or (
isinstance(deco, ast.Call)
and isinstance(deco.func, ast.Name)
and deco.func.id == "dataclass"
):
dataclass_files.append((file_path, node.name))
except (SyntaxError, UnicodeDecodeError) as e:
print(f"Error parsing {file_path}: {e}")
return dataclass_files
def load_dataclass_from_file(
file_path: Path, class_name: str, root_dir: str
) -> type | None:
"""
Load a dataclass from a given file path.
Args:
file_path (str): Path to the file.
class_name (str): Name of the class to load.
Returns:
List[Type]: The dataclass type if found, else an empty list.
"""
module_name = (
os.path.relpath(file_path, root_dir).replace(os.path.sep, ".").rstrip(".py")
)
try:
sys.path.insert(0, root_dir)
spec = importlib.util.spec_from_file_location(module_name, file_path)
print(spec)
if not spec:
msg = f"Could not load spec from file: {file_path}"
raise ClanError(msg)
module = importlib.util.module_from_spec(spec)
print(module)
if not module:
msg = f"Could not create module: {file_path}"
raise ClanError(msg)
if not spec.loader:
msg = f"Could not load loader from spec: {spec}"
raise ClanError(msg)
spec.loader.exec_module(module)
finally:
sys.path.pop(0)
dataclass_type = getattr(module, class_name, None)
if dataclass_type and is_dataclass(dataclass_type):
return dataclass_type
msg = f"Could not load dataclass {class_name} from file: {file_path}"
raise ClanError(msg)
def test_all_dataclasses() -> None:
    """
    This Test ensures that all dataclasses are compatible with the API.
    It will load all dataclasses from the clan_cli directory and
    generate a JSON schema for each of them.
    It will fail if any dataclass cannot be converted to JSON schema.
    This means the dataclass in its current form is not compatible with the API.
    """
    # Excludes:
    # - API includes Type Generic wrappers, that are not known in the init file.
    excludes = [
        "api/__init__.py",
        "cmd.py",  # We don't want the UI to have access to the cmd module anyway
        "async_run.py",  # We don't want the UI to have access to the async_run module anyway
    ]
    cli_path = Path("clan_cli").resolve()
    dataclasses = find_dataclasses_in_directory(cli_path, excludes)
    for file, dataclass in dataclasses:
        print(f"checking dataclass {dataclass} in file: {file}")
        try:
            # Reset the API registry so repeated module imports do not
            # accumulate registration state between iterations.
            API.reset()
            dclass = load_dataclass_from_file(file, dataclass, str(cli_path.parent))
            if dclass is None:
                msg = f"Could not load dataclass {dataclass} from {file}"
                raise ClanError(msg)
            # Raises JSchemaTypeError if the dataclass cannot be expressed
            # as a JSON schema, i.e. is not API-compatible.
            type_to_dict(dclass)
        except JSchemaTypeError as e:
            print(f"Error loading dataclass {dataclass} from {file}: {e}")
            msg = f"""
--------------------------------------------------------------------------------
Error converting dataclass 'class {dataclass}()' from {file}
Details:
{e}
Help:
- Converting public fields to PRIVATE by prefixing them with underscore ('_')
- Ensure all private fields are initialized the API wont provide initial values for them.
--------------------------------------------------------------------------------
"""
            raise ClanError(
                msg,
                location=__file__,
            ) from e

View File

@@ -0,0 +1,18 @@
import pytest
from clan_cli.tests.fixtures_flakes import FlakeForTest
from clan_cli.tests.helpers import cli
@pytest.mark.impure
def test_backups(
    test_flake_with_core: FlakeForTest,
) -> None:
    """Smoke-test that `clan backups list` runs against the test flake."""
    argv = [
        "backups",
        "list",
        "--flake",
        str(test_flake_with_core.path),
        "vm1",
    ]
    cli.run(argv)

View File

@@ -0,0 +1,326 @@
# mypy: disable-error-code="var-annotated"
import json
from pathlib import Path
from typing import Any
import pytest
from clan_cli.cmd import run
from clan_cli.flake import Flake
from clan_cli.git import commit_file
from clan_cli.locked_open import locked_open
from clan_cli.nix import nix_command
from clan_cli.templates import (
ClanExports,
InputName,
TemplateName,
copy_from_nixstore,
get_clan_nix_attrset,
get_template,
list_templates,
)
from clan_cli.tests.fixtures_flakes import FlakeForTest
# Function to write clan attributes to a file
def write_clan_attr(clan_attrset: dict[str, Any], flake: FlakeForTest) -> None:
    """Write *clan_attrset* to clan_attrs.json in the flake and commit it."""
    target = flake.path / "clan_attrs.json"
    with locked_open(target, "w") as handle:
        json.dump(clan_attrset, handle, indent=2)
    commit_file(target, flake.path, "Add clan attributes")
# Common function to test clan nix attrset
def nix_attr_tester(
    test_flake_with_core: FlakeForTest,
    injected: dict[str, Any],
    expected_self: dict[str, Any],
    test_number: int,  # NOTE(review): currently unused; kept for call-site symmetry
) -> ClanExports:
    """Inject *injected* into the flake, then compare the flake's own
    ("self") clan attrset against expected_self["self"].

    Returns the full ClanExports for further assertions.
    """
    write_clan_attr(injected, test_flake_with_core)
    clan_dir = Flake(str(test_flake_with_core.path))
    nix_attrset = get_clan_nix_attrset(clan_dir)
    # Sort keys/lists recursively so the JSON comparison below is
    # insensitive to attribute ordering.
    def recursive_sort(item: Any) -> Any:
        if isinstance(item, dict):
            return {k: recursive_sort(item[k]) for k in sorted(item)}
        if isinstance(item, list):
            return sorted(recursive_sort(elem) for elem in item)
        return item
    returned_sorted = recursive_sort(nix_attrset["self"])
    expected_sorted = recursive_sort(expected_self["self"])
    assert json.dumps(returned_sorted, indent=2) == json.dumps(
        expected_sorted, indent=2
    )
    return nix_attrset
@pytest.mark.impure
def test_copy_from_nixstore_symlink(
    monkeypatch: pytest.MonkeyPatch, temporary_home: Path
) -> None:
    """copy_from_nixstore must preserve symlinks into other store paths."""
    src = temporary_home / "src"
    src.mkdir()
    (src / "file.txt").write_text("magicstring!")
    res = run(nix_command(["store", "add", str(src)]))
    src_nix = Path(res.stdout.strip())
    # src2 contains a symlink pointing into the first store path.
    src2 = temporary_home / "src2"
    src2.mkdir()
    (src2 / "file.txt").symlink_to(src_nix / "file.txt")
    res = run(nix_command(["store", "add", str(src2)]))
    src2_nix = Path(res.stdout.strip())
    dest = temporary_home / "dest"
    copy_from_nixstore(src2_nix, dest)
    assert (dest / "file.txt").exists()
    assert (dest / "file.txt").read_text() == "magicstring!"
    assert (dest / "file.txt").is_symlink()
@pytest.mark.impure
def test_clan_core_templates(
    test_flake_with_core: FlakeForTest,
    monkeypatch: pytest.MonkeyPatch,
    temporary_home: Path,
) -> None:
    """clan-core must expose the expected clan templates, and the default
    template must be copyable out of the store into a writable clan."""
    clan_dir = Flake(str(test_flake_with_core.path))
    nix_attrset = get_clan_nix_attrset(clan_dir)
    clan_core_templates = nix_attrset["inputs"][InputName("clan-core")]["templates"][
        "clan"
    ]
    clan_core_template_keys = list(clan_core_templates.keys())
    expected_templates = ["default", "flake-parts", "minimal", "minimal-flake-parts"]
    assert clan_core_template_keys == expected_templates
    # list_templates must agree with the raw attrset.
    vlist_temps = list_templates("clan", clan_dir)
    list_template_keys = list(vlist_temps.inputs[InputName("clan-core")].keys())
    assert list_template_keys == expected_templates
    default_template = get_template(
        TemplateName("default"),
        "clan",
        input_prio=None,
        clan_dir=clan_dir,
    )
    new_clan = temporary_home / "new_clan"
    copy_from_nixstore(
        Path(default_template.src["path"]),
        new_clan,
    )
    assert (new_clan / "flake.nix").exists()
    assert (new_clan / "machines").is_dir()
    assert (new_clan / "machines" / "jon").is_dir()
    config_nix_p = new_clan / "machines" / "jon" / "configuration.nix"
    assert (config_nix_p).is_file()
    # Test if we can write to the configuration.nix file
    with config_nix_p.open("r+") as f:
        data = f.read()
        f.write(data)
# Test Case 1: Minimal input with empty templates
@pytest.mark.with_core
def test_clan_get_nix_attrset_case_1(
    monkeypatch: pytest.MonkeyPatch,
    temporary_home: Path,
    test_flake_with_core: FlakeForTest,
) -> None:
    """Empty template groups are normalized: a missing 'clan' group is
    filled in and no modules are reported."""
    test_number = 1
    injected = {"templates": {"disko": {}, "machine": {}}}
    expected = {
        "inputs": {},
        "self": {"templates": {"disko": {}, "machine": {}, "clan": {}}, "modules": {}},
    }
    nix_attr_tester(test_flake_with_core, injected, expected, test_number)
# Test Case 2: Input with one template under 'clan'
@pytest.mark.with_core
def test_clan_get_nix_attrset_case_2(
    monkeypatch: pytest.MonkeyPatch,
    temporary_home: Path,
    test_flake_with_core: FlakeForTest,
) -> None:
    """A single 'clan' template is passed through; the other template
    groups are filled with empty sets."""
    test_number = 2
    injected = {
        "templates": {
            "clan": {
                "example_template": {
                    "description": "An example clan template.",
                    "path": "/example/path",
                }
            }
        }
    }
    expected = {
        "inputs": {},
        "self": {
            "templates": {
                "clan": {
                    "example_template": {
                        "description": "An example clan template.",
                        "path": "/example/path",
                    }
                },
                "disko": {},
                "machine": {},
            },
            "modules": {},
        },
    }
    nix_attrset = nix_attr_tester(test_flake_with_core, injected, expected, test_number)
    # The clan-core input must still contribute its own default template.
    assert "default" in list(
        nix_attrset["inputs"][InputName("clan-core")]["templates"]["clan"].keys()
    )
# Test Case 3: Input with templates under multiple types
@pytest.mark.with_core
def test_clan_get_nix_attrset_case_3(
    monkeypatch: pytest.MonkeyPatch,
    temporary_home: Path,
    test_flake_with_core: FlakeForTest,
) -> None:
    """Templates declared under all three groups survive unchanged."""
    test_number = 3
    injected = {
        "templates": {
            "clan": {
                "clan_template": {
                    "description": "A clan template.",
                    "path": "/clan/path",
                }
            },
            "disko": {
                "disko_template": {
                    "description": "A disko template.",
                    "path": "/disko/path",
                }
            },
            "machine": {
                "machine_template": {
                    "description": "A machine template.",
                    "path": "/machine/path",
                }
            },
        }
    }
    expected = {
        "inputs": {},
        "self": {
            "templates": {
                "clan": {
                    "clan_template": {
                        "description": "A clan template.",
                        "path": "/clan/path",
                    }
                },
                "disko": {
                    "disko_template": {
                        "description": "A disko template.",
                        "path": "/disko/path",
                    }
                },
                "machine": {
                    "machine_template": {
                        "description": "A machine template.",
                        "path": "/machine/path",
                    }
                },
            },
            "modules": {},
        },
    }
    nix_attr_tester(test_flake_with_core, injected, expected, test_number)
# Test Case 4: Input with modules only
@pytest.mark.with_core
def test_clan_get_nix_attrset_case_4(
    monkeypatch: pytest.MonkeyPatch,
    temporary_home: Path,
    test_flake_with_core: FlakeForTest,
) -> None:
    """Modules pass through; absent template groups are filled as empty."""
    test_number = 4
    injected = {
        "modules": {
            "module1": {"description": "First module", "path": "/module1/path"},
            "module2": {"description": "Second module", "path": "/module2/path"},
        }
    }
    expected = {
        "inputs": {},
        "self": {
            "modules": {
                "module1": {"description": "First module", "path": "/module1/path"},
                "module2": {"description": "Second module", "path": "/module2/path"},
            },
            "templates": {"disko": {}, "machine": {}, "clan": {}},
        },
    }
    nix_attr_tester(test_flake_with_core, injected, expected, test_number)
# Test Case 5: Input with both templates and modules
@pytest.mark.with_core
def test_clan_get_nix_attrset_case_5(
    monkeypatch: pytest.MonkeyPatch,
    temporary_home: Path,
    test_flake_with_core: FlakeForTest,
) -> None:
    """Templates and modules declared together both pass through."""
    test_number = 5
    injected = {
        "templates": {
            "clan": {
                "clan_template": {
                    "description": "A clan template.",
                    "path": "/clan/path",
                }
            }
        },
        "modules": {
            "module1": {"description": "First module", "path": "/module1/path"}
        },
    }
    expected = {
        "inputs": {},
        "self": {
            "modules": {
                "module1": {"description": "First module", "path": "/module1/path"}
            },
            "templates": {
                "clan": {
                    "clan_template": {
                        "description": "A clan template.",
                        "path": "/clan/path",
                    }
                },
                "disko": {},
                "machine": {},
            },
        },
    }
    nix_attr_tester(test_flake_with_core, injected, expected, test_number)
# Test Case 6: Input with missing 'templates' and 'modules' (empty clan attrset)
@pytest.mark.with_core
def test_clan_get_nix_attrset_case_6(
    monkeypatch: pytest.MonkeyPatch,
    temporary_home: Path,
    test_flake_with_core: FlakeForTest,
) -> None:
    """A completely empty clan attrset yields empty, fully-shaped defaults."""
    test_number = 6
    injected = {}
    expected = {
        "inputs": {},
        "self": {"templates": {"disko": {}, "machine": {}, "clan": {}}, "modules": {}},
    }
    nix_attr_tester(test_flake_with_core, injected, expected, test_number)

View File

@@ -0,0 +1,99 @@
from pathlib import Path
import pytest
from clan_cli.clan_uri import ClanURI
from clan_cli.flake import Flake
from clan_cli.tests.fixtures_flakes import ClanFlake
def test_get_url() -> None:
    """get_url strips the clan:// prefix for https, ~, absolute and file URLs."""
    # Create a ClanURI object from a remote URI with parameters
    uri = ClanURI.from_str("clan://https://example.com?password=1234#myVM")
    assert uri.get_url() == "https://example.com?password=1234"
    uri = ClanURI.from_str("clan://~/Downloads")
    assert uri.get_url().endswith("/Downloads")
    uri = ClanURI.from_str("clan:///home/user/Downloads")
    assert uri.get_url() == "/home/user/Downloads"
    uri = ClanURI.from_str("clan://file:///home/user/Downloads")
    assert uri.get_url() == "file:///home/user/Downloads"
@pytest.mark.impure
def test_is_local(flake: ClanFlake) -> None:
    """git+file:// URLs are detected as local flakes."""
    uri = ClanURI.from_str(f"clan://git+file://{flake.path}")
    assert uri.get_url() == str(flake.path)
    assert uri.flake.is_local
    myflake = Flake(f"git+file://{flake.path}")
    assert myflake.is_local
def test_firefox_strip_uri() -> None:
    """Firefox drops the ':' after the scheme ("https//"); it is restored."""
    uri = ClanURI.from_str("clan://git+https//git.clan.lol/clan/democlan.git")
    assert uri.get_url() == "git+https://git.clan.lol/clan/democlan.git"
def test_local_uri(temp_dir: Path) -> None:
    """A file:// clan URI resolves to the local flake directory."""
    flake_nix = temp_dir / "flake.nix"
    flake_nix.write_text("outputs = _: {}")
    # Create a ClanURI object from a local URI
    uri = ClanURI.from_str(f"clan://file://{temp_dir}")
    assert uri.flake.path == temp_dir
def test_is_remote() -> None:
    """A remote https URL becomes the flake identifier verbatim."""
    # Create a ClanURI object from a remote URI
    uri = ClanURI.from_str("clan://https://example.com")
    assert uri.flake.identifier == "https://example.com"
def test_direct_local_path() -> None:
    """A ~ path (no scheme) is accepted and expanded."""
    # Create a ClanURI object from a local home-relative path
    uri = ClanURI.from_str("clan://~/Downloads")
    assert uri.get_url().endswith("/Downloads")
def test_direct_local_path2() -> None:
    """An absolute path (no scheme) is passed through unchanged."""
    # Create a ClanURI object from a local absolute path
    uri = ClanURI.from_str("clan:///home/user/Downloads")
    assert uri.get_url() == "/home/user/Downloads"
def test_remote_with_clanparams() -> None:
    """Without a #machine fragment the machine name defaults to defaultVM."""
    # Create a ClanURI object from a remote URI without an explicit machine
    uri = ClanURI.from_str("clan://https://example.com")
    assert uri.machine_name == "defaultVM"
    assert uri.flake.identifier == "https://example.com"
def test_from_str_remote() -> None:
    """An explicit machine_name argument overrides the default."""
    uri = ClanURI.from_str(url="https://example.com", machine_name="myVM")
    assert uri.get_url() == "https://example.com"
    assert uri.machine_name == "myVM"
    assert uri.flake.identifier == "https://example.com"
def test_from_str_local(temp_dir: Path) -> None:
    """A plain local directory plus machine_name yields a local flake."""
    flake_nix = temp_dir / "flake.nix"
    flake_nix.write_text("outputs = _: {}")
    uri = ClanURI.from_str(url=str(temp_dir), machine_name="myVM")
    assert uri.get_url().endswith(str(temp_dir))
    assert uri.machine_name == "myVM"
    assert uri.flake.is_local
    assert str(uri.flake).endswith(str(temp_dir))
def test_from_str_local_no_machine(temp_dir: Path) -> None:
    """A plain local directory without machine_name uses defaultVM."""
    flake_nix = temp_dir / "flake.nix"
    flake_nix.write_text("outputs = _: {}")
    uri = ClanURI.from_str(str(temp_dir))
    assert uri.get_url().endswith(str(temp_dir))
    assert uri.machine_name == "defaultVM"
    assert uri.flake.is_local
    assert str(uri.flake).endswith(str(temp_dir))

View File

@@ -0,0 +1,9 @@
import pytest
from clan_cli.tests.helpers import cli
from clan_cli.tests.stdout import CaptureOutput
def test_help(capture_output: CaptureOutput) -> None:
    """`clan --help` prints a usage banner and exits via SystemExit."""
    with capture_output as output, pytest.raises(SystemExit):
        cli.run(["--help"])
    assert output.out.startswith("usage:")

View File

@@ -0,0 +1,135 @@
import json
import logging
from pathlib import Path
import pytest
from clan_cli.cmd import run
from clan_cli.nix import nix_flake_show
from clan_cli.tests.fixtures_flakes import FlakeForTest, substitute
from clan_cli.tests.helpers import cli
from clan_cli.tests.stdout import CaptureOutput
log = logging.getLogger(__name__)
@pytest.mark.with_core
def test_create_flake(
    monkeypatch: pytest.MonkeyPatch,
    temporary_home: Path,
    clan_core: Path,
    capture_output: CaptureOutput,
) -> None:
    """Create a clan from the default template and add a machine to it."""
    flake_dir = temporary_home / "test-flake"
    cli.run(["flakes", "create", str(flake_dir), "--template=default", "--no-update"])
    assert (flake_dir / ".clan-flake").exists()
    # Replace the inputs.clan.url in the template flake.nix
    substitute(
        flake_dir / "flake.nix",
        clan_core,
    )
    # Dont evaluate the inventory before the substitute call
    monkeypatch.chdir(flake_dir)
    cli.run(["machines", "create", "machine1"])
    # create a hardware-configuration.nix that doesn't throw an eval error
    for patch_machine in ["jon", "sara"]:
        (
            flake_dir / "machines" / f"{patch_machine}/hardware-configuration.nix"
        ).write_text("{}")
    with capture_output as output:
        cli.run(["machines", "list"])
    assert "machine1" in output.out
    # The new machine must show up as a nixosConfiguration of the flake.
    flake_show = run(
        nix_flake_show(str(flake_dir)),
    )
    flake_outputs = json.loads(flake_show.stdout)
    try:
        flake_outputs["nixosConfigurations"]["machine1"]
    except KeyError:
        pytest.fail("nixosConfigurations.machine1 not found in flake outputs")
@pytest.mark.with_core
def test_create_flake_existing_git(
    monkeypatch: pytest.MonkeyPatch,
    temporary_home: Path,
    clan_core: Path,
    capture_output: CaptureOutput,
) -> None:
    """Same as test_create_flake, but inside a pre-existing git repository."""
    flake_dir = temporary_home / "test-flake"
    # Initialize git in the parent dir before creating the clan.
    run(["git", "init", str(temporary_home)])
    cli.run(["flakes", "create", str(flake_dir), "--template=default", "--no-update"])
    assert (flake_dir / ".clan-flake").exists()
    # Replace the inputs.clan.url in the template flake.nix
    substitute(
        flake_dir / "flake.nix",
        clan_core,
    )
    # Dont evaluate the inventory before the substitute call
    monkeypatch.chdir(flake_dir)
    cli.run(["machines", "create", "machine1"])
    # create a hardware-configuration.nix that doesn't throw an eval error
    for patch_machine in ["jon", "sara"]:
        (
            flake_dir / "machines" / f"{patch_machine}/hardware-configuration.nix"
        ).write_text("{}")
    with capture_output as output:
        cli.run(["machines", "list"])
    assert "machine1" in output.out
    flake_show = run(
        nix_flake_show(str(flake_dir)),
    )
    flake_outputs = json.loads(flake_show.stdout)
    try:
        flake_outputs["nixosConfigurations"]["machine1"]
    except KeyError:
        pytest.fail("nixosConfigurations.machine1 not found in flake outputs")
@pytest.mark.with_core
def test_ui_template(
    monkeypatch: pytest.MonkeyPatch,
    temporary_home: Path,
    test_flake_with_core: FlakeForTest,
    clan_core: Path,
    capture_output: CaptureOutput,
) -> None:
    """Create a clan from the minimal template and add a machine to it."""
    flake_dir = temporary_home / "test-flake"
    cli.run(["flakes", "create", str(flake_dir), "--template=minimal", "--no-update"])
    # Replace the inputs.clan.url in the template flake.nix
    substitute(
        flake_dir / "flake.nix",
        clan_core,
    )
    monkeypatch.chdir(flake_dir)
    cli.run(["machines", "create", "machine1"])
    with capture_output as output:
        cli.run(["machines", "list"])
    assert "machine1" in output.out
    flake_show = run(
        nix_flake_show(str(flake_dir)),
    )
    flake_outputs = json.loads(flake_show.stdout)
    try:
        flake_outputs["nixosConfigurations"]["machine1"]
    except KeyError:
        pytest.fail("nixosConfigurations.machine1 not found in flake outputs")

View File

@@ -0,0 +1,296 @@
from dataclasses import dataclass, field
from pathlib import Path
from typing import Any, Literal
import pytest
# Functions to test
from clan_cli.api import dataclass_to_dict, from_dict
from clan_cli.errors import ClanError
from clan_cli.machines import machines
def test_simple() -> None:
    """Deserialize a flat dataclass from a plain dict."""
    @dataclass
    class Person:
        name: str
    person_dict = {
        "name": "John",
    }
    expected_person = Person(
        name="John",
    )
    assert from_dict(Person, person_dict) == expected_person
def test_nested() -> None:
    """Deserialize dataclasses nested inside unions, lists and dicts."""
    @dataclass
    class Age:
        value: str
    @dataclass
    class Person:
        name: str
        # deeply nested dataclasses
        home: Path | str | None
        age: Age
        age_list: list[Age]
        age_dict: dict[str, Age]
    # Optional field
    person_dict = {
        "name": "John",
        "age": {
            "value": "99",
        },
        "age_list": [{"value": "66"}, {"value": "77"}],
        "age_dict": {"now": {"value": "55"}, "max": {"value": "100"}},
        "home": "/home",
    }
    expected_person = Person(
        name="John",
        age=Age("99"),
        age_list=[Age("66"), Age("77")],
        age_dict={"now": Age("55"), "max": Age("100")},
        # "home" is deserialized to Path, the first matching union member.
        home=Path("/home"),
    )
    assert from_dict(Person, person_dict) == expected_person
def test_nested_nullable() -> None:
    """Nullable fields with defaults and unknown extra keys (op_key) are
    handled when deserializing a realistic nested payload."""
    @dataclass
    class SystemConfig:
        language: str | None = field(default=None)
        keymap: str | None = field(default=None)
        ssh_keys_path: list[str] | None = field(default=None)
    @dataclass
    class FlashOptions:
        machine: machines.Machine
        mode: str
        disks: dict[str, str]
        system_config: SystemConfig
        dry_run: bool
        write_efi_boot_entries: bool
        debug: bool
    data = {
        "machine": {
            "name": "flash-installer",
            "flake": {"identifier": "git+https://git.clan.lol/clan/clan-core"},
        },
        "mode": "format",
        "disks": {"main": "/dev/sda"},
        "system_config": {"language": "en_US.UTF-8", "keymap": "en"},
        "dry_run": False,
        "write_efi_boot_entries": False,
        "debug": False,
        "op_key": "jWnTSHwYhSgr7Qz3u4ppD",
    }
    expected = FlashOptions(
        machine=machines.Machine(
            name="flash-installer",
            flake=machines.Flake("git+https://git.clan.lol/clan/clan-core"),
        ),
        mode="format",
        disks={"main": "/dev/sda"},
        system_config=SystemConfig(
            language="en_US.UTF-8", keymap="en", ssh_keys_path=None
        ),
        dry_run=False,
        write_efi_boot_entries=False,
        debug=False,
    )
    assert from_dict(FlashOptions, data) == expected
def test_simple_field_missing() -> None:
    """A missing required field raises ClanError."""
    @dataclass
    class Person:
        name: str
    person_dict: Any = {}
    with pytest.raises(ClanError):
        from_dict(Person, person_dict)
def test_nullable() -> None:
    """A field annotated as None accepts an explicit null value."""
    @dataclass
    class Person:
        name: None
    person_dict = {
        "name": None,
    }
    # BUG FIX: the result was previously discarded, so the test only
    # verified that no exception was raised. Assert the value too.
    assert from_dict(Person, person_dict) == Person(name=None)
def test_nullable_non_exist() -> None:
    """Even a None-typed field must be present in the input data."""
    @dataclass
    class Person:
        name: None
    person_dict: Any = {}
    with pytest.raises(ClanError):
        from_dict(Person, person_dict)
def test_list() -> None:
    """Deserialize a top-level list of dataclasses."""
    data = [
        {"name": "John"},
        {"name": "Sarah"},
    ]
    @dataclass
    class Name:
        name: str
    result = from_dict(list[Name], data)
    assert result == [Name("John"), Name("Sarah")]
def test_alias_field() -> None:
    """A field alias is used for deserialization and (by default) serialization."""
    @dataclass
    class Person:
        name: str = field(metadata={"alias": "--user-name--"})
    data = {"--user-name--": "John"}
    expected = Person(name="John")
    person = from_dict(Person, data)
    # Deserialize
    assert person == expected
    # Serialize with alias
    assert dataclass_to_dict(person) == data
    # Serialize without alias
    assert dataclass_to_dict(person, use_alias=False) == {"name": "John"}
def test_alias_field_from_orig_name() -> None:
    """
    Field declares an alias; data provided under any other key
    (here "user", which is neither the alias nor the field name) fails.
    """
    @dataclass
    class Person:
        name: str = field(metadata={"alias": "--user-name--"})
    data = {"user": "John"}
    with pytest.raises(ClanError):
        from_dict(Person, data)
def test_none_or_string() -> None:
    """
    from_dict with None data returns None for any nullable target type.
    """
    data = None
    @dataclass
    class Person:
        name: Path
    checked: str | None = from_dict(str | None, data)
    assert checked is None
    checked2: dict[str, str] | None = from_dict(dict[str, str] | None, data)
    assert checked2 is None
    checked3: Person | None = from_dict(Person | None, data)
    assert checked3 is None
def test_roundtrip_escape() -> None:
    """Escape sequences survive the serialize/deserialize round trip."""
    assert from_dict(str, "\n") == "\n"
    assert dataclass_to_dict("\n") == "\n"
    # Test that the functions are inverses of each other
    # f(g(x)) == x
    # and
    # g(f(x)) == x
    assert from_dict(str, dataclass_to_dict("\n")) == "\n"
    assert dataclass_to_dict(from_dict(str, "\\n")) == "\\n"
def test_path_field() -> None:
    """String values deserialize into pathlib.Path fields."""
    @dataclass
    class Person:
        name: Path
    data = {"name": "John"}
    expected = Person(name=Path("John"))
    assert from_dict(Person, data) == expected
def test_private_public_fields() -> None:
    """Underscore-prefixed (private) fields are excluded from serialization."""
    @dataclass
    class Person:
        name: Path
        _name: str | None = None
    data = {"name": "John"}
    expected = Person(name=Path("John"))
    assert from_dict(Person, data) == expected
    # _name does not appear in the serialized dict.
    assert dataclass_to_dict(expected) == data
def test_literal_field() -> None:
    """Literal fields accept only their enumerated values."""
    @dataclass
    class Person:
        name: Literal["open_file", "select_folder", "save"]
    data = {"name": "open_file"}
    expected = Person(name="open_file")
    assert from_dict(Person, data) == expected
    assert dataclass_to_dict(expected) == data
    with pytest.raises(ClanError):
        # Not a valid value
        from_dict(Person, {"name": "open"})
def test_enum_roundtrip() -> None:
    """Enum members serialize to their values and deserialize back."""
    from enum import Enum
    class MyEnum(Enum):
        FOO = "abc"
        BAR = 2
    @dataclass
    class Person:
        name: MyEnum
    # Both are equivalent
    data = {"name": "abc"}  # JSON Representation
    expected = Person(name=MyEnum.FOO)  # Data representation
    assert from_dict(Person, data) == expected
    assert dataclass_to_dict(expected) == data
    # Same test for integer values
    data2 = {"name": 2}  # JSON Representation
    expected2 = Person(name=MyEnum.BAR)  # Data representation
    assert from_dict(Person, data2) == expected2
    assert dataclass_to_dict(expected2) == data2

View File

@@ -0,0 +1,35 @@
# from clan_cli.dirs import _get_clan_flake_toplevel
# TODO: Reimplement test?
# def test_get_clan_flake_toplevel(
# monkeypatch: pytest.MonkeyPatch, temporary_home: Path
# ) -> None:
# monkeypatch.chdir(temporary_home)
# with pytest.raises(ClanError):
# print(_get_clan_flake_toplevel())
# (temporary_home / ".git").touch()
# assert _get_clan_flake_toplevel() == temporary_home
# subdir = temporary_home / "subdir"
# subdir.mkdir()
# monkeypatch.chdir(subdir)
# (subdir / ".clan-flake").touch()
# assert _get_clan_flake_toplevel() == subdir
from clan_cli.dirs import clan_key_safe, vm_state_dir
def test_clan_key_safe() -> None:
    """clan_key_safe percent-encodes path separators for safe dir names."""
    assert clan_key_safe("/foo/bar") == "%2Ffoo%2Fbar"
def test_vm_state_dir_identity() -> None:
    """Same flake URL and VM name always map to the same state dir."""
    dir1 = vm_state_dir("https://some.clan", "vm1")
    dir2 = vm_state_dir("https://some.clan", "vm1")
    assert str(dir1) == str(dir2)
def test_vm_state_dir_no_collision() -> None:
    """Different flake identifiers must yield distinct state dirs."""
    dir1 = vm_state_dir("/foo/bar", "vm1")
    dir2 = vm_state_dir("https://some.clan", "vm1")
    assert str(dir1) != str(dir2)

View File

@@ -0,0 +1,10 @@
{ lib, ... }:
{
  # Minimal fake clan module used by the test flake fixtures.
  options.clan.fake-module.fake-flag = lib.mkOption {
    type = lib.types.bool;
    default = false;
    description = ''
      A useless fake flag for testing purposes.
    '';
  };
}

View File

@@ -0,0 +1,44 @@
{
  # Test fixture flake: provides a fake clan-core input and one
  # nixosConfiguration so CLI tests can evaluate against it.
  # this placeholder is replaced by the path to nixpkgs
  inputs.nixpkgs.url = "__NIXPKGS__";
  outputs =
    inputs':
    let
      # fake clan-core input
      fake-clan-core = {
        clanModules.fake-module = ./fake-module.nix;
      };
      inputs = inputs' // {
        clan-core = fake-clan-core;
      };
      lib = inputs.nixpkgs.lib;
      # Optional clan attrset injected by tests via clan_attrs.json.
      clan_attrs_json =
        if lib.pathExists ./clan_attrs.json then
          builtins.fromJSON (builtins.readFile ./clan_attrs.json)
        else
          { };
    in
    {
      clan = clan_attrs_json;
      nixosConfigurations.machine1 = inputs.nixpkgs.lib.nixosSystem {
        modules = [
          ./nixosModules/machine1.nix
          (
            {
              ...
            }:
            {
              config = {
                nixpkgs.hostPlatform = "x86_64-linux";
                # speed up by not instantiating nixpkgs twice and disable documentation
                nixpkgs.pkgs = inputs.nixpkgs.legacyPackages.x86_64-linux;
                documentation.enable = false;
              };
            }
          )
        ];
      };
    };
}

View File

@@ -0,0 +1,8 @@
{ lib, ... }:
{
  # Test fixture module: exposes a single boolean option for CLI tests.
  options.clan.jitsi.enable = lib.mkOption {
    type = lib.types.bool;
    default = false;
    description = "Enable jitsi on this machine";
  };
}

View File

@@ -0,0 +1,134 @@
import logging
import pytest
from clan_cli.flake import Flake, FlakeCache, FlakeCacheEntry
from clan_cli.tests.fixtures_flakes import ClanFlake
log = logging.getLogger(__name__)
def test_select() -> None:
    """FlakeCacheEntry supports indexed access, cache queries and select."""
    testdict = {"x": {"y": [123, 345, 456], "z": "bla"}}
    test_cache = FlakeCacheEntry(testdict, [])
    assert test_cache["x"]["z"].value == "bla"
    assert test_cache.is_cached(["x", "z"])
    # A deeper path under a list is not considered cached.
    assert not test_cache.is_cached(["x", "y", "z"])
    assert test_cache.select(["x", "y", 0]) == 123
    # Indexing into a string value is not cached either.
    assert not test_cache.is_cached(["x", "z", 1])
def test_insert() -> None:
    """Inserting the same value twice at the same path is idempotent."""
    test_cache = FlakeCacheEntry({}, [])
    # Inserting the same thing twice should succeed
    test_cache.insert(None, ["nix"])
    test_cache.insert(None, ["nix"])
    assert test_cache.select(["nix"]) is None
def test_out_path() -> None:
    """Selecting "outPath" on a store-path string returns the string itself."""
    testdict = {"x": {"y": [123, 345, 456], "z": "/nix/store/bla"}}
    test_cache = FlakeCacheEntry(testdict, [])
    assert test_cache.select(["x", "z"]) == "/nix/store/bla"
    assert test_cache.select(["x", "z", "outPath"]) == "/nix/store/bla"
@pytest.mark.with_core
def test_flake_caching(flake: ClanFlake) -> None:
    """A '*' selector evaluates all machines' hostnames in one select."""
    m1 = flake.machines["machine1"]
    m1["nixpkgs"]["hostPlatform"] = "x86_64-linux"
    flake.machines["machine2"] = m1.copy()
    flake.machines["machine3"] = m1.copy()
    flake.refresh()
    flake_ = Flake(str(flake.path))
    hostnames = flake_.select("nixosConfigurations.*.config.networking.hostName")
    assert hostnames == {
        "machine1": "machine1",
        "machine2": "machine2",
        "machine3": "machine3",
    }
# NOTE(review): function name has a typo ("persistance" -> "persistence");
# renaming would change the collected test id, so it is only flagged here.
@pytest.mark.with_core
def test_cache_persistance(flake: ClanFlake) -> None:
    """Selections made through one Flake instance are visible to another
    instance of the same flake after it re-reads the persisted cache."""
    m1 = flake.machines["machine1"]
    m1["nixpkgs"]["hostPlatform"] = "x86_64-linux"
    flake.refresh()
    flake1 = Flake(str(flake.path))
    flake2 = Flake(str(flake.path))
    flake1.prefetch()
    flake2.prefetch()
    assert isinstance(flake1._cache, FlakeCache)  # noqa: SLF001
    assert isinstance(flake2._cache, FlakeCache)  # noqa: SLF001
    assert not flake1._cache.is_cached(  # noqa: SLF001
        "nixosConfigurations.*.config.networking.hostName"
    )
    flake1.select("nixosConfigurations.*.config.networking.hostName")
    flake1.select("nixosConfigurations.*.config.networking.{hostName,hostId}")
    # flake2 re-reads the cache written by flake1's selects.
    flake2.prefetch()
    assert flake2._cache.is_cached(  # noqa: SLF001
        "nixosConfigurations.*.config.networking.{hostName,hostId}"
    )
@pytest.mark.with_core
def test_conditional_all_selector(flake: ClanFlake) -> None:
    """`*.{a,b}` selectors with partially missing attributes return the same
    result on the cached path as on the first evaluation."""
    machine1 = flake.machines["machine1"]
    machine1["nixpkgs"]["hostPlatform"] = "x86_64-linux"
    flake.refresh()

    flake1 = Flake(str(flake.path))
    flake2 = Flake(str(flake.path))
    for instance in (flake1, flake2):
        instance.prefetch()
        assert isinstance(instance._cache, FlakeCache)  # noqa: SLF001

    log.info("First select")
    first = flake1.select("inputs.*.{clan,missing}")
    log.info("Second (cached) select")
    second = flake1.select("inputs.*.{clan,missing}")

    assert first == second
    # the clan-core input does carry a `clan` attribute
    assert first["clan-core"].get("clan") is not None
    flake2.prefetch()
# This test fails because the CI sandbox does not have the required packages to run the generators
# maybe @DavHau or @Qubasa can fix this at some point :)
# @pytest.mark.with_core
# def test_cache_invalidation(flake: ClanFlake, sops_setup: SopsSetup) -> None:
# m1 = flake.machines["machine1"]
# m1["nixpkgs"]["hostPlatform"] = "x86_64-linux"
# flake.refresh()
# clan_dir = Flake(str(flake.path))
# machine1 = Machine(
# name="machine1",
# flake=clan_dir,
# )
# sops_setup.init(flake.path)
# generate_vars([machine1])
#
# flake.inventory["services"] = {
# "sshd": {
# "someid": {
# "roles": {
# "server": {
# "machines": ["machine1"],
# }
# }
# }
# }
# }
# flake.refresh()
# machine1.flush_caches() # because flake.refresh() does not invalidate the cache but it writes into the directory
#
# generate_vars([machine1])
# vpn_ip = (
# get_var(str(clan_dir), machine1.name, "openssh/ssh.id_ed25519")
# .value.decode()
# .strip("\n")
# )
# assert vpn_ip is not None

View File

@@ -0,0 +1,64 @@
{
  # Use this path to our repo root e.g. for UI test
  # inputs.clan-core.url = "../../../../.";
  # this placeholder is replaced by the path to nixpkgs
  inputs.clan-core.url = "__CLAN_CORE__";
  inputs.nixpkgs.url = "__NIXPKGS__";
  outputs =
    {
      self,
      clan-core,
      nixpkgs,
      ...
    }:
    let
      # Optional extra clan attributes, read from clan_attrs.json when present
      # (written by the test fixtures); otherwise an empty attrset.
      clan_attrs_json =
        if nixpkgs.lib.pathExists ./clan_attrs.json then
          builtins.fromJSON (builtins.readFile ./clan_attrs.json)
        else
          { };
      clan = clan-core.lib.buildClan {
        inherit self;
        meta.name = "test_flake_with_core";
        machines = {
          # vm1: zerotier controller using sops-backed secrets
          vm1 =
            { config, ... }:
            {
              nixpkgs.hostPlatform = "x86_64-linux";
              clan.core.networking.targetHost = "__CLAN_TARGET_ADDRESS__";
              system.stateVersion = config.system.nixos.release;
              sops.age.keyFile = "__CLAN_SOPS_KEY_PATH__";
              clan.core.facts.secretUploadDirectory = "__CLAN_SOPS_KEY_DIR__";
              clan.core.sops.defaultGroups = [ "admins" ];
              clan.virtualisation.graphics = false;
              clan.core.networking.zerotier.controller.enable = true;
              networking.useDHCP = false;
            };
          # vm2: joins a fixed zerotier network and exercises the
          # sshd / root-password / user-password clan modules
          vm2 =
            { config, ... }:
            {
              nixpkgs.hostPlatform = "x86_64-linux";
              imports = [
                clan-core.clanModules.sshd
                clan-core.clanModules.root-password
                clan-core.clanModules.user-password
              ];
              clan.user-password.user = "alice";
              clan.user-password.prompt = false;
              clan.core.networking.targetHost = "__CLAN_TARGET_ADDRESS__";
              system.stateVersion = config.system.nixos.release;
              sops.age.keyFile = "__CLAN_SOPS_KEY_PATH__";
              clan.core.facts.secretUploadDirectory = "__CLAN_SOPS_KEY_DIR__";
              clan.core.networking.zerotier.networkId = "82b44b162ec6c013";
            };
        };
      };
    in
    {
      clan = clan_attrs_json;
      inherit (clan) nixosConfigurations clanInternals;
    };
}

View File

@@ -0,0 +1,49 @@
{
  # Use this path to our repo root e.g. for UI test
  # inputs.clan-core.url = "../../../../.";
  # this placeholder is replaced by the path to clan-core
  inputs.clan-core.url = "__CLAN_CORE__";
  outputs =
    { self, clan-core }:
    let
      clan = clan-core.lib.buildClan {
        inherit self;
        meta.name = "test_flake_with_core_and_pass";
        machines = {
          # vm1: uses the password-store secret backend instead of sops
          vm1 =
            { lib, config, ... }:
            {
              imports = [
                clan-core.clanModules.sshd
                clan-core.clanModules.root-password
                clan-core.clanModules.user-password
              ];
              clan.user-password.user = "alice";
              clan.user-password.prompt = false;
              clan.core.networking.targetHost = "__CLAN_TARGET_ADDRESS__";
              system.stateVersion = config.system.nixos.release;
              clan.core.facts.secretStore = "password-store";
              clan.core.facts.secretUploadDirectory = lib.mkForce "__CLAN_SOPS_KEY_DIR__/secrets";
              clan.core.networking.zerotier.controller.enable = true;
              # Power the VM off right after boot so the test run terminates.
              systemd.services.shutdown-after-boot = {
                enable = true;
                wantedBy = [ "multi-user.target" ];
                after = [ "multi-user.target" ];
                script = ''
                  #!/usr/bin/env bash
                  shutdown -h now
                '';
              };
            };
        };
      };
    in
    {
      inherit (clan) nixosConfigurations clanInternals;
    };
}

View File

@@ -0,0 +1,24 @@
{
  # Use this path to our repo root e.g. for UI test
  # inputs.clan-core.url = "../../../../.";
  # this placeholder is replaced by the path to nixpkgs
  inputs.clan-core.url = "__CLAN_CORE__";
  outputs =
    { self, clan-core }:
    let
      clan = clan-core.lib.buildClan {
        inherit self;
        meta.name = "test_flake_with_core_dynamic_machines";
        # Discover machine modules dynamically: every entry in ./machines
        # becomes one machine, imported by its directory/file name.
        machines =
          let
            machineModules = builtins.readDir (self + "/machines");
          in
          builtins.mapAttrs (name: _type: import (self + "/machines/${name}")) machineModules;
      };
    in
    {
      inherit (clan) nixosConfigurations clanInternals;
    };
}

View File

@@ -0,0 +1,27 @@
from typing import TYPE_CHECKING
import pytest
from clan_cli.tests.fixtures_flakes import FlakeForTest
from clan_cli.tests.helpers import cli
from clan_cli.tests.stdout import CaptureOutput
if TYPE_CHECKING:
pass
@pytest.mark.impure
def test_flakes_inspect(
    test_flake_with_core: FlakeForTest, capture_output: CaptureOutput
) -> None:
    """`clan flakes inspect --machine vm1` prints the flake's icon information."""
    args = [
        "flakes",
        "inspect",
        "--flake",
        str(test_flake_with_core.path),
        "--machine",
        "vm1",
    ]
    with capture_output as output:
        cli.run(args)
    assert "Icon" in output.out

View File

@@ -0,0 +1,65 @@
import subprocess
import tempfile
from pathlib import Path
import pytest
from clan_cli import git
from clan_cli.errors import ClanError
def test_commit_file(git_repo: Path) -> None:
    """Committing a file leaves a clean tree and the requested commit message."""
    target = git_repo / "test.txt"
    target.touch()
    git.commit_file(target, git_repo, "test commit")
    # the file is still present in the working tree
    assert target.exists()
    # nothing is left unstaged or uncommitted
    status = subprocess.check_output(["git", "status", "--porcelain"], cwd=git_repo)
    assert not status
    # HEAD carries exactly the message we passed in
    message = subprocess.check_output(
        ["git", "log", "-1", "--pretty=%B"], cwd=git_repo
    ).decode("utf-8")
    assert message == "test commit\n\n"
def test_commit_file_outside_git_raises_error(git_repo: Path) -> None:
    """A file living outside the repository cannot be committed into it."""
    with tempfile.NamedTemporaryFile() as outside_file:
        with pytest.raises(ClanError):
            git.commit_file(Path(outside_file.name), git_repo, "test commit")
def test_commit_file_not_existing_raises_error(git_repo: Path) -> None:
    """Committing a path that does not exist raises ClanError."""
    missing = Path("test.txt")
    with pytest.raises(ClanError):
        git.commit_file(missing, git_repo, "test commit")
def test_clan_flake_in_subdir(git_repo: Path, monkeypatch: pytest.MonkeyPatch) -> None:
    """commit_file works when invoked from a clan-flake subdirectory of the repo."""
    flake_dir = git_repo / "clan_flake"
    flake_dir.mkdir()
    (flake_dir / ".clan-flake").touch()
    # run the rest of the test from inside the subdirectory
    monkeypatch.chdir(flake_dir)
    # give the repository an initial commit containing the marker file
    subprocess.run(["git", "add", "."], cwd=git_repo, check=True)
    subprocess.run(["git", "commit", "-m", "init"], cwd=git_repo, check=True)

    new_file = flake_dir / "test.txt"
    new_file.touch()
    git.commit_file(new_file, git_repo, "test commit")

    assert new_file.exists()
    # the working tree must be clean after the commit
    assert not subprocess.check_output(["git", "status", "--porcelain"], cwd=git_repo)
    # and HEAD must carry exactly the requested message
    last_message = subprocess.check_output(
        ["git", "log", "-1", "--pretty=%B"], cwd=git_repo
    ).decode("utf-8")
    assert last_message == "test commit\n\n"

View File

@@ -0,0 +1,47 @@
import json
from typing import TYPE_CHECKING
import pytest
from clan_cli.dirs import user_history_file
from clan_cli.history.add import HistoryEntry
from clan_cli.tests.fixtures_flakes import FlakeForTest
from clan_cli.tests.helpers import cli
from clan_cli.tests.stdout import CaptureOutput
if TYPE_CHECKING:
pass
@pytest.mark.impure
def test_history_add(
    test_flake_with_core: FlakeForTest,
) -> None:
    """`history add` persists the flake URI into the user history file."""
    uri = f"clan://{test_flake_with_core.path}#vm1"
    cli.run(["history", "add", uri])

    history_file = user_history_file()
    assert history_file.exists()
    entries = [
        HistoryEntry.from_json(raw) for raw in json.loads(history_file.read_text())
    ]
    # the first entry points at the flake we just added
    assert str(entries[0].flake.flake_url) == str(test_flake_with_core.path)
@pytest.mark.impure
def test_history_list(
    capture_output: CaptureOutput,
    test_flake_with_core: FlakeForTest,
) -> None:
    """`history list` shows a flake only after it has been added."""
    flake_path = str(test_flake_with_core.path)

    with capture_output as output:
        cli.run(["history", "list"])
    assert flake_path not in output.out

    cli.run(["history", "add", f"clan://{flake_path}#vm1"])
    with capture_output as output:
        cli.run(["history", "list"])
    assert flake_path in output.out

View File

@@ -0,0 +1,99 @@
from pathlib import Path
from typing import TYPE_CHECKING
import pytest
from clan_cli.tests.fixtures_flakes import FlakeForTest
from clan_cli.tests.helpers import cli
from clan_cli.tests.stdout import CaptureOutput
if TYPE_CHECKING:
from .age_keys import KeyPair
def test_import_sops(
    test_root: Path,
    test_flake: FlakeForTest,
    capture_output: CaptureOutput,
    monkeypatch: pytest.MonkeyPatch,
    age_keys: list["KeyPair"],
) -> None:
    """`secrets import-sops` makes the imported secrets readable by the
    machine and the members of the targeted group."""
    monkeypatch.setenv("SOPS_AGE_KEY", age_keys[1].privkey)
    flake = str(test_flake.path)

    cli.run(
        ["secrets", "machines", "add", "--flake", flake, "machine1", age_keys[0].pubkey]
    )
    # two users, both members of group1
    for user, key in (("user1", age_keys[1]), ("user2", age_keys[2])):
        cli.run(["secrets", "users", "add", "--flake", flake, user, key.pubkey])
    for user in ("user1", "user2"):
        cli.run(["secrets", "groups", "add-user", "--flake", flake, "group1", user])

    # To edit:
    # SOPS_AGE_KEY=AGE-SECRET-KEY-1U5ENXZQAY62NC78Y2WC0SEGRRMAEEKH79EYY5TH4GPFWJKEAY0USZ6X7YQ sops --age age14tva0txcrl0zes05x7gkx56qd6wd9q3nwecjac74xxzz4l47r44sv3fz62 ./data/secrets.yaml
    cli.run(
        [
            "secrets",
            "import-sops",
            "--flake",
            flake,
            "--group",
            "group1",
            "--machine",
            "machine1",
            str(test_root.joinpath("data", "secrets.yaml")),
        ]
    )

    with capture_output as output:
        cli.run(["secrets", "users", "list", "--flake", flake])
    assert sorted(output.out.rstrip().split()) == ["user1", "user2"]

    with capture_output as output:
        cli.run(["secrets", "get", "--flake", flake, "secret-key"])
    assert output.out == "secret-value"

View File

@@ -0,0 +1,41 @@
from clan_cli.inventory.classes import Inventory, Machine, Meta, Service
def test_make_meta_minimal() -> None:
    """A Meta only requires a name."""
    meta = Meta({"name": "foo"})
    assert meta == {"name": "foo"}
def test_make_inventory_minimal() -> None:
    """An Inventory only requires its meta section."""
    inventory = Inventory({"meta": Meta({"name": "foo"})})
    assert inventory == {"meta": {"name": "foo"}}
def test_make_machine_minimal() -> None:
    """A Machine may be entirely empty."""
    assert Machine({}) == {}
def test_make_service_minimal() -> None:
    """A Service may be entirely empty."""
    assert Service({}) == {}

View File

@@ -0,0 +1,125 @@
import pytest
from age_keys import SopsSetup, assert_secrets_file_recipients
from clan_cli.inventory import load_inventory_json
from clan_cli.secrets.folders import sops_machines_folder
from clan_cli.tests import fixtures_flakes
from clan_cli.tests.helpers import cli
from clan_cli.tests.stdout import CaptureOutput
@pytest.mark.impure
def test_machine_subcommands(
    test_flake_with_core: fixtures_flakes.FlakeForTest,
    capture_output: CaptureOutput,
) -> None:
    """machines create/list/delete keep the inventory and CLI listing in sync."""
    flake = str(test_flake_with_core.path)

    cli.run(["machines", "create", "--flake", flake, "machine1", "--tags", "vm"])
    inventory: dict = dict(load_inventory_json(flake))
    assert "machine1" in inventory["machines"]
    assert "service" not in inventory

    with capture_output as output:
        cli.run(["machines", "list", "--flake", flake])
    print(output.out)
    # the new machine plus the two statically defined ones are listed
    for expected in ("machine1", "vm1", "vm2"):
        assert expected in output.out

    cli.run(["machines", "delete", "--flake", flake, "machine1"])
    inventory_after: dict = dict(load_inventory_json(flake))
    assert "machine1" not in inventory_after["machines"]
    assert "service" not in inventory_after

    with capture_output as output:
        cli.run(["machines", "list", "--flake", flake])
    # only the deleted machine disappears from the listing
    assert "machine1" not in output.out
    assert "vm1" in output.out
    assert "vm2" in output.out
@pytest.mark.with_core
def test_machine_delete(
    monkeypatch: pytest.MonkeyPatch,
    flake_with_sops: fixtures_flakes.ClanFlake,
    sops_setup: SopsSetup,
) -> None:
    """Deleting a machine removes its vars store and its sops key folder, and
    re-encrypts shared secrets for the remaining recipients only."""
    flake = flake_with_sops

    admin_key, machine_key, machine2_key = sops_setup.keys
    # create a couple machines with their keys
    for name, key in (("my-machine", machine_key), ("my-machine2", machine2_key)):
        cli.run(["machines", "create", f"--flake={flake.path}", name])
        add_machine_key = [
            "secrets",
            "machines",
            "add",
            f"--flake={flake.path}",
            name,
            key.pubkey,
        ]
        cli.run(add_machine_key)

    # create a secret shared by both machines
    shared_secret_name = "shared_secret"
    with monkeypatch.context():
        monkeypatch.setenv("SOPS_NIX_SECRET", "secret_value")
        set_shared_secret = [
            "secrets",
            "set",
            f"--flake={flake.path}",
            "--machine=my-machine",
            "--machine=my-machine2",
            shared_secret_name,
        ]
        cli.run(set_shared_secret)

    my_machine_sops_folder = sops_machines_folder(flake.path) / "my-machine"
    assert (
        my_machine_sops_folder.is_dir()
    ), "A sops folder for `my-machine` should have been created with its public key"

    # define some vars generator for `my-machine`:
    config = flake.machines["my-machine"]
    config["nixpkgs"]["hostPlatform"] = "x86_64-linux"
    my_generator = config["clan"]["core"]["vars"]["generators"]["my_generator"]
    my_generator["files"]["my_value"]["secret"] = False
    my_generator["files"]["my_secret"]["secret"] = True
    my_generator["script"] = (
        "echo -n public > $out/my_value;"
        "echo -n secret > $out/my_secret;"
        "echo -n non-default > $out/value_with_default"
    )
    flake.refresh()  # saves "my_generator"
    monkeypatch.chdir(flake.path)
    cli.run(["vars", "generate", "--flake", str(flake.path), "my-machine"])
    my_machine_vars_store = flake.path / "vars/per-machine" / "my-machine"
    assert (
        my_machine_vars_store.is_dir()
    ), "A vars directory should have been created for `my-machine`"

    cli.run(["machines", "delete", "--flake", str(flake.path), "my-machine"])
    assert (
        not my_machine_vars_store.exists()
    ), "The vars directory for `my-machine` should have been deleted"
    assert (
        not my_machine_sops_folder.exists()
    ), "The sops folder holding the public key for `my-machine` should have been deleted"
    # the shared secret must now be encrypted only for the admin and the surviving machine
    expected_recipients = [admin_key, machine2_key]
    assert_secrets_file_recipients(flake.path, shared_secret_name, expected_recipients)

View File

@@ -0,0 +1,119 @@
import json
import subprocess
from typing import TYPE_CHECKING
import pytest
from clan_cli.api.modules import list_modules
from clan_cli.flake import Flake
from clan_cli.inventory import (
Inventory,
Machine,
MachineDeploy,
set_inventory,
)
from clan_cli.machines.create import CreateOptions, create_machine
from clan_cli.nix import nix_eval, run_no_stdout
from clan_cli.tests.fixtures_flakes import FlakeForTest
if TYPE_CHECKING:
from .age_keys import KeyPair
# from clan_cli.vars.var import machine_get_fact
from clan_cli.machines.machines import Machine as MachineMachine
from clan_cli.tests.helpers import cli
@pytest.mark.with_core
def test_list_modules(test_flake_with_core: FlakeForTest) -> None:
    """The clan core flake exposes multiple modules, including well-known ones."""
    modules_info = list_modules(str(test_flake_with_core.path))
    assert len(modules_info.items()) > 1
    # spot-check two modules that ship with clan-core
    for module in ("borgbackup", "syncthing"):
        assert module in modules_info
@pytest.mark.impure
def test_add_module_to_inventory(
    monkeypatch: pytest.MonkeyPatch,
    test_flake_with_core: FlakeForTest,
    age_keys: list["KeyPair"],
) -> None:
    """Adding a borgbackup service to the inventory generates vars and wires the
    generated ssh public key into the machine's borgbackup repo config."""
    base_path = test_flake_with_core.path

    monkeypatch.chdir(test_flake_with_core.path)
    monkeypatch.setenv("SOPS_AGE_KEY", age_keys[0].privkey)
    cli.run(
        [
            "secrets",
            "users",
            "add",
            "--flake",
            str(test_flake_with_core.path),
            "user1",
            age_keys[0].pubkey,
        ]
    )
    opts = CreateOptions(
        clan_dir=Flake(str(base_path)),
        machine=Machine(name="machine1", tags=[], deploy=MachineDeploy()),
    )

    create_machine(opts)

    # minimal facter report so the machine has a known system
    (test_flake_with_core.path / "machines" / "machine1" / "facter.json").write_text(
        json.dumps(
            {
                "version": 1,
                "system": "x86_64-linux",
            }
        )
    )
    subprocess.run(["git", "add", "."], cwd=test_flake_with_core.path, check=True)

    # declare a borgbackup service with machine1 as both client and server
    inventory: Inventory = {}
    inventory["services"] = {
        "borgbackup": {
            "borg1": {
                "meta": {"name": "borg1"},
                "roles": {
                    "client": {"machines": ["machine1"]},
                    "server": {"machines": ["machine1"]},
                },
            }
        }
    }
    set_inventory(inventory, base_path, "Add borgbackup service")

    # cmd = ["facts", "generate", "--flake", str(test_flake_with_core.path), "machine1"]
    cmd = ["vars", "generate", "--flake", str(test_flake_with_core.path), "machine1"]
    cli.run(cmd)

    machine = MachineMachine(
        name="machine1", flake=Flake(str(test_flake_with_core.path))
    )
    # locate the generator that the borgbackup module created
    generator = None
    for gen in machine.vars_generators:
        if gen.name == "borgbackup":
            generator = gen
            break
    assert generator
    ssh_key = machine.public_vars_store.get(generator, "borgbackup.ssh.pub")

    # the generated public key must show up as an authorized key of the repo
    cmd = nix_eval(
        [
            f"{base_path}#nixosConfigurations.machine1.config.services.borgbackup.repos",
            "--json",
        ]
    )
    proc = run_no_stdout(cmd)
    res = json.loads(proc.stdout.strip())
    assert res["machine1"]["authorizedKeys"] == [ssh_key.decode()]

View File

@@ -0,0 +1,595 @@
# Functions to test
from typing import Any
import pytest
from clan_cli.errors import ClanError
from clan_cli.inventory import (
calc_patches,
delete_by_path,
determine_writeability,
patch,
unmerge_lists,
)
# --------- Patching tests ---------
def test_patch_nested() -> None:
    """patch() with a dotted path replaces only the targeted leaf."""
    data = {"a": 1, "b": {"a": 2.1, "b": 2.2}, "c": 3}
    patch(data, "b.b", "foo")
    assert data == {"a": 1, "b": {"a": 2.1, "b": "foo"}, "c": 3}
def test_patch_nested_dict() -> None:
    """patch() with a dict value replaces the whole subtree, dropping siblings."""
    data = {"a": 1, "b": {"a": 2.1, "b": 2.2}, "c": 3}
    patch(data, "b", {"b": "foo"})
    # "b.a" is gone because the entire "b" dict was replaced
    assert data == {"a": 1, "b": {"b": "foo"}, "c": 3}
def test_create_missing_paths() -> None:
    """patch() creates every missing intermediate dictionary along the path."""
    data: dict = {"a": 1}
    patch(data, "b.c", "foo")
    assert data == {"a": 1, "b": {"c": "foo"}}

    data = {}
    patch(data, "a.b.c", "foo")
    assert data == {"a": {"b": {"c": "foo"}}}
# --------- Write tests ---------
#
def test_write_simple() -> None:
    """A prio of 100 is writeable; a mkDefault (1000) child is writeable too."""
    priorities = {
        "foo": {
            "__prio": 100,  # <- writeable: "foo"
            "bar": {"__prio": 1000},  # <- writeable: mkDefault "foo.bar"
        },
    }
    defaults: dict = {"foo": {}}
    persisted: dict = {}
    result = determine_writeability(priorities, defaults, persisted)
    assert result == {"writeable": {"foo", "foo.bar"}, "non_writeable": set()}
def test_write_inherited() -> None:
    """Children without their own __prio inherit writeability from the parent."""
    priorities = {
        "foo": {
            "__prio": 100,  # <- writeable: "foo"
            "bar": {
                # inherits prio from parent -> writeable: "foo.bar"
                "baz": {"__prio": 1000},  # <- writeable: "foo.bar.baz"
            },
        },
    }
    result = determine_writeability(priorities, {"foo": {"bar": {}}}, {})
    assert result == {
        "writeable": {"foo", "foo.bar", "foo.bar.baz"},
        "non_writeable": set(),
    }
def test_non_write_inherited() -> None:
    """A non-writeable parent (mkForce) makes all descendants non-writeable."""
    priorities = {
        "foo": {
            "__prio": 50,  # <- non-writeable: mkForce "foo" = {...}
            "bar": {
                # inherits prio from parent -> non-writeable
                "baz": {"__prio": 1000},  # <- non-writeable despite mkDefault
            },
        },
    }
    result = determine_writeability(priorities, {}, {})
    assert result == {
        "writeable": set(),
        "non_writeable": {"foo", "foo.bar", "foo.bar.baz"},
    }
def test_write_list() -> None:
    """Lists defined as defaults stay writeable because lists are merged."""
    priorities = {
        "foo": {
            "__prio": 100,
        },
    }
    defaults: dict = {
        # writeable: lists are merged; nix-defined values are filtered out later
        "foo": ["a", "b"]
    }
    result = determine_writeability(priorities, defaults, {})
    assert result == {
        "writeable": {"foo"},
        "non_writeable": set(),
    }
def test_write_because_written() -> None:
    """Keys already present in the persisted data are writeable regardless of prio."""
    priorities = {
        "foo": {
            "__prio": 100,  # <- writeable: "foo"
            "bar": {
                # inherits prio from parent -> writeable
                "baz": {"__prio": 100},  # <- normally non-writeable
                "foobar": {"__prio": 100},  # <- non-writeable
            },
        },
    }

    # With empty persisted data the equal-prio children are not writeable.
    result = determine_writeability(priorities, {"foo": {"bar": {}}}, {})
    assert result == {
        "writeable": {"foo", "foo.bar"},
        "non_writeable": {"foo.bar.baz", "foo.bar.foobar"},
    }

    # Once "foo.bar.baz" exists on disk it must stay writeable:
    # we created that data, so we know we may write to it.
    persisted: dict = {"foo": {"bar": {"baz": "foo"}}}
    result = determine_writeability(priorities, {}, persisted)
    assert result == {
        "writeable": {"foo", "foo.bar", "foo.bar.baz"},
        "non_writeable": {"foo.bar.foobar"},
    }
# --------- List unmerge tests ---------
def test_list_unmerge() -> None:
    """unmerge_lists() returns the elements not contributed by the inventory."""
    merged = ["machineA", "machineB"]
    from_inventory = ["machineB"]
    assert unmerge_lists(merged, from_inventory) == ["machineA"]
# --------- Write tests ---------
def test_update_simple() -> None:
    """calc_patches() emits a patch only for the key the user actually changed."""
    priorities = {
        "foo": {
            "__prio": 100,  # <- writeable: "foo"
            "bar": {"__prio": 1000},  # <- writeable: mkDefault "foo.bar"
            "nix": {"__prio": 100},  # <- non-writeable: defined in nix
        },
    }
    data_eval = {"foo": {"bar": "baz", "nix": "this is set in nix"}}
    data_disk: dict = {}

    writeables = determine_writeability(priorities, data_eval, data_disk)
    assert writeables == {"writeable": {"foo", "foo.bar"}, "non_writeable": {"foo.nix"}}

    update = {
        "foo": {
            "bar": "new value",  # <- user changed this
            "nix": "this is set in nix",  # <- untouched; changing it would error
        }
    }
    patchset, _ = calc_patches(
        data_disk, update, all_values=data_eval, writeables=writeables
    )
    assert patchset == {"foo.bar": "new value"}
def test_update_many() -> None:
    """Multiple writeable keys can be patched in a single calc_patches() call."""
    priorities = {
        "foo": {
            "__prio": 100,  # <- writeable: "foo"
            "bar": {"__prio": 100},
            "nix": {"__prio": 100},  # <- non-writeable: defined in nix
            "nested": {
                "__prio": 100,
                "x": {"__prio": 100},  # <- writeable: "foo.nested.x"
                "y": {"__prio": 100},  # <- non-writeable: "foo.nested.y"
            },
        },
    }
    data_eval = {
        "foo": {
            "bar": "baz",
            "nix": "this is set in nix",
            "nested": {"x": "x", "y": "y"},
        }
    }
    data_disk = {"foo": {"bar": "baz", "nested": {"x": "x"}}}

    writeables = determine_writeability(priorities, data_eval, data_disk)
    assert writeables == {
        "writeable": {"foo.nested", "foo", "foo.bar", "foo.nested.x"},
        "non_writeable": {"foo.nix", "foo.nested.y"},
    }

    update = {
        "foo": {
            "bar": "new value for bar",  # <- user changed this
            "nix": "this is set in nix",  # <- untouched non-writeable value
            "nested": {
                "x": "new value for x",  # <- user changed this
                "y": "y",  # <- untouched non-writeable value
            },
        }
    }
    patchset, _ = calc_patches(
        data_disk, update, all_values=data_eval, writeables=writeables
    )
    assert patchset == {
        "foo.bar": "new value for bar",
        "foo.nested.x": "new value for x",
    }
def test_update_parent_non_writeable() -> None:
    """Writing below a mkForce'd parent raises a ClanError."""
    priorities = {
        "foo": {
            "__prio": 50,  # <- non-writeable: "foo"
            "bar": {"__prio": 1000},  # <- mkDefault, shadowed by the parent
        },
    }
    data_eval = {"foo": {"bar": "baz"}}
    data_disk = {"foo": {"bar": "baz"}}

    writeables = determine_writeability(priorities, data_eval, data_disk)
    assert writeables == {"writeable": set(), "non_writeable": {"foo", "foo.bar"}}

    update = {"foo": {"bar": "new value"}}  # <- user tries to set this value
    with pytest.raises(ClanError) as error:
        calc_patches(data_disk, update, all_values=data_eval, writeables=writeables)
    assert str(error.value) == "Key 'foo.bar' is not writeable."
def test_update_list() -> None:
    """List patches only contain the elements not already provided by nix."""
    priorities = {
        "foo": {
            "__prio": 100,  # <- writeable: "foo"
        },
    }
    # "A" is contributed by nix, "B" comes from disk.
    data_eval = {"foo": ["A", "B"]}
    data_disk = {"foo": ["B"]}

    writeables = determine_writeability(priorities, data_eval, data_disk)
    assert writeables == {"writeable": {"foo"}, "non_writeable": set()}

    # Appending "C" keeps the disk part and the new element.
    patchset, _ = calc_patches(
        data_disk, {"foo": ["A", "B", "C"]}, all_values=data_eval, writeables=writeables
    )
    assert patchset == {"foo": ["B", "C"]}

    # Removing "B" leaves an empty disk list, since "A" is nix-defined.
    patchset, _ = calc_patches(
        data_disk, {"foo": ["A"]}, all_values=data_eval, writeables=writeables
    )
    assert patchset == {"foo": []}
def test_update_list_duplicates() -> None:
    """Introducing duplicate list elements is rejected with a ClanError."""
    priorities = {
        "foo": {
            "__prio": 100,  # <- writeable: "foo"
        },
    }
    data_eval = {"foo": ["A", "B"]}  # "A" is defined in nix
    data_disk = {"foo": ["B"]}

    writeables = determine_writeability(priorities, data_eval, data_disk)
    assert writeables == {"writeable": {"foo"}, "non_writeable": set()}

    update = {"foo": ["A", "B", "A"]}  # <- user adds a duplicate "A"
    with pytest.raises(ClanError) as error:
        calc_patches(data_disk, update, all_values=data_eval, writeables=writeables)
    assert (
        str(error.value)
        == "Key 'foo' contains duplicates: ['A']. This not supported yet."
    )
def test_dont_persist_defaults() -> None:
    """
    Default values should not be persisted to disk if not explicitly requested
    by the user.
    """
    priorities = {
        "enabled": {"__prio": 1500},
        "config": {"__prio": 100},
    }
    data_eval = {
        "enabled": True,
        "config": {"foo": "bar"},
    }
    data_disk: dict[str, Any] = {}
    writeables = determine_writeability(priorities, data_eval, data_disk)
    assert writeables == {"writeable": {"config", "enabled"}, "non_writeable": set()}

    update = {"config": {"foo": "foo"}}
    patchset, delete_set = calc_patches(
        data_disk, update, all_values=data_eval, writeables=writeables
    )
    # only the changed key is persisted; the untouched default "enabled" is not
    assert patchset == {"config.foo": "foo"}
    assert delete_set == set()
def test_machine_delete() -> None:
    """Removing a machine from the update yields a delete entry, not a patch."""
    priorities = {
        "machines": {"__prio": 100},
    }
    data_eval = {
        "machines": {
            "foo": {"name": "foo"},
            "bar": {"name": "bar"},
            "naz": {"name": "naz"},
        },
    }
    data_disk = data_eval

    writeables = determine_writeability(priorities, data_eval, data_disk)
    assert writeables == {"writeable": {"machines"}, "non_writeable": set()}

    # The update omits "bar", which must therefore be scheduled for deletion.
    update = {"machines": {"foo": {"name": "foo"}, "naz": {"name": "naz"}}}
    patchset, delete_set = calc_patches(
        data_disk, update, all_values=data_eval, writeables=writeables
    )
    assert patchset == {}
    assert delete_set == {"machines.bar"}
def test_update_mismatching_update_type() -> None:
    """Replacing a list with a scalar is a type mismatch and raises ClanError."""
    priorities = {
        "foo": {
            "__prio": 100,  # <- writeable: "foo"
        },
    }
    data_eval = {"foo": ["A", "B"]}
    data_disk: dict = {}

    writeables = determine_writeability(priorities, data_eval, data_disk)
    assert writeables == {"writeable": {"foo"}, "non_writeable": set()}

    update: dict = {"foo": 1}  # <- int where a list is expected
    with pytest.raises(ClanError) as error:
        calc_patches(data_disk, update, all_values=data_eval, writeables=writeables)
    assert (
        str(error.value)
        == "Type mismatch for key 'foo'. Cannot update <class 'list'> with <class 'int'>"
    )
def test_delete_key() -> None:
    """Emptying a writeable dict schedules all of its keys for deletion."""
    priorities = {
        "foo": {
            "__prio": 100,  # <- writeable: "foo"
        },
    }
    data_eval = {"foo": {"bar": "baz"}}
    data_disk = data_eval

    writeables = determine_writeability(priorities, data_eval, data_disk)
    assert writeables == {"writeable": {"foo"}, "non_writeable": set()}

    patchset, delete_set = calc_patches(
        data_disk, {"foo": {}}, all_values=data_eval, writeables=writeables
    )
    assert patchset == {}
    assert delete_set == {"foo.bar"}
def test_delete_key_intermediate() -> None:
    """Dropping one nested entry deletes just that subtree and keeps siblings."""
    priorities = {
        "foo": {
            "__prio": 100,
        },
    }
    data_eval = {
        "foo": {
            # "bar" is removed by the update below
            "bar": {"name": "bar", "info": "info", "other": ["a", "b"]},
            # "other" is kept
            "other": {"name": "other", "info": "info", "other": ["a", "b"]},
        }
    }
    update: dict = {
        "foo": {"other": {"name": "other", "info": "info", "other": ["a", "b"]}}
    }
    data_disk = data_eval

    writeables = determine_writeability(priorities, data_eval, data_disk)
    assert writeables == {"writeable": {"foo"}, "non_writeable": set()}

    patchset, delete_set = calc_patches(
        data_disk, update, all_values=data_eval, writeables=writeables
    )
    assert patchset == {}
    assert delete_set == {"foo.bar"}
def test_delete_key_non_writeable() -> None:
    """Deleting below a non-writeable key raises a ClanError."""
    priorities = {
        "foo": {
            "__prio": 50,  # <- non-writeable
        },
    }
    data_eval = {
        "foo": {
            "bar": {"name": "bar", "info": "info", "other": ["a", "b"]},
        }
    }
    data_disk = data_eval

    writeables = determine_writeability(priorities, data_eval, data_disk)
    assert writeables == {"writeable": set(), "non_writeable": {"foo"}}

    # attempting to remove all keys from "foo" must fail
    with pytest.raises(ClanError) as error:
        calc_patches(data_disk, {"foo": {}}, all_values=data_eval, writeables=writeables)
    assert "Cannot delete" in str(error.value)
def test_delete_atom() -> None:
    """delete_by_path() removes a leaf and returns the deleted key-value pair."""
    data = {"foo": {"bar": 1}}
    removed = delete_by_path(data, "foo.bar")
    assert removed == {"bar": 1}
    assert data == {"foo": {}}
def test_delete_intermediate() -> None:
    """Deleting an inner node returns its subtree; ancestors stay in place."""
    data = {"a": {"b": {"c": {"d": 42}}}}
    removed = delete_by_path(data, "a.b.c")
    assert removed == {"c": {"d": 42}}
    # all intermediate dictionaries remain intact
    assert data == {"a": {"b": {}}}
def test_delete_top_level() -> None:
    """Top-level keys can be deleted directly."""
    data = {"x": 100, "y": 200}
    removed = delete_by_path(data, "x")
    assert removed == {"x": 100}
    assert data == {"y": 200}
def test_delete_key_not_found() -> None:
    """Deleting a missing key raises KeyError and leaves the data untouched."""
    data = {"foo": {"bar": 1}}
    with pytest.raises(KeyError) as excinfo:
        delete_by_path(data, "foo.baz")
    assert "Cannot delete. Path 'foo.baz'" in str(excinfo.value)
    assert data == {"foo": {"bar": 1}}
def test_delete_intermediate_not_dict() -> None:
    """Descending into a non-dict value raises KeyError without mutating data."""
    data = {"foo": "not a dict"}
    with pytest.raises(KeyError) as excinfo:
        delete_by_path(data, "foo.bar")
    assert "not found or not a dictionary" in str(excinfo.value)
    assert data == {"foo": "not a dict"}
def test_delete_empty_path() -> None:
    """An empty path is rejected with KeyError and the data is untouched."""
    data = {"foo": {"bar": 1}}
    with pytest.raises(KeyError) as excinfo:
        delete_by_path(data, "")
    assert "Cannot delete. Path is empty" in str(excinfo.value)
    assert data == {"foo": {"bar": 1}}
def test_delete_non_existent_path_deep() -> None:
    """A missing leaf deep in the tree raises KeyError; data stays unchanged."""
    data = {"foo": {"bar": {"baz": 123}}}
    with pytest.raises(KeyError) as excinfo:
        delete_by_path(data, "foo.bar.qux")
    assert "not found" in str(excinfo.value)
    assert data == {"foo": {"bar": {"baz": 123}}}

View File

@@ -0,0 +1,799 @@
import json
import logging
import os
import re
from collections.abc import Iterator
from contextlib import contextmanager
from typing import TYPE_CHECKING
import pytest
from age_keys import assert_secrets_file_recipients
from clan_cli.errors import ClanError
from gpg_keys import GpgKey
from clan_cli.secrets.folders import sops_secrets_folder
from clan_cli.tests.fixtures_flakes import FlakeForTest
from clan_cli.tests.helpers import cli
from clan_cli.tests.stdout import CaptureOutput
if TYPE_CHECKING:
from .age_keys import KeyPair
log = logging.getLogger(__name__)
def _test_identities(
    what: str,
    test_flake: FlakeForTest,
    capture_output: CaptureOutput,
    age_keys: list["KeyPair"],
    monkeypatch: pytest.MonkeyPatch,
) -> None:
    """Shared add/rotate/remove lifecycle test for an identity kind.

    ``what`` is the plural CLI subcommand ("users" or "machines"); the
    singular form is derived by stripping the trailing "s".  The sequence
    below is order-dependent: each CLI call builds on the state left by
    the previous one.
    """
    sops_folder = test_flake.path / "sops"
    # "users" -> "user", "machines" -> "machine"
    what_singular = what[:-1]
    test_secret_name = f"{what_singular}_secret"
    # fake some admin user that's different from the identity, we are going to
    # try to add/remove/update from the clan, this way we can check that keys
    # are properly updated on secrets when an identity changes.
    admin_age_key = age_keys[2]
    # Register identity "foo" with its public age key.
    cli.run(
        [
            "secrets",
            what,
            "add",
            "--flake",
            str(test_flake.path),
            "foo",
            age_keys[0].pubkey,
        ]
    )
    assert (sops_folder / what / "foo" / "key.json").exists()
    cli.run(
        [
            "secrets",
            "users",
            "add",
            "--flake",
            str(test_flake.path),
            "admin",
            admin_age_key.pubkey,
        ]
    )
    with pytest.raises(ClanError):  # raises "foo already exists"
        cli.run(
            [
                "secrets",
                what,
                "add",
                "--flake",
                str(test_flake.path),
                "foo",
                age_keys[0].pubkey,
            ]
        )
    # Create a secret owned by "foo" while acting as the admin.
    with monkeypatch.context():
        monkeypatch.setenv("SOPS_NIX_SECRET", "deadfeed")
        monkeypatch.setenv("SOPS_AGE_KEY", admin_age_key.privkey)
        cli.run(
            [
                "secrets",
                "set",
                "--flake",
                str(test_flake.path),
                f"--{what_singular}",
                "foo",
                test_secret_name,
            ]
        )
    assert_secrets_file_recipients(
        test_flake.path,
        test_secret_name,
        expected_age_recipients_keypairs=[age_keys[0], admin_age_key],
    )
    # Rotate foo's key (-f forces the overwrite); the secret must be
    # re-encrypted for the new key.
    with monkeypatch.context():
        monkeypatch.setenv("SOPS_AGE_KEY", admin_age_key.privkey)
        cli.run(
            [
                "secrets",
                what,
                "add",
                "--flake",
                str(test_flake.path),
                "-f",
                "foo",
                age_keys[1].privkey,
            ]
        )
    assert_secrets_file_recipients(
        test_flake.path,
        test_secret_name,
        expected_age_recipients_keypairs=[age_keys[1], admin_age_key],
    )
    with capture_output as output:
        cli.run(
            [
                "secrets",
                what,
                "get",
                "--flake",
                str(test_flake.path),
                "foo",
            ]
        )
    assert age_keys[1].pubkey in output.out
    with capture_output as output:
        cli.run(["secrets", what, "list", "--flake", str(test_flake.path)])
    assert "foo" in output.out
    # Remove the identity; a second removal must fail.
    cli.run(["secrets", what, "remove", "--flake", str(test_flake.path), "foo"])
    assert not (sops_folder / what / "foo" / "key.json").exists()
    with pytest.raises(ClanError):  # already removed
        cli.run(["secrets", what, "remove", "--flake", str(test_flake.path), "foo"])
    with capture_output as output:
        cli.run(["secrets", what, "list", "--flake", str(test_flake.path)])
    assert "foo" not in output.out
    # Removing the identity must also clean up its symlink under the secret.
    user_or_machine_symlink = sops_folder / "secrets" / test_secret_name / what / "foo"
    err_msg = (
        f"Symlink to {what_singular} foo's key in secret "
        f"{test_secret_name} was not cleaned up after "
        f"{what_singular} foo was removed."
    )
    assert not user_or_machine_symlink.exists(follow_symlinks=False), err_msg
def test_users(
    test_flake: FlakeForTest,
    capture_output: CaptureOutput,
    age_keys: list["KeyPair"],
    monkeypatch: pytest.MonkeyPatch,
) -> None:
    """Run the shared identity lifecycle test for the "users" subcommand."""
    _test_identities(
        what="users",
        test_flake=test_flake,
        capture_output=capture_output,
        age_keys=age_keys,
        monkeypatch=monkeypatch,
    )
def test_machines(
    test_flake: FlakeForTest,
    capture_output: CaptureOutput,
    age_keys: list["KeyPair"],
    monkeypatch: pytest.MonkeyPatch,
) -> None:
    """Run the shared identity lifecycle test for the "machines" subcommand."""
    _test_identities(
        what="machines",
        test_flake=test_flake,
        capture_output=capture_output,
        age_keys=age_keys,
        monkeypatch=monkeypatch,
    )
def test_groups(
    test_flake: FlakeForTest,
    capture_output: CaptureOutput,
    age_keys: list["KeyPair"],
    monkeypatch: pytest.MonkeyPatch,
) -> None:
    """Group membership lifecycle: adding/removing users and machines from a
    group must keep the recipients of group-owned secrets in sync.

    Order-dependent integration test driven through the CLI.
    """
    with capture_output as output:
        cli.run(["secrets", "groups", "list", "--flake", str(test_flake.path)])
    assert output.out == ""
    machine1_age_key = age_keys[0]
    user1_age_key = age_keys[1]
    admin_age_key = age_keys[2]
    with pytest.raises(ClanError):  # machine does not exist yet
        cli.run(
            [
                "secrets",
                "groups",
                "add-machine",
                "--flake",
                str(test_flake.path),
                "group1",
                "machine1",
            ]
        )
    with pytest.raises(ClanError):  # user does not exist yet
        cli.run(
            [
                "secrets",
                "groups",
                "add-user",
                "--flake",
                str(test_flake.path),
                "groupb1",
                "user1",
            ]
        )
    cli.run(
        [
            "secrets",
            "machines",
            "add",
            "--flake",
            str(test_flake.path),
            "machine1",
            machine1_age_key.pubkey,
        ]
    )
    cli.run(
        [
            "secrets",
            "groups",
            "add-machine",
            "--flake",
            str(test_flake.path),
            "group1",
            "machine1",
        ]
    )
    # Should this fail?
    # NOTE(review): adding the same machine twice is currently accepted.
    cli.run(
        [
            "secrets",
            "groups",
            "add-machine",
            "--flake",
            str(test_flake.path),
            "group1",
            "machine1",
        ]
    )
    cli.run(
        [
            "secrets",
            "users",
            "add",
            "--flake",
            str(test_flake.path),
            "user1",
            user1_age_key.pubkey,
        ]
    )
    cli.run(
        [
            "secrets",
            "users",
            "add",
            "--flake",
            str(test_flake.path),
            "admin",
            admin_age_key.pubkey,
        ]
    )
    cli.run(
        [
            "secrets",
            "groups",
            "add-user",
            "--flake",
            str(test_flake.path),
            "group1",
            "user1",
        ]
    )
    with capture_output as output:
        cli.run(["secrets", "groups", "list", "--flake", str(test_flake.path)])
    out = output.out
    assert "user1" in out
    assert "machine1" in out
    # Create a secret owned by group1; every group member plus the acting
    # admin must become a recipient.
    secret_name = "foo"
    with monkeypatch.context():
        monkeypatch.setenv("SOPS_NIX_SECRET", "deafbeef")
        monkeypatch.setenv("SOPS_AGE_KEY", admin_age_key.privkey)
        cli.run(
            [
                "secrets",
                "set",
                "--flake",
                str(test_flake.path),
                "--group",
                "group1",
                secret_name,
            ]
        )
    assert_secrets_file_recipients(
        test_flake.path,
        secret_name,
        expected_age_recipients_keypairs=[
            machine1_age_key,
            user1_age_key,
            admin_age_key,
        ],
        err_msg=(
            f"The secret `{secret_name}` owned by group1 was not encrypted "
            f"with all members of the group."
        ),
    )
    # Removing a user from the group must drop them from the recipients.
    cli.run(
        [
            "secrets",
            "groups",
            "remove-user",
            "--flake",
            str(test_flake.path),
            "group1",
            "user1",
        ]
    )
    assert_secrets_file_recipients(
        test_flake.path,
        secret_name,
        expected_age_recipients_keypairs=[machine1_age_key, admin_age_key],
        err_msg=(
            f"The secret `{secret_name}` owned by group1 is still encrypted for "
            f"`user1` even though this user has been removed from the group."
        ),
    )
    # re-add the user to the group
    cli.run(
        [
            "secrets",
            "groups",
            "add-user",
            "--flake",
            str(test_flake.path),
            "group1",
            "user1",
        ]
    )
    assert_secrets_file_recipients(
        test_flake.path,
        secret_name,
        expected_age_recipients_keypairs=[
            machine1_age_key,
            user1_age_key,
            admin_age_key,
        ],
    )
    # and instead of removing the user from the group, remove the
    # user instead, it should also remove it from the group:
    cli.run(
        [
            "secrets",
            "users",
            "remove",
            "--flake",
            str(test_flake.path),
            "user1",
        ]
    )
    assert_secrets_file_recipients(
        test_flake.path,
        secret_name,
        expected_age_recipients_keypairs=[machine1_age_key, admin_age_key],
        err_msg=(
            f"The secret `{secret_name}` owned by group1 is still encrypted "
            f"for `user1` even though this user has been deleted."
        ),
    )
    cli.run(
        [
            "secrets",
            "groups",
            "remove-machine",
            "--flake",
            str(test_flake.path),
            "group1",
            "machine1",
        ]
    )
    assert_secrets_file_recipients(
        test_flake.path,
        secret_name,
        expected_age_recipients_keypairs=[admin_age_key],
        err_msg=(
            f"The secret `{secret_name}` owned by group1 is still encrypted for "
            f"`machine1` even though this machine has been removed from the group."
        ),
    )
    # Once the last member is gone the group directory itself must be empty.
    first_group = next((test_flake.path / "sops" / "groups").iterdir(), None)
    assert first_group is None
    # Check if the symlink to the group was removed from our foo test secret:
    group_symlink = test_flake.path / "sops/secrets/foo/groups/group1"
    err_msg = (
        "Symlink to group1's key in foo secret "
        "was not cleaned up after group1 was removed"
    )
    assert not group_symlink.exists(follow_symlinks=False), err_msg
@contextmanager
def use_age_key(key: str, monkeypatch: pytest.MonkeyPatch) -> Iterator[None]:
    """Temporarily make sops use the given age private key.

    While active, ``SOPS_AGE_KEY_FILE`` is unset and ``SOPS_AGE_KEY`` is set
    to ``key``; both are restored on exit.

    Fix: the original read ``os.environ["SOPS_AGE_KEY_FILE"]`` directly and
    crashed with KeyError when the variable was unset; use ``.get`` and
    ``raising=False`` instead, consistent with the sibling ``use_gpg_key``.
    """
    old_key = os.environ.get("SOPS_AGE_KEY_FILE")
    monkeypatch.delenv("SOPS_AGE_KEY_FILE", raising=False)
    monkeypatch.setenv("SOPS_AGE_KEY", key)
    try:
        yield
    finally:
        monkeypatch.delenv("SOPS_AGE_KEY")
        # Only restore the file path if it was set before.
        if old_key is not None:
            monkeypatch.setenv("SOPS_AGE_KEY_FILE", old_key)
@contextmanager
def use_gpg_key(key: GpgKey, monkeypatch: pytest.MonkeyPatch) -> Iterator[None]:
    """Temporarily make sops use the given PGP key instead of any age key.

    Both age-related environment variables are cleared for the duration and
    restored afterwards if they had been set.
    """
    saved = {
        "SOPS_AGE_KEY_FILE": os.environ.get("SOPS_AGE_KEY_FILE"),
        "SOPS_AGE_KEY": os.environ.get("SOPS_AGE_KEY"),
    }
    for name in saved:
        monkeypatch.delenv(name, raising=False)
    monkeypatch.setenv("SOPS_PGP_FP", key.fingerprint)
    try:
        yield
    finally:
        monkeypatch.delenv("SOPS_PGP_FP")
        for name, value in saved.items():
            if value is not None:
                monkeypatch.setenv(name, value)
def test_secrets(
    test_flake: FlakeForTest,
    capture_output: CaptureOutput,
    monkeypatch: pytest.MonkeyPatch,
    gpg_key: GpgKey,
    age_keys: list["KeyPair"],
) -> None:
    """End-to-end secrets workflow through the CLI: key generation, set/get,
    rename, machine/user/group sharing, key rotation, and cleanup.

    Order-dependent: every step builds on the state of the previous one.
    """
    with capture_output as output:
        cli.run(["secrets", "list", "--flake", str(test_flake.path)])
    assert output.out == ""
    # Generate a new key for the clan
    monkeypatch.setenv("SOPS_AGE_KEY_FILE", str(test_flake.path / ".." / "age.key"))
    with capture_output as output:
        cli.run(["secrets", "key", "generate", "--flake", str(test_flake.path)])
    assert "age private key" in output.out
    # Read the key that was generated
    with capture_output as output:
        cli.run(["secrets", "key", "show", "--flake", str(test_flake.path)])
    key = json.loads(output.out)["publickey"]
    assert key.startswith("age1")
    # Add testuser with the key that was generated for the clan
    cli.run(
        ["secrets", "users", "add", "--flake", str(test_flake.path), "testuser", key]
    )
    with pytest.raises(ClanError):  # does not exist yet
        cli.run(["secrets", "get", "--flake", str(test_flake.path), "nonexisting"])
    # Set and read back a first secret ("set" reads SOPS_NIX_SECRET).
    monkeypatch.setenv("SOPS_NIX_SECRET", "foo")
    cli.run(["secrets", "set", "--flake", str(test_flake.path), "initialkey"])
    with capture_output as output:
        cli.run(["secrets", "get", "--flake", str(test_flake.path), "initialkey"])
    assert output.out == "foo"
    with capture_output as output:
        cli.run(["secrets", "users", "list", "--flake", str(test_flake.path)])
    users = output.out.rstrip().split("\n")
    assert len(users) == 1, f"users: {users}"
    owner = users[0]
    # "cat" as EDITOR makes --edit a no-op round trip.
    monkeypatch.setenv("EDITOR", "cat")
    cli.run(["secrets", "set", "--edit", "--flake", str(test_flake.path), "initialkey"])
    monkeypatch.delenv("EDITOR")
    cli.run(["secrets", "rename", "--flake", str(test_flake.path), "initialkey", "key"])
    with capture_output as output:
        cli.run(["secrets", "list", "--flake", str(test_flake.path)])
    assert output.out == "key\n"
    # Listing supports an optional filter argument.
    with capture_output as output:
        cli.run(["secrets", "list", "--flake", str(test_flake.path), "nonexisting"])
    assert output.out == ""
    with capture_output as output:
        cli.run(["secrets", "list", "--flake", str(test_flake.path), "key"])
    assert output.out == "key\n"
    # using the `age_keys` KeyPair, add a machine and rotate its key
    cli.run(
        [
            "secrets",
            "machines",
            "add",
            "--flake",
            str(test_flake.path),
            "machine1",
            age_keys[1].pubkey,
        ]
    )
    cli.run(
        [
            "secrets",
            "machines",
            "add-secret",
            "--flake",
            str(test_flake.path),
            "machine1",
            "key",
        ]
    )
    with capture_output as output:
        cli.run(["secrets", "machines", "list", "--flake", str(test_flake.path)])
    assert output.out == "machine1\n"
    with use_age_key(age_keys[1].privkey, monkeypatch):
        with capture_output as output:
            cli.run(["secrets", "get", "--flake", str(test_flake.path), "key"])
        assert output.out == "foo"
    # rotate machines key
    cli.run(
        [
            "secrets",
            "machines",
            "add",
            "--flake",
            str(test_flake.path),
            "-f",
            "machine1",
            age_keys[0].privkey,
        ]
    )
    # should also rotate the encrypted secret
    with use_age_key(age_keys[0].privkey, monkeypatch):
        with capture_output as output:
            cli.run(["secrets", "get", "--flake", str(test_flake.path), "key"])
        assert output.out == "foo"
    cli.run(
        [
            "secrets",
            "machines",
            "remove-secret",
            "--flake",
            str(test_flake.path),
            "machine1",
            "key",
        ]
    )
    # Share the secret with a dedicated user and verify access.
    cli.run(
        [
            "secrets",
            "users",
            "add",
            "--flake",
            str(test_flake.path),
            "user1",
            age_keys[1].pubkey,
        ]
    )
    cli.run(
        [
            "secrets",
            "users",
            "add-secret",
            "--flake",
            str(test_flake.path),
            "user1",
            "key",
        ]
    )
    with capture_output as output, use_age_key(age_keys[1].privkey, monkeypatch):
        cli.run(["secrets", "get", "--flake", str(test_flake.path), "key"])
    assert output.out == "foo"
    cli.run(
        [
            "secrets",
            "users",
            "remove-secret",
            "--flake",
            str(test_flake.path),
            "user1",
            "key",
        ]
    )
    with pytest.raises(ClanError):  # does not exist yet
        cli.run(
            [
                "secrets",
                "groups",
                "add-secret",
                "--flake",
                str(test_flake.path),
                "admin-group",
                "key",
            ]
        )
    cli.run(
        [
            "secrets",
            "groups",
            "add-user",
            "--flake",
            str(test_flake.path),
            "admin-group",
            "user1",
        ]
    )
    cli.run(
        [
            "secrets",
            "groups",
            "add-user",
            "--flake",
            str(test_flake.path),
            "admin-group",
            owner,
        ]
    )
    cli.run(
        [
            "secrets",
            "groups",
            "add-secret",
            "--flake",
            str(test_flake.path),
            "admin-group",
            "key",
        ]
    )
    cli.run(
        [
            "secrets",
            "set",
            "--flake",
            str(test_flake.path),
            "--group",
            "admin-group",
            "key2",
        ]
    )
    with use_age_key(age_keys[1].privkey, monkeypatch):
        with capture_output as output:
            cli.run(["secrets", "get", "--flake", str(test_flake.path), "key"])
        assert output.out == "foo"
    # Add an user with a GPG key
    cli.run(
        [
            "secrets",
            "users",
            "add",
            "--flake",
            str(test_flake.path),
            "--pgp-key",
            gpg_key.fingerprint,
            "user2",
        ]
    )
    # Extend group will update secrets
    cli.run(
        [
            "secrets",
            "groups",
            "add-user",
            "--flake",
            str(test_flake.path),
            "admin-group",
            "user2",
        ]
    )
    with use_gpg_key(gpg_key, monkeypatch):  # user2
        with capture_output as output:
            cli.run(["secrets", "get", "--flake", str(test_flake.path), "key"])
        assert output.out == "foo"
    cli.run(
        [
            "secrets",
            "groups",
            "remove-user",
            "--flake",
            str(test_flake.path),
            "admin-group",
            "user2",
        ]
    )
    with (
        pytest.raises(ClanError),
        use_gpg_key(gpg_key, monkeypatch),
        capture_output as output,
    ):
        # user2 is not in the group anymore
        cli.run(["secrets", "get", "--flake", str(test_flake.path), "key"])
    print(output.out)
    cli.run(
        [
            "secrets",
            "groups",
            "remove-secret",
            "--flake",
            str(test_flake.path),
            "admin-group",
            "key",
        ]
    )
    cli.run(["secrets", "remove", "--flake", str(test_flake.path), "key"])
    cli.run(["secrets", "remove", "--flake", str(test_flake.path), "key2"])
    with capture_output as output:
        cli.run(["secrets", "list", "--flake", str(test_flake.path)])
    assert output.out == ""
def test_secrets_key_generate_gpg(
    test_flake: FlakeForTest,
    capture_output: CaptureOutput,
    monkeypatch: pytest.MonkeyPatch,
    gpg_key: GpgKey,
) -> None:
    """When a PGP key is configured, `secrets key generate` must not create an
    age key, and the PGP identity must work for set/get round trips."""
    with use_gpg_key(gpg_key, monkeypatch):
        # Make sure clan secrets key generate recognizes
        # the PGP key and does nothing:
        with capture_output as output:
            cli.run(
                [
                    "secrets",
                    "key",
                    "generate",
                    "--flake",
                    str(test_flake.path),
                ]
            )
        assert "age private key" not in output.out
        assert re.match(r"PGP key.+is already set", output.out) is not None
        with capture_output as output:
            cli.run(["secrets", "key", "show", "--flake", str(test_flake.path)])
        key = json.loads(output.out)
        assert key["type"] == "pgp"
        assert key["publickey"] == gpg_key.fingerprint
        # Add testuser with the key that was (not) generated for the clan:
        cli.run(
            [
                "secrets",
                "users",
                "add",
                "--flake",
                str(test_flake.path),
                "--pgp-key",
                gpg_key.fingerprint,
                "testuser",
            ]
        )
        with capture_output as output:
            cli.run(
                [
                    "secrets",
                    "users",
                    "get",
                    "--flake",
                    str(test_flake.path),
                    "testuser",
                ]
            )
        key = json.loads(output.out)
        assert key["type"] == "pgp"
        assert key["publickey"] == gpg_key.fingerprint
        # Round-trip a secret value through the PGP identity.
        monkeypatch.setenv("SOPS_NIX_SECRET", "secret-value")
        cli.run(["secrets", "set", "--flake", str(test_flake.path), "secret-name"])
        with capture_output as output:
            cli.run(["secrets", "get", "--flake", str(test_flake.path), "secret-name"])
        assert output.out == "secret-value"

View File

@@ -0,0 +1,110 @@
import ipaddress
from typing import TYPE_CHECKING
import pytest
from clan_cli.facts.secret_modules.sops import SecretStore
from clan_cli.flake import Flake
from clan_cli.machines.facts import machine_get_fact
from clan_cli.machines.machines import Machine
from clan_cli.secrets.folders import sops_secrets_folder
from clan_cli.tests.fixtures_flakes import FlakeForTest
from clan_cli.tests.helpers import cli
from clan_cli.tests.helpers.validator import is_valid_age_key
if TYPE_CHECKING:
from .age_keys import KeyPair
@pytest.mark.impure
def test_generate_secret(
    monkeypatch: pytest.MonkeyPatch,
    test_flake_with_core: FlakeForTest,
    age_keys: list["KeyPair"],
) -> None:
    """`facts generate` creates the expected sops secrets for vm1/vm2 and is
    idempotent (re-running must not rewrite already generated secrets)."""
    monkeypatch.chdir(test_flake_with_core.path)
    monkeypatch.setenv("SOPS_AGE_KEY", age_keys[0].privkey)
    cli.run(
        [
            "secrets",
            "users",
            "add",
            "--flake",
            str(test_flake_with_core.path),
            "user1",
            age_keys[0].pubkey,
        ]
    )
    cli.run(
        [
            "secrets",
            "groups",
            "add-user",
            "--flake",
            str(test_flake_with_core.path),
            "admins",
            "user1",
        ]
    )
    cmd = ["facts", "generate", "--flake", str(test_flake_with_core.path), "vm1"]
    cli.run(cmd)
    store1 = SecretStore(
        Machine(name="vm1", flake=Flake(str(test_flake_with_core.path)))
    )
    assert store1.exists("", "age.key")
    assert store1.exists("", "zerotier-identity-secret")
    network_id = machine_get_fact(
        test_flake_with_core.path, "vm1", "zerotier-network-id"
    )
    assert len(network_id) == 16
    # Record mtimes so we can prove the second run rewrote nothing.
    secrets_folder = sops_secrets_folder(test_flake_with_core.path)
    age_key = secrets_folder / "vm1-age.key" / "secret"
    identity_secret = secrets_folder / "vm1-zerotier-identity-secret" / "secret"
    age_key_mtime = age_key.lstat().st_mtime_ns
    secret1_mtime = identity_secret.lstat().st_mtime_ns
    # Assert that the age key is valid
    age_secret = store1.get("", "age.key").decode()
    assert age_secret.isprintable()
    assert is_valid_age_key(age_secret)
    # test idempotency for vm1 and also generate for vm2
    cli.run(["facts", "generate", "--flake", str(test_flake_with_core.path)])
    assert age_key.lstat().st_mtime_ns == age_key_mtime
    assert identity_secret.lstat().st_mtime_ns == secret1_mtime
    assert (
        secrets_folder / "vm1-zerotier-identity-secret" / "machines" / "vm1"
    ).exists()
    store2 = SecretStore(
        Machine(name="vm2", flake=Flake(str(test_flake_with_core.path)))
    )
    # clan vars generate
    # TODO: Test vars
    # varsStore = VarsSecretStore(
    #     machine=Machine(name="vm2", flake=FlakeId(str(test_flake_with_core.path)))
    # )
    # generators = get_generators(str(test_flake_with_core.path), "vm2")
    # generator = next((gen for gen in generators if gen.name == "root-password"), None)
    # if not generator:
    #     raise Exception("Generator not found")
    # password_update = GeneratorUpdate(
    #     generator=generator.name, prompt_values={"password": "1234"}
    # )
    # set_prompts(str(test_flake_with_core.path), "vm2", [password_update])
    # assert varsStore.exists(generator, "root-password")
    assert store2.exists("", "age.key")
    assert store2.exists("", "zerotier-identity-secret")
    ip = machine_get_fact(test_flake_with_core.path, "vm1", "zerotier-ip")
    assert ipaddress.IPv6Address(ip).is_private
    # Assert that the age key is valid
    age_secret = store2.get("", "age.key").decode()
    assert age_secret.isprintable()
    assert is_valid_age_key(age_secret)

View File

@@ -0,0 +1,88 @@
import subprocess
from pathlib import Path
import pytest
from clan_cli.facts.secret_modules.password_store import SecretStore
from clan_cli.flake import Flake
from clan_cli.machines.facts import machine_get_fact
from clan_cli.machines.machines import Machine
from clan_cli.nix import nix_shell
from clan_cli.ssh.host import Host
from clan_cli.tests.fixtures_flakes import ClanFlake
from clan_cli.tests.helpers import cli
@pytest.mark.impure
def test_upload_secret(
    monkeypatch: pytest.MonkeyPatch,
    flake: ClanFlake,
    temporary_home: Path,
    hosts: list[Host],
) -> None:
    """Facts generated into a password-store backend upload correctly.

    Sets up a gpg + pass environment, generates facts for vm1, checks the
    generation is idempotent (mtime unchanged), then uploads and verifies
    the secret landed in the configured upload directory.

    Fix: the original repeated the final ``store.exists`` assertion twice;
    the duplicate line is removed.
    """
    flake.clan_modules = [
        "root-password",
        "user-password",
        "sshd",
    ]
    config = flake.machines["vm1"]
    config["nixpkgs"]["hostPlatform"] = "x86_64-linux"
    config["clan"]["core"]["networking"]["zerotier"]["controller"]["enable"] = True
    host = hosts[0]
    addr = f"{host.user}@{host.host}:{host.port}?StrictHostKeyChecking=no&UserKnownHostsFile=/dev/null&IdentityFile={host.key}"
    config["clan"]["core"]["networking"]["targetHost"] = addr
    config["clan"]["user-password"]["user"] = "alice"
    config["clan"]["user-password"]["prompt"] = False
    facts = config["clan"]["core"]["facts"]
    facts["secretStore"] = "password-store"
    # Override the upload directory so the test can inspect the result.
    facts["secretUploadDirectory"]["_type"] = "override"
    facts["secretUploadDirectory"]["content"] = str(
        temporary_home / "flake" / "secrets"
    )
    facts["secretUploadDirectory"]["priority"] = 50
    flake.refresh()
    monkeypatch.chdir(flake.path)
    # Isolated gpg home + password store for this test.
    gnupghome = temporary_home / "gpg"
    gnupghome.mkdir(mode=0o700)
    monkeypatch.setenv("GNUPGHOME", str(gnupghome))
    monkeypatch.setenv("PASSWORD_STORE_DIR", str(temporary_home / "pass"))
    gpg_key_spec = temporary_home / "gpg_key_spec"
    gpg_key_spec.write_text(
        """
        Key-Type: 1
        Key-Length: 1024
        Name-Real: Root Superuser
        Name-Email: test@local
        Expire-Date: 0
        %no-protection
        """
    )
    subprocess.run(
        nix_shell(
            ["nixpkgs#gnupg"], ["gpg", "--batch", "--gen-key", str(gpg_key_spec)]
        ),
        check=True,
    )
    subprocess.run(
        nix_shell(["nixpkgs#pass"], ["pass", "init", "test@local"]), check=True
    )
    cli.run(["facts", "generate", "vm1", "--flake", str(flake.path)])
    store = SecretStore(Machine(name="vm1", flake=Flake(str(flake.path))))
    network_id = machine_get_fact(flake.path, "vm1", "zerotier-network-id")
    assert len(network_id) == 16
    identity_secret = (
        temporary_home / "pass" / "machines" / "vm1" / "zerotier-identity-secret.gpg"
    )
    secret1_mtime = identity_secret.lstat().st_mtime_ns
    # test idempotency
    cli.run(["facts", "generate", "vm1"])
    assert identity_secret.lstat().st_mtime_ns == secret1_mtime
    cli.run(["facts", "upload", "vm1"])
    zerotier_identity_secret = flake.path / "secrets" / "zerotier-identity-secret"
    assert zerotier_identity_secret.exists()
    assert store.exists("", "zerotier-identity-secret")

View File

@@ -0,0 +1,72 @@
from typing import TYPE_CHECKING
import pytest
from clan_cli.ssh.host import Host
from clan_cli.tests.fixtures_flakes import ClanFlake
from clan_cli.tests.helpers import cli
if TYPE_CHECKING:
from .age_keys import KeyPair
@pytest.mark.with_core
def test_secrets_upload(
    monkeypatch: pytest.MonkeyPatch,
    flake: ClanFlake,
    hosts: list[Host],
    age_keys: list["KeyPair"],
) -> None:
    """`facts upload` installs the machine's sops key into the configured
    upload directory and cleans up stale files from previous state."""
    config = flake.machines["vm1"]
    config["nixpkgs"]["hostPlatform"] = "x86_64-linux"
    host = hosts[0]
    addr = f"{host.user}@{host.host}:{host.port}?StrictHostKeyChecking=no&UserKnownHostsFile=/dev/null&IdentityFile={host.key}"
    config["clan"]["networking"]["targetHost"] = addr
    config["clan"]["core"]["facts"]["secretUploadDirectory"] = str(flake.path / "facts")
    flake.refresh()
    monkeypatch.chdir(str(flake.path))
    monkeypatch.setenv("SOPS_AGE_KEY", age_keys[0].privkey)
    sops_dir = flake.path / "facts"
    # the flake defines this path as the location where the sops key should be installed
    sops_key = sops_dir / "key.txt"
    sops_key2 = sops_dir / "key2.txt"
    # Create old state, which should be cleaned up
    sops_dir.mkdir()
    sops_key.write_text("OLD STATE")
    sops_key2.write_text("OLD STATE2")
    cli.run(
        [
            "secrets",
            "users",
            "add",
            "--flake",
            str(flake.path),
            "user1",
            age_keys[0].pubkey,
        ]
    )
    cli.run(
        [
            "secrets",
            "machines",
            "add",
            "--flake",
            str(flake.path),
            "vm1",
            age_keys[1].pubkey,
        ]
    )
    monkeypatch.setenv("SOPS_NIX_SECRET", age_keys[0].privkey)
    cli.run(["secrets", "set", "--flake", str(flake.path), "vm1-age.key"])
    # NOTE(review): unlike every other call, --flake here points at the
    # flake.nix FILE rather than the flake directory — presumably the CLI
    # accepts both; confirm this is intentional.
    flake_path = flake.path.joinpath("flake.nix")
    cli.run(["facts", "upload", "--flake", str(flake_path), "vm1"])
    # Upload must install the real key and remove the stale extra file.
    assert sops_key.exists()
    assert sops_key.read_text() == age_keys[0].privkey
    assert not sops_key2.exists()

View File

@@ -0,0 +1,139 @@
from dataclasses import dataclass, field
# Functions to test
from clan_cli.api import (
dataclass_to_dict,
sanitize_string,
)
#
def test_sanitize_string() -> None:
    """sanitize_string must pass printable text, common escapes, and ANSI
    color sequences through unchanged (only normalizing \\v and \\0 forms)."""
    # Simple strings
    assert sanitize_string("Hello World") == "Hello World"
    assert sanitize_string("Hello\nWorld") == "Hello\nWorld"
    assert sanitize_string("Hello\tWorld") == "Hello\tWorld"
    assert sanitize_string("Hello\rWorld") == "Hello\rWorld"
    assert sanitize_string("Hello\fWorld") == "Hello\fWorld"
    assert sanitize_string("Hello\vWorld") == "Hello\u000bWorld"
    assert sanitize_string("Hello\bWorld") == "Hello\bWorld"
    assert sanitize_string("Hello\\World") == "Hello\\World"
    assert sanitize_string('Hello"World') == 'Hello"World'
    assert sanitize_string("Hello'World") == "Hello'World"
    assert sanitize_string("Hello\0World") == "Hello\x00World"
    # Console escape characters (comments below name the SGR code used)
    assert sanitize_string("\033[1mBold\033[0m") == "\033[1mBold\033[0m"  # Bold
    assert sanitize_string("\033[31mRed\033[0m") == "\033[31mRed\033[0m"  # Red
    assert sanitize_string("\033[42mGreen\033[0m") == "\033[42mGreen\033[0m"  # Green background
    assert sanitize_string("\033[4mUnderline\033[0m") == "\033[4mUnderline\033[0m"
    assert (
        sanitize_string("\033[91m\033[1mBold Red\033[0m")
        == "\033[91m\033[1mBold Red\033[0m"
    )
def test_dataclass_to_dict() -> None:
    """A flat dataclass serializes to a plain field -> value mapping."""

    @dataclass
    class Person:
        name: str
        age: int

    subject = Person(name="John", age=25)
    assert dataclass_to_dict(subject) == {"name": "John", "age": 25}
def test_dataclass_to_dict_nested() -> None:
    """Nested dataclasses serialize recursively, including default factories."""

    @dataclass
    class Address:
        city: str = "afghanistan"
        zip: str = "01234"

    @dataclass
    class Person:
        name: str
        age: int
        address: Address = field(default_factory=Address)

    # Omitting the address uses the factory defaults.
    defaulted = Person(name="John", age=25)
    assert dataclass_to_dict(defaulted) == {
        "name": "John",
        "age": 25,
        "address": {"city": "afghanistan", "zip": "01234"},
    }

    # An explicit address is serialized as given.
    explicit = Person(name="John", age=25, address=Address(zip="0", city="Anywhere"))
    assert dataclass_to_dict(explicit) == {
        "name": "John",
        "age": 25,
        "address": {"zip": "0", "city": "Anywhere"},
    }
def test_dataclass_to_dict_defaults() -> None:
    """Default factories for dict/list fields serialize as empty containers."""

    @dataclass
    class Foo:
        home: dict[str, str] = field(default_factory=dict)
        work: list[str] = field(default_factory=list)

    @dataclass
    class Person:
        name: str = field(default="jon")
        age: int = field(default=1)
        foo: Foo = field(default_factory=Foo)

    # All-defaults instance: empty containers, not omitted fields.
    assert dataclass_to_dict(Person()) == {
        "name": "jon",
        "age": 1,
        "foo": {"home": {}, "work": []},
    }

    # Populated instance round-trips its contents.
    populated = Person(name="John", age=25, foo=Foo(home={"a": "b"}, work=["a", "b"]))
    assert dataclass_to_dict(populated) == {
        "name": "John",
        "age": 25,
        "foo": {"home": {"a": "b"}, "work": ["a", "b"]},
    }
def test_filters_null_fields() -> None:
    """Fields whose value is None are dropped from the serialized dict."""

    @dataclass
    class Foo:
        home: str | None = None
        work: str | None = None

    # Both fields None -> empty dict.
    empty = Foo()
    assert empty.home is None
    assert dataclass_to_dict(empty) == {}

    # Only the non-None field survives.
    partial = Foo(home="home")
    assert partial.home == "home"
    assert partial.work is None
    assert dataclass_to_dict(partial) == {"home": "home"}
def test_custom_enum() -> None:
    """Enum members serialize to their value, not their name or repr."""
    from enum import Enum

    class CustomEnum(Enum):
        FOO = "foo"
        BAR = "bar"

    @dataclass
    class Foo:
        field: CustomEnum

    assert dataclass_to_dict(Foo(field=CustomEnum.FOO)) == {"field": "foo"}

View File

@@ -0,0 +1,49 @@
from clan_cli.async_run import AsyncRuntime
from clan_cli.cmd import ClanCmdTimeoutError, Log, RunOpts
from clan_cli.ssh.host import Host
host = Host("some_host")
def test_run_environment(runtime: AsyncRuntime) -> None:
    """extra_env variables are expanded by the shell and appear in `env`."""
    extra = {"env_var": "true"}
    echoed = runtime.async_run(
        None,
        host.run_local,
        ["echo $env_var"],
        RunOpts(shell=True, log=Log.STDERR),
        extra_env=extra,
    )
    assert echoed.wait().result.stdout == "true\n"
    listed = runtime.async_run(
        None,
        host.run_local,
        ["env"],
        RunOpts(shell=True, log=Log.STDERR),
        extra_env=extra,
    )
    assert "env_var=true" in listed.wait().result.stdout
def test_run_local(runtime: AsyncRuntime) -> None:
    """run_local captures stdout of a simple echo."""
    opts = RunOpts(log=Log.STDERR)
    proc = runtime.async_run(None, host.run_local, ["echo", "hello"], opts)
    completed = proc.wait()
    assert completed.result.stdout == "hello\n"
def test_timeout(runtime: AsyncRuntime) -> None:
    """A command exceeding RunOpts.timeout surfaces ClanCmdTimeoutError."""
    proc = runtime.async_run(
        None, host.run_local, ["sleep", "10"], RunOpts(timeout=0.01)
    )
    assert isinstance(proc.wait().error, ClanCmdTimeoutError)
def test_run_exception(runtime: AsyncRuntime) -> None:
    """A non-zero exit status is reported as an error on the result."""
    failing = runtime.async_run(None, host.run_local, ["exit 1"], RunOpts(shell=True))
    assert failing.wait().error is not None
def test_run_local_non_shell(runtime: AsyncRuntime) -> None:
    """Argument-vector (non-shell) invocation works and captures stdout."""
    opts = RunOpts(log=Log.STDERR)
    proc = runtime.async_run(None, host.run_local, ["echo", "1"], opts)
    assert proc.wait().result.stdout == "1\n"

View File

@@ -0,0 +1,230 @@
import contextlib
import sys
from collections.abc import Generator
from typing import Any, NamedTuple
import pytest
from clan_cli.async_run import AsyncRuntime
from clan_cli.cmd import ClanCmdTimeoutError, Log, RunOpts
from clan_cli.errors import ClanError, CmdOut
from clan_cli.ssh.host import Host
from clan_cli.ssh.host_key import HostKeyCheck
from clan_cli.ssh.parse import parse_deployment_address
class ParseTestCase(NamedTuple):
    """One table-driven case for parse_deployment_address.

    Either the expected_* fields describe the parse result, or
    ``expected_exception`` names the error type the parser must raise.
    """

    test_addr: str = ""  # the raw address string fed to the parser
    expected_host: str = ""
    expected_port: int | None = None
    expected_user: str = ""
    expected_options: dict[str, str] = {}  # noqa: RUF012
    expected_exception: type[Exception] | None = None
# (test id, case) pairs consumed by the parametrize decorator of
# test_parse_deployment_address below.
parse_deployment_address_test_cases = (
    (
        "host_only",
        ParseTestCase(test_addr="example.com", expected_host="example.com"),
    ),
    (
        "host_user_port",
        ParseTestCase(
            test_addr="user@example.com:22",
            expected_host="example.com",
            expected_user="user",
            expected_port=22,
        ),
    ),
    (
        "cannot_parse_user_host_port",
        ParseTestCase(test_addr="foo@bar@wat", expected_exception=ClanError),
    ),
    (
        "missing_hostname",
        ParseTestCase(test_addr="foo@:2222", expected_exception=ClanError),
    ),
    (
        # bare IPv6 with scope id must be bracketed, see the next two cases
        "invalid_ipv6",
        ParseTestCase(test_addr="user@fe80::1%eth0", expected_exception=ClanError),
    ),
    (
        "valid_ipv6_without_port",
        ParseTestCase(test_addr="[fe80::1%eth0]", expected_host="fe80::1%eth0"),
    ),
    (
        "valid_ipv6_with_port",
        ParseTestCase(
            test_addr="[fe80::1%eth0]:222",
            expected_host="fe80::1%eth0",
            expected_port=222,
        ),
    ),
    (
        "empty_options",
        ParseTestCase(test_addr="example.com?", expected_host="example.com"),
    ),
    (
        "option_with_missing_value",
        ParseTestCase(test_addr="example.com?foo", expected_exception=ClanError),
    ),
    (
        # '@' inside an option value must not confuse the user@host split
        "options_with_@",
        ParseTestCase(
            test_addr="user@example.com?ProxyJump=root@foo&IdentityFile=/key",
            expected_host="example.com",
            expected_user="user",
            expected_options={
                "IdentityFile": "/key",
                "ProxyJump": "root@foo",
            },
        ),
    ),
)
@pytest.mark.parametrize(
    argnames=ParseTestCase._fields,
    argvalues=(case for _, case in parse_deployment_address_test_cases),
    ids=(name for name, _ in parse_deployment_address_test_cases),
)
def test_parse_deployment_address(
    test_addr: str,
    expected_host: str,
    expected_port: int | None,
    expected_user: str,
    expected_options: dict[str, str],
    expected_exception: type[Exception] | None,
) -> None:
    """Table-driven check of parse_deployment_address.

    Each case either parses into the expected host/port/user/options or
    raises ``expected_exception``.
    """
    if expected_exception:
        maybe_check_exception = pytest.raises(expected_exception)
    else:
        # contextlib.nullcontext replaces the hand-rolled no-op context
        # manager (and its `# type: ignore`) — identical behavior.
        maybe_check_exception = contextlib.nullcontext()
    with maybe_check_exception:
        machine_name = "foo"
        result = parse_deployment_address(machine_name, test_addr, HostKeyCheck.STRICT)
        if expected_exception:
            # The parser returned instead of raising; pytest.raises will
            # fail the test when the block exits without an exception.
            return
        assert result.host == expected_host
        assert result.port == expected_port
        assert result.user == expected_user
        assert result.ssh_options == expected_options
def test_parse_ssh_options() -> None:
    """SSH options from the query string end up in Host.ssh_options."""
    addr = "root@example.com:2222?IdentityFile=/path/to/private/key&StrictHostKeyChecking=yes"
    parsed = parse_deployment_address("foo", addr, HostKeyCheck.STRICT)
    assert (parsed.host, parsed.port, parsed.user) == ("example.com", 2222, "root")
    for option, value in {
        "IdentityFile": "/path/to/private/key",
        "StrictHostKeyChecking": "yes",
    }.items():
        assert parsed.ssh_options[option] == value
is_darwin = sys.platform == "darwin"
@pytest.mark.skipif(is_darwin, reason="preload doesn't work on darwin")
def test_run(hosts: list[Host], runtime: AsyncRuntime) -> None:
    """Every test host echoes stdout back through run_local."""
    for target in hosts:
        pending = runtime.async_run(
            None, target.run_local, ["echo", "hello"], RunOpts(log=Log.STDERR)
        )
        assert pending.wait().result.stdout == "hello\n"
@pytest.mark.skipif(is_darwin, reason="preload doesn't work on darwin")
def test_run_environment(hosts: list[Host], runtime: AsyncRuntime) -> None:
    """extra_env is visible both via shell expansion and in `env` output."""
    extra = {"env_var": "true"}
    for target in hosts:
        pending = runtime.async_run(
            None,
            target.run_local,
            ["echo $env_var"],
            RunOpts(shell=True, log=Log.STDERR),
            extra_env=extra,
        )
        assert pending.wait().result.stdout == "true\n"
    for target in hosts:
        listed = runtime.async_run(
            None,
            target.run_local,
            ["env"],
            RunOpts(log=Log.STDERR),
            extra_env=extra,
        )
        assert "env_var=true" in listed.wait().result.stdout
@pytest.mark.skipif(is_darwin, reason="preload doesn't work on darwin")
def test_run_no_shell(hosts: list[Host], runtime: AsyncRuntime) -> None:
    """Argument-vector (non-shell) invocation works on every host."""
    opts = RunOpts(log=Log.STDERR)
    for target in hosts:
        pending = runtime.async_run(None, target.run_local, ["echo", "hello"], opts)
        assert pending.wait().result.stdout == "hello\n"
@pytest.mark.skipif(is_darwin, reason="preload doesn't work on darwin")
def test_run_function(hosts: list[Host], runtime: AsyncRuntime) -> None:
    """Arbitrary callables (not just commands) can run on the runtime."""

    def echoes_hello(h: Host) -> bool:
        # run() on the host itself, then inspect stdout.
        return h.run(["echo", "hello"]).stdout == "hello\n"

    for target in hosts:
        pending = runtime.async_run(None, echoes_hello, target)
        assert pending.wait().result
@pytest.mark.skipif(is_darwin, reason="preload doesn't work on darwin")
def test_timeout(hosts: list[Host], runtime: AsyncRuntime) -> None:
    """A command exceeding its timeout surfaces a ClanCmdTimeoutError."""
    for target in hosts:
        job = runtime.async_run(
            None, target.run_local, ["sleep", "10"], RunOpts(timeout=0.01)
        )
        failure = job.wait().error
        assert isinstance(failure, ClanCmdTimeoutError)
@pytest.mark.skipif(is_darwin, reason="preload doesn't work on darwin")
def test_run_exception(hosts: list[Host], runtime: AsyncRuntime) -> None:
    """Failing commands either report a returncode (check=False) or raise.

    With check=False the non-zero exit code is returned; with the default
    check behavior, check_all() must re-raise the collected failure.
    """
    for host in hosts:
        proc = runtime.async_run(
            None, host.run_local, ["exit 1"], RunOpts(shell=True, check=False)
        )
        assert proc.wait().result.returncode == 1
    # Idiomatic pytest replacement for the manual try/except/else
    # "should have raised" pattern: fail the test if no exception occurs.
    with pytest.raises(Exception):  # noqa: B017 - any failure type is acceptable here
        for host in hosts:
            runtime.async_run(None, host.run_local, ["exit 1"], RunOpts(shell=True))
        runtime.join_all()
        runtime.check_all()
@pytest.mark.skipif(is_darwin, reason="preload doesn't work on darwin")
def test_run_function_exception(hosts: list[Host], runtime: AsyncRuntime) -> None:
    """A failing callable must propagate its error through check_all()."""

    def some_func(h: Host) -> CmdOut:
        return h.run_local(["exit 1"], RunOpts(shell=True))

    # Idiomatic pytest replacement for the manual try/except/else
    # "should have raised" pattern: fail the test if no exception occurs.
    with pytest.raises(Exception):  # noqa: B017 - any failure type is acceptable here
        for host in hosts:
            runtime.async_run(None, some_func, host)
        runtime.join_all()
        runtime.check_all()

View File

@@ -0,0 +1,998 @@
import json
import logging
import shutil
from pathlib import Path
from typing import TYPE_CHECKING

import pytest

from clan_cli.errors import ClanError
from clan_cli.flake import Flake
from clan_cli.machines.machines import Machine
from clan_cli.nix import nix_eval, run
from clan_cli.tests.age_keys import SopsSetup
from clan_cli.tests.fixtures_flakes import ClanFlake
from clan_cli.tests.helpers import cli
from clan_cli.vars.check import check_vars
from clan_cli.vars.generate import Generator, generate_vars_for_machine
from clan_cli.vars.get import get_var
from clan_cli.vars.graph import all_missing_closure, requested_closure
from clan_cli.vars.list import stringify_all_vars
from clan_cli.vars.public_modules import in_repo
from clan_cli.vars.secret_modules import password_store, sops
from clan_cli.vars.set import set_var
# KeyPair is imported only for static type checking (no runtime cost).
if TYPE_CHECKING:
    from .age_keys import KeyPair
def test_dependencies_as_files(temp_dir: Path) -> None:
    """dependencies_as_dir writes decrypted dependencies as owner-only files."""
    from clan_cli.vars.generate import dependencies_as_dir

    expected = {
        "gen_1": {"var_1a": b"var_1a", "var_1b": b"var_1b"},
        "gen_2": {"var_2a": b"var_2a", "var_2b": b"var_2b"},
    }
    dependencies_as_dir(expected, temp_dir)
    assert temp_dir.is_dir()
    for generator_name, files in expected.items():
        for file_name, content in files.items():
            path = temp_dir / generator_name / file_name
            assert path.read_bytes() == content
            # ensure the files are not world readable
            assert path.stat().st_mode & 0o777 == 0o600
def test_required_generators() -> None:
    """Closure computation walks the generator dependency graph correctly."""
    # (name, dependencies, already exists?)
    specs = [
        ("gen_1", [], True),
        ("gen_2", ["gen_1"], False),
        ("gen_2a", ["gen_2"], False),
        ("gen_2b", ["gen_2"], True),
    ]
    generators: dict[str, Generator] = {}
    for name, deps, exists in specs:
        gen = Generator(name=name, dependencies=deps)
        gen.exists = exists
        generators[name] = gen

    def names_of(gens: list[Generator]) -> list[str]:
        return [g.name for g in gens]

    assert names_of(requested_closure(["gen_1"], generators)) == [
        "gen_1",
        "gen_2",
        "gen_2a",
        "gen_2b",
    ]
    assert names_of(requested_closure(["gen_2"], generators)) == [
        "gen_2",
        "gen_2a",
        "gen_2b",
    ]
    assert names_of(requested_closure(["gen_2a"], generators)) == [
        "gen_2",
        "gen_2a",
        "gen_2b",
    ]
    assert names_of(requested_closure(["gen_2b"], generators)) == [
        "gen_2",
        "gen_2a",
        "gen_2b",
    ]
    assert names_of(all_missing_closure(generators)) == [
        "gen_2",
        "gen_2a",
        "gen_2b",
    ]
@pytest.mark.with_core
def test_generate_public_and_secret_vars(
    monkeypatch: pytest.MonkeyPatch,
    flake_with_sops: ClanFlake,
) -> None:
    """End-to-end `vars generate` for a single machine.

    Covers public vs. secret files, a shared generator, a generator that
    consumes another generator's output, default values overridable by the
    generator script, commit messages, nix eval of generated values, and
    --regenerate.
    """
    flake = flake_with_sops
    config = flake.machines["my_machine"]
    config["nixpkgs"]["hostPlatform"] = "x86_64-linux"
    my_generator = config["clan"]["core"]["vars"]["generators"]["my_generator"]
    my_generator["files"]["my_value"]["secret"] = False
    my_generator["files"]["my_secret"]["secret"] = True
    my_generator["script"] = (
        "echo -n public > $out/my_value; echo -n secret > $out/my_secret; echo -n non-default > $out/value_with_default"
    )
    my_generator["files"]["value_with_default"]["secret"] = False
    my_generator["files"]["value_with_default"]["value"]["_type"] = "override"
    my_generator["files"]["value_with_default"]["value"]["priority"] = 1000  # mkDefault
    my_generator["files"]["value_with_default"]["value"]["content"] = "default_value"
    my_shared_generator = config["clan"]["core"]["vars"]["generators"][
        "my_shared_generator"
    ]
    my_shared_generator["share"] = True
    my_shared_generator["files"]["my_shared_value"]["secret"] = False
    my_shared_generator["script"] = "echo -n shared > $out/my_shared_value"
    dependent_generator = config["clan"]["core"]["vars"]["generators"][
        "dependent_generator"
    ]
    dependent_generator["share"] = False
    dependent_generator["files"]["my_secret"]["secret"] = True
    dependent_generator["dependencies"] = ["my_shared_generator"]
    dependent_generator["script"] = (
        "cat $in/my_shared_generator/my_shared_value > $out/my_secret"
    )
    flake.refresh()
    monkeypatch.chdir(flake.path)
    machine = Machine(name="my_machine", flake=Flake(str(flake.path)))
    assert not check_vars(machine)
    # before generation every var must be reported as unset
    vars_text = stringify_all_vars(machine)
    assert "my_generator/my_value: <not set>" in vars_text
    assert "my_generator/my_secret: <not set>" in vars_text
    assert "my_shared_generator/my_shared_value: <not set>" in vars_text
    assert "dependent_generator/my_secret: <not set>" in vars_text
    # ensure evaluating the default value works without generating the value
    value_non_default = run(
        nix_eval(
            [
                f"{flake.path}#nixosConfigurations.my_machine.config.clan.core.vars.generators.my_generator.files.value_with_default.value",
            ]
        )
    ).stdout.strip()
    assert json.loads(value_non_default) == "default_value"
    cli.run(["vars", "generate", "--flake", str(flake.path), "my_machine"])
    assert check_vars(machine)
    # get last commit message
    commit_message = run(
        ["git", "log", "-3", "--pretty=%B"],
    ).stdout.strip()
    assert (
        "Update vars via generator my_generator for machine my_machine"
        in commit_message
    )
    assert (
        "Update vars via generator my_shared_generator for machine my_machine"
        in commit_message
    )
    assert (
        get_var(
            str(machine.flake.path), machine.name, "my_generator/my_value"
        ).printable_value
        == "public"
    )
    assert (
        get_var(
            str(machine.flake.path), machine.name, "my_shared_generator/my_shared_value"
        ).printable_value
        == "shared"
    )
    vars_text = stringify_all_vars(machine)
    # secrets must never land in the in-repo (public) store
    in_repo_store = in_repo.FactStore(
        Machine(name="my_machine", flake=Flake(str(flake.path)))
    )
    assert not in_repo_store.exists(Generator("my_generator"), "my_secret")
    sops_store = sops.SecretStore(
        Machine(name="my_machine", flake=Flake(str(flake.path)))
    )
    assert sops_store.exists(Generator("my_generator"), "my_secret")
    assert sops_store.get(Generator("my_generator"), "my_secret").decode() == "secret"
    assert sops_store.exists(Generator("dependent_generator"), "my_secret")
    assert (
        sops_store.get(Generator("dependent_generator"), "my_secret").decode()
        == "shared"
    )
    assert "my_generator/my_value: public" in vars_text
    assert "my_generator/my_secret" in vars_text
    vars_eval = run(
        nix_eval(
            [
                f"{flake.path}#nixosConfigurations.my_machine.config.clan.core.vars.generators.my_generator.files.my_value.value",
            ]
        )
    ).stdout.strip()
    assert json.loads(vars_eval) == "public"
    value_non_default = run(
        nix_eval(
            [
                f"{flake.path}#nixosConfigurations.my_machine.config.clan.core.vars.generators.my_generator.files.value_with_default.value",
            ]
        )
    ).stdout.strip()
    assert json.loads(value_non_default) == "non-default"
    # test regeneration works
    cli.run(
        ["vars", "generate", "--flake", str(flake.path), "my_machine", "--regenerate"]
    )
# TODO: it doesn't actually test if the group has access
@pytest.mark.with_core
def test_generate_secret_var_sops_with_default_group(
    monkeypatch: pytest.MonkeyPatch,
    flake_with_sops: ClanFlake,
    sops_setup: SopsSetup,
) -> None:
    """Secrets generated under a default sops group stay accessible when
    users are added to the group or rotate their keys.
    """
    flake = flake_with_sops
    config = flake.machines["my_machine"]
    config["nixpkgs"]["hostPlatform"] = "x86_64-linux"
    config["clan"]["core"]["sops"]["defaultGroups"] = ["my_group"]
    first_generator = config["clan"]["core"]["vars"]["generators"]["first_generator"]
    first_generator["files"]["my_secret"]["secret"] = True
    first_generator["files"]["my_public"]["secret"] = False
    first_generator["script"] = (
        "echo hello > $out/my_secret && echo hello > $out/my_public"
    )
    second_generator = config["clan"]["core"]["vars"]["generators"]["second_generator"]
    second_generator["files"]["my_secret"]["secret"] = True
    second_generator["files"]["my_public"]["secret"] = False
    second_generator["script"] = (
        "echo hello > $out/my_secret && echo hello > $out/my_public"
    )
    flake.refresh()
    monkeypatch.chdir(flake.path)
    cli.run(["secrets", "groups", "add-user", "my_group", sops_setup.user])
    cli.run(["vars", "generate", "--flake", str(flake.path), "my_machine"])
    in_repo_store = in_repo.FactStore(
        Machine(name="my_machine", flake=Flake(str(flake.path)))
    )
    assert not in_repo_store.exists(Generator("first_generator"), "my_secret")
    sops_store = sops.SecretStore(
        Machine(name="my_machine", flake=Flake(str(flake.path)))
    )
    assert sops_store.exists(Generator("first_generator"), "my_secret")
    assert (
        sops_store.get(Generator("first_generator"), "my_secret").decode() == "hello\n"
    )
    assert sops_store.exists(Generator("second_generator"), "my_secret")
    assert (
        sops_store.get(Generator("second_generator"), "my_secret").decode() == "hello\n"
    )
    # add another user to the group and check if secret gets re-encrypted
    pubkey_user2 = sops_setup.keys[1]
    cli.run(
        [
            "secrets",
            "users",
            "add",
            "--flake",
            str(flake.path),
            "user2",
            pubkey_user2.pubkey,
        ]
    )
    cli.run(["secrets", "groups", "add-user", "my_group", "user2"])
    # check if new user can access the secret
    monkeypatch.setenv("USER", "user2")
    assert sops_store.user_has_access(
        "user2", Generator("first_generator", share=False), "my_secret"
    )
    assert sops_store.user_has_access(
        "user2", Generator("second_generator", share=False), "my_secret"
    )
    # Rotate key of a user
    pubkey_user3 = sops_setup.keys[2]
    cli.run(
        [
            "secrets",
            "users",
            "add",
            "--flake",
            str(flake.path),
            "--force",
            "user2",
            pubkey_user3.pubkey,
        ]
    )
    monkeypatch.setenv("USER", "user2")
    assert sops_store.user_has_access(
        "user2", Generator("first_generator", share=False), "my_secret"
    )
    assert sops_store.user_has_access(
        "user2", Generator("second_generator", share=False), "my_secret"
    )
@pytest.mark.with_core
def test_generated_shared_secret_sops(
    monkeypatch: pytest.MonkeyPatch,
    flake_with_sops: ClanFlake,
) -> None:
    """A shared sops secret generated by one machine is reused by others,
    and both machines can decrypt it.
    """
    flake = flake_with_sops
    m1_config = flake.machines["machine1"]
    m1_config["nixpkgs"]["hostPlatform"] = "x86_64-linux"
    shared_generator = m1_config["clan"]["core"]["vars"]["generators"][
        "my_shared_generator"
    ]
    shared_generator["share"] = True
    shared_generator["files"]["my_shared_secret"]["secret"] = True
    shared_generator["script"] = "echo hello > $out/my_shared_secret"
    m2_config = flake.machines["machine2"]
    m2_config["nixpkgs"]["hostPlatform"] = "x86_64-linux"
    m2_config["clan"]["core"]["vars"]["generators"][
        "my_shared_generator"
    ] = shared_generator.copy()
    flake.refresh()
    monkeypatch.chdir(flake.path)
    machine1 = Machine(name="machine1", flake=Flake(str(flake.path)))
    machine2 = Machine(name="machine2", flake=Flake(str(flake.path)))
    cli.run(["vars", "generate", "--flake", str(flake.path), "machine1"])
    assert check_vars(machine1)
    cli.run(["vars", "generate", "--flake", str(flake.path), "machine2"])
    assert check_vars(machine2)
    # NOTE(review): duplicated assertion; presumably one of these was meant
    # to re-check machine1 - confirm intent.
    assert check_vars(machine2)
    m1_sops_store = sops.SecretStore(machine1)
    m2_sops_store = sops.SecretStore(machine2)
    assert m1_sops_store.exists(
        Generator("my_shared_generator", share=True), "my_shared_secret"
    )
    assert m2_sops_store.exists(
        Generator("my_shared_generator", share=True), "my_shared_secret"
    )
    assert m1_sops_store.machine_has_access(
        Generator("my_shared_generator", share=True), "my_shared_secret"
    )
    assert m2_sops_store.machine_has_access(
        Generator("my_shared_generator", share=True), "my_shared_secret"
    )
@pytest.mark.with_core
def test_generate_secret_var_password_store(
    monkeypatch: pytest.MonkeyPatch,
    flake: ClanFlake,
    test_root: Path,
) -> None:
    """Generate, read, and delete secrets with the password-store backend,
    using pre-baked gnupg/password-store fixtures from the test data dir.
    """
    config = flake.machines["my_machine"]
    config["nixpkgs"]["hostPlatform"] = "x86_64-linux"
    clan_vars = config["clan"]["core"]["vars"]
    clan_vars["settings"]["secretStore"] = "password-store"
    # Create a second secret so that when we delete the first one,
    # we still have the second one to test `delete_store`:
    my_generator = clan_vars["generators"]["my_generator"]
    my_generator["files"]["my_secret"]["secret"] = True
    my_generator["script"] = "echo hello > $out/my_secret"
    my_generator2 = clan_vars["generators"]["my_generator2"]
    my_generator2["files"]["my_secret2"]["secret"] = True
    my_generator2["script"] = "echo world > $out/my_secret2"
    my_shared_generator = clan_vars["generators"]["my_shared_generator"]
    my_shared_generator["share"] = True
    my_shared_generator["files"]["my_shared_secret"]["secret"] = True
    my_shared_generator["script"] = "echo hello > $out/my_shared_secret"
    flake.refresh()
    monkeypatch.chdir(flake.path)
    # point gpg and pass at fixture copies inside the temporary flake
    gnupghome = flake.path / "gpg"
    shutil.copytree(test_root / "data" / "gnupg-home", gnupghome)
    monkeypatch.setenv("GNUPGHOME", str(gnupghome))
    password_store_dir = flake.path / "pass"
    shutil.copytree(test_root / "data" / "password-store", password_store_dir)
    monkeypatch.setenv("PASSWORD_STORE_DIR", str(password_store_dir))
    machine = Machine(name="my_machine", flake=Flake(str(flake.path)))
    assert not check_vars(machine)
    cli.run(["vars", "generate", "--flake", str(flake.path), "my_machine"])
    assert check_vars(machine)
    store = password_store.SecretStore(
        Machine(name="my_machine", flake=Flake(str(flake.path)))
    )
    # shared and per-machine secrets live in distinct store locations
    assert store.exists(Generator("my_generator", share=False, files=[]), "my_secret")
    assert not store.exists(
        Generator("my_generator", share=True, files=[]), "my_secret"
    )
    assert store.exists(
        Generator("my_shared_generator", share=True, files=[]), "my_shared_secret"
    )
    assert not store.exists(
        Generator("my_shared_generator", share=False, files=[]), "my_shared_secret"
    )
    generator = Generator(name="my_generator", share=False, files=[])
    assert store.get(generator, "my_secret").decode() == "hello\n"
    vars_text = stringify_all_vars(machine)
    assert "my_generator/my_secret" in vars_text
    my_generator = Generator("my_generator", share=False, files=[])
    var_name = "my_secret"
    store.delete(my_generator, var_name)
    assert not store.exists(my_generator, var_name)
    store.delete_store()
    store.delete_store()  # check idempotency
    my_generator2 = Generator("my_generator2", share=False, files=[])
    var_name = "my_secret2"
    assert not store.exists(my_generator2, var_name)
    # The shared secret should still be there,
    # not sure if we can delete those automatically:
    my_shared_generator = Generator("my_shared_generator", share=True, files=[])
    var_name = "my_shared_secret"
    assert store.exists(my_shared_generator, var_name)
@pytest.mark.with_core
def test_generate_secret_for_multiple_machines(
    monkeypatch: pytest.MonkeyPatch,
    flake_with_sops: ClanFlake,
) -> None:
    """Each machine gets its own public and secret values, including a
    machine whose hostPlatform differs from the local system.
    """
    flake = flake_with_sops
    from clan_cli.nix import nix_config
    local_system = nix_config()["system"]
    machine1_config = flake.machines["machine1"]
    machine1_config["nixpkgs"]["hostPlatform"] = "x86_64-linux"
    machine1_generator = machine1_config["clan"]["core"]["vars"]["generators"][
        "my_generator"
    ]
    machine1_generator["files"]["my_secret"]["secret"] = True
    machine1_generator["files"]["my_value"]["secret"] = False
    machine1_generator["script"] = (
        "echo machine1 > $out/my_secret && echo machine1 > $out/my_value"
    )
    machine2_config = flake.machines["machine2"]
    # Test that we can generate secrets for other platforms
    machine2_config["nixpkgs"]["hostPlatform"] = (
        "aarch64-linux" if local_system == "x86_64-linux" else "x86_64-linux"
    )
    machine2_generator = machine2_config["clan"]["core"]["vars"]["generators"][
        "my_generator"
    ]
    machine2_generator["files"]["my_secret"]["secret"] = True
    machine2_generator["files"]["my_value"]["secret"] = False
    machine2_generator["script"] = (
        "echo machine2 > $out/my_secret && echo machine2 > $out/my_value"
    )
    flake.refresh()
    monkeypatch.chdir(flake.path)
    cli.run(["vars", "generate", "--flake", str(flake.path)])
    # check if public vars have been created correctly
    in_repo_store1 = in_repo.FactStore(
        Machine(name="machine1", flake=Flake(str(flake.path)))
    )
    in_repo_store2 = in_repo.FactStore(
        Machine(name="machine2", flake=Flake(str(flake.path)))
    )
    assert in_repo_store1.exists(Generator("my_generator"), "my_value")
    assert in_repo_store2.exists(Generator("my_generator"), "my_value")
    assert (
        in_repo_store1.get(Generator("my_generator"), "my_value").decode()
        == "machine1\n"
    )
    assert (
        in_repo_store2.get(Generator("my_generator"), "my_value").decode()
        == "machine2\n"
    )
    # check if secret vars have been created correctly
    sops_store1 = sops.SecretStore(
        Machine(name="machine1", flake=Flake(str(flake.path)))
    )
    sops_store2 = sops.SecretStore(
        Machine(name="machine2", flake=Flake(str(flake.path)))
    )
    assert sops_store1.exists(Generator("my_generator"), "my_secret")
    assert sops_store2.exists(Generator("my_generator"), "my_secret")
    assert (
        sops_store1.get(Generator("my_generator"), "my_secret").decode() == "machine1\n"
    )
    assert (
        sops_store2.get(Generator("my_generator"), "my_secret").decode() == "machine2\n"
    )
@pytest.mark.with_core
def test_prompt(
    monkeypatch: pytest.MonkeyPatch,
    flake_with_sops: ClanFlake,
) -> None:
    """Prompt responses feed the generator script; prompts marked persist
    are stored in the sops secret store.
    """
    flake = flake_with_sops
    config = flake.machines["my_machine"]
    config["nixpkgs"]["hostPlatform"] = "x86_64-linux"
    my_generator = config["clan"]["core"]["vars"]["generators"]["my_generator"]
    my_generator["files"]["line_value"]["secret"] = False
    my_generator["files"]["multiline_value"]["secret"] = False
    my_generator["prompts"]["prompt1"]["description"] = "dream2nix"
    my_generator["prompts"]["prompt1"]["persist"] = False
    my_generator["prompts"]["prompt1"]["type"] = "line"
    my_generator["prompts"]["prompt2"]["description"] = "dream2nix"
    my_generator["prompts"]["prompt2"]["persist"] = False
    my_generator["prompts"]["prompt2"]["type"] = "line"
    my_generator["prompts"]["prompt_persist"]["persist"] = True
    my_generator["script"] = (
        "cat $prompts/prompt1 > $out/line_value; cat $prompts/prompt2 > $out/multiline_value"
    )
    flake.refresh()
    monkeypatch.chdir(flake.path)
    # canned responses, consumed in prompt order by the mocked prompt backend
    monkeypatch.setattr(
        "clan_cli.vars.prompt.MOCK_PROMPT_RESPONSE",
        iter(["line input", "my\nmultiline\ninput\n", "prompt_persist"]),
    )
    cli.run(["vars", "generate", "--flake", str(flake.path), "my_machine"])
    in_repo_store = in_repo.FactStore(
        Machine(name="my_machine", flake=Flake(str(flake.path)))
    )
    assert in_repo_store.exists(Generator("my_generator"), "line_value")
    assert (
        in_repo_store.get(Generator("my_generator"), "line_value").decode()
        == "line input"
    )
    assert in_repo_store.exists(Generator("my_generator"), "multiline_value")
    assert (
        in_repo_store.get(Generator("my_generator"), "multiline_value").decode()
        == "my\nmultiline\ninput\n"
    )
    sops_store = sops.SecretStore(
        Machine(name="my_machine", flake=Flake(str(flake.path)))
    )
    assert sops_store.exists(
        Generator(name="my_generator", share=False, files=[]), "prompt_persist"
    )
    assert (
        sops_store.get(Generator(name="my_generator"), "prompt_persist").decode()
        == "prompt_persist"
    )
@pytest.mark.with_core
def test_multi_machine_shared_vars(
    monkeypatch: pytest.MonkeyPatch,
    flake_with_sops: ClanFlake,
) -> None:
    """
    Ensure that shared vars are regenerated only when they should, and also can be
    accessed by all machines that should have access.
    Specifically:
    - make sure shared vars are not regenerated when a second machine is added
    - make sure vars can still be accessed by all machines, after they are regenerated
    """
    flake = flake_with_sops
    machine1_config = flake.machines["machine1"]
    machine1_config["nixpkgs"]["hostPlatform"] = "x86_64-linux"
    shared_generator = machine1_config["clan"]["core"]["vars"]["generators"][
        "shared_generator"
    ]
    shared_generator["share"] = True
    shared_generator["files"]["my_secret"]["secret"] = True
    shared_generator["files"]["my_value"]["secret"] = False
    # $RANDOM makes every actual generator run produce fresh values
    shared_generator["script"] = (
        "echo $RANDOM > $out/my_value && echo $RANDOM > $out/my_secret"
    )
    # machine 2 is equivalent to machine 1
    flake.machines["machine2"] = machine1_config
    flake.refresh()
    monkeypatch.chdir(flake.path)
    machine1 = Machine(name="machine1", flake=Flake(str(flake.path)))
    machine2 = Machine(name="machine2", flake=Flake(str(flake.path)))
    sops_store_1 = sops.SecretStore(machine1)
    sops_store_2 = sops.SecretStore(machine2)
    in_repo_store_1 = in_repo.FactStore(machine1)
    in_repo_store_2 = in_repo.FactStore(machine2)
    generator = Generator("shared_generator", share=True)
    # generate for machine 1
    cli.run(["vars", "generate", "--flake", str(flake.path), "machine1"])
    # read out values for machine 1
    m1_secret = sops_store_1.get(generator, "my_secret")
    m1_value = in_repo_store_1.get(generator, "my_value")
    # generate for machine 2
    cli.run(["vars", "generate", "--flake", str(flake.path), "machine2"])
    # ensure values are the same for both machines
    assert sops_store_2.get(generator, "my_secret") == m1_secret
    assert in_repo_store_2.get(generator, "my_value") == m1_value
    # ensure shared secret stays available for all machines after regeneration
    # regenerate for machine 1
    cli.run(
        ["vars", "generate", "--flake", str(flake.path), "machine1", "--regenerate"]
    )
    # ensure values changed
    new_secret_1 = sops_store_1.get(generator, "my_secret")
    new_value_1 = in_repo_store_1.get(generator, "my_value")
    new_secret_2 = sops_store_2.get(generator, "my_secret")
    assert new_secret_1 != m1_secret
    assert new_value_1 != m1_value
    # ensure that both machines still have access to the same secret
    assert new_secret_1 == new_secret_2
    assert sops_store_1.machine_has_access(generator, "my_secret")
    assert sops_store_2.machine_has_access(generator, "my_secret")
@pytest.mark.with_core
def test_api_set_prompts(
    monkeypatch: pytest.MonkeyPatch,
    flake: ClanFlake,
) -> None:
    """set_prompts stores a prompt value, overwrites it on a second call,
    and get_generators reports the previous value.
    """
    from clan_cli.vars._types import GeneratorUpdate
    from clan_cli.vars.list import get_generators, set_prompts
    config = flake.machines["my_machine"]
    config["nixpkgs"]["hostPlatform"] = "x86_64-linux"
    my_generator = config["clan"]["core"]["vars"]["generators"]["my_generator"]
    my_generator["prompts"]["prompt1"]["type"] = "line"
    my_generator["prompts"]["prompt1"]["persist"] = True
    my_generator["files"]["prompt1"]["secret"] = False
    flake.refresh()
    monkeypatch.chdir(flake.path)
    params = {"machine_name": "my_machine", "base_dir": str(flake.path)}
    set_prompts(
        **params,
        updates=[
            GeneratorUpdate(
                generator="my_generator",
                prompt_values={"prompt1": "input1"},
            )
        ],
    )
    machine = Machine(name="my_machine", flake=Flake(str(flake.path)))
    store = in_repo.FactStore(machine)
    assert store.exists(Generator("my_generator"), "prompt1")
    assert store.get(Generator("my_generator"), "prompt1").decode() == "input1"
    # setting the same prompt again must overwrite the stored value
    set_prompts(
        **params,
        updates=[
            GeneratorUpdate(
                generator="my_generator",
                prompt_values={"prompt1": "input2"},
            )
        ],
    )
    assert store.get(Generator("my_generator"), "prompt1").decode() == "input2"
    generators = get_generators(**params)
    assert len(generators) == 1
    assert generators[0].name == "my_generator"
    assert generators[0].prompts[0].name == "prompt1"
    assert generators[0].prompts[0].previous_value == "input2"
@pytest.mark.with_core
def test_stdout_of_generate(
    monkeypatch: pytest.MonkeyPatch,
    flake_with_sops: ClanFlake,
    caplog: pytest.LogCaptureFixture,
) -> None:
    """Log output of generate shows old/new values for public vars,
    but secret values never appear in the logs.
    """
    flake = flake_with_sops
    config = flake.machines["my_machine"]
    config["nixpkgs"]["hostPlatform"] = "x86_64-linux"
    my_generator = config["clan"]["core"]["vars"]["generators"]["my_generator"]
    my_generator["files"]["my_value"]["secret"] = False
    my_generator["script"] = "echo -n hello > $out/my_value"
    my_secret_generator = config["clan"]["core"]["vars"]["generators"][
        "my_secret_generator"
    ]
    my_secret_generator["files"]["my_secret"]["secret"] = True
    my_secret_generator["script"] = "echo -n hello > $out/my_secret"
    flake.refresh()
    monkeypatch.chdir(flake.path)
    # NOTE(review): redundant local import; generate_vars_for_machine is
    # already imported at module level.
    from clan_cli.vars.generate import generate_vars_for_machine
    # with capture_output as output:
    with caplog.at_level(logging.INFO):
        generate_vars_for_machine(
            Machine(name="my_machine", flake=Flake(str(flake.path))),
            "my_generator",
            regenerate=False,
        )
    assert "Updated var my_generator/my_value" in caplog.text
    assert "old: <not set>" in caplog.text
    assert "new: hello" in caplog.text
    caplog.clear()
    set_var("my_machine", "my_generator/my_value", b"world", Flake(str(flake.path)))
    with caplog.at_level(logging.INFO):
        generate_vars_for_machine(
            Machine(name="my_machine", flake=Flake(str(flake.path))),
            "my_generator",
            regenerate=True,
        )
    assert "Updated var my_generator/my_value" in caplog.text
    assert "old: world" in caplog.text
    assert "new: hello" in caplog.text
    caplog.clear()
    # check the output when nothing gets regenerated
    with caplog.at_level(logging.INFO):
        generate_vars_for_machine(
            Machine(name="my_machine", flake=Flake(str(flake.path))),
            "my_generator",
            regenerate=True,
        )
    assert "Updated var" not in caplog.text
    assert "hello" in caplog.text
    caplog.clear()
    with caplog.at_level(logging.INFO):
        generate_vars_for_machine(
            Machine(name="my_machine", flake=Flake(str(flake.path))),
            "my_secret_generator",
            regenerate=False,
        )
    assert "Updated secret var my_secret_generator/my_secret" in caplog.text
    assert "hello" not in caplog.text
    caplog.clear()
    set_var(
        "my_machine",
        "my_secret_generator/my_secret",
        b"world",
        Flake(str(flake.path)),
    )
    with caplog.at_level(logging.INFO):
        generate_vars_for_machine(
            Machine(name="my_machine", flake=Flake(str(flake.path))),
            "my_secret_generator",
            regenerate=True,
        )
    assert "Updated secret var my_secret_generator/my_secret" in caplog.text
    assert "world" not in caplog.text
    assert "hello" not in caplog.text
    caplog.clear()
@pytest.mark.with_core
def test_migration(
    monkeypatch: pytest.MonkeyPatch,
    flake_with_sops: ClanFlake,
    caplog: pytest.LogCaptureFixture,
) -> None:
    """Facts are migrated into vars via migrateFact; a var whose
    secret/public flag mismatches the fact falls back to running the script.
    """
    flake = flake_with_sops
    config = flake.machines["my_machine"]
    config["nixpkgs"]["hostPlatform"] = "x86_64-linux"
    my_service = config["clan"]["core"]["facts"]["services"]["my_service"]
    my_service["public"]["my_value"] = {}
    my_service["secret"]["my_secret"] = {}
    my_service["generator"][
        "script"
    ] = "echo -n hello > $facts/my_value && echo -n hello > $secrets/my_secret"
    my_generator = config["clan"]["core"]["vars"]["generators"]["my_generator"]
    my_generator["files"]["my_value"]["secret"] = False
    my_generator["files"]["my_secret"]["secret"] = True
    my_generator["migrateFact"] = "my_service"
    my_generator["script"] = "echo -n other > $out/my_value"
    other_service = config["clan"]["core"]["facts"]["services"]["other_service"]
    other_service["secret"]["other_value"] = {}
    other_service["generator"]["script"] = "echo -n hello > $secrets/other_value"
    other_generator = config["clan"]["core"]["vars"]["generators"]["other_generator"]
    # the var to migrate to is mistakenly marked as not secret (migration should fail)
    other_generator["files"]["other_value"]["secret"] = False
    other_generator["migrateFact"] = "my_service"
    other_generator["script"] = "echo -n value-from-vars > $out/other_value"
    flake.refresh()
    monkeypatch.chdir(flake.path)
    cli.run(["facts", "generate", "--flake", str(flake.path), "my_machine"])
    with caplog.at_level(logging.INFO):
        cli.run(["vars", "generate", "--flake", str(flake.path), "my_machine"])
    assert "Migrated var my_generator/my_value" in caplog.text
    assert "Migrated secret var my_generator/my_secret" in caplog.text
    in_repo_store = in_repo.FactStore(
        Machine(name="my_machine", flake=Flake(str(flake.path)))
    )
    sops_store = sops.SecretStore(
        Machine(name="my_machine", flake=Flake(str(flake.path)))
    )
    # migrated values carry the fact's content ("hello"), not the script's
    assert in_repo_store.exists(Generator("my_generator"), "my_value")
    assert in_repo_store.get(Generator("my_generator"), "my_value").decode() == "hello"
    assert sops_store.exists(Generator("my_generator"), "my_secret")
    assert sops_store.get(Generator("my_generator"), "my_secret").decode() == "hello"
    assert in_repo_store.exists(Generator("other_generator"), "other_value")
    assert (
        in_repo_store.get(Generator("other_generator"), "other_value").decode()
        == "value-from-vars"
    )
@pytest.mark.with_core
def test_fails_when_files_are_left_from_other_backend(
    monkeypatch: pytest.MonkeyPatch,
    flake_with_sops: ClanFlake,
) -> None:
    """Flipping a file from secret to public must fail while leftovers from
    the secret backend exist; public to secret is accepted.
    """
    flake = flake_with_sops
    config = flake.machines["my_machine"]
    config["nixpkgs"]["hostPlatform"] = "x86_64-linux"
    my_secret_generator = config["clan"]["core"]["vars"]["generators"][
        "my_secret_generator"
    ]
    my_secret_generator["files"]["my_secret"]["secret"] = True
    my_secret_generator["script"] = "echo hello > $out/my_secret"
    my_value_generator = config["clan"]["core"]["vars"]["generators"][
        "my_value_generator"
    ]
    my_value_generator["files"]["my_value"]["secret"] = False
    my_value_generator["script"] = "echo hello > $out/my_value"
    flake.refresh()
    monkeypatch.chdir(flake.path)
    for generator in ["my_secret_generator", "my_value_generator"]:
        generate_vars_for_machine(
            Machine(name="my_machine", flake=Flake(str(flake.path))),
            generator,
            regenerate=False,
        )
    # Will raise. It was secret before, but now it's not.
    my_secret_generator["files"]["my_secret"][
        "secret"
    ] = False  # secret -> public (NOT OK)
    # Will not raise. It was not secret before, and it's secret now.
    my_value_generator["files"]["my_value"]["secret"] = True  # public -> secret (OK)
    flake.refresh()
    monkeypatch.chdir(flake.path)
    for generator in ["my_secret_generator", "my_value_generator"]:
        # This should raise an error
        if generator == "my_secret_generator":
            with pytest.raises(ClanError):
                generate_vars_for_machine(
                    Machine(name="my_machine", flake=Flake(str(flake.path))),
                    generator,
                    regenerate=False,
                )
        else:
            generate_vars_for_machine(
                Machine(name="my_machine", flake=Flake(str(flake.path))),
                generator,
                regenerate=False,
            )
@pytest.mark.with_core
def test_keygen(monkeypatch: pytest.MonkeyPatch, flake: ClanFlake) -> None:
    """`clan vars keygen` creates the user's sops keys and also succeeds
    when a private key already exists from a previous run.
    """
    monkeypatch.chdir(flake.path)
    cli.run(["vars", "keygen", "--flake", str(flake.path), "--user", "user"])
    user_key_dir = flake.path / "sops" / "users" / "user"
    # check public key exists
    assert user_key_dir.is_dir()
    # check private key exists
    assert (flake.temporary_home / ".config" / "sops" / "age" / "keys.txt").is_file()
    # it should still work, even if the keys already exist
    import shutil

    shutil.rmtree(user_key_dir)
    cli.run(["vars", "keygen", "--flake", str(flake.path), "--user", "user"])
    # check public key exists
    assert user_key_dir.is_dir()
@pytest.mark.with_core
def test_invalidation(
    monkeypatch: pytest.MonkeyPatch,
    flake: ClanFlake,
) -> None:
    """Values are regenerated only when the generator's `validation` data
    changes; repeated runs without a change keep the value stable.
    """
    config = flake.machines["my_machine"]
    config["nixpkgs"]["hostPlatform"] = "x86_64-linux"
    my_generator = config["clan"]["core"]["vars"]["generators"]["my_generator"]
    my_generator["files"]["my_value"]["secret"] = False
    # $RANDOM ensures a fresh value whenever the script actually runs
    my_generator["script"] = "echo -n $RANDOM > $out/my_value"
    flake.refresh()
    monkeypatch.chdir(flake.path)
    cli.run(["vars", "generate", "--flake", str(flake.path), "my_machine"])
    machine = Machine(name="my_machine", flake=Flake(str(flake.path)))
    value1 = get_var(
        str(machine.flake.path), machine.name, "my_generator/my_value"
    ).printable_value
    # generate again and make sure nothing changes without the invalidation data being set
    cli.run(["vars", "generate", "--flake", str(flake.path), "my_machine"])
    value1_new = get_var(
        str(machine.flake.path), machine.name, "my_generator/my_value"
    ).printable_value
    assert value1 == value1_new
    # set the invalidation data of the generator
    my_generator["validation"] = 1
    flake.refresh()
    # generate again and make sure the value changes
    cli.run(["vars", "generate", "--flake", str(flake.path), "my_machine"])
    value2 = get_var(
        str(machine.flake.path), machine.name, "my_generator/my_value"
    ).printable_value
    assert value1 != value2
    # generate again without changing invalidation data -> value should not change
    cli.run(["vars", "generate", "--flake", str(flake.path), "my_machine"])
    value2_new = get_var(
        str(machine.flake.path), machine.name, "my_generator/my_value"
    ).printable_value
    assert value2 == value2_new
@pytest.mark.with_core
def test_dynamic_invalidation(
    monkeypatch: pytest.MonkeyPatch,
    flake: ClanFlake,
) -> None:
    """A generator's `validation` may depend on a parent generator's output.

    Verifies that `clan vars generate` re-evaluates the dependent generator's
    validationHash after running the parent but before running the dependent,
    so everything settles in a single pass.
    """
    gen_prefix = "config.clan.core.vars.generators"
    machine = Machine(name="my_machine", flake=Flake(str(flake.path)))

    cfg = flake.machines[machine.name]
    cfg["nixpkgs"]["hostPlatform"] = "x86_64-linux"
    parent_gen = cfg["clan"]["core"]["vars"]["generators"]["my_generator"]
    parent_gen["files"]["my_value"]["secret"] = False
    parent_gen["script"] = "echo -n $RANDOM > $out/my_value"
    child_gen = cfg["clan"]["core"]["vars"]["generators"]["dependent_generator"]
    child_gen["files"]["my_value"]["secret"] = False
    child_gen["dependencies"] = ["my_generator"]
    child_gen["script"] = "echo -n $RANDOM > $out/my_value"
    flake.refresh()

    # this is an abuse: hijack hardware-configuration.nix to wire the child's
    # validation to the parent's generated file
    custom_nix = flake.path / "machines" / machine.name / "hardware-configuration.nix"
    custom_nix.write_text(
        """
        { config, ... }: let
          p = config.clan.core.vars.generators.my_generator.files.my_value.path;
        in {
          clan.core.vars.generators.dependent_generator.validation = if builtins.pathExists p then builtins.readFile p else null;
        }
        """
    )
    flake.refresh()
    machine.flush_caches()
    monkeypatch.chdir(flake.path)

    # Before generating, the dependent validation must be empty (the parent's
    # file does not exist yet; see the bogus hardware-configuration.nix above).
    # Avoid `*.files.value` in this initial select - generators haven't run yet.
    hashes_initial = machine.eval_nix(f"{gen_prefix}.*.{{validationHash}}")
    assert hashes_initial["dependent_generator"]["validationHash"] is None

    # Generate my_generator and (the dependent) dependent_generator.
    cli.run(["vars", "generate", "--flake", str(flake.path), machine.name])
    machine.flush_caches()
    first_pass = machine.eval_nix(gen_prefix)
    assert first_pass["dependent_generator"]["validationHash"] is not None

    # A second run must be a no-op: the validationHash was already refreshed
    # mid-run, so neither generator wants to run again and no value changes.
    cli.run(["vars", "generate", "--flake", str(flake.path), machine.name])
    machine.flush_caches()
    second_pass = machine.eval_nix(gen_prefix)
    assert (
        first_pass["dependent_generator"]["validationHash"]
        == second_pass["dependent_generator"]["validationHash"]
    )
    for gen_name in ("my_generator", "dependent_generator"):
        assert (
            first_pass[gen_name]["files"]["my_value"]["value"]
            == second_pass[gen_name]["files"]["my_value"]["value"]
        )

View File

@@ -0,0 +1,128 @@
import json
import subprocess
from contextlib import ExitStack
import pytest
from clan_cli import cmd
from clan_cli.flake import Flake
from clan_cli.machines.machines import Machine
from clan_cli.nix import nix_eval, run
from clan_cli.tests.age_keys import SopsSetup
from clan_cli.tests.fixtures_flakes import ClanFlake
from clan_cli.tests.helpers import cli
from clan_cli.tests.nix_config import ConfigItem
from clan_cli.vms.run import inspect_vm, spawn_vm
@pytest.mark.impure
def test_vm_deployment(
    flake: ClanFlake,
    nix_config: dict[str, ConfigItem],
    sops_setup: SopsSetup,
) -> None:
    """Generate vars for two machines, boot both as VMs, and verify that
    secrets are deployed - or withheld when `deploy = false` - as configured.
    """

    def eval_attr(attr: str) -> str:
        # Evaluate a nix attribute of this flake; return its stripped stdout.
        return run(nix_eval([f"{flake.path}#{attr}"])).stdout.strip()

    system = nix_config["system"].value

    # machine 1
    m1 = flake.machines["m1_machine"]
    m1["nixpkgs"]["hostPlatform"] = system
    m1["clan"]["virtualisation"]["graphics"] = False
    m1["services"]["getty"]["autologinUser"] = "root"
    m1["services"]["openssh"]["enable"] = True
    m1["networking"]["firewall"]["enable"] = False
    m1["users"]["users"]["root"]["openssh"]["authorizedKeys"]["keys"] = [
        # put your key here when debugging and pass ssh_port in run_vm_in_thread call below
    ]
    m1_generator = m1["clan"]["core"]["vars"]["generators"]["m1_generator"]
    m1_generator["files"]["my_secret"]["secret"] = True
    m1_generator["script"] = """
        echo hello > $out/my_secret
    """
    shared_gen = m1["clan"]["core"]["vars"]["generators"]["my_shared_generator"]
    shared_gen["share"] = True
    shared_gen["files"]["shared_secret"]["secret"] = True
    shared_gen["files"]["no_deploy_secret"]["secret"] = True
    shared_gen["files"]["no_deploy_secret"]["deploy"] = False
    shared_gen["script"] = """
        echo hello > $out/shared_secret
        echo hello > $out/no_deploy_secret
    """

    # machine 2 - same base setup, shares my_shared_generator with machine 1
    m2 = flake.machines["m2_machine"]
    m2["nixpkgs"]["hostPlatform"] = system
    m2["clan"]["virtualisation"]["graphics"] = False
    m2["services"]["getty"]["autologinUser"] = "root"
    m2["services"]["openssh"]["enable"] = True
    m2["users"]["users"]["root"]["openssh"]["authorizedKeys"]["keys"] = [
        # put your key here when debugging and pass ssh_port in run_vm_in_thread call below
    ]
    m2["networking"]["firewall"]["enable"] = False
    m2["clan"]["core"]["vars"]["generators"]["my_shared_generator"] = shared_gen.copy()

    flake.refresh()
    sops_setup.init(flake.path)
    cli.run(["vars", "generate", "--flake", str(flake.path)])

    # every machine must end up with a non-empty sops secret set
    for machine in ["m1_machine", "m2_machine"]:
        sops_secrets = json.loads(
            eval_attr(f"nixosConfigurations.{machine}.config.sops.secrets")
        )
        assert sops_secrets != {}

    # secret paths must resolve (i.e. not be the "no-such-path" placeholder)
    my_secret_path = eval_attr(
        "nixosConfigurations.m1_machine.config.clan.core.vars.generators.m1_generator.files.my_secret.path"
    )
    assert "no-such-path" not in my_secret_path
    for machine in ["m1_machine", "m2_machine"]:
        shared_secret_path = eval_attr(
            f"nixosConfigurations.{machine}.config.clan.core.vars.generators.my_shared_generator.files.shared_secret.path"
        )
        assert "no-such-path" not in shared_secret_path

    # run nix flake lock
    cmd.run(["nix", "flake", "lock"], cmd.RunOpts(cwd=flake.path))

    vm1_config = inspect_vm(machine=Machine("m1_machine", Flake(str(flake.path))))
    vm2_config = inspect_vm(machine=Machine("m2_machine", Flake(str(flake.path))))
    with ExitStack() as stack:
        vm1 = stack.enter_context(spawn_vm(vm1_config, stdin=subprocess.DEVNULL))
        vm2 = stack.enter_context(spawn_vm(vm2_config, stdin=subprocess.DEVNULL))
        qga_m1 = stack.enter_context(vm1.qga_connect())
        qga_m2 = stack.enter_context(vm2.qga_connect())
        # run these always successful commands to make sure all vms have started before continuing
        qga_m1.run(["echo"])
        qga_m2.run(["echo"])

        # per-machine secret is deployed on m1
        result = qga_m1.run(["cat", "/run/secrets/vars/m1_generator/my_secret"])
        assert result.stdout == "hello\n"
        # shared secret is deployed on both machines
        for qga in (qga_m1, qga_m2):
            result = qga.run(
                ["cat", "/run/secrets/vars/my_shared_generator/shared_secret"]
            )
            assert result.stdout == "hello\n"
        # the deploy=false secret must NOT be present on the target
        result = qga_m1.run(
            ["test", "-e", "/run/secrets/vars/my_shared_generator/no_deploy_secret"],
            check=False,
        )
        assert result.returncode != 0

View File

@@ -0,0 +1,119 @@
from pathlib import Path
from typing import TYPE_CHECKING
import pytest
from clan_cli.flake import Flake
from clan_cli.machines.machines import Machine
from clan_cli.tests.fixtures_flakes import ClanFlake, FlakeForTest
from clan_cli.tests.helpers import cli
from clan_cli.tests.stdout import CaptureOutput
from clan_cli.vms.run import inspect_vm, spawn_vm
# Only needed for the annotation on test_run; avoid importing at runtime.
if TYPE_CHECKING:
    from .age_keys import KeyPair
# True when /dev/kvm is absent, i.e. KVM acceleration is unavailable on this host.
no_kvm = not Path("/dev/kvm").exists()
@pytest.mark.impure
def test_inspect(
    test_flake_with_core: FlakeForTest, capture_output: CaptureOutput
) -> None:
    """`clan vms inspect` prints the VM configuration (e.g. its core count)."""
    inspect_args = ["vms", "inspect", "--flake", str(test_flake_with_core.path), "vm1"]
    with capture_output as output:
        cli.run(inspect_args)
    assert "Cores" in output.out
@pytest.mark.skipif(no_kvm, reason="Requires KVM")
@pytest.mark.impure
def test_run(
    monkeypatch: pytest.MonkeyPatch,
    test_flake_with_core: FlakeForTest,
    age_keys: list["KeyPair"],
) -> None:
    """Smoke test: boot vm1 and immediately shut it down via the run command."""
    monkeypatch.chdir(test_flake_with_core.path)
    monkeypatch.setenv("SOPS_AGE_KEY", age_keys[0].privkey)
    # register the age key as user1 and make that user an admin,
    # so secrets can be decrypted during the VM build/run
    cli.run(["secrets", "users", "add", "user1", age_keys[0].pubkey])
    cli.run(["secrets", "groups", "add-user", "admins", "user1"])
    cli.run(["vms", "run", "--no-block", "vm1", "-c", "shutdown", "-h", "now"])
@pytest.mark.skipif(no_kvm, reason="Requires KVM")
@pytest.mark.impure
def test_vm_persistence(
flake: ClanFlake,
) -> None:
# set up a clan flake with some systemd services to test persistence
config = flake.machines["my_machine"]
config["nixpkgs"]["hostPlatform"] = "x86_64-linux"
# logrotate-checkconf doesn't work in VM because /nix/store is owned by nobody
config["systemd"]["services"]["logrotate-checkconf"]["enable"] = False
config["services"]["getty"]["autologinUser"] = "root"
config["clan"]["virtualisation"] = {"graphics": False}
config["clan"]["core"]["networking"] = {"targetHost": "client"}
config["clan"]["core"]["state"]["my_state"]["folders"] = [
# to be owned by root
"/var/my-state",
# to be owned by user 'test'
"/var/user-state",
]
config["users"]["users"] = {
"test": {"initialPassword": "test", "isSystemUser": True, "group": "users"},
"root": {"initialPassword": "root"},
}
flake.refresh()
vm_config = inspect_vm(machine=Machine("my_machine", Flake(str(flake.path))))
with spawn_vm(vm_config) as vm, vm.qga_connect() as qga:
# create state via qmp command instead of systemd service
qga.run(["/bin/sh", "-c", "echo 'dream2nix' > /var/my-state/root"])
qga.run(["/bin/sh", "-c", "echo 'dream2nix' > /var/my-state/test"])
qga.run(["/bin/sh", "-c", "chown test /var/my-state/test"])
qga.run(["/bin/sh", "-c", "chown test /var/user-state"])
qga.run_nonblocking(["shutdown", "-h", "now"])
## start vm again
with spawn_vm(vm_config) as vm, vm.qga_connect() as qga:
# check state exists
qga.run(["cat", "/var/my-state/test"])
# ensure root file is owned by root
qga.run(["stat", "-c", "%U", "/var/my-state/root"])
# ensure test file is owned by test
qga.run(["stat", "-c", "%U", "/var/my-state/test"])
# ensure /var/user-state is owned by test
qga.run(["stat", "-c", "%U", "/var/user-state"])
# ensure that the file created by the service is still there and has the expected content
result = qga.run(["cat", "/var/my-state/test"])
assert result.stdout == "dream2nix\n", result.stdout
# check for errors
result = qga.run(["cat", "/var/my-state/error"], check=False)
assert result.returncode == 1, result.stdout
# check all systemd services are OK, or print details
result = qga.run(
[
"/bin/sh",
"-c",
"systemctl --failed | tee /tmp/log | grep -q '0 loaded units listed' || ( cat /tmp/log && false )",
],
)