vars: fix re-generate behavior for dependencies of shared vars (#5001)
Fixes https://git.clan.lol/clan/clan-core/issues/3791

This fixes multiple issues we had when re-generating shared vars:

Problem 1: Shared vars are re-generated for each individual machine instead of just once (see #3791).
Problem 2: When a shared var was re-generated for one machine, dependent vars on other machines did not get re-generated, leading to broken state.

Reviewed-on: https://git.clan.lol/clan/clan-core/pulls/5001
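The core of the change, visible in the diff below, is that dependencies on shared generators are now keyed with machine=None instead of being duplicated per machine, so dependents on every machine point at one and the same dependency key. A minimal standalone sketch of that idea (the Gen class and missing_closure helper here are simplified stand-ins, not the actual clan-core implementation; only the GeneratorKey shape follows the diff):

# Sketch only: illustrates why keying shared generators with machine=None
# makes dependents on *all* machines re-generate when the shared one changes.
from dataclasses import dataclass, field


@dataclass(frozen=True)
class GeneratorKey:
    machine: str | None  # None marks a shared generator
    name: str


@dataclass
class Gen:  # hypothetical stand-in for the real Generator class
    key: GeneratorKey
    dependencies: list[GeneratorKey] = field(default_factory=list)
    exists: bool = True


def missing_closure(gens: dict[GeneratorKey, Gen]) -> set[GeneratorKey]:
    """Every generator that is missing, plus everything that transitively depends on one."""
    missing = {key for key, gen in gens.items() if not gen.exists}
    changed = True
    while changed:
        changed = False
        for key, gen in gens.items():
            if key not in missing and any(dep in missing for dep in gen.dependencies):
                missing.add(key)
                changed = True
    return missing


shared = Gen(GeneratorKey(None, "shared_generator"), exists=False)  # just re-generated
child1 = Gen(GeneratorKey("machine1", "child_generator"), [shared.key])
child2 = Gen(GeneratorKey("machine2", "child_generator"), [shared.key])
gens = {gen.key: gen for gen in (shared, child1, child2)}

# Both children land in the closure, even though only one machine was touched.
print(missing_closure(gens))

Before the fix, each machine carried its own key for the shared generator, so re-generating it for machine1 never invalidated child_generator on machine2.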
@@ -1,6 +0,0 @@
{ fetchgit }:
fetchgit {
url = "https://git.clan.lol/clan/clan-core.git";
rev = "5d884cecc2585a29b6a3596681839d081b4de192";
sha256 = "09is1afmncamavb2q88qac37vmsijxzsy1iz1vr6gsyjq2rixaxc";
}
@@ -50,6 +50,7 @@
self.nixosConfigurations."test-flash-machine-${pkgs.hostPlatform.system}".config.system.build.toplevel
self.nixosConfigurations."test-flash-machine-${pkgs.hostPlatform.system}".config.system.build.diskoScript
self.nixosConfigurations."test-flash-machine-${pkgs.hostPlatform.system}".config.system.build.diskoScript.drvPath
(import ../installation/facter-report.nix pkgs.hostPlatform.system)
]
++ builtins.map (i: i.outPath) (builtins.attrValues self.inputs);
closureInfo = pkgs.closureInfo { rootPaths = dependencies; };
checks/installation/facter-report.nix (new file, 10 lines)
@@ -0,0 +1,10 @@
system:
builtins.fetchurl {
  url = "https://git.clan.lol/clan/test-fixtures/raw/commit/4a2bc56d886578124b05060d3fb7eddc38c019f8/nixos-vm-facter-json/${system}.json";
  sha256 =
    {
      aarch64-linux = "sha256:1rlfymk03rmfkm2qgrc8l5kj5i20srx79n1y1h4nzlpwaz0j7hh2";
      x86_64-linux = "sha256:16myh0ll2gdwsiwkjw5ba4dl23ppwbsanxx214863j7nvzx42pws";
    }
    .${system};
}
@@ -18,27 +18,23 @@
fileSystems."/".device = lib.mkDefault "/dev/vda";
boot.loader.grub.device = lib.mkDefault "/dev/vda";

imports = [ self.nixosModules.test-install-machine-without-system ];
imports = [
self.nixosModules.test-install-machine-without-system
];
};

clan.machines.test-install-machine-with-system =
{ pkgs, ... }:
{
# https://git.clan.lol/clan/test-fixtures
facter.reportPath = builtins.fetchurl {
url = "https://git.clan.lol/clan/test-fixtures/raw/commit/4a2bc56d886578124b05060d3fb7eddc38c019f8/nixos-vm-facter-json/${pkgs.hostPlatform.system}.json";
sha256 =
{
aarch64-linux = "sha256:1rlfymk03rmfkm2qgrc8l5kj5i20srx79n1y1h4nzlpwaz0j7hh2";
x86_64-linux = "sha256:16myh0ll2gdwsiwkjw5ba4dl23ppwbsanxx214863j7nvzx42pws";
}
.${pkgs.hostPlatform.system};
};
facter.reportPath = import ./facter-report.nix pkgs.hostPlatform.system;

fileSystems."/".device = lib.mkDefault "/dev/vda";
boot.loader.grub.device = lib.mkDefault "/dev/vda";

imports = [ self.nixosModules.test-install-machine-without-system ];
};

flake.nixosModules = {
test-install-machine-without-system =
{ lib, modulesPath, ... }:
@@ -159,6 +155,7 @@
pkgs.stdenv.drvPath
pkgs.bash.drvPath
pkgs.buildPackages.xorg.lndir
(import ./facter-report.nix pkgs.hostPlatform.system)
]
++ builtins.map (i: i.outPath) (builtins.attrValues self.inputs);
};

@@ -35,6 +35,7 @@
pkgs.stdenv.drvPath
pkgs.stdenvNoCC
self.nixosConfigurations.test-morph-machine.config.system.build.toplevel
(import ../installation/facter-report.nix pkgs.hostPlatform.system)
]
++ builtins.map (i: i.outPath) (builtins.attrValues self.inputs);
closureInfo = pkgs.closureInfo { rootPaths = dependencies; };

@@ -112,6 +112,7 @@
pkgs.stdenv.drvPath
pkgs.bash.drvPath
pkgs.buildPackages.xorg.lndir
(import ../installation/facter-report.nix pkgs.hostPlatform.system)
]
++ builtins.map (i: i.outPath) (builtins.attrValues self.inputs);
};
pkgs/clan-cli/clan_cli/python-deps.nix (new file, 39 lines)
@@ -0,0 +1,39 @@
{
  python3,
  fetchFromGitHub,
}:
rec {
  asyncore-wsgi = python3.pkgs.buildPythonPackage rec {
    pname = "asyncore-wsgi";
    version = "0.0.11";
    src = fetchFromGitHub {
      owner = "romanvm";
      repo = "asyncore-wsgi";
      rev = "${version}";
      sha256 = "sha256-06rWCC8qZb9H9qPUDQpzASKOY4VX+Y+Bm9a5e71Hqhc=";
    };
    pyproject = true;
    buildInputs = [
      python3.pkgs.setuptools
    ];
  };

  web-pdb = python3.pkgs.buildPythonPackage rec {
    pname = "web-pdb";
    version = "1.6.3";
    src = fetchFromGitHub {
      owner = "romanvm";
      repo = "python-web-pdb";
      rev = "${version}";
      sha256 = "sha256-VG0mHbogx0n1f38h9VVxFQgjvghipAf1rb43/Bwb/8I=";
    };
    pyproject = true;
    buildInputs = [
      python3.pkgs.setuptools
    ];
    propagatedBuildInputs = [
      python3.pkgs.bottle
      asyncore-wsgi
    ];
  };
}
@@ -431,20 +431,22 @@ def test_generated_shared_secret_sops(
generator_m1 = Generator(
"my_shared_generator",
share=True,
machine="machine1",
_flake=machine1.flake,
)
generator_m2 = Generator(
"my_shared_generator",
share=True,
machine="machine2",
_flake=machine2.flake,
)

assert m1_sops_store.exists(generator_m1, "my_shared_secret")
assert m2_sops_store.exists(generator_m2, "my_shared_secret")
assert m1_sops_store.machine_has_access(generator_m1, "my_shared_secret")
assert m2_sops_store.machine_has_access(generator_m2, "my_shared_secret")
assert m1_sops_store.machine_has_access(
generator_m1, "my_shared_secret", "machine1"
)
assert m2_sops_store.machine_has_access(
generator_m2, "my_shared_secret", "machine2"
)


@pytest.mark.with_core
@@ -499,6 +501,7 @@ def test_generate_secret_var_password_store(
cli.run(["vars", "generate", "--flake", str(flake.path), "my_machine"])
assert check_vars(machine.name, machine.flake)
store = password_store.SecretStore(flake=flake_obj)
store.init_pass_command(machine="my_machine")
my_generator = Generator(
"my_generator",
share=False,
@@ -744,6 +747,74 @@ def test_shared_vars_must_never_depend_on_machine_specific_vars(
cli.run(["vars", "generate", "--flake", str(flake.path), "my_machine"])


@pytest.mark.with_core
def test_shared_vars_regeneration(
monkeypatch: pytest.MonkeyPatch,
flake_with_sops: ClanFlake,
) -> None:
"""Ensure that is a shared generator gets generated on one machine, dependents of that
|
||||
shared generator on other machines get re-generated as well.
|
||||
"""
flake = flake_with_sops

machine1_config = flake.machines["machine1"]
machine1_config["nixpkgs"]["hostPlatform"] = "x86_64-linux"
shared_generator = machine1_config["clan"]["core"]["vars"]["generators"][
"shared_generator"
]
shared_generator["share"] = True
shared_generator["files"]["my_value"]["secret"] = False
shared_generator["script"] = 'echo "$RANDOM" > "$out"/my_value'
child_generator = machine1_config["clan"]["core"]["vars"]["generators"][
"child_generator"
]
child_generator["share"] = False
child_generator["files"]["my_value"]["secret"] = False
child_generator["dependencies"] = ["shared_generator"]
child_generator["script"] = 'cat "$in"/shared_generator/my_value > "$out"/my_value'
# machine 2 is equivalent to machine 1
flake.machines["machine2"] = machine1_config
flake.refresh()
monkeypatch.chdir(flake.path)
machine1 = Machine(name="machine1", flake=Flake(str(flake.path)))
machine2 = Machine(name="machine2", flake=Flake(str(flake.path)))
in_repo_store_1 = in_repo.FactStore(machine1.flake)
in_repo_store_2 = in_repo.FactStore(machine2.flake)
# Create generators with machine context for testing
child_gen_m1 = Generator(
"child_generator", share=False, machine="machine1", _flake=machine1.flake
)
child_gen_m2 = Generator(
"child_generator", share=False, machine="machine2", _flake=machine2.flake
)
# generate for machine 1
cli.run(["vars", "generate", "--flake", str(flake.path), "machine1"])
# generate for machine 2
cli.run(["vars", "generate", "--flake", str(flake.path), "machine2"])
# child value should be the same on both machines
assert in_repo_store_1.get(child_gen_m1, "my_value") == in_repo_store_2.get(
child_gen_m2, "my_value"
), "Child values should be the same after initial generation"

# regenerate on all machines
cli.run(
["vars", "generate", "--flake", str(flake.path), "--regenerate"],
)
# ensure child value after --regenerate is the same on both machines
assert in_repo_store_1.get(child_gen_m1, "my_value") == in_repo_store_2.get(
child_gen_m2, "my_value"
), "Child values should be the same after regenerating all machines"

# regenerate for machine 1
cli.run(
["vars", "generate", "--flake", str(flake.path), "machine1", "--regenerate"]
)
# ensure child value after --regenerate is the same on both machines
assert in_repo_store_1.get(child_gen_m1, "my_value") == in_repo_store_2.get(
child_gen_m2, "my_value"
), "Child values should be the same after regenerating machine1"


@pytest.mark.with_core
def test_multi_machine_shared_vars(
monkeypatch: pytest.MonkeyPatch,
@@ -816,8 +887,8 @@ def test_multi_machine_shared_vars(
assert new_value_1 != m1_value
# ensure that both machines still have access to the same secret
assert new_secret_1 == new_secret_2
assert sops_store_1.machine_has_access(generator_m1, "my_secret")
assert sops_store_2.machine_has_access(generator_m2, "my_secret")
assert sops_store_1.machine_has_access(generator_m1, "my_secret", "machine1")
assert sops_store_2.machine_has_access(generator_m2, "my_secret", "machine2")


@pytest.mark.with_core
@@ -42,11 -42,7 @@ class StoreBase(ABC):
"""Get machine name from generator, asserting it's not None for now."""
if generator.machine is None:
if generator.share:
# Shared generators don't need a machine for most operations
# but some operations (like SOPS key management) might still need one
# This is a temporary workaround - we should handle this better
msg = f"Shared generator '{generator.name}' requires a machine context for this operation"
raise ClanError(msg)
return "__shared"
msg = f"Generator '{generator.name}' has no machine associated"
raise ClanError(msg)
return generator.machine
@@ -62,6 +58,7 @@ class StoreBase(ABC):
generator: "Generator",
var: "Var",
value: bytes,
machine: str,
) -> Path | None:
"""Override this method to implement the actual creation of the file"""

@@ -140,16 +137,20 @@ class StoreBase(ABC):
generator: "Generator",
var: "Var",
value: bytes,
machine: str,
is_migration: bool = False,
) -> list[Path]:
changed_files: list[Path] = []

# if generator was switched from shared to per-machine or vice versa,
# remove the old var first
if self.exists(
gen := dataclasses.replace(generator, share=not generator.share), var.name
):
changed_files += self.delete(gen, var.name)
prev_generator = dataclasses.replace(
generator,
share=not generator.share,
machine=machine if generator.share else None,
)
if self.exists(prev_generator, var.name):
changed_files += self.delete(prev_generator, var.name)

if self.exists(generator, var.name):
if self.is_secret_store:
@@ -161,7 +162,7 @@ class StoreBase(ABC):
else:
old_val = None
old_val_str = "<not set>"
new_file = self._set(generator, var, value)
new_file = self._set(generator, var, value, machine)
action_str = "Migrated" if is_migration else "Updated"
log_info: Callable
if generator.machine is None:
@@ -169,8 +170,8 @@ class StoreBase(ABC):
else:
from clan_lib.machines.machines import Machine # noqa: PLC0415

machine = Machine(name=generator.machine, flake=self.flake)
log_info = machine.info
machine_obj = Machine(name=generator.machine, flake=self.flake)
log_info = machine_obj.info
if self.is_secret_store:
log.info(f"{action_str} secret var {generator.name}/{var.name}\n")
elif value != old_val:
@@ -2,9 +2,9 @@ import logging
import os
import shutil
import sys
from collections.abc import Iterable
from contextlib import ExitStack
from dataclasses import dataclass, field
from functools import cached_property
from pathlib import Path
from tempfile import TemporaryDirectory
from typing import TYPE_CHECKING
@@ -15,7 +15,6 @@ from clan_lib.errors import ClanError
from clan_lib.git import commit_files
from clan_lib.nix import nix_config, nix_shell, nix_test_store

from .check import check_vars
from .prompt import Prompt, ask
from .var import Var

@@ -60,9 +59,12 @@ class Generator:
dependencies: list[GeneratorKey] = field(default_factory=list)

migrate_fact: str | None = None
validation_hash: str | None = None

machine: str | None = None
_flake: "Flake | None" = None
_public_store: "StoreBase | None" = None
_secret_store: "StoreBase | None" = None

@property
def key(self) -> GeneratorKey:
@@ -71,20 +73,28 @@ class Generator:
def __hash__(self) -> int:
return hash(self.key)

@cached_property
@property
def exists(self) -> bool:
if self.machine is None:
msg = "Machine cannot be None"
"""Check if all files for this generator exist in their respective stores."""
if self._public_store is None or self._secret_store is None:
msg = "Stores must be set to check existence"
raise ClanError(msg)
if self._flake is None:
msg = "Flake cannot be None"
raise ClanError(msg)
return check_vars(self.machine, self._flake, generator_name=self.name)

# Check if all files exist
for file in self.files:
store = self._secret_store if file.secret else self._public_store
if not store.exists(self, file.name):
return False

# Also check if validation hashes are up to date
return self._secret_store.hash_is_valid(
self
) and self._public_store.hash_is_valid(self)

@classmethod
def get_machine_generators(
cls: type["Generator"],
machine_names: list[str],
machine_names: Iterable[str],
flake: "Flake",
include_previous_values: bool = False,
) -> list["Generator"]:
@@ -102,7 +112,7 @@ class Generator:
config = nix_config()
system = config["system"]

generators_selector = "config.clan.core.vars.generators.*.{share,dependencies,migrateFact,prompts}"
generators_selector = "config.clan.core.vars.generators.*.{share,dependencies,migrateFact,prompts,validationHash}"
files_selector = "config.clan.core.vars.generators.*.files.*.{secret,deploy,owner,group,mode,neededFor}"

# precache all machines generators and files to avoid multiple calls to nix
@@ -123,7 +133,7 @@ class Generator:
generators_selector,
)
if not generators_data:
return []
continue

# Get all file metadata in one select
files_data = flake.select_machine(
@@ -162,18 +172,30 @@ class Generator:
Prompt.from_nix(p) for p in gen_data.get("prompts", {}).values()
]

share = gen_data["share"]

generator = cls(
name=gen_name,
share=gen_data["share"],
share=share,
files=files,
dependencies=[
GeneratorKey(machine=machine_name, name=dep)
GeneratorKey(
machine=None
if generators_data[dep]["share"]
else machine_name,
name=dep,
)
for dep in gen_data["dependencies"]
],
migrate_fact=gen_data.get("migrateFact"),
validation_hash=gen_data.get("validationHash"),
prompts=prompts,
machine=machine_name,
# only set machine for machine-specific generators
# this is essential for the graph algorithms to work correctly
machine=None if share else machine_name,
_flake=flake,
_public_store=pub_store,
_secret_store=sec_store,
)
generators.append(generator)

@@ -204,14 +226,10 @@ class Generator:
return sec_store.get(self, prompt.name).decode()
return None

def final_script(self) -> Path:
if self.machine is None:
msg = "Machine cannot be None"
raise ClanError(msg)
def final_script(self, machine: "Machine") -> Path:
if self._flake is None:
msg = "Flake cannot be None"
raise ClanError(msg)
machine = Machine(name=self.machine, flake=self._flake)
output = Path(
machine.select(
f'config.clan.core.vars.generators."{self.name}".finalScript',
@@ -222,16 +240,7 @@ class Generator:
return output

def validation(self) -> str | None:
if self.machine is None:
msg = "Machine cannot be None"
raise ClanError(msg)
if self._flake is None:
msg = "Flake cannot be None"
raise ClanError(msg)
machine = Machine(name=self.machine, flake=self._flake)
return machine.select(
f'config.clan.core.vars.generators."{self.name}".validationHash',
)
return self.validation_hash

def decrypt_dependencies(
self,
@@ -254,11 +263,6 @@ class Generator:
result: dict[str, dict[str, bytes]] = {}

for dep_key in set(self.dependencies):
# For now, we only support dependencies from the same machine
if dep_key.machine != machine.name:
msg = f"Cross-machine dependencies are not supported. Generator {self.name} depends on {dep_key.name} from machine {dep_key.machine}"
raise ClanError(msg)

result[dep_key.name] = {}

dep_generator = next(
@@ -390,7 +394,7 @@ class Generator:
value = get_prompt_value(prompt.name)
prompt_file.write_text(value)

final_script = self.final_script()
final_script = self.final_script(machine)

if sys.platform == "linux" and bwrap.bubblewrap_works():
cmd = bubblewrap_cmd(str(final_script), tmpdir)
@@ -430,6 +434,7 @@ class Generator:
self,
file,
secret_file.read_bytes(),
machine.name,
)
secret_changed = True
else:
@@ -437,6 +442,7 @@ class Generator:
self,
file,
secret_file.read_bytes(),
machine.name,
)
public_changed = True
files_to_commit.extend(file_paths)
@@ -1,3 +1,5 @@
from unittest.mock import Mock

from clan_cli.vars.generator import (
Generator,
GeneratorKey,
@@ -9,30 +11,69 @@ def generator_names(generator: list[Generator]) -> list[str]:
return [gen.name for gen in generator]


def generator_keys(generator: list[Generator]) -> set[GeneratorKey]:
return {gen.key for gen in generator}


def create_mock_stores(exists_map: dict[str, bool]) -> tuple[Mock, Mock]:
"""Create mock public and secret stores with specified existence mapping."""
public_store = Mock()
secret_store = Mock()

def mock_exists(generator: Generator, _file_name: str) -> bool:
return exists_map.get(generator.name, False)

def mock_hash_valid(generator: Generator) -> bool:
return exists_map.get(generator.name, False)

public_store.exists.side_effect = mock_exists
secret_store.exists.side_effect = mock_exists
public_store.hash_is_valid.side_effect = mock_hash_valid
secret_store.hash_is_valid.side_effect = mock_hash_valid

return public_store, secret_store


def test_required_generators() -> None:
# Create mock stores
exists_map = {
"gen_1": True,
"gen_2": False,
"gen_2a": False,
"gen_2b": True,
}
public_store, secret_store = create_mock_stores(exists_map)

# Create generators with proper machine context
machine_name = "test_machine"
gen_1 = Generator(name="gen_1", dependencies=[], machine=machine_name)
gen_1 = Generator(
name="gen_1",
dependencies=[],
machine=machine_name,
_public_store=public_store,
_secret_store=secret_store,
)
gen_2 = Generator(
name="gen_2",
dependencies=[gen_1.key],
machine=machine_name,
_public_store=public_store,
_secret_store=secret_store,
)
gen_2a = Generator(
name="gen_2a",
dependencies=[gen_2.key],
machine=machine_name,
_public_store=public_store,
_secret_store=secret_store,
)
gen_2b = Generator(
name="gen_2b",
dependencies=[gen_2.key],
machine=machine_name,
_public_store=public_store,
_secret_store=secret_store,
)

gen_1.exists = True
gen_2.exists = False
gen_2a.exists = False
gen_2b.exists = True
generators: dict[GeneratorKey, Generator] = {
generator.key: generator for generator in [gen_1, gen_2, gen_2a, gen_2b]
}
@@ -67,6 +108,10 @@ def test_required_generators() -> None:


def test_shared_generator_invalidates_multiple_machines_dependents() -> None:
# Create mock stores
exists_map = {"shared_gen": False, "gen_1": True, "gen_2": True}
public_store, secret_store = create_mock_stores(exists_map)

# Create generators with proper machine context
machine_1 = "machine_1"
machine_2 = "machine_2"
@@ -74,35 +119,37 @@ def test_shared_generator_invalidates_multiple_machines_dependents() -> None:
name="shared_gen",
dependencies=[],
machine=None, # Shared generator
_public_store=public_store,
_secret_store=secret_store,
)
gen_1 = Generator(
name="gen_1",
dependencies=[shared_gen.key],
machine=machine_1,
_public_store=public_store,
_secret_store=secret_store,
)
gen_2 = Generator(
name="gen_2",
dependencies=[shared_gen.key],
machine=machine_2,
_public_store=public_store,
_secret_store=secret_store,
)

shared_gen.exists = False
gen_1.exists = True
gen_2.exists = True
generators: dict[GeneratorKey, Generator] = {
generator.key: generator for generator in [shared_gen, gen_1, gen_2]
}

assert generator_names(all_missing_closure(generators.keys(), generators)) == [
"shared_gen",
"gen_1",
"gen_2",
], (
assert generator_keys(all_missing_closure(generators.keys(), generators)) == {
GeneratorKey(name="shared_gen", machine=None),
GeneratorKey(name="gen_1", machine=machine_1),
GeneratorKey(name="gen_2", machine=machine_2),
}, (
"All generators should be included in all_missing_closure due to shared dependency"
)

assert generator_names(requested_closure([shared_gen.key], generators)) == [
"shared_gen",
"gen_1",
"gen_2",
], "All generators should be included in requested_closure due to shared dependency"
assert generator_keys(requested_closure([shared_gen.key], generators)) == {
GeneratorKey(name="shared_gen", machine=None),
GeneratorKey(name="gen_1", machine=machine_1),
GeneratorKey(name="gen_2", machine=machine_2),
}, "All generators should be included in requested_closure due to shared dependency"
@@ -59,13 +59,13 @@ def _migrate_file(
if file.secret:
old_value = machine.secret_facts_store.get(service_name, fact_name)
paths_list = machine.secret_vars_store.set(
generator, file, old_value, is_migration=True
generator, file, old_value, machine.name, is_migration=True
)
paths.extend(paths_list)
else:
old_value = machine.public_facts_store.get(service_name, fact_name)
paths_list = machine.public_vars_store.set(
generator, file, old_value, is_migration=True
generator, file, old_value, machine.name, is_migration=True
)
paths.extend(paths_list)


@@ -27,6 +27,7 @@ class FactStore(StoreBase):
generator: Generator,
var: Var,
value: bytes,
machine: str, # noqa: ARG002
) -> Path | None:
if not self.flake.is_local:
msg = f"Storing var '{var.id}' in a flake is only supported for local flakes: {self.flake}"

@@ -45,8 +45,8 @@ class FactStore(StoreBase):
generator: Generator,
var: Var,
value: bytes,
machine: str,
) -> Path | None:
machine = self.get_machine(generator)
fact_path = self.get_dir(machine) / generator.name / var.name
fact_path.parent.mkdir(parents=True, exist_ok=True)
fact_path.write_bytes(value)

@@ -27,6 +27,7 @@ class SecretStore(StoreBase):
generator: Generator,
var: Var,
value: bytes,
machine: str, # noqa: ARG002
) -> Path | None:
secret_file = self.dir / generator.name / var.name
secret_file.parent.mkdir(parents=True, exist_ok=True)
@@ -21,20 +21,20 @@ class SecretStore(StoreBase):
def is_secret_store(self) -> bool:
return True

def __init__(self, flake: Flake) -> None:
def __init__(self, flake: Flake, pass_cmd: str | None = None) -> None:
super().__init__(flake)
self.entry_prefix = "clan-vars"
self._store_dir: Path | None = None
self._pass_cmd = pass_cmd

@property
def store_name(self) -> str:
return "password_store"

def store_dir(self, machine: str) -> Path:
def store_dir(self) -> Path:
"""Get the password store directory, cached per machine."""
if not self._store_dir:
result = self._run_pass(
machine,
"git",
"rev-parse",
"--show-toplevel",
@@ -46,7 +46,8 @@ class SecretStore(StoreBase):
self._store_dir = Path(result.stdout.strip().decode())
return self._store_dir

def _pass_command(self, machine: str) -> str:
def init_pass_command(self, machine: str) -> None:
"""Initialize the password store command based on the machine's configuration."""
out_path = self.flake.select_machine(
machine,
"config.clan.core.vars.password-store.passPackage.outPath",
@@ -63,7 +64,8 @@ class SecretStore(StoreBase):
if main_program:
binary_path = Path(out_path) / "bin" / main_program
if binary_path.exists():
return str(binary_path)
self._pass_cmd = str(binary_path)
return

# Look for common password store binaries
bin_dir = Path(out_path) / "bin"
@@ -71,27 +73,34 @@ class SecretStore(StoreBase):
for binary in ["pass", "passage"]:
binary_path = bin_dir / binary
if binary_path.exists():
return str(binary_path)
self._pass_cmd = str(binary_path)
return

# If only one binary exists, use it
binaries = [f for f in bin_dir.iterdir() if f.is_file()]
if len(binaries) == 1:
return str(binaries[0])
self._pass_cmd = str(binaries[0])
return

msg = "Could not find password store binary in package"
raise ValueError(msg)

def _pass_command(self) -> str:
if not self._pass_cmd:
msg = "Password store command not initialized. This should be set during SecretStore initialization."
raise ValueError(msg)
return self._pass_cmd

def entry_dir(self, generator: Generator, name: str) -> Path:
return Path(self.entry_prefix) / self.rel_dir(generator, name)

def _run_pass(
self,
machine: str,
*args: str,
input: bytes | None = None, # noqa: A002
check: bool = True,
) -> subprocess.CompletedProcess[bytes]:
cmd = [self._pass_command(machine), *args]
cmd = [self._pass_command(), *args]
# We need bytes support here, so we can not use clan cmd.
# If you change this to run( add bytes support to it first!
# otherwise we mangle binary secrets (which is annoying to debug)
@@ -107,39 +116,35 @@ class SecretStore(StoreBase):
generator: Generator,
var: Var,
value: bytes,
machine: str, # noqa: ARG002
) -> Path | None:
machine = self.get_machine(generator)
pass_call = ["insert", "-m", str(self.entry_dir(generator, var.name))]
self._run_pass(machine, *pass_call, input=value, check=True)
self._run_pass(*pass_call, input=value, check=True)
return None # we manage the files outside of the git repo

def get(self, generator: Generator, name: str) -> bytes:
machine = self.get_machine(generator)
pass_name = str(self.entry_dir(generator, name))
return self._run_pass(machine, "show", pass_name).stdout
return self._run_pass("show", pass_name).stdout

def exists(self, generator: Generator, name: str) -> bool:
machine = self.get_machine(generator)
pass_name = str(self.entry_dir(generator, name))
# Check if the file exists with either .age or .gpg extension
store_dir = self.store_dir(machine)
store_dir = self.store_dir()
age_file = store_dir / f"{pass_name}.age"
gpg_file = store_dir / f"{pass_name}.gpg"
return age_file.exists() or gpg_file.exists()

def delete(self, generator: Generator, name: str) -> Iterable[Path]:
machine = self.get_machine(generator)
pass_name = str(self.entry_dir(generator, name))
self._run_pass(machine, "rm", "--force", pass_name, check=True)
self._run_pass("rm", "--force", pass_name, check=True)
return []

def delete_store(self, machine: str) -> Iterable[Path]:
machine_dir = Path(self.entry_prefix) / "per-machine" / machine
# Check if the directory exists in the password store before trying to delete
result = self._run_pass(machine, "ls", str(machine_dir), check=False)
result = self._run_pass("ls", str(machine_dir), check=False)
if result.returncode == 0:
self._run_pass(
machine,
"rm",
"--force",
"--recursive",
@@ -150,7 +155,6 @@ class SecretStore(StoreBase):

def generate_hash(self, machine: str) -> bytes:
result = self._run_pass(
machine,
"git",
"log",
"-1",
@@ -95,8 +95,9 @@ class SecretStore(StoreBase):
key_dir = sops_users_folder(self.flake.path) / user
return self.key_has_access(key_dir, generator, secret_name)

def machine_has_access(self, generator: Generator, secret_name: str) -> bool:
machine = self.get_machine(generator)
def machine_has_access(
self, generator: Generator, secret_name: str, machine: str
) -> bool:
self.ensure_machine_key(machine)
key_dir = sops_machines_folder(self.flake.path) / machine
return self.key_has_access(key_dir, generator, secret_name)
@@ -156,8 +157,8 @@ class SecretStore(StoreBase):
continue
if file.secret and self.exists(generator, file.name):
if file.deploy:
self.ensure_machine_has_access(generator, file.name)
needs_update, msg = self.needs_fix(generator, file.name)
self.ensure_machine_has_access(generator, file.name, machine)
needs_update, msg = self.needs_fix(generator, file.name, machine)
if needs_update:
outdated.append((generator.name, file.name, msg))
if file_name and not file_found:
@@ -177,8 +178,8 @@ class SecretStore(StoreBase):
generator: Generator,
var: Var,
value: bytes,
machine: str,
) -> Path | None:
machine = self.get_machine(generator)
self.ensure_machine_key(machine)
secret_folder = self.secret_path(generator, var.name)
# create directory if it doesn't exist
@@ -277,9 +278,10 @@ class SecretStore(StoreBase):
secret_folder = self.secret_path(generator, name)
return (secret_folder / "secret").exists()

def ensure_machine_has_access(self, generator: Generator, name: str) -> None:
machine = self.get_machine(generator)
if self.machine_has_access(generator, name):
def ensure_machine_has_access(
self, generator: Generator, name: str, machine: str
) -> None:
if self.machine_has_access(generator, name, machine):
return
secret_folder = self.secret_path(generator, name)
add_secret(
@@ -313,8 +315,9 @@ class SecretStore(StoreBase):

return keys

def needs_fix(self, generator: Generator, name: str) -> tuple[bool, str | None]:
machine = self.get_machine(generator)
def needs_fix(
self, generator: Generator, name: str, machine: str
) -> tuple[bool, str | None]:
secret_path = self.secret_path(generator, name)
current_recipients = sops.get_recipients(secret_path)
wanted_recipients = self.collect_keys_for_secret(machine, secret_path)
@@ -373,9 +376,8 @@ class SecretStore(StoreBase):

age_plugins = load_age_plugins(self.flake)

gen_machine = self.get_machine(generator)
for group in self.flake.select_machine(
gen_machine,
machine,
"config.clan.core.sops.defaultGroups",
):
allow_member(
@@ -32,8 +32,8 @@ class SecretStore(StoreBase):
generator: Generator,
var: Var,
value: bytes,
machine: str,
) -> Path | None:
machine = self.get_machine(generator)
secret_file = self.get_dir(machine) / generator.name / var.name
secret_file.parent.mkdir(parents=True, exist_ok=True)
secret_file.write_bytes(value)

@@ -25,7 +25,7 @@ def set_var(machine: str | Machine, var: str | Var, value: bytes, flake: Flake)
else:
_machine = machine
_var = get_machine_var(_machine, var) if isinstance(var, str) else var
paths = _var.set(value)
paths = _var.set(value, _machine.name)
if paths:
commit_files(
paths,

@@ -52,14 +52,14 @@ class Var:
except UnicodeDecodeError:
return "<binary blob>"

def set(self, value: bytes) -> list[Path]:
def set(self, value: bytes, machine: str) -> list[Path]:
if self._store is None:
msg = "Store cannot be None"
raise ClanError(msg)
if self._generator is None:
msg = "Generator cannot be None"
raise ClanError(msg)
return self._store.set(self._generator, self, value)
return self._store.set(self._generator, self, value, machine)

@property
def exists(self) -> bool:
@@ -1,6 +1,7 @@
import importlib
import logging
import pkgutil
import sys
from collections.abc import Callable
from dataclasses import dataclass
from functools import wraps
@@ -214,6 +215,8 @@ API.register(get_system_file)
for name, func in self._registry.items():
hints = get_type_hints(func)

print("Generating schema for function:", name, file=sys.stderr)

try:
serialized_hints = {
key: type_to_dict(
@@ -236,6 +239,15 @@ API.register(get_system_file)
if ("error" in t["properties"]["status"]["enum"])
)

# TODO: improve error handling in this function
if "oneOf" not in return_type:
msg = (
f"Return type of function '{name}' is not a union type. Expected a union of Success and Error types."
# @DavHau: no idea why exactly this leads to the "oneOf" not being present, but this should help
"Hint: When using dataclasses as return types, ensure they don't contain public fields with non-serializable types"
|
||||
)
|
||||
raise JSchemaTypeError(msg)
|
||||
|
||||
return_type["oneOf"][1] = {"$ref": "#/$defs/error"}
|
||||
|
||||
sig = signature(func)
|
||||
|
||||
@@ -95,9 +95,14 @@ class Machine:

@cached_property
def secret_vars_store(self) -> StoreBase:
from clan_cli.vars.secret_modules import password_store # noqa: PLC0415

secret_module = self.select("config.clan.core.vars.settings.secretModule")
module = importlib.import_module(secret_module)
return module.SecretStore(flake=self.flake)
store = module.SecretStore(flake=self.flake)
if isinstance(store, password_store.SecretStore):
store.init_pass_command(machine=self.name)
return store

@cached_property
def public_vars_store(self) -> StoreBase:
@@ -8,10 +8,13 @@ from clan_cli.vars.migration import check_can_migrate, migrate_files

from clan_lib.api import API
from clan_lib.errors import ClanError
from clan_lib.machines.actions import list_machines
from clan_lib.machines.machines import Machine

log = logging.getLogger(__name__)

debug_condition = False


@API.register
def get_generators(
@@ -32,27 +35,46 @@ def get_generators(
List of generators based on the specified selection and closure mode.

"""
machine_names = [machine.name for machine in machines]
vars_generators = Generator.get_machine_generators(
machine_names,
if not machines:
msg = "At least one machine must be provided"
raise ClanError(msg)

all_machines = list_machines(machines[0].flake).keys()
requested_machines = [machine.name for machine in machines]

all_generators_list = Generator.get_machine_generators(
all_machines,
machines[0].flake,
include_previous_values=include_previous_values,
)
generators = {generator.key: generator for generator in vars_generators}
requested_generators_list = Generator.get_machine_generators(
requested_machines,
machines[0].flake,
include_previous_values=include_previous_values,
)

all_generators = {generator.key: generator for generator in all_generators_list}
requested_generators = {
generator.key: generator for generator in requested_generators_list
}

result_closure = []
if generator_name is None: # all generators selected
if full_closure:
result_closure = graph.requested_closure(generators.keys(), generators)
result_closure = graph.requested_closure(
requested_generators.keys(), all_generators
)
else:
result_closure = graph.all_missing_closure(generators.keys(), generators)
result_closure = graph.all_missing_closure(
requested_generators.keys(), all_generators
)
# specific generator selected
elif full_closure:
roots = [key for key in generators if key.name == generator_name]
result_closure = requested_closure(roots, generators)
roots = [key for key in requested_generators if key.name == generator_name]
result_closure = requested_closure(roots, all_generators)
else:
roots = [key for key in generators if key.name == generator_name]
result_closure = graph.all_missing_closure(roots, generators)
roots = [key for key in requested_generators if key.name == generator_name]
result_closure = graph.all_missing_closure(roots, all_generators)

return result_closure

@@ -123,6 +145,9 @@ def run_generators(
executing the generator.

"""
if not machines:
msg = "At least one machine must be provided"
raise ClanError(msg)
if isinstance(generators, list):
# List of generator names - use them exactly as provided
if len(generators) == 0:
@@ -143,23 +168,23 @@ def run_generators(
prompt_values = {
generator.name: prompt_values(generator) for generator in generator_objects
}

# execute health check
for machine in machines:
_ensure_healthy(machine=machine)

# execute generators
for generator in generator_objects:
generator_machines = (
machines
machine = (
machines[0]
if generator.machine is None
else [Machine(name=generator.machine, flake=machines[0].flake)]
else Machine(name=generator.machine, flake=machines[0].flake)
)
for machine in generator_machines:
if check_can_migrate(machine, generator):
migrate_files(machine, generator)
else:
generator.execute(
machine=machine,
prompt_values=prompt_values.get(generator.name, {}),
no_sandbox=no_sandbox,
)
if check_can_migrate(machine, generator):
migrate_files(machine, generator)
else:
generator.execute(
machine=machine,
prompt_values=prompt_values.get(generator.name, {}),
no_sandbox=no_sandbox,
)
@@ -24,6 +24,9 @@
let
pyDeps = ps: [
ps.argcomplete # Enables shell completions

# uncomment web-pdb for debugging:
# (pkgs.callPackage ./python-deps.nix {}).web-pdb
];
devDeps = ps: [
ps.ipython