Merge remote-tracking branch 'origin/main' into rework-installation

Jörg Thalheim
2024-07-24 21:09:23 +02:00
70 changed files with 986 additions and 667 deletions

View File

@@ -2,9 +2,8 @@ import argparse
import importlib
import logging
import os
import subprocess
import sys
from collections.abc import Callable
from getpass import getpass
from graphlib import TopologicalSorter
from pathlib import Path
from tempfile import TemporaryDirectory
@@ -29,17 +28,7 @@ from .secret_modules import SecretStoreBase
log = logging.getLogger(__name__)
def read_multiline_input(prompt: str = "Finish with Ctrl-D") -> str:
"""
Read multi-line input from stdin.
"""
print(prompt, flush=True)
proc = subprocess.run(["cat"], stdout=subprocess.PIPE, text=True)
log.info("Input received. Processing...")
return proc.stdout
def bubblewrap_cmd(generator: str, generator_dir: Path, dep_tmpdir: Path) -> list[str]:
def bubblewrap_cmd(generator: str, tmpdir: Path) -> list[str]:
# fmt: off
return nix_shell(
[
@@ -51,8 +40,7 @@ def bubblewrap_cmd(generator: str, generator_dir: Path, dep_tmpdir: Path) -> list[str]:
"--ro-bind", "/nix/store", "/nix/store",
"--tmpfs", "/usr/lib/systemd",
"--dev", "/dev",
"--bind", str(generator_dir), str(generator_dir),
"--ro-bind", str(dep_tmpdir), str(dep_tmpdir),
"--bind", str(tmpdir), str(tmpdir),
"--unshare-all",
"--unshare-user",
"--uid", "1000",
@@ -92,7 +80,7 @@ def decrypt_dependencies(
def dependencies_as_dir(
decrypted_dependencies: dict[str, dict[str, bytes]],
tmpdir: Path,
) -> Path:
) -> None:
for dep_generator, files in decrypted_dependencies.items():
dep_generator_dir = tmpdir / dep_generator
dep_generator_dir.mkdir()
@@ -102,7 +90,6 @@ def dependencies_as_dir(
file_path.touch()
file_path.chmod(0o600)
file_path.write_bytes(file)
return tmpdir
def execute_generator(
@@ -111,10 +98,7 @@ def execute_generator(
regenerate: bool,
secret_vars_store: SecretStoreBase,
public_vars_store: FactStoreBase,
dep_tmpdir: Path,
prompt: Callable[[str], str],
) -> bool:
generator_dir = dep_tmpdir / generator_name
# check if all secrets exist and generate them if at least one is missing
needs_regeneration = not check_secrets(machine, generator_name=generator_name)
log.debug(f"{generator_name} needs_regeneration: {needs_regeneration}")
@@ -124,51 +108,65 @@ def execute_generator(
msg = f"flake is not a Path: {machine.flake}"
msg += "fact/secret generation is only supported for local flakes"
# compatibility for old outputs.nix users
generator = machine.vars_generators[generator_name]["finalScript"]
# if machine.vars_data[generator_name]["generator"]["prompt"]:
# prompt_value = prompt(machine.vars_data[generator_name]["generator"]["prompt"])
# env["prompt_value"] = prompt_value
# build temporary file tree of dependencies
decrypted_dependencies = decrypt_dependencies(
machine, generator_name, secret_vars_store, public_vars_store
)
env = os.environ.copy()
generator_dir.mkdir(parents=True)
env["out"] = str(generator_dir)
with TemporaryDirectory() as tmp:
dep_tmpdir = dependencies_as_dir(decrypted_dependencies, Path(tmp))
env["in"] = str(dep_tmpdir)
tmpdir = Path(tmp)
tmpdir_in = tmpdir / "in"
tmpdir_prompts = tmpdir / "prompts"
tmpdir_out = tmpdir / "out"
tmpdir_in.mkdir()
tmpdir_out.mkdir()
env["in"] = str(tmpdir_in)
env["out"] = str(tmpdir_out)
# populate dependency inputs
dependencies_as_dir(decrypted_dependencies, tmpdir_in)
# populate prompted values
# TODO: make prompts rest API friendly
if machine.vars_generators[generator_name]["prompts"]:
tmpdir_prompts.mkdir()
env["prompts"] = str(tmpdir_prompts)
for prompt_name, prompt in machine.vars_generators[generator_name][
"prompts"
].items():
prompt_file = tmpdir_prompts / prompt_name
value = prompt_func(prompt["description"], prompt["type"])
prompt_file.write_text(value)
if sys.platform == "linux":
cmd = bubblewrap_cmd(generator, generator_dir, dep_tmpdir=dep_tmpdir)
cmd = bubblewrap_cmd(generator, tmpdir)
else:
cmd = ["bash", "-c", generator]
run(
cmd,
env=env,
)
files_to_commit = []
# store secrets
files = machine.vars_generators[generator_name]["files"]
for file_name, file in files.items():
groups = machine.deployment["sops"]["defaultGroups"]
files_to_commit = []
# store secrets
files = machine.vars_generators[generator_name]["files"]
for file_name, file in files.items():
groups = machine.deployment["sops"]["defaultGroups"]
secret_file = generator_dir / file_name
if not secret_file.is_file():
msg = f"did not generate a file for '{file_name}' when running the following command:\n"
msg += generator
raise ClanError(msg)
if file["secret"]:
file_path = secret_vars_store.set(
generator_name, file_name, secret_file.read_bytes(), groups
)
else:
file_path = public_vars_store.set(
generator_name, file_name, secret_file.read_bytes()
)
if file_path:
files_to_commit.append(file_path)
secret_file = tmpdir_out / file_name
if not secret_file.is_file():
msg = f"did not generate a file for '{file_name}' when running the following command:\n"
msg += generator
raise ClanError(msg)
if file["secret"]:
file_path = secret_vars_store.set(
generator_name, file_name, secret_file.read_bytes(), groups
)
else:
file_path = public_vars_store.set(
generator_name, file_name, secret_file.read_bytes()
)
if file_path:
files_to_commit.append(file_path)
commit_files(
files_to_commit,
machine.flake_dir,
@@ -177,9 +175,18 @@ def execute_generator(
return True
def prompt_func(text: str) -> str:
print(f"{text}: ")
return read_multiline_input()
def prompt_func(description: str, input_type: str) -> str:
if input_type == "line":
result = input(f"Enter the value for {description}: ")
elif input_type == "multiline":
print(f"Enter the value for {description} (Finish with Ctrl-D): ")
result = sys.stdin.read()
elif input_type == "hidden":
result = getpass(f"Enter the value for {description} (hidden): ")
else:
raise ClanError(f"Unknown input type: {input_type} for prompt {description}")
log.info("Input received. Processing...")
return result
def _get_subgraph(graph: dict[str, set], vertex: str) -> dict[str, set]:
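
The rewritten prompt_func replaces the old cat-based multi-line reader with three explicit input types. A minimal usage sketch, assuming the prompt_func defined above is in scope; the descriptions are hypothetical:

# "line" reads a single line, "multiline" reads stdin until Ctrl-D,
# and "hidden" uses getpass so the value is not echoed back.
user = prompt_func("admin user name", "line")
banner = prompt_func("login banner text", "multiline")
password = prompt_func("admin password", "hidden")
# Any other input_type raises ClanError("Unknown input type: ...").
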
@@ -197,11 +204,7 @@ def _generate_vars_for_machine(
machine: Machine,
generator_name: str | None,
regenerate: bool,
tmpdir: Path,
prompt: Callable[[str], str] = prompt_func,
) -> bool:
local_temp = tmpdir / machine.name
local_temp.mkdir()
secret_vars_module = importlib.import_module(machine.secret_vars_module)
secret_vars_store = secret_vars_module.SecretStore(machine=machine)
@@ -216,13 +219,6 @@ def _generate_vars_for_machine(
f"Could not find generator with name: {generator_name}. The following generators are available: {generators}"
)
# if generator_name:
# machine_generator_facts = {
# generator_name: machine.vars_generators[generator_name]
# }
# else:
# machine_generator_facts = machine.vars_generators
graph = {
gen_name: set(generator["dependencies"])
for gen_name, generator in machine.vars_generators.items()
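
The order in which generators run is derived from this dependency graph via graphlib.TopologicalSorter, imported at the top of the file. A minimal sketch of that ordering pattern, with hypothetical generator names:

from graphlib import TopologicalSorter

# Each generator maps to the set of generators whose outputs it needs.
graph = {
    "root-password": set(),
    "wifi": set(),
    "zerotier": {"root-password"},
}

# static_order() yields dependencies before their dependents,
# so "root-password" comes out before "zerotier".
for gen_name in TopologicalSorter(graph).static_order():
    print(gen_name)
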
@@ -250,8 +246,6 @@ def _generate_vars_for_machine(
regenerate=regenerate,
secret_vars_store=secret_vars_store,
public_vars_store=public_vars_store,
dep_tmpdir=local_temp,
prompt=prompt,
)
if machine_updated:
# flush caches to make sure the new secrets are available in evaluation
@@ -263,25 +257,21 @@ def generate_vars(
machines: list[Machine],
generator_name: str | None,
regenerate: bool,
prompt: Callable[[str], str] = prompt_func,
) -> bool:
was_regenerated = False
with TemporaryDirectory() as tmp:
tmpdir = Path(tmp)
for machine in machines:
errors = 0
try:
was_regenerated |= _generate_vars_for_machine(
machine, generator_name, regenerate, tmpdir, prompt
)
except Exception as exc:
log.error(f"Failed to generate facts for {machine.name}: {exc}")
errors += 1
if errors > 0:
raise ClanError(
f"Failed to generate facts for {errors} hosts. Check the logs above"
)
for machine in machines:
errors = 0
try:
was_regenerated |= _generate_vars_for_machine(
machine, generator_name, regenerate
)
except Exception as exc:
log.error(f"Failed to generate facts for {machine.name}: {exc}")
errors += 1
if errors > 0:
raise ClanError(
f"Failed to generate facts for {errors} hosts. Check the logs above"
)
if not was_regenerated:
print("All secrets and facts are already up to date")

View File

@@ -13,33 +13,45 @@ class SecretStore(SecretStoreBase):
self.machine = machine
def set(
self, service: str, name: str, value: bytes, groups: list[str]
self, generator_name: str, name: str, value: bytes, groups: list[str]
) -> Path | None:
subprocess.run(
nix_shell(
["nixpkgs#pass"],
["pass", "insert", "-m", f"machines/{self.machine.name}/{name}"],
[
"pass",
"insert",
"-m",
f"machines/{self.machine.name}/{generator_name}/{name}",
],
),
input=value,
check=True,
)
return None # we manage the files outside of the git repo
def get(self, service: str, name: str) -> bytes:
def get(self, generator_name: str, name: str) -> bytes:
return subprocess.run(
nix_shell(
["nixpkgs#pass"],
["pass", "show", f"machines/{self.machine.name}/{name}"],
[
"pass",
"show",
f"machines/{self.machine.name}/{generator_name}/{name}",
],
),
check=True,
stdout=subprocess.PIPE,
).stdout
def exists(self, service: str, name: str) -> bool:
def exists(self, generator_name: str, name: str) -> bool:
password_store = os.environ.get(
"PASSWORD_STORE_DIR", f"{os.environ['HOME']}/.password-store"
)
secret_path = Path(password_store) / f"machines/{self.machine.name}/{name}.gpg"
secret_path = (
Path(password_store)
/ f"machines/{self.machine.name}/{generator_name}/{name}.gpg"
)
return secret_path.exists()
def generate_hash(self) -> bytes:
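
The password-store backend now namespaces entries per generator: a value is stored under machines/<machine>/<generator_name>/<name> instead of machines/<machine>/<name>. A minimal sketch of where the encrypted file ends up on disk, with hypothetical machine, generator and file names:

import os
from pathlib import Path

store = Path(os.environ.get("PASSWORD_STORE_DIR", f"{os.environ['HOME']}/.password-store"))
old_path = store / "machines/wintux/password-hash.gpg"                # layout before this commit
new_path = store / "machines/wintux/root-password/password-hash.gpg"  # layout after this commit
print(new_path)
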

View File

@@ -36,13 +36,20 @@ class SecretStore(SecretStoreBase):
)
add_machine(self.machine.flake_dir, self.machine.name, pub_key, False)
def secret_path(self, generator_name: str, secret_name: str) -> Path:
return (
self.machine.flake_dir
/ "sops"
/ "vars"
/ self.machine.name
/ generator_name
/ secret_name
)
def set(
self, generator_name: str, name: str, value: bytes, groups: list[str]
) -> Path | None:
path = (
sops_secrets_folder(self.machine.flake_dir)
/ f"vars-{self.machine.name}-{generator_name}-{name}"
)
path = self.secret_path(generator_name, name)
encrypt_secret(
self.machine.flake_dir,
path,
@@ -54,19 +61,21 @@ class SecretStore(SecretStoreBase):
def get(self, generator_name: str, name: str) -> bytes:
return decrypt_secret(
self.machine.flake_dir, f"vars-{self.machine.name}-{generator_name}-{name}"
self.machine.flake_dir, self.secret_path(generator_name, name)
).encode("utf-8")
def exists(self, generator_name: str, name: str) -> bool:
return has_secret(
self.machine.flake_dir,
f"vars-{self.machine.name}-{generator_name}-{name}",
self.secret_path(generator_name, name),
)
def upload(self, output_dir: Path) -> None:
key_name = f"{self.machine.name}-age.key"
if not has_secret(self.machine.flake_dir, key_name):
if not has_secret(sops_secrets_folder(self.machine.flake_dir) / key_name):
# skip uploading the secret, not managed by us
return
key = decrypt_secret(self.machine.flake_dir, key_name)
key = decrypt_secret(
self.machine.flake_dir,
sops_secrets_folder(self.machine.flake_dir) / key_name,
)
(output_dir / "key.txt").write_text(key)
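
The sops backend makes the matching change: instead of a flat vars-<machine>-<generator>-<name> entry in the shared secrets folder, each value now lives under sops/vars/<machine>/<generator>/<name> inside the flake, built by the new secret_path helper. A standalone sketch of that helper, with hypothetical names:

from pathlib import Path

def secret_path(flake_dir: Path, machine: str, generator_name: str, secret_name: str) -> Path:
    # Mirrors SecretStore.secret_path from the hunk above.
    return flake_dir / "sops" / "vars" / machine / generator_name / secret_name

print(secret_path(Path("/home/user/my-clan"), "wintux", "root-password", "password-hash"))
# -> /home/user/my-clan/sops/vars/wintux/root-password/password-hash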