vars: refactor - remove create_machine_vars_interactive in favor of run_generators

The motivation is to create one powerful entrypoint shared by the GUI and the CLI, so that we do not have to maintain multiple separate code paths.

As a next step, generate_vars can probably also be removed.
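Roughly, call sites move from the removed helper to the shared entrypoint like this (a before/after sketch distilled from the test changes below; names come from clan_cli.vars.generate and `flake` stands for an already constructed Flake):

    # Before: interactive helper, one generator name plus a regenerate flag
    create_machine_vars_interactive(
        Machine(name="my_machine", flake=flake),
        "my_generator",
        regenerate=False,
    )

    # After: one entrypoint for CLI and GUI; prompts are asked interactively by default
    run_generators(
        Machine(name="my_machine", flake=flake),
        generators=GeneratorKey(machine="my_machine", name="my_generator"),
    )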
DavHau
2025-08-18 18:00:19 +07:00
parent 43febe5f33
commit ee87f20471
4 changed files with 85 additions and 81 deletions


@@ -550,7 +550,7 @@ const InstallSummary = () => {
   }
   const runGenerators = client.fetch("run_generators", {
-    all_prompt_values: store.install.promptValues,
+    prompt_values: store.install.promptValues,
     machine: {
       name: store.install.machineName,
       flake: {


@@ -11,7 +11,6 @@ from clan_cli.vars.check import check_vars
 from clan_cli.vars.generate import (
     Generator,
     GeneratorKey,
-    create_machine_vars_interactive,
     get_generators,
     run_generators,
 )
@@ -700,8 +699,8 @@ def test_api_set_prompts(
     run_generators(
         machine=Machine(name="my_machine", flake=Flake(str(flake.path))),
-        generators=["my_generator"],
-        all_prompt_values={
+        generators=[GeneratorKey(machine="my_machine", name="my_generator")],
+        prompt_values={
             "my_generator": {
                 "prompt1": "input1",
             }
@@ -714,8 +713,8 @@ def test_api_set_prompts(
     assert store.get(my_generator, "prompt1").decode() == "input1"
     run_generators(
         machine=Machine(name="my_machine", flake=Flake(str(flake.path))),
-        generators=["my_generator"],
-        all_prompt_values={
+        generators=[GeneratorKey(machine="my_machine", name="my_generator")],
+        prompt_values={
             "my_generator": {
                 "prompt1": "input2",
             }
@@ -757,14 +756,11 @@ def test_stdout_of_generate(
     flake_.refresh()
     monkeypatch.chdir(flake_.path)
     flake = Flake(str(flake_.path))
-    from clan_cli.vars.generate import create_machine_vars_interactive
-
     # with capture_output as output:
     with caplog.at_level(logging.INFO):
-        create_machine_vars_interactive(
+        run_generators(
             Machine(name="my_machine", flake=flake),
-            "my_generator",
-            regenerate=False,
+            generators=[GeneratorKey(machine="my_machine", name="my_generator")],
         )
     assert "Updated var my_generator/my_value" in caplog.text
@@ -774,10 +770,9 @@ def test_stdout_of_generate(
     set_var("my_machine", "my_generator/my_value", b"world", flake)
     with caplog.at_level(logging.INFO):
-        create_machine_vars_interactive(
+        run_generators(
             Machine(name="my_machine", flake=flake),
-            "my_generator",
-            regenerate=True,
+            generators=[GeneratorKey(machine="my_machine", name="my_generator")],
         )
     assert "Updated var my_generator/my_value" in caplog.text
     assert "old: world" in caplog.text
@@ -785,19 +780,17 @@ def test_stdout_of_generate(
     caplog.clear()
     # check the output when nothing gets regenerated
     with caplog.at_level(logging.INFO):
-        create_machine_vars_interactive(
+        run_generators(
             Machine(name="my_machine", flake=flake),
-            "my_generator",
-            regenerate=True,
+            generators=[GeneratorKey(machine="my_machine", name="my_generator")],
         )
     assert "Updated var" not in caplog.text
     assert "hello" in caplog.text
     caplog.clear()
     with caplog.at_level(logging.INFO):
-        create_machine_vars_interactive(
+        run_generators(
             Machine(name="my_machine", flake=flake),
-            "my_secret_generator",
-            regenerate=False,
+            generators=[GeneratorKey(machine="my_machine", name="my_secret_generator")],
         )
     assert "Updated secret var my_secret_generator/my_secret" in caplog.text
     assert "hello" not in caplog.text
@@ -809,10 +802,9 @@ def test_stdout_of_generate(
         Flake(str(flake.path)),
     )
     with caplog.at_level(logging.INFO):
-        create_machine_vars_interactive(
+        run_generators(
             Machine(name="my_machine", flake=flake),
-            "my_secret_generator",
-            regenerate=True,
+            generators=[GeneratorKey(machine="my_machine", name="my_secret_generator")],
         )
     assert "Updated secret var my_secret_generator/my_secret" in caplog.text
     assert "world" not in caplog.text
@@ -899,10 +891,9 @@ def test_fails_when_files_are_left_from_other_backend(
     flake.refresh()
     monkeypatch.chdir(flake.path)
     for generator in ["my_secret_generator", "my_value_generator"]:
-        create_machine_vars_interactive(
+        run_generators(
             Machine(name="my_machine", flake=Flake(str(flake.path))),
-            generator,
-            regenerate=False,
+            generators=GeneratorKey(machine="my_machine", name=generator),
         )
     # Will raise. It was secret before, but now it's not.
     my_secret_generator["files"]["my_secret"]["secret"] = (
@@ -916,16 +907,14 @@ def test_fails_when_files_are_left_from_other_backend(
         # This should raise an error
         if generator == "my_secret_generator":
             with pytest.raises(ClanError):
-                create_machine_vars_interactive(
+                run_generators(
                     Machine(name="my_machine", flake=Flake(str(flake.path))),
-                    generator,
-                    regenerate=False,
+                    generators=GeneratorKey(machine="my_machine", name=generator),
                 )
         else:
-            create_machine_vars_interactive(
+            run_generators(
                 Machine(name="my_machine", flake=Flake(str(flake.path))),
-                generator,
-                regenerate=False,
+                generators=GeneratorKey(machine="my_machine", name=generator),
             )


@@ -3,9 +3,11 @@ import logging
 import os
 import shutil
 import sys
+from collections.abc import Callable
 from contextlib import ExitStack
 from pathlib import Path
 from tempfile import TemporaryDirectory
+from typing import Literal

 from clan_cli.completions import (
     add_dynamic_completer,
@@ -333,7 +335,7 @@ def _ensure_healthy(
 def _generate_vars_for_machine(
     machine: "Machine",
     generators: list[Generator],
-    all_prompt_values: dict[str, dict[str, str]],
+    prompt_values: dict[str, dict[str, str]],
     no_sandbox: bool = False,
 ) -> None:
     _ensure_healthy(machine=machine, generators=generators)
@@ -346,68 +348,76 @@ def _generate_vars_for_machine(
             generator=generator,
             secret_vars_store=machine.secret_vars_store,
             public_vars_store=machine.public_vars_store,
-            prompt_values=all_prompt_values.get(generator.name, {}),
+            prompt_values=prompt_values.get(generator.name, {}),
             no_sandbox=no_sandbox,
         )


+PromptFunc = Callable[[Generator], dict[str, str]]
+"""Type for a function that collects prompt values for a generator.
+
+The function receives a Generator and should return a dictionary mapping
+prompt names to their values. This allows for custom prompt collection
+strategies (e.g., interactive CLI, GUI, or programmatic).
+"""
+
+
 @API.register
 def run_generators(
     machine: Machine,
-    all_prompt_values: dict[str, dict[str, str]],
-    generators: list[str] | None = None,
+    generators: GeneratorKey
+    | list[GeneratorKey]
+    | Literal["all", "minimal"] = "minimal",
+    prompt_values: dict[str, dict[str, str]] | PromptFunc = _ask_prompts,
     no_sandbox: bool = False,
 ) -> None:
     """Run the specified generators for a machine.

     Args:
-        machine_name (str): The name of the machine.
-        generators (list[str]): The list of generator names to run.
-        all_prompt_values (dict[str, dict[str, str]]): A dictionary mapping generator names
-            to their prompt values.
-        base_dir (Path): The base directory of the flake.
-        no_sandbox (bool): Whether to disable sandboxing when executing the generator.
-
-    Returns:
-        bool: True if any variables were generated, False otherwise.
+        machine: The machine to run generators for.
+        generators: Can be:
+            - GeneratorKey: Single generator to run (ensuring dependencies are met)
+            - list[GeneratorKey]: Specific generators to run exactly as provided.
+              Dependency generators are not added automatically in this case.
+              The caller must ensure that all dependencies are included.
+            - "all": Run all generators (full closure)
+            - "minimal": Run only missing generators (minimal closure) (default)
+        prompt_values: A dictionary mapping generator names to their prompt values,
+            or a function that returns prompt values for a generator.
+        no_sandbox: Whether to disable sandboxing when executing the generator.

     Raises:
         ClanError: If the machine or generator is not found, or if there are issues with
            executing the generator.

     """
-    if not generators:
-        generator_objects = Generator.get_machine_generators(
-            machine.name, machine.flake
+    if generators == "all":
+        generator_objects = get_generators(machine, full_closure=True)
+    elif generators == "minimal":
+        generator_objects = get_generators(machine, full_closure=False)
+    elif isinstance(generators, GeneratorKey):
+        # Single generator - compute minimal closure for it
+        generator_objects = get_generators(
+            machine, full_closure=False, generator_name=generators.name
         )
+    elif isinstance(generators, list):
+        if len(generators) == 0:
+            return
+        generator_keys = set(generators)
+        all_generators = get_generators(machine, full_closure=True)
+        generator_objects = [g for g in all_generators if g.key in generator_keys]
     else:
-        generators_set = set(generators)
-        generator_objects = [
-            g
-            for g in Generator.get_machine_generators(machine.name, machine.flake)
-            if g.name in generators_set
-        ]
+        msg = f"Invalid generators argument: {generators}. Must be 'all', 'minimal', GeneratorKey, or a list of GeneratorKey"
+        raise ValueError(msg)
+
+    # If prompt function provided, ask all prompts
+    # TODO: make this more lazy and ask for every generator on execution
+    if callable(prompt_values):
+        prompt_values = {
+            generator.name: prompt_values(generator) for generator in generator_objects
+        }
+
     _generate_vars_for_machine(
         machine=machine,
         generators=generator_objects,
-        all_prompt_values=all_prompt_values,
-        no_sandbox=no_sandbox,
-    )
-
-
-def create_machine_vars_interactive(
-    machine: "Machine",
-    generator_name: str | None,
-    regenerate: bool,
-    no_sandbox: bool = False,
-) -> None:
-    generators = get_generators(machine, regenerate, generator_name)
-    if len(generators) == 0:
-        return
-    all_prompt_values = {}
-    for generator in generators:
-        all_prompt_values[generator.name] = _ask_prompts(generator)
-    _generate_vars_for_machine(
-        machine,
-        generators,
-        all_prompt_values,
+        prompt_values=prompt_values,
         no_sandbox=no_sandbox,
     )
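Put together, the accepted forms of the new generators argument look roughly like this (an illustrative sketch, not part of the diff; machine is a Machine instance and the generator names are taken from the tests in this commit):

    run_generators(machine)                    # "minimal": only generators with missing vars (default)
    run_generators(machine, generators="all")  # full closure, regenerate everything
    # Single generator: its minimal closure is computed for it
    run_generators(machine, generators=GeneratorKey(machine="my_machine", name="my_generator"))
    # Explicit list: run exactly these; the caller supplies prompt values and dependencies
    run_generators(
        machine,
        generators=[GeneratorKey(machine="my_machine", name="my_generator")],
        prompt_values={"my_generator": {"prompt1": "input1"}},
    )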
@@ -421,10 +431,15 @@ def generate_vars(
     for machine in machines:
         errors = []
         try:
-            create_machine_vars_interactive(
+            generators: GeneratorKey | Literal["all", "minimal"]
+            if generator_name:
+                generators = GeneratorKey(machine=machine.name, name=generator_name)
+            else:
+                generators = "all" if regenerate else "minimal"
+            run_generators(
                 machine,
-                generator_name,
-                regenerate,
+                generators=generators,
                 no_sandbox=no_sandbox,
             )
             machine.info("All vars are up to date")
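Because prompt_values also accepts a PromptFunc, a frontend can plug in its own prompt collection instead of the interactive _ask_prompts default. A minimal sketch, assuming prompts expose a name attribute and that ui_values was gathered elsewhere (both are assumptions for illustration, not part of this commit):

    ui_values = {"my_generator": {"prompt1": "input1"}}  # hypothetical, e.g. collected by a GUI form

    def collect_from_form(generator: Generator) -> dict[str, str]:
        # Assumed attribute: prompt.name; return values for this generator's prompts only
        return {p.name: ui_values.get(generator.name, {}).get(p.name, "") for p in generator.prompts}

    run_generators(machine, generators="minimal", prompt_values=collect_from_form)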


@@ -218,7 +218,7 @@ def test_clan_create_api(
     clan_dir_flake.invalidate_cache()
     generators = get_generators(machine=machine, full_closure=True)
-    all_prompt_values = {}
+    collected_prompt_values = {}
     for generator in generators:
         prompt_values = {}
         for prompt in generator.prompts:
@@ -228,12 +228,12 @@ def test_clan_create_api(
             else:
                 msg = f"Prompt {var_id} not handled in test, please fix it"
                 raise ClanError(msg)
-        all_prompt_values[generator.name] = prompt_values
+        collected_prompt_values[generator.name] = prompt_values
     run_generators(
         machine=machine,
-        generators=[gen.name for gen in generators],
-        all_prompt_values=all_prompt_values,
+        generators=[gen.key for gen in generators],
+        prompt_values=collected_prompt_values,
     )
     clan_dir_flake.invalidate_cache()