Merge pull request 'Init: Autogenerate classes from nix interfaces' (#1778) from hsjobeki/clan-core:hsjobeki-main into main

This commit is contained in:
clan-bot
2024-07-19 07:58:57 +00:00
22 changed files with 559 additions and 271 deletions

.gitignore (vendored)
View File

@@ -14,6 +14,7 @@ nixos.qcow2
 **/*.glade~
 /docs/out
 # dream2nix
 .dream2nix

View File

@@ -2,7 +2,8 @@
 {
   options.clan.single-disk = {
     device = lib.mkOption {
-      type = lib.types.str;
+      default = null;
+      type = lib.types.nullOr lib.types.str;
       description = "The primary disk device to install the system on";
       # Question: should we set a default here?
       # default = "/dev/null";

View File

@@ -26,9 +26,6 @@
     },
     "roles": {
       "default": {
-        "config": {
-          "packages": ["vim"]
-        },
         "imports": [],
         "machines": ["test-inventory-machine"],
         "tags": []

View File

@@ -41,6 +41,7 @@ in
   options = {
     inherit (metaOptions) name description icon;
     tags = lib.mkOption {
       default = [ ];
       apply = lib.unique;
       type = types.listOf types.str;
@@ -49,16 +50,10 @@ in
       default = null;
       type = types.nullOr types.str;
     };
-    deploy = lib.mkOption {
-      default = { };
-      type = types.submodule {
-        options = {
-          targetHost = lib.mkOption {
-            default = null;
-            type = types.nullOr types.str;
-          };
-        };
-      };
+    deploy.targetHost = lib.mkOption {
+      description = "Configuration for the deployment of the machine";
+      default = null;
+      type = types.nullOr types.str;
     };
   };
 }

View File

@@ -55,6 +55,7 @@ let
     inventorySchema.properties.services.additionalProperties.additionalProperties.properties.meta;
   config = {
     title = "${moduleName}-config";
+    default = { };
   } // moduleSchema;
   roles = {
     type = "object";
@@ -69,6 +70,7 @@ let
   {
     properties.config = {
       title = "${moduleName}-config";
+      default = { };
     } // moduleSchema;
   };
 }) (rolesOf moduleName)
@@ -80,6 +82,7 @@ let
   {
     additionalProperties.properties.config = {
       title = "${moduleName}-config";
+      default = { };
     } // moduleSchema;
   };
 };

View File

@@ -318,7 +318,7 @@ rec {
       # return jsonschema property definition for submodule
       # then (lib.attrNames (option.type.getSubOptions option.loc).opt)
       then
-        parseOptions' (option.type.getSubOptions option.loc)
+        example // description // parseOptions' (option.type.getSubOptions option.loc)
       # throw error if option type is not supported
       else
         notSupported option;

View File

@@ -279,6 +279,7 @@ in
   expected = {
     type = "object";
     additionalProperties = false;
+    description = "Test Description";
     properties = {
       opt = {
         type = "boolean";
@@ -303,6 +304,7 @@ in
   expected = {
     type = "object";
     additionalProperties = false;
+    description = "Test Description";
     properties = {
       opt = {
         type = "boolean";

View File

@@ -6,7 +6,7 @@ from pathlib import Path
 from clan_cli.cmd import run_no_stdout
 from clan_cli.errors import ClanCmdError, ClanError
-from clan_cli.inventory import Inventory, Service
+from clan_cli.inventory import Inventory, load_inventory
 from clan_cli.nix import nix_eval
 from . import API
@@ -150,24 +150,6 @@ def get_module_info(
 )
-@API.register
-def update_module_instance(
-    base_path: str, module_name: str, instance_name: str, instance_config: Service
-) -> Inventory:
-    inventory = Inventory.load_file(base_path)
-    module_instances = inventory.services.get(module_name, {})
-    module_instances[instance_name] = instance_config
-    inventory.services[module_name] = module_instances
-    inventory.persist(
-        base_path, f"Updated module instance {module_name}/{instance_name}"
-    )
-    return inventory
 @API.register
 def get_inventory(base_path: str) -> Inventory:
-    return Inventory.load_file(base_path)
+    return load_inventory(base_path)
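Note: with update_module_instance removed, callers now construct the generated service classes directly and persist them with save_inventory. A minimal sketch of the new call pattern, assuming the generated borgbackup classes introduced further down in this PR (the flake path, machine name, and instance name are invented):

from clan_cli.inventory import (
    ServiceBorgbackup,
    ServiceBorgbackupMeta,
    ServiceBorgbackupRole,
    ServiceBorgbackupRoleClient,
    ServiceBorgbackupRoleServer,
    load_inventory,
    save_inventory,
)

# Load the typed inventory (or the default one), attach a service instance,
# then write inventory.json back and commit it.
inventory = load_inventory("/path/to/clan-flake")
inventory.services.borgbackup["borg1"] = ServiceBorgbackup(
    meta=ServiceBorgbackupMeta(name="borg1"),
    roles=ServiceBorgbackupRole(
        client=ServiceBorgbackupRoleClient(machines=["machine1"]),
        server=ServiceBorgbackupRoleServer(machines=["machine1"]),
    ),
)
save_inventory(inventory, "/path/to/clan-flake", "Add borgbackup instance borg1")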

View File

@@ -6,7 +6,7 @@ from pathlib import Path
 from clan_cli.api import API
 from clan_cli.arg_actions import AppendOptionAction
-from clan_cli.inventory import Inventory, InventoryMeta
+from clan_cli.inventory import Meta, load_inventory, save_inventory
 from ..cmd import CmdOut, run
 from ..errors import ClanError
@@ -29,7 +29,7 @@ class CreateOptions:
     directory: Path | str
     # Metadata for the clan
     # Metadata can be shown with `clan show`
-    meta: InventoryMeta | None = None
+    meta: Meta | None = None
     # URL to the template to use. Defaults to the "minimal" template
     template_url: str = minimal_template_url
@@ -84,11 +84,11 @@ def create_clan(options: CreateOptions) -> CreateClanResponse:
     ## End: setup git
     # Write inventory.json file
-    inventory = Inventory.load_file(directory)
+    inventory = load_inventory(directory)
     if options.meta is not None:
         inventory.meta = options.meta
     # Persist creates a commit message for each change
-    inventory.persist(directory, "Init inventory")
+    save_inventory(inventory, directory, "Init inventory")
     command = ["nix", "flake", "update"]
     out = run(command, cwd=directory)
@@ -113,7 +113,7 @@ def register_create_parser(parser: argparse.ArgumentParser) -> None:
     parser.add_argument(
         "--meta",
-        help=f"""Metadata to set for the clan. Available options are: {", ".join([f.name for f in fields(InventoryMeta)]) }""",
+        help=f"""Metadata to set for the clan. Available options are: {", ".join([f.name for f in fields(Meta)]) }""",
         nargs=2,
         metavar=("name", "value"),
         action=AppendOptionAction,
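Note: the Meta rename flows straight into clan creation; a hedged sketch of how create_clan now seeds inventory.json (directory and names invented, import path assumed):

from clan_cli.clans.create import CreateOptions, create_clan  # import path assumed
from clan_cli.inventory import Meta

# Scaffold a clan from the default template and write its metadata to inventory.json.
opts = CreateOptions(
    directory="/tmp/my-clan",
    meta=Meta(name="my-clan", description="Example clan", icon=None),
)
create_clan(opts)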

View File

@@ -6,7 +6,7 @@ from urllib.parse import urlparse
 from clan_cli.api import API
 from clan_cli.errors import ClanCmdError, ClanError
-from clan_cli.inventory import InventoryMeta
+from clan_cli.inventory import Meta
 from ..cmd import run_no_stdout
 from ..nix import nix_eval
@@ -15,7 +15,7 @@ log = logging.getLogger(__name__)
 @API.register
-def show_clan_meta(uri: str | Path) -> InventoryMeta:
+def show_clan_meta(uri: str | Path) -> Meta:
     cmd = nix_eval(
         [
             f"{uri}#clanInternals.inventory.meta",
@@ -61,7 +61,7 @@ def show_clan_meta(uri: str | Path) -> InventoryMeta:
description="Icon path must be a URL or a relative path.", description="Icon path must be a URL or a relative path.",
) )
return InventoryMeta( return Meta(
name=clan_meta.get("name"), name=clan_meta.get("name"),
description=clan_meta.get("description", None), description=clan_meta.get("description", None),
icon=icon_path, icon=icon_path,

View File

@@ -1,20 +1,20 @@
 from dataclasses import dataclass
 from clan_cli.api import API
-from clan_cli.inventory import Inventory, InventoryMeta
+from clan_cli.inventory import Meta, load_inventory, save_inventory
 @dataclass
 class UpdateOptions:
     directory: str
-    meta: InventoryMeta
+    meta: Meta
 @API.register
-def update_clan_meta(options: UpdateOptions) -> InventoryMeta:
-    inventory = Inventory.load_file(options.directory)
+def update_clan_meta(options: UpdateOptions) -> Meta:
+    inventory = load_inventory(options.directory)
     inventory.meta = options.meta
-    inventory.persist(options.directory, "Update clan meta")
+    save_inventory(inventory, options.directory, "Update clan metadata")
     return inventory.meta

View File

@@ -1,16 +1,44 @@
-# ruff: noqa: N815
-# ruff: noqa: N806
+import dataclasses
 import json
-from dataclasses import asdict, dataclass, field, is_dataclass
+from dataclasses import fields, is_dataclass
 from pathlib import Path
-from typing import Any, Literal
+from types import UnionType
+from typing import Any, get_args, get_origin
 from clan_cli.errors import ClanError
 from clan_cli.git import commit_file
+from .classes import (
+    Inventory,
+    Machine,
+    MachineDeploy,
+    Meta,
+    Service,
+    ServiceBorgbackup,
+    ServiceBorgbackupMeta,
+    ServiceBorgbackupRole,
+    ServiceBorgbackupRoleClient,
+    ServiceBorgbackupRoleServer,
+)
+# Re export classes here
+# This allows to rename classes in the generated code
+__all__ = [
+    "Service",
+    "Machine",
+    "Meta",
+    "Inventory",
+    "MachineDeploy",
+    "ServiceBorgbackup",
+    "ServiceBorgbackupMeta",
+    "ServiceBorgbackupRole",
+    "ServiceBorgbackupRoleClient",
+    "ServiceBorgbackupRoleServer",
+]
 def sanitize_string(s: str) -> str:
-    return s.replace("\\", "\\\\").replace('"', '\\"')
+    return s.replace("\\", "\\\\").replace('"', '\\"').replace("\n", "\\n")
 def dataclass_to_dict(obj: Any) -> Any:
@@ -22,8 +50,11 @@ def dataclass_to_dict(obj: Any) -> Any:
""" """
if is_dataclass(obj): if is_dataclass(obj):
return { return {
sanitize_string(k): dataclass_to_dict(v) # Use either the original name or name
for k, v in asdict(obj).items() # type: ignore sanitize_string(
field.metadata.get("original_name", field.name)
): dataclass_to_dict(getattr(obj, field.name))
for field in fields(obj) # type: ignore
} }
elif isinstance(obj, list | tuple): elif isinstance(obj, list | tuple):
return [dataclass_to_dict(item) for item in obj] return [dataclass_to_dict(item) for item in obj]
@@ -37,149 +68,133 @@ def dataclass_to_dict(obj: Any) -> Any:
     return obj
-@dataclass
-class DeploymentInfo:
-    """
-    Deployment information for a machine.
-    """
-    targetHost: str | None = None
-@dataclass
-class Machine:
-    """
-    Inventory machine model.
-    DO NOT EDIT THIS CLASS.
-    Any changes here must be reflected in the inventory interface file and potentially other nix files.
-    - Persisted to the inventory.json file
-    - Source of truth to generate each clan machine.
-    - For hardware deployment, the machine must declare the host system.
-    """
-    name: str
-    deploy: DeploymentInfo = field(default_factory=DeploymentInfo)
-    description: str | None = None
-    icon: str | None = None
-    tags: list[str] = field(default_factory=list)
-    system: Literal["x86_64-linux"] | str | None = None
-    @staticmethod
-    def from_dict(data: dict[str, Any]) -> "Machine":
-        targetHost = data.get("deploy", {}).get("targetHost", None)
-        return Machine(
-            name=data["name"],
-            description=data.get("description", None),
-            icon=data.get("icon", None),
-            tags=data.get("tags", []),
-            system=data.get("system", None),
-            deploy=DeploymentInfo(targetHost),
-        )
-@dataclass
-class MachineServiceConfig:
-    config: dict[str, Any] = field(default_factory=dict)
-    imports: list[str] = field(default_factory=list)
-@dataclass
-class ServiceMeta:
-    name: str
-    description: str | None = None
-    icon: str | None = None
-@dataclass
-class Role:
-    config: dict[str, Any] = field(default_factory=dict)
-    imports: list[str] = field(default_factory=list)
-    machines: list[str] = field(default_factory=list)
-    tags: list[str] = field(default_factory=list)
-@dataclass
-class Service:
-    meta: ServiceMeta
-    roles: dict[str, Role]
-    config: dict[str, Any] = field(default_factory=dict)
-    imports: list[str] = field(default_factory=list)
-    machines: dict[str, MachineServiceConfig] = field(default_factory=dict)
-    @staticmethod
-    def from_dict(d: dict[str, Any]) -> "Service":
-        return Service(
-            meta=ServiceMeta(**d.get("meta", {})),
-            roles={name: Role(**role) for name, role in d.get("roles", {}).items()},
-            machines=(
-                {
-                    name: MachineServiceConfig(**machine)
-                    for name, machine in d.get("machines", {}).items()
-                }
-                if d.get("machines")
-                else {}
-            ),
-            config=d.get("config", {}),
-            imports=d.get("imports", []),
-        )
-@dataclass
-class InventoryMeta:
-    name: str
-    description: str | None = None
-    icon: str | None = None
-@dataclass
-class Inventory:
-    meta: InventoryMeta
-    machines: dict[str, Machine]
-    services: dict[str, dict[str, Service]]
-    @staticmethod
-    def from_dict(d: dict[str, Any]) -> "Inventory":
-        return Inventory(
-            meta=InventoryMeta(**d.get("meta", {})),
-            machines={
-                name: Machine.from_dict(machine)
-                for name, machine in d.get("machines", {}).items()
-            },
-            services={
-                name: {
-                    role: Service.from_dict(service)
-                    for role, service in services.items()
-                }
-                for name, services in d.get("services", {}).items()
-            },
-        )
-    @staticmethod
-    def get_path(flake_dir: str | Path) -> Path:
-        return Path(flake_dir) / "inventory.json"
-    @staticmethod
-    def load_file(flake_dir: str | Path) -> "Inventory":
-        inventory = Inventory(
-            machines={}, services={}, meta=InventoryMeta(name="New Clan")
-        )
-        inventory_file = Inventory.get_path(flake_dir)
-        if inventory_file.exists():
-            with open(inventory_file) as f:
-                try:
-                    res = json.load(f)
-                    inventory = Inventory.from_dict(res)
-                except json.JSONDecodeError as e:
-                    raise ClanError(f"Error decoding inventory file: {e}")
-        return inventory
-    def persist(self, flake_dir: str | Path, message: str) -> None:
-        inventory_file = Inventory.get_path(flake_dir)
-        with open(inventory_file, "w") as f:
-            json.dump(dataclass_to_dict(self), f, indent=2)
-        commit_file(inventory_file, Path(flake_dir), commit_message=message)
+def is_union_type(type_hint: type) -> bool:
+    return type(type_hint) is UnionType
+def get_inner_type(type_hint: type) -> type:
+    if is_union_type(type_hint):
+        # Return the first non-None type
+        return next(t for t in get_args(type_hint) if t is not type(None))
+    return type_hint
+def get_second_type(type_hint: type[dict]) -> type:
+    """
+    Get the value type of a dictionary type hint
+    """
+    args = get_args(type_hint)
+    if len(args) == 2:
+        # Return the second argument, which should be the value type (Machine)
+        return args[1]
+    raise ValueError(f"Invalid type hint for dict: {type_hint}")
+def from_dict(t: type, data: dict[str, Any] | None) -> Any:
+    """
+    Dynamically instantiate a data class from a dictionary, handling nested data classes.
+    """
+    if data is None:
+        return None
+    try:
+        # Attempt to create an instance of the data_class
+        field_values = {}
+        for field in fields(t):
+            original_name = field.metadata.get("original_name", field.name)
+            field_value = data.get(original_name)
+            field_type = get_inner_type(field.type)  # type: ignore
+            if original_name in data:
+                # If the field is another dataclass, recursively instantiate it
+                if is_dataclass(field_type):
+                    field_value = from_dict(field_type, field_value)
+                elif isinstance(field_type, Path | str) and isinstance(
+                    field_value, str
+                ):
+                    field_value = (
+                        Path(field_value) if field_type == Path else field_value
+                    )
+                elif get_origin(field_type) is dict and isinstance(field_value, dict):
+                    # The field is a dictionary with a specific type
+                    inner_type = get_second_type(field_type)
+                    field_value = {
+                        k: from_dict(inner_type, v) for k, v in field_value.items()
+                    }
+                elif get_origin(field_type) is list and isinstance(field_value, list):
+                    # The field is a list with a specific type
+                    inner_type = get_args(field_type)[0]
+                    field_value = [from_dict(inner_type, v) for v in field_value]
+            # Set the value
+            if (
+                field.default is not dataclasses.MISSING
+                or field.default_factory is not dataclasses.MISSING
+            ):
+                # Fields with a default value
+                # a: Int = 1
+                # b: list = Field(default_factory=list)
+                if original_name in data or field_value is not None:
+                    field_values[field.name] = field_value
+            else:
+                # Fields without a default value
+                # a: Int
+                field_values[field.name] = field_value
+        return t(**field_values)
+    except (TypeError, ValueError) as e:
+        print(f"Failed to instantiate {t.__name__}: {e} {data}")
+        return None
+        # raise ClanError(f"Failed to instantiate {t.__name__}: {e}")
+def get_path(flake_dir: str | Path) -> Path:
+    """
+    Get the path to the inventory file in the flake directory
+    """
+    return (Path(flake_dir) / "inventory.json").resolve()
+# Default inventory
+default_inventory = Inventory(
+    meta=Meta(name="New Clan"), machines={}, services=Service()
+)
+def load_inventory(
+    flake_dir: str | Path, default: Inventory = default_inventory
+) -> Inventory:
+    """
+    Load the inventory file from the flake directory
+    If no file is found, returns the default inventory
+    """
+    inventory = default_inventory
+    inventory_file = get_path(flake_dir)
+    if inventory_file.exists():
+        with open(inventory_file) as f:
+            try:
+                res = json.load(f)
+                inventory = from_dict(Inventory, res)
+            except json.JSONDecodeError as e:
+                # Error decoding the inventory file
+                raise ClanError(f"Error decoding inventory file: {e}")
+    return inventory
+def save_inventory(inventory: Inventory, flake_dir: str | Path, message: str) -> None:
+    """
+    Write the inventory to the flake directory
+    and commit it to git with the given message
+    """
+    inventory_file = get_path(flake_dir)
+    with open(inventory_file, "w") as f:
+        json.dump(dataclass_to_dict(inventory), f, indent=2)
+    commit_file(inventory_file, Path(flake_dir), commit_message=message)
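Note: from_dict and dataclass_to_dict together replace the hand-written from_dict/persist methods; field metadata carries the original JSON key (for example "single-disk"), so keys survive a round trip. A small illustrative sketch with invented data:

from clan_cli.inventory import Inventory, dataclass_to_dict, from_dict

raw = {
    "meta": {"name": "demo"},
    "machines": {"jon": {"name": "jon", "deploy": {"targetHost": "root@jon"}}},
    "services": {"single-disk": {}},
}
inventory = from_dict(Inventory, raw)        # nested dataclasses are instantiated recursively
assert inventory.services.single_disk == {}  # dashed JSON key maps to a Python attribute
back = dataclass_to_dict(inventory)          # the original "single-disk" key is emitted again
assert "single-disk" in back["services"]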

View File

@@ -0,0 +1,176 @@
# DO NOT EDIT THIS FILE MANUALLY. IT IS GENERATED.
#
# ruff: noqa: N815
# ruff: noqa: N806
# ruff: noqa: F401
# fmt: off
from dataclasses import dataclass, field
from typing import Any
@dataclass
class MachineDeploy:
targetHost: str | None = field(default=None )
@dataclass
class Machine:
deploy: MachineDeploy
name: str
description: str | None = field(default=None )
icon: str | None = field(default=None )
system: str | None = field(default=None )
tags: list[str] = field(default_factory=list )
@dataclass
class Meta:
name: str
description: str | None = field(default=None )
icon: str | None = field(default=None )
@dataclass
class BorgbackupConfigDestination:
repo: str
name: str
@dataclass
class BorgbackupConfig:
destinations: dict[str, BorgbackupConfigDestination] = field(default_factory=dict )
@dataclass
class ServiceBorgbackupMachine:
config: BorgbackupConfig = field(default_factory=BorgbackupConfig )
imports: list[str] = field(default_factory=list )
@dataclass
class ServiceBorgbackupMeta:
name: str
description: str | None = field(default=None )
icon: str | None = field(default=None )
@dataclass
class ServiceBorgbackupRoleClient:
config: BorgbackupConfig = field(default_factory=BorgbackupConfig )
imports: list[str] = field(default_factory=list )
machines: list[str] = field(default_factory=list )
tags: list[str] = field(default_factory=list )
@dataclass
class ServiceBorgbackupRoleServer:
config: BorgbackupConfig = field(default_factory=BorgbackupConfig )
imports: list[str] = field(default_factory=list )
machines: list[str] = field(default_factory=list )
tags: list[str] = field(default_factory=list )
@dataclass
class ServiceBorgbackupRole:
client: ServiceBorgbackupRoleClient
server: ServiceBorgbackupRoleServer
@dataclass
class ServiceBorgbackup:
meta: ServiceBorgbackupMeta
roles: ServiceBorgbackupRole
config: BorgbackupConfig = field(default_factory=BorgbackupConfig )
machines: dict[str, ServiceBorgbackupMachine] = field(default_factory=dict )
@dataclass
class PackagesConfig:
packages: list[str] = field(default_factory=list )
@dataclass
class ServicePackageMachine:
config: PackagesConfig = field(default_factory=PackagesConfig )
imports: list[str] = field(default_factory=list )
@dataclass
class ServicePackageMeta:
name: str
description: str | None = field(default=None )
icon: str | None = field(default=None )
@dataclass
class ServicePackageRoleDefault:
config: PackagesConfig = field(default_factory=PackagesConfig )
imports: list[str] = field(default_factory=list )
machines: list[str] = field(default_factory=list )
tags: list[str] = field(default_factory=list )
@dataclass
class ServicePackageRole:
default: ServicePackageRoleDefault
@dataclass
class ServicePackage:
meta: ServicePackageMeta
roles: ServicePackageRole
config: PackagesConfig = field(default_factory=PackagesConfig )
machines: dict[str, ServicePackageMachine] = field(default_factory=dict )
@dataclass
class SingleDiskConfig:
device: str | None = field(default=None )
@dataclass
class ServiceSingleDiskMachine:
config: SingleDiskConfig = field(default_factory=SingleDiskConfig )
imports: list[str] = field(default_factory=list )
@dataclass
class ServiceSingleDiskMeta:
name: str
description: str | None = field(default=None )
icon: str | None = field(default=None )
@dataclass
class ServiceSingleDiskRoleDefault:
config: SingleDiskConfig = field(default_factory=SingleDiskConfig )
imports: list[str] = field(default_factory=list )
machines: list[str] = field(default_factory=list )
tags: list[str] = field(default_factory=list )
@dataclass
class ServiceSingleDiskRole:
default: ServiceSingleDiskRoleDefault
@dataclass
class ServiceSingleDisk:
meta: ServiceSingleDiskMeta
roles: ServiceSingleDiskRole
config: SingleDiskConfig = field(default_factory=SingleDiskConfig )
machines: dict[str, ServiceSingleDiskMachine] = field(default_factory=dict )
@dataclass
class Service:
borgbackup: dict[str, ServiceBorgbackup] = field(default_factory=dict )
packages: dict[str, ServicePackage] = field(default_factory=dict )
single_disk: dict[str, ServiceSingleDisk] = field(default_factory=dict , metadata={"original_name": "single-disk"})
@dataclass
class Inventory:
meta: Meta
services: Service
machines: dict[str, Machine] = field(default_factory=dict )
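Note: these generated dataclasses mirror the Nix inventory interface, so an inventory can be assembled in plain Python; a short sketch with invented names:

from clan_cli.inventory import Inventory, Machine, MachineDeploy, Meta, Service

# A minimal inventory: one machine, no service instances yet.
inventory = Inventory(
    meta=Meta(name="demo-clan"),
    services=Service(),
    machines={
        "jon": Machine(
            name="jon",
            deploy=MachineDeploy(targetHost="root@192.168.1.2"),
            tags=["backup"],
        )
    },
)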

View File

@@ -7,7 +7,7 @@ from ..api import API
 from ..clan_uri import FlakeId
 from ..errors import ClanError
 from ..git import commit_file
-from ..inventory import Inventory, Machine
+from ..inventory import Machine, MachineDeploy, get_path, load_inventory, save_inventory
 log = logging.getLogger(__name__)
@@ -20,11 +20,11 @@ def create_machine(flake: FlakeId, machine: Machine) -> None:
"Machine name must be a valid hostname", location="Create Machine" "Machine name must be a valid hostname", location="Create Machine"
) )
inventory = Inventory.load_file(flake.path) inventory = load_inventory(flake.path)
inventory.machines.update({machine.name: machine}) inventory.machines.update({machine.name: machine})
inventory.persist(flake.path, f"Create machine {machine.name}") save_inventory(inventory, flake.path, f"Create machine {machine.name}")
commit_file(Inventory.get_path(flake.path), Path(flake.path)) commit_file(get_path(flake.path), Path(flake.path))
def create_command(args: argparse.Namespace) -> None: def create_command(args: argparse.Namespace) -> None:
@@ -36,6 +36,7 @@ def create_command(args: argparse.Namespace) -> None:
             description=args.description,
             tags=args.tags,
             icon=args.icon,
+            deploy=MachineDeploy(),
         ),
     )

View File

@@ -6,18 +6,18 @@ from ..clan_uri import FlakeId
 from ..completions import add_dynamic_completer, complete_machines
 from ..dirs import specific_machine_dir
 from ..errors import ClanError
-from ..inventory import Inventory
+from ..inventory import load_inventory, save_inventory
 @API.register
 def delete_machine(flake: FlakeId, name: str) -> None:
-    inventory = Inventory.load_file(flake.path)
+    inventory = load_inventory(flake.path)
     machine = inventory.machines.pop(name, None)
     if machine is None:
         raise ClanError(f"Machine {name} does not exist")
-    inventory.persist(flake.path, f"Delete machine {name}")
+    save_inventory(inventory, flake.path, f"Delete machine {name}")
     folder = specific_machine_dir(flake.path, name)
     if folder.exists():

View File

@@ -4,7 +4,7 @@ import logging
 from pathlib import Path
 from clan_cli.api import API
-from clan_cli.inventory import Machine
+from clan_cli.inventory import Machine, from_dict
 from ..cmd import run_no_stdout
 from ..nix import nix_eval
@@ -24,7 +24,7 @@ def list_machines(flake_url: str | Path, debug: bool = False) -> dict[str, Machi
     proc = run_no_stdout(cmd)
     res = proc.stdout.strip()
-    data = {name: Machine.from_dict(v) for name, v in json.loads(res).items()}
+    data = {name: from_dict(Machine, v) for name, v in json.loads(res).items()}
     return data

View File

@@ -21,6 +21,9 @@
   clan-core-path,
   nixpkgs,
   includedRuntimeDeps,
+  inventory-schema,
+  classgen,
 }:
 let
   pythonDependencies = [
@@ -60,6 +63,8 @@ let
     rm $out/clan_cli/config/jsonschema
     ln -sf ${nixpkgs'} $out/clan_cli/nixpkgs
     cp -r ${../../lib/jsonschema} $out/clan_cli/config/jsonschema
+    ${classgen}/bin/classgen ${inventory-schema}/schema.json $out/clan_cli/inventory/classes.py
   '';
# Create a custom nixpkgs for use within the project # Create a custom nixpkgs for use within the project

View File

@@ -9,7 +9,7 @@
   { self', pkgs, ... }:
   let
     flakeLock = lib.importJSON (self + /flake.lock);
-    flakeInputs = (builtins.removeAttrs inputs [ "self" ]);
+    flakeInputs = builtins.removeAttrs inputs [ "self" ];
     flakeLockVendoredDeps = flakeLock // {
       nodes =
         flakeLock.nodes
@@ -38,7 +38,6 @@
     '';
   in
   {
     devShells.clan-cli = pkgs.callPackage ./shell.nix {
       inherit (self'.packages) clan-cli clan-cli-full;
       inherit self';
@@ -46,6 +45,7 @@
     packages = {
       clan-cli = pkgs.python3.pkgs.callPackage ./default.nix {
         inherit (inputs) nixpkgs;
+        inherit (self'.packages) inventory-schema classgen;
         clan-core-path = clanCoreWithVendoredDeps;
         includedRuntimeDeps = [
           "age"
@@ -54,6 +54,7 @@
       };
       clan-cli-full = pkgs.python3.pkgs.callPackage ./default.nix {
         inherit (inputs) nixpkgs;
+        inherit (self'.packages) inventory-schema classgen;
         clan-core-path = clanCoreWithVendoredDeps;
         includedRuntimeDeps = lib.importJSON ./clan_cli/nix/allowed-programs.json;
       };
@@ -64,6 +65,8 @@
         buildInputs = [ pkgs.python3 ];
         installPhase = ''
+          ${self'.packages.classgen}/bin/classgen ${self'.packages.inventory-schema}/schema.json ./clan_cli/inventory/classes.py
           python docs.py reference
           mkdir -p $out
           cp -r out/* $out
@@ -77,6 +80,8 @@
         buildInputs = [ pkgs.python3 ];
         installPhase = ''
+          ${self'.packages.classgen}/bin/classgen ${self'.packages.inventory-schema}/schema.json ./clan_cli/inventory/classes.py
           python api.py > $out
         '';
       };
@@ -84,6 +89,35 @@
       default = self'.packages.clan-cli;
     };
-    checks = self'.packages.clan-cli.tests;
+    checks = self'.packages.clan-cli.tests // {
+      inventory-classes-up-to-date = pkgs.stdenv.mkDerivation {
+        name = "inventory-classes-up-to-date";
+        src = ./clan_cli/inventory;
+        env = {
+          classFile = "classes.py";
+        };
+        installPhase = ''
+          ${self'.packages.classgen}/bin/classgen ${self'.packages.inventory-schema}/schema.json b_classes.py
+          file1=$classFile
+          file2=b_classes.py
+          echo "Comparing $file1 and $file2"
+          if cmp -s "$file1" "$file2"; then
+            echo "Files are identical"
+            echo "Classes file is up to date"
+          else
+            echo "Classes file is out of date or has been modified"
+            echo "run ./update.sh in the inventory directory to update the classes file"
+            echo "--------------------------------\n"
+            diff "$file1" "$file2"
+            echo "--------------------------------\n\n"
+            exit 1
+          fi
+          touch $out
+        '';
+      };
+    };
   };
 }

View File

@@ -45,5 +45,9 @@ mkShell {
     # Needed for impure tests
     ln -sfT ${clan-cli.nixpkgs} "$PKG_ROOT/clan_cli/nixpkgs"
+    # Generate classes.py from inventory schema
+    # This file is in .gitignore
+    ${self'.packages.classgen}/bin/classgen ${self'.packages.inventory-schema}/schema.json $PKG_ROOT/clan_cli/inventory/classes.py
   '';
 }

View File

@@ -8,7 +8,7 @@ from clan_cli.config.machine import (
     verify_machine_config,
 )
 from clan_cli.config.schema import machine_schema
-from clan_cli.inventory import Machine
+from clan_cli.inventory import Machine, MachineDeploy
 from clan_cli.machines.create import create_machine
 from clan_cli.machines.list import list_machines
@@ -31,6 +31,7 @@ def test_create_machine_on_minimal_clan(test_flake_minimal: FlakeForTest) -> Non
description="A test machine", description="A test machine",
tags=["test"], tags=["test"],
icon=None, icon=None,
deploy=MachineDeploy(),
), ),
) )

View File

@@ -4,9 +4,19 @@ from typing import TYPE_CHECKING
 import pytest
 from fixtures_flakes import FlakeForTest
-from clan_cli.api.modules import list_modules, update_module_instance
+from clan_cli.api.modules import list_modules
 from clan_cli.clan_uri import FlakeId
-from clan_cli.inventory import Machine, Role, Service, ServiceMeta
+from clan_cli.inventory import (
+    Machine,
+    MachineDeploy,
+    ServiceBorgbackup,
+    ServiceBorgbackupMeta,
+    ServiceBorgbackupRole,
+    ServiceBorgbackupRoleClient,
+    ServiceBorgbackupRoleServer,
+    load_inventory,
+    save_inventory,
+)
 from clan_cli.machines.create import create_machine
 from clan_cli.nix import nix_eval, run_no_stdout
@@ -51,21 +61,30 @@ def test_add_module_to_inventory(
         ]
     )
     create_machine(
-        FlakeId(base_path), Machine(name="machine1", tags=[], system="x86_64-linux")
-    )
+        FlakeId(base_path),
+        Machine(
+            name="machine1", tags=[], system="x86_64-linux", deploy=MachineDeploy()
+        ),
+    )
-    update_module_instance(
-        base_path,
-        "borgbackup",
-        "borgbackup1",
-        Service(
-            meta=ServiceMeta(name="borgbackup"),
-            roles={
-                "client": Role(machines=["machine1"]),
-                "server": Role(machines=["machine1"]),
-            },
-        ),
-    )
+    inventory = load_inventory(base_path)
+    inventory.services.borgbackup = {
+        "borg1": ServiceBorgbackup(
+            meta=ServiceBorgbackupMeta(name="borg1"),
+            roles=ServiceBorgbackupRole(
+                client=ServiceBorgbackupRoleClient(
+                    machines=["machine1"],
+                ),
+                server=ServiceBorgbackupRoleServer(
+                    machines=["machine1"],
+                ),
+            ),
+        )
+    }
+    save_inventory(inventory, base_path, "Add borgbackup service")
     cmd = ["facts", "generate", "--flake", str(test_flake_with_core.path), "machine1"]
     cli.run(cmd)

View File

@@ -1,28 +1,36 @@
+# ruff: noqa: RUF001
 import argparse
 import json
 from typing import Any
-# Function to map JSON schema types to Python types
-def map_json_type(json_type: Any, nested_type: str = "Any") -> str:
+# Function to map JSON schemas and types to Python types
+def map_json_type(
+    json_type: Any, nested_types: set[str] = {"Any"}, parent: Any = None
+) -> set[str]:
     if isinstance(json_type, list):
-        return " | ".join(map(map_json_type, json_type))
+        res = set()
+        for t in json_type:
+            res |= map_json_type(t)
+        return res
     if isinstance(json_type, dict):
         return map_json_type(json_type.get("type"))
     elif json_type == "string":
-        return "str"
+        return {"str"}
     elif json_type == "integer":
-        return "int"
+        return {"int"}
     elif json_type == "boolean":
-        return "bool"
+        return {"bool"}
     elif json_type == "array":
-        return f"list[{nested_type}]"  # Further specification can be handled if needed
+        assert nested_types, f"Array type not found for {parent}"
+        return {f"""list[{" | ".join(nested_types)}]"""}
     elif json_type == "object":
-        return f"dict[str, {nested_type}]"
+        assert nested_types, f"dict type not found for {parent}"
+        return {f"""dict[str, {" | ".join(nested_types)}]"""}
     elif json_type == "null":
-        return "None"
+        return {"None"}
     else:
-        return "Any"
+        raise ValueError(f"Python type not found for {json_type}")
 known_classes = set()
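Note: map_json_type now returns a set of Python type names so that union members accumulate naturally; roughly the expected behaviour (illustrative expectations, not part of the patch):

map_json_type("string")                            # -> {"str"}
map_json_type(["string", "null"])                  # -> {"str", "None"}
map_json_type({"type": "integer"})                 # -> {"int"}
map_json_type("array", nested_types={"str"})       # -> {"list[str]"}
map_json_type("object", nested_types={"Machine"})  # -> {"dict[str, Machine]"}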
@@ -32,9 +40,9 @@ root_class = "Inventory"
 # Recursive function to generate dataclasses from JSON schema
 def generate_dataclass(schema: dict[str, Any], class_name: str = root_class) -> str:
     properties = schema.get("properties", {})
-    required = schema.get("required", [])
-    fields = []
+    required_fields = []
+    fields_with_default = []
     nested_classes = []
     for prop, prop_info in properties.items():
@@ -42,77 +50,120 @@ def generate_dataclass(schema: dict[str, Any], class_name: str = root_class) ->
         prop_type = prop_info.get("type", None)
         union_variants = prop_info.get("oneOf", [])
+        # Collect all types
+        field_types = set()
         title = prop_info.get("title", prop.removesuffix("s"))
         title_sanitized = "".join([p.capitalize() for p in title.split("-")])
         nested_class_name = f"""{class_name if class_name != root_class and not prop_info.get("title") else ""}{title_sanitized}"""
+        # if nested_class_name == "ServiceBorgbackupRoleServerConfig":
+        #     breakpoint()
         if (prop_type is None) and (not union_variants):
             raise ValueError(f"Type not found for property {prop} {prop_info}")
+        # Union fields (oneOf)
+        # str | int | None
-        python_type = None
         if union_variants:
-            python_type = map_json_type(union_variants)
+            field_types = map_json_type(union_variants)
         elif prop_type == "array":
             item_schema = prop_info.get("items")
             if isinstance(item_schema, dict):
-                python_type = map_json_type(
-                    prop_type,
-                    map_json_type(item_schema),
+                field_types = map_json_type(
+                    prop_type, map_json_type(item_schema), field_name
                 )
-            else:
-                python_type = map_json_type(
-                    prop_type,
-                    map_json_type([i for i in prop_info.get("items", [])]),
-                )
-        assert python_type, f"Python type not found for {prop} {prop_info}"
-        if prop in required:
-            field_def = f"{prop}: {python_type}"
-        else:
-            field_def = f"{prop}: {python_type} | None = None"
-        if prop_type == "object":
-            map_type = prop_info.get("additionalProperties")
-            if map_type:
-                # breakpoint()
-                if map_type.get("type") == "object":
-                    # Non trivial type
-                    if nested_class_name not in known_classes:
-                        nested_classes.append(
-                            generate_dataclass(map_type, nested_class_name)
-                        )
-                        known_classes.add(nested_class_name)
-                    field_def = f"{field_name}: dict[str, {nested_class_name}]"
-                else:
-                    # Trivial type
-                    field_def = f"{field_name}: dict[str, {map_json_type(map_type)}]"
-            else:
+        elif prop_type == "object":
+            inner_type = prop_info.get("additionalProperties")
+            if inner_type and inner_type.get("type") == "object":
+                # Inner type is a class
+                field_types = map_json_type(prop_type, {nested_class_name}, field_name)
+                #
+                if nested_class_name not in known_classes:
+                    nested_classes.append(
+                        generate_dataclass(inner_type, nested_class_name)
+                    )
+                    known_classes.add(nested_class_name)
+            elif inner_type and inner_type.get("type") != "object":
+                # Trivial type
+                field_types = map_json_type(inner_type)
+            elif not inner_type:
+                # The type is a class
+                field_types = {nested_class_name}
                 if nested_class_name not in known_classes:
                     nested_classes.append(
                         generate_dataclass(prop_info, nested_class_name)
                     )
                     known_classes.add(nested_class_name)
-            field_def = f"{field_name}: {nested_class_name}"
-        elif prop_type == "array":
-            items = prop_info.get("items", {})
-            if items.get("type") == "object":
-                nested_class_name = prop.capitalize()
-                nested_classes.append(generate_dataclass(items, nested_class_name))
-                field_def = f"{field_name}: List[{nested_class_name}]"
-        fields.append(field_def)
+        else:
+            field_types = map_json_type(
+                prop_type,
+                nested_types=set(),
+                parent=field_name,
+            )
+        assert field_types, f"Python type not found for {prop} {prop_info}"
+        serialised_types = " | ".join(field_types)
+        field_meta = None
+        if field_name != prop:
+            field_meta = f"""{{"original_name": "{prop}"}}"""
+        field_def = f"{field_name}: {serialised_types}"
+        if field_meta:
+            field_def = f"{field_def} = field(metadata={field_meta})"
+        if "default" in prop_info or field_name not in prop_info.get("required", []):
+            if "default" in prop_info:
+                default_value = prop_info.get("default")
+                if default_value is None:
+                    field_types |= {"None"}
+                    serialised_types = " | ".join(field_types)
+                    field_def = f"""{field_name}: {serialised_types} = field(default=None {f", metadata={field_meta}" if field_meta else ""})"""
+                elif isinstance(default_value, list):
+                    field_def = f"""{field_def} = field(default_factory=list {f", metadata={field_meta}" if field_meta else ""})"""
+                elif isinstance(default_value, dict):
+                    serialised_types = " | ".join(field_types)
+                    if serialised_types == nested_class_name:
+                        field_def = f"""{field_name}: {serialised_types} = field(default_factory={nested_class_name} {f", metadata={field_meta}" if field_meta else ""})"""
+                    elif f"dict[str, {nested_class_name}]" in serialised_types:
+                        field_def = f"""{field_name}: {serialised_types} = field(default_factory=dict {f", metadata={field_meta}" if field_meta else ""})"""
+                    else:
+                        field_def = f"""{field_name}: {serialised_types} | dict[str,Any] = field(default_factory=dict {f", metadata={field_meta}" if field_meta else ""})"""
+                elif default_value == "name":
+                    # Special case for nix submodules
+                    pass
+                elif isinstance(default_value, str):
+                    field_def = f"""{field_name}: {serialised_types} = field(default = '{default_value}' {f", metadata={field_meta}" if field_meta else ""})"""
+                else:
+                    # Other default values are not handled yet.
+                    raise ValueError(
+                        f"Unhandled default value for field '{field_name}' - default value: {default_value}"
+                    )
+                fields_with_default.append(field_def)
+            if "default" not in prop_info:
+                # Field is not required but also specifies no default value.
+                # Try to infer a default value from the type.
+                if "dict" in str(serialised_types):
+                    field_def = f"""{field_name}: {serialised_types} = field(default_factory=dict {f", metadata={field_meta}" if field_meta else ""})"""
+                    fields_with_default.append(field_def)
+                elif "list" in str(serialised_types):
+                    field_def = f"""{field_name}: {serialised_types} = field(default_factory=list {f", metadata={field_meta}" if field_meta else ""})"""
+                    fields_with_default.append(field_def)
+                elif "None" in str(serialised_types):
+                    field_def = f"""{field_name}: {serialised_types} = field(default=None {f", metadata={field_meta}" if field_meta else ""})"""
+                    fields_with_default.append(field_def)
+                else:
+                    # Field is not required but also specifies no default value
+                    required_fields.append(field_def)
+        else:
+            required_fields.append(field_def)
-    fields_str = "\n    ".join(fields)
+    fields_str = "\n    ".join(required_fields + fields_with_default)
     nested_classes_str = "\n\n".join(nested_classes)
     class_def = f"@dataclass\nclass {class_name}:\n    {fields_str}\n"
@@ -128,12 +179,13 @@ def run_gen(args: argparse.Namespace) -> None:
     with open(args.output, "w") as f:
         f.write(
-            """
-# DO NOT EDIT THIS FILE MANUALLY. IT IS GENERATED.
+            """# DO NOT EDIT THIS FILE MANUALLY. IT IS GENERATED.
+#
+# UPDATE:
 # ruff: noqa: N815
 # ruff: noqa: N806
-from dataclasses import dataclass
+# ruff: noqa: F401
+# fmt: off
+from dataclasses import dataclass, field
 from typing import Any\n\n
 """
         )