Compare commits

..

69 Commits

Author SHA1 Message Date
Michael Hoang
01c9432cc5 checks/installation: don't hardcode system 2025-03-31 16:20:54 +09:00
Michael Hoang
f62e9db126 wip! fix all checks on aarch64-linux 2025-03-31 16:20:54 +09:00
renovate[bot]
dcb2231332 chore(deps): update data-mesher digest to bf8c544 2025-03-31 00:20:25 +00:00
renovate[bot]
725eeb87ae chore(deps): lock file maintenance 2025-03-31 00:00:41 +00:00
hsjobeki
66df677fd2 Merge pull request 'chore(lib): prepare for refactoring into clanLib' (#3141) from hsjobeki/clan-core:clan-services into main
Reviewed-on: https://git.clan.lol/clan/clan-core/pulls/3141
2025-03-30 15:51:17 +00:00
Johannes Kirschbauer
f7d15215ea feat(clanLib): expose clanInternals.clanLib
This is useful for:
- using it as an API via Python
- testing clanLib downstream
2025-03-30 17:07:11 +02:00
Johannes Kirschbauer
c25574bebd fix(lib/evalClan): evalClan is an explicit lib attribute 2025-03-30 17:07:11 +02:00
Johannes Kirschbauer
fe5796ba17 feat(inventory/instances): only warn if instances is populated 2025-03-30 17:07:11 +02:00
Johannes Kirschbauer
f2e89d27fe feat(lib/inventory): use clanLib instead of clan-core as internal specialArg. This has the benefit of being more narrowly scoped. 2025-03-30 17:07:09 +02:00
Johannes Kirschbauer
06dd2ebf8c feat(lib/modules): remove dependency on self 2025-03-30 16:12:01 +02:00
Johannes Kirschbauer
40740860c0 feat(lib): init callLib; helper to bootstrap clanLib 2025-03-30 16:11:04 +02:00
Johannes Kirschbauer
89bc39869c chore(lib): prepare for refactoring into clanLib 2025-03-30 15:56:54 +02:00
hsjobeki
84d0a2f2f0 Merge pull request 'enable clan services for machines' (#3134) from hsjobeki/clan-core:clan-services into main
Reviewed-on: https://git.clan.lol/clan/clan-core/pulls/3134
2025-03-30 13:54:11 +00:00
Johannes Kirschbauer
1d07737989 docs(lib): init readme with folder and testing conventions 2025-03-30 15:29:05 +02:00
Johannes Kirschbauer
9d386485dd chore(auto-imports): Remove the auto-import feature. There will be a replacement to make imports explicit 2025-03-30 15:29:05 +02:00
Johannes Kirschbauer
ee9ae9c76d fix(eval/tests): distributed-services: don't abort on warn in tests 2025-03-30 15:29:05 +02:00
Johannes Kirschbauer
d4d4d77d2d fix(tests): include 'lib' always as a whole 2025-03-30 15:29:05 +02:00
Johannes Kirschbauer
c0ebad1cd9 feat(inventory/instances): add wip warning 2025-03-30 15:29:05 +02:00
Johannes Kirschbauer
86d0c95da7 feat(inventory/instances): improve error location 2025-03-30 15:29:05 +02:00
Johannes Kirschbauer
0fb1b5c5ce feat(inventory/instances): add service result to nixos machines 2025-03-30 15:29:05 +02:00
renovate[bot]
dc0349e835 fix(deps): update dependency @tanstack/solid-query to v5.71.0 2025-03-30 13:00:14 +00:00
renovate[bot]
cc8a74b195 chore(deps): update data-mesher digest to 4d139ac 2025-03-30 09:50:10 +00:00
renovate[bot]
046fe0df36 chore(deps): update nixpkgs digest to eb0e0f2 2025-03-30 09:10:24 +00:00
lassulus
3f948fdbd4 Merge pull request 'Make Generator validation more dynamic' (#3052) from tangential/clan-core:dynamic-vars-generator-validation into main
Reviewed-on: https://git.clan.lol/clan/clan-core/pulls/3052
2025-03-30 07:00:43 +00:00
Jonathan Thiessen
eb35e6ea21 Make Generator's validation dynamic
* Switch `Generator`'s `validation` from a regular property to
  an `@property` annotated method backed by `Machine`'s `eval_nix()`.
* Ensure that `Machine`'s flake cache is flushed after each
  effectful generator execution (rather than only after all
  generators have been executed).
2025-03-30 04:33:30 +00:00
Jonathan Thiessen
4a0e1b3b6b Add dependent vars generator dynamic validation test 2025-03-30 04:33:30 +00:00
Jonathan Thiessen
1b8974d167 Fix cached None support in FlakeCacheEntry
Previously, you could cache None values; however,
insertion wasn't idempotent: identical reinsertion
would lead to errors due to missing None checks.
2025-03-30 04:33:30 +00:00
Jonathan Thiessen
5e2b5fe213 Add overlapping (consistent) flake cache insert test
* Additionally, update `insert`'s input type hint to support None values
  (as they are already selectable and (one shot) insertable).
  This is necessary to appease the linter wrt the added test.
2025-03-30 04:33:30 +00:00
renovate[bot]
74fb3abbc7 chore(deps): update sops-nix digest to 8e87388 2025-03-30 04:00:13 +00:00
Luis Hebendanz
f2b04e74f1 Merge pull request 'clan-cli: Fix deployment as non root with a buildHost set' (#3132) from Qubasa/clan-core:fix_target_host_as_non_root into main
Reviewed-on: https://git.clan.lol/clan/clan-core/pulls/3132
2025-03-29 16:56:49 +00:00
hsjobeki
d3ae684575 Merge pull request 'init inventory.instances and clan.service modules' (#3102) from hsjobeki/clan-core:clan-services into main
Reviewed-on: https://git.clan.lol/clan/clan-core/pulls/3102
2025-03-29 16:22:30 +00:00
Johannes Kirschbauer
5b294e7651 chore(ui): ignore type error in unfinished features 2025-03-29 16:45:05 +01:00
Johannes Kirschbauer
40ae510075 test(inventory/legacy): don't need to support clanModules 2025-03-29 16:35:43 +01:00
Johannes Kirschbauer
48d910f11f fix(auto-imports): disable, since it is no longer needed and causes collisions with the new module type 2025-03-29 16:24:48 +01:00
renovate[bot]
f242b9a35c chore(deps): update data-mesher digest to 734883c 2025-03-29 15:20:14 +00:00
Johannes Kirschbauer
978822d40a test(inventory/instances): add tests for per machine resolution 2025-03-29 15:40:31 +01:00
Johannes Kirschbauer
fa6c3be21e feat(inventory/instances): preserve settings modifiers close at source 2025-03-29 15:39:02 +01:00
brianmcgee
be61bac9af Merge pull request 'data-mesher-module' (#3086) from data-mesher-module into main
Reviewed-on: https://git.clan.lol/clan/clan-core/pulls/3086
2025-03-29 14:07:42 +00:00
Pablo Ovelleiro Corral
42b58910a9 data-mesher: init module
Co-authored-by: Brian McGee <brian@bmcgee.ie>
2025-03-29 13:49:41 +00:00
Johannes Kirschbauer
a746b10578 chore: add description to {manifest, name} option 2025-03-29 14:33:44 +01:00
Johannes Kirschbauer
19341e4cb1 chore: format test, remove unused attributes 2025-03-29 14:27:52 +01:00
Johannes Kirschbauer
f4e06271ba chore: rename unused attributes in inventory adapter 2025-03-29 14:27:27 +01:00
Johannes Kirschbauer
d93fe229b3 chore: remove unused attributes from flake-module 2025-03-29 14:27:04 +01:00
Johannes Kirschbauer
5fc62806b1 feat(classgen): convert only certain attributes 2025-03-29 14:22:34 +01:00
Johannes Kirschbauer
e0be2f3435 fix(cli/inventory): update classes 2025-03-29 13:25:19 +01:00
Johannes Kirschbauer
a69b81488b fix(inventory/instances): fix jsonschema compatibility 2025-03-29 13:25:19 +01:00
Johannes Kirschbauer
b133a2407a feat(clan/services): init recursive service module 2025-03-29 13:25:19 +01:00
Johannes Kirschbauer
68ae27899a feat(clan/services): init test-suite for eval test 2025-03-29 13:25:19 +01:00
Johannes Kirschbauer
b83d3ecba2 feat(clan/services): init adapter function to convert inventory instances into clan.service module configurations 2025-03-29 13:25:19 +01:00
Johannes Kirschbauer
bec4317709 feat(inventory/instances): init instances as new attribute for adding distributed services 2025-03-29 13:25:19 +01:00
Johannes Kirschbauer
f37f15c482 feat(clan/services): init new clanInternals attribute 'distributedServices' 2025-03-29 13:25:19 +01:00
Johannes Kirschbauer
fae8ec318d feat(inventory/modules): allow inline modules 2025-03-29 13:25:19 +01:00
Qubasa
8e2005f38c clan-cli: Fix deployment as non root with a buildHost set 2025-03-28 18:47:36 +01:00
Mic92
94781bb358 Merge pull request 'clan_cli: fix support for non-root deployment user (from rtunreal)' (#3124) from Qubasa/clan-core:main into main
Reviewed-on: https://git.clan.lol/clan/clan-core/pulls/3124
2025-03-28 17:26:33 +00:00
Jörg Thalheim
de740cf686 tests: add fake_sudo to sshd fixture
This allows using the same code for both testing and real-world use.
2025-03-28 17:14:22 +00:00
Qubasa
064edf61ef test_secrets_upload: Don't prepend sudo inside test; Improve secret upload test 2025-03-28 17:14:22 +00:00
renovate[bot]
aaf58d7be8 chore(deps): update treefmt-nix digest to 29a3d7b 2025-03-28 15:50:19 +00:00
renovate[bot]
03f8e41291 chore(deps): update nixpkgs digest to 6c59633 2025-03-28 15:30:25 +00:00
renovate[bot]
43bd4403c6 fix(deps): update dependency @tanstack/solid-query to v5.70.0 2025-03-28 15:20:17 +00:00
renovate[bot]
ebee55ffdc chore(deps): update nixpkgs digest to 25d1b84 2025-03-27 03:30:23 +00:00
renovate[bot]
47e9e5a8f0 chore(deps): update dependency @types/node to v22.13.14 2025-03-27 03:20:15 +00:00
Qubasa
d1a79653fe checks/installation-without-system: modify to install through normal user instead of root 2025-03-26 18:37:31 +01:00
RTUnreal
351ce1414a clan_cli: fix support for non-root deployment user 2025-03-26 18:37:31 +01:00
DavHau
e2ccd979ed vars/prompts: print var name even if custom description is set 2025-03-26 10:48:05 +00:00
renovate[bot]
f5f3f96809 chore(deps): update treefmt-nix digest to 61c8834 2025-03-26 10:10:09 +00:00
Mic92
59253a9c71 Merge pull request 'ADR: init clan api as library decision record' (#2975) from hsjobeki/clan-core:adr/architecture into main
Reviewed-on: https://git.clan.lol/clan/clan-core/pulls/2975
2025-03-26 10:01:55 +00:00
Johannes Kirschbauer
aa03adc581 ADR: init clan api as library decision record 2025-03-26 09:52:05 +00:00
Mic92
ffd84d50f7 Merge pull request 'Fix(classgen): support number conversion from jsonschema' (#3119) from hsjobeki/clan-core:class-fix into main
Reviewed-on: https://git.clan.lol/clan/clan-core/pulls/3119
2025-03-26 09:45:09 +00:00
Johannes Kirschbauer
679387e4ba Fix(classgen): support number conversion from jsonschema 2025-03-25 19:27:01 +01:00
76 changed files with 2492 additions and 451 deletions

View File

@@ -160,14 +160,10 @@
"flake.lock"
"flakeModules"
"inventory.json"
"lib/build-clan"
"lib/default.nix"
"lib/select.nix"
"lib/flake-module.nix"
"lib/frontmatter"
"lib/inventory"
"lib/constraints"
"nixosModules"
# Just include everything in 'lib':
# anything that changes in /lib may affect everything
"lib"
];
};
in

View File

@@ -0,0 +1,138 @@
(import ../lib/test-base.nix) (
{ self, lib, ... }:
let
inherit (self.lib.inventory) buildInventory;
machines = [
"signer"
"admin"
"peer"
];
serviceConfigs = buildInventory {
inventory = {
machines = lib.genAttrs machines (_: { });
services = {
data-mesher.default = {
roles.peer.machines = [ "peer" ];
roles.admin.machines = [ "admin" ];
roles.signer.machines = [ "signer" ];
};
};
modules = {
data-mesher = self.clanModules.data-mesher;
};
};
directory = ./.;
};
commonConfig =
{ config, ... }:
{
imports = [ self.nixosModules.clanCore ];
clan.core.settings.directory = builtins.toString ./.;
environment.systemPackages = [
config.services.data-mesher.package
];
clan.core.vars.settings.publicStore = "in_repo";
clan.core.vars.settings.secretStore = "vm";
clan.data-mesher.network.interface = "eth1";
clan.data-mesher.bootstrapNodes = [
"[2001:db8:1::1]:7946" # peer1
"[2001:db8:1::2]:7946" # peer2
];
# speed up for testing
services.data-mesher.settings = {
cluster.join_interval = lib.mkForce "2s";
cluster.push_pull_interval = lib.mkForce "5s";
};
systemd.tmpfiles.settings."vmsecrets" = {
"/etc/secrets" = {
C.argument = "${./vars/secret/${config.clan.core.settings.machine.name}}";
z = {
mode = "0700";
user = "data-mesher";
};
};
};
};
adminConfig = {
imports = serviceConfigs.machines.admin.machineImports;
config.clan.data-mesher.network.tld = "foo";
config.clan.core.settings.machine.name = "admin";
};
peerConfig = {
imports = serviceConfigs.machines.peer.machineImports;
config.clan.core.settings.machine.name = "peer";
};
signerConfig = {
imports = serviceConfigs.machines.signer.machineImports;
clan.core.settings.machine.name = "signer";
};
in
{
name = "data-mesher";
nodes = {
peer = {
imports = [
peerConfig
commonConfig
];
};
admin = {
imports = [
adminConfig
commonConfig
];
};
signer = {
imports = [
signerConfig
commonConfig
];
};
};
# TODO Add better test script.
testScript = ''
def resolve(node, success = {}, fail = [], timeout = 60):
for hostname, ips in success.items():
for ip in ips:
node.wait_until_succeeds(f"getent ahosts {hostname} | grep {ip}", timeout)
for hostname in fail:
node.wait_until_fails(f"getent ahosts {hostname}")
start_all()
admin.wait_for_unit("data-mesher")
signer.wait_for_unit("data-mesher")
peer.wait_for_unit("data-mesher")
# check dns resolution
for node in [admin, signer, peer]:
resolve(node, {
"admin.foo": ["2001:db8:1::1", "192.168.1.1"],
"peer.foo": ["2001:db8:1::2", "192.168.1.2"],
"signer.foo": ["2001:db8:1::3", "192.168.1.3"]
})
'';
}
)

View File

@@ -0,0 +1,3 @@
-----BEGIN PUBLIC KEY-----
MCowBQYDK2VwAyEAV/XZHv1UQEEzfD2YbJP1Q2jd1ZDG+CP5wvGf/1hcR+Q=
-----END PUBLIC KEY-----

View File

@@ -0,0 +1,3 @@
-----BEGIN PUBLIC KEY-----
MCowBQYDK2VwAyEAKSSUXJCftt5Vif6ek57CNKBcDRNfrWrxZUHjAIFW9HY=
-----END PUBLIC KEY-----

View File

@@ -0,0 +1,3 @@
-----BEGIN PUBLIC KEY-----
MCowBQYDK2VwAyEAvLD0mHQA+hf9ItlUHD0ml3i5XEArmmjwCC5rYEOmzWs=
-----END PUBLIC KEY-----

View File

@@ -0,0 +1,3 @@
-----BEGIN PRIVATE KEY-----
MC4CAQAwBQYDK2VwBCIEIFX+AzHy821hHqWLPeK3nzRuHod3FNrnPfaDoFvpz6LX
-----END PRIVATE KEY-----

View File

@@ -0,0 +1,3 @@
-----BEGIN PRIVATE KEY-----
MC4CAQAwBQYDK2VwBCIEIMwuDntiLoC7cFFyttGDf7cQWlOXOR0q90Jz3lEiuLg+
-----END PRIVATE KEY-----

View File

@@ -0,0 +1,3 @@
-----BEGIN PRIVATE KEY-----
MC4CAQAwBQYDK2VwBCIEIPmH2+vjYG6UOp+/g0Iqu7yZZKId5jffrfsySE36yO+D
-----END PRIVATE KEY-----

View File

@@ -0,0 +1,3 @@
-----BEGIN PRIVATE KEY-----
MC4CAQAwBQYDK2VwBCIEINS0tSnjHPG8IfpzQAS3wzoJA+4mYM70DIpltN8O4YD7
-----END PRIVATE KEY-----

View File

@@ -0,0 +1,3 @@
-----BEGIN PUBLIC KEY-----
MCowBQYDK2VwAyEA3P18+R5Gt+Jn7wYXpWNTXM5pyWn2WiOWekYCzXqWPwg=
-----END PUBLIC KEY-----

View File

@@ -41,6 +41,7 @@ in
borgbackup = import ./borgbackup nixosTestArgs;
matrix-synapse = import ./matrix-synapse nixosTestArgs;
mumble = import ./mumble nixosTestArgs;
data-mesher = import ./data-mesher nixosTestArgs;
syncthing = import ./syncthing nixosTestArgs;
zt-tcp-relay = import ./zt-tcp-relay nixosTestArgs;
postgresql = import ./postgresql nixosTestArgs;

View File

@@ -165,7 +165,6 @@
(modulesPath + "/../tests/common/auto-format-root-device.nix")
];
services.openssh.enable = true;
users.users.root.openssh.authorizedKeys.keyFiles = [ ../lib/ssh/pubkey ];
system.nixos.variant_id = "installer";
environment.systemPackages = [ pkgs.nixos-facter ];
virtualisation.emptyDiskImages = [ 512 ];
@@ -184,6 +183,12 @@
"flakes"
];
};
users.users.nonrootuser = {
isNormalUser = true;
openssh.authorizedKeys.keyFiles = [ ../lib/ssh/pubkey ];
extraGroups = [ "wheel" ];
};
security.sudo.wheelNeedsPassword = false;
system.extraDependencies = dependencies;
};
nodes.client = {
@@ -211,14 +216,14 @@
installer.start()
client.succeed("${pkgs.coreutils}/bin/install -Dm 600 ${../lib/ssh/privkey} /root/.ssh/id_ed25519")
client.wait_until_succeeds("timeout 2 ssh -o StrictHostKeyChecking=accept-new -v root@installer hostname")
client.wait_until_succeeds("timeout 2 ssh -o StrictHostKeyChecking=accept-new -v nonrootuser@installer hostname")
client.succeed("cp -r ${../..} test-flake && chmod -R +w test-flake")
client.fail("test -f test-flake/machines/test-install-machine-without-system/hardware-configuration.nix")
client.fail("test -f test-flake/machines/test-install-machine-without-system/facter.json")
client.succeed("clan machines update-hardware-config --flake test-flake test-install-machine-without-system root@installer >&2")
client.succeed("clan machines update-hardware-config --flake test-flake test-install-machine-without-system nonrootuser@installer >&2")
client.succeed("test -f test-flake/machines/test-install-machine-without-system/facter.json")
client.succeed("rm test-flake/machines/test-install-machine-without-system/facter.json")
client.succeed("clan machines install --debug --flake test-flake --yes test-install-machine-without-system --target-host root@installer --update-hardware-config nixos-facter >&2")
client.succeed("clan machines install --debug --flake test-flake --yes test-install-machine-without-system --target-host nonrootuser@installer --update-hardware-config nixos-facter >&2")
try:
installer.shutdown()
except BrokenPipeError:

View File

@@ -23,7 +23,6 @@
environment.etc."install-successful".text = "ok";
nixpkgs.hostPlatform = "x86_64-linux";
boot.consoleLogLevel = lib.mkForce 100;
boot.kernelParams = [ "boot.shell_on_fail" ];
@@ -89,9 +88,9 @@
let
dependencies = [
self
self.nixosConfigurations.test-install-machine.config.system.build.toplevel
self.nixosConfigurations.test-install-machine.config.system.build.diskoScript
self.nixosConfigurations.test-install-machine.config.system.clan.deployment.file
self.clanInternals.machines.${pkgs.hostPlatform.system}.test-install-machine.config.system.build.toplevel
self.clanInternals.machines.${pkgs.hostPlatform.system}.test-install-machine.config.system.build.diskoScript
self.clanInternals.machines.${pkgs.hostPlatform.system}.test-install-machine.config.system.clan.deployment.file
pkgs.bash.drvPath
pkgs.stdenv.drvPath
pkgs.nixos-anywhere
@@ -120,7 +119,7 @@
# vm-test-run-test-installation> new_machine: QEMU running (pid 80)
# vm-test-run-test-installation> new_machine: Guest root shell did not produce any data yet...
# vm-test-run-test-installation> new_machine: To debug, enter the VM and run 'systemctl status backdoor.service'.
checks = pkgs.lib.mkIf (pkgs.stdenv.isLinux && !pkgs.stdenv.isAarch64) {
checks = pkgs.lib.mkIf pkgs.stdenv.isLinux {
test-installation = (import ../lib/test-base.nix) {
name = "test-installation";
nodes.target = {

View File

@@ -23,7 +23,7 @@
...
}:
{
checks = pkgs.lib.mkIf (pkgs.stdenv.isLinux && !pkgs.stdenv.isAarch64) {
checks = pkgs.lib.mkIf pkgs.stdenv.isLinux {
test-morph = (import ../lib/test-base.nix) {
name = "morph";

View File

@@ -0,0 +1,10 @@
---
description = "Set up data-mesher"
categories = ["System"]
features = [ "inventory" ]
[constraints]
roles.admin.min = 1
roles.admin.max = 1
---

View File

@@ -0,0 +1,19 @@
lib: {
machines =
config:
let
instanceNames = builtins.attrNames config.clan.inventory.services.data-mesher;
instanceName = builtins.head instanceNames;
dataMesherInstances = config.clan.inventory.services.data-mesher.${instanceName};
uniqueStrings = list: builtins.attrNames (builtins.groupBy lib.id list);
in
rec {
admins = dataMesherInstances.roles.admin.machines or [ ];
signers = dataMesherInstances.roles.signer.machines or [ ];
peers = dataMesherInstances.roles.peer.machines or [ ];
bootstrap = uniqueStrings (admins ++ signers);
};
}

View File

@@ -0,0 +1,51 @@
{ lib, config, ... }:
let
cfg = config.clan.data-mesher;
dmLib = import ../lib.nix lib;
in
{
imports = [
../shared.nix
];
options.clan.data-mesher = {
network = {
tld = lib.mkOption {
type = lib.types.str;
default = (config.networking.domain or "clan");
description = "Top level domain to use for the network";
};
hostTTL = lib.mkOption {
type = lib.types.str;
default = "672h"; # 28 days
example = "24h";
description = "The TTL for hosts in the network, in the form of a Go time.Duration";
};
};
};
config = {
services.data-mesher.initNetwork =
let
# for a given machine, read its public key and remove any newlines
readHostKey =
machine:
let
path = "${config.clan.core.settings.directory}/vars/per-machine/${machine}/data-mesher-host-key/public_key/value";
in
builtins.elemAt (lib.splitString "\n" (builtins.readFile path)) 1;
in
{
enable = true;
keyPath = config.clan.core.vars.generators.data-mesher-network-key.files.private_key.path;
tld = cfg.network.tld;
hostTTL = cfg.network.hostTTL;
# admin and signer host public keys
signingKeys = builtins.map readHostKey (dmLib.machines config).bootstrap;
};
};
}

View File

@@ -0,0 +1,5 @@
{
imports = [
../shared.nix
];
}

View File

@@ -0,0 +1,5 @@
{
imports = [
../shared.nix
];
}

View File

@@ -0,0 +1,154 @@
{
config,
lib,
...
}:
let
cfg = config.clan.data-mesher;
dmLib = import ./lib.nix lib;
# the default bootstrap nodes are any machines with the admin or signers role
# we iterate through those machines, determining an IP address for them based on their VPN
# currently only supports zerotier
defaultBootstrapNodes = builtins.foldl' (
urls: name:
if
builtins.pathExists "${config.clan.core.settings.directory}/machines/${name}/facts/zerotier-ip"
then
let
ip = builtins.readFile "${config.clan.core.settings.directory}/machines/${name}/facts/zerotier-ip";
in
urls ++ [ "${ip}:${builtins.toString cfg.network.port}" ]
else
urls
) [ ] (dmLib.machines config).bootstrap;
in
{
options.clan.data-mesher = {
bootstrapNodes = lib.mkOption {
type = lib.types.nullOr (lib.types.listOf lib.types.str);
default = null;
description = ''
A list of bootstrap nodes that act as an initial gateway when joining
the cluster.
'';
example = [
"192.168.1.1:7946"
"192.168.1.2:7946"
];
};
network = {
interface = lib.mkOption {
type = lib.types.str;
description = ''
The interface over which cluster communication should be performed.
All the ip addresses associate with this interface will be part of
our host claim, including both ipv4 and ipv6.
This should be set to an internal/VPN interface.
'';
example = "tailscale0";
};
port = lib.mkOption {
type = lib.types.port;
default = 7946;
description = ''
Port to listen on for cluster communication.
'';
};
};
};
config = {
services.data-mesher = {
enable = true;
openFirewall = true;
settings = {
log_level = "warn";
state_dir = "/var/lib/data-mesher";
# read network id from vars
network.id = config.clan.core.vars.generators.data-mesher-network-key.files.public_key.value;
host = {
names = [ config.networking.hostName ];
key_path = config.clan.core.vars.generators.data-mesher-host-key.files.private_key.path;
};
cluster = {
port = cfg.network.port;
join_interval = "30s";
push_pull_interval = "30s";
interface = cfg.network.interface;
bootstrap_nodes = if cfg.bootstrapNodes != null then cfg.bootstrapNodes else defaultBootstrapNodes;
};
http.port = 7331;
http.interface = "lo";
};
};
# Generate host key.
clan.core.vars.generators.data-mesher-host-key = {
files =
let
owner = config.users.users.data-mesher.name;
in
{
private_key = {
inherit owner;
};
public_key = {
inherit owner;
secret = false;
};
};
runtimeInputs = [
config.services.data-mesher.package
];
script = ''
data-mesher generate keypair \
--public-key-path $out/public_key \
--private-key-path $out/private_key
'';
};
clan.core.vars.generators.data-mesher-network-key = {
# generated once per clan
share = true;
files =
let
owner = config.users.users.data-mesher.name;
in
{
private_key = {
inherit owner;
};
public_key = {
inherit owner;
secret = false;
};
};
runtimeInputs = [
config.services.data-mesher.package
];
script = ''
data-mesher generate keypair \
--public-key-path $out/public_key \
--private-key-path $out/private_key
'';
};
};
}

View File

@@ -13,6 +13,7 @@ in
borgbackup = ./borgbackup;
borgbackup-static = ./borgbackup-static;
deltachat = ./deltachat;
data-mesher = ./data-mesher;
disk-id = ./disk-id;
dyndns = ./dyndns;
ergochat = ./ergochat;

116
decisions/02-clan-api.md Normal file
View File

@@ -0,0 +1,116 @@
# Clan as library
## Status
Accepted
## Context
In the long term we envision that the clan application will consist of the following user-facing tools:
- `CLI`
- `TUI`
- `Desktop Application`
- `REST-API`
- `Mobile Application`
We are not sure whether all of these will exist, but the architecture should be generic enough that they are possible without major changes to the underlying system.
## Decision
This leads to the conclusion that we should do `library`-centric development, with the current `clan` Python code being a library that can be imported to create various tools on top of it.
All **CLI**- or **UI**-related parts should be moved out of the main library.
*Note: The next person who wants to implement any new frontend should do this first. Currently it looks like the TUI is the next one.*
Imagine roughly the following architecture:
```mermaid
graph TD
%% Define styles
classDef frontend fill:#f9f,stroke:#333,stroke-width:2px;
classDef backend fill:#bbf,stroke:#333,stroke-width:2px;
classDef storage fill:#ff9,stroke:#333,stroke-width:2px;
classDef testing fill:#cfc,stroke:#333,stroke-width:2px;
%% Define nodes
user(["User"]) -->|Interacts with| Frontends
subgraph "Frontends"
CLI["CLI"]:::frontend
APP["Desktop App"]:::frontend
TUI["TUI"]:::frontend
REST["REST API"]:::frontend
end
subgraph "Python"
API["Library <br>for interacting with clan"]:::backend
BusinessLogic["Business Logic<br>Implements actions like 'machine create'"]:::backend
STORAGE[("Persistence")]:::storage
NIX["Nix Eval & Build"]:::backend
end
subgraph "CI/CD & Tests"
TEST["Feature Testing"]:::testing
end
%% Define connections
CLI --> API
APP --> API
TUI --> API
REST --> API
TEST --> API
API --> BusinessLogic
BusinessLogic --> STORAGE
BusinessLogic --> NIX
```
With this very simple design it is ensured that all the basic features remain stable across all frontends.
It is then straightforward to call Python library functions from a testing framework to ensure that kind of stability.
Integration tests and smaller unit tests should both be utilized to ensure the stability of the library.
Note: Library functions don't have to be JSON-serializable in general.
Persistence includes, but is not limited to: creating git commits, writing to inventory.json, and reading and writing vars and other data to/from disk in general.
## Benefits / Drawbacks
- (+) Less tight coupling of frontend- / backend-teams
- (+) Consistency and inherent behavior
- (+) Performance & Scalability
- (+) Different frontends for different user groups
- (+) Documentation per library function makes it convenient to interact with the clan resources.
- (+) Testing the library ensures the stability of the underlying functionality for all layers above.
- (-) Complexity overhead
- (-) library needs to be designed / documented
- (+) library can be well documented since it is a finite set of functions.
- (-) Error handling might be harder.
- (+) Common error reporting
- (-) different frontends need different features. The library must include them all.
- (+) All those core features must be implemented anyways.
- (+) VPN benchmarking already uses the existing libraries and works relatively well.
## Implementation considerations
Not all details that will need to change over time can be pointed out ahead of time.
The goal of this document is to create a common understanding for how we like our project to be structured.
Any future commits should contribute to this goal.
Some ideas of what might need to change:
- Having separate locations or packages for the library and the CLI.
- Rename the `clan_cli` package to `clan` and move the `cli` frontend into a subfolder or a separate package.
- Python argparse or other CLI-related code should not exist in the `clan` Python library.
- `__init__.py` should be very minimal. Only init the business logic models and resources. Note that all `__init__.py` files all the way up in the module tree are always executed as part of the python module import logic and thus should be as small as possible.
i.e. `from clan_cli.vars.generators import ...` executes both `clan_cli/__init__.py` and `clan_cli/vars/__init__.py` if any of those exist.
- The `api` folder doesn't make sense, since the Python library `clan` is the API.
- Logic needed for the web UI that performs JSON serialization and deserialization will live in some `json-adapter` folder or package.
- Code for serializing dataclasses and typed dictionaries is needed for the persistence layer. (i.e. for read-write of inventory.json)
- The inventory JSON is an internal backend resource. Its logic includes merging, unmerging, and partial updates while considering Nix values and their priorities. Nobody should try to read or write it directly.
Instead there will be library methods, e.g. to add a `service` or to update/read/delete information from it.
- Library functions should be carefully designed with suitable conventions for writing good APIs in mind (e.g. https://swagger.io/resources/articles/best-practices-in-api-design/).

View File

@@ -1,10 +1,12 @@
{ ... }:
{ inputs, ... }:
{
perSystem =
{
lib,
pkgs,
self',
config,
system,
...
}:
let
@@ -24,18 +26,26 @@
in
{
devShells.default = pkgs.mkShell {
packages = [
select-shell
pkgs.nix-unit
pkgs.tea
# Better error messages than nix 2.18
pkgs.nixVersions.latest
self'.packages.tea-create-pr
self'.packages.merge-after-ci
self'.packages.pending-reviews
# treefmt with config defined in ./flake-parts/formatting.nix
config.treefmt.build.wrapper
];
packages =
[
select-shell
pkgs.nix-unit
pkgs.tea
# Better error messages than nix 2.18
pkgs.nixVersions.latest
self'.packages.tea-create-pr
self'.packages.merge-after-ci
self'.packages.pending-reviews
# treefmt with config defined in ./flake-parts/formatting.nix
config.treefmt.build.wrapper
]
# bring in data-mesher for the cli which can help with things like key generation
++ (
let
data-mesher = inputs.data-mesher.packages.${system}.data-mesher or null;
in
lib.optional (data-mesher != null) data-mesher
);
shellHook = ''
echo -e "${ansiEscapes.green}switch to another dev-shell using: select-shell${ansiEscapes.reset}"
export PRJ_ROOT=$(git rev-parse --show-toplevel)

View File

@@ -79,6 +79,7 @@ nav:
# This is the module overview and should stay at the top
- reference/clanModules/admin.md
- reference/clanModules/borgbackup-static.md
- reference/clanModules/data-mesher.md
- reference/clanModules/borgbackup.md
- reference/clanModules/deltachat.md
- reference/clanModules/disk-id.md

View File

@@ -13,8 +13,8 @@
# { clanCore = «derivation JSON»; clanModules = { ${name} = «derivation JSON» }; }
jsonDocs = pkgs.callPackage ./get-module-docs.nix {
inherit (self) clanModules;
evalClanModules = self.lib.evalClanModules;
modulesRolesOptions = self.lib.evalClanModulesWithRoles self.clanModules;
evalClanModules = self.lib.evalClan.evalClanModules;
modulesRolesOptions = self.lib.evalClan.evalClanModulesWithRoles self.clanModules;
};
# Frontmatter for clanModules

48
flake.lock generated
View File

@@ -1,5 +1,34 @@
{
"nodes": {
"data-mesher": {
"inputs": {
"flake-parts": [
"flake-parts"
],
"nixpkgs": [
"nixpkgs"
],
"systems": [
"systems"
],
"treefmt-nix": [
"treefmt-nix"
]
},
"locked": {
"lastModified": 1743379277,
"narHash": "sha256-4BNv+I6hksqZeRCrEHcQygK0MV1acjA8+L2TtA11H3c=",
"ref": "refs/heads/main",
"rev": "bf8c5448d826e047b842d6f2ac0fc698e976dda5",
"revCount": 375,
"type": "git",
"url": "https://git.clan.lol/clan/data-mesher"
},
"original": {
"type": "git",
"url": "https://git.clan.lol/clan/data-mesher"
}
},
"disko": {
"inputs": {
"nixpkgs": [
@@ -58,10 +87,10 @@
"nixpkgs": {
"locked": {
"lastModified": 315532800,
"narHash": "sha256-+bxPXRQiQ0SsjR8syBcc8X+S8WGllNM+Qreu5Td7gnI=",
"rev": "1750f3c1c89488e2ffdd47cab9d05454dddfb734",
"narHash": "sha256-Ls4VPCGSQrm6k3FCokyonfX/sgIdZc8f5ZzqEdukBFA=",
"rev": "eb0e0f21f15c559d2ac7633dc81d079d1caf5f5f",
"type": "tarball",
"url": "https://releases.nixos.org/nixpkgs/nixpkgs-25.05pre773343.1750f3c1c894/nixexprs.tar.xz"
"url": "https://releases.nixos.org/nixpkgs/nixpkgs-25.05pre776128.eb0e0f21f15c/nixexprs.tar.xz"
},
"original": {
"type": "tarball",
@@ -70,6 +99,7 @@
},
"root": {
"inputs": {
"data-mesher": "data-mesher",
"disko": "disko",
"flake-parts": "flake-parts",
"nixos-facter-modules": "nixos-facter-modules",
@@ -86,11 +116,11 @@
]
},
"locked": {
"lastModified": 1742700801,
"narHash": "sha256-ZGlpUDsuBdeZeTNgoMv+aw0ByXT2J3wkYw9kJwkAS4M=",
"lastModified": 1743305778,
"narHash": "sha256-Ux/UohNtnM5mn9SFjaHp6IZe2aAnUCzklMluNtV6zFo=",
"owner": "Mic92",
"repo": "sops-nix",
"rev": "67566fe68a8bed2a7b1175fdfb0697ed22ae8852",
"rev": "8e873886bbfc32163fe027b8676c75637b7da114",
"type": "github"
},
"original": {
@@ -121,11 +151,11 @@
]
},
"locked": {
"lastModified": 1742370146,
"narHash": "sha256-XRE8hL4vKIQyVMDXykFh4ceo3KSpuJF3ts8GKwh5bIU=",
"lastModified": 1743081648,
"narHash": "sha256-WRAylyYptt6OX5eCEBWyTwOEqEtD6zt33rlUkr6u3cE=",
"owner": "numtide",
"repo": "treefmt-nix",
"rev": "adc195eef5da3606891cedf80c0d9ce2d3190808",
"rev": "29a3d7b768c70addce17af0869f6e2bd8f5be4b7",
"type": "github"
},
"original": {

View File

@@ -19,6 +19,16 @@
treefmt-nix.url = "github:numtide/treefmt-nix";
treefmt-nix.inputs.nixpkgs.follows = "nixpkgs";
data-mesher = {
url = "git+https://git.clan.lol/clan/data-mesher";
inputs = {
flake-parts.follows = "flake-parts";
nixpkgs.follows = "nixpkgs";
systems.follows = "systems";
treefmt-nix.follows = "treefmt-nix";
};
};
};
outputs =

View File

@@ -23,6 +23,7 @@
"*.clan-flake"
"*.code-workspace"
"*.pub"
"*.priv"
"*.typed"
"*.age"
"*.list"
@@ -37,6 +38,7 @@
# prettier messes up our mkdocs flavoured markdown
"*.md"
"checks/data-mesher/vars/*"
"checks/lib/ssh/privkey"
"checks/lib/ssh/pubkey"
"checks/matrix-synapse/synapse-registration_shared_secret"

72
lib/README.md Normal file
View File

@@ -0,0 +1,72 @@
# ClanLib
This folder is supposed to contain clan-specific Nix functions.
Such as:
- build-clan function
- select
- build-inventory function
- json-schema-converter
## Structure
Similar to `nixpkgs/lib` this produces a recursive attribute set in a fixed-point.
Functions within lib can depend on each other to create new abstractions.
### Conventions
Note: This is not consistently enforced yet.
If you start a new feature, or refactor or touch existing ones, please help us move towards the structure illustrated below.
A single feature-set/module may be organized like this:
```nix
# ↓ The final clanLib
{lib, clanLib, ...}:
# ↓ portion to add to clanLib
{
inventory.resolveTags = tags: inventory.machines; # implementation
inventory.buildMachines = x: clanLib.inventory.resolveTags x; # implementation
}
```
Every bigger feature should live in a subfolder with the feature name.
It should contain two files:
- `impl.nix`
- `test.nix`
- Everything else may be adapted as needed.
Example file tree:
```sh
.
├── default.nix
├── feature_foo
│ ├── impl.nix
│ └── test.nix
└── feature_bar
├── impl.nix
├── complex-subfeature
│ ├── impl.nix
│ └── test.nix
├── testless-subfeature # <- We immediately see that this feature is not tested on itself.
│ └── impl.nix
└── test.nix
```
```nix
# default.nix
{lib, clanLib, ...}:
{
inventory.resolveTags = import ./resolveTags { inherit lib clanLib; };
}
```
## Testing
For testing we use [nix-unit](https://github.com/nix-community/nix-unit)
TODO: define a helper that automatically hooks up `tests` in `flake.legacyPackages` and a corresponding buildable `checks` attribute
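As a rough sketch (assuming the test file is imported with `lib` passed in, as the eval-test flake modules in this repository do), a minimal `test.nix` could look like this:
```nix
# feature_foo/test.nix -- a nix-unit test module (illustrative sketch)
{ lib, ... }:
{
  # nix-unit picks up attributes whose names start with "test"
  # and compares 'expr' against 'expected'.
  test_unique_strings = {
    expr = lib.unique [ "a" "b" "a" ];
    expected = [ "a" "b" ];
  };
}
```
Such a file can then be exposed via `flake.legacyPackages.<system>` and run with `nix-unit --extra-experimental-features flakes --flake .#legacyPackages.<system>.<attrName>`.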

View File

@@ -1,23 +0,0 @@
{
lib,
self,
...
}:
let
# Returns an attrset with inputs that have the attribute `clanModules`
inputsWithClanModules = lib.filterAttrs (
_name: value: builtins.hasAttr "clanModules" value
) self.inputs;
flattenedClanModules = lib.foldl' (
acc: input:
lib.mkMerge [
acc
input.clanModules
]
) { } (lib.attrValues inputsWithClanModules);
in
{
inventory.modules = flattenedClanModules;
}

View File

@@ -43,10 +43,7 @@ in
include = [
"flakeModules"
"inventory.json"
"lib/build-clan"
"lib/default.nix"
"lib/flake-module.nix"
"lib/inventory"
"lib"
"machines"
"nixosModules"
];

View File

@@ -142,11 +142,13 @@ in
inventoryFile = lib.mkOption { type = lib.types.raw; };
# The machine 'imports' generated by the inventory per machine
inventoryClass = lib.mkOption { type = lib.types.raw; };
# new attribute
distributedServices = lib.mkOption { type = lib.types.raw; };
# clan-core's modules
clanModules = lib.mkOption { type = lib.types.raw; };
source = lib.mkOption { type = lib.types.raw; };
meta = lib.mkOption { type = lib.types.raw; };
lib = lib.mkOption { type = lib.types.raw; };
clanLib = lib.mkOption { type = lib.types.raw; };
all-machines-json = lib.mkOption { type = lib.types.raw; };
machines = lib.mkOption { type = lib.types.raw; };
machinesFunc = lib.mkOption { type = lib.types.raw; };

View File

@@ -77,6 +77,9 @@ let
# Inherit the inventory assertions ?
# { inherit (mergedInventory) assertions; }
{ imports = inventoryClass.machines.${name}.machineImports or [ ]; }
# Import the distributed services
{ imports = config.clanInternals.distributedServices.allMachines.${name} or [ ]; }
(
{
# Settings
@@ -165,7 +168,6 @@ let
in
{
imports = [
./auto-imports.nix
# Merge the inventory file
{
inventory = _: {
@@ -199,7 +201,11 @@ in
clanInternals = {
moduleSchemas = clan-core.lib.modules.getModulesSchema config.inventory.modules;
inherit inventoryClass;
inherit (clan-core) clanModules;
distributedServices = import ../distributed-service/inventory-adapter.nix {
inherit lib inventory;
flake = config.self;
};
inherit (clan-core) clanModules clanLib;
inherit inventoryFile;
inventoryValuesPrios =
# Temporary workaround
@@ -211,9 +217,6 @@ in
templates = config.templates;
inventory = config.inventory;
meta = config.inventory.meta;
lib = {
inherit (clan-core.lib) select;
};
source = "${clan-core}";

View File

@@ -1,25 +1,35 @@
{
lib,
clan-core,
self,
nixpkgs,
...
}:
let
# Produces the
# 'clanLib' attribute set
# Wrapped with fix, so we can depend on other clanLib functions without passing the whole flake
lib.fix (clanLib: {
# TODO:
# Some bad lib functions that depend on something in 'self'.
# We should reduce the dependency on 'self' aka the 'flake' object
# This makes it easier to test
# most of the time passing the whole flake is unnecessary
callLib = file: args: import file { inherit lib clanLib; } // args;
evalClan = import ./eval-clan-modules {
inherit clan-core lib;
inherit lib;
clan-core = self;
pkgs = nixpkgs.legacyPackages.x86_64-linux;
};
in
{
inherit (evalClan) evalClanModules evalClanModulesWithRoles;
buildClan = import ./build-clan { inherit lib nixpkgs clan-core; };
buildClan = import ./build-clan {
inherit lib nixpkgs;
clan-core = self;
};
# ------------------------------------
# Lib functions that don't depend on 'self'
inventory = clanLib.callLib ./inventory { };
modules = clanLib.callLib ./frontmatter { };
facts = import ./facts.nix { inherit lib; };
inventory = import ./inventory { inherit lib clan-core; };
values = import ./values { inherit lib; };
jsonschema = import ./jsonschema { inherit lib; };
modules = import ./frontmatter {
inherit lib;
self = clan-core;
};
select = import ./select.nix;
}
})

View File

@@ -0,0 +1,33 @@
{ self, inputs, ... }:
let
inputOverrides = builtins.concatStringsSep " " (
builtins.map (input: " --override-input ${input} ${inputs.${input}}") (builtins.attrNames inputs)
);
in
{
perSystem =
{
pkgs,
lib,
system,
...
}:
{
# Run: nix-unit --extra-experimental-features flakes --flake .#legacyPackages.x86_64-linux.<attrName>
legacyPackages.evalTest-distributedServices = import ./tests {
inherit lib self;
};
checks = {
lib-distributedServices-eval = pkgs.runCommand "tests" { nativeBuildInputs = [ pkgs.nix-unit ]; } ''
export HOME="$(realpath .)"
nix-unit --eval-store "$HOME" \
--extra-experimental-features flakes \
${inputOverrides} \
--flake ${self}#legacyPackages.${system}.evalTest-distributedServices
touch $out
'';
};
};
}

View File

@@ -0,0 +1,199 @@
# Adapter function between the inventory.instances and the clan.service module
#
# Data flow:
# - inventory.instances -> Adapter -> clan.service module -> Service Resources (i.e. NixosModules per Machine, Vars per Service, etc.)
#
# What this file does:
#
# - Resolves the [Module] to an actual module-path and imports it.
# - Groups together all the same modules into a single import and creates all instances for it.
# - Resolves the inventory tags into machines. Tags don't exist at the service level.
# Also combines the settings for 'machines' and 'tags'.
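#
# Illustrative example (instance, machine, and module names are hypothetical):
#
#   inventory.instances."net-1" = {
#     module.name = "my-service";              # resolved from 'inventory.modules' when no 'input' is given
#     roles.peer.tags.all = { };               # tag membership, resolved to concrete machine names
#     roles.admin.machines.jon.settings = { };
#   };
#
# All instances that resolve to the same module are grouped together and
# evaluated in a single pass (see 'grouped' and 'evals' below).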
{
lib,
# This is used to resolve the module imports from 'flake.inputs'
flake,
# The clan inventory
inventory,
}:
let
# Returns the list of machine names
# { ... } -> [ string ]
resolveTags =
{
# Available InventoryMachines :: { {name} :: { tags = [ string ]; }; }
machines,
# Requested members :: { machines, tags }
# Those will be resolved against the available machines
members,
# Not needed for resolution - only for error reporting
roleName,
instanceName,
}:
{
machines =
members.machines or [ ]
++ (builtins.foldl' (
acc: tag:
let
# For error printing
availableTags = lib.foldlAttrs (
acc: _: v:
v.tags or [ ] ++ acc
) [ ] (machines);
tagMembers = builtins.attrNames (lib.filterAttrs (_n: v: builtins.elem tag v.tags or [ ]) machines);
in
if tagMembers == [ ] then
lib.warn ''
Service instance '${instanceName}': - ${roleName} tags: no machine with tag '${tag}' found.
Available tags: ${builtins.toJSON (lib.unique availableTags)}
'' acc
else
acc ++ tagMembers
) [ ] members.tags or [ ]);
};
machineHasTag = machineName: tagName: lib.elem tagName inventory.machines.${machineName}.tags;
# map the instances into the module
importedModuleWithInstances = lib.mapAttrs (
instanceName: instance:
let
# TODO:
resolvedModuleSet =
# If module.input is not set, take the modules defined in this flake
# Otherwise it's an external input which provides the modules via its 'clan.modules' attribute
if instance.module.input == null then
inventory.modules
else
let
input =
flake.inputs.${instance.module.input} or (throw ''
Flake doesn't provide input with name '${instance.module.input}'
Choose one of the following inputs:
- ${
builtins.concatStringsSep "\n- " (
lib.attrNames (lib.filterAttrs (_name: input: input ? clan) flake.inputs)
)
}
To import a local module from 'inventory.modules' remove the 'input' attribute from the module definition
Remove the following line from the module definition:
...
- module.input = "${instance.module.input}"
'');
clanAttrs =
input.clan
or (throw "It seems the flake input ${instance.module.input} doesn't export any clan resources");
in
clanAttrs.modules;
resolvedModule =
resolvedModuleSet.${instance.module.name}
or (throw "flake doesn't provide clan-module with name ${instance.module.name}");
# Every instance includes machines via roles
# :: { client :: ... }
instanceRoles = lib.mapAttrs (
roleName: role:
let
resolvedMachines = resolveTags {
members = {
# Explicit members
machines = lib.attrNames role.machines;
# Resolved Members
tags = lib.attrNames role.tags;
};
inherit (inventory) machines;
inherit instanceName roleName;
};
in
# instances.<instanceName>.roles.<roleName> =
{
machines = lib.genAttrs resolvedMachines.machines (
machineName:
let
machineSettings = instance.roles.${roleName}.machines.${machineName}.settings or { };
settingsViaTags = lib.filterAttrs (
tagName: _: machineHasTag machineName tagName
) instance.roles.${roleName}.tags;
in
{
# TODO: Do we want to wrap settings with
# setDefaultModuleLocation "inventory.instances.${instanceName}.roles.${roleName}.tags.${tagName}";
settings = {
imports = [
machineSettings
] ++ lib.attrValues (lib.mapAttrs (_tagName: v: v.settings) settingsViaTags);
};
}
);
# Maps to settings for the role.
# In other words this sets the following path of a clan.service module:
# instances.<instanceName>.roles.<roleName>.settings
settings = role.settings;
}
) instance.roles;
in
{
inherit (instance) module;
inherit resolvedModule instanceRoles;
}
) inventory.instances;
# TODO: Eagerly check the _class of the resolved module
evals = lib.mapAttrs (
_module_ident: instances:
(lib.evalModules {
class = "clan.service";
modules =
[
./service-module.nix
# Import the resolved module
(builtins.head instances).instance.resolvedModule
]
# Include all the instances that correlate to the resolved module
++ (builtins.map (v: {
instances.${v.instanceName}.roles = v.instance.instanceRoles;
}) instances);
})
) grouped;
# Group the instances by the module they resolve to
# This is necessary to evaluate the module in a single pass
# :: { <module.input>_<module.name> :: [ { name, value } ] }
# Since 'perMachine' needs access to all the instances we should include them as a whole
grouped = lib.foldlAttrs (
acc: instanceName: instance:
let
inputName = if instance.module.input == null then "self" else instance.module.input;
id = inputName + "-" + instance.module.name;
in
acc
// {
${id} = acc.${id} or [ ] ++ [
{
inherit instanceName instance;
}
];
}
) { } importedModuleWithInstances;
# TODO: Return an attribute set of resources instead of a plain list of nixosModules
allMachines = lib.foldlAttrs (
acc: _name: eval:
acc
// lib.mapAttrs (
machineName: result: acc.${machineName} or [ ] ++ [ result.nixosModule ]
) eval.config.result.final
) { } evals;
in
{
inherit importedModuleWithInstances grouped;
inherit evals allMachines;
}

View File

@@ -0,0 +1,514 @@
{ lib, config, ... }:
let
inherit (lib) mkOption types;
inherit (types) attrsWith submoduleWith;
# TODO:
# Remove once this gets merged upstream; performs in O(n*log(n)) instead of O(n^2)
# https://github.com/NixOS/nixpkgs/pull/355616/files
uniqueStrings = list: builtins.attrNames (builtins.groupBy lib.id list);
checkInstanceRoles =
instanceName: instanceRoles:
let
unmatchedRoles = lib.filter (roleName: !lib.elem roleName (lib.attrNames config.roles)) (
lib.attrNames instanceRoles
);
in
if unmatchedRoles == [ ] then
true
else
throw ''
inventory instance: 'instances.${instanceName}' defines the following roles:
${builtins.toJSON unmatchedRoles}
But the clan-service module '${config.manifest.name}' defines roles:
${builtins.toJSON (lib.attrNames config.roles)}
'';
# checkInstanceSettings =
# instanceName: instanceSettings:
# let
# unmatchedRoles = 1;
# in
# unmatchedRoles;
/**
Merges the role- and machine-settings using the role interface
Arguments:
- roleName: The name of the role
- instanceName: The name of the instance
- settings: The settings of the machine. Leave empty to get the role settings
Returns: evalModules result
The caller is responsible to use .config or .extendModules
*/
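# Illustrative sketch (option names and values are hypothetical, not part of this module):
# given a role interface
#   roles.server.interface = { options.port = lib.mkOption { default = 80; }; };
# and an instance-level definition
#   instances.foo.roles.server.settings = { port = 8080; };
# then
#   evalMachineSettings { roleName = "server"; instanceName = "foo"; settings = { }; }
# evaluates to a configuration whose 'port' is 8080; a machine-specific module
# passed via the 'settings' argument is simply added to the same evaluation.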
# TODO: evaluate against the role.settings statically and use extendModules to get the machineSettings
# Doing this might improve performance
evalMachineSettings =
{
roleName,
instanceName,
machineName ? null,
settings,
}:
lib.evalModules {
# Prefix for better error reporting
# This prints the path where the option should be defined rather than the plain path within settings
# "The option `instances.foo.roles.server.machines.test.settings.<>' was accessed but has no value defined. Try setting the option."
prefix =
[
"instances"
instanceName
"roles"
roleName
]
++ (lib.optionals (machineName != null) [
"machines"
machineName
])
++ [ "settings" ];
# This may lead to better error reporting
# And catch errors if anyone tried to import i.e. a nixosConfiguration
# Set some class: i.e "network.server.settings"
class = lib.concatStringsSep "." [
config.manifest.name
roleName
"settings"
];
modules = [
(lib.setDefaultModuleLocation "Via clan.service module: roles.${roleName}.interface"
config.roles.${roleName}.interface
)
(lib.setDefaultModuleLocation "inventory.instances.${instanceName}.roles.${roleName}.settings"
config.instances.${instanceName}.roles.${roleName}.settings
)
settings
# Dont set the module location here
# This should already be set by the tags resolver
# config.instances.${instanceName}.roles.${roleName}.machines.${machineName}.settings
];
};
/**
Makes a module extensible
returning its config
and making it extensible via '__functor' polymorphism
Example:
```nix-repl
res = makeExtensibleConfig evalModules { modules = [ { options.foo = mkOption { default = 42; }; } ]; }
res
=>
{
foo = 42;
__functor = <function>;
}
# This allows to override using mkDefault, mkForce, etc.
res { foo = 100; }
=>
{
foo = 100;
__functor = <function>;
}
```
*/
makeExtensibleConfig =
f: args:
let
makeModuleExtensible =
eval:
eval.config
// {
__functor = _self: m: makeModuleExtensible (eval.extendModules { modules = lib.toList m; });
};
in
makeModuleExtensible (f args);
/**
Apply the settings to the instance
Takes a [ServiceInstance] :: { roles :: { roleName :: { machines :: { machineName :: { settings :: { ... } } } } } }
Returns the same object but evaluates the settings against the interface.
We need this because 'perMachine' shouldn't gain access to the raw deferred module.
*/
applySettings =
instanceName: instance:
lib.mapAttrs (roleName: role: {
machines = lib.mapAttrs (machineName: v: {
# TODO: evaluate the settings against the interface
# settings = (evalMachineSettings { inherit roleName instanceName; inherit (v) settings; }).config;
settings = (
makeExtensibleConfig evalMachineSettings {
inherit roleName instanceName machineName;
inherit (v) settings;
}
);
}) role.machines;
# TODO: evaluate the settings against the interface
settings = (
makeExtensibleConfig evalMachineSettings {
inherit roleName instanceName;
inherit (role) settings;
}
);
}) instance.roles;
in
{
options = {
instances = mkOption {
default = throw ''
The clan service module ${config.manifest.name} doesn't define any instances.
Did you forget to create instances via 'inventory.instances' ?
'';
type = attrsWith {
placeholder = "instanceName";
elemType = submoduleWith {
modules = [
(
{ name, ... }:
{
# options.settings = mkOption {
# description = "settings of 'instance': ${name}";
# default = {};
# apply = v: lib.seq (checkInstanceSettings name v) v;
# };
options.roles = mkOption {
default = throw ''
Instance '${name}' of service '${config.manifest.name}' must define members via 'roles'.
To include a machine:
'instances.${name}.roles.<role-name>.machines.<your-machine-name>' must be set.
'';
type = attrsWith {
placeholder = "roleName";
elemType = submoduleWith {
modules = [
(
{ ... }:
{
# instances.{instanceName}.roles.{roleName}.machines
options.machines = mkOption {
type = attrsWith {
placeholder = "machineName";
elemType = submoduleWith {
modules = [
(m: {
options.settings = mkOption {
type = types.raw;
description = "Settings of '${name}-machine': ${m.name}.";
default = { };
};
})
];
};
};
};
# instances.{instanceName}.roles.{roleName}.settings
# options._settings = mkOption { };
# options._settingsViaTags = mkOption { };
# A deferred module that combines _settingsViaTags with _settings
options.settings = mkOption {
type = types.raw;
description = "Settings of 'role': ${name}";
default = { };
};
}
)
];
};
};
apply = v: lib.seq (checkInstanceRoles name v) v;
};
}
)
];
};
};
};
manifest = mkOption {
description = "Meta information about this module itself";
type = submoduleWith {
modules = [
{
options = {
name = mkOption {
description = ''
The name of the module
Mainly used to create an error context while evaluating.
This helps to backtrack which module was included and where an error originally came from.
'';
type = types.str;
};
};
}
];
};
};
roles = mkOption {
default = throw ''
Role behavior of service '${config.manifest.name}' must be defined.
A 'clan.service' module should always define its behavior via 'roles'
---
To add the role:
`roles.client = {}`
To define multiple instance behavior:
`roles.client.perInstance = { ... }: {}`
'';
type = attrsWith {
placeholder = "roleName";
elemType = submoduleWith {
modules = [
(
{ name, ... }:
let
roleName = name;
in
{
options.interface = mkOption {
type = types.deferredModule;
# TODO: Default to an empty module
# need to test that the empty module can be evaluated to empty settings
default = { };
};
options.perInstance = mkOption {
type = types.deferredModuleWith {
staticModules = [
# Common output format
# As described by adr
# { nixosModule, services, ... }
(
{ ... }:
{
options.nixosModule = mkOption { default = { }; };
options.services = mkOption {
type = attrsWith {
placeholder = "serviceName";
elemType = submoduleWith {
modules = [ ./service-module.nix ];
};
};
default = { };
};
}
)
];
};
default = { };
apply =
/**
This apply transforms the module into a function that takes arguments and returns an evaluated module
The arguments of the function are determined by its scope:
-> 'perInstance' maps over all instances and over all machines hence it takes 'instanceName' and 'machineName' as iterator arguments
*/
v: instanceName: machineName:
(lib.evalModules {
specialArgs = {
inherit instanceName;
machine = {
name = machineName;
roles = applySettings instanceName config.instances.${instanceName};
};
settings = (
makeExtensibleConfig evalMachineSettings {
inherit roleName instanceName machineName;
settings =
config.instances.${instanceName}.roles.${roleName}.machines.${machineName}.settings or { };
}
);
};
modules = [ v ];
}).config;
};
}
)
];
};
};
};
perMachine = mkOption {
type = types.deferredModuleWith {
staticModules = [
# Common output format
# As described by adr
# { nixosModule, services, ... }
(
{ ... }:
{
options.nixosModule = mkOption { default = { }; };
options.services = mkOption {
type = attrsWith {
placeholder = "serviceName";
elemType = submoduleWith {
modules = [ ./service-module.nix ];
};
};
default = { };
};
}
)
];
};
default = { };
apply =
v: machineName: machineScope:
(lib.evalModules {
specialArgs = {
/**
This apply transforms the module into a function that takes arguments and returns an evaluated module
The arguments of the function are determined by its scope:
-> 'perMachine' maps over all machines of a service 'machineName' and a helper 'scope' (some aggregated attributes) as iterator arguments
The 'scope' attribute is used to collect the 'roles' of all 'instances' where the machine is part of and inject both into the specialArgs
*/
machine = {
name = machineName;
roles =
let
collectRoles =
instances:
lib.foldlAttrs (
r: _instanceName: instance:
r
++ lib.foldlAttrs (
r2: roleName: _role:
r2 ++ [ roleName ]
) [ ] instance.roles
) [ ] instances;
in
uniqueStrings (collectRoles machineScope.instances);
};
inherit (machineScope) instances;
# There are no machine settings.
# Settings are always role-specific; having settings that apply to a machine globally would mean merging all role and all instance settings into a single module.
# But that will likely cause conflicts because it is inherently wrong.
settings = throw ''
'perMachine' doesn't have a 'settings' argument.
Alternatives:
- 'instances.<instanceName>.roles.<roleName>.settings' should be used instead.
- 'instances.<instanceName>.roles.<roleName>.machines.<machineName>.settings' should be used instead.
If that is insufficient, you might also consider using 'roles.<roleName>.perInstance' instead of 'perMachine'.
'';
};
modules = [ v ];
}).config;
};
# ---
# Place the result in _module.result to mark them as "internal" and discourage usage/overrides
#
# ---
# Intermediate result by mapping over the 'roles', 'instances', and 'machines'.
# During this step the 'perMachine' and 'perInstance' are applied.
# The result-set for a single machine can then be found by collecting all 'nixosModules' recursively.
result.allRoles = mkOption {
readOnly = true;
default = lib.mapAttrs (roleName: roleCfg: {
allInstances = lib.mapAttrs (instanceName: instanceCfg: {
allMachines = lib.mapAttrs (
machineName: _machineCfg: roleCfg.perInstance instanceName machineName
) instanceCfg.roles.${roleName}.machines or { };
}) config.instances;
}) config.roles;
};
result.allMachines = mkOption {
readOnly = true;
default =
let
collectMachinesFromInstance =
instance:
uniqueStrings (
lib.foldlAttrs (
acc: _roleName: role:
acc ++ (lib.attrNames role.machines)
) [ ] instance.roles
);
# The service machines are defined by collecting all instance machines
serviceMachines = lib.foldlAttrs (
acc: instanceName: instance:
acc
// lib.genAttrs (collectMachinesFromInstance instance) (machineName:
# Store information why this machine is part of the service
# MachineOrigin :: { instances :: [ string ]; }
{
# Helper attribute recording which instances this machine is part of
instances = [ instanceName ] ++ acc.${machineName}.instances or [ ];
# All roles of the machine ?
roles = lib.foldlAttrs (
acc2: roleName: role:
if builtins.elem machineName (lib.attrNames role.machines) then acc2 ++ [ roleName ] else acc2
) [ ] instance.roles;
})
) { } config.instances;
allMachines = lib.mapAttrs (_machineName: MachineOrigin: {
# Filter out instances that the machine is not part of
instances = lib.mapAttrs (_n: v: { roles = v; }) (
lib.filterAttrs (instanceName: _: builtins.elem instanceName MachineOrigin.instances) (
# Instances with evaluated settings
lib.mapAttrs applySettings config.instances
)
);
}) serviceMachines;
in
# allMachines;
lib.mapAttrs config.perMachine allMachines;
};
result.final = mkOption {
readOnly = true;
default = lib.mapAttrs (
machineName: machineResult:
let
# config.result.allRoles.client.allInstances.bar.allMachines.test
# instanceResults = config.result.allRoles.client.allInstances.bar.allMachines.${machineName};
instanceResults = lib.foldlAttrs (
acc: roleName: role:
acc
++ lib.foldlAttrs (
acc: instanceName: instance:
if instance.allMachines.${machineName}.nixosModule or { } != { } then
acc
++ [
(lib.setDefaultModuleLocation
"Via instances.${instanceName}.roles.${roleName}.machines.${machineName}"
instance.allMachines.${machineName}.nixosModule
)
]
else
acc
) [ ] role.allInstances
) [ ] config.result.allRoles;
in
{
inherit instanceResults;
nixosModule = {
imports = [
# For error backtracing. This module was produced by the 'perMachine' function
# TODO: check if we need this or if it leads to better errors if we pass the underlying module locations
(lib.setDefaultModuleLocation "clan.service: ${config.manifest.name} - via perMachine" machineResult.nixosModule)
] ++ instanceResults;
};
}
) config.result.allMachines;
};
};
}
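
The result set above nests as result.allRoles.<roleName>.allInstances.<instanceName>.allMachines.<machineName>, and result.final.<machineName>.nixosModule combines the perMachine module with all matching per-instance modules. A minimal sketch of reading it, assuming an already evaluated service bound to `serviceEval`; the role/instance/machine names "client", "bar" and "test" are illustrative:

let
  # per-instance result of machine "test" in role "client", instance "bar"
  perInstance = serviceEval.config.result.allRoles.client.allInstances.bar.allMachines.test;
  # final module for machine "test": the perMachine module plus all matching per-instance modules
  finalModule = serviceEval.config.result.final.test.nixosModule;
in
{
  inherit perInstance finalModule;
}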

View File

@@ -0,0 +1,327 @@
{
lib,
...
}:
let
inherit (lib)
evalModules
;
evalInventory =
m:
(evalModules {
# Static modules
modules = [
../../inventory/build-inventory/interface.nix
{
modules.test = { };
}
m
];
}).config;
flakeFixture = {
inputs = { };
};
callInventoryAdapter =
inventoryModule:
import ../inventory-adapter.nix {
inherit lib;
flake = flakeFixture;
inventory = evalInventory inventoryModule;
};
in
{
test_simple =
let
res = callInventoryAdapter {
# Authored module
# A minimal module looks like this
# It isn't exactly doing anything but it's a valid module that produces an output
modules."simple-module" = {
_class = "clan.service";
manifest = {
name = "netwitness";
};
};
# User config
instances."instance_foo" = {
module = {
name = "simple-module";
};
};
};
in
{
# Test that the module is mapped into the output
# We might change the attribute name in the future
expr = res.evals ? "self-simple-module";
expected = true;
};
# A module can be imported multiple times
# A module can also have multiple instances within the same module
# This means modules must be grouped together and imported once
# All instances should be included within one evaluation to make all of them available
test_module_grouping =
let
res = callInventoryAdapter {
# Authored module
# A minimal module looks like this
# It isn't exactly doing anything but it's a valid module that produces an output
modules."A" = {
_class = "clan.service";
manifest = {
name = "A-name";
};
perMachine = { }: { };
};
modules."B" = {
_class = "clan.service";
manifest = {
name = "B-name";
};
perMachine = { }: { };
};
# User config
instances."instance_foo" = {
module = {
name = "A";
};
};
instances."instance_bar" = {
module = {
name = "B";
};
};
instances."instance_baz" = {
module = {
name = "A";
};
};
};
in
{
# Test that the module is mapped into the output
# We might change the attribute name in the future
expr = lib.mapAttrs (_n: v: builtins.length v) res.grouped;
expected = {
self-A = 2;
self-B = 1;
};
};
test_creates_all_instances =
let
res = callInventoryAdapter {
# Authored module
# A minimal module looks like this
# It isn't exactly doing anything but it's a valid module that produces an output
modules."A" = {
_class = "clan.service";
manifest = {
name = "network";
};
perMachine = { }: { };
};
instances."instance_foo" = {
module = {
name = "A";
};
};
instances."instance_bar" = {
module = {
name = "A";
};
};
instances."instance_zaza" = {
module = {
name = "B";
};
};
};
in
{
# Test that the module is mapped into the output
# We might change the attribute name in the future
expr = lib.attrNames res.evals.self-A.config.instances;
expected = [
"instance_bar"
"instance_foo"
];
};
# Membership via roles
test_add_machines_directly =
let
res = callInventoryAdapter {
# Authored module
# A minimal module looks like this
# It isn't exactly doing anything but it's a valid module that produces an output
modules."A" = {
_class = "clan.service";
manifest = {
name = "network";
};
# Define a role without special behavior
roles.peer = { };
# perMachine = {}: {};
};
machines = {
jon = { };
sara = { };
hxi = { };
};
instances."instance_foo" = {
module = {
name = "A";
};
roles.peer.machines.jon = { };
};
instances."instance_bar" = {
module = {
name = "A";
};
roles.peer.machines.sara = { };
};
instances."instance_zaza" = {
module = {
name = "B";
};
roles.peer.tags.all = { };
};
};
in
{
# Test that the module is mapped into the output
# We might change the attribute name in the future
expr = lib.attrNames res.evals.self-A.config.result.allMachines;
expected = [
"jon"
"sara"
];
};
# Membership via tags
test_add_machines_via_tags =
let
res = callInventoryAdapter {
# Authored module
# A minimal module looks like this
# It isn't exactly doing anything but it's a valid module that produces an output
modules."A" = {
_class = "clan.service";
manifest = {
name = "network";
};
# Define a role without special behavior
roles.peer = { };
# perMachine = {}: {};
};
machines = {
jon = {
tags = [ "foo" ];
};
sara = {
tags = [ "foo" ];
};
hxi = { };
};
instances."instance_foo" = {
module = {
name = "A";
};
roles.peer.tags.foo = { };
};
instances."instance_zaza" = {
module = {
name = "B";
};
roles.peer.tags.all = { };
};
};
in
{
# Test that the module is mapped into the output
# We might change the attribute name in the future
expr = lib.attrNames res.evals.self-A.config.result.allMachines;
expected = [
"jon"
"sara"
];
};
per_machine_args = import ./per_machine_args.nix { inherit lib callInventoryAdapter; };
# test_per_machine_receives_instances =
# let
# res = callInventoryAdapter {
# # Authored module
# # A minimal module looks like this
# # It isn't exactly doing anything but it's a valid module that produces an output
# modules."A" = {
# _class = "clan.service";
# manifest = {
# name = "network";
# };
# # Define a role without special behavior
# roles.peer = { };
# perMachine =
# { instances, ... }:
# {
# nixosModule = instances;
# };
# };
# machines = {
# jon = { };
# sara = { };
# };
# instances."instance_foo" = {
# module = {
# name = "A";
# };
# roles.peer.machines.jon = { };
# };
# instances."instance_bar" = {
# module = {
# name = "A";
# };
# roles.peer.machines.sara = { };
# };
# instances."instance_zaza" = {
# module = {
# name = "B";
# };
# roles.peer.tags.all = { };
# };
# };
# in
# {
# expr = {
# hasMachineSettings =
# res.evals.self-A.config.result.allMachines.jon.nixosModule. # { {instanceName} :: { roles :: { {roleName} :: { machines :: { {machineName} :: { settings :: {} } } } } } }
# instance_foo.roles.peer.machines.jon ? settings;
# machineSettingsEmpty =
# lib.filterAttrs (n: _v: n != "__functor" ) res.evals.self-A.config.result.allMachines.jon.nixosModule. # { {instanceName} :: { roles :: { {roleName} :: { machines :: { {machineName} :: { settings :: {} } } } } } }
# instance_foo.roles.peer.machines.jon.settings;
# hasRoleSettings =
# res.evals.self-A.config.result.allMachines.jon.nixosModule. # { {instanceName} :: { roles :: { {roleName} :: { machines :: { {machineName} :: { settings :: {} } } } } } }
# instance_foo.roles.peer ? settings;
# roleSettingsEmpty =
# lib.filterAttrs (n: _v: n != "__functor" ) res.evals.self-A.config.result.allMachines.jon.nixosModule. # { {instanceName} :: { roles :: { {roleName} :: { machines :: { {machineName} :: { settings :: {} } } } } } }
# instance_foo.roles.peer.settings;
# };
# expected = {
# hasMachineSettings = true;
# machineSettingsEmpty = {};
# hasRoleSettings = true;
# roleSettingsEmpty = {};
# };
# };
}

View File

@@ -0,0 +1,107 @@
{ lib, callInventoryAdapter }:
let # Authored module
# A minimal module looks like this
# It isn't exactly doing anything but it's a valid module that produces an output
modules."A" = {
_class = "clan.service";
manifest = {
name = "network";
};
# Define two roles with unmergeable interfaces
# Both define some 'timeout' but with completely different types.
roles.peer.interface =
{ lib, ... }:
{
options.timeout = lib.mkOption {
type = lib.types.str;
};
};
roles.server.interface =
{ lib, ... }:
{
options.timeout = lib.mkOption {
type = lib.types.submodule;
};
};
perMachine =
{ instances, ... }:
{
nixosModule = instances;
};
};
machines = {
jon = { };
sara = { };
};
res = callInventoryAdapter {
inherit modules machines;
instances."instance_foo" = {
module = {
name = "A";
};
roles.peer.machines.jon = {
settings.timeout = lib.mkForce "foo-peer-jon";
};
roles.peer = {
settings.timeout = "foo-peer";
};
};
instances."instance_bar" = {
module = {
name = "A";
};
roles.peer.machines.jon = {
settings.timeout = "bar-peer-jon";
};
};
instances."instance_zaza" = {
module = {
name = "B";
};
roles.peer.tags.all = { };
};
};
filterInternals = lib.filterAttrs (n: _v: !lib.hasPrefix "_" n);
in
{
# settings should evaluate
test_per_machine_receives_instance_settings = {
expr = {
hasMachineSettings =
res.evals.self-A.config.result.allMachines.jon.nixosModule.instance_foo.roles.peer.machines.jon
? settings;
# settings are specific.
# Below we access:
# instance = instance_foo
# roles = peer
# machines = jon
specificMachineSettings = filterInternals res.evals.self-A.config.result.allMachines.jon.nixosModule.instance_foo.roles.peer.machines.jon.settings;
hasRoleSettings =
res.evals.self-A.config.result.allMachines.jon.nixosModule.instance_foo.roles.peer ? settings;
# settings are specific.
# Below we access:
# instance = instance_foo
# roles = peer
# machines = *
specificRoleSettings = filterInternals res.evals.self-A.config.result.allMachines.jon.nixosModule.instance_foo.roles.peer.settings;
};
expected = {
hasMachineSettings = true;
specificMachineSettings = {
timeout = "foo-peer-jon";
};
hasRoleSettings = true;
specificRoleSettings = {
timeout = "foo-peer";
};
};
};
}

View File

@@ -10,16 +10,20 @@ let
pathExists
;
in
{
rec {
# We should remove this.
# It would enforce treating at least 'lib' as a module as a whole
imports = filter pathExists [
./jsonschema/flake-module.nix
./inventory/flake-module.nix
./build-clan/flake-module.nix
./values/flake-module.nix
./distributed-service/flake-module.nix
];
flake.lib = import ./default.nix {
inherit lib inputs;
flake.clanLib = import ./default.nix {
inherit lib inputs self;
inherit (inputs) nixpkgs;
clan-core = self;
};
# TODO: remove this legacy alias
flake.lib = flake.clanLib;
}
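
Downstream flakes can reach the library through either name while the legacy alias exists. A minimal sketch, assuming `clan-core` refers to this flake as an input (attribute names as defined above):

{ clan-core, ... }:
{
  # both resolve to the same attribute set until the legacy alias is removed
  viaNewName = clan-core.clanLib;
  viaLegacyAlias = clan-core.lib;
}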

View File

@@ -1,9 +1,9 @@
{ lib, self }:
{ lib, clanLib }:
let
# Trim the .nix extension from a filename
trimExtension = name: builtins.substring 0 (builtins.stringLength name - 4) name;
jsonWithoutHeader = self.lib.jsonschema {
jsonWithoutHeader = clanLib.jsonschema {
includeDefaults = true;
header = { };
};
@@ -13,7 +13,7 @@ let
lib.mapAttrs (
_moduleName: rolesOptions:
lib.mapAttrs (_roleName: options: jsonWithoutHeader.parseOptions options { }) rolesOptions
) (self.lib.evalClanModulesWithRoles modules);
) (clanLib.evalClan.evalClanModulesWithRoles modules);
evalFrontmatter =
{

View File

@@ -1,7 +1,7 @@
{
lib,
config,
clan-core,
clanLib,
...
}:
let
@@ -43,8 +43,7 @@ let
checkService =
modulepath: serviceName:
builtins.elem "inventory"
(clan-core.lib.modules.getFrontmatter modulepath serviceName).features or [ ];
builtins.elem "inventory" (clanLib.modules.getFrontmatter modulepath serviceName).features or [ ];
compileMachine =
{ machineConfig }:
@@ -160,7 +159,7 @@ in
inherit
resolveTags
inventory
clan-core
clanLib
machineName
serviceConfigs
;

View File

@@ -3,7 +3,7 @@
config,
resolveTags,
inventory,
clan-core,
clanLib,
machineName,
serviceConfigs,
...
@@ -14,7 +14,7 @@ in
{
# Roles resolution
# : List String
supportedRoles = clan-core.lib.modules.getRoles inventory.modules serviceName;
supportedRoles = clanLib.modules.getRoles inventory.modules serviceName;
matchedRoles = builtins.attrNames (
lib.filterAttrs (_: ms: builtins.elem machineName ms) config.machinesRoles
);
@@ -56,7 +56,7 @@ in
assertions = lib.concatMapAttrs (
instanceName: resolvedRoles:
clan-core.lib.modules.checkConstraints {
clanLib.modules.checkConstraints {
moduleName = serviceName;
allModules = inventory.modules;
inherit resolvedRoles instanceName;

View File

@@ -1,6 +1,6 @@
# Generate partial NixOS configurations for every machine in the inventory
# This function is responsible for generating the module configuration for every machine in the inventory.
{ lib, clan-core }:
{ lib, clanLib }:
let
/*
Returns a set with NixOS configuration for every machine in the inventory.
@@ -11,7 +11,7 @@ let
{ inventory, directory }:
(lib.evalModules {
specialArgs = {
inherit clan-core;
inherit clanLib;
};
modules = [
./builder

View File

@@ -103,7 +103,9 @@ in
default = options;
};
modules = lib.mkOption {
type = types.attrsOf types.path;
# Don't define the type yet
# We manually transform the value with types.deferredModule.merge later to keep them serializable
type = types.attrsOf types.raw;
default = { };
defaultText = "clanModules of clan-core";
description = ''
@@ -275,7 +277,79 @@ in
)
);
};
instances = lib.mkOption {
# Keep as internal until all de-/serialization issues are resolved
visible = false;
internal = true;
description = "Multi host service module instances";
type = types.attrsOf (
types.submodule {
options = {
# ModuleSpec
module = lib.mkOption {
type = types.submodule {
options.input = lib.mkOption {
type = types.nullOr types.str;
default = null;
defaultText = "Name of the input. Default to 'null' which means the module is local";
description = ''
Name of the input. Default to 'null' which means the module is local
'';
};
options.name = lib.mkOption {
type = types.str;
};
};
};
roles = lib.mkOption {
default = { };
type = types.attrsOf (
types.submodule {
options = {
# TODO: deduplicate
machines = lib.mkOption {
type = types.attrsOf (
types.submodule {
options.settings = lib.mkOption {
default = { };
# Don't transform the value with `types.deferredModule` here. We need to keep it JSON serializable
# TODO: We need a custom serializer for deferredModule
type = types.deferredModule;
};
}
);
default = { };
};
tags = lib.mkOption {
type = types.attrsOf (
types.submodule {
options.settings = lib.mkOption {
default = { };
type = types.deferredModule;
};
}
);
default = { };
};
settings = lib.mkOption {
default = { };
type = types.deferredModule;
};
};
}
);
};
};
}
);
default = { };
apply =
v:
if v == { } then
v
else
lib.warn "Inventory.instances and related features are still under development. Please use with care." v;
};
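# Illustrative shape of a single entry, mirroring the options declared above
# (the module, instance, machine and tag names are placeholders taken from the tests):
#
#   instances."instance_foo" = {
#     module = { name = "A"; input = null; };   # 'null' means the module is local
#     roles.peer.settings = { };                # settings for every machine in the role
#     roles.peer.machines.jon.settings = { };   # settings for machine "jon" only
#     roles.peer.tags.all.settings = { };       # settings for all machines tagged "all"
#   };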
services = lib.mkOption {
description = ''
Services of the inventory.

View File

@@ -1,5 +1,5 @@
{ lib, clan-core }:
{ lib, clanLib }:
{
inherit (import ./build-inventory { inherit lib clan-core; }) buildInventory;
inherit (import ./build-inventory { inherit lib clanLib; }) buildInventory;
interface = ./build-inventory/interface.nix;
}

View File

@@ -50,11 +50,7 @@ in
self.filter {
include = [
"flakeModules"
"lib/default.nix"
"lib/flake-module.nix"
"lib/inventory"
"lib/constraints"
"lib/frontmatter"
"lib"
"clanModules/flake-module.nix"
"clanModules/borgbackup"
];

View File

@@ -1,4 +0,0 @@
---
features = [ "inventory" ]
---
Description

View File

@@ -1,6 +0,0 @@
{ ... }:
{
_class = "clan";
perInstance = { };
perService = { };
}

View File

@@ -2,8 +2,8 @@
let
inventory = (
import ../build-inventory {
inherit lib clan-core;
inherit lib;
clanLib = clan-core.clanLib;
}
);
inherit (inventory) buildInventory;
@@ -17,11 +17,9 @@ in
A = { };
};
services = {
clanModule = { };
legacyModule = { };
};
modules = {
clanModule = ./clanModule;
legacyModule = ./legacyModule;
};
};
@@ -30,17 +28,11 @@ in
in
{
expr = {
clanModule = lib.filterAttrs (
name: _: name == "isClanModule"
) compiled.machines.A.compiledServices.clanModule;
legacyModule = lib.filterAttrs (
name: _: name == "isClanModule"
) compiled.machines.A.compiledServices.legacyModule;
};
expected = {
clanModule = {
isClanModule = true;
};
legacyModule = {
isClanModule = false;
};

View File

@@ -1,10 +1,9 @@
{
lib,
config,
clan-core,
...
}:
{
# Just some random stuff
config.user.user = lib.mapAttrs clan-core.users.root;
options.test = lib.mapAttrs clan-core;
}

View File

@@ -28,9 +28,7 @@ in
self.filter {
include = [
"flakeModules"
"lib/default.nix"
"lib/flake-module.nix"
"lib/values"
"lib"
];
}
}#legacyPackages.${system}.evalTests-values

View File

@@ -6,11 +6,12 @@
{
config.clan.core.vars.settings = lib.mkIf (config.clan.core.vars.settings.secretStore == "vm") {
fileModule = file: {
path =
path = lib.mkIf (file.config.secret == true) (
if file.config.neededFor == "partitioning" then
"/run/partitioning-secrets/${file.config.generatorName}/${file.config.name}"
else
"/etc/secrets/${file.config.generatorName}/${file.config.name}";
"/etc/secrets/${file.config.generatorName}/${file.config.name}"
);
};
secretModule = "clan_cli.vars.secret_modules.vm";
};

View File

@@ -12,6 +12,7 @@
inputs.sops-nix.nixosModules.sops
inputs.nixos-facter-modules.nixosModules.facter
inputs.disko.nixosModules.default
inputs.data-mesher.nixosModules.data-mesher
./clanCore
(
{ pkgs, lib, ... }:

View File

@@ -15,6 +15,7 @@ from clan_cli.nix import (
nix_command,
nix_config,
nix_metadata,
nix_test_store,
)
log = logging.getLogger(__name__)
@@ -85,7 +86,7 @@ class FlakeCacheEntry:
self.selector = {int(selectors[0])}
selector = int(selectors[0])
elif isinstance(selectors[0], str):
self.selector = {(selectors[0])}
self.selector = {selectors[0]}
selector = selectors[0]
elif isinstance(selectors[0], AllSelector):
self.selector = AllSelector()
@@ -153,7 +154,9 @@ class FlakeCacheEntry:
self.value = value
def insert(
self, value: str | float | dict[str, Any] | list[Any], selectors: list[Selector]
self,
value: str | float | dict[str, Any] | list[Any] | None,
selectors: list[Selector],
) -> None:
selector: Selector
if selectors == []:
@@ -243,6 +246,12 @@ class FlakeCacheEntry:
if self.value != value:
msg = "value mismatch in cache, something is fishy"
raise TypeError(msg)
elif value is None:
if self.value is not None:
msg = "value mismatch in cache, something is fishy"
raise TypeError(msg)
else:
msg = f"Cannot insert value of type {type(value)} into cache"
raise TypeError(msg)
@@ -472,15 +481,21 @@ class Flake:
flake = builtins.getFlake("path:{self.store_path}?narHash={self.hash}");
in
flake.inputs.nixpkgs.legacyPackages.{config["system"]}.writeText "clan-flake-select" (
builtins.toJSON [ ({" ".join([f"flake.clanInternals.lib.select ''{attr}'' flake" for attr in selectors])}) ]
builtins.toJSON [ ({" ".join([f"flake.clanInternals.clanLib.select ''{attr}'' flake" for attr in selectors])}) ]
)
"""
if tmp_store := nix_test_store():
nix_options += ["--store", str(tmp_store)]
nix_options.append("--impure")
build_output = Path(
run(
nix_build(["--expr", nix_code, *nix_options]), RunOpts(log=Log.NONE)
).stdout.strip()
)
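# When a chroot test store is active, re-root the build output under it
# (/nix/store/... -> <test store>/nix/store/...).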
if tmp_store:
build_output = tmp_store.joinpath(*build_output.parts[1:])
outputs = json.loads(build_output.read_text())
if len(outputs) != len(selectors):
msg = f"flake_prepare_cache: Expected {len(outputs)} outputs, got {len(outputs)}"

View File

@@ -31,6 +31,6 @@ Service = dict[str, Any]
class Inventory(TypedDict):
machines: NotRequired[dict[str, Machine]]
meta: NotRequired[Meta]
modules: NotRequired[dict[str, str]]
modules: NotRequired[dict[str, Any]]
services: NotRequired[dict[str, Service]]
tags: NotRequired[dict[str, list[str]]]
tags: NotRequired[dict[str, Any]]

View File

@@ -5,4 +5,4 @@ set -euo pipefail
jsonSchema=$(nix build .#schemas.inventory-schema-abstract --print-out-paths)/schema.json
SCRIPT_DIR=$(dirname "$0")
cd "$SCRIPT_DIR"
nix run .#classgen -- "$jsonSchema" "../../../clan-cli/clan_cli/inventory/classes.py" --stop-at "Service"
nix run .#classgen -- "$jsonSchema" "../../../clan-cli/clan_cli/inventory/classes.py"

View File

@@ -135,6 +135,11 @@ def generate_machine_hardware_info(opts: HardwareGenerateOptions) -> HardwareCon
]
host = machine.target_host
# HACK: to make non-root user work
if host.user != "root":
config_command.insert(0, "sudo")
cmd = nix_shell(
[
"nixpkgs#openssh",

View File

@@ -3,7 +3,7 @@ from pathlib import Path
from clan_cli.cmd import run
from clan_cli.flake import Flake
from clan_cli.nix import nix_build, nix_config
from clan_cli.nix import nix_build, nix_config, nix_test_store
from .machines import Machine
@@ -18,6 +18,9 @@ def get_all_machines(flake: Flake, nix_options: list[str]) -> list[Machine]:
).stdout.rstrip()
)
if test_store := nix_test_store():
json_path = test_store.joinpath(*json_path.parts[1:])
machines_json = json.loads(json_path.read_text())
machines = []

View File

@@ -10,7 +10,7 @@ from clan_cli.errors import ClanError
from clan_cli.facts import public_modules as facts_public_modules
from clan_cli.facts import secret_modules as facts_secret_modules
from clan_cli.flake import Flake
from clan_cli.nix import nix_config
from clan_cli.nix import nix_config, nix_test_store
from clan_cli.ssh.host import Host
from clan_cli.ssh.host_key import HostKeyCheck
from clan_cli.ssh.parse import parse_deployment_address
@@ -243,6 +243,8 @@ class Machine:
output = self.nix("build", attr, nix_options)
output = Path(output)
if tmp_store := nix_test_store():
output = tmp_store.joinpath(*output.parts[1:])
assert output.exists(), f"The output {output} doesn't exist"
if isinstance(output, Path):
return output

View File

@@ -152,6 +152,7 @@ def deploy_machines(machines: list[Machine]) -> None:
"--flake",
f"{path}#{machine.name}",
]
switch_cmd = ["nixos-rebuild", "switch", *nix_options]
test_cmd = ["nixos-rebuild", "test", *nix_options]
@@ -160,6 +161,10 @@ def deploy_machines(machines: list[Machine]) -> None:
switch_cmd.extend(["--target-host", target_host.target])
test_cmd.extend(["--target-host", target_host.target])
if target_host and target_host.user != "root":
switch_cmd.extend(["--use-remote-sudo"])
test_cmd.extend(["--use-remote-sudo"])
env = host.nix_ssh_env(None)
ret = host.run(
switch_cmd,
@@ -185,6 +190,7 @@ def deploy_machines(machines: list[Machine]) -> None:
test_cmd,
RunOpts(msg_color=MsgColor(stderr=AnsiColor.DEFAULT)),
extra_env=env,
become_root=True,
)
# retry nixos-rebuild switch if the first attempt failed
@@ -193,6 +199,7 @@ def deploy_machines(machines: list[Machine]) -> None:
switch_cmd,
RunOpts(msg_color=MsgColor(stderr=AnsiColor.DEFAULT)),
extra_env=env,
become_root=True,
)
with AsyncRuntime() as runtime:

View File

@@ -1,18 +1,22 @@
import json
import logging
import os
import tempfile
from pathlib import Path
from typing import Any
from clan_cli.cmd import run, run_no_stdout
from clan_cli.dirs import nixpkgs_flake, nixpkgs_source
from clan_cli.errors import ClanError
from clan_cli.locked_open import locked_open
log = logging.getLogger(__name__)
def nix_command(flags: list[str]) -> list[str]:
args = ["nix", "--extra-experimental-features", "nix-command flakes", *flags]
if store := nix_test_store():
args += ["--store", str(store)]
return args
@@ -57,6 +61,21 @@ def nix_config() -> dict[str, Any]:
return config
def nix_test_store() -> Path | None:
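# Returns the chroot test store path from CLAN_TEST_STORE, but only when running inside
# the Nix sandbox (IN_NIX_SANDBOX); the store directory is created if needed, guarded by
# the LOCK_NIX file lock (the test setup uses it to avoid concurrent 'nix flake lock' runs).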
store = os.environ.get("CLAN_TEST_STORE", None)
lock_nix = os.environ.get("LOCK_NIX", "")
if not lock_nix:
lock_nix = tempfile.NamedTemporaryFile().name # NOQA: SIM115
if not os.environ.get("IN_NIX_SANDBOX"):
return None
if store:
Path.mkdir(Path(store), exist_ok=True)
with locked_open(Path(lock_nix), "w"):
return Path(store)
return None
def nix_eval(flags: list[str]) -> list[str]:
default_flags = nix_command(
[

View File

@@ -1,21 +1,28 @@
import tarfile
from pathlib import Path
from shlex import quote
from tempfile import TemporaryDirectory
from clan_cli.cmd import Log, RunOpts
from clan_cli.cmd import run as run_local
from clan_cli.errors import ClanError
from clan_cli.ssh.host import Host
def upload(
host: Host,
local_src: Path, # must be a directory
local_src: Path,
remote_dest: Path, # must be a directory
file_user: str = "root",
file_group: str = "root",
dir_mode: int = 0o700,
file_mode: int = 0o400,
) -> None:
# Check if the remote destination is at least 3 directories deep
if len(remote_dest.parts) < 3:
msg = f"The remote destination must be at least 3 directories deep. Got: {remote_dest}. Reason: The directory will be deleted with 'rm -rf'."
raise ClanError(msg)
# Create the tarball from the temporary directory
with TemporaryDirectory(prefix="facts-upload-") as tardir:
tar_path = Path(tardir) / "upload.tar.gz"
@@ -55,50 +62,22 @@ def upload(
with local_src.open("rb") as f:
tar.addfile(tarinfo, f)
if local_src.is_dir():
cmd = [
*host.ssh_cmd(),
"rm",
"-r",
str(remote_dest),
";",
"mkdir",
"-m",
f"{dir_mode:o}",
"-p",
str(remote_dest),
"&&",
"tar",
"-C",
str(remote_dest),
"-xzf",
"-",
]
else:
# For single file, extract to parent directory and ensure correct name
cmd = [
*host.ssh_cmd(),
"rm",
"-f",
str(remote_dest),
";",
"mkdir",
"-m",
f"{dir_mode:o}",
"-p",
str(remote_dest.parent),
"&&",
"tar",
"-C",
str(remote_dest.parent),
"-xzf",
"-",
]
sudo = ""
if host.user != "root":
sudo = "sudo -- "
cmd = "rm -rf $0 && mkdir -m $1 -p $0 && tar -C $0 -xzf -"
# TODO accept `input` to be an IO object instead of bytes so that we don't have to read the tarfile into memory.
with tar_path.open("rb") as f:
run_local(
cmd,
[
*host.ssh_cmd(),
"--",
f"{sudo}bash -c {quote(cmd)}",
str(remote_dest),
f"{dir_mode:o}",
],
RunOpts(
input=f.read(),
log=Log.BOTH,

View File

@@ -17,7 +17,7 @@ from clan_cli.completions import (
from clan_cli.errors import ClanError
from clan_cli.git import commit_files
from clan_cli.machines.inventory import get_all_machines, get_selected_machines
from clan_cli.nix import nix_shell
from clan_cli.nix import nix_shell, nix_test_store
from clan_cli.vars._types import StoreBase
from .check import check_vars
@@ -39,7 +39,6 @@ class Generator:
name: str
files: list[Var] = field(default_factory=list)
share: bool = False
validation: str | None = None
prompts: list[Prompt] = field(default_factory=list)
dependencies: list[str] = field(default_factory=list)
@@ -62,7 +61,6 @@ class Generator:
name=data["name"],
share=data["share"],
files=[Var.from_json(data["name"], f) for f in data["files"].values()],
validation=data["validationHash"],
dependencies=data["dependencies"],
migrate_fact=data["migrateFact"],
prompts=[Prompt.from_json(p) for p in data["prompts"].values()],
@@ -76,8 +74,17 @@ class Generator:
)
return final_script
@property
def validation(self) -> str | None:
assert self._machine is not None
return self._machine.eval_nix(
f'config.clan.core.vars.generators."{self.name}".validationHash'
)
def bubblewrap_cmd(generator: str, tmpdir: Path) -> list[str]:
test_store = nix_test_store()
# fmt: off
return nix_shell(
[
@@ -89,6 +96,7 @@ def bubblewrap_cmd(generator: str, tmpdir: Path) -> list[str]:
"--unshare-all",
"--tmpfs", "/",
"--ro-bind", "/nix/store", "/nix/store",
*(["--ro-bind", str(test_store), str(test_store)] if test_store else []),
"--dev", "/dev",
# not allowed to bind procfs in some sandboxes
"--bind", str(tmpdir), str(tmpdir),
@@ -250,6 +258,8 @@ def execute_generator(
machine.flake_dir,
f"Update vars via generator {generator.name} for machine {machine.name}",
)
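# Flush the flake cache right after this generator's files are committed, so that
# following generators (and their validationHash) are evaluated against the new values.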
if len(files_to_commit) > 0:
machine.flush_caches()
def _ask_prompts(
@@ -453,8 +463,6 @@ def generate_vars_for_machine(
public_vars_store=machine.public_vars_store,
prompt_values=_ask_prompts(generator),
)
# flush caches to make sure the new secrets are available in evaluation
machine.flush_caches()
return True

View File

@@ -45,7 +45,7 @@ def ask(
text = f"Enter the value for {ident}:"
if label:
text = f"{label}"
log.info(f"Prompting value for {ident}")
if MOCK_PROMPT_RESPONSE:
return next(MOCK_PROMPT_RESPONSE)
match input_type:

View File

@@ -60,7 +60,7 @@ let
ln -sf ${nixpkgs'} $out/clan_cli/nixpkgs
cp -r ${../../templates} $out/clan_cli/templates
${classgen}/bin/classgen ${inventory-schema-abstract}/schema.json $out/clan_cli/inventory/classes.py --stop-at "Service"
${classgen}/bin/classgen ${inventory-schema-abstract}/schema.json $out/clan_cli/inventory/classes.py
'';
# Create a custom nixpkgs for use within the project
@@ -133,13 +133,12 @@ pythonRuntime.pkgs.buildPythonApplication {
runCommand "clan-pytest-without-core"
{
nativeBuildInputs = testDependencies;
requiredSystemFeatures = [ "recursive-nix" ];
closureInfo = pkgs.closureInfo {
rootPaths = [
templateDerivation
];
};
}
''
set -u -o pipefail
@@ -147,10 +146,12 @@ pythonRuntime.pkgs.buildPythonApplication {
chmod +w -R ./src
cd ./src
export IN_NIX_SANDBOX=1 PYTHONWARNINGS=error
export NIX_STATE_DIR=$TMPDIR/nix IN_NIX_SANDBOX=1 PYTHONWARNINGS=error
# required to prevent concurrent 'nix flake lock' operations
export CLAN_TEST_STORE=$TMPDIR/store
export LOCK_NIX=$TMPDIR/nix_lock
mkdir -p "$CLAN_TEST_STORE/nix/store"
# limit build cores to 16
jobs="$((NIX_BUILD_CORES>16 ? 16 : NIX_BUILD_CORES))"
@@ -158,24 +159,15 @@ pythonRuntime.pkgs.buildPythonApplication {
python -m pytest -m "not impure and not with_core" -n $jobs ./tests
touch $out
'';
}
// lib.optionalAttrs (!stdenv.isDarwin) {
clan-pytest-with-core =
runCommand "clan-pytest-with-core"
{
nativeBuildInputs = testDependencies;
requiredSystemFeatures = [ "recursive-nix" ];
closureInfo = pkgs.closureInfo {
rootPaths = [
templateDerivation
];
};
buildInputs = [
pkgs.bash
pkgs.coreutils
pkgs.jq.dev
pkgs.stdenv
pkgs.stdenvNoCC
# looks like Nix 2.26 fixes the profile creation race condition we were running into on Nix 2.24
# Switch this back to `pkgs.nix` when `pkgs.nix` is Nix 2.26+
(
@@ -185,6 +177,16 @@ pythonRuntime.pkgs.buildPythonApplication {
pkgs.nixVersions.latest
)
];
closureInfo = pkgs.closureInfo {
rootPaths = [
templateDerivation
pkgs.bash
pkgs.coreutils
pkgs.jq.dev
pkgs.stdenv
pkgs.stdenvNoCC
];
};
}
''
set -u -o pipefail
@@ -193,10 +195,16 @@ pythonRuntime.pkgs.buildPythonApplication {
cd ./src
export CLAN_CORE_PATH=${clan-core-path}
export NIX_STATE_DIR=$TMPDIR/nix
export IN_NIX_SANDBOX=1
export PYTHONWARNINGS=error
export CLAN_TEST_STORE=$TMPDIR/store
# required to prevent concurrent 'nix flake lock' operations
export LOCK_NIX=$TMPDIR/nix_lock
mkdir -p "$CLAN_TEST_STORE/nix/store"
mkdir -p "$CLAN_TEST_STORE/nix/var/nix/gcroots"
xargs cp --recursive --target "$CLAN_TEST_STORE/nix/store" < "$closureInfo/store-paths"
nix-store --load-db --store "$CLAN_TEST_STORE" < "$closureInfo/registration"
# limit build cores to 16
jobs="$((NIX_BUILD_CORES>16 ? 16 : NIX_BUILD_CORES))"

View File

@@ -169,7 +169,7 @@
];
installPhase = ''
${self'.packages.classgen}/bin/classgen ${self'.legacyPackages.schemas.inventory-schema-abstract}/schema.json ./clan_cli/inventory/classes.py --stop-at "Service"
${self'.packages.classgen}/bin/classgen ${self'.legacyPackages.schemas.inventory-schema-abstract}/schema.json ./clan_cli/inventory/classes.py
python docs.py reference
mkdir -p $out
@@ -188,7 +188,7 @@
];
installPhase = ''
${self'.packages.classgen}/bin/classgen ${self'.legacyPackages.schemas.inventory-schema-abstract}/schema.json ./clan_cli/inventory/classes.py --stop-at "Service"
${self'.packages.classgen}/bin/classgen ${self'.legacyPackages.schemas.inventory-schema-abstract}/schema.json ./clan_cli/inventory/classes.py
mkdir -p $out
# Retrieve python API Typescript types
python api.py > $out/API.json
@@ -214,7 +214,7 @@
classFile = "classes.py";
};
installPhase = ''
${self'.packages.classgen}/bin/classgen ${self'.legacyPackages.schemas.inventory-schema-abstract}/schema.json b_classes.py --stop-at "Service"
${self'.packages.classgen}/bin/classgen ${self'.legacyPackages.schemas.inventory-schema-abstract}/schema.json b_classes.py
file1=$classFile
file2=b_classes.py

View File

@@ -46,6 +46,6 @@ mkShell {
# Generate classes.py from inventory schema
# This file is in .gitignore
${self'.packages.classgen}/bin/classgen ${self'.legacyPackages.schemas.inventory-schema-abstract}/schema.json $PKG_ROOT/clan_cli/inventory/classes.py --stop-at "Service"
${self'.packages.classgen}/bin/classgen ${self'.legacyPackages.schemas.inventory-schema-abstract}/schema.json $PKG_ROOT/clan_cli/inventory/classes.py
'';
}

View File

@@ -13,6 +13,7 @@ import age_keys
import pytest
from clan_cli.dirs import TemplateType, clan_templates, nixpkgs_source
from clan_cli.locked_open import locked_open
from clan_cli.nix import nix_test_store
from fixture_error import FixtureError
from root import CLAN_CORE
from temporary_dir import TEMPDIR
@@ -289,6 +290,9 @@ def create_flake(
flake_nix = flake / "flake.nix"
# this is where we would install the sops key to, when updating
substitute(flake_nix, clan_core_flake, flake)
nix_options = []
if tmp_store := nix_test_store():
nix_options += ["--store", str(tmp_store)]
with locked_open(Path(lock_nix), "w"):
sp.run(
@@ -299,6 +303,7 @@ def create_flake(
flake,
"--extra-experimental-features",
"nix-command flakes",
*nix_options,
],
check=True,
)

View File

@@ -57,7 +57,10 @@ def sshd_config(test_root: Path) -> Iterator[SshdConfig]:
)
config = tmpdir / "sshd_config"
config.write_text(content)
login_shell = tmpdir / "shell"
bin_path = tmpdir / "bin"
login_shell = bin_path / "shell"
fake_sudo = bin_path / "sudo"
login_shell.parent.mkdir(parents=True)
bash = shutil.which("bash")
path = os.environ["PATH"]
@@ -65,19 +68,23 @@ def sshd_config(test_root: Path) -> Iterator[SshdConfig]:
login_shell.write_text(
f"""#!{bash}
set -x
if [[ -f /etc/profile ]]; then
source /etc/profile
fi
if [[ -n "$REALPATH" ]]; then
export PATH="$REALPATH:${path}"
else
export PATH="${path}"
fi
export PATH="{bin_path}:{path}"
exec {bash} -l "${{@}}"
"""
)
login_shell.chmod(0o755)
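# A fake `sudo` on PATH that simply executes its arguments, so code paths that prefix
# commands with sudo for non-root users can be exercised against the test sshd.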
fake_sudo.write_text(
f"""#!{bash}
exec "${{@}}"
"""
)
fake_sudo.chmod(0o755)
lib_path = None
extension = ".so"

View File

@@ -17,6 +17,14 @@ def test_select() -> None:
assert not test_cache.is_cached(["x", "z", 1])
def test_insert() -> None:
test_cache = FlakeCacheEntry({}, [])
# Inserting the same thing twice should succeed
test_cache.insert(None, ["nix"])
test_cache.insert(None, ["nix"])
assert test_cache.select(["nix"]) is None
def test_out_path() -> None:
testdict = {"x": {"y": [123, 345, 456], "z": "/nix/store/bla"}}
test_cache = FlakeCacheEntry(testdict, [])

View File

@@ -26,6 +26,17 @@ def test_secrets_upload(
monkeypatch.chdir(str(flake.path))
monkeypatch.setenv("SOPS_AGE_KEY", age_keys[0].privkey)
sops_dir = flake.path / "facts"
# the flake defines this path as the location where the sops key should be installed
sops_key = sops_dir / "key.txt"
sops_key2 = sops_dir / "key2.txt"
# Create old state, which should be cleaned up
sops_dir.mkdir()
sops_key.write_text("OLD STATE")
sops_key2.write_text("OLD STATE2")
cli.run(
[
"secrets",
@@ -56,8 +67,6 @@ def test_secrets_upload(
cli.run(["facts", "upload", "--flake", str(flake_path), "vm1"])
# the flake defines this path as the location where the sops key should be installed
sops_key = flake.path / "facts" / "key.txt"
assert sops_key.exists()
assert sops_key.read_text() == age_keys[0].privkey
assert not sops_key2.exists()

View File

@@ -919,3 +919,75 @@ def test_invalidation(
str(machine.flake.path), machine.name, "my_generator/my_value"
).printable_value
assert value2 == value2_new
@pytest.mark.with_core
def test_dynamic_invalidation(
monkeypatch: pytest.MonkeyPatch,
flake: ClanFlake,
) -> None:
gen_prefix = "config.clan.core.vars.generators"
machine = Machine(name="my_machine", flake=Flake(str(flake.path)))
config = flake.machines[machine.name]
config["nixpkgs"]["hostPlatform"] = "x86_64-linux"
my_generator = config["clan"]["core"]["vars"]["generators"]["my_generator"]
my_generator["files"]["my_value"]["secret"] = False
my_generator["script"] = "echo -n $RANDOM > $out/my_value"
dependent_generator = config["clan"]["core"]["vars"]["generators"][
"dependent_generator"
]
dependent_generator["files"]["my_value"]["secret"] = False
dependent_generator["dependencies"] = ["my_generator"]
dependent_generator["script"] = "echo -n $RANDOM > $out/my_value"
flake.refresh()
# this is an abuse: overwrite hardware-configuration.nix to inject extra config into the test machine
custom_nix = flake.path / "machines" / machine.name / "hardware-configuration.nix"
custom_nix.write_text("""
{ config, ... }: let
p = config.clan.core.vars.generators.my_generator.files.my_value.path;
in {
clan.core.vars.generators.dependent_generator.validation = if builtins.pathExists p then builtins.readFile p else null;
}
""")
flake.refresh()
machine.flush_caches()
monkeypatch.chdir(flake.path)
# before generating, dependent generator validation should be empty; see bogus hardware-configuration.nix above
# we have to avoid `*.files.value` in this initial select because the generators haven't been run yet
generators_0 = machine.eval_nix(f"{gen_prefix}.*.{{validationHash}}")
assert generators_0["dependent_generator"]["validationHash"] is None
# generate both my_generator and (the dependent) dependent_generator
cli.run(["vars", "generate", "--flake", str(flake.path), machine.name])
machine.flush_caches()
# after generating once, dependent generator validation should be set
generators_1 = machine.eval_nix(gen_prefix)
assert generators_1["dependent_generator"]["validationHash"] is not None
# after generating once, neither generator should want to run again:
# `clan vars generate` should have re-evaluated the dependent generator's validationHash after executing the parent generator but before executing the dependent one
# this ensures that validation can depend on parent generators while still only requiring a single pass
cli.run(["vars", "generate", "--flake", str(flake.path), machine.name])
machine.flush_caches()
generators_2 = machine.eval_nix(gen_prefix)
assert (
generators_1["dependent_generator"]["validationHash"]
== generators_2["dependent_generator"]["validationHash"]
)
assert (
generators_1["my_generator"]["files"]["my_value"]["value"]
== generators_2["my_generator"]["files"]["my_value"]["value"]
)
assert (
generators_1["dependent_generator"]["files"]["my_value"]["value"]
== generators_2["dependent_generator"]["files"]["my_value"]["value"]
)

View File

@@ -32,8 +32,14 @@ def map_json_type(
return {"str"}
if json_type == "integer":
return {"int"}
if json_type == "number":
return {"float"}
if json_type == "boolean":
return {"bool"}
# In Python, "number" is analogous to the float type.
# https://json-schema.org/understanding-json-schema/reference/numeric#number
if json_type == "number":
return {"float"}
if json_type == "array":
assert nested_types, f"Array type not found for {parent}"
return {f"""list[{" | ".join(nested_types)}]"""}
@@ -48,7 +54,11 @@ def map_json_type(
known_classes = set()
root_class = "Inventory"
stop_at = None
# TODO: make this configurable
# For now this only includes static top-level attributes of the inventory.
attrs = ["machines", "meta", "services"]
static: dict[str, str] = {"Service": "dict[str, Any]"}
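# Top-level properties not listed in `attrs` are generated shallowly as
# NotRequired[dict[str, Any]]; classes listed in `static` are emitted as the given alias.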
def field_def_from_default_type(
@@ -187,19 +197,32 @@ def get_field_def(
# Recursive function to generate dataclasses from JSON schema
def generate_dataclass(schema: dict[str, Any], class_name: str = root_class) -> str:
def generate_dataclass(
schema: dict[str, Any],
attr_path: list[str],
class_name: str = root_class,
) -> str:
properties = schema.get("properties", {})
required_fields = []
fields_with_default = []
nested_classes: list[str] = []
if stop_at and class_name == stop_at:
# Skip generating classes below the stop_at property
return f"{class_name} = dict[str, Any]"
# if We are at the top level, and the attribute name is in shallow
# return f"{class_name} = dict[str, Any]"
if class_name in static:
return f"{class_name} = {static[class_name]}"
for prop, prop_info in properties.items():
# If we are at the top level and the attribute name is not explicitly included, we only generate a shallow type
field_name = prop.replace("-", "_")
if len(attr_path) == 0 and prop not in attrs:
field_def = f"{field_name}: NotRequired[dict[str, Any]]"
fields_with_default.append(field_def)
# breakpoint()
continue
prop_type = prop_info.get("type", None)
union_variants = prop_info.get("oneOf", [])
enum_variants = prop_info.get("enum", [])
@@ -237,7 +260,9 @@ def generate_dataclass(schema: dict[str, Any], class_name: str = root_class) ->
if nested_class_name not in known_classes:
nested_classes.append(
generate_dataclass(inner_type, nested_class_name)
generate_dataclass(
inner_type, [*attr_path, prop], nested_class_name
)
)
known_classes.add(nested_class_name)
@@ -253,7 +278,9 @@ def generate_dataclass(schema: dict[str, Any], class_name: str = root_class) ->
field_types = {nested_class_name}
if nested_class_name not in known_classes:
nested_classes.append(
generate_dataclass(prop_info, nested_class_name)
generate_dataclass(
prop_info, [*attr_path, prop], nested_class_name
)
)
known_classes.add(nested_class_name)
else:
@@ -318,6 +345,8 @@ def generate_dataclass(schema: dict[str, Any], class_name: str = root_class) ->
)
required_fields.append(field_def)
# breakpoint()
fields_str = "\n ".join(required_fields + fields_with_default)
nested_classes_str = "\n\n".join(nested_classes)
@@ -332,14 +361,11 @@ def generate_dataclass(schema: dict[str, Any], class_name: str = root_class) ->
def run_gen(args: argparse.Namespace) -> None:
print(f"Converting {args.input} to {args.output}")
if args.stop_at:
global stop_at
stop_at = args.stop_at
dataclass_code = ""
with args.input.open() as f:
schema = json.load(f)
dataclass_code = generate_dataclass(schema)
dataclass_code = generate_dataclass(schema, [])
with args.output.open("w") as f:
f.write(

View File

@@ -663,9 +663,9 @@
}
},
"node_modules/@esbuild/aix-ppc64": {
"version": "0.25.1",
"resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.25.1.tgz",
"integrity": "sha512-kfYGy8IdzTGy+z0vFGvExZtxkFlA4zAxgKEahG9KE1ScBjpQnFsNOX8KTU5ojNru5ed5CVoJYXFtoxaq5nFbjQ==",
"version": "0.25.2",
"resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.25.2.tgz",
"integrity": "sha512-wCIboOL2yXZym2cgm6mlA742s9QeJ8DjGVaL39dLN4rRwrOgOyYSnOaFPhKZGLb2ngj4EyfAFjsNJwPXZvseag==",
"cpu": [
"ppc64"
],
@@ -679,9 +679,9 @@
}
},
"node_modules/@esbuild/android-arm": {
"version": "0.25.1",
"resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.25.1.tgz",
"integrity": "sha512-dp+MshLYux6j/JjdqVLnMglQlFu+MuVeNrmT5nk6q07wNhCdSnB7QZj+7G8VMUGh1q+vj2Bq8kRsuyA00I/k+Q==",
"version": "0.25.2",
"resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.25.2.tgz",
"integrity": "sha512-NQhH7jFstVY5x8CKbcfa166GoV0EFkaPkCKBQkdPJFvo5u+nGXLEH/ooniLb3QI8Fk58YAx7nsPLozUWfCBOJA==",
"cpu": [
"arm"
],
@@ -695,9 +695,9 @@
}
},
"node_modules/@esbuild/android-arm64": {
"version": "0.25.1",
"resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.25.1.tgz",
"integrity": "sha512-50tM0zCJW5kGqgG7fQ7IHvQOcAn9TKiVRuQ/lN0xR+T2lzEFvAi1ZcS8DiksFcEpf1t/GYOeOfCAgDHFpkiSmA==",
"version": "0.25.2",
"resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.25.2.tgz",
"integrity": "sha512-5ZAX5xOmTligeBaeNEPnPaeEuah53Id2tX4c2CVP3JaROTH+j4fnfHCkr1PjXMd78hMst+TlkfKcW/DlTq0i4w==",
"cpu": [
"arm64"
],
@@ -711,9 +711,9 @@
}
},
"node_modules/@esbuild/android-x64": {
"version": "0.25.1",
"resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.25.1.tgz",
"integrity": "sha512-GCj6WfUtNldqUzYkN/ITtlhwQqGWu9S45vUXs7EIYf+7rCiiqH9bCloatO9VhxsL0Pji+PF4Lz2XXCES+Q8hDw==",
"version": "0.25.2",
"resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.25.2.tgz",
"integrity": "sha512-Ffcx+nnma8Sge4jzddPHCZVRvIfQ0kMsUsCMcJRHkGJ1cDmhe4SsrYIjLUKn1xpHZybmOqCWwB0zQvsjdEHtkg==",
"cpu": [
"x64"
],
@@ -727,9 +727,9 @@
}
},
"node_modules/@esbuild/darwin-arm64": {
"version": "0.25.1",
"resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.25.1.tgz",
"integrity": "sha512-5hEZKPf+nQjYoSr/elb62U19/l1mZDdqidGfmFutVUjjUZrOazAtwK+Kr+3y0C/oeJfLlxo9fXb1w7L+P7E4FQ==",
"version": "0.25.2",
"resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.25.2.tgz",
"integrity": "sha512-MpM6LUVTXAzOvN4KbjzU/q5smzryuoNjlriAIx+06RpecwCkL9JpenNzpKd2YMzLJFOdPqBpuub6eVRP5IgiSA==",
"cpu": [
"arm64"
],
@@ -743,9 +743,9 @@
}
},
"node_modules/@esbuild/darwin-x64": {
"version": "0.25.1",
"resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.25.1.tgz",
"integrity": "sha512-hxVnwL2Dqs3fM1IWq8Iezh0cX7ZGdVhbTfnOy5uURtao5OIVCEyj9xIzemDi7sRvKsuSdtCAhMKarxqtlyVyfA==",
"version": "0.25.2",
"resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.25.2.tgz",
"integrity": "sha512-5eRPrTX7wFyuWe8FqEFPG2cU0+butQQVNcT4sVipqjLYQjjh8a8+vUTfgBKM88ObB85ahsnTwF7PSIt6PG+QkA==",
"cpu": [
"x64"
],
@@ -759,9 +759,9 @@
}
},
"node_modules/@esbuild/freebsd-arm64": {
"version": "0.25.1",
"resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.25.1.tgz",
"integrity": "sha512-1MrCZs0fZa2g8E+FUo2ipw6jw5qqQiH+tERoS5fAfKnRx6NXH31tXBKI3VpmLijLH6yriMZsxJtaXUyFt/8Y4A==",
"version": "0.25.2",
"resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.25.2.tgz",
"integrity": "sha512-mLwm4vXKiQ2UTSX4+ImyiPdiHjiZhIaE9QvC7sw0tZ6HoNMjYAqQpGyui5VRIi5sGd+uWq940gdCbY3VLvsO1w==",
"cpu": [
"arm64"
],
@@ -775,9 +775,9 @@
}
},
"node_modules/@esbuild/freebsd-x64": {
"version": "0.25.1",
"resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.25.1.tgz",
"integrity": "sha512-0IZWLiTyz7nm0xuIs0q1Y3QWJC52R8aSXxe40VUxm6BB1RNmkODtW6LHvWRrGiICulcX7ZvyH6h5fqdLu4gkww==",
"version": "0.25.2",
"resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.25.2.tgz",
"integrity": "sha512-6qyyn6TjayJSwGpm8J9QYYGQcRgc90nmfdUb0O7pp1s4lTY+9D0H9O02v5JqGApUyiHOtkz6+1hZNvNtEhbwRQ==",
"cpu": [
"x64"
],
@@ -791,9 +791,9 @@
}
},
"node_modules/@esbuild/linux-arm": {
"version": "0.25.1",
"resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.25.1.tgz",
"integrity": "sha512-NdKOhS4u7JhDKw9G3cY6sWqFcnLITn6SqivVArbzIaf3cemShqfLGHYMx8Xlm/lBit3/5d7kXvriTUGa5YViuQ==",
"version": "0.25.2",
"resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.25.2.tgz",
"integrity": "sha512-UHBRgJcmjJv5oeQF8EpTRZs/1knq6loLxTsjc3nxO9eXAPDLcWW55flrMVc97qFPbmZP31ta1AZVUKQzKTzb0g==",
"cpu": [
"arm"
],
@@ -807,9 +807,9 @@
}
},
"node_modules/@esbuild/linux-arm64": {
"version": "0.25.1",
"resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.25.1.tgz",
"integrity": "sha512-jaN3dHi0/DDPelk0nLcXRm1q7DNJpjXy7yWaWvbfkPvI+7XNSc/lDOnCLN7gzsyzgu6qSAmgSvP9oXAhP973uQ==",
"version": "0.25.2",
"resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.25.2.tgz",
"integrity": "sha512-gq/sjLsOyMT19I8obBISvhoYiZIAaGF8JpeXu1u8yPv8BE5HlWYobmlsfijFIZ9hIVGYkbdFhEqC0NvM4kNO0g==",
"cpu": [
"arm64"
],
@@ -823,9 +823,9 @@
}
},
"node_modules/@esbuild/linux-ia32": {
"version": "0.25.1",
"resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.25.1.tgz",
"integrity": "sha512-OJykPaF4v8JidKNGz8c/q1lBO44sQNUQtq1KktJXdBLn1hPod5rE/Hko5ugKKZd+D2+o1a9MFGUEIUwO2YfgkQ==",
"version": "0.25.2",
"resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.25.2.tgz",
"integrity": "sha512-bBYCv9obgW2cBP+2ZWfjYTU+f5cxRoGGQ5SeDbYdFCAZpYWrfjjfYwvUpP8MlKbP0nwZ5gyOU/0aUzZ5HWPuvQ==",
"cpu": [
"ia32"
],
@@ -839,9 +839,9 @@
}
},
"node_modules/@esbuild/linux-loong64": {
"version": "0.25.1",
"resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.25.1.tgz",
"integrity": "sha512-nGfornQj4dzcq5Vp835oM/o21UMlXzn79KobKlcs3Wz9smwiifknLy4xDCLUU0BWp7b/houtdrgUz7nOGnfIYg==",
"version": "0.25.2",
"resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.25.2.tgz",
"integrity": "sha512-SHNGiKtvnU2dBlM5D8CXRFdd+6etgZ9dXfaPCeJtz+37PIUlixvlIhI23L5khKXs3DIzAn9V8v+qb1TRKrgT5w==",
"cpu": [
"loong64"
],
@@ -855,9 +855,9 @@
}
},
"node_modules/@esbuild/linux-mips64el": {
"version": "0.25.1",
"resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.25.1.tgz",
"integrity": "sha512-1osBbPEFYwIE5IVB/0g2X6i1qInZa1aIoj1TdL4AaAb55xIIgbg8Doq6a5BzYWgr+tEcDzYH67XVnTmUzL+nXg==",
"version": "0.25.2",
"resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.25.2.tgz",
"integrity": "sha512-hDDRlzE6rPeoj+5fsADqdUZl1OzqDYow4TB4Y/3PlKBD0ph1e6uPHzIQcv2Z65u2K0kpeByIyAjCmjn1hJgG0Q==",
"cpu": [
"mips64el"
],
@@ -871,9 +871,9 @@
}
},
"node_modules/@esbuild/linux-ppc64": {
"version": "0.25.1",
"resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.25.1.tgz",
"integrity": "sha512-/6VBJOwUf3TdTvJZ82qF3tbLuWsscd7/1w+D9LH0W/SqUgM5/JJD0lrJ1fVIfZsqB6RFmLCe0Xz3fmZc3WtyVg==",
"version": "0.25.2",
"resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.25.2.tgz",
"integrity": "sha512-tsHu2RRSWzipmUi9UBDEzc0nLc4HtpZEI5Ba+Omms5456x5WaNuiG3u7xh5AO6sipnJ9r4cRWQB2tUjPyIkc6g==",
"cpu": [
"ppc64"
],
@@ -887,9 +887,9 @@
}
},
"node_modules/@esbuild/linux-riscv64": {
"version": "0.25.1",
"resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.25.1.tgz",
"integrity": "sha512-nSut/Mx5gnilhcq2yIMLMe3Wl4FK5wx/o0QuuCLMtmJn+WeWYoEGDN1ipcN72g1WHsnIbxGXd4i/MF0gTcuAjQ==",
"version": "0.25.2",
"resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.25.2.tgz",
"integrity": "sha512-k4LtpgV7NJQOml/10uPU0s4SAXGnowi5qBSjaLWMojNCUICNu7TshqHLAEbkBdAszL5TabfvQ48kK84hyFzjnw==",
"cpu": [
"riscv64"
],
@@ -903,9 +903,9 @@
}
},
"node_modules/@esbuild/linux-s390x": {
"version": "0.25.1",
"resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.25.1.tgz",
"integrity": "sha512-cEECeLlJNfT8kZHqLarDBQso9a27o2Zd2AQ8USAEoGtejOrCYHNtKP8XQhMDJMtthdF4GBmjR2au3x1udADQQQ==",
"version": "0.25.2",
"resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.25.2.tgz",
"integrity": "sha512-GRa4IshOdvKY7M/rDpRR3gkiTNp34M0eLTaC1a08gNrh4u488aPhuZOCpkF6+2wl3zAN7L7XIpOFBhnaE3/Q8Q==",
"cpu": [
"s390x"
],
@@ -919,9 +919,9 @@
}
},
"node_modules/@esbuild/linux-x64": {
"version": "0.25.1",
"resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.25.1.tgz",
"integrity": "sha512-xbfUhu/gnvSEg+EGovRc+kjBAkrvtk38RlerAzQxvMzlB4fXpCFCeUAYzJvrnhFtdeyVCDANSjJvOvGYoeKzFA==",
"version": "0.25.2",
"resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.25.2.tgz",
"integrity": "sha512-QInHERlqpTTZ4FRB0fROQWXcYRD64lAoiegezDunLpalZMjcUcld3YzZmVJ2H/Cp0wJRZ8Xtjtj0cEHhYc/uUg==",
"cpu": [
"x64"
],
@@ -935,9 +935,9 @@
}
},
"node_modules/@esbuild/netbsd-arm64": {
"version": "0.25.1",
"resolved": "https://registry.npmjs.org/@esbuild/netbsd-arm64/-/netbsd-arm64-0.25.1.tgz",
"integrity": "sha512-O96poM2XGhLtpTh+s4+nP7YCCAfb4tJNRVZHfIE7dgmax+yMP2WgMd2OecBuaATHKTHsLWHQeuaxMRnCsH8+5g==",
"version": "0.25.2",
"resolved": "https://registry.npmjs.org/@esbuild/netbsd-arm64/-/netbsd-arm64-0.25.2.tgz",
"integrity": "sha512-talAIBoY5M8vHc6EeI2WW9d/CkiO9MQJ0IOWX8hrLhxGbro/vBXJvaQXefW2cP0z0nQVTdQ/eNyGFV1GSKrxfw==",
"cpu": [
"arm64"
],
@@ -951,9 +951,9 @@
}
},
"node_modules/@esbuild/netbsd-x64": {
"version": "0.25.1",
"resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.25.1.tgz",
"integrity": "sha512-X53z6uXip6KFXBQ+Krbx25XHV/NCbzryM6ehOAeAil7X7oa4XIq+394PWGnwaSQ2WRA0KI6PUO6hTO5zeF5ijA==",
"version": "0.25.2",
"resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.25.2.tgz",
"integrity": "sha512-voZT9Z+tpOxrvfKFyfDYPc4DO4rk06qamv1a/fkuzHpiVBMOhpjK+vBmWM8J1eiB3OLSMFYNaOaBNLXGChf5tg==",
"cpu": [
"x64"
],
@@ -967,9 +967,9 @@
}
},
"node_modules/@esbuild/openbsd-arm64": {
"version": "0.25.1",
"resolved": "https://registry.npmjs.org/@esbuild/openbsd-arm64/-/openbsd-arm64-0.25.1.tgz",
"integrity": "sha512-Na9T3szbXezdzM/Kfs3GcRQNjHzM6GzFBeU1/6IV/npKP5ORtp9zbQjvkDJ47s6BCgaAZnnnu/cY1x342+MvZg==",
"version": "0.25.2",
"resolved": "https://registry.npmjs.org/@esbuild/openbsd-arm64/-/openbsd-arm64-0.25.2.tgz",
"integrity": "sha512-dcXYOC6NXOqcykeDlwId9kB6OkPUxOEqU+rkrYVqJbK2hagWOMrsTGsMr8+rW02M+d5Op5NNlgMmjzecaRf7Tg==",
"cpu": [
"arm64"
],
@@ -983,9 +983,9 @@
}
},
"node_modules/@esbuild/openbsd-x64": {
"version": "0.25.1",
"resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.25.1.tgz",
"integrity": "sha512-T3H78X2h1tszfRSf+txbt5aOp/e7TAz3ptVKu9Oyir3IAOFPGV6O9c2naym5TOriy1l0nNf6a4X5UXRZSGX/dw==",
"version": "0.25.2",
"resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.25.2.tgz",
"integrity": "sha512-t/TkWwahkH0Tsgoq1Ju7QfgGhArkGLkF1uYz8nQS/PPFlXbP5YgRpqQR3ARRiC2iXoLTWFxc6DJMSK10dVXluw==",
"cpu": [
"x64"
],
@@ -999,9 +999,9 @@
}
},
"node_modules/@esbuild/sunos-x64": {
"version": "0.25.1",
"resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.25.1.tgz",
"integrity": "sha512-2H3RUvcmULO7dIE5EWJH8eubZAI4xw54H1ilJnRNZdeo8dTADEZ21w6J22XBkXqGJbe0+wnNJtw3UXRoLJnFEg==",
"version": "0.25.2",
"resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.25.2.tgz",
"integrity": "sha512-cfZH1co2+imVdWCjd+D1gf9NjkchVhhdpgb1q5y6Hcv9TP6Zi9ZG/beI3ig8TvwT9lH9dlxLq5MQBBgwuj4xvA==",
"cpu": [
"x64"
],
@@ -1015,9 +1015,9 @@
}
},
"node_modules/@esbuild/win32-arm64": {
"version": "0.25.1",
"resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.25.1.tgz",
"integrity": "sha512-GE7XvrdOzrb+yVKB9KsRMq+7a2U/K5Cf/8grVFRAGJmfADr/e/ODQ134RK2/eeHqYV5eQRFxb1hY7Nr15fv1NQ==",
"version": "0.25.2",
"resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.25.2.tgz",
"integrity": "sha512-7Loyjh+D/Nx/sOTzV8vfbB3GJuHdOQyrOryFdZvPHLf42Tk9ivBU5Aedi7iyX+x6rbn2Mh68T4qq1SDqJBQO5Q==",
"cpu": [
"arm64"
],
@@ -1031,9 +1031,9 @@
}
},
"node_modules/@esbuild/win32-ia32": {
"version": "0.25.1",
"resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.25.1.tgz",
"integrity": "sha512-uOxSJCIcavSiT6UnBhBzE8wy3n0hOkJsBOzy7HDAuTDE++1DJMRRVCPGisULScHL+a/ZwdXPpXD3IyFKjA7K8A==",
"version": "0.25.2",
"resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.25.2.tgz",
"integrity": "sha512-WRJgsz9un0nqZJ4MfhabxaD9Ft8KioqU3JMinOTvobbX6MOSUigSBlogP8QB3uxpJDsFS6yN+3FDBdqE5lg9kg==",
"cpu": [
"ia32"
],
@@ -1047,9 +1047,9 @@
}
},
"node_modules/@esbuild/win32-x64": {
"version": "0.25.1",
"resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.25.1.tgz",
"integrity": "sha512-Y1EQdcfwMSeQN/ujR5VayLOJ1BHaK+ssyk0AEzPjC+t1lITgsnccPqFjb6V+LsTp/9Iov4ysfjxLaGJ9RPtkVg==",
"version": "0.25.2",
"resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.25.2.tgz",
"integrity": "sha512-kM3HKb16VIXZyIeVrM1ygYmZBKybX8N4p754bw390wGO3Tf2j4L2/WYL+4suWujpgf6GBYs3jv7TyUivdd05JA==",
"cpu": [
"x64"
],
@@ -1445,9 +1445,9 @@
}
},
"node_modules/@rollup/rollup-android-arm-eabi": {
"version": "4.37.0",
"resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.37.0.tgz",
"integrity": "sha512-l7StVw6WAa8l3vA1ov80jyetOAEo1FtHvZDbzXDO/02Sq/QVvqlHkYoFwDJPIMj0GKiistsBudfx5tGFnwYWDQ==",
"version": "4.38.0",
"resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.38.0.tgz",
"integrity": "sha512-ldomqc4/jDZu/xpYU+aRxo3V4mGCV9HeTgUBANI3oIQMOL+SsxB+S2lxMpkFp5UamSS3XuTMQVbsS24R4J4Qjg==",
"cpu": [
"arm"
],
@@ -1458,9 +1458,9 @@
]
},
"node_modules/@rollup/rollup-android-arm64": {
"version": "4.37.0",
"resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.37.0.tgz",
"integrity": "sha512-6U3SlVyMxezt8Y+/iEBcbp945uZjJwjZimu76xoG7tO1av9VO691z8PkhzQ85ith2I8R2RddEPeSfcbyPfD4hA==",
"version": "4.38.0",
"resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.38.0.tgz",
"integrity": "sha512-VUsgcy4GhhT7rokwzYQP+aV9XnSLkkhlEJ0St8pbasuWO/vwphhZQxYEKUP3ayeCYLhk6gEtacRpYP/cj3GjyQ==",
"cpu": [
"arm64"
],
@@ -1471,9 +1471,9 @@
]
},
"node_modules/@rollup/rollup-darwin-arm64": {
"version": "4.37.0",
"resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.37.0.tgz",
"integrity": "sha512-+iTQ5YHuGmPt10NTzEyMPbayiNTcOZDWsbxZYR1ZnmLnZxG17ivrPSWFO9j6GalY0+gV3Jtwrrs12DBscxnlYA==",
"version": "4.38.0",
"resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.38.0.tgz",
"integrity": "sha512-buA17AYXlW9Rn091sWMq1xGUvWQFOH4N1rqUxGJtEQzhChxWjldGCCup7r/wUnaI6Au8sKXpoh0xg58a7cgcpg==",
"cpu": [
"arm64"
],
@@ -1484,9 +1484,9 @@
]
},
"node_modules/@rollup/rollup-darwin-x64": {
"version": "4.37.0",
"resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.37.0.tgz",
"integrity": "sha512-m8W2UbxLDcmRKVjgl5J/k4B8d7qX2EcJve3Sut7YGrQoPtCIQGPH5AMzuFvYRWZi0FVS0zEY4c8uttPfX6bwYQ==",
"version": "4.38.0",
"resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.38.0.tgz",
"integrity": "sha512-Mgcmc78AjunP1SKXl624vVBOF2bzwNWFPMP4fpOu05vS0amnLcX8gHIge7q/lDAHy3T2HeR0TqrriZDQS2Woeg==",
"cpu": [
"x64"
],
@@ -1497,9 +1497,9 @@
]
},
"node_modules/@rollup/rollup-freebsd-arm64": {
"version": "4.37.0",
"resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-arm64/-/rollup-freebsd-arm64-4.37.0.tgz",
"integrity": "sha512-FOMXGmH15OmtQWEt174v9P1JqqhlgYge/bUjIbiVD1nI1NeJ30HYT9SJlZMqdo1uQFyt9cz748F1BHghWaDnVA==",
"version": "4.38.0",
"resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-arm64/-/rollup-freebsd-arm64-4.38.0.tgz",
"integrity": "sha512-zzJACgjLbQTsscxWqvrEQAEh28hqhebpRz5q/uUd1T7VTwUNZ4VIXQt5hE7ncs0GrF+s7d3S4on4TiXUY8KoQA==",
"cpu": [
"arm64"
],
@@ -1510,9 +1510,9 @@
]
},
"node_modules/@rollup/rollup-freebsd-x64": {
"version": "4.37.0",
"resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-x64/-/rollup-freebsd-x64-4.37.0.tgz",
"integrity": "sha512-SZMxNttjPKvV14Hjck5t70xS3l63sbVwl98g3FlVVx2YIDmfUIy29jQrsw06ewEYQ8lQSuY9mpAPlmgRD2iSsA==",
"version": "4.38.0",
"resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-x64/-/rollup-freebsd-x64-4.38.0.tgz",
"integrity": "sha512-hCY/KAeYMCyDpEE4pTETam0XZS4/5GXzlLgpi5f0IaPExw9kuB+PDTOTLuPtM10TlRG0U9OSmXJ+Wq9J39LvAg==",
"cpu": [
"x64"
],
@@ -1523,9 +1523,9 @@
]
},
"node_modules/@rollup/rollup-linux-arm-gnueabihf": {
"version": "4.37.0",
"resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.37.0.tgz",
"integrity": "sha512-hhAALKJPidCwZcj+g+iN+38SIOkhK2a9bqtJR+EtyxrKKSt1ynCBeqrQy31z0oWU6thRZzdx53hVgEbRkuI19w==",
"version": "4.38.0",
"resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.38.0.tgz",
"integrity": "sha512-mimPH43mHl4JdOTD7bUMFhBdrg6f9HzMTOEnzRmXbOZqjijCw8LA5z8uL6LCjxSa67H2xiLFvvO67PT05PRKGg==",
"cpu": [
"arm"
],
@@ -1536,9 +1536,9 @@
]
},
"node_modules/@rollup/rollup-linux-arm-musleabihf": {
"version": "4.37.0",
"resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-musleabihf/-/rollup-linux-arm-musleabihf-4.37.0.tgz",
"integrity": "sha512-jUb/kmn/Gd8epbHKEqkRAxq5c2EwRt0DqhSGWjPFxLeFvldFdHQs/n8lQ9x85oAeVb6bHcS8irhTJX2FCOd8Ag==",
"version": "4.38.0",
"resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-musleabihf/-/rollup-linux-arm-musleabihf-4.38.0.tgz",
"integrity": "sha512-tPiJtiOoNuIH8XGG8sWoMMkAMm98PUwlriOFCCbZGc9WCax+GLeVRhmaxjJtz6WxrPKACgrwoZ5ia/uapq3ZVg==",
"cpu": [
"arm"
],
@@ -1549,9 +1549,9 @@
]
},
"node_modules/@rollup/rollup-linux-arm64-gnu": {
"version": "4.37.0",
"resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.37.0.tgz",
"integrity": "sha512-oNrJxcQT9IcbcmKlkF+Yz2tmOxZgG9D9GRq+1OE6XCQwCVwxixYAa38Z8qqPzQvzt1FCfmrHX03E0pWoXm1DqA==",
"version": "4.38.0",
"resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.38.0.tgz",
"integrity": "sha512-wZco59rIVuB0tjQS0CSHTTUcEde+pXQWugZVxWaQFdQQ1VYub/sTrNdY76D1MKdN2NB48JDuGABP6o6fqos8mA==",
"cpu": [
"arm64"
],
@@ -1562,9 +1562,9 @@
]
},
"node_modules/@rollup/rollup-linux-arm64-musl": {
"version": "4.37.0",
"resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.37.0.tgz",
"integrity": "sha512-pfxLBMls+28Ey2enpX3JvjEjaJMBX5XlPCZNGxj4kdJyHduPBXtxYeb8alo0a7bqOoWZW2uKynhHxF/MWoHaGQ==",
"version": "4.38.0",
"resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.38.0.tgz",
"integrity": "sha512-fQgqwKmW0REM4LomQ+87PP8w8xvU9LZfeLBKybeli+0yHT7VKILINzFEuggvnV9M3x1Ed4gUBmGUzCo/ikmFbQ==",
"cpu": [
"arm64"
],
@@ -1575,9 +1575,9 @@
]
},
"node_modules/@rollup/rollup-linux-loongarch64-gnu": {
"version": "4.37.0",
"resolved": "https://registry.npmjs.org/@rollup/rollup-linux-loongarch64-gnu/-/rollup-linux-loongarch64-gnu-4.37.0.tgz",
"integrity": "sha512-yCE0NnutTC/7IGUq/PUHmoeZbIwq3KRh02e9SfFh7Vmc1Z7atuJRYWhRME5fKgT8aS20mwi1RyChA23qSyRGpA==",
"version": "4.38.0",
"resolved": "https://registry.npmjs.org/@rollup/rollup-linux-loongarch64-gnu/-/rollup-linux-loongarch64-gnu-4.38.0.tgz",
"integrity": "sha512-hz5oqQLXTB3SbXpfkKHKXLdIp02/w3M+ajp8p4yWOWwQRtHWiEOCKtc9U+YXahrwdk+3qHdFMDWR5k+4dIlddg==",
"cpu": [
"loong64"
],
@@ -1588,9 +1588,9 @@
]
},
"node_modules/@rollup/rollup-linux-powerpc64le-gnu": {
"version": "4.37.0",
"resolved": "https://registry.npmjs.org/@rollup/rollup-linux-powerpc64le-gnu/-/rollup-linux-powerpc64le-gnu-4.37.0.tgz",
"integrity": "sha512-NxcICptHk06E2Lh3a4Pu+2PEdZ6ahNHuK7o6Np9zcWkrBMuv21j10SQDJW3C9Yf/A/P7cutWoC/DptNLVsZ0VQ==",
"version": "4.38.0",
"resolved": "https://registry.npmjs.org/@rollup/rollup-linux-powerpc64le-gnu/-/rollup-linux-powerpc64le-gnu-4.38.0.tgz",
"integrity": "sha512-NXqygK/dTSibQ+0pzxsL3r4Xl8oPqVoWbZV9niqOnIHV/J92fe65pOir0xjkUZDRSPyFRvu+4YOpJF9BZHQImw==",
"cpu": [
"ppc64"
],
@@ -1601,9 +1601,9 @@
]
},
"node_modules/@rollup/rollup-linux-riscv64-gnu": {
"version": "4.37.0",
"resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-gnu/-/rollup-linux-riscv64-gnu-4.37.0.tgz",
"integrity": "sha512-PpWwHMPCVpFZLTfLq7EWJWvrmEuLdGn1GMYcm5MV7PaRgwCEYJAwiN94uBuZev0/J/hFIIJCsYw4nLmXA9J7Pw==",
"version": "4.38.0",
"resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-gnu/-/rollup-linux-riscv64-gnu-4.38.0.tgz",
"integrity": "sha512-GEAIabR1uFyvf/jW/5jfu8gjM06/4kZ1W+j1nWTSSB3w6moZEBm7iBtzwQ3a1Pxos2F7Gz+58aVEnZHU295QTg==",
"cpu": [
"riscv64"
],
@@ -1614,9 +1614,9 @@
]
},
"node_modules/@rollup/rollup-linux-riscv64-musl": {
"version": "4.37.0",
"resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-musl/-/rollup-linux-riscv64-musl-4.37.0.tgz",
"integrity": "sha512-DTNwl6a3CfhGTAOYZ4KtYbdS8b+275LSLqJVJIrPa5/JuIufWWZ/QFvkxp52gpmguN95eujrM68ZG+zVxa8zHA==",
"version": "4.38.0",
"resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-musl/-/rollup-linux-riscv64-musl-4.38.0.tgz",
"integrity": "sha512-9EYTX+Gus2EGPbfs+fh7l95wVADtSQyYw4DfSBcYdUEAmP2lqSZY0Y17yX/3m5VKGGJ4UmIH5LHLkMJft3bYoA==",
"cpu": [
"riscv64"
],
@@ -1627,9 +1627,9 @@
]
},
"node_modules/@rollup/rollup-linux-s390x-gnu": {
"version": "4.37.0",
"resolved": "https://registry.npmjs.org/@rollup/rollup-linux-s390x-gnu/-/rollup-linux-s390x-gnu-4.37.0.tgz",
"integrity": "sha512-hZDDU5fgWvDdHFuExN1gBOhCuzo/8TMpidfOR+1cPZJflcEzXdCy1LjnklQdW8/Et9sryOPJAKAQRw8Jq7Tg+A==",
"version": "4.38.0",
"resolved": "https://registry.npmjs.org/@rollup/rollup-linux-s390x-gnu/-/rollup-linux-s390x-gnu-4.38.0.tgz",
"integrity": "sha512-Mpp6+Z5VhB9VDk7RwZXoG2qMdERm3Jw07RNlXHE0bOnEeX+l7Fy4bg+NxfyN15ruuY3/7Vrbpm75J9QHFqj5+Q==",
"cpu": [
"s390x"
],
@@ -1640,9 +1640,9 @@
]
},
"node_modules/@rollup/rollup-linux-x64-gnu": {
"version": "4.37.0",
"resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.37.0.tgz",
"integrity": "sha512-pKivGpgJM5g8dwj0ywBwe/HeVAUSuVVJhUTa/URXjxvoyTT/AxsLTAbkHkDHG7qQxLoW2s3apEIl26uUe08LVQ==",
"version": "4.38.0",
"resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.38.0.tgz",
"integrity": "sha512-vPvNgFlZRAgO7rwncMeE0+8c4Hmc+qixnp00/Uv3ht2x7KYrJ6ERVd3/R0nUtlE6/hu7/HiiNHJ/rP6knRFt1w==",
"cpu": [
"x64"
],
@@ -1653,9 +1653,9 @@
]
},
"node_modules/@rollup/rollup-linux-x64-musl": {
"version": "4.37.0",
"resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.37.0.tgz",
"integrity": "sha512-E2lPrLKE8sQbY/2bEkVTGDEk4/49UYRVWgj90MY8yPjpnGBQ+Xi1Qnr7b7UIWw1NOggdFQFOLZ8+5CzCiz143w==",
"version": "4.38.0",
"resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.38.0.tgz",
"integrity": "sha512-q5Zv+goWvQUGCaL7fU8NuTw8aydIL/C9abAVGCzRReuj5h30TPx4LumBtAidrVOtXnlB+RZkBtExMsfqkMfb8g==",
"cpu": [
"x64"
],
@@ -1666,9 +1666,9 @@
]
},
"node_modules/@rollup/rollup-win32-arm64-msvc": {
"version": "4.37.0",
"resolved": "https://registry.npmjs.org/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.37.0.tgz",
"integrity": "sha512-Jm7biMazjNzTU4PrQtr7VS8ibeys9Pn29/1bm4ph7CP2kf21950LgN+BaE2mJ1QujnvOc6p54eWWiVvn05SOBg==",
"version": "4.38.0",
"resolved": "https://registry.npmjs.org/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.38.0.tgz",
"integrity": "sha512-u/Jbm1BU89Vftqyqbmxdq14nBaQjQX1HhmsdBWqSdGClNaKwhjsg5TpW+5Ibs1mb8Es9wJiMdl86BcmtUVXNZg==",
"cpu": [
"arm64"
],
@@ -1679,9 +1679,9 @@
]
},
"node_modules/@rollup/rollup-win32-ia32-msvc": {
"version": "4.37.0",
"resolved": "https://registry.npmjs.org/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.37.0.tgz",
"integrity": "sha512-e3/1SFm1OjefWICB2Ucstg2dxYDkDTZGDYgwufcbsxTHyqQps1UQf33dFEChBNmeSsTOyrjw2JJq0zbG5GF6RA==",
"version": "4.38.0",
"resolved": "https://registry.npmjs.org/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.38.0.tgz",
"integrity": "sha512-mqu4PzTrlpNHHbu5qleGvXJoGgHpChBlrBx/mEhTPpnAL1ZAYFlvHD7rLK839LLKQzqEQMFJfGrrOHItN4ZQqA==",
"cpu": [
"ia32"
],
@@ -1692,9 +1692,9 @@
]
},
"node_modules/@rollup/rollup-win32-x64-msvc": {
"version": "4.37.0",
"resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.37.0.tgz",
"integrity": "sha512-LWbXUBwn/bcLx2sSsqy7pK5o+Nr+VCoRoAohfJ5C/aBio9nfJmGQqHAhU6pwxV/RmyTk5AqdySma7uwWGlmeuA==",
"version": "4.38.0",
"resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.38.0.tgz",
"integrity": "sha512-jjqy3uWlecfB98Psxb5cD6Fny9Fupv9LrDSPTQZUROqjvZmcCqNu4UMl7qqhlUUGpwiAkotj6GYu4SZdcr/nLw==",
"cpu": [
"x64"
],
@@ -2020,9 +2020,9 @@
}
},
"node_modules/@tanstack/query-core": {
"version": "5.69.0",
"resolved": "https://registry.npmjs.org/@tanstack/query-core/-/query-core-5.69.0.tgz",
"integrity": "sha512-Kn410jq6vs1P8Nm+ZsRj9H+U3C0kjuEkYLxbiCyn3MDEiYor1j2DGVULqAz62SLZtUZ/e9Xt6xMXiJ3NJ65WyQ==",
"version": "5.71.0",
"resolved": "https://registry.npmjs.org/@tanstack/query-core/-/query-core-5.71.0.tgz",
"integrity": "sha512-p4+T7CIEe1kMhii4booWiw42nuaiYI9La/bRCNzBaj1P3PDb0dEZYDhc/7oBifKJfHYN+mtS1ynW1qsmzQW7Og==",
"license": "MIT",
"funding": {
"type": "github",
@@ -2030,12 +2030,12 @@
}
},
"node_modules/@tanstack/solid-query": {
"version": "5.69.0",
"resolved": "https://registry.npmjs.org/@tanstack/solid-query/-/solid-query-5.69.0.tgz",
"integrity": "sha512-yWSodAKaTGLRkGHwMe0Gg6Mh11QDoyb7GgjmU8Q0G6MGeAhcPKpPgbALeoNisa8FW5XfW2hFz2vb6zePIdx6rQ==",
"version": "5.71.0",
"resolved": "https://registry.npmjs.org/@tanstack/solid-query/-/solid-query-5.71.0.tgz",
"integrity": "sha512-Gz3MyZtOda9ppPnoPcaohEqwj3ovlU1Mg1IuOr0BSIe4EzkRW8Wpn47VqMi2I2wtryNStPZ2tIU/N98w8IfO/Q==",
"license": "MIT",
"dependencies": {
"@tanstack/query-core": "5.69.0"
"@tanstack/query-core": "5.71.0"
},
"funding": {
"type": "github",
@@ -2090,9 +2090,9 @@
}
},
"node_modules/@types/babel__traverse": {
"version": "7.20.6",
"resolved": "https://registry.npmjs.org/@types/babel__traverse/-/babel__traverse-7.20.6.tgz",
"integrity": "sha512-r1bzfrm0tomOI8g1SzvCaQHo6Lcv6zu0EA+W2kHrt8dyrHQxGzBBL4kdkzIS+jBMV+EYcMAEAqXqYaLJq5rOZg==",
"version": "7.20.7",
"resolved": "https://registry.npmjs.org/@types/babel__traverse/-/babel__traverse-7.20.7.tgz",
"integrity": "sha512-dkO5fhS7+/oos4ciWxyEyjWe48zmG6wbCheo/G2ZnHx4fs3EU6YC6UM8rk56gAjNJ9P3MTH2jo5jb92/K6wbng==",
"dev": true,
"license": "MIT",
"dependencies": {
@@ -2145,9 +2145,9 @@
"license": "MIT"
},
"node_modules/@types/node": {
"version": "22.13.13",
"resolved": "https://registry.npmjs.org/@types/node/-/node-22.13.13.tgz",
"integrity": "sha512-ClsL5nMwKaBRwPcCvH8E7+nU4GxHVx1axNvMZTFHMEfNI7oahimt26P5zjVCRrjiIWj6YFXfE1v3dEp94wLcGQ==",
"version": "22.13.14",
"resolved": "https://registry.npmjs.org/@types/node/-/node-22.13.14.tgz",
"integrity": "sha512-Zs/Ollc1SJ8nKUAgc7ivOEdIBM8JAKgrqqUYi2J997JuKO7/tpQC+WCetQ1sypiKCQWHdvdg9wBNpUPEWZae7w==",
"devOptional": true,
"license": "MIT",
"dependencies": {
@@ -3369,9 +3369,9 @@
"license": "MIT"
},
"node_modules/electron-to-chromium": {
"version": "1.5.123",
"resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.5.123.tgz",
"integrity": "sha512-refir3NlutEZqlKaBLK0tzlVLe5P2wDKS7UQt/3SpibizgsRAPOsqQC3ffw1nlv3ze5gjRQZYHoPymgVZkplFA==",
"version": "1.5.128",
"resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.5.128.tgz",
"integrity": "sha512-bo1A4HH/NS522Ws0QNFIzyPcyUUNV/yyy70Ho1xqfGYzPUme2F/xr4tlEOuM6/A538U1vDA7a4XfCd1CKRegKQ==",
"dev": true,
"license": "ISC"
},
@@ -3451,9 +3451,9 @@
}
},
"node_modules/esbuild": {
"version": "0.25.1",
"resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.25.1.tgz",
"integrity": "sha512-BGO5LtrGC7vxnqucAe/rmvKdJllfGaYWdyABvyMoXQlfYMb2bbRuReWR5tEGE//4LcNJj9XrkovTqNYRFZHAMQ==",
"version": "0.25.2",
"resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.25.2.tgz",
"integrity": "sha512-16854zccKPnC+toMywC+uKNeYSv+/eXkevRAfwRD/G9Cleq66m8XFIrigkbvauLLlCfDL45Q2cWegSg53gGBnQ==",
"hasInstallScript": true,
"license": "MIT",
"bin": {
@@ -3463,31 +3463,31 @@
"node": ">=18"
},
"optionalDependencies": {
"@esbuild/aix-ppc64": "0.25.1",
"@esbuild/android-arm": "0.25.1",
"@esbuild/android-arm64": "0.25.1",
"@esbuild/android-x64": "0.25.1",
"@esbuild/darwin-arm64": "0.25.1",
"@esbuild/darwin-x64": "0.25.1",
"@esbuild/freebsd-arm64": "0.25.1",
"@esbuild/freebsd-x64": "0.25.1",
"@esbuild/linux-arm": "0.25.1",
"@esbuild/linux-arm64": "0.25.1",
"@esbuild/linux-ia32": "0.25.1",
"@esbuild/linux-loong64": "0.25.1",
"@esbuild/linux-mips64el": "0.25.1",
"@esbuild/linux-ppc64": "0.25.1",
"@esbuild/linux-riscv64": "0.25.1",
"@esbuild/linux-s390x": "0.25.1",
"@esbuild/linux-x64": "0.25.1",
"@esbuild/netbsd-arm64": "0.25.1",
"@esbuild/netbsd-x64": "0.25.1",
"@esbuild/openbsd-arm64": "0.25.1",
"@esbuild/openbsd-x64": "0.25.1",
"@esbuild/sunos-x64": "0.25.1",
"@esbuild/win32-arm64": "0.25.1",
"@esbuild/win32-ia32": "0.25.1",
"@esbuild/win32-x64": "0.25.1"
"@esbuild/aix-ppc64": "0.25.2",
"@esbuild/android-arm": "0.25.2",
"@esbuild/android-arm64": "0.25.2",
"@esbuild/android-x64": "0.25.2",
"@esbuild/darwin-arm64": "0.25.2",
"@esbuild/darwin-x64": "0.25.2",
"@esbuild/freebsd-arm64": "0.25.2",
"@esbuild/freebsd-x64": "0.25.2",
"@esbuild/linux-arm": "0.25.2",
"@esbuild/linux-arm64": "0.25.2",
"@esbuild/linux-ia32": "0.25.2",
"@esbuild/linux-loong64": "0.25.2",
"@esbuild/linux-mips64el": "0.25.2",
"@esbuild/linux-ppc64": "0.25.2",
"@esbuild/linux-riscv64": "0.25.2",
"@esbuild/linux-s390x": "0.25.2",
"@esbuild/linux-x64": "0.25.2",
"@esbuild/netbsd-arm64": "0.25.2",
"@esbuild/netbsd-x64": "0.25.2",
"@esbuild/openbsd-arm64": "0.25.2",
"@esbuild/openbsd-x64": "0.25.2",
"@esbuild/sunos-x64": "0.25.2",
"@esbuild/win32-arm64": "0.25.2",
"@esbuild/win32-ia32": "0.25.2",
"@esbuild/win32-x64": "0.25.2"
}
},
"node_modules/escalade": {
@@ -5315,9 +5315,9 @@
}
},
"node_modules/nwsapi": {
"version": "2.2.19",
"resolved": "https://registry.npmjs.org/nwsapi/-/nwsapi-2.2.19.tgz",
"integrity": "sha512-94bcyI3RsqiZufXjkr3ltkI86iEl+I7uiHVDtcq9wJUTwYQJ5odHDeSzkkrRzi80jJ8MaeZgqKjH1bAWAFw9bA==",
"version": "2.2.20",
"resolved": "https://registry.npmjs.org/nwsapi/-/nwsapi-2.2.20.tgz",
"integrity": "sha512-/ieB+mDe4MrrKMT8z+mQL8klXydZWGR5Dowt4RAGKbJ3kIGEx3X4ljUo+6V73IXtUPWgfOlU5B9MlGxFO5T+cA==",
"dev": true,
"license": "MIT"
},
@@ -5508,9 +5508,9 @@
}
},
"node_modules/pirates": {
"version": "4.0.6",
"resolved": "https://registry.npmjs.org/pirates/-/pirates-4.0.6.tgz",
"integrity": "sha512-saLsH7WeYYPiD25LDuLRRY/i+6HaPYr6G1OUlN39otzkSTxKnubR9RTxS3/Kk50s1g2JTgFwWQDQyplC5/SHZg==",
"version": "4.0.7",
"resolved": "https://registry.npmjs.org/pirates/-/pirates-4.0.7.tgz",
"integrity": "sha512-TfySrs/5nm8fQJDcBDuUng3VOUKsd7S+zqvbOTiGXHfxX4wK31ard+hoNuvkicM/2YFzlpDgABOevKSsB4G/FA==",
"dev": true,
"license": "MIT",
"engines": {
@@ -5902,12 +5902,12 @@
}
},
"node_modules/rollup": {
"version": "4.37.0",
"resolved": "https://registry.npmjs.org/rollup/-/rollup-4.37.0.tgz",
"integrity": "sha512-iAtQy/L4QFU+rTJ1YUjXqJOJzuwEghqWzCEYD2FEghT7Gsy1VdABntrO4CLopA5IkflTyqNiLNwPcOJ3S7UKLg==",
"version": "4.38.0",
"resolved": "https://registry.npmjs.org/rollup/-/rollup-4.38.0.tgz",
"integrity": "sha512-5SsIRtJy9bf1ErAOiFMFzl64Ex9X5V7bnJ+WlFMb+zmP459OSWCEG7b0ERZ+PEU7xPt4OG3RHbrp1LJlXxYTrw==",
"license": "MIT",
"dependencies": {
"@types/estree": "1.0.6"
"@types/estree": "1.0.7"
},
"bin": {
"rollup": "dist/bin/rollup"
@@ -5917,35 +5917,29 @@
"npm": ">=8.0.0"
},
"optionalDependencies": {
"@rollup/rollup-android-arm-eabi": "4.37.0",
"@rollup/rollup-android-arm64": "4.37.0",
"@rollup/rollup-darwin-arm64": "4.37.0",
"@rollup/rollup-darwin-x64": "4.37.0",
"@rollup/rollup-freebsd-arm64": "4.37.0",
"@rollup/rollup-freebsd-x64": "4.37.0",
"@rollup/rollup-linux-arm-gnueabihf": "4.37.0",
"@rollup/rollup-linux-arm-musleabihf": "4.37.0",
"@rollup/rollup-linux-arm64-gnu": "4.37.0",
"@rollup/rollup-linux-arm64-musl": "4.37.0",
"@rollup/rollup-linux-loongarch64-gnu": "4.37.0",
"@rollup/rollup-linux-powerpc64le-gnu": "4.37.0",
"@rollup/rollup-linux-riscv64-gnu": "4.37.0",
"@rollup/rollup-linux-riscv64-musl": "4.37.0",
"@rollup/rollup-linux-s390x-gnu": "4.37.0",
"@rollup/rollup-linux-x64-gnu": "4.37.0",
"@rollup/rollup-linux-x64-musl": "4.37.0",
"@rollup/rollup-win32-arm64-msvc": "4.37.0",
"@rollup/rollup-win32-ia32-msvc": "4.37.0",
"@rollup/rollup-win32-x64-msvc": "4.37.0",
"@rollup/rollup-android-arm-eabi": "4.38.0",
"@rollup/rollup-android-arm64": "4.38.0",
"@rollup/rollup-darwin-arm64": "4.38.0",
"@rollup/rollup-darwin-x64": "4.38.0",
"@rollup/rollup-freebsd-arm64": "4.38.0",
"@rollup/rollup-freebsd-x64": "4.38.0",
"@rollup/rollup-linux-arm-gnueabihf": "4.38.0",
"@rollup/rollup-linux-arm-musleabihf": "4.38.0",
"@rollup/rollup-linux-arm64-gnu": "4.38.0",
"@rollup/rollup-linux-arm64-musl": "4.38.0",
"@rollup/rollup-linux-loongarch64-gnu": "4.38.0",
"@rollup/rollup-linux-powerpc64le-gnu": "4.38.0",
"@rollup/rollup-linux-riscv64-gnu": "4.38.0",
"@rollup/rollup-linux-riscv64-musl": "4.38.0",
"@rollup/rollup-linux-s390x-gnu": "4.38.0",
"@rollup/rollup-linux-x64-gnu": "4.38.0",
"@rollup/rollup-linux-x64-musl": "4.38.0",
"@rollup/rollup-win32-arm64-msvc": "4.38.0",
"@rollup/rollup-win32-ia32-msvc": "4.38.0",
"@rollup/rollup-win32-x64-msvc": "4.38.0",
"fsevents": "~2.3.2"
}
},
"node_modules/rollup/node_modules/@types/estree": {
"version": "1.0.6",
"resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.6.tgz",
"integrity": "sha512-AYnb1nQyY49te+VRAVgmzfcgjYS91mY5P0TKUDCLEM+gNnA+3T6rWITXRLYCpahpqSQbN5cE+gHpnPyXjHWxcw==",
"license": "MIT"
},
"node_modules/rrweb-cssom": {
"version": "0.8.0",
"resolved": "https://registry.npmjs.org/rrweb-cssom/-/rrweb-cssom-0.8.0.tgz",
@@ -7429,9 +7423,9 @@
"license": "ISC"
},
"node_modules/yaml": {
"version": "2.7.0",
"resolved": "https://registry.npmjs.org/yaml/-/yaml-2.7.0.tgz",
"integrity": "sha512-+hSoy/QHluxmC9kCIJyL/uyFmLmc+e5CFR5Wa+bpIhIj85LVb9ZH2nVnqrHoSvKogwODv0ClqZkmiSSaIH5LTA==",
"version": "2.7.1",
"resolved": "https://registry.npmjs.org/yaml/-/yaml-2.7.1.tgz",
"integrity": "sha512-10ULxpnOCQXxJvBgxsn9ptjq6uviG/htZKk9veJGhlqn3w/DxQ631zFF+nlQXLwmImeS5amR2dl2U8sg6U9jsQ==",
"devOptional": true,
"license": "ISC",
"bin": {


@@ -11,6 +11,8 @@ export async function get_iwd_service(base_path: string, machine_name: string) {
if (r.status == "error") {
return null;
}
// @FIXME: Clean this up once we implement the feature
// @ts-expect-error: This doesn't check currently
const inventory: Inventory = r.data;
const instance_key = instance_name(machine_name);