Compare commits: ke-qa-nixp ... push-tnkqq

142 Commits:

05c7d885b6, 6482094cb4, cbcfcd507d, 9b71f106f6, 1482bd571c, ec2537d088, 41229af93e, 7e7e58eb64,
46f746d09c, 56e03d1f25, dd783bdf85, bf41a9ef00, f313ace19a, fe8f7e919e, c64276b64e, 436da16bf9,
1c3282bb63, 3c4b3e180e, 3953715b48, 7b95fa039f, 347668a57f, 38712d6fe0, 1d38ffa9c2, 665f036dec,
b74b6ff449, 9c8797e770, 2be6cedec4, 7f49449f94, 1f7bfa4e34, 67fab4b11d, 18e3c72ef0, 84d4660a8d,
13c3e1411a, 3c3a505aca, f33c8e98fe, 869a04e5af, d09fdc3528, 652677d06f, ec163657cd, 7d3aa5936d,
f8f8efbb88, 8887e209d6, a72f74a36e, 0e0f8e73ec, f15a113f52, 1fbb4f5014, 980a3c90b5, c01b14aef5,
0a3e564ec0, bc09d5c886, f6b8d660d8, 6014ddcd9a, 551f5144c7, 9a664c323c, 7572dc8c2b, e22f0d9e36,
f93ae13448, 749bac63f4, 2bac2ec7ee, f224d4b20c, 47aa0a3b8e, dd1cab5daa, 32edae4ebd, d829aa5838,
fd6619668b, 50a26ece32, 8f224b00a6, 27d43ee21d, 9626e22db7, 1df329fe0d, 9da38abc77, 2814c46e68,
feef0a513e, 9cc85b36c6, 1465b18820, 6fa0062573, 6cd68c23f5, fdddc60676, 684aa27068, 35d8deb393,
e2f20b5ffc, fd5d7934a0, f194c31e0e, 061b598adf, 744f35e0cc, 4a6d46198c, 82d5ca9a0b, 28d8a91a30,
18f8d69728, 1feead4ce4, 7f28110558, 38787da891, 2b587da9fe, acd2c1654b, 2ecb1399c3, 46ae6b49c1,
50a8a69719, 203761a99c, 990b4e0223, 032f54cbfb, 47146efa0f, c031abcd9e, 6b5dca5842, 016fe3d114,
9b60b4a989, 3088ce025b, 4f1fda3de6, 57f14827c2, 0390d5999d, 58e9a28f14, b4ad5ca1bd, 84ecb1aae6,
2b9971f538, 81e15cab34, 215c808071, 4de052e58b, a06a7a7a2c, 94df3855b5, a83f3c23f4, da6cd324f0,
c5b96df7b0, c4feeace31, 6117b664ae, b8fdb48fd8, 9165f7ccaf, 8058a7c158, fed61f49f9, f1f05c7e6b,
7597d1560f, f739e1b66d, 5d3609aacd, 7aa51d6bd7, af91ae8c7f, 077bf55fd7, 1f6dcb910f, 6363d9c99c,
fd30dbd1be, ba4dc36ddf, 5abac04b15, 8c84d32b13, c083548795, 204f9d09e3
.gitea/PULL_REQUEST_TEMPLATE.md (new file, 12 lines)

````diff
@@ -0,0 +1,12 @@
+## Description of the change
+
+<!-- Brief summary of the change if not already clear from the title -->
+
+## Checklist
+
+- [ ] Updated Documentation
+- [ ] Added tests
+- [ ] Doesn't affect backwards compatibility - or check the next points
+- [ ] Add the breaking change and migration details to docs/release-notes.md
+- !!! Review from another person is required *BEFORE* merge !!!
+- [ ] Add introduction of major feature to docs/release-notes.md
````
````diff
@@ -2,7 +2,6 @@
   self,
   lib,
   inputs,
-  privateInputs ? { },
   ...
 }:
 let
@@ -19,28 +18,19 @@ let
   nixosLib = import (self.inputs.nixpkgs + "/nixos/lib") { };
 in
 {
-  imports =
-    let
-      clanCoreModulesDir = ../nixosModules/clanCore;
-      getClanCoreTestModules =
-        let
-          moduleNames = attrNames (builtins.readDir clanCoreModulesDir);
-          testPaths = map (
-            moduleName: clanCoreModulesDir + "/${moduleName}/tests/flake-module.nix"
-          ) moduleNames;
-        in
-        filter pathExists testPaths;
-    in
-    getClanCoreTestModules
-    ++ filter pathExists [
-      ./devshell/flake-module.nix
-      ./flash/flake-module.nix
-      ./installation/flake-module.nix
-      ./update/flake-module.nix
-      ./morph/flake-module.nix
-      ./nixos-documentation/flake-module.nix
-      ./dont-depend-on-repo-root.nix
-    ];
+  imports = filter pathExists [
+    ./devshell/flake-module.nix
+    ./flash/flake-module.nix
+    ./installation/flake-module.nix
+    ./update/flake-module.nix
+    ./morph/flake-module.nix
+    ./nixos-documentation/flake-module.nix
+    ./dont-depend-on-repo-root.nix
+    # clan core submodule tests
+    ../nixosModules/clanCore/machine-id/tests/flake-module.nix
+    ../nixosModules/clanCore/postgresql/tests/flake-module.nix
+    ../nixosModules/clanCore/state-version/tests/flake-module.nix
+  ];
   flake.check = genAttrs [ "x86_64-linux" "aarch64-darwin" ] (
     system:
     let
@@ -120,7 +110,7 @@ in
       ) (self.darwinConfigurations or { })
       // lib.mapAttrs' (n: lib.nameValuePair "package-${n}") (
         if system == "aarch64-darwin" then
-          lib.filterAttrs (n: _: n != "docs" && n != "deploy-docs" && n != "docs-options") packagesToBuild
+          lib.filterAttrs (n: _: n != "docs" && n != "deploy-docs" && n != "option-search") packagesToBuild
         else
           packagesToBuild
       )
@@ -138,7 +128,7 @@ in
     // flakeOutputs
     // {
       clan-core-for-checks = pkgs.runCommand "clan-core-for-checks" { } ''
-        cp -r ${privateInputs.clan-core-for-checks} $out
+        cp -r ${self} $out
         chmod -R +w $out
         cp ${../flake.lock} $out/flake.lock
 
````
````diff
@@ -15,7 +15,6 @@ let
     networking.useNetworkd = true;
     services.openssh.enable = true;
     services.openssh.settings.UseDns = false;
-    services.openssh.settings.PasswordAuthentication = false;
     system.nixos.variant_id = "installer";
     environment.systemPackages = [
       pkgs.nixos-facter
````
````diff
@@ -50,13 +50,13 @@
     dns =
       { pkgs, ... }:
       {
-        environment.systemPackages = [ pkgs.net-tools ];
+        environment.systemPackages = [ pkgs.nettools ];
       };
 
     client =
       { pkgs, ... }:
       {
-        environment.systemPackages = [ pkgs.net-tools ];
+        environment.systemPackages = [ pkgs.nettools ];
       };
 
     server01 = {
````
````diff
@@ -1,91 +1,39 @@
-# Clan service: sshd
-What it does
-- Generates and persists SSH host keys via `vars`.
-- Optionally issues CA‑signed host certificates for servers.
-- Installs the `server` CA public key into `clients` `known_hosts` for TOFU‑less verification.
+The `sshd` Clan service manages SSH to make it easy to securely access your
+machines over the internet. The service uses `vars` to store the SSH host keys
+for each machine to ensure they remain stable across deployments.
 
+`sshd` also generates SSH certificates for both servers and clients allowing for
+certificate-based authentication for SSH.
 
-When to use it
-- Zero‑TOFU SSH for dynamic fleets: admins/CI can connect to frequently rebuilt hosts (e.g., server-1.example.com) without prompts or per‑host `known_hosts` churn.
+The service also disables password-based authentication over SSH, to access your
+machines you'll need to use public key authentication or certificate-based
+authentication.
 
-Roles
-- Server: runs sshd, presents a CA‑signed host certificate for `<machine>.<domain>`.
-- Client: trusts the CA for the given domains to verify servers’ certificates.
-Tip: assign both roles to a machine if it should both present a cert and verify others.
+## Usage
 
-Quick start (with host certificates)
-Useful if you never want to get a prompt about trusting the ssh fingerprint.
-```nix
-{
-  inventory.instances = {
-    sshd-with-certs = {
-      module = { name = "sshd"; input = "clan-core"; };
-      # Servers present certificates for <machine>.example.com
-      roles.server.tags.all = { };
-      roles.server.settings = {
-        certificate.searchDomains = [ "example.com" ];
-        # Optional: also add RSA host keys
-        # hostKeys.rsa.enable = true;
-      };
-      # Clients trust the CA for *.example.com
-      roles.client.tags.all = { };
-      roles.client.settings = {
-        certificate.searchDomains = [ "example.com" ];
-      };
-    };
-  };
-}
-```
 
-Basic: only add persistent host keys (ed25519), no certificates
-Useful if you want to get an ssh "trust this server" prompt once and then never again.
 ```nix
 {
   inventory.instances = {
+    # By default this service only generates ed25519 host keys
     sshd-basic = {
       module = {
         name = "sshd";
         input = "clan-core";
       };
       roles.server.tags.all = { };
+      roles.client.tags.all = { };
     };
-  };
-}
-```
-
-Example: selective trust per environment
-Admins should trust only production; CI should trust prod and staging. Servers are reachable under both domains.
-```nix
-{
-  inventory.instances = {
-    sshd-env-scoped = {
-      module = { name = "sshd"; input = "clan-core"; };
+    # Also generate RSA host keys for all servers
+    sshd-with-rsa = {
+      module = {
+        name = "sshd";
+        input = "clan-core";
+      };
 
-      # Servers present certs for both prod and staging FQDNs
       roles.server.tags.all = { };
       roles.server.settings = {
-        certificate.searchDomains = [ "prod.example.com" "staging.example.com" ];
-      };
+        hostKeys.rsa.enable = true;
 
-      # Admin laptop: trust prod only
-      roles.client.machines."admin-laptop".settings = {
-        certificate.searchDomains = [ "prod.example.com" ];
-      };
 
-      # CI runner: trust prod and staging
-      roles.client.machines."ci-runner-1".settings = {
-        certificate.searchDomains = [ "prod.example.com" "staging.example.com" ];
       };
+      roles.client.tags.all = { };
     };
   };
 }
 ```
-- Admin -> server1.prod.example.com: zero‑TOFU (verified via cert).
-- Admin -> server1.staging.example.com: falls back to TOFU (or is blocked by policy).
-- CI -> either prod or staging: zero‑TOFU for both.
-Note: server and client searchDomains don’t have to be identical; they only need to overlap for the hostnames you actually use.
 
-Notes
-- Connect using a name that matches a cert principal (e.g., `server1.example.com`); wildcards are not allowed inside the certificate.
-- CA private key stays in `vars` (not deployed); only the CA public key is distributed.
-- Logins still require your user SSH keys on the server (passwords are disabled).
````
````diff
@@ -11,9 +11,7 @@
     pkgs.syncthing
   ];
   script = ''
-    export TMPDIR=/tmp
-    TEMPORARY=$(mktemp -d)
-    syncthing generate --config "$out" --data "$TEMPORARY"
+    syncthing generate --config "$out"
     mv "$out"/key.pem "$out"/key
     mv "$out"/cert.pem "$out"/cert
     cat "$out"/config.xml | grep -oP '(?<=<device id=")[^"]+' | uniq > "$out"/id
````
devFlake/flake.lock (generated, 24 lines changed)

````diff
@@ -3,16 +3,16 @@
     "clan-core-for-checks": {
       "flake": false,
       "locked": {
-        "lastModified": 1759795610,
-        "narHash": "sha256-YFOK+aoJjWLfMHj2spvrQIe0ufIsv6P8o44NqoFPwp0=",
-        "ref": "main",
-        "rev": "0de79962eacfe6f09d7aabca2a7305deef4fde0c",
+        "lastModified": 1760368011,
+        "narHash": "sha256-mLK2nwbfklfOGIVAKVNDwGyYz8mPh4fzsAqSK3BlCiI=",
+        "ref": "clan-25.05",
+        "rev": "1b3c129aa9741d99b27810652ca888b3fbfc3a11",
         "shallow": true,
         "type": "git",
         "url": "https://git.clan.lol/clan/clan-core"
       },
       "original": {
-        "ref": "main",
+        "ref": "clan-25.05",
         "shallow": true,
         "type": "git",
         "url": "https://git.clan.lol/clan/clan-core"
@@ -105,16 +105,16 @@
     },
     "nixpkgs-dev": {
       "locked": {
-        "lastModified": 1759794031,
-        "narHash": "sha256-Zruni/00BlDHSWVJf3mb0o+OHnxIvJNuXkPloY9c+PU=",
+        "lastModified": 1760309387,
+        "narHash": "sha256-e0lvQ7+B1Y8zjykYHAj9tBv10ggLqK0nmxwvMU3J0Eo=",
         "owner": "NixOS",
         "repo": "nixpkgs",
-        "rev": "09c221b2f0726da85b124efb60a1d123971dfa08",
+        "rev": "6cd95994a9c8f7c6f8c1f1161be94119afdcb305",
         "type": "github"
       },
       "original": {
         "owner": "NixOS",
-        "ref": "nixos-unstable-small",
+        "ref": "nixos-25.05-small",
         "repo": "nixpkgs",
         "type": "github"
       }
@@ -208,11 +208,11 @@
         "nixpkgs": []
       },
       "locked": {
-        "lastModified": 1758728421,
-        "narHash": "sha256-ySNJ008muQAds2JemiyrWYbwbG+V7S5wg3ZVKGHSFu8=",
+        "lastModified": 1760120816,
+        "narHash": "sha256-gq9rdocpmRZCwLS5vsHozwB6b5nrOBDNc2kkEaTXHfg=",
         "owner": "numtide",
         "repo": "treefmt-nix",
-        "rev": "5eda4ee8121f97b218f7cc73f5172098d458f1d1",
+        "rev": "761ae7aff00907b607125b2f57338b74177697ed",
         "type": "github"
       },
       "original": {
````
````diff
@@ -2,7 +2,7 @@
   description = "private dev inputs";
 
   # Dev dependencies
-  inputs.nixpkgs-dev.url = "github:NixOS/nixpkgs/nixos-unstable-small";
+  inputs.nixpkgs-dev.url = "github:NixOS/nixpkgs/nixos-25.05-small";
 
   inputs.flake-utils.url = "github:numtide/flake-utils";
   inputs.flake-utils.inputs.systems.follows = "systems";
@@ -15,7 +15,7 @@
 
   inputs.systems.url = "github:nix-systems/default";
 
-  inputs.clan-core-for-checks.url = "git+https://git.clan.lol/clan/clan-core?ref=main&shallow=1";
+  inputs.clan-core-for-checks.url = "git+https://git.clan.lol/clan/clan-core?ref=clan-25.05&shallow=1";
   inputs.clan-core-for-checks.flake = false;
 
   inputs.test-fixtures.url = "git+https://git.clan.lol/clan/test-fixtures";
````
docs/.gitignore (vendored, 2 lines changed)

````diff
@@ -1,6 +1,6 @@
 /site/reference
 /site/services/official
 /site/static
-/site/options
+/site/option-search
 /site/openapi.json
 !/site/static/extra.css
````
````diff
@@ -1,5 +1,5 @@
 # Serve documentation locally
 
 ```
-$ nix develop .#docs -c mkdocs serve
+nix develop .#docs -c mkdocs serve
 ```
````
docs/main.py (deleted, 41 lines)

````diff
@@ -1,41 +0,0 @@
-from typing import Any
-
-
-def define_env(env: Any) -> None:
-    static_dir = "/static/"
-    video_dir = "https://clan.lol/" + "videos/"
-    asciinema_dir = static_dir + "asciinema-player/"
-
-    @env.macro
-    def video(name: str) -> str:
-        return f"""<video loop muted autoplay id="{name}">
-          <source src={video_dir + name} type="video/webm">
-          Your browser does not support the video tag.
-        </video>"""
-
-    @env.macro
-    def asciinema(name: str) -> str:
-        return f"""<div id="{name}">
-            <script>
-                // Function to load the script and then create the Asciinema player
-                function loadAsciinemaPlayer() {{
-                    var script = document.createElement('script');
-                    script.src = "{asciinema_dir}/asciinema-player.min.js";
-                    script.onload = function() {{
-                        AsciinemaPlayer.create('{video_dir + name}', document.getElementById("{name}"), {{
-                            loop: true,
-                            autoPlay: true,
-                            controls: false,
-                            speed: 1.5,
-                            theme: "solarized-light"
-                        }});
-                    }};
-                    document.head.appendChild(script);
-                }}
-
-                // Load the Asciinema player script
-                loadAsciinemaPlayer();
-            </script>
-
-            <link rel="stylesheet" type="text/css" href="{asciinema_dir}/asciinema-player.css" />
-        </div>"""
````
````diff
@@ -58,7 +58,7 @@ nav:
       - getting-started/configure-disk.md
       - getting-started/update-machines.md
       - getting-started/continuous-integration.md
-      - getting-started/convert-existing-NixOS-configuration.md
+      - Convert existing NixOS configurations: getting-started/convert-existing-NixOS-configuration.md
   - Guides:
       - Inventory:
           - Introduction to Inventory: guides/inventory/inventory.md
@@ -66,6 +66,7 @@ nav:
       - Services:
           - Introduction to Services: guides/services/introduction-to-services.md
           - Author Your Own Service: guides/services/community.md
+          - Internal Services with SSL: guides/internal-ssl-services.md
       - Vars:
           - Introduction to Vars: guides/vars/vars-overview.md
          - Minimal Example: guides/vars/vars-backend.md
@@ -179,7 +180,7 @@ nav:
       - services/official/zerotier.md
   - services/community.md
 
-  - Search Clan Options: "/options"
+  - Search Clan Options: "/option-search"
 
 docs_dir: site
 site_dir: out
````
````diff
@@ -3,11 +3,9 @@
   module-docs,
   clan-cli-docs,
   clan-lib-openapi,
-  asciinema-player-js,
-  asciinema-player-css,
   roboto,
   fira-code,
-  docs-options,
+  option-search,
   ...
 }:
 let
@@ -53,13 +51,9 @@ pkgs.stdenv.mkDerivation {
     chmod -R +w ./site
     echo "Generated API documentation in './site/reference/' "
 
-    rm -rf ./site/options
-    cp -r ${docs-options} ./site/options
-    chmod -R +w ./site/options
+    rm -rf ./site/option-search
+    cp -r ${option-search} ./site/option-search
+    chmod -R +w ./site/option-search
 
-    mkdir -p ./site/static/asciinema-player
-    ln -snf ${asciinema-player-js} ./site/static/asciinema-player/asciinema-player.min.js
-    ln -snf ${asciinema-player-css} ./site/static/asciinema-player/asciinema-player.css
 
     # Link to fonts
     ln -snf ${roboto}/share/fonts/truetype/Roboto-Regular.ttf ./site/static/
````
````diff
@@ -1,8 +1,5 @@
-{ inputs, self, ... }:
+{ inputs, ... }:
 {
-  imports = [
-    ./options/flake-module.nix
-  ];
   perSystem =
     {
       config,
@@ -10,83 +7,7 @@
       pkgs,
       ...
     }:
-    let
-      # Simply evaluated options (JSON)
-      # { clanCore = «derivation JSON»; clanModules = { ${name} = «derivation JSON» }; }
-      jsonDocs = pkgs.callPackage ./get-module-docs.nix {
-        inherit (self) clanModules;
-        clan-core = self;
-        inherit pkgs;
-      };
-
-      # clan service options
-      clanModulesViaService = pkgs.writeText "info.json" (builtins.toJSON jsonDocs.clanModulesViaService);
-
-      # Simply evaluated options (JSON)
-      renderOptions =
-        pkgs.runCommand "render-options"
-          {
-            # TODO: ruff does not splice properly in nativeBuildInputs
-            depsBuildBuild = [ pkgs.ruff ];
-            nativeBuildInputs = [
-              pkgs.python3
-              pkgs.mypy
-              self'.packages.clan-cli
-            ];
-          }
-          ''
-            install -D -m755 ${./render_options}/__init__.py $out/bin/render-options
-            patchShebangs --build $out/bin/render-options
-
-            ruff format --check --diff $out/bin/render-options
-            ruff check --line-length 88 $out/bin/render-options
-            mypy --strict $out/bin/render-options
-          '';
-
-      asciinema-player-js = pkgs.fetchurl {
-        url = "https://github.com/asciinema/asciinema-player/releases/download/v3.7.0/asciinema-player.min.js";
-        sha256 = "sha256-Ymco/+FinDr5YOrV72ehclpp4amrczjo5EU3jfr/zxs=";
-      };
-      asciinema-player-css = pkgs.fetchurl {
-        url = "https://github.com/asciinema/asciinema-player/releases/download/v3.7.0/asciinema-player.css";
-        sha256 = "sha256-GZMeZFFGvP5GMqqh516mjJKfQaiJ6bL38bSYOXkaohc=";
-      };
-
-      module-docs =
-        pkgs.runCommand "rendered"
-          {
-            buildInputs = [
-              pkgs.python3
-              self'.packages.clan-cli
-            ];
-          }
-          ''
-            export CLAN_CORE_PATH=${
-              inputs.nixpkgs.lib.fileset.toSource {
-                root = ../..;
-                fileset = ../../clanModules;
-              }
-            }
-            export CLAN_CORE_DOCS=${jsonDocs.clanCore}/share/doc/nixos/options.json
-
-            # A file that contains the links to all clanModule docs
-            export CLAN_MODULES_VIA_SERVICE=${clanModulesViaService}
-            export CLAN_SERVICE_INTERFACE=${self'.legacyPackages.clan-service-module-interface}/share/doc/nixos/options.json
-            export CLAN_OPTIONS_PATH=${self'.legacyPackages.clan-options}/share/doc/nixos/options.json
-
-            mkdir $out
-
-            # The python script will place mkDocs files in the output directory
-            exec python3 ${renderOptions}/bin/render-options
-          '';
-    in
     {
-      legacyPackages = {
-        inherit
-          jsonDocs
-          clanModulesViaService
-          ;
-      };
       devShells.docs = self'.packages.docs.overrideAttrs (_old: {
         nativeBuildInputs = [
           # Run: htmlproofer --disable-external
@@ -105,17 +26,14 @@
       docs = pkgs.python3.pkgs.callPackage ./default.nix {
         inherit (self'.packages)
           clan-cli-docs
-          docs-options
+          option-search
           inventory-api-docs
           clan-lib-openapi
+          module-docs
           ;
         inherit (inputs) nixpkgs;
-        inherit module-docs;
-        inherit asciinema-player-js;
-        inherit asciinema-player-css;
       };
       deploy-docs = pkgs.callPackage ./deploy-docs.nix { inherit (config.packages) docs; };
-      inherit module-docs;
     };
     checks.docs-integrity =
       pkgs.runCommand "docs-integrity"
````
docs/release-notes.md (new file, 9 lines)

````diff
@@ -0,0 +1,9 @@
+# clan-core release notes 25.11
+
+<!-- This is not rendered yet -->
+
+## New features
+
+## Breaking Changes
+
+## Misc
````
````diff
@@ -4,14 +4,14 @@ This guide will help you convert your existing NixOS configurations into a Clan.
 Migrating instead of starting new can be trickier and might lead to bugs or
 unexpected issues. We recommend reading the [Getting Started](../getting-started/creating-your-first-clan.md) guide first.
 
-Once you have a working setup and understand the concepts transfering your NixOS configurations over is easy.
+Once you have a working setup and understand the concepts transferring your NixOS configurations over is easy.
 
 ## Back up your existing configuration
 
 Before you start, it is strongly recommended to back up your existing
 configuration in any form you see fit. If you use version control to manage
 your configuration changes, it is also a good idea to follow the migration
-guide in a separte branch until everything works as expected.
+guide in a separate branch until everything works as expected.
 
 ## Starting Point
 
````
````diff
@@ -67,6 +67,59 @@ nix build .#checks.x86_64-linux.{test-attr-name}
 ```
 (replace `{test-attr-name}` with the name of the test)
 
+### Testing services with vars
+
+Services that define their own vars (using `clan.core.vars.generators`) require generating test vars before running the tests.
+
+#### Understanding the `clan.directory` setting
+
+The `clan.directory` option is critical for vars generation and loading in tests. This setting determines:
+
+1. **Where vars are generated**: When you run `update-vars`, it creates `vars/` and `sops/` directories inside the path specified by `clan.directory`
+2. **Where vars are loaded from**: During test execution, machines look for their vars and secrets relative to `clan.directory`
+
+#### Generating test vars
+
+For services that define vars, you must first run:
+
+```shellSession
+nix run .#checks.x86_64-linux.{test-attr-name}.update-vars
+```
+
+This generates the necessary var files in the directory specified by `clan.directory`. After running this command, you can run the test normally:
+
+```shellSession
+nix run .#checks.x86_64-linux.{test-attr-name}
+```
+
+#### Example: service-dummy-test
+
+The `service-dummy-test` is a good example of a test that uses vars. To run it:
+
+```shellSession
+# First, generate the test vars
+nix run .#checks.x86_64-linux.service-dummy-test.update-vars
+
+# Then run the test
+nix run .#checks.x86_64-linux.service-dummy-test
+```
+
+#### Common issues
+
+If `update-vars` fails, you may need to ensure that:
+
+- **`clan.directory` is set correctly**: It should point to the directory where you want vars to be generated (typically `clan.directory = ./.;` in your test definition)
+- **Your test defines machines**: Machines must be defined in `clan.inventory.machines` or through the inventory system
+- **Machine definitions are complete**: Each machine should have the necessary service configuration that defines the vars generators
+
+**If vars are not found during test execution:**
+
+- Verify that `clan.directory` points to the same location where you ran `update-vars`
+- Check that the `vars/` and `sops/` directories exist in that location
+- Ensure the generated files match the machines and generators defined in your test
+
+You can reference `/checks/service-dummy-test/` to see a complete working example of a test with vars, including the correct directory structure.
+
 ### Debugging VM tests
 
 The following techniques can be used to debug a VM test:
````
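For orientation, here is a minimal sketch of how the two settings discussed in that guide fit together in a test definition. Only `clan.directory` and `clan.inventory.machines` are taken from the guide; the test name, machine name, and surrounding structure are illustrative placeholders rather than a literal file from this repository.

````nix
{
  # Hypothetical test skeleton; attribute names outside clan.* may differ
  # from the real test harness.
  name = "my-service-test";

  clan = {
    directory = ./.;                  # `update-vars` writes vars/ and sops/ here
    inventory.machines.server = { };  # machine whose vars generators the test exercises
  };

  # service instances and testScript omitted
}
````

With `clan.directory` pointing at the test's own directory, the generated `vars/` and `sops/` trees end up next to the test definition, which is also where the machines look for them at runtime.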
docs/site/guides/internal-ssl-services.md (new file, 213 lines)

````diff
@@ -0,0 +1,213 @@
+A common use case you might have is to host services and applications which are
+only reachable within your clan.
+
+This guide explains how to set up such secure, clan-internal web services using
+a custom top-level domain (TLD) with SSL certificates.
+
+Your services will be accessible only within your clan network and secured with
+proper SSL certificates that all clan machines trust.
+
+## Overview
+
+By combining the `coredns` and `certificates` clan services, you can:
+
+- Create a custom TLD for your clan (e.g. `.c`)
+- Host internal web services accessible via HTTPS (e.g. `https://api.c`, `https://dashboard.c`)
+- Automatically provision and trust SSL certificates across all clan machines
+- Keep internal services secure and isolated from the public internet
+
+The setup uses two clan services working together:
+
+- **coredns service**: Provides DNS resolution for your custom TLD within the clan
+- **certificates service**: Creates a certificate authority (CA) and issues SSL certificates for your TLD
+
+### DNS Resolution Flow
+
+1. A clan machine tries to access `https://service.c`
+2. The machine queries its local DNS resolver (unbound)
+3. For `.c` domains, the query is forwarded to your clan's CoreDNS server. All
+   other domains will be resolved as usual.
+4. CoreDNS returns the IP address of the machine hosting the service
+5. The machine connects directly to the service over HTTPS
+6. The SSL certificate is trusted because all machines trust your clan's CA
+
+## Step-by-Step Setup
+
+The following setup assumes you have a VPN (e.g. Zerotier) already running. The
+IPs configured in the options below will probably the Zerotier-IPs of the
+respective machines.
+
+### Configure the CoreDNS Service
+
+The CoreDNS service has two roles:
+- `server`: Runs the DNS server for your custom TLD
+- `default`: Makes machines use the DNS server for TLD resolution and allows exposing services
+
+Add this to your inventory:
+
+```nix
+inventory = {
+  machines = {
+    dns-server = { }; # Machine that will run the DNS server
+    web-server = { }; # Machine that will host web services
+    client = { };     # Any other machines in your clan
+  };
+
+  instances = {
+    coredns = {
+
+      # Add the default role to all machines
+      roles.default.tags = [ "all" ];
+
+      # DNS server for the .c TLD
+      roles.server.machines.dns-server.settings = {
+        ip = "192.168.1.10"; # IP of your DNS server machine
+        tld = "c";
+      };
+
+      # Machine hosting services (example: ca.c and admin.c)
+      roles.default.machines.web-server.settings = {
+        ip = "192.168.1.20"; # IP of your web server
+        services = [ "ca" "admin" ];
+      };
+    };
+  };
+};
+```
+
+### Configure the Certificates Service
+
+The certificates service also has two roles:
+- `ca`: Sets up the certificate authority on a server
+- `default`: Makes machines trust the CA and allows them to request certificates
+
+Add this to your inventory:
+
+```nix
+inventory = {
+  instances = {
+    # ... coredns configuration from above ...
+
+    certificates = {
+
+      # Set up CA for .c domain
+      roles.ca.machines.dns-server.settings = {
+        tlds = [ "c" ];
+        acmeEmail = "admin@example.com"; # Optional: your email
+      };
+
+      # Add default role to all machines to trust the CA
+      roles.default.tags = [ "all" ];
+    };
+  };
+};
+```
+
+### Complete Example Configuration
+
+Here's a complete working example:
+
+```nix
+nventory = {
+  machines = {
+    caserver = { };  # DNS server + CA + web services
+    webserver = { }; # Additional web services
+    client = { };    # Client machine
+  };
+
+  instances = {
+    coredns = {
+
+      # Add the default role to all machines
+      roles.default.tags = [ "all" ];
+
+      # DNS server for the .c TLD
+      roles.server.machines.caserver.settings = {
+        ip = "192.168.8.5";
+        tld = "c";
+      };
+
+      # machine hosting https://ca.c (our CA for SSL)
+      roles.default.machines.caserver.settings = {
+        ip = "192.168.8.5";
+        services = [ "ca" ];
+      };
+
+      # machine hosting https://blub.c (some internal web-service)
+      roles.default.machines.webserver.settings = {
+        ip = "192.168.8.6";
+        services = [ "blub" ];
+      };
+    };
+
+    # Provide https for the .c top-level domain
+    certificates = {
+
+      roles.ca.machines.caserver.settings = {
+        tlds = [ "c" ];
+        acmeEmail = "admin@example.com";
+      };
+
+      roles.default.tags = [ "all" ];
+    };
+  };
+};
+```
+
+## Testing Your Configuration
+
+DNS resolution can be tested with:
+
+```bash
+# On any clan machine, test DNS resolution
+nslookup ca.c
+nslookup blub.c
+```
+
+You should also now be able to visit `https://ca.c` to access the certificate authority or visit `https://blub.c` to access your web service.
+
+## Troubleshooting
+
+### DNS Resolution Issues
+
+1. **Check if DNS server is running**:
+   ```bash
+   # On the DNS server machine
+   systemctl status coredns
+   ```
+
+2. **Verify DNS configuration**:
+   ```bash
+   # Check if the right nameservers are configured
+   cat /etc/resolv.conf
+   systemctl status systemd-resolved
+   ```
+
+3. **Test DNS directly**:
+   ```bash
+   # Query the DNS server directly
+   dig @192.168.8.5 ca.c
+   ```
+
+### Certificate Issues
+
+1. **Check CA status**:
+   ```bash
+   # On the CA machine
+   systemctl status step-ca
+   systemctl status nginx
+   ```
+
+2. **Verify certificate trust**:
+   ```bash
+   # Test certificate trust
+   curl -v https://ca.c
+   openssl s_client -connect ca.c:443 -verify_return_error
+   ```
+
+3. **Check ACME configuration**:
+   ```bash
+   # View ACME certificates
+   ls /var/lib/acme/
+   journalctl -u acme-ca.c.service
+   ```
````
````diff
@@ -288,7 +288,7 @@ of their type.
 In the inventory we the assign machines to a type, e.g. by using tags
 
 ```nix title="flake.nix"
-instnaces.machine-type = {
+instances.machine-type = {
   module.input = "self";
   module.name = "@pinpox/machine-type";
   roles.desktop.tags.desktop = { };
@@ -303,3 +303,4 @@ instnaces.machine-type = {
 - [Reference Documentation for Service Authors](../../reference/options/clan_service.md)
 - [Migration Guide from ClanModules to ClanServices](../../guides/migrations/migrate-inventory-services.md)
 - [Decision that lead to ClanServices](../../decisions/01-Clan-Modules.md)
+- [Testing Guide for Services with Vars](../contributing/testing.md#testing-services-with-vars)
````
````diff
@@ -122,7 +122,7 @@ hide:
 
 command line interface
 
-- [Clan Options](/options)
+- [Clan Options](./reference/options/clan.md)
 
 ---
 
````
````diff
@@ -4,10 +4,10 @@ This section of the site provides an overview of available options and commands
 
 ---
 
-- [Clan Configuration Option](/options) - for defining a Clan
 - Learn how to use the [Clan CLI](../reference/cli/index.md)
 - Explore available [services](../services/definition.md)
 - [NixOS Configuration Options](../reference/clan.core/index.md) - Additional options avilable on a NixOS machine.
+- [Search Clan Option](/option-search) - for defining a Clan
 
 ---
 
````
flake.lock (generated, 23 lines changed)

````diff
@@ -71,15 +71,16 @@
       ]
     },
     "locked": {
-      "lastModified": 1758805352,
-      "narHash": "sha256-BHdc43Lkayd+72W/NXRKHzX5AZ+28F3xaUs3a88/Uew=",
+      "lastModified": 1759509947,
+      "narHash": "sha256-4XifSIHfpJKcCf5bZZRhj8C4aCpjNBaE3kXr02s4rHU=",
       "owner": "nix-darwin",
       "repo": "nix-darwin",
-      "rev": "c48e963a5558eb1c3827d59d21c5193622a1477c",
+      "rev": "000eadb231812ad6ea6aebd7526974aaf4e79355",
       "type": "github"
     },
     "original": {
       "owner": "nix-darwin",
+      "ref": "nix-darwin-25.05",
       "repo": "nix-darwin",
       "type": "github"
     }
@@ -114,15 +115,15 @@
     },
     "nixpkgs": {
       "locked": {
-        "lastModified": 315532800,
-        "narHash": "sha256-1tUpklZsKzMGI3gjo/dWD+hS8cf+5Jji8TF5Cfz7i3I=",
-        "rev": "08b8f92ac6354983f5382124fef6006cade4a1c1",
+        "lastModified": 1760324802,
+        "narHash": "sha256-VWlJtLQ5EQQj45Wj0yTExtSjwRyZ59/qMqEwus/Exlg=",
+        "rev": "7e297ddff44a3cc93673bb38d0374df8d0ad73e4",
         "type": "tarball",
-        "url": "https://releases.nixos.org/nixpkgs/nixpkgs-25.11pre862603.08b8f92ac635/nixexprs.tar.xz"
+        "url": "https://releases.nixos.org/nixos/25.05/nixos-25.05.811135.7e297ddff44a/nixexprs.tar.xz"
       },
       "original": {
         "type": "tarball",
-        "url": "https://nixos.org/channels/nixpkgs-unstable/nixexprs.tar.xz"
+        "url": "https://nixos.org/channels/nixos-25.05/nixexprs.tar.xz"
       }
     },
     "root": {
@@ -181,11 +182,11 @@
       ]
     },
     "locked": {
-      "lastModified": 1758728421,
-      "narHash": "sha256-ySNJ008muQAds2JemiyrWYbwbG+V7S5wg3ZVKGHSFu8=",
+      "lastModified": 1760120816,
+      "narHash": "sha256-gq9rdocpmRZCwLS5vsHozwB6b5nrOBDNc2kkEaTXHfg=",
       "owner": "numtide",
       "repo": "treefmt-nix",
-      "rev": "5eda4ee8121f97b218f7cc73f5172098d458f1d1",
+      "rev": "761ae7aff00907b607125b2f57338b74177697ed",
       "type": "github"
     },
     "original": {
````
````diff
@@ -2,9 +2,9 @@
   description = "clan.lol base operating system";
 
   inputs = {
-    nixpkgs.url = "https://nixos.org/channels/nixpkgs-unstable/nixexprs.tar.xz";
+    nixpkgs.url = "https://nixos.org/channels/nixos-25.05/nixexprs.tar.xz";
 
-    nix-darwin.url = "github:nix-darwin/nix-darwin";
+    nix-darwin.url = "github:nix-darwin/nix-darwin/nix-darwin-25.05";
     nix-darwin.inputs.nixpkgs.follows = "nixpkgs";
 
     flake-parts.url = "github:hercules-ci/flake-parts";
@@ -77,6 +77,8 @@
           };
         };
       };
+      # Allows downstream users to inject "unsupported" nixpkgs versions
+      checks.minNixpkgsVersion.ignore = true;
     };
     systems = import systems;
     imports = [
````
````diff
@@ -11,8 +11,6 @@
   treefmt.programs.nixfmt.enable = true;
   treefmt.programs.nixfmt.package = pkgs.nixfmt-rfc-style;
   treefmt.programs.deadnix.enable = true;
-  treefmt.programs.sizelint.enable = true;
-  treefmt.programs.sizelint.failOnWarn = true;
   treefmt.programs.clang-format.enable = true;
   treefmt.settings.global.excludes = [
     "*.png"
@@ -52,8 +50,6 @@
     "checks/secrets/sops/groups/group/machines/machine"
     "checks/syncthing/introducer/introducer_device_id"
     "checks/syncthing/introducer/introducer_test_api"
-    "docs/site/static/asciinema-player/asciinema-player.css"
-    "docs/site/static/asciinema-player/asciinema-player.min.js"
     "nixosModules/clanCore/vars/secret/sops/eval-tests/populated/vars/my_machine/my_generator/my_secret"
     "pkgs/clan-cli/clan_cli/tests/data/gnupg.conf"
     "pkgs/clan-cli/clan_cli/tests/data/password-store/.gpg-id"
@@ -94,9 +90,6 @@
       "*.yaml"
       "*.yml"
     ];
-    excludes = [
-      "*/asciinema-player/*"
-    ];
   };
   treefmt.programs.mypy.directories = {
     "clan-cli" = {
````
lib/clanTest/virtual-fs.nix (new file, 51 lines)

````diff
@@ -0,0 +1,51 @@
+{ lib }:
+let
+  sanitizePath =
+    rootPath: path:
+    let
+      storePrefix = builtins.unsafeDiscardStringContext ("${rootPath}");
+      pathStr = lib.removePrefix "/" (
+        lib.removePrefix storePrefix (builtins.unsafeDiscardStringContext (toString path))
+      );
+    in
+    pathStr;
+
+  mkFunctions = rootPath: passthru: virtual_fs: {
+    # Some functions to override lib functions
+    pathExists =
+      path:
+      let
+        pathStr = sanitizePath rootPath path;
+        isPassthru = builtins.any (exclude: (builtins.match exclude pathStr) != null) passthru;
+      in
+      if isPassthru then
+        builtins.pathExists path
+      else
+        let
+          res = virtual_fs ? ${pathStr};
+        in
+        lib.trace "pathExists: '${pathStr}' -> '${lib.generators.toPretty { } res}'" res;
+    readDir =
+      path:
+      let
+        pathStr = sanitizePath rootPath path;
+        base = (pathStr + "/");
+        res = lib.mapAttrs' (name: fileInfo: {
+          name = lib.removePrefix base name;
+          value = fileInfo.type;
+        }) (lib.filterAttrs (n: _: lib.hasPrefix base n) virtual_fs);
+        isPassthru = builtins.any (exclude: (builtins.match exclude pathStr) != null) passthru;
+      in
+      if isPassthru then
+        builtins.readDir path
+      else
+        lib.trace "readDir: '${pathStr}' -> '${lib.generators.toPretty { } res}'" res;
+  };
+in
+{
+  virtual_fs,
+  rootPath,
+  # Patterns
+  passthru ? [ ],
+}:
+mkFunctions rootPath passthru virtual_fs
````
````diff
@@ -28,7 +28,6 @@ lib.fix (
     # Plain imports.
     introspection = import ./introspection { inherit lib; };
     jsonschema = import ./jsonschema { inherit lib; };
-    facts = import ./facts.nix { inherit lib; };
     docs = import ./docs.nix { inherit lib; };
 
     # flakes
@@ -36,6 +35,10 @@
 
     # TODO: Flatten our lib functions like this:
     resolveModule = clanLib.callLib ./resolve-module { };
+
+    fs = {
+      inherit (builtins) pathExists readDir;
+    };
   };
 in
 f
````
````diff
@@ -1,71 +0,0 @@
-{ lib, ... }:
-clanDir:
-let
-
-  allMachineNames = lib.mapAttrsToList (name: _: name) (builtins.readDir clanDir);
-
-  getFactPath = machine: fact: "${clanDir}/machines/${machine}/facts/${fact}";
-
-  readFact =
-    machine: fact:
-    let
-      path = getFactPath machine fact;
-    in
-    if builtins.pathExists path then builtins.readFile path else null;
-
-  # Example:
-  #
-  # readFactFromAllMachines zerotier-ip
-  # => {
-  #   machineA = "1.2.3.4";
-  #   machineB = "5.6.7.8";
-  # };
-  readFactFromAllMachines =
-    fact:
-    let
-      machines = allMachineNames;
-      facts = lib.genAttrs machines (machine: readFact machine fact);
-      filteredFacts = lib.filterAttrs (_machine: fact: fact != null) facts;
-    in
-    filteredFacts;
-
-  # all given facts are are set and factvalues are never null.
-  #
-  # Example:
-  #
-  # readFactsFromAllMachines [ "zerotier-ip" "syncthing.pub" ]
-  # => {
-  #   machineA =
-  #     {
-  #       "zerotier-ip" = "1.2.3.4";
-  #       "synching.pub" = "1234";
-  #     };
-  #   machineB =
-  #     {
-  #       "zerotier-ip" = "5.6.7.8";
-  #       "synching.pub" = "23456719";
-  #     };
-  # };
-  readFactsFromAllMachines =
-    facts:
-    let
-      # machine -> fact -> factvalue
-      machinesFactsAttrs = lib.genAttrs allMachineNames (
-        machine: lib.genAttrs facts (fact: readFact machine fact)
-      );
-      # remove all machines which don't have all facts set
-      filteredMachineFactAttrs = lib.filterAttrs (
-        _machine: values: builtins.all (fact: values.${fact} != null) facts
-      ) machinesFactsAttrs;
-    in
-    filteredMachineFactAttrs;
-in
-{
-  inherit
-    allMachineNames
-    getFactPath
-    readFact
-    readFactFromAllMachines
-    readFactsFromAllMachines
-    ;
-}
````
````diff
@@ -149,6 +149,13 @@ let
     # TODO: Add index support in nixpkgs first
     # else if type.name == "listOf" then
     #   handleListOf meta.list
+    else if type.name == "either" then
+      # For either(oneOf) types, we skip introspection as we cannot
+      # determine which branch of the union was taken without more context
+      # This *should* be safe, as it can currently mostly be triggered through
+      # The `extraModules` setting of inventory modules and seems to be better
+      # than just aborting entirely.
+      { }
     else
       throw "Yet Unsupported type: ${type.name}";
 in
````
````diff
@@ -699,4 +699,44 @@ in
       };
     };
   };
+
+  test_listOf_either =
+    let
+      evaluated = eval [
+        {
+          options.extraModules = lib.mkOption {
+            description = "List of modules that can be strings, paths, or attrsets";
+            default = [ ];
+            type = lib.types.listOf (
+              lib.types.oneOf [
+                lib.types.str
+                lib.types.path
+                (lib.types.attrsOf lib.types.anything)
+              ]
+            );
+          };
+        }
+        ({
+          _file = "config.nix";
+          extraModules = [
+            "modules/common.nix"
+            ./some/path.nix
+            { config = { }; }
+          ];
+        })
+      ];
+      result = slib.getPrios { options = evaluated.options; };
+    in
+    {
+      inherit evaluated;
+      # Test that either types in list items return empty objects
+      # This is a behavioral test and not necessarily the correct
+      # behavior. But this is better than crashing on people directly.
+      expr = result.extraModules.__list;
+      expected = [
+        { }
+        { }
+        { }
+      ];
+    };
 }
````
````diff
@@ -133,12 +133,13 @@ in
     }
   )
   {
-    # TODO: Figure out why this causes infinite recursion
-    inventory.machines = lib.optionalAttrs (builtins.pathExists "${directory}/machines") (
-      builtins.mapAttrs (_n: _v: { }) (
-        lib.filterAttrs (_: t: t == "directory") (builtins.readDir "${directory}/machines")
-      )
-    );
+    # Note: we use clanLib.fs here, so that we can override it in tests
+    inventory = lib.optionalAttrs (clanLib.fs.pathExists "${directory}/machines") ({
+      imports = lib.mapAttrsToList (name: _t: {
+        _file = "${directory}/machines/${name}";
+        machines.${name} = { };
+      }) ((lib.filterAttrs (_: t: t == "directory") (clanLib.fs.readDir "${directory}/machines")));
+    });
   }
   {
     inventory.machines = lib.mapAttrs (_n: _: { }) config.machines;
````
|||||||
108	lib/modules/dir_test.nix	Normal file
@@ -0,0 +1,108 @@
+{
+  lib ? import <nixpkgs/lib>,
+}:
+let
+  clanLibOrig = (import ./.. { inherit lib; }).__unfix__;
+  clanLibWithFs =
+    { virtual_fs }:
+    lib.fix (
+      lib.extends (
+        final: _:
+        let
+          clan-core = {
+            clanLib = final;
+            modules.clan.default = lib.modules.importApply ./clan { inherit clan-core; };
+
+            # Note: Can add other things to "clan-core"
+            # ... Not needed for this test
+          };
+        in
+        {
+          clan = import ../clan {
+            inherit lib clan-core;
+          };
+
+          # Override clanLib.fs for unit-testing against a virtual filesystem
+          fs = import ../clanTest/virtual-fs.nix { inherit lib; } {
+            inherit rootPath virtual_fs;
+            # Example of a passthru
+            # passthru = [
+            #   ".*inventory\.json$"
+            # ];
+          };
+        }
+      ) clanLibOrig
+    );
+
+  rootPath = ./.;
+in
+{
+  test_autoload_directories =
+    let
+      vclan =
+        (clanLibWithFs {
+          virtual_fs = {
+            "machines" = {
+              type = "directory";
+            };
+            "machines/foo-machine" = {
+              type = "directory";
+            };
+            "machines/bar-machine" = {
+              type = "directory";
+            };
+          };
+        }).clan
+          { config.directory = rootPath; };
+    in
+    {
+      inherit vclan;
+      expr = {
+        machines = lib.attrNames vclan.config.inventory.machines;
+        definedInMachinesDir = map (
+          p: lib.hasInfix "/machines/" p
+        ) vclan.options.inventory.valueMeta.configuration.options.machines.files;
+      };
+      expected = {
+        machines = [
+          "bar-machine"
+          "foo-machine"
+        ];
+        definedInMachinesDir = [
+          true # /machines/foo-machine
+          true # /machines/bar-machine
+          false # <clan-core>/module.nix defines "machines" without members
+        ];
+      };
+    };
+
+  # Could probably be unified with the previous test
+  # This is here for the sake to show that 'virtual_fs' is a test parameter
+  test_files_are_not_machines =
+    let
+      vclan =
+        (clanLibWithFs {
+          virtual_fs = {
+            "machines" = {
+              type = "directory";
+            };
+            "machines/foo.nix" = {
+              type = "file";
+            };
+            "machines/bar.nix" = {
+              type = "file";
+            };
+          };
+        }).clan
+          { config.directory = rootPath; };
+    in
+    {
+      inherit vclan;
+      expr = {
+        machines = lib.attrNames vclan.config.inventory.machines;
+      };
+      expected = {
+        machines = [ ];
+      };
+    };
+}
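The tests above feed `clanTest/virtual-fs.nix` a `virtual_fs` attrset that maps repository-relative paths to `{ type = "directory" | "file"; }`. Assuming that contract, a rough sketch of how an overridden `readDir` could answer from the table instead of the real disk — the resolution logic here is illustrative, not the real implementation:

    # Illustrative only: list the direct children of `path` recorded in virtual_fs.
    readDir =
      path:
      let
        rel = lib.removePrefix (toString rootPath + "/") (toString path);
        direct = lib.filterAttrs (name: _entry: dirOf name == rel) virtual_fs;
      in
      lib.mapAttrs' (name: entry: {
        name = baseNameOf name;
        value = entry.type;
      }) direct;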
@@ -12,6 +12,7 @@ let
 in
 #######
 {
+  autoloading = import ./dir_test.nix { inherit lib; };
   test_missing_self =
     let
       eval = clan {
@@ -164,13 +164,25 @@
   config = lib.mkIf (config.clan.core.secrets != { }) {
     clan.core.facts.services = lib.mapAttrs' (
       name: service:
-      lib.warn "clan.core.secrets.${name} is deprecated, use clan.core.facts.services.${name} instead" (
-        lib.nameValuePair name ({
-          secret = service.secrets;
-          public = service.facts;
-          generator = service.generator;
-        })
-      )
+      lib.warn
+        ''
+          ###############################################################################
+          #                                                                             #
+          # clan.core.secrets.${name} clan.core.facts.services.${name} is deprecated    #
+          # in favor of "vars"                                                          #
+          #                                                                             #
+          # Refer to https://docs.clan.lol/guides/migrations/migration-facts-vars/      #
+          # for migration instructions.                                                 #
+          #                                                                             #
+          ###############################################################################
+        ''
+        (
+          lib.nameValuePair name ({
+            secret = service.secrets;
+            public = service.facts;
+            generator = service.generator;
+          })
+        )
     ) config.clan.core.secrets;
   };
 }
@@ -6,7 +6,17 @@
 }:
 {
   config.warnings = lib.optionals (config.clan.core.facts.services != { }) [
-    "Facts are deprecated, please migrate them to vars instead, see: https://docs.clan.lol/guides/migrations/migration-facts-vars/"
+    ''
+      ###############################################################################
+      #                                                                             #
+      # Facts are deprecated please migrate any usages to vars instead              #
+      #                                                                             #
+      #                                                                             #
+      # Refer to https://docs.clan.lol/guides/migrations/migration-facts-vars/      #
+      # for migration instructions.                                                 #
+      #                                                                             #
+      ###############################################################################
+    ''
   ];

   options.clan.core.facts = {
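Both deprecation banners point at the facts-to-vars migration guide linked above. For orientation, a hedged sketch of the direction of that migration, using the `clan.core.vars.generators` option shape that the tests later in this diff exercise; the generator name and script body are placeholders, and the linked guide remains the authoritative reference:

    # Illustrative replacement for a facts service; consult the migration guide for details.
    clan.core.vars.generators.my-service = {
      files.password = { secret = true; };
      script = ''
        tr -dc A-Za-z0-9 </dev/urandom | head -c 32 > "$out"/password
      '';
    };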
@@ -5,33 +5,31 @@
 let
   inherit (lib)
     filterAttrs
-    flatten
     mapAttrsToList
     ;
-in
-generators:
-let
-  relevantFiles =
-    generator:
-    filterAttrs (
-      _name: f: f.secret && f.deploy && (f.neededFor == "users" || f.neededFor == "services")
-    ) generator.files;
-  allFiles = flatten (
-    mapAttrsToList (
-      gen_name: generator:
-      mapAttrsToList (fname: file: {
-        name = fname;
-        generator = gen_name;
-        neededForUsers = file.neededFor == "users";
-        inherit (generator) share;
-        inherit (file)
-          owner
-          group
-          mode
-          restartUnits
-          ;
-      }) (relevantFiles generator)
-    ) generators
+
+  relevantFiles = filterAttrs (
+    _name: f: f.secret && f.deploy && (f.neededFor == "users" || f.neededFor == "services")
   );
+
+  collectFiles =
+    generators:
+    builtins.concatLists (
+      mapAttrsToList (
+        gen_name: generator:
+        mapAttrsToList (fname: file: {
+          name = fname;
+          generator = gen_name;
+          neededForUsers = file.neededFor == "users";
+          inherit (generator) share;
+          inherit (file)
+            owner
+            group
+            mode
+            restartUnits
+            ;
+        }) (relevantFiles generator.files)
+      ) generators
+    );
 in
-allFiles
+collectFiles
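Design note on the refactor above: `relevantFiles` is now a partially applied `filterAttrs` predicate that each caller applies to `generator.files`, and the per-generator lists are joined with `builtins.concatLists`, so `lib.flatten` is no longer imported. A hedged usage sketch; the input shape is inferred from the fields the function reads, and the generator/file names are placeholders:

    # Sketch: collectFiles expects an attrset of generators keyed by name.
    collectFiles {
      sshd = {
        share = false;
        files.host_key = {
          secret = true;
          deploy = true;
          neededFor = "services";
          owner = "root";
          group = "root";
          mode = "0400";
          restartUnits = [ "sshd.service" ];
        };
      };
    }
    # => [ { name = "host_key"; generator = "sshd"; neededForUsers = false; share = false; ... } ]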
@@ -41,7 +41,7 @@ class ApiBridge(Protocol):

     def process_request(self, request: BackendRequest) -> None:
         """Process an API request through the middleware chain."""
-        from clan_app.middleware.base import MiddlewareContext  # noqa: PLC0415
+        from clan_app.middleware.base import MiddlewareContext

         with ExitStack() as stack:
             # Capture the current call stack up to this point
@@ -62,7 +62,7 @@ class ApiBridge(Protocol):
                 )
                 middleware.process(context)
             except Exception as e:
-                from clan_app.middleware.base import (  # noqa: PLC0415
+                from clan_app.middleware.base import (
                     MiddlewareError,
                 )

@@ -191,13 +191,13 @@ class HttpBridge(ApiBridge, BaseHTTPRequestHandler):

         return file_data

-    def do_OPTIONS(self) -> None:
+    def do_OPTIONS(self) -> None:  # noqa: N802
        """Handle CORS preflight requests."""
        self.send_response_only(200)
        self._send_cors_headers()
        self.end_headers()

-    def do_GET(self) -> None:
+    def do_GET(self) -> None:  # noqa: N802
        """Handle GET requests."""
        parsed_url = urlparse(self.path)
        path = parsed_url.path
@@ -211,7 +211,7 @@ class HttpBridge(ApiBridge, BaseHTTPRequestHandler):
        else:
            self.send_api_error_response("info", "Not Found", ["http_bridge", "GET"])

-    def do_POST(self) -> None:
+    def do_POST(self) -> None:  # noqa: N802
        """Handle POST requests."""
        parsed_url = urlparse(self.path)
        path = parsed_url.path

@@ -34,7 +34,7 @@ class WebviewBridge(ApiBridge):

        log.debug(f"Sending response: {serialized}")
        # Import FuncStatus locally to avoid circular import
-       from .webview import FuncStatus  # noqa: PLC0415
+       from .webview import FuncStatus

        self.webview.return_(response._op_key, FuncStatus.SUCCESS, serialized)  # noqa: SLF001

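The hunks above, and the many similar ones later in this comparison, drop `# noqa: PLC0415` from deliberate function-level imports while adding `# noqa: N802` only where `BaseHTTPRequestHandler` dictates the method names. That is consistent with the PLC0415 rule being disabled project-wide; speculatively, the corresponding ruff setting would look like the following, though the repository's actual lint configuration is not shown in this diff:

    # Hypothetical pyproject.toml excerpt; not part of this change set.
    [tool.ruff.lint]
    ignore = ["PLC0415"]  # allow function-level imports without per-line suppressions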
@@ -113,15 +113,27 @@ mkShell {
     # todo darwin support needs some work
     (lib.optionalString stdenv.hostPlatform.isLinux ''
       # configure playwright for storybook snapshot testing
-      export PLAYWRIGHT_SKIP_BROWSER_DOWNLOAD=1
+      # we only want webkit as that matches what the app is rendered with
+
       export PLAYWRIGHT_BROWSERS_PATH=${
         playwright-driver.browsers.override {
           withFfmpeg = false;
           withFirefox = false;
+          withWebkit = true;
           withChromium = false;
-          withChromiumHeadlessShell = true;
+          withChromiumHeadlessShell = false;
         }
       }
-      export PLAYWRIGHT_HOST_PLATFORM_OVERRIDE="ubuntu-24.04"
+
+      # stop playwright from trying to validate it has downloaded the necessary browsers
+      # we are providing them manually via nix
+
+      export PLAYWRIGHT_SKIP_VALIDATE_HOST_REQUIREMENTS=true
+
+      # playwright browser drivers are versioned e.g. webkit-2191
+      # this helps us avoid having to update the playwright js dependency everytime we update nixpkgs and vice versa
+      # see vitest.config.js for corresponding launch configuration
+
+      export PLAYWRIGHT_WEBKIT_EXECUTABLE=$(find -L "$PLAYWRIGHT_BROWSERS_PATH" -type f -name "pw_run.sh")
     '');
 }
16	pkgs/clan-app/ui/package-lock.json	generated
@@ -53,7 +53,7 @@
         "jsdom": "^26.1.0",
         "knip": "^5.61.2",
         "markdown-to-jsx": "^7.7.10",
-        "playwright": "~1.53.2",
+        "playwright": "~1.55.1",
         "postcss": "^8.4.38",
         "postcss-url": "^10.1.3",
         "prettier": "^3.2.5",
@@ -6956,13 +6956,13 @@
       }
     },
     "node_modules/playwright": {
-      "version": "1.53.2",
-      "resolved": "https://registry.npmjs.org/playwright/-/playwright-1.53.2.tgz",
-      "integrity": "sha512-6K/qQxVFuVQhRQhFsVZ9fGeatxirtrpPgxzBYWyZLEXJzqYwuL4fuNmfOfD5et1tJE4GScKyPNeLhZeRwuTU3A==",
+      "version": "1.55.1",
+      "resolved": "https://registry.npmjs.org/playwright/-/playwright-1.55.1.tgz",
+      "integrity": "sha512-cJW4Xd/G3v5ovXtJJ52MAOclqeac9S/aGGgRzLabuF8TnIb6xHvMzKIa6JmrRzUkeXJgfL1MhukP0NK6l39h3A==",
       "dev": true,
       "license": "Apache-2.0",
       "dependencies": {
-        "playwright-core": "1.53.2"
+        "playwright-core": "1.55.1"
       },
       "bin": {
         "playwright": "cli.js"
@@ -6975,9 +6975,9 @@
       }
     },
     "node_modules/playwright-core": {
-      "version": "1.53.2",
-      "resolved": "https://registry.npmjs.org/playwright-core/-/playwright-core-1.53.2.tgz",
-      "integrity": "sha512-ox/OytMy+2w1jcYEYlOo1Hhp8hZkLCximMTUTMBXjGUA1KoFfiSZ+DU+3a739jsPY0yoKH2TFy9S2fsJas8yAw==",
+      "version": "1.55.1",
+      "resolved": "https://registry.npmjs.org/playwright-core/-/playwright-core-1.55.1.tgz",
+      "integrity": "sha512-Z6Mh9mkwX+zxSlHqdr5AOcJnfp+xUWLCt9uKV18fhzA8eyxUd8NUWzAjxUh55RZKSYwDGX0cfaySdhZJGMoJ+w==",
       "dev": true,
       "license": "Apache-2.0",
       "bin": {
@@ -48,7 +48,7 @@
     "jsdom": "^26.1.0",
     "knip": "^5.61.2",
     "markdown-to-jsx": "^7.7.10",
-    "playwright": "~1.53.2",
+    "playwright": "~1.55.1",
     "postcss": "^8.4.38",
     "postcss-url": "^10.1.3",
     "prettier": "^3.2.5",
@@ -1,7 +1,7 @@
 import type { Meta, StoryObj } from "@kachurun/storybook-solid";
 import { Button, ButtonProps } from "./Button";
 import { Component } from "solid-js";
-import { expect, fn, waitFor } from "storybook/test";
+import { expect, fn, waitFor, within } from "storybook/test";
 import { StoryContext } from "@kachurun/storybook-solid-vite";

 const getCursorStyle = (el: Element) => window.getComputedStyle(el).cursor;
@@ -216,17 +216,11 @@ const timeout = process.env.NODE_ENV === "test" ? 500 : 2000;
 export const Primary: Story = {
   args: {
     hierarchy: "primary",
-    onAction: fn(async () => {
-      // wait 500 ms to simulate an action
-      await new Promise((resolve) => setTimeout(resolve, timeout));
-      // randomly fail to check that the loading state still returns to normal
-      if (Math.random() > 0.5) {
-        throw new Error("Action failure");
-      }
-    }),
+    onClick: fn(),
   },

-  play: async ({ canvas, step, userEvent, args }: StoryContext) => {
+  play: async ({ canvasElement, step, userEvent, args }: StoryContext) => {
+    const canvas = within(canvasElement);
     const buttons = await canvas.findAllByRole("button");

     for (const button of buttons) {
@@ -238,14 +232,6 @@ export const Primary: Story = {
       }

       await step(`Click on ${testID}`, async () => {
-        // check for the loader
-        const loaders = button.getElementsByClassName("loader");
-        await expect(loaders.length).toEqual(1);
-
-        // assert its width is 0 before we click
-        const [loader] = loaders;
-        await expect(loader.clientWidth).toEqual(0);
-
         // move the mouse over the button
         await userEvent.hover(button);

@@ -255,33 +241,8 @@ export const Primary: Story = {
         // click the button
         await userEvent.click(button);

-        // check the button has changed
-        await waitFor(
-          async () => {
-            // the action handler should have been called
-            await expect(args.onAction).toHaveBeenCalled();
-            // the button should have a loading class
-            await expect(button).toHaveClass("loading");
-            // the loader should be visible
-            await expect(loader.clientWidth).toBeGreaterThan(0);
-            // the pointer should have changed to wait
-            await expect(getCursorStyle(button)).toEqual("wait");
-          },
-          { timeout: timeout + 500 },
-        );
-
-        // wait for the action handler to finish
-        await waitFor(
-          async () => {
-            // the loading class should be removed
-            await expect(button).not.toHaveClass("loading");
-            // the loader should be hidden
-            await expect(loader.clientWidth).toEqual(0);
-            // the pointer should be normal
-            await expect(getCursorStyle(button)).toEqual("pointer");
-          },
-          { timeout: timeout + 500 },
-        );
+        // the click handler should have been called
+        await expect(args.onClick).toHaveBeenCalled();
       });
     }
   },
@@ -57,6 +57,7 @@ export const Button = (props: ButtonProps) => {

   return (
     <KobalteButton
+      role="button"
       class={cx(
         styles.button, // default button class
         local.size != "default" && styles[local.size],
@@ -160,47 +160,47 @@ const mockFetcher = <K extends OperationNames>(
   },
 }) satisfies ApiCall<K>;

-export const Default: Story = {
-  args: {},
-  decorators: [
-    (Story: StoryObj) => {
-      const queryClient = new QueryClient({
-        defaultOptions: {
-          queries: {
-            retry: false,
-            staleTime: Infinity,
-          },
-        },
-      });
-
-      Object.entries(queryData).forEach(([clanURI, clan]) => {
-        queryClient.setQueryData(
-          ["clans", encodeBase64(clanURI), "details"],
-          clan.details,
-        );
-
-        const machines = clan.machines || {};
-
-        queryClient.setQueryData(
-          ["clans", encodeBase64(clanURI), "machines"],
-          machines,
-        );
-
-        Object.entries(machines).forEach(([name, machine]) => {
-          queryClient.setQueryData(
-            ["clans", encodeBase64(clanURI), "machine", name, "state"],
-            machine.state,
-          );
-        });
-      });
-
-      return (
-        <ApiClientProvider client={{ fetch: mockFetcher }}>
-          <QueryClientProvider client={queryClient}>
-            <Story />
-          </QueryClientProvider>
-        </ApiClientProvider>
-      );
-    },
-  ],
-};
+// export const Default: Story = {
+//   args: {},
+//   decorators: [
+//     (Story: StoryObj) => {
+//       const queryClient = new QueryClient({
+//         defaultOptions: {
+//           queries: {
+//             retry: false,
+//             staleTime: Infinity,
+//           },
+//         },
+//       });
+//
+//       Object.entries(queryData).forEach(([clanURI, clan]) => {
+//         queryClient.setQueryData(
+//           ["clans", encodeBase64(clanURI), "details"],
+//           clan.details,
+//         );
+//
+//         const machines = clan.machines || {};
+//
+//         queryClient.setQueryData(
+//           ["clans", encodeBase64(clanURI), "machines"],
+//           machines,
+//         );
+//
+//         Object.entries(machines).forEach(([name, machine]) => {
+//           queryClient.setQueryData(
+//             ["clans", encodeBase64(clanURI), "machine", name, "state"],
+//             machine.state,
+//           );
+//         });
+//       });
+//
+//       return (
+//         <ApiClientProvider client={{ fetch: mockFetcher }}>
+//           <QueryClientProvider client={queryClient}>
+//             <Story />
+//           </QueryClientProvider>
+//         </ApiClientProvider>
+//       );
+//     },
+//   ],
+// };
@@ -11,28 +11,35 @@ export default meta;

 type Story = StoryObj<ClanSettingsModalProps>;

-export const Default: Story = {
-  args: {
-    onClose: fn(),
-    model: {
-      uri: "/home/foo/my-clan",
+const props: ClanSettingsModalProps = {
+  onClose: fn(),
+  model: {
+    uri: "/home/foo/my-clan",
+    details: {
       name: "Sol",
       description: null,
       icon: null,
-      fieldsSchema: {
-        name: {
-          readonly: true,
-          reason: null,
-        },
-        description: {
-          readonly: false,
-          reason: null,
-        },
-        icon: {
-          readonly: false,
-          reason: null,
-        },
+    },
+    fieldsSchema: {
+      name: {
+        readonly: true,
+        reason: null,
+        readonly_members: [],
+      },
+      description: {
+        readonly: false,
+        reason: null,
+        readonly_members: [],
+      },
+      icon: {
+        readonly: false,
+        reason: null,
+        readonly_members: [],
       },
     },
   },
 };
+
+export const Default: Story = {
+  args: props,
+};
@@ -22,9 +22,9 @@ import { Alert } from "@/src/components/Alert/Alert";
 import { removeClanURI } from "@/src/stores/clan";

 const schema = v.object({
-  name: v.pipe(v.optional(v.string())),
-  description: v.nullish(v.string()),
-  icon: v.pipe(v.nullish(v.string())),
+  name: v.string(),
+  description: v.optional(v.string()),
+  icon: v.optional(v.string()),
 });

 export interface ClanSettingsModalProps {
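With the stricter schema, `name` is required while `description` and `icon` stay plain optional strings. Assuming the standard valibot API that this file already imports as `v`, submit-time validation would look roughly like this; the modal's actual form wiring may differ:

    // Rough sketch of how the tightened schema behaves at submit time.
    const values = v.parse(schema, { name: "Sol" }); // throws if `name` is missing or not a string
    console.log(values.description); // undefined when not provided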
@@ -1,15 +0,0 @@
-import { Meta, StoryObj } from "@kachurun/storybook-solid";
-import { CubeScene } from "./cubes";
-
-const meta: Meta = {
-  title: "scene/cubes",
-  component: CubeScene,
-};
-
-export default meta;
-
-type Story = StoryObj;
-
-export const Default: Story = {
-  args: {},
-};
@@ -304,11 +304,10 @@ const FlashProgress = () => {
   const [store, set] = getStepStore<InstallStoreType>(stepSignal);

   onMount(async () => {
-    const result = await store.flash.progress.result;
-    if (result.status == "success") {
-      console.log("Flashing Success");
+    const result = await store.flash?.progress?.result;
+    if (result?.status == "success") {
+      stepSignal.next();
     }
-    stepSignal.next();
   });

   const handleCancel = async () => {
@@ -165,23 +165,23 @@ export default meta;

 type Story = StoryObj<typeof ServiceWorkflow>;

-export const Default: Story = {
-  args: {},
-};
-
-export const SelectRoleMembers: Story = {
-  render: () => (
-    <ServiceWorkflow
-      handleSubmit={(instance) => {
-        console.log("Submitted instance:", instance);
-      }}
-      onClose={() => {
-        console.log("Closed");
-      }}
-      initialStep="select:members"
-      initialStore={{
-        currentRole: "peer",
-      }}
-    />
-  ),
-};
+// export const Default: Story = {
+//   args: {},
+// };
+//
+// export const SelectRoleMembers: Story = {
+//   render: () => (
+//     <ServiceWorkflow
+//       handleSubmit={(instance) => {
+//         console.log("Submitted instance:", instance);
+//       }}
+//       onClose={() => {
+//         console.log("Closed");
+//       }}
+//       initialStep="select:members"
+//       initialStore={{
+//         currentRole: "peer",
+//       }}
+//     />
+//   ),
+// };
@@ -9,7 +9,11 @@
     "esModuleInterop": true,
     "jsx": "preserve",
     "jsxImportSource": "solid-js",
-    "types": ["vite/client", "vite-plugin-solid-svg/types-component-solid"],
+    "types": [
+      "vite/client",
+      "vite-plugin-solid-svg/types-component-solid",
+      "@vitest/browser/providers/playwright"
+    ],
     "noEmit": true,
     "resolveJsonModule": true,
    "allowJs": true,
@@ -40,7 +40,14 @@ export default mergeConfig(
       enabled: true,
       headless: true,
       provider: "playwright",
-      instances: [{ browser: "chromium" }],
+      instances: [
+        {
+          browser: "webkit",
+          launch: {
+            executablePath: process.env.PLAYWRIGHT_WEBKIT_EXECUTABLE,
+          },
+        },
+      ],
     },
     // This setup file applies Storybook project annotations for Vitest
     // More info at: https://storybook.js.org/docs/api/portable-stories/portable-stories-vitest#setprojectannotations
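Together with the dev-shell changes earlier in this diff, the browser runner now launches the WebKit build that Nix provides via `PLAYWRIGHT_WEBKIT_EXECUTABLE` instead of downloading a Chromium bundle. Roughly, and assuming the usual vitest entry point (the exact npm script names depend on the package.json):

    # inside the project's dev shell, where PLAYWRIGHT_WEBKIT_EXECUTABLE is exported
    npx vitest run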
@@ -9,7 +9,7 @@ def main() -> None:
     load_in_all_api_functions()

     # import lazily since we otherwise we do not have all api functions loaded according to Qubasa
-    from clan_lib.api import API  # noqa: PLC0415
+    from clan_lib.api import API

     schema = API.to_json_schema()
     print(f"""{json.dumps(schema, indent=2)}""")
@@ -75,13 +75,14 @@ class TestFlake(Flake):
     def path(self) -> Path:
         return self.test_dir

-    def select_machine(self, machine_name: str, selector: str) -> Any:
-        """Select a nix attribute for a specific machine.
+    def machine_selector(self, machine_name: str, selector: str) -> str:
+        """Create a selector for a specific machine.

         Args:
             machine_name: The name of the machine
             selector: The attribute selector string relative to the machine config
-            apply: Optional function to apply to the result
+        Returns:
+            The full selector string for the machine

         """
         config = nix_config()
@@ -89,9 +90,7 @@ class TestFlake(Flake):
         test_system = system
         if system.endswith("-darwin"):
             test_system = system.rstrip("darwin") + "linux"
-        full_selector = f'checks."{test_system}".{self.check_attr}.machinesCross.{system}."{machine_name}".{selector}'
-
-        return self.select(full_selector)
+        return f'checks."{test_system}".{self.check_attr}.machinesCross."{system}"."{machine_name}".{selector}'

     # we don't want to evaluate all machines of the flake. Only the ones defined in the test
     def set_machine_names(self, machine_names: list[str]) -> None:
@@ -103,7 +102,7 @@ class TestFlake(Flake):
         opts: "ListOptions | None" = None,  # noqa: ARG002
     ) -> "dict[str, MachineResponse]":
         """List machines of a clan"""
-        from clan_lib.machines.actions import (  # noqa: PLC0415
+        from clan_lib.machines.actions import (
            InventoryMachine,
            MachineResponse,
        )
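`machine_selector` now only builds the selector string; callers combine it with the existing `select` method themselves. A short sketch of the resulting call pattern — the machine and option names are placeholders and `test_flake` is an already constructed `TestFlake`:

    # Sketch: evaluate a single option of a test machine via the new helper.
    selector = test_flake.machine_selector("machine1", "config.networking.hostName")
    hostname = test_flake.select(selector)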
@@ -231,7 +231,7 @@ def remove_machine_command(args: argparse.Namespace) -> None:


 def add_group_argument(parser: argparse.ArgumentParser) -> None:
-    from clan_cli.completions import (  # noqa: PLC0415
+    from clan_cli.completions import (
         add_dynamic_completer,
         complete_groups,
     )
@@ -334,7 +334,7 @@ def register_groups_parser(parser: argparse.ArgumentParser) -> None:
         help="the name of the machines to add",
         type=machine_name_type,
     )
-    from clan_cli.completions import (  # noqa: PLC0415
+    from clan_cli.completions import (
         add_dynamic_completer,
         complete_machines,
     )
@@ -353,7 +353,7 @@ def register_groups_parser(parser: argparse.ArgumentParser) -> None:
         help="the name of the machines to remove",
         type=machine_name_type,
     )
-    from clan_cli.completions import (  # noqa: PLC0415
+    from clan_cli.completions import (
         add_dynamic_completer,
         complete_machines,
     )
@@ -369,7 +369,7 @@ def register_groups_parser(parser: argparse.ArgumentParser) -> None:
         help="the name of the user to add",
         type=user_name_type,
     )
-    from clan_cli.completions import (  # noqa: PLC0415
+    from clan_cli.completions import (
         add_dynamic_completer,
         complete_users,
     )
@@ -388,7 +388,7 @@ def register_groups_parser(parser: argparse.ArgumentParser) -> None:
         help="the name of the user to remove",
         type=user_name_type,
     )
-    from clan_cli.completions import (  # noqa: PLC0415
+    from clan_cli.completions import (
         add_dynamic_completer,
         complete_users,
     )
@@ -407,7 +407,7 @@ def register_groups_parser(parser: argparse.ArgumentParser) -> None:
         help="the name of the secret",
         type=secret_name_type,
     )
-    from clan_cli.completions import (  # noqa: PLC0415
+    from clan_cli.completions import (
         add_dynamic_completer,
         complete_secrets,
     )
@@ -426,7 +426,7 @@ def register_groups_parser(parser: argparse.ArgumentParser) -> None:
         help="the name of the secret",
         type=secret_name_type,
     )
-    from clan_cli.completions import (  # noqa: PLC0415
+    from clan_cli.completions import (
         add_dynamic_completer,
         complete_secrets,
     )
@@ -69,7 +69,7 @@ def register_import_sops_parser(parser: argparse.ArgumentParser) -> None:
         default=[],
         help="the group to import the secrets to",
     )
-    from clan_cli.completions import (  # noqa: PLC0415
+    from clan_cli.completions import (
         add_dynamic_completer,
         complete_groups,
     )
@@ -82,7 +82,7 @@ def register_import_sops_parser(parser: argparse.ArgumentParser) -> None:
         default=[],
         help="the machine to import the secrets to",
     )
-    from clan_cli.completions import (  # noqa: PLC0415
+    from clan_cli.completions import (
         add_dynamic_completer,
         complete_machines,
     )
@@ -95,7 +95,7 @@ def register_import_sops_parser(parser: argparse.ArgumentParser) -> None:
         default=[],
         help="the user to import the secrets to",
     )
-    from clan_cli.completions import (  # noqa: PLC0415
+    from clan_cli.completions import (
         add_dynamic_completer,
         complete_users,
     )
@@ -172,7 +172,7 @@ def register_machines_parser(parser: argparse.ArgumentParser) -> None:
         help="the name of the machine",
         type=machine_name_type,
     )
-    from clan_cli.completions import (  # noqa: PLC0415
+    from clan_cli.completions import (
         add_dynamic_completer,
         complete_machines,
     )
@@ -192,7 +192,7 @@ def register_machines_parser(parser: argparse.ArgumentParser) -> None:
         help="the name of the machine",
         type=machine_name_type,
     )
-    from clan_cli.completions import (  # noqa: PLC0415
+    from clan_cli.completions import (
         add_dynamic_completer,
         complete_machines,
     )
@@ -207,7 +207,7 @@ def register_machines_parser(parser: argparse.ArgumentParser) -> None:
         help="the name of the machine",
         type=machine_name_type,
     )
-    from clan_cli.completions import (  # noqa: PLC0415
+    from clan_cli.completions import (
         add_dynamic_completer,
         complete_machines,
     )
@@ -225,7 +225,7 @@ def register_machines_parser(parser: argparse.ArgumentParser) -> None:
         help="the name of the machine",
         type=machine_name_type,
     )
-    from clan_cli.completions import (  # noqa: PLC0415
+    from clan_cli.completions import (
         add_dynamic_completer,
         complete_machines,
         complete_secrets,
@@ -250,7 +250,7 @@ def register_machines_parser(parser: argparse.ArgumentParser) -> None:
         help="the name of the machine",
         type=machine_name_type,
     )
-    from clan_cli.completions import (  # noqa: PLC0415
+    from clan_cli.completions import (
         add_dynamic_completer,
         complete_machines,
         complete_secrets,
@@ -158,8 +158,10 @@ def encrypt_secret(
     admin_keys = sops.ensure_admin_public_keys(flake_dir)

     if not admin_keys:
-        # TODO double check the correct command to run
-        msg = "No keys found. Please run 'clan secrets add-key' to add a key."
+        msg = (
+            "No admin keys found.\n\n"
+            "Please run 'clan vars keygen' to generate and set up keys."
+        )
         raise ClanError(msg)

     username = next(iter(admin_keys)).username
@@ -253,7 +255,7 @@ def add_secret_argument(parser: argparse.ArgumentParser, autocomplete: bool) ->
         type=secret_name_type,
     )
     if autocomplete:
-        from clan_cli.completions import (  # noqa: PLC0415
+        from clan_cli.completions import (
             add_dynamic_completer,
             complete_secrets,
         )
@@ -465,7 +467,7 @@ def register_secrets_parser(subparser: argparse._SubParsersAction) -> None:
         default=[],
         help="the group to import the secrets to (can be repeated)",
     )
-    from clan_cli.completions import (  # noqa: PLC0415
+    from clan_cli.completions import (
         add_dynamic_completer,
         complete_groups,
     )
@@ -478,7 +480,7 @@ def register_secrets_parser(subparser: argparse._SubParsersAction) -> None:
         default=[],
         help="the machine to import the secrets to (can be repeated)",
     )
-    from clan_cli.completions import (  # noqa: PLC0415
+    from clan_cli.completions import (
         add_dynamic_completer,
         complete_machines,
     )
@@ -491,7 +493,7 @@ def register_secrets_parser(subparser: argparse._SubParsersAction) -> None:
         default=[],
         help="the user to import the secrets to (can be repeated)",
     )
-    from clan_cli.completions import (  # noqa: PLC0415
+    from clan_cli.completions import (
         add_dynamic_completer,
         complete_users,
     )
@@ -281,7 +281,7 @@ def register_users_parser(parser: argparse.ArgumentParser) -> None:
         help="the name of the user",
         type=user_name_type,
     )
-    from clan_cli.completions import (  # noqa: PLC0415
+    from clan_cli.completions import (
         add_dynamic_completer,
         complete_users,
     )
@@ -295,7 +295,7 @@ def register_users_parser(parser: argparse.ArgumentParser) -> None:
         help="the name of the user",
         type=user_name_type,
     )
-    from clan_cli.completions import (  # noqa: PLC0415
+    from clan_cli.completions import (
         add_dynamic_completer,
         complete_users,
     )
@@ -312,7 +312,7 @@ def register_users_parser(parser: argparse.ArgumentParser) -> None:
         help="the name of the user",
         type=user_name_type,
     )
-    from clan_cli.completions import (  # noqa: PLC0415
+    from clan_cli.completions import (
         add_dynamic_completer,
         complete_secrets,
         complete_users,
@@ -336,7 +336,7 @@ def register_users_parser(parser: argparse.ArgumentParser) -> None:
         help="the name of the group",
         type=user_name_type,
     )
-    from clan_cli.completions import (  # noqa: PLC0415
+    from clan_cli.completions import (
         add_dynamic_completer,
         complete_secrets,
         complete_users,
@@ -360,7 +360,7 @@ def register_users_parser(parser: argparse.ArgumentParser) -> None:
         help="the name of the user",
         type=user_name_type,
     )
-    from clan_cli.completions import (  # noqa: PLC0415
+    from clan_cli.completions import (
         add_dynamic_completer,
         complete_users,
     )
@@ -378,7 +378,7 @@ def register_users_parser(parser: argparse.ArgumentParser) -> None:
         help="the name of the user",
         type=user_name_type,
     )
-    from clan_cli.completions import (  # noqa: PLC0415
+    from clan_cli.completions import (
         add_dynamic_completer,
         complete_users,
     )
@@ -1,24 +0,0 @@
-{
-  # Use this path to our repo root e.g. for UI test
-  # inputs.clan-core.url = "../../../../.";
-
-  # this placeholder is replaced by the path to nixpkgs
-  inputs.clan-core.url = "__CLAN_CORE__";
-
-  outputs =
-    { self, clan-core }:
-    let
-      clan = clan-core.lib.clan {
-        inherit self;
-        meta.name = "test_flake_with_core_dynamic_machines";
-        machines =
-          let
-            machineModules = builtins.readDir (self + "/machines");
-          in
-          builtins.mapAttrs (name: _type: import (self + "/machines/${name}")) machineModules;
-      };
-    in
-    {
-      inherit (clan.config) nixosConfigurations nixosModules clanInternals;
-    };
-}
@@ -1,5 +1,6 @@
|
|||||||
import json
|
import json
|
||||||
import logging
|
import logging
|
||||||
|
import os
|
||||||
import shutil
|
import shutil
|
||||||
import subprocess
|
import subprocess
|
||||||
import time
|
import time
|
||||||
@@ -166,16 +167,16 @@ def test_generate_public_and_secret_vars(
|
|||||||
assert shared_value.startswith("shared")
|
assert shared_value.startswith("shared")
|
||||||
vars_text = stringify_all_vars(machine)
|
vars_text = stringify_all_vars(machine)
|
||||||
flake_obj = Flake(str(flake.path))
|
flake_obj = Flake(str(flake.path))
|
||||||
my_generator = Generator("my_generator", machine="my_machine", _flake=flake_obj)
|
my_generator = Generator("my_generator", machines=["my_machine"], _flake=flake_obj)
|
||||||
shared_generator = Generator(
|
shared_generator = Generator(
|
||||||
"my_shared_generator",
|
"my_shared_generator",
|
||||||
share=True,
|
share=True,
|
||||||
machine="my_machine",
|
machines=["my_machine"],
|
||||||
_flake=flake_obj,
|
_flake=flake_obj,
|
||||||
)
|
)
|
||||||
dependent_generator = Generator(
|
dependent_generator = Generator(
|
||||||
"dependent_generator",
|
"dependent_generator",
|
||||||
machine="my_machine",
|
machines=["my_machine"],
|
||||||
_flake=flake_obj,
|
_flake=flake_obj,
|
||||||
)
|
)
|
||||||
in_repo_store = in_repo.FactStore(flake=flake_obj)
|
in_repo_store = in_repo.FactStore(flake=flake_obj)
|
||||||
@@ -340,12 +341,12 @@ def test_generate_secret_var_sops_with_default_group(
|
|||||||
flake_obj = Flake(str(flake.path))
|
flake_obj = Flake(str(flake.path))
|
||||||
first_generator = Generator(
|
first_generator = Generator(
|
||||||
"first_generator",
|
"first_generator",
|
||||||
machine="my_machine",
|
machines=["my_machine"],
|
||||||
_flake=flake_obj,
|
_flake=flake_obj,
|
||||||
)
|
)
|
||||||
second_generator = Generator(
|
second_generator = Generator(
|
||||||
"second_generator",
|
"second_generator",
|
||||||
machine="my_machine",
|
machines=["my_machine"],
|
||||||
_flake=flake_obj,
|
_flake=flake_obj,
|
||||||
)
|
)
|
||||||
in_repo_store = in_repo.FactStore(flake=flake_obj)
|
in_repo_store = in_repo.FactStore(flake=flake_obj)
|
||||||
@@ -375,13 +376,13 @@ def test_generate_secret_var_sops_with_default_group(
|
|||||||
first_generator_with_share = Generator(
|
first_generator_with_share = Generator(
|
||||||
"first_generator",
|
"first_generator",
|
||||||
share=False,
|
share=False,
|
||||||
machine="my_machine",
|
machines=["my_machine"],
|
||||||
_flake=flake_obj,
|
_flake=flake_obj,
|
||||||
)
|
)
|
||||||
second_generator_with_share = Generator(
|
second_generator_with_share = Generator(
|
||||||
"second_generator",
|
"second_generator",
|
||||||
share=False,
|
share=False,
|
||||||
machine="my_machine",
|
machines=["my_machine"],
|
||||||
_flake=flake_obj,
|
_flake=flake_obj,
|
||||||
)
|
)
|
||||||
assert sops_store.user_has_access("user2", first_generator_with_share, "my_secret")
|
assert sops_store.user_has_access("user2", first_generator_with_share, "my_secret")
|
||||||
@@ -429,10 +430,43 @@ def test_generated_shared_secret_sops(
|
|||||||
machine1 = Machine(name="machine1", flake=Flake(str(flake.path)))
|
machine1 = Machine(name="machine1", flake=Flake(str(flake.path)))
|
||||||
machine2 = Machine(name="machine2", flake=Flake(str(flake.path)))
|
machine2 = Machine(name="machine2", flake=Flake(str(flake.path)))
|
||||||
cli.run(["vars", "generate", "--flake", str(flake.path), "machine1"])
|
cli.run(["vars", "generate", "--flake", str(flake.path), "machine1"])
|
||||||
assert check_vars(machine1.name, machine1.flake)
|
|
||||||
|
# Get the initial state of the flake directory after generation
|
||||||
|
def get_file_mtimes(path: str) -> dict[str, float]:
|
||||||
|
"""Get modification times of all files in a directory tree."""
|
||||||
|
mtimes = {}
|
||||||
|
for root, _dirs, files in os.walk(path):
|
||||||
|
# Skip .git directory
|
||||||
|
if ".git" in root:
|
||||||
|
continue
|
||||||
|
for file in files:
|
||||||
|
filepath = Path(root) / file
|
||||||
|
mtimes[str(filepath)] = filepath.stat().st_mtime
|
||||||
|
return mtimes
|
||||||
|
|
||||||
|
initial_mtimes = get_file_mtimes(str(flake.path))
|
||||||
|
|
||||||
|
# First check_vars should not write anything
|
||||||
|
assert check_vars(machine1.name, machine1.flake), (
|
||||||
|
"machine1 has already generated vars, so check_vars should return True\n"
|
||||||
|
f"Check result:\n{check_vars(machine1.name, machine1.flake)}"
|
||||||
|
)
|
||||||
|
# Verify no files were modified
|
||||||
|
after_check_mtimes = get_file_mtimes(str(flake.path))
|
||||||
|
assert initial_mtimes == after_check_mtimes, (
|
||||||
|
"check_vars should not modify any files when vars are already valid"
|
||||||
|
)
|
||||||
|
|
||||||
|
assert not check_vars(machine2.name, machine2.flake), (
|
||||||
|
"machine2 has not generated vars yet, so check_vars should return False"
|
||||||
|
)
|
||||||
|
# Verify no files were modified
|
||||||
|
after_check_mtimes_2 = get_file_mtimes(str(flake.path))
|
||||||
|
assert initial_mtimes == after_check_mtimes_2, (
|
||||||
|
"check_vars should not modify any files when vars are not valid"
|
||||||
|
)
|
||||||
|
|
||||||
cli.run(["vars", "generate", "--flake", str(flake.path), "machine2"])
|
cli.run(["vars", "generate", "--flake", str(flake.path), "machine2"])
|
||||||
assert check_vars(machine2.name, machine2.flake)
|
|
||||||
assert check_vars(machine2.name, machine2.flake)
|
|
||||||
m1_sops_store = sops.SecretStore(machine1.flake)
|
m1_sops_store = sops.SecretStore(machine1.flake)
|
||||||
m2_sops_store = sops.SecretStore(machine2.flake)
|
m2_sops_store = sops.SecretStore(machine2.flake)
|
||||||
# Create generators with machine context for testing
|
# Create generators with machine context for testing
|
||||||
@@ -513,28 +547,28 @@ def test_generate_secret_var_password_store(
|
|||||||
"my_generator",
|
"my_generator",
|
||||||
share=False,
|
share=False,
|
||||||
files=[],
|
files=[],
|
||||||
machine="my_machine",
|
machines=["my_machine"],
|
||||||
_flake=flake_obj,
|
_flake=flake_obj,
|
||||||
)
|
)
|
||||||
my_generator_shared = Generator(
|
my_generator_shared = Generator(
|
||||||
"my_generator",
|
"my_generator",
|
||||||
share=True,
|
share=True,
|
||||||
files=[],
|
files=[],
|
||||||
machine="my_machine",
|
machines=["my_machine"],
|
||||||
_flake=flake_obj,
|
_flake=flake_obj,
|
||||||
)
|
)
|
||||||
my_shared_generator = Generator(
|
my_shared_generator = Generator(
|
||||||
"my_shared_generator",
|
"my_shared_generator",
|
||||||
share=True,
|
share=True,
|
||||||
files=[],
|
files=[],
|
||||||
machine="my_machine",
|
machines=["my_machine"],
|
||||||
_flake=flake_obj,
|
_flake=flake_obj,
|
||||||
)
|
)
|
||||||
my_shared_generator_not_shared = Generator(
|
my_shared_generator_not_shared = Generator(
|
||||||
"my_shared_generator",
|
"my_shared_generator",
|
||||||
share=False,
|
share=False,
|
||||||
files=[],
|
files=[],
|
||||||
machine="my_machine",
|
machines=["my_machine"],
|
||||||
_flake=flake_obj,
|
_flake=flake_obj,
|
||||||
)
|
)
|
||||||
assert store.exists(my_generator, "my_secret")
|
assert store.exists(my_generator, "my_secret")
|
||||||
@@ -546,7 +580,7 @@ def test_generate_secret_var_password_store(
|
|||||||
name="my_generator",
|
name="my_generator",
|
||||||
share=False,
|
share=False,
|
||||||
files=[],
|
files=[],
|
||||||
machine="my_machine",
|
machines=["my_machine"],
|
||||||
_flake=flake_obj,
|
_flake=flake_obj,
|
||||||
)
|
)
|
||||||
assert store.get(generator, "my_secret").decode() == "hello\n"
|
assert store.get(generator, "my_secret").decode() == "hello\n"
|
||||||
@@ -557,7 +591,7 @@ def test_generate_secret_var_password_store(
|
|||||||
"my_generator",
|
"my_generator",
|
||||||
share=False,
|
share=False,
|
||||||
files=[],
|
files=[],
|
||||||
machine="my_machine",
|
machines=["my_machine"],
|
||||||
_flake=flake_obj,
|
_flake=flake_obj,
|
||||||
)
|
)
|
||||||
var_name = "my_secret"
|
var_name = "my_secret"
|
||||||
@@ -570,7 +604,7 @@ def test_generate_secret_var_password_store(
|
|||||||
"my_generator2",
|
"my_generator2",
|
||||||
share=False,
|
share=False,
|
||||||
files=[],
|
files=[],
|
||||||
machine="my_machine",
|
machines=["my_machine"],
|
||||||
_flake=flake_obj,
|
_flake=flake_obj,
|
||||||
)
|
)
|
||||||
var_name = "my_secret2"
|
var_name = "my_secret2"
|
||||||
@@ -582,7 +616,7 @@ def test_generate_secret_var_password_store(
|
|||||||
"my_shared_generator",
|
"my_shared_generator",
|
||||||
share=True,
|
share=True,
|
||||||
files=[],
|
files=[],
|
||||||
machine="my_machine",
|
machines=["my_machine"],
|
||||||
_flake=flake_obj,
|
_flake=flake_obj,
|
||||||
)
|
)
|
||||||
var_name = "my_shared_secret"
|
var_name = "my_shared_secret"
|
||||||
@@ -629,8 +663,8 @@ def test_generate_secret_for_multiple_machines(
|
|||||||
in_repo_store2 = in_repo.FactStore(flake=flake_obj)
|
in_repo_store2 = in_repo.FactStore(flake=flake_obj)
|
||||||
|
|
||||||
# Create generators for each machine
|
# Create generators for each machine
|
||||||
gen1 = Generator("my_generator", machine="machine1", _flake=flake_obj)
|
gen1 = Generator("my_generator", machines=["machine1"], _flake=flake_obj)
|
||||||
gen2 = Generator("my_generator", machine="machine2", _flake=flake_obj)
|
gen2 = Generator("my_generator", machines=["machine2"], _flake=flake_obj)
|
||||||
|
|
||||||
assert in_repo_store1.exists(gen1, "my_value")
|
assert in_repo_store1.exists(gen1, "my_value")
|
||||||
assert in_repo_store2.exists(gen2, "my_value")
|
assert in_repo_store2.exists(gen2, "my_value")
|
||||||
@@ -694,12 +728,12 @@ def test_prompt(
|
|||||||
|
|
||||||
# Set up objects for testing the results
|
# Set up objects for testing the results
|
||||||
flake_obj = Flake(str(flake.path))
|
flake_obj = Flake(str(flake.path))
|
||||||
my_generator = Generator("my_generator", machine="my_machine", _flake=flake_obj)
|
my_generator = Generator("my_generator", machines=["my_machine"], _flake=flake_obj)
|
||||||
my_generator_with_details = Generator(
|
my_generator_with_details = Generator(
|
||||||
name="my_generator",
|
name="my_generator",
|
||||||
share=False,
|
share=False,
|
||||||
files=[],
|
files=[],
|
||||||
machine="my_machine",
|
machines=["my_machine"],
|
||||||
_flake=flake_obj,
|
_flake=flake_obj,
|
||||||
)
|
)
|
||||||
|
|
||||||
@@ -784,10 +818,10 @@ def test_shared_vars_regeneration(
|
|||||||
in_repo_store_2 = in_repo.FactStore(machine2.flake)
|
in_repo_store_2 = in_repo.FactStore(machine2.flake)
|
||||||
# Create generators with machine context for testing
|
# Create generators with machine context for testing
|
||||||
child_gen_m1 = Generator(
|
child_gen_m1 = Generator(
|
||||||
"child_generator", share=False, machine="machine1", _flake=machine1.flake
|
"child_generator", share=False, machines=["machine1"], _flake=machine1.flake
|
||||||
)
|
)
|
||||||
child_gen_m2 = Generator(
|
child_gen_m2 = Generator(
|
||||||
"child_generator", share=False, machine="machine2", _flake=machine2.flake
|
"child_generator", share=False, machines=["machine2"], _flake=machine2.flake
|
||||||
)
|
)
|
||||||
# generate for machine 1
|
# generate for machine 1
|
||||||
cli.run(["vars", "generate", "--flake", str(flake.path), "machine1"])
|
cli.run(["vars", "generate", "--flake", str(flake.path), "machine1"])
|
||||||
@@ -855,13 +889,13 @@ def test_multi_machine_shared_vars(
|
|||||||
generator_m1 = Generator(
|
generator_m1 = Generator(
|
||||||
"shared_generator",
|
"shared_generator",
|
||||||
share=True,
|
share=True,
|
||||||
machine="machine1",
|
machines=["machine1"],
|
||||||
_flake=machine1.flake,
|
_flake=machine1.flake,
|
||||||
)
|
)
|
||||||
generator_m2 = Generator(
|
generator_m2 = Generator(
|
||||||
"shared_generator",
|
"shared_generator",
|
||||||
share=True,
|
share=True,
|
||||||
machine="machine2",
|
machines=["machine2"],
|
||||||
_flake=machine2.flake,
|
_flake=machine2.flake,
|
||||||
)
|
)
|
||||||
# generate for machine 1
|
# generate for machine 1
|
||||||
@@ -917,7 +951,9 @@ def test_api_set_prompts(
|
|||||||
)
|
)
|
||||||
machine = Machine(name="my_machine", flake=Flake(str(flake.path)))
|
machine = Machine(name="my_machine", flake=Flake(str(flake.path)))
|
||||||
store = in_repo.FactStore(machine.flake)
|
store = in_repo.FactStore(machine.flake)
|
||||||
my_generator = Generator("my_generator", machine="my_machine", _flake=machine.flake)
|
my_generator = Generator(
|
||||||
|
"my_generator", machines=["my_machine"], _flake=machine.flake
|
||||||
|
)
|
||||||
assert store.exists(my_generator, "prompt1")
|
assert store.exists(my_generator, "prompt1")
|
||||||
assert store.get(my_generator, "prompt1").decode() == "input1"
|
assert store.get(my_generator, "prompt1").decode() == "input1"
|
||||||
run_generators(
|
run_generators(
|
||||||
@@ -1061,10 +1097,10 @@ def test_migration(
|
|||||||
assert "Migrated var my_generator/my_value" in caplog.text
|
assert "Migrated var my_generator/my_value" in caplog.text
|
||||||
assert "Migrated secret var my_generator/my_secret" in caplog.text
|
assert "Migrated secret var my_generator/my_secret" in caplog.text
|
||||||
flake_obj = Flake(str(flake.path))
|
flake_obj = Flake(str(flake.path))
|
||||||
my_generator = Generator("my_generator", machine="my_machine", _flake=flake_obj)
|
my_generator = Generator("my_generator", machines=["my_machine"], _flake=flake_obj)
|
||||||
other_generator = Generator(
|
other_generator = Generator(
|
||||||
"other_generator",
|
"other_generator",
|
||||||
machine="my_machine",
|
machines=["my_machine"],
|
||||||
_flake=flake_obj,
|
_flake=flake_obj,
|
||||||
)
|
)
|
||||||
in_repo_store = in_repo.FactStore(flake=flake_obj)
|
in_repo_store = in_repo.FactStore(flake=flake_obj)
|
||||||
@@ -1210,7 +1246,7 @@ def test_share_mode_switch_regenerates_secret(
|
|||||||
sops_store = sops.SecretStore(flake=flake_obj)
|
sops_store = sops.SecretStore(flake=flake_obj)
|
||||||
|
|
||||||
generator_not_shared = Generator(
|
generator_not_shared = Generator(
|
||||||
"my_generator", share=False, machine="my_machine", _flake=flake_obj
|
"my_generator", share=False, machines=["my_machine"], _flake=flake_obj
|
||||||
)
|
)
|
||||||
|
|
||||||
initial_public = in_repo_store.get(generator_not_shared, "my_value").decode()
|
initial_public = in_repo_store.get(generator_not_shared, "my_value").decode()
|
||||||
@@ -1229,7 +1265,7 @@ def test_share_mode_switch_regenerates_secret(

     # Read the new values with shared generator
     generator_shared = Generator(
-        "my_generator", share=True, machine="my_machine", _flake=flake_obj
+        "my_generator", share=True, machines=["my_machine"], _flake=flake_obj
     )

     new_public = in_repo_store.get(generator_shared, "my_value").decode()
@@ -1264,68 +1300,117 @@ def test_cache_misses_for_vars_operations(
     flake: ClanFlake,
 ) -> None:
     """Test that vars operations result in minimal cache misses."""
+    # Set up first machine with two generators
     config = flake.machines["my_machine"] = create_test_machine_config()

-    # Set up a simple generator with a public value
-    my_generator = config["clan"]["core"]["vars"]["generators"]["my_generator"]
-    my_generator["files"]["my_value"]["secret"] = False
-    my_generator["script"] = 'echo -n "test_value" > "$out"/my_value'
+    # Set up two generators with public values
+    gen1 = config["clan"]["core"]["vars"]["generators"]["gen1"]
+    gen1["files"]["value1"]["secret"] = False
+    gen1["script"] = 'echo -n "test_value1" > "$out"/value1'
+
+    gen2 = config["clan"]["core"]["vars"]["generators"]["gen2"]
+    gen2["files"]["value2"]["secret"] = False
+    gen2["script"] = 'echo -n "test_value2" > "$out"/value2'
+
+    # Add a second machine with the same generator configuration
+    flake.machines["other_machine"] = config.copy()

     flake.refresh()
     monkeypatch.chdir(flake.path)

-    # Create a fresh machine object to ensure clean cache state
-    machine = Machine(name="my_machine", flake=Flake(str(flake.path)))
+    # Create fresh machine objects to ensure clean cache state
+    flake_obj = Flake(str(flake.path))
+    machine1 = Machine(name="my_machine", flake=flake_obj)
+    machine2 = Machine(name="other_machine", flake=flake_obj)

-    # Test 1: Running vars generate with a fresh cache should result in exactly 3 cache misses
-    # Expected cache misses:
-    # 1. One for getting the list of generators
-    # 2. One for getting the final script of our test generator (my_generator)
-    # 3. One for getting the final script of the state version generator (added by default)
-    # TODO: The third cache miss is undesired in tests. disable state version module for tests
+    # Test 1: Running vars generate for BOTH machines simultaneously should still result in exactly 2 cache misses
+    # Even though we have:
+    # - 2 machines (my_machine and other_machine)
+    # - 2 generators per machine (gen1 and gen2)
+    # We still only get 2 cache misses when generating for both machines:
+    # 1. One for getting the list of generators for both machines
+    # 2. One batched evaluation for getting all generator scripts for both machines
+    # The key insight: the system should batch ALL evaluations across ALL machines into a single nix eval
+
     run_generators(
-        machines=[machine],
+        machines=[machine1, machine2],
         generators=None,  # Generate all
     )

-    # Print stack traces if we have more than 3 cache misses
-    if machine.flake._cache_misses != 3:
-        machine.flake.print_cache_miss_analysis(
+    # Print stack traces if we have more than 2 cache misses
+    if flake_obj._cache_misses != 2:
+        flake_obj.print_cache_miss_analysis(
             title="Cache miss analysis for vars generate"
         )

-    assert machine.flake._cache_misses == 2, (
-        f"Expected exactly 2 cache misses for vars generate, got {machine.flake._cache_misses}"
+    assert flake_obj._cache_misses == 2, (
+        f"Expected exactly 2 cache misses for vars generate, got {flake_obj._cache_misses}"
     )

-    # Verify the value was generated correctly
-    var_value = get_machine_var(machine, "my_generator/my_value")
-    assert var_value.printable_value == "test_value"
-
     # Test 2: List all vars should result in exactly 1 cache miss
     # Force cache invalidation (this also resets cache miss tracking)
     invalidate_flake_cache(flake.path)
-    machine.flake.invalidate_cache()
+    flake_obj.invalidate_cache()

-    stringify_all_vars(machine)
-    assert machine.flake._cache_misses == 1, (
-        f"Expected exactly 1 cache miss for vars list, got {machine.flake._cache_misses}"
+    stringify_all_vars(machine1)
+    assert flake_obj._cache_misses == 1, (
+        f"Expected exactly 1 cache miss for vars list, got {flake_obj._cache_misses}"
     )

     # Test 3: Getting a specific var with a fresh cache should result in exactly 1 cache miss
     # Force cache invalidation (this also resets cache miss tracking)
     invalidate_flake_cache(flake.path)
-    machine.flake.invalidate_cache()
+    flake_obj.invalidate_cache()

-    var_value = get_machine_var(machine, "my_generator/my_value")
-    assert var_value.printable_value == "test_value"
+    # Only test gen1 for the get operation
+    var_value = get_machine_var(machine1, "gen1/value1")
+    assert var_value.printable_value == "test_value1"

-    assert machine.flake._cache_misses == 1, (
-        f"Expected exactly 1 cache miss for vars get with fresh cache, got {machine.flake._cache_misses}"
+    assert flake_obj._cache_misses == 1, (
+        f"Expected exactly 1 cache miss for vars get with fresh cache, got {flake_obj._cache_misses}"
     )


+@pytest.mark.with_core
+def test_shared_generator_conflicting_definition_raises_error(
+    monkeypatch: pytest.MonkeyPatch,
+    flake_with_sops: ClanFlake,
+) -> None:
+    """Test that vars generation raises an error when two machines have different
+    definitions for the same shared generator.
+    """
+    flake = flake_with_sops
+
+    # Create machine1 with a shared generator
+    machine1_config = flake.machines["machine1"] = create_test_machine_config()
+    shared_gen1 = machine1_config["clan"]["core"]["vars"]["generators"][
+        "shared_generator"
+    ]
+    shared_gen1["share"] = True
+    shared_gen1["files"]["file1"]["secret"] = False
+    shared_gen1["script"] = 'echo "test" > "$out"/file1'
+
+    # Create machine2 with the same shared generator but different files
+    machine2_config = flake.machines["machine2"] = create_test_machine_config()
+    shared_gen2 = machine2_config["clan"]["core"]["vars"]["generators"][
+        "shared_generator"
+    ]
+    shared_gen2["share"] = True
+    shared_gen2["files"]["file2"]["secret"] = False  # Different file name
+    shared_gen2["script"] = 'echo "test" > "$out"/file2'
+
+    flake.refresh()
+    monkeypatch.chdir(flake.path)
+
+    # Attempting to generate vars for both machines should raise an error
+    # because they have conflicting definitions for the same shared generator
+    with pytest.raises(
+        ClanError,
+        match=".*differ.*",
+    ):
+        cli.run(["vars", "generate", "--flake", str(flake.path)])
+
+
 @pytest.mark.with_core
 def test_dynamic_invalidation(
     monkeypatch: pytest.MonkeyPatch,
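Review note: the comment block in the hunk above claims that generating for two machines and two generators still costs only two cache misses, because all selectors are evaluated in one batch. A standalone toy sketch of that batching idea (illustrative only; `FakeFlake` is not the project's cache implementation):

```python
# Illustrative stand-in for a selector cache that batches one "nix eval" per precache call.
class FakeFlake:
    def __init__(self) -> None:
        self.cache: dict[str, str] = {}
        self.cache_misses = 0

    def precache(self, selectors: list[str]) -> None:
        missing = [s for s in selectors if s not in self.cache]
        if missing:
            self.cache_misses += 1  # one batched evaluation covers every missing selector
            self.cache.update({s: f"value of {s}" for s in missing})

    def select(self, selector: str) -> str:
        self.precache([selector])
        return self.cache[selector]


flake = FakeFlake()
flake.precache(
    [
        f'machines."{machine}".generators."{gen}".finalScript'
        for machine in ("my_machine", "other_machine")
        for gen in ("gen1", "gen2")
    ]
)
assert flake.cache_misses == 1  # four selectors, one batched evaluation
```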
@@ -40,12 +40,15 @@ class StoreBase(ABC):

     def get_machine(self, generator: "Generator") -> str:
         """Get machine name from generator, asserting it's not None for now."""
-        if generator.machine is None:
-            if generator.share:
-                return "__shared"
+        if generator.share:
+            return "__shared"
+        if not generator.machines:
             msg = f"Generator '{generator.name}' has no machine associated"
             raise ClanError(msg)
-        return generator.machine
+        if len(generator.machines) != 1:
+            msg = f"Generator '{generator.name}' has {len(generator.machines)} machines, expected exactly 1"
+            raise ClanError(msg)
+        return generator.machines[0]

     # get a single fact
     @abstractmethod
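Review note: a tiny standalone re-implementation of the lookup rule introduced above, to make the shared/per-machine split explicit (not the project's code, just the rule):

```python
from dataclasses import dataclass, field


@dataclass
class Gen:
    name: str
    share: bool = False
    machines: list[str] = field(default_factory=list)


def get_machine(gen: Gen) -> str:
    """Shared generators live under a pseudo machine; others need exactly one machine."""
    if gen.share:
        return "__shared"
    if len(gen.machines) != 1:
        raise ValueError(f"{gen.name}: expected exactly one machine, got {gen.machines}")
    return gen.machines[0]


assert get_machine(Gen("wifi", share=True, machines=["m1", "m2"])) == "__shared"
assert get_machine(Gen("ssh", machines=["server1"])) == "server1"
```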
@@ -147,7 +150,7 @@ class StoreBase(ABC):
         prev_generator = dataclasses.replace(
             generator,
             share=not generator.share,
-            machine=machine if generator.share else None,
+            machines=[] if not generator.share else [machine],
         )
         if self.exists(prev_generator, var.name):
             changed_files += self.delete(prev_generator, var.name)
@@ -165,12 +168,12 @@ class StoreBase(ABC):
         new_file = self._set(generator, var, value, machine)
         action_str = "Migrated" if is_migration else "Updated"
         log_info: Callable
-        if generator.machine is None:
+        if generator.share:
             log_info = log.info
         else:
-            from clan_lib.machines.machines import Machine  # noqa: PLC0415
+            from clan_lib.machines.machines import Machine

-            machine_obj = Machine(name=generator.machine, flake=self.flake)
+            machine_obj = Machine(name=generator.machines[0], flake=self.flake)
             log_info = machine_obj.info
         if self.is_secret_store:
             log.info(f"{action_str} secret var {generator.name}/{var.name}\n")
@@ -3,6 +3,7 @@ import logging
 from typing import TYPE_CHECKING

 from clan_cli.completions import add_dynamic_completer, complete_machines
+from clan_cli.vars.secret_modules import sops
 from clan_lib.errors import ClanError
 from clan_lib.flake import Flake, require_flake
 from clan_lib.machines.machines import Machine
@@ -26,13 +27,33 @@ class VarStatus:
         self.unfixed_secret_vars = unfixed_secret_vars
         self.invalid_generators = invalid_generators

+    def text(self) -> str:
+        log = ""
+        if self.missing_secret_vars:
+            log += "Missing secret vars:\n"
+            for var in self.missing_secret_vars:
+                log += f" - {var.id}\n"
+        if self.missing_public_vars:
+            log += "Missing public vars:\n"
+            for var in self.missing_public_vars:
+                log += f" - {var.id}\n"
+        if self.unfixed_secret_vars:
+            log += "Unfixed secret vars:\n"
+            for var in self.unfixed_secret_vars:
+                log += f" - {var.id}\n"
+        if self.invalid_generators:
+            log += "Invalid generators (outdated invalidation hash):\n"
+            for gen in self.invalid_generators:
+                log += f" - {gen}\n"
+        return log if log else "All vars are present and valid."
+

 def vars_status(
     machine_name: str,
     flake: Flake,
     generator_name: None | str = None,
 ) -> VarStatus:
-    from clan_cli.vars.generator import Generator  # noqa: PLC0415
+    from clan_cli.vars.generator import Generator

     machine = Machine(name=machine_name, flake=flake)
     missing_secret_vars = []
@@ -66,15 +87,32 @@ def vars_status(
                     f"Secret var '{file.name}' for service '{generator.name}' in machine {machine.name} is missing.",
                 )
                 missing_secret_vars.append(file)
+            if (
+                isinstance(machine.secret_vars_store, sops.SecretStore)
+                and generator.share
+                and file.exists
+                and not machine.secret_vars_store.machine_has_access(
+                    generator=generator,
+                    secret_name=file.name,
+                    machine=machine.name,
+                )
+            ):
+                msg = (
+                    f"Secret var '{generator.name}/{file.name}' is marked for deployment to machine '{machine.name}', but the machine does not have access to it.\n"
+                    f"Run 'clan vars generate {machine.name}' to fix this.\n"
+                )
+                machine.info(msg)
+                missing_secret_vars.append(file)
+
         else:
-            msg = machine.secret_vars_store.health_check(
+            health_msg = machine.secret_vars_store.health_check(
                 machine=machine.name,
                 generators=[generator],
                 file_name=file.name,
             )
-            if msg:
+            if health_msg is not None:
                 machine.info(
-                    f"Secret var '{file.name}' for service '{generator.name}' in machine {machine.name} needs update: {msg}",
+                    f"Secret var '{file.name}' for service '{generator.name}' in machine {machine.name} needs update: {health_msg}",
                 )
                 unfixed_secret_vars.append(file)
@@ -106,6 +144,7 @@ def check_vars(
     generator_name: None | str = None,
 ) -> bool:
     status = vars_status(machine_name, flake, generator_name=generator_name)
+    log.info(f"Check results for machine '{machine_name}': \n{status.text()}")
     return not (
         status.missing_secret_vars
         or status.missing_public_vars
@@ -61,14 +61,22 @@ class Generator:
     migrate_fact: str | None = None
     validation_hash: str | None = None

-    machine: str | None = None
+    machines: list[str] = field(default_factory=list)
     _flake: "Flake | None" = None
     _public_store: "StoreBase | None" = None
     _secret_store: "StoreBase | None" = None

     @property
     def key(self) -> GeneratorKey:
-        return GeneratorKey(machine=self.machine, name=self.name)
+        if self.share:
+            # must be a shared generator
+            machine = None
+        elif len(self.machines) != 1:
+            msg = f"Shared generator {self.name} must have exactly one machine, but has {len(self.machines)}: {', '.join(self.machines)}"
+            raise ClanError(msg)
+        else:
+            machine = self.machines[0]
+        return GeneratorKey(machine=machine, name=self.name)

     def __hash__(self) -> int:
         return hash(self.key)
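Review note: with the field change above, a generator's identity no longer comes from a single `machine` string. A hedged sketch of how `key` behaves, assuming `GeneratorKey` stays an equality-comparable dataclass (as implied by `__hash__`) and the constructor defaults used by the tests in this PR:

```python
# Hedged sketch - requires the clan_cli package from this repository.
from clan_cli.vars.generator import Generator

shared = Generator("wifi", share=True, machines=["m1", "m2"])
per_machine = Generator("ssh", machines=["m1"])

assert shared.key.machine is None        # shared generators hash machine-independently
assert per_machine.key.machine == "m1"   # per-machine generators stay machine-scoped
# Two shared handles with the same name collapse to one graph node:
assert hash(shared) == hash(Generator("wifi", share=True, machines=["m3"]))
```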
@@ -143,7 +151,10 @@ class Generator:
         files_selector = "config.clan.core.vars.generators.*.files.*.{secret,deploy,owner,group,mode,neededFor}"
         flake.precache(cls.get_machine_selectors(machine_names))

-        generators = []
+        generators: list[Generator] = []
+        shared_generators_raw: dict[
+            str, tuple[str, dict, dict]
+        ] = {}  # name -> (machine_name, gen_data, files_data)

         for machine_name in machine_names:
             # Get all generator metadata in one select (safe fields only)
@@ -165,6 +176,38 @@ class Generator:
             sec_store = machine.secret_vars_store

             for gen_name, gen_data in generators_data.items():
+                # Check for conflicts in shared generator definitions using raw data
+                if gen_data["share"]:
+                    if gen_name in shared_generators_raw:
+                        prev_machine, prev_gen_data, prev_files_data = (
+                            shared_generators_raw[gen_name]
+                        )
+                        # Compare raw data
+                        prev_gen_files = prev_files_data.get(gen_name, {})
+                        curr_gen_files = files_data.get(gen_name, {})
+                        # Build list of differences with details
+                        differences = []
+                        if prev_gen_files != curr_gen_files:
+                            differences.append("files")
+                        if prev_gen_data.get("prompts") != gen_data.get("prompts"):
+                            differences.append("prompts")
+                        if prev_gen_data.get("dependencies") != gen_data.get(
+                            "dependencies"
+                        ):
+                            differences.append("dependencies")
+                        if prev_gen_data.get("validationHash") != gen_data.get(
+                            "validationHash"
+                        ):
+                            differences.append("validation_hash")
+                        if differences:
+                            msg = f"Machines {prev_machine} and {machine_name} have different definitions for shared generator '{gen_name}' (differ in: {', '.join(differences)})"
+                            raise ClanError(msg)
+                    else:
+                        shared_generators_raw[gen_name] = (
+                            machine_name,
+                            gen_data,
+                            files_data,
+                        )
                 # Build files from the files_data
                 files = []
                 gen_files = files_data.get(gen_name, {})
@@ -209,14 +252,31 @@ class Generator:
                     migrate_fact=gen_data.get("migrateFact"),
                     validation_hash=gen_data.get("validationHash"),
                     prompts=prompts,
-                    # only set machine for machine-specific generators
-                    # this is essential for the graph algorithms to work correctly
-                    machine=None if share else machine_name,
+                    # shared generators can have multiple machines, machine-specific have one
+                    machines=[machine_name],
                     _flake=flake,
                     _public_store=pub_store,
                     _secret_store=sec_store,
                 )
-                generators.append(generator)
+
+                # link generator to its files
+                for file in files:
+                    file.generator(generator)
+
+                if share:
+                    # For shared generators, check if we already created it
+                    existing = next(
+                        (g for g in generators if g.name == gen_name and g.share), None
+                    )
+                    if existing:
+                        # Just append the machine to the existing generator
+                        existing.machines.append(machine_name)
+                    else:
+                        # Add the new shared generator
+                        generators.append(generator)
+                else:
+                    # Always add per-machine generators
+                    generators.append(generator)

         # TODO: This should be done in a non-mutable way.
         if include_previous_values:
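Review note: the merging logic added above means a shared generator is created once and then only collects additional machine names. A standalone sketch of that dedup step (plain dicts instead of the real Generator class):

```python
# Illustrative only: the first machine creates the shared entry, later machines extend it.
generators: list[dict] = []
for machine_name in ("machine1", "machine2"):
    gen = {"name": "shared_generator", "share": True, "machines": [machine_name]}
    existing = next(
        (g for g in generators if g["name"] == gen["name"] and g["share"]), None
    )
    if existing:
        existing["machines"].append(machine_name)
    else:
        generators.append(gen)

assert len(generators) == 1
assert generators[0]["machines"] == ["machine1", "machine2"]
```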
@@ -245,15 +305,19 @@ class Generator:
                 return sec_store.get(self, prompt.name).decode()
         return None

+    def final_script_selector(self, machine_name: str) -> str:
+        if self._flake is None:
+            msg = "Flake cannot be None"
+            raise ClanError(msg)
+        return self._flake.machine_selector(
+            machine_name, f'config.clan.core.vars.generators."{self.name}".finalScript'
+        )
+
     def final_script(self, machine: "Machine") -> Path:
         if self._flake is None:
             msg = "Flake cannot be None"
             raise ClanError(msg)
-        output = Path(
-            machine.select(
-                f'config.clan.core.vars.generators."{self.name}".finalScript',
-            ),
-        )
+        output = Path(self._flake.select(self.final_script_selector(machine.name)))
         if tmp_store := nix_test_store():
             output = tmp_store.joinpath(*output.parts[1:])
         return output
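Review note: splitting the selector out of `final_script` is what lets callers pre-evaluate every generator script in one pass. A hedged usage sketch, assuming the `Generator.get_machine_generators` and `Flake.precache` APIs shown in this diff (the flake path is a placeholder):

```python
from clan_cli.vars.generator import Generator
from clan_lib.flake import Flake

flake = Flake("/path/to/flake")  # hypothetical path
gens = Generator.get_machine_generators(["machine1", "machine2"], flake)

# One selector per generator, evaluated together instead of one nix eval each
# inside final_script().
selectors = [g.final_script_selector(g.machines[0]) for g in gens if g.machines]
flake.precache(selectors)
```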
@@ -418,7 +482,7 @@ class Generator:
         if sys.platform == "linux" and bwrap.bubblewrap_works():
             cmd = bubblewrap_cmd(str(final_script), tmpdir)
         elif sys.platform == "darwin":
-            from clan_lib.sandbox_exec import sandbox_exec_cmd  # noqa: PLC0415
+            from clan_lib.sandbox_exec import sandbox_exec_cmd

             cmd = stack.enter_context(sandbox_exec_cmd(str(final_script), tmpdir))
         else:
@@ -49,28 +49,28 @@ def test_required_generators() -> None:
     gen_1 = Generator(
         name="gen_1",
         dependencies=[],
-        machine=machine_name,
+        machines=[machine_name],
         _public_store=public_store,
         _secret_store=secret_store,
     )
     gen_2 = Generator(
         name="gen_2",
         dependencies=[gen_1.key],
-        machine=machine_name,
+        machines=[machine_name],
         _public_store=public_store,
         _secret_store=secret_store,
     )
     gen_2a = Generator(
         name="gen_2a",
         dependencies=[gen_2.key],
-        machine=machine_name,
+        machines=[machine_name],
         _public_store=public_store,
         _secret_store=secret_store,
     )
     gen_2b = Generator(
         name="gen_2b",
         dependencies=[gen_2.key],
-        machine=machine_name,
+        machines=[machine_name],
         _public_store=public_store,
         _secret_store=secret_store,
     )
|||||||
shared_gen = Generator(
|
shared_gen = Generator(
|
||||||
name="shared_gen",
|
name="shared_gen",
|
||||||
dependencies=[],
|
dependencies=[],
|
||||||
machine=None, # Shared generator
|
share=True, # Mark as shared generator
|
||||||
|
machines=[machine_1, machine_2], # Shared across both machines
|
||||||
_public_store=public_store,
|
_public_store=public_store,
|
||||||
_secret_store=secret_store,
|
_secret_store=secret_store,
|
||||||
)
|
)
|
||||||
gen_1 = Generator(
|
gen_1 = Generator(
|
||||||
name="gen_1",
|
name="gen_1",
|
||||||
dependencies=[shared_gen.key],
|
dependencies=[shared_gen.key],
|
||||||
machine=machine_1,
|
machines=[machine_1],
|
||||||
_public_store=public_store,
|
_public_store=public_store,
|
||||||
_secret_store=secret_store,
|
_secret_store=secret_store,
|
||||||
)
|
)
|
||||||
gen_2 = Generator(
|
gen_2 = Generator(
|
||||||
name="gen_2",
|
name="gen_2",
|
||||||
dependencies=[shared_gen.key],
|
dependencies=[shared_gen.key],
|
||||||
machine=machine_2,
|
machines=[machine_2],
|
||||||
_public_store=public_store,
|
_public_store=public_store,
|
||||||
_secret_store=secret_store,
|
_secret_store=secret_store,
|
||||||
)
|
)
|
||||||
|
|||||||
@@ -54,7 +54,7 @@ class SecretStore(StoreBase):
     def ensure_machine_key(self, machine: str) -> None:
         """Ensure machine has sops keys initialized."""
         # no need to generate keys if we don't manage secrets
-        from clan_cli.vars.generator import Generator  # noqa: PLC0415
+        from clan_cli.vars.generator import Generator

         vars_generators = Generator.get_machine_generators([machine], self.flake)
         if not vars_generators:
@@ -98,7 +98,8 @@ class SecretStore(StoreBase):
     def machine_has_access(
         self, generator: Generator, secret_name: str, machine: str
     ) -> bool:
-        self.ensure_machine_key(machine)
+        if not has_machine(self.flake.path, machine):
+            return False
         key_dir = sops_machines_folder(self.flake.path) / machine
         return self.key_has_access(key_dir, generator, secret_name)

@@ -142,7 +143,7 @@ class SecretStore(StoreBase):

         """
         if generators is None:
-            from clan_cli.vars.generator import Generator  # noqa: PLC0415
+            from clan_cli.vars.generator import Generator

             generators = Generator.get_machine_generators([machine], self.flake)
         file_found = False
@@ -156,8 +157,6 @@ class SecretStore(StoreBase):
                 else:
                     continue
                 if file.secret and self.exists(generator, file.name):
-                    if file.deploy:
-                        self.ensure_machine_has_access(generator, file.name, machine)
                     needs_update, msg = self.needs_fix(generator, file.name, machine)
                     if needs_update:
                         outdated.append((generator.name, file.name, msg))
@@ -219,7 +218,7 @@ class SecretStore(StoreBase):
         return [store_folder]

     def populate_dir(self, machine: str, output_dir: Path, phases: list[str]) -> None:
-        from clan_cli.vars.generator import Generator  # noqa: PLC0415
+        from clan_cli.vars.generator import Generator

         vars_generators = Generator.get_machine_generators([machine], self.flake)
         if "users" in phases or "services" in phases:
@@ -283,6 +282,7 @@ class SecretStore(StoreBase):
     ) -> None:
         if self.machine_has_access(generator, name, machine):
             return
+        self.ensure_machine_key(machine)
         secret_folder = self.secret_path(generator, name)
         add_secret(
             self.flake.path,
@@ -292,7 +292,7 @@ class SecretStore(StoreBase):
         )

     def collect_keys_for_secret(self, machine: str, path: Path) -> set[sops.SopsKey]:
-        from clan_cli.secrets.secrets import (  # noqa: PLC0415
+        from clan_cli.secrets.secrets import (
             collect_keys_for_path,
             collect_keys_for_type,
         )
@@ -354,10 +354,10 @@ class SecretStore(StoreBase):
             ClanError: If the specified file_name is not found

         """
-        from clan_cli.secrets.secrets import update_keys  # noqa: PLC0415
+        from clan_cli.secrets.secrets import update_keys

         if generators is None:
-            from clan_cli.vars.generator import Generator  # noqa: PLC0415
+            from clan_cli.vars.generator import Generator

             generators = Generator.get_machine_generators([machine], self.flake)
         file_found = False
@@ -319,9 +319,9 @@ def load_in_all_api_functions() -> None:
     We have to make sure python loads every wrapped function at least once.
     This is done by importing all modules from the clan_lib and clan_cli packages.
     """
-    import clan_cli  # noqa: PLC0415 # Avoid circular imports - many modules import from clan_lib.api
+    import clan_cli  # Avoid circular imports - many modules import from clan_lib.api

-    import clan_lib  # noqa: PLC0415 # Avoid circular imports - many modules import from clan_lib.api
+    import clan_lib  # Avoid circular imports - many modules import from clan_lib.api

     import_all_modules_from_package(clan_lib)
     import_all_modules_from_package(clan_cli)
@@ -88,7 +88,7 @@ def list_system_storage_devices() -> Blockdevices:
         A list of detected block devices with metadata like size, path, type, etc.

     """
-    from clan_lib.nix import nix_shell  # noqa: PLC0415
+    from clan_lib.nix import nix_shell

     cmd = nix_shell(
         ["util-linux"],
@@ -124,7 +124,7 @@ def get_clan_directory_relative(flake: Flake) -> str:
         ClanError: If the flake evaluation fails or directories cannot be found

     """
-    from clan_lib.dirs import get_clan_directories  # noqa: PLC0415
+    from clan_lib.dirs import get_clan_directories

     _, relative_dir = get_clan_directories(flake)
     return relative_dir
@@ -1132,6 +1132,20 @@ class Flake:

         return self._cache.select(selector)

+    def machine_selector(self, machine_name: str, selector: str) -> str:
+        """Create a selector for a specific machine.
+
+        Args:
+            machine_name: The name of the machine
+            selector: The attribute selector string relative to the machine config
+        Returns:
+            The full selector string for the machine
+
+        """
+        config = nix_config()
+        system = config["system"]
+        return f'clanInternals.machines."{system}"."{machine_name}".{selector}'
+
     def select_machine(self, machine_name: str, selector: str) -> Any:
         """Select a nix attribute for a specific machine.

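Review note: `machine_selector` is plain string assembly, so `select_machine` in the next hunk can delegate to it. A standalone illustration of the string it builds (the system value is an example):

```python
def machine_selector(system: str, machine_name: str, selector: str) -> str:
    # Mirrors the f-string above; the real method reads `system` from nix_config().
    return f'clanInternals.machines."{system}"."{machine_name}".{selector}'


assert (
    machine_selector("x86_64-linux", "machine1", "config.networking.hostName")
    == 'clanInternals.machines."x86_64-linux"."machine1".config.networking.hostName'
)
```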
@@ -1141,18 +1155,14 @@ class Flake:
             apply: Optional function to apply to the result

         """
-        config = nix_config()
-        system = config["system"]
-
-        full_selector = f'clanInternals.machines."{system}"."{machine_name}".{selector}'
-        return self.select(full_selector)
+        return self.select(self.machine_selector(machine_name, selector))

     def list_machines(
         self,
         opts: "ListOptions | None" = None,
     ) -> "dict[str, MachineResponse]":
         """List machines of a clan"""
-        from clan_lib.machines.actions import list_machines  # noqa: PLC0415
+        from clan_lib.machines.actions import list_machines

         return list_machines(self, opts)

@@ -18,14 +18,14 @@ def locked_open(filename: Path, mode: str = "r") -> Generator:


 def write_history_file(data: Any) -> None:
-    from clan_lib.dirs import user_history_file  # noqa: PLC0415
+    from clan_lib.dirs import user_history_file

     with locked_open(user_history_file(), "w+") as f:
         f.write(json.dumps(data, cls=ClanJSONEncoder, indent=4))


 def read_history_file() -> list[dict]:
-    from clan_lib.dirs import user_history_file  # noqa: PLC0415
+    from clan_lib.dirs import user_history_file

     with locked_open(user_history_file(), "r") as f:
         content: str = f.read()
@@ -119,6 +119,9 @@ def run_machine_hardware_info_init(
     if opts.debug:
         cmd += ["--debug"]

+    # Add nix options to nixos-anywhere
+    cmd.extend(opts.machine.flake.nix_options or [])
+
     cmd += [target_host.target]
     cmd = nix_shell(
         ["nixos-anywhere"],
@@ -33,7 +33,7 @@ class Machine:

     def get_inv_machine(self) -> "InventoryMachine":
         # Import on demand to avoid circular imports
-        from clan_lib.machines.actions import get_machine  # noqa: PLC0415
+        from clan_lib.machines.actions import get_machine

         return get_machine(self.flake, self.name)

@@ -95,7 +95,7 @@ class Machine:

     @cached_property
     def secret_vars_store(self) -> StoreBase:
-        from clan_cli.vars.secret_modules import password_store  # noqa: PLC0415
+        from clan_cli.vars.secret_modules import password_store

         secret_module = self.select("config.clan.core.vars.settings.secretModule")
         module = importlib.import_module(secret_module)
@@ -126,7 +126,7 @@ class Machine:
         return self.flake.path

     def target_host(self) -> Remote:
-        from clan_lib.network.network import get_best_remote  # noqa: PLC0415
+        from clan_lib.network.network import get_best_remote

         with get_best_remote(self) as remote:
             return remote
@@ -42,7 +42,7 @@ def _suggest_similar_names(


 def get_available_machines(flake: Flake) -> list[str]:
-    from clan_lib.machines.list import list_machines  # noqa: PLC0415
+    from clan_lib.machines.list import list_machines

     machines = list_machines(flake)
     return list(machines.keys())
@@ -34,7 +34,7 @@ class Peer:
         _var: dict[str, str] = self._host["var"]
         machine_name = _var["machine"]
         generator = _var["generator"]
-        from clan_lib.machines.machines import Machine  # noqa: PLC0415
+        from clan_lib.machines.machines import Machine

         machine = Machine(name=machine_name, flake=self.flake)
         var = get_machine_var(
@@ -136,92 +136,123 @@ def networks_from_flake(flake: Flake) -> dict[str, Network]:
     return networks


-@contextmanager
-def get_best_remote(machine: "Machine") -> Iterator["Remote"]:
-    """Context manager that yields the best remote connection for a machine following this priority:
-    1. If machine has targetHost in inventory, return a direct connection
-    2. Return the highest priority network where machine is reachable
-    3. If no network works, try to get targetHost from machine nixos config
-
-    Args:
-        machine: Machine instance to connect to
-
-    Yields:
-        Remote object for connecting to the machine
-
-    Raises:
-        ClanError: If no connection method works
-
-    """
-    # Step 1: Check if targetHost is set in inventory
-    inv_machine = machine.get_inv_machine()
-    target_host = inv_machine.get("deploy", {}).get("targetHost")
-
-    if target_host:
-        log.debug(f"Using targetHost from inventory for {machine.name}: {target_host}")
-        # Create a direct network with just this machine
-        remote = Remote.from_ssh_uri(machine_name=machine.name, address=target_host)
-        yield remote
-        return
-
-    # Step 2: Try existing networks by priority
-    try:
-        networks = networks_from_flake(machine.flake)
-
-        sorted_networks = sorted(networks.items(), key=lambda x: -x[1].priority)
-
-        for network_name, network in sorted_networks:
-            if machine.name not in network.peers:
-                continue
-
-            # Check if network is running and machine is reachable
-            log.debug(f"trying to connect via {network_name}")
-            if network.is_running():
-                try:
-                    ping_time = network.ping(machine.name)
-                    if ping_time is not None:
-                        log.info(
-                            f"Machine {machine.name} reachable via {network_name} network",
-                        )
-                        yield network.remote(machine.name)
-                        return
-                except ClanError as e:
-                    log.debug(f"Failed to reach {machine.name} via {network_name}: {e}")
-            else:
-                try:
-                    log.debug(f"Establishing connection for network {network_name}")
-                    with network.module.connection(network) as connected_network:
-                        ping_time = connected_network.ping(machine.name)
-                        if ping_time is not None:
-                            log.info(
-                                f"Machine {machine.name} reachable via {network_name} network after connection",
-                            )
-                            yield connected_network.remote(machine.name)
-                            return
-                except ClanError as e:
-                    log.debug(
-                        f"Failed to establish connection to {machine.name} via {network_name}: {e}",
-                    )
-    except (ImportError, AttributeError, KeyError) as e:
-        log.debug(f"Failed to use networking modules to determine machines remote: {e}")
-
-    # Step 3: Try targetHost from machine nixos config
-    target_host = machine.select('config.clan.core.networking."targetHost"')
-    if target_host:
-        log.debug(
-            f"Using targetHost from machine config for {machine.name}: {target_host}",
-        )
-        # Check if reachable
-        remote = Remote.from_ssh_uri(
-            machine_name=machine.name,
-            address=target_host,
-        )
-        yield remote
-        return
-
-    # No connection method found
-    msg = f"Could not find any way to connect to machine '{machine.name}'. No targetHost configured and machine not reachable via any network."
-    raise ClanError(msg)
+class BestRemoteContext:
+    """Class-based context manager for establishing and maintaining network connections."""
+
+    def __init__(self, machine: "Machine") -> None:
+        self.machine = machine
+        self._network_ctx: Any = None
+        self._remote: Remote | None = None
+
+    def __enter__(self) -> "Remote":
+        """Establish the best remote connection for a machine following this priority:
+        1. If machine has targetHost in inventory, return a direct connection
+        2. Return the highest priority network where machine is reachable
+        3. If no network works, try to get targetHost from machine nixos config
+
+        Returns:
+            Remote object for connecting to the machine
+
+        Raises:
+            ClanError: If no connection method works
+
+        """
+        # Step 1: Check if targetHost is set in inventory
+        inv_machine = self.machine.get_inv_machine()
+        target_host = inv_machine.get("deploy", {}).get("targetHost")
+
+        if target_host:
+            log.debug(
+                f"Using targetHost from inventory for {self.machine.name}: {target_host}"
+            )
+            self._remote = Remote.from_ssh_uri(
+                machine_name=self.machine.name, address=target_host
+            )
+            return self._remote
+
+        # Step 2: Try existing networks by priority
+        try:
+            networks = networks_from_flake(self.machine.flake)
+            sorted_networks = sorted(networks.items(), key=lambda x: -x[1].priority)
+
+            for network_name, network in sorted_networks:
+                if self.machine.name not in network.peers:
+                    continue
+
+                log.debug(f"trying to connect via {network_name}")
+                if network.is_running():
+                    try:
+                        ping_time = network.ping(self.machine.name)
+                        if ping_time is not None:
+                            log.info(
+                                f"Machine {self.machine.name} reachable via {network_name} network",
+                            )
+                            self._remote = remote = network.remote(self.machine.name)
+                            return remote
+                    except ClanError as e:
+                        log.debug(
+                            f"Failed to reach {self.machine.name} via {network_name}: {e}"
+                        )
+                else:
+                    try:
+                        log.debug(f"Establishing connection for network {network_name}")
+                        # Enter the network context and keep it alive
+                        self._network_ctx = network.module.connection(network)
+                        connected_network = self._network_ctx.__enter__()
+                        ping_time = connected_network.ping(self.machine.name)
+                        if ping_time is not None:
+                            log.info(
+                                f"Machine {self.machine.name} reachable via {network_name} network after connection",
+                            )
+                            self._remote = remote = connected_network.remote(
+                                self.machine.name
+                            )
+                            return remote
+                        # Ping failed, clean up this connection attempt
+                        self._network_ctx.__exit__(None, None, None)
+                        self._network_ctx = None
+                    except ClanError as e:
+                        # Clean up failed connection attempt
+                        if self._network_ctx is not None:
+                            self._network_ctx.__exit__(None, None, None)
+                            self._network_ctx = None
+                        log.debug(
+                            f"Failed to establish connection to {self.machine.name} via {network_name}: {e}",
+                        )
+        except (ImportError, AttributeError, KeyError) as e:
+            log.debug(
+                f"Failed to use networking modules to determine machines remote: {e}"
+            )
+
+        # Step 3: Try targetHost from machine nixos config
+        target_host = self.machine.select('config.clan.core.networking."targetHost"')
+        if target_host:
+            log.debug(
+                f"Using targetHost from machine config for {self.machine.name}: {target_host}",
+            )
+            self._remote = Remote.from_ssh_uri(
+                machine_name=self.machine.name,
+                address=target_host,
+            )
+            return self._remote
+
+        # No connection method found
+        msg = f"Could not find any way to connect to machine '{self.machine.name}'. No targetHost configured and machine not reachable via any network."
+        raise ClanError(msg)
+
+    def __exit__(
+        self,
+        exc_type: type[BaseException] | None,
+        exc_val: BaseException | None,
+        exc_tb: object,
+    ) -> None:
+        """Clean up network connection if one was established."""
+        if self._network_ctx is not None:
+            self._network_ctx.__exit__(exc_type, exc_val, exc_tb)
+
+
+def get_best_remote(machine: "Machine") -> BestRemoteContext:
+    return BestRemoteContext(machine)


 def get_network_overview(networks: dict[str, Network]) -> dict:
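Review note: the class-based manager above keeps any network connection it had to open alive for the whole caller block, which the old generator-based version could not guarantee after yielding. A hedged usage sketch, assuming clan_lib's `Machine`, `Flake` and the `Remote.target` attribute seen elsewhere in this diff (the flake path is a placeholder):

```python
from clan_lib.flake import Flake
from clan_lib.machines.machines import Machine
from clan_lib.network.network import get_best_remote

machine = Machine(name="machine1", flake=Flake("/path/to/flake"))  # hypothetical path
with get_best_remote(machine) as remote:
    # Any overlay/VPN connection established while picking this remote stays up
    # until the with-block exits.
    print(remote.target)
```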
@@ -88,7 +88,7 @@ def nix_eval(flags: list[str]) -> list[str]:
         ],
     )
     if os.environ.get("IN_NIX_SANDBOX"):
-        from clan_lib.dirs import nixpkgs_source  # noqa: PLC0415
+        from clan_lib.dirs import nixpkgs_source

         return [
             *default_flags,
@@ -169,7 +169,7 @@ def nix_shell(packages: list[str], cmd: list[str]) -> list[str]:
     if not missing_packages:
         return cmd

-    from clan_lib.dirs import nixpkgs_flake  # noqa: PLC0415
+    from clan_lib.dirs import nixpkgs_flake

     return [
         *nix_command(["shell", "--inputs-from", f"{nixpkgs_flake()!s}"]),
@@ -464,12 +464,12 @@ class Remote:
         self,
         opts: "ConnectionOptions | None" = None,
     ) -> None:
-        from clan_lib.network.check import check_machine_ssh_reachable  # noqa: PLC0415
+        from clan_lib.network.check import check_machine_ssh_reachable

         return check_machine_ssh_reachable(self, opts)

     def check_machine_ssh_login(self) -> None:
-        from clan_lib.network.check import check_machine_ssh_login  # noqa: PLC0415
+        from clan_lib.network.check import check_machine_ssh_login

         return check_machine_ssh_login(self)

@@ -5,6 +5,7 @@ from clan_cli.vars import graph
 from clan_cli.vars.generator import Generator
 from clan_cli.vars.graph import requested_closure
 from clan_cli.vars.migration import check_can_migrate, migrate_files
+from clan_cli.vars.secret_modules import sops

 from clan_lib.api import API
 from clan_lib.errors import ClanError
@@ -93,21 +94,21 @@ def _ensure_healthy(
     if generators is None:
         generators = Generator.get_machine_generators([machine.name], machine.flake)

-    pub_healtcheck_msg = machine.public_vars_store.health_check(
+    public_health_check_msg = machine.public_vars_store.health_check(
         machine.name,
         generators,
     )
-    sec_healtcheck_msg = machine.secret_vars_store.health_check(
+    secret_health_check_msg = machine.secret_vars_store.health_check(
         machine.name,
         generators,
     )

-    if pub_healtcheck_msg or sec_healtcheck_msg:
+    if public_health_check_msg or secret_health_check_msg:
         msg = f"Health check failed for machine {machine.name}:\n"
-        if pub_healtcheck_msg:
-            msg += f"Public vars store: {pub_healtcheck_msg}\n"
-        if sec_healtcheck_msg:
-            msg += f"Secret vars store: {sec_healtcheck_msg}"
+        if public_health_check_msg:
+            msg += f"Public vars store: {public_health_check_msg}\n"
+        if secret_health_check_msg:
+            msg += f"Secret vars store: {secret_health_check_msg}"
         raise ClanError(msg)

@@ -152,15 +153,15 @@ def run_generators(
     if not machines:
         msg = "At least one machine must be provided"
         raise ClanError(msg)
+    all_generators = get_generators(machines, full_closure=True)
     if isinstance(generators, list):
         # List of generator names - use them exactly as provided
         if len(generators) == 0:
             return
-        all_generators = get_generators(machines, full_closure=True)
-        generator_objects = [g for g in all_generators if g.key.name in generators]
+        generators_to_run = [g for g in all_generators if g.key.name in generators]
     else:
         # None or single string - use get_generators with closure parameter
-        generator_objects = get_generators(
+        generators_to_run = get_generators(
             machines,
             full_closure=full_closure,
             generator_name=generators,
@@ -170,20 +171,49 @@ def run_generators(
     # TODO: make this more lazy and ask for every generator on execution
     if callable(prompt_values):
         prompt_values = {
-            generator.name: prompt_values(generator) for generator in generator_objects
+            generator.name: prompt_values(generator) for generator in generators_to_run
         }

     # execute health check
     for machine in machines:
         _ensure_healthy(machine=machine)

+    # ensure all selected machines have access to all selected shared generators
+    for machine in machines:
+        # This is only relevant for the sops store
+        # TODO: improve store abstraction to use Protocols and introduce a proper SecretStore interface
+        if not isinstance(machine.secret_vars_store, sops.SecretStore):
+            continue
+        for generator in all_generators:
+            if generator.share:
+                for file in generator.files:
+                    if not file.secret or not file.exists:
+                        continue
+                    machine.secret_vars_store.ensure_machine_has_access(
+                        generator,
+                        file.name,
+                        machine.name,
+                    )
+
+    # get the flake via any machine (they are all the same)
+    flake = machines[0].flake
+
+    def get_generator_machine(generator: Generator) -> Machine:
+        if generator.share:
+            # return first machine if generator is shared
+            return machines[0]
+        return Machine(name=generator.machines[0], flake=flake)
+
+    # preheat the select cache, to reduce repeated calls during execution
+    selectors = []
+    for generator in generators_to_run:
+        machine = get_generator_machine(generator)
+        selectors.append(generator.final_script_selector(machine.name))
+    flake.precache(selectors)
+
     # execute generators
-    for generator in generator_objects:
-        machine = (
-            machines[0]
-            if generator.machine is None
-            else Machine(name=generator.machine, flake=machines[0].flake)
-        )
+    for generator in generators_to_run:
+        machine = get_generator_machine(generator)
         if check_can_migrate(machine, generator):
             migrate_files(machine, generator)
         else:
@@ -290,9 +290,7 @@ def collect_commands() -> list[Category]:
         # 3. sort by title alphabetically
         return (c.title.split(" ")[0], c.title, weight)

-    result = sorted(result, key=weight_cmd_groups)
-
-    return result
+    return sorted(result, key=weight_cmd_groups)


 def build_command_reference() -> None:
@@ -36,7 +36,7 @@ class MPProcess:
 def _set_proc_name(name: str) -> None:
     if sys.platform != "linux":
         return
-    import ctypes  # noqa: PLC0415
+    import ctypes

     # Define the prctl function with the appropriate arguments and return type
     libc = ctypes.CDLL("libc.so.6")
@@ -759,12 +759,12 @@ class Win32Implementation(BaseImplementation):
     SM_CXSMICON = 49

     if sys.platform == "win32":
-        from ctypes import Structure  # noqa: PLC0415
+        from ctypes import Structure

         class WNDCLASSW(Structure):
             """Windows class structure for window registration."""

-            from ctypes import CFUNCTYPE, wintypes  # noqa: PLC0415
+            from ctypes import CFUNCTYPE, wintypes

             LPFN_WND_PROC = CFUNCTYPE(
                 wintypes.INT,
@@ -789,7 +789,7 @@ class Win32Implementation(BaseImplementation):
         class MENUITEMINFOW(Structure):
             """Windows menu item information structure."""

-            from ctypes import wintypes  # noqa: PLC0415
+            from ctypes import wintypes

             _fields_: ClassVar = [
                 ("cb_size", wintypes.UINT),
@@ -809,7 +809,7 @@ class Win32Implementation(BaseImplementation):
         class NOTIFYICONDATAW(Structure):
             """Windows notification icon data structure."""

-            from ctypes import wintypes  # noqa: PLC0415
+            from ctypes import wintypes

             _fields_: ClassVar = [
                 ("cb_size", wintypes.DWORD),
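The Win32 hunks above are all the same mechanical change: dropping `# noqa: PLC0415` from late `ctypes` imports. The surrounding structures are ordinary `ctypes.Structure` subclasses whose `_fields_` mirror the C layout. A small, platform-independent sketch of that pattern (`POINT` is a generic example, not part of the tray-icon code):

```python
import ctypes


class POINT(ctypes.Structure):
    """Field names, order, and C types must match the native struct exactly."""

    _fields_ = [
        ("x", ctypes.c_long),
        ("y", ctypes.c_long),
    ]


p = POINT(x=10, y=20)
print(p.x, p.y, ctypes.sizeof(POINT))  # sizeof reflects the declared layout
```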
@@ -1061,7 +1061,7 @@ class Win32Implementation(BaseImplementation):
         if sys.platform != "win32":
             return

-        from ctypes import wintypes  # noqa: PLC0415
+        from ctypes import wintypes

         if self._menu is None:
             self.update_menu()
@@ -1110,7 +1110,7 @@ class Win32Implementation(BaseImplementation):
         if sys.platform != "win32":
             return 0

-        from ctypes import wintypes  # noqa: PLC0415
+        from ctypes import wintypes

         if msg == self.WM_TRAYICON:
             if l_param == self.WM_RBUTTONUP:
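The context at the end of this hunk is the window procedure's tray-message handling: a right-button-up on the tray icon opens the context menu. A hedged, simplified sketch of that dispatch — `WM_RBUTTONUP` is the real Win32 constant, while `WM_TRAYICON` is an app-defined message id (here assumed to be `WM_APP + 1`):

```python
WM_APP = 0x8000
WM_TRAYICON = WM_APP + 1  # assumed app-defined id, not taken from the diff
WM_RBUTTONUP = 0x0205


def window_proc(msg: int, l_param: int, show_menu) -> int:
    """Handle only the tray right-click; everything else falls through."""
    if msg == WM_TRAYICON and l_param == WM_RBUTTONUP:
        show_menu()
    return 0


window_proc(WM_TRAYICON, WM_RBUTTONUP, lambda: print("show context menu"))
```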
71
pkgs/docs-from-code/flake-module.nix
Normal file
@@ -0,0 +1,71 @@
{ self, inputs, ... }:
{
  perSystem =
    { pkgs, self', ... }:
    let
      # Simply evaluated options (JSON)
      # { clanCore = «derivation JSON»; clanModules = { ${name} = «derivation JSON» }; }
      jsonDocs = pkgs.callPackage ./get-module-docs.nix {
        inherit (self) clanModules;
        clan-core = self;
        inherit pkgs;
      };

      # clan service options
      clanModulesViaService = pkgs.writeText "info.json" (builtins.toJSON jsonDocs.clanModulesViaService);

      # Simply evaluated options (JSON)
      renderOptions =
        pkgs.runCommand "render-options"
          {
            # TODO: ruff does not splice properly in nativeBuildInputs
            depsBuildBuild = [ pkgs.ruff ];
            nativeBuildInputs = [
              pkgs.python3
              pkgs.mypy
              self'.packages.clan-cli
            ];
          }
          ''
            install -D -m755 ${./generate}/__init__.py $out/bin/render-options
            patchShebangs --build $out/bin/render-options

            ruff format --check --diff $out/bin/render-options
            ruff check --line-length 88 $out/bin/render-options
            mypy --strict $out/bin/render-options
          '';

      module-docs =
        pkgs.runCommand "rendered"
          {
            buildInputs = [
              pkgs.python3
              self'.packages.clan-cli
            ];
          }
          ''
            export CLAN_CORE_PATH=${
              inputs.nixpkgs.lib.fileset.toSource {
                root = ../..;
                fileset = ../../clanModules;
              }
            }
            export CLAN_CORE_DOCS=${jsonDocs.clanCore}/share/doc/nixos/options.json

            # A file that contains the links to all clanModule docs
            export CLAN_MODULES_VIA_SERVICE=${clanModulesViaService}
            export CLAN_SERVICE_INTERFACE=${self'.legacyPackages.clan-service-module-interface}/share/doc/nixos/options.json
            export CLAN_OPTIONS_PATH=${self'.legacyPackages.clan-options}/share/doc/nixos/options.json

            mkdir $out

            # The python script will place mkDocs files in the output directory
            exec python3 ${renderOptions}/bin/render-options
          '';
    in
    {
      packages = {
        inherit module-docs;
      };
    };
}
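The new module above only wires up the environment for the `render-options` entry point; the script itself is not part of this diff. A rough, hypothetical sketch of the kind of work it performs — reading a NixOS-style `options.json` from `CLAN_CORE_DOCS` and writing a markdown page into `$out` for mkDocs — might look like the following; the file layout and field names are assumptions, not the actual clan-cli implementation:

```python
import json
import os
from pathlib import Path


def render_core_options() -> None:
    # CLAN_CORE_DOCS and $out are provided by the Nix build environment above.
    options = json.loads(Path(os.environ["CLAN_CORE_DOCS"]).read_text())
    out = Path(os.environ.get("out", "out"))
    out.mkdir(parents=True, exist_ok=True)

    lines = ["# Clan core options", ""]
    for name, opt in sorted(options.items()):
        desc = opt.get("description") or ""
        if isinstance(desc, dict):  # some options.json variants wrap docs in a dict
            desc = desc.get("text", "")
        lines += [f"## `{name}`", "", desc, ""]
        if opt.get("type"):
            lines += [f"**Type:** `{opt['type']}`", ""]

    (out / "options.md").write_text("\n".join(lines))


if __name__ == "__main__":
    render_core_options()
```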
@@ -2,12 +2,14 @@

 {
   imports = [
-    ./clan-cli/flake-module.nix
-    ./clan-vm-manager/flake-module.nix
-    ./installer/flake-module.nix
-    ./icon-update/flake-module.nix
-    ./clan-core-flake/flake-module.nix
     ./clan-app/flake-module.nix
+    ./clan-cli/flake-module.nix
+    ./clan-core-flake/flake-module.nix
+    ./clan-vm-manager/flake-module.nix
+    ./icon-update/flake-module.nix
+    ./installer/flake-module.nix
+    ./option-search/flake-module.nix
+    ./docs-from-code/flake-module.nix
     ./testing/flake-module.nix
   ];

@@ -24,7 +24,7 @@

       serviceModules = self.clan.modules;

-      baseHref = "/options/";
+      baseHref = "/option-search/";

       getRoles =
         module:
@@ -118,7 +118,7 @@
         _file = "docs flake-module";
         imports = [
           { _module.args = { inherit clanLib; }; }
-          (import ../../../lib/modules/inventoryClass/roles-interface.nix {
+          (import ../../lib/modules/inventoryClass/roles-interface.nix {
             nestedSettingsOption = mkOption {
               type = types.raw;
               description = ''
@@ -201,7 +201,7 @@
       # };

       packages = {
-        docs-options =
+        option-search =
           if privateInputs ? nuschtos then
             privateInputs.nuschtos.packages.${pkgs.stdenv.hostPlatform.system}.mkMultiSearch {
               inherit baseHref;