Merge branch 'main' into ke-test-extra-packages

Kenji Berthold
2025-10-13 12:31:37 +00:00
36 changed files with 521 additions and 421 deletions

View File

@@ -19,28 +19,19 @@ let
nixosLib = import (self.inputs.nixpkgs + "/nixos/lib") { };
in
{
imports =
let
clanCoreModulesDir = ../nixosModules/clanCore;
getClanCoreTestModules =
let
moduleNames = attrNames (builtins.readDir clanCoreModulesDir);
testPaths = map (
moduleName: clanCoreModulesDir + "/${moduleName}/tests/flake-module.nix"
) moduleNames;
in
filter pathExists testPaths;
in
getClanCoreTestModules
++ filter pathExists [
./devshell/flake-module.nix
./flash/flake-module.nix
./installation/flake-module.nix
./update/flake-module.nix
./morph/flake-module.nix
./nixos-documentation/flake-module.nix
./dont-depend-on-repo-root.nix
];
imports = filter pathExists [
./devshell/flake-module.nix
./flash/flake-module.nix
./installation/flake-module.nix
./update/flake-module.nix
./morph/flake-module.nix
./nixos-documentation/flake-module.nix
./dont-depend-on-repo-root.nix
# clan core submodule tests
../nixosModules/clanCore/machine-id/tests/flake-module.nix
../nixosModules/clanCore/postgresql/tests/flake-module.nix
../nixosModules/clanCore/state-version/tests/flake-module.nix
];
flake.check = genAttrs [ "x86_64-linux" "aarch64-darwin" ] (
system:
let

View File

@@ -15,7 +15,6 @@ let
networking.useNetworkd = true;
services.openssh.enable = true;
services.openssh.settings.UseDns = false;
services.openssh.settings.PasswordAuthentication = false;
system.nixos.variant_id = "installer";
environment.systemPackages = [
pkgs.nixos-facter

View File

@@ -1,91 +1,39 @@
# Clan service: sshd
What it does
- Generates and persists SSH host keys via `vars`.
- Optionally issues CA-signed host certificates for servers.
- Installs the `server` CA public key into each `client`'s `known_hosts` for TOFU-less verification.
The `sshd` Clan service manages SSH to make it easy to securely access your
machines over the internet. The service uses `vars` to store the SSH host keys
for each machine to ensure they remain stable across deployments.
`sshd` also generates SSH certificates for both servers and clients, allowing for
certificate-based authentication over SSH.
When to use it
- Zero-TOFU SSH for dynamic fleets: admins/CI can connect to frequently rebuilt hosts (e.g., server-1.example.com) without prompts or per-host `known_hosts` churn.
The service also disables password-based authentication over SSH; to access your
machines you'll need to use public-key or certificate-based authentication.
Roles
- Server: runs sshd and presents a CA-signed host certificate for `<machine>.<domain>`.
- Client: trusts the CA for the given domains to verify servers' certificates.
Tip: assign both roles to a machine if it should both present a cert and verify others.
Quick start (with host certificates)
Useful if you never want to get a prompt about trusting the ssh fingerprint.
```nix
{
inventory.instances = {
sshd-with-certs = {
module = { name = "sshd"; input = "clan-core"; };
# Servers present certificates for <machine>.example.com
roles.server.tags.all = { };
roles.server.settings = {
certificate.searchDomains = [ "example.com" ];
# Optional: also add RSA host keys
# hostKeys.rsa.enable = true;
};
# Clients trust the CA for *.example.com
roles.client.tags.all = { };
roles.client.settings = {
certificate.searchDomains = [ "example.com" ];
};
};
};
}
```
Basic: only add persistent host keys (ed25519), no certificates
Useful if you want to get an ssh "trust this server" prompt once and then never again.
## Usage
```nix
{
inventory.instances = {
# By default this service only generates ed25519 host keys
sshd-basic = {
module = {
name = "sshd";
input = "clan-core";
};
roles.server.tags.all = { };
roles.client.tags.all = { };
};
};
}
```
Example: selective trust per environment
Admins should trust only production; CI should trust prod and staging. Servers are reachable under both domains.
```nix
{
  inventory.instances = {
    sshd-env-scoped = {
      module = { name = "sshd"; input = "clan-core"; };
      # Servers present certs for both prod and staging FQDNs
      roles.server.tags.all = { };
      roles.server.settings = {
        certificate.searchDomains = [ "prod.example.com" "staging.example.com" ];
      };
      # Admin laptop: trust prod only
      roles.client.machines."admin-laptop".settings = {
        certificate.searchDomains = [ "prod.example.com" ];
      };
      # CI runner: trust prod and staging
      roles.client.machines."ci-runner-1".settings = {
        certificate.searchDomains = [ "prod.example.com" "staging.example.com" ];
      };
    };

    # Also generate RSA host keys for all servers
    sshd-with-rsa = {
      module = {
        name = "sshd";
        input = "clan-core";
      };
      roles.server.tags.all = { };
      roles.server.settings = {
        hostKeys.rsa.enable = true;
      };
      roles.client.tags.all = { };
    };
  };
}
```
- Admin -> server1.prod.example.com: zero-TOFU (verified via cert).
- Admin -> server1.staging.example.com: falls back to TOFU (or is blocked by policy).
- CI -> either prod or staging: zero-TOFU for both.
Note: server and client searchDomains don't have to be identical; they only need to overlap for the hostnames you actually use.
Notes
- Connect using a name that matches a cert principal (e.g., `server1.example.com`); wildcards are not allowed inside the certificate.
- CA private key stays in `vars` (not deployed); only the CA public key is distributed.
- Logins still require your user SSH keys on the server (passwords are disabled).
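For reference, the client-side trust configured by this service boils down to a standard OpenSSH `@cert-authority` entry in `known_hosts`. A rough illustration of what such an entry looks like (key material shortened; the exact entry written by the service may differ):
```
@cert-authority *.example.com ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAA... clan-ssh-ca
```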

18
devFlake/flake.lock generated
View File

@@ -3,10 +3,10 @@
"clan-core-for-checks": {
"flake": false,
"locked": {
"lastModified": 1759968599,
"narHash": "sha256-OdJ4OPAdvaIXZvwomVzjHWNTDdAX6++v4Ynjm2sXxBw=",
"lastModified": 1760213549,
"narHash": "sha256-XosVRUEcdsoEdRtXyz9HrRc4Dt9Ke+viM5OVF7tLK50=",
"ref": "main",
"rev": "28d8a91a309985aa2b8586ff120365de6b0241b3",
"rev": "9c8797e77031d8d472d057894f18a53bdc9bbe1e",
"shallow": true,
"type": "git",
"url": "https://git.clan.lol/clan/clan-core"
@@ -105,11 +105,11 @@
},
"nixpkgs-dev": {
"locked": {
"lastModified": 1759860509,
"narHash": "sha256-c7eJvqAlWLhwNc9raHkQ7mvoFbHLUO/cLMrww1ds4Zg=",
"lastModified": 1760161054,
"narHash": "sha256-PO3cKHFIQEPI0dr/SzcZwG50cHXfjoIqP2uS5W78OXg=",
"owner": "NixOS",
"repo": "nixpkgs",
"rev": "b574dcadf3fb578dee8d104b565bd745a5a9edc0",
"rev": "e18d8ec6fafaed55561b7a1b54eb1c1ce3ffa2c5",
"type": "github"
},
"original": {
@@ -208,11 +208,11 @@
"nixpkgs": []
},
"locked": {
"lastModified": 1758728421,
"narHash": "sha256-ySNJ008muQAds2JemiyrWYbwbG+V7S5wg3ZVKGHSFu8=",
"lastModified": 1760120816,
"narHash": "sha256-gq9rdocpmRZCwLS5vsHozwB6b5nrOBDNc2kkEaTXHfg=",
"owner": "numtide",
"repo": "treefmt-nix",
"rev": "5eda4ee8121f97b218f7cc73f5172098d458f1d1",
"rev": "761ae7aff00907b607125b2f57338b74177697ed",
"type": "github"
},
"original": {

View File

@@ -70,8 +70,6 @@ hide:
.clamp-toggle:checked ~ .clamp-more::after { content: "Read less"; }
</style>
trivial change
<div class="clamp-wrap" style="--lines: 3;">
<input type="checkbox" id="clan-readmore" class="clamp-toggle" />
<div class="clamp-content">

6
flake.lock generated
View File

@@ -181,11 +181,11 @@
]
},
"locked": {
"lastModified": 1758728421,
"narHash": "sha256-ySNJ008muQAds2JemiyrWYbwbG+V7S5wg3ZVKGHSFu8=",
"lastModified": 1760120816,
"narHash": "sha256-gq9rdocpmRZCwLS5vsHozwB6b5nrOBDNc2kkEaTXHfg=",
"owner": "numtide",
"repo": "treefmt-nix",
"rev": "5eda4ee8121f97b218f7cc73f5172098d458f1d1",
"rev": "761ae7aff00907b607125b2f57338b74177697ed",
"type": "github"
},
"original": {

View File

@@ -0,0 +1,51 @@
{ lib }:
let
sanitizePath =
rootPath: path:
let
storePrefix = builtins.unsafeDiscardStringContext ("${rootPath}");
pathStr = lib.removePrefix "/" (
lib.removePrefix storePrefix (builtins.unsafeDiscardStringContext (toString path))
);
in
pathStr;
mkFunctions = rootPath: passthru: virtual_fs: {
# Some functions to override lib functions
pathExists =
path:
let
pathStr = sanitizePath rootPath path;
isPassthru = builtins.any (exclude: (builtins.match exclude pathStr) != null) passthru;
in
if isPassthru then
builtins.pathExists path
else
let
res = virtual_fs ? ${pathStr};
in
lib.trace "pathExists: '${pathStr}' -> '${lib.generators.toPretty { } res}'" res;
readDir =
path:
let
pathStr = sanitizePath rootPath path;
base = (pathStr + "/");
res = lib.mapAttrs' (name: fileInfo: {
name = lib.removePrefix base name;
value = fileInfo.type;
}) (lib.filterAttrs (n: _: lib.hasPrefix base n) virtual_fs);
isPassthru = builtins.any (exclude: (builtins.match exclude pathStr) != null) passthru;
in
if isPassthru then
builtins.readDir path
else
lib.trace "readDir: '${pathStr}' -> '${lib.generators.toPretty { } res}'" res;
};
in
{
virtual_fs,
rootPath,
# Patterns
passthru ? [ ],
}:
mkFunctions rootPath passthru virtual_fs
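The helper only overrides `pathExists` and `readDir`, answering them from the `virtual_fs` attribute set (entries matching a `passthru` regex fall through to the real filesystem). A minimal standalone sketch of the interface, with illustrative paths and names; the real consumer is the `clanLib.fs` override wired up in `dir_test.nix` below:

```nix
let
  lib = import <nixpkgs/lib>;
  rootPath = ./.;
  # i.e. lib/clanTest/virtual-fs.nix in this commit
  vfs = import ./virtual-fs.nix { inherit lib; } {
    inherit rootPath;
    virtual_fs = {
      "machines" = { type = "directory"; };
      "machines/jon" = { type = "directory"; };
    };
    # passthru = [ ".*flake\.nix$" ]; # patterns answered by the real filesystem
  };
in
{
  exists = vfs.pathExists "${rootPath}/machines"; # -> true (from virtual_fs)
  entries = vfs.readDir "${rootPath}/machines"; # -> { jon = "directory"; }
}
```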

View File

@@ -28,7 +28,6 @@ lib.fix (
# Plain imports.
introspection = import ./introspection { inherit lib; };
jsonschema = import ./jsonschema { inherit lib; };
facts = import ./facts.nix { inherit lib; };
docs = import ./docs.nix { inherit lib; };
# flakes
@@ -36,6 +35,10 @@ lib.fix (
# TODO: Flatten our lib functions like this:
resolveModule = clanLib.callLib ./resolve-module { };
fs = {
inherit (builtins) pathExists readDir;
};
};
in
f

View File

@@ -1,71 +0,0 @@
{ lib, ... }:
clanDir:
let
allMachineNames = lib.mapAttrsToList (name: _: name) (builtins.readDir clanDir);
getFactPath = machine: fact: "${clanDir}/machines/${machine}/facts/${fact}";
readFact =
machine: fact:
let
path = getFactPath machine fact;
in
if builtins.pathExists path then builtins.readFile path else null;
# Example:
#
# readFactFromAllMachines zerotier-ip
# => {
# machineA = "1.2.3.4";
# machineB = "5.6.7.8";
# };
readFactFromAllMachines =
fact:
let
machines = allMachineNames;
facts = lib.genAttrs machines (machine: readFact machine fact);
filteredFacts = lib.filterAttrs (_machine: fact: fact != null) facts;
in
filteredFacts;
# Only returns machines for which all given facts are set; fact values are never null.
#
# Example:
#
# readFactsFromAllMachines [ "zerotier-ip" "syncthing.pub" ]
# => {
# machineA =
# {
# "zerotier-ip" = "1.2.3.4";
# "synching.pub" = "1234";
# };
# machineB =
# {
# "zerotier-ip" = "5.6.7.8";
# "synching.pub" = "23456719";
# };
# };
readFactsFromAllMachines =
facts:
let
# machine -> fact -> factvalue
machinesFactsAttrs = lib.genAttrs allMachineNames (
machine: lib.genAttrs facts (fact: readFact machine fact)
);
# remove all machines which don't have all facts set
filteredMachineFactAttrs = lib.filterAttrs (
_machine: values: builtins.all (fact: values.${fact} != null) facts
) machinesFactsAttrs;
in
filteredMachineFactAttrs;
in
{
inherit
allMachineNames
getFactPath
readFact
readFactFromAllMachines
readFactsFromAllMachines
;
}

View File

@@ -133,12 +133,13 @@ in
}
)
{
# TODO: Figure out why this causes infinite recursion
inventory.machines = lib.optionalAttrs (builtins.pathExists "${directory}/machines") (
builtins.mapAttrs (_n: _v: { }) (
lib.filterAttrs (_: t: t == "directory") (builtins.readDir "${directory}/machines")
)
);
# Note: we use clanLib.fs here, so that we can override it in tests
inventory = lib.optionalAttrs (clanLib.fs.pathExists "${directory}/machines") ({
imports = lib.mapAttrsToList (name: _t: {
_file = "${directory}/machines/${name}";
machines.${name} = { };
}) ((lib.filterAttrs (_: t: t == "directory") (clanLib.fs.readDir "${directory}/machines")));
});
}
{
inventory.machines = lib.mapAttrs (_n: _: { }) config.machines;

108
lib/modules/dir_test.nix Normal file
View File

@@ -0,0 +1,108 @@
{
lib ? import <nixpkgs/lib>,
}:
let
clanLibOrig = (import ./.. { inherit lib; }).__unfix__;
clanLibWithFs =
{ virtual_fs }:
lib.fix (
lib.extends (
final: _:
let
clan-core = {
clanLib = final;
modules.clan.default = lib.modules.importApply ./clan { inherit clan-core; };
# Note: Can add other things to "clan-core"
# ... Not needed for this test
};
in
{
clan = import ../clan {
inherit lib clan-core;
};
# Override clanLib.fs for unit-testing against a virtual filesystem
fs = import ../clanTest/virtual-fs.nix { inherit lib; } {
inherit rootPath virtual_fs;
# Example of a passthru
# passthru = [
# ".*inventory\.json$"
# ];
};
}
) clanLibOrig
);
rootPath = ./.;
in
{
test_autoload_directories =
let
vclan =
(clanLibWithFs {
virtual_fs = {
"machines" = {
type = "directory";
};
"machines/foo-machine" = {
type = "directory";
};
"machines/bar-machine" = {
type = "directory";
};
};
}).clan
{ config.directory = rootPath; };
in
{
inherit vclan;
expr = {
machines = lib.attrNames vclan.config.inventory.machines;
definedInMachinesDir = map (
p: lib.hasInfix "/machines/" p
) vclan.options.inventory.valueMeta.configuration.options.machines.files;
};
expected = {
machines = [
"bar-machine"
"foo-machine"
];
definedInMachinesDir = [
true # /machines/foo-machine
true # /machines/bar-machine
false # <clan-core>/module.nix defines "machines" without members
];
};
};
# Could probably be unified with the previous test
# This is here to show that 'virtual_fs' is a test parameter
test_files_are_not_machines =
let
vclan =
(clanLibWithFs {
virtual_fs = {
"machines" = {
type = "directory";
};
"machines/foo.nix" = {
type = "file";
};
"machines/bar.nix" = {
type = "file";
};
};
}).clan
{ config.directory = rootPath; };
in
{
inherit vclan;
expr = {
machines = lib.attrNames vclan.config.inventory.machines;
};
expected = {
machines = [ ];
};
};
}

View File

@@ -12,6 +12,7 @@ let
in
#######
{
autoloading = import ./dir_test.nix { inherit lib; };
test_missing_self =
let
eval = clan {

View File

@@ -164,13 +164,25 @@
config = lib.mkIf (config.clan.core.secrets != { }) {
clan.core.facts.services = lib.mapAttrs' (
name: service:
lib.warn "clan.core.secrets.${name} is deprecated, use clan.core.facts.services.${name} instead" (
lib.nameValuePair name ({
secret = service.secrets;
public = service.facts;
generator = service.generator;
})
)
lib.warn
''
###############################################################################
# #
# clan.core.secrets.${name} and clan.core.facts.services.${name} are deprecated #
# in favor of "vars" #
# #
# Refer to https://docs.clan.lol/guides/migrations/migration-facts-vars/ #
# for migration instructions. #
# #
###############################################################################
''
(
lib.nameValuePair name ({
secret = service.secrets;
public = service.facts;
generator = service.generator;
})
)
) config.clan.core.secrets;
};
}

View File

@@ -6,7 +6,17 @@
}:
{
config.warnings = lib.optionals (config.clan.core.facts.services != { }) [
"Facts are deprecated, please migrate them to vars instead, see: https://docs.clan.lol/guides/migrations/migration-facts-vars/"
''
###############################################################################
# #
# Facts are deprecated, please migrate any usages to vars instead #
# #
# #
# Refer to https://docs.clan.lol/guides/migrations/migration-facts-vars/ #
# for migration instructions. #
# #
###############################################################################
''
];
options.clan.core.facts = {

View File

@@ -5,33 +5,31 @@
let
inherit (lib)
filterAttrs
flatten
mapAttrsToList
;
in
generators:
let
relevantFiles =
generator:
filterAttrs (
_name: f: f.secret && f.deploy && (f.neededFor == "users" || f.neededFor == "services")
) generator.files;
allFiles = flatten (
mapAttrsToList (
gen_name: generator:
mapAttrsToList (fname: file: {
name = fname;
generator = gen_name;
neededForUsers = file.neededFor == "users";
inherit (generator) share;
inherit (file)
owner
group
mode
restartUnits
;
}) (relevantFiles generator)
) generators
relevantFiles = filterAttrs (
_name: f: f.secret && f.deploy && (f.neededFor == "users" || f.neededFor == "services")
);
collectFiles =
generators:
builtins.concatLists (
mapAttrsToList (
gen_name: generator:
mapAttrsToList (fname: file: {
name = fname;
generator = gen_name;
neededForUsers = file.neededFor == "users";
inherit (generator) share;
inherit (file)
owner
group
mode
restartUnits
;
}) (relevantFiles generator.files)
) generators
);
in
allFiles
collectFiles

View File

@@ -113,15 +113,27 @@ mkShell {
# todo darwin support needs some work
(lib.optionalString stdenv.hostPlatform.isLinux ''
# configure playwright for storybook snapshot testing
export PLAYWRIGHT_SKIP_BROWSER_DOWNLOAD=1
# we only want webkit as that matches what the app is rendered with
export PLAYWRIGHT_BROWSERS_PATH=${
playwright-driver.browsers.override {
withFfmpeg = false;
withFirefox = false;
withWebkit = true;
withChromium = false;
withChromiumHeadlessShell = true;
withChromiumHeadlessShell = false;
}
}
export PLAYWRIGHT_HOST_PLATFORM_OVERRIDE="ubuntu-24.04"
# stop playwright from trying to validate it has downloaded the necessary browsers
# we are providing them manually via nix
export PLAYWRIGHT_SKIP_VALIDATE_HOST_REQUIREMENTS=true
# playwright browser drivers are versioned, e.g. webkit-2191
# this helps us avoid having to update the playwright js dependency every time we update nixpkgs and vice versa
# see vitest.config.js for corresponding launch configuration
export PLAYWRIGHT_WEBKIT_EXECUTABLE=$(find -L "$PLAYWRIGHT_BROWSERS_PATH" -type f -name "pw_run.sh")
'');
}

View File

@@ -53,7 +53,7 @@
"jsdom": "^26.1.0",
"knip": "^5.61.2",
"markdown-to-jsx": "^7.7.10",
"playwright": "~1.53.2",
"playwright": "~1.55.1",
"postcss": "^8.4.38",
"postcss-url": "^10.1.3",
"prettier": "^3.2.5",
@@ -6956,13 +6956,13 @@
}
},
"node_modules/playwright": {
"version": "1.53.2",
"resolved": "https://registry.npmjs.org/playwright/-/playwright-1.53.2.tgz",
"integrity": "sha512-6K/qQxVFuVQhRQhFsVZ9fGeatxirtrpPgxzBYWyZLEXJzqYwuL4fuNmfOfD5et1tJE4GScKyPNeLhZeRwuTU3A==",
"version": "1.55.1",
"resolved": "https://registry.npmjs.org/playwright/-/playwright-1.55.1.tgz",
"integrity": "sha512-cJW4Xd/G3v5ovXtJJ52MAOclqeac9S/aGGgRzLabuF8TnIb6xHvMzKIa6JmrRzUkeXJgfL1MhukP0NK6l39h3A==",
"dev": true,
"license": "Apache-2.0",
"dependencies": {
"playwright-core": "1.53.2"
"playwright-core": "1.55.1"
},
"bin": {
"playwright": "cli.js"
@@ -6975,9 +6975,9 @@
}
},
"node_modules/playwright-core": {
"version": "1.53.2",
"resolved": "https://registry.npmjs.org/playwright-core/-/playwright-core-1.53.2.tgz",
"integrity": "sha512-ox/OytMy+2w1jcYEYlOo1Hhp8hZkLCximMTUTMBXjGUA1KoFfiSZ+DU+3a739jsPY0yoKH2TFy9S2fsJas8yAw==",
"version": "1.55.1",
"resolved": "https://registry.npmjs.org/playwright-core/-/playwright-core-1.55.1.tgz",
"integrity": "sha512-Z6Mh9mkwX+zxSlHqdr5AOcJnfp+xUWLCt9uKV18fhzA8eyxUd8NUWzAjxUh55RZKSYwDGX0cfaySdhZJGMoJ+w==",
"dev": true,
"license": "Apache-2.0",
"bin": {

View File

@@ -48,7 +48,7 @@
"jsdom": "^26.1.0",
"knip": "^5.61.2",
"markdown-to-jsx": "^7.7.10",
"playwright": "~1.53.2",
"playwright": "~1.55.1",
"postcss": "^8.4.38",
"postcss-url": "^10.1.3",
"prettier": "^3.2.5",

View File

@@ -1,7 +1,7 @@
import type { Meta, StoryObj } from "@kachurun/storybook-solid";
import { Button, ButtonProps } from "./Button";
import { Component } from "solid-js";
import { expect, fn, waitFor } from "storybook/test";
import { expect, fn, waitFor, within } from "storybook/test";
import { StoryContext } from "@kachurun/storybook-solid-vite";
const getCursorStyle = (el: Element) => window.getComputedStyle(el).cursor;
@@ -216,17 +216,11 @@ const timeout = process.env.NODE_ENV === "test" ? 500 : 2000;
export const Primary: Story = {
args: {
hierarchy: "primary",
onAction: fn(async () => {
// wait 500 ms to simulate an action
await new Promise((resolve) => setTimeout(resolve, timeout));
// randomly fail to check that the loading state still returns to normal
if (Math.random() > 0.5) {
throw new Error("Action failure");
}
}),
onClick: fn(),
},
play: async ({ canvas, step, userEvent, args }: StoryContext) => {
play: async ({ canvasElement, step, userEvent, args }: StoryContext) => {
const canvas = within(canvasElement);
const buttons = await canvas.findAllByRole("button");
for (const button of buttons) {
@@ -238,14 +232,6 @@ export const Primary: Story = {
}
await step(`Click on ${testID}`, async () => {
// check for the loader
const loaders = button.getElementsByClassName("loader");
await expect(loaders.length).toEqual(1);
// assert its width is 0 before we click
const [loader] = loaders;
await expect(loader.clientWidth).toEqual(0);
// move the mouse over the button
await userEvent.hover(button);
@@ -255,33 +241,8 @@ export const Primary: Story = {
// click the button
await userEvent.click(button);
// check the button has changed
await waitFor(
async () => {
// the action handler should have been called
await expect(args.onAction).toHaveBeenCalled();
// the button should have a loading class
await expect(button).toHaveClass("loading");
// the loader should be visible
await expect(loader.clientWidth).toBeGreaterThan(0);
// the pointer should have changed to wait
await expect(getCursorStyle(button)).toEqual("wait");
},
{ timeout: timeout + 500 },
);
// wait for the action handler to finish
await waitFor(
async () => {
// the loading class should be removed
await expect(button).not.toHaveClass("loading");
// the loader should be hidden
await expect(loader.clientWidth).toEqual(0);
// the pointer should be normal
await expect(getCursorStyle(button)).toEqual("pointer");
},
{ timeout: timeout + 500 },
);
// the click handler should have been called
await expect(args.onClick).toHaveBeenCalled();
});
}
},

View File

@@ -57,6 +57,7 @@ export const Button = (props: ButtonProps) => {
return (
<KobalteButton
role="button"
class={cx(
styles.button, // default button class
local.size != "default" && styles[local.size],

View File

@@ -160,47 +160,47 @@ const mockFetcher = <K extends OperationNames>(
},
}) satisfies ApiCall<K>;
export const Default: Story = {
args: {},
decorators: [
(Story: StoryObj) => {
const queryClient = new QueryClient({
defaultOptions: {
queries: {
retry: false,
staleTime: Infinity,
},
},
});
Object.entries(queryData).forEach(([clanURI, clan]) => {
queryClient.setQueryData(
["clans", encodeBase64(clanURI), "details"],
clan.details,
);
const machines = clan.machines || {};
queryClient.setQueryData(
["clans", encodeBase64(clanURI), "machines"],
machines,
);
Object.entries(machines).forEach(([name, machine]) => {
queryClient.setQueryData(
["clans", encodeBase64(clanURI), "machine", name, "state"],
machine.state,
);
});
});
return (
<ApiClientProvider client={{ fetch: mockFetcher }}>
<QueryClientProvider client={queryClient}>
<Story />
</QueryClientProvider>
</ApiClientProvider>
);
},
],
};
// export const Default: Story = {
// args: {},
// decorators: [
// (Story: StoryObj) => {
// const queryClient = new QueryClient({
// defaultOptions: {
// queries: {
// retry: false,
// staleTime: Infinity,
// },
// },
// });
//
// Object.entries(queryData).forEach(([clanURI, clan]) => {
// queryClient.setQueryData(
// ["clans", encodeBase64(clanURI), "details"],
// clan.details,
// );
//
// const machines = clan.machines || {};
//
// queryClient.setQueryData(
// ["clans", encodeBase64(clanURI), "machines"],
// machines,
// );
//
// Object.entries(machines).forEach(([name, machine]) => {
// queryClient.setQueryData(
// ["clans", encodeBase64(clanURI), "machine", name, "state"],
// machine.state,
// );
// });
// });
//
// return (
// <ApiClientProvider client={{ fetch: mockFetcher }}>
// <QueryClientProvider client={queryClient}>
// <Story />
// </QueryClientProvider>
// </ApiClientProvider>
// );
// },
// ],
// };

View File

@@ -11,28 +11,35 @@ export default meta;
type Story = StoryObj<ClanSettingsModalProps>;
export const Default: Story = {
args: {
onClose: fn(),
model: {
uri: "/home/foo/my-clan",
const props: ClanSettingsModalProps = {
onClose: fn(),
model: {
uri: "/home/foo/my-clan",
details: {
name: "Sol",
description: null,
icon: null,
fieldsSchema: {
name: {
readonly: true,
reason: null,
},
description: {
readonly: false,
reason: null,
},
icon: {
readonly: false,
reason: null,
},
},
fieldsSchema: {
name: {
readonly: true,
reason: null,
readonly_members: [],
},
description: {
readonly: false,
reason: null,
readonly_members: [],
},
icon: {
readonly: false,
reason: null,
readonly_members: [],
},
},
},
};
export const Default: Story = {
args: props,
};

View File

@@ -22,9 +22,9 @@ import { Alert } from "@/src/components/Alert/Alert";
import { removeClanURI } from "@/src/stores/clan";
const schema = v.object({
name: v.pipe(v.optional(v.string())),
description: v.nullish(v.string()),
icon: v.pipe(v.nullish(v.string())),
name: v.string(),
description: v.optional(v.string()),
icon: v.optional(v.string()),
});
export interface ClanSettingsModalProps {

View File

@@ -1,15 +0,0 @@
import { Meta, StoryObj } from "@kachurun/storybook-solid";
import { CubeScene } from "./cubes";
const meta: Meta = {
title: "scene/cubes",
component: CubeScene,
};
export default meta;
type Story = StoryObj;
export const Default: Story = {
args: {},
};

View File

@@ -304,11 +304,10 @@ const FlashProgress = () => {
const [store, set] = getStepStore<InstallStoreType>(stepSignal);
onMount(async () => {
const result = await store.flash.progress.result;
if (result.status == "success") {
console.log("Flashing Success");
const result = await store.flash?.progress?.result;
if (result?.status == "success") {
stepSignal.next();
}
stepSignal.next();
});
const handleCancel = async () => {

View File

@@ -165,23 +165,23 @@ export default meta;
type Story = StoryObj<typeof ServiceWorkflow>;
export const Default: Story = {
args: {},
};
export const SelectRoleMembers: Story = {
render: () => (
<ServiceWorkflow
handleSubmit={(instance) => {
console.log("Submitted instance:", instance);
}}
onClose={() => {
console.log("Closed");
}}
initialStep="select:members"
initialStore={{
currentRole: "peer",
}}
/>
),
};
// export const Default: Story = {
// args: {},
// };
//
// export const SelectRoleMembers: Story = {
// render: () => (
// <ServiceWorkflow
// handleSubmit={(instance) => {
// console.log("Submitted instance:", instance);
// }}
// onClose={() => {
// console.log("Closed");
// }}
// initialStep="select:members"
// initialStore={{
// currentRole: "peer",
// }}
// />
// ),
// };

View File

@@ -9,7 +9,11 @@
"esModuleInterop": true,
"jsx": "preserve",
"jsxImportSource": "solid-js",
"types": ["vite/client", "vite-plugin-solid-svg/types-component-solid"],
"types": [
"vite/client",
"vite-plugin-solid-svg/types-component-solid",
"@vitest/browser/providers/playwright"
],
"noEmit": true,
"resolveJsonModule": true,
"allowJs": true,

View File

@@ -40,7 +40,14 @@ export default mergeConfig(
enabled: true,
headless: true,
provider: "playwright",
instances: [{ browser: "chromium" }],
instances: [
{
browser: "webkit",
launch: {
executablePath: process.env.PLAYWRIGHT_WEBKIT_EXECUTABLE,
},
},
],
},
// This setup file applies Storybook project annotations for Vitest
// More info at: https://storybook.js.org/docs/api/portable-stories/portable-stories-vitest#setprojectannotations

View File

@@ -1,24 +0,0 @@
{
# Use this path to our repo root e.g. for UI test
# inputs.clan-core.url = "../../../../.";
# this placeholder is replaced by the path to nixpkgs
inputs.clan-core.url = "__CLAN_CORE__";
outputs =
{ self, clan-core }:
let
clan = clan-core.lib.clan {
inherit self;
meta.name = "test_flake_with_core_dynamic_machines";
machines =
let
machineModules = builtins.readDir (self + "/machines");
in
builtins.mapAttrs (name: _type: import (self + "/machines/${name}")) machineModules;
};
in
{
inherit (clan.config) nixosConfigurations nixosModules clanInternals;
};
}

View File

@@ -1,5 +1,6 @@
import json
import logging
import os
import shutil
import subprocess
import time
@@ -429,9 +430,43 @@ def test_generated_shared_secret_sops(
machine1 = Machine(name="machine1", flake=Flake(str(flake.path)))
machine2 = Machine(name="machine2", flake=Flake(str(flake.path)))
cli.run(["vars", "generate", "--flake", str(flake.path), "machine1"])
assert check_vars(machine1.name, machine1.flake)
# Get the initial state of the flake directory after generation
def get_file_mtimes(path: str) -> dict[str, float]:
"""Get modification times of all files in a directory tree."""
mtimes = {}
for root, _dirs, files in os.walk(path):
# Skip .git directory
if ".git" in root:
continue
for file in files:
filepath = Path(root) / file
mtimes[str(filepath)] = filepath.stat().st_mtime
return mtimes
initial_mtimes = get_file_mtimes(str(flake.path))
# First check_vars should not write anything
assert check_vars(machine1.name, machine1.flake), (
"machine1 has already generated vars, so check_vars should return True\n"
f"Check result:\n{check_vars(machine1.name, machine1.flake)}"
)
# Verify no files were modified
after_check_mtimes = get_file_mtimes(str(flake.path))
assert initial_mtimes == after_check_mtimes, (
"check_vars should not modify any files when vars are already valid"
)
assert not check_vars(machine2.name, machine2.flake), (
"machine2 has not generated vars yet, so check_vars should return False"
)
# Verify no files were modified
after_check_mtimes_2 = get_file_mtimes(str(flake.path))
assert initial_mtimes == after_check_mtimes_2, (
"check_vars should not modify any files when vars are not valid"
)
cli.run(["vars", "generate", "--flake", str(flake.path), "machine2"])
assert check_vars(machine2.name, machine2.flake)
m1_sops_store = sops.SecretStore(machine1.flake)
m2_sops_store = sops.SecretStore(machine2.flake)
# Create generators with machine context for testing

View File

@@ -3,6 +3,7 @@ import logging
from typing import TYPE_CHECKING
from clan_cli.completions import add_dynamic_completer, complete_machines
from clan_cli.vars.secret_modules import sops
from clan_lib.errors import ClanError
from clan_lib.flake import Flake, require_flake
from clan_lib.machines.machines import Machine
@@ -26,6 +27,26 @@ class VarStatus:
self.unfixed_secret_vars = unfixed_secret_vars
self.invalid_generators = invalid_generators
def text(self) -> str:
log = ""
if self.missing_secret_vars:
log += "Missing secret vars:\n"
for var in self.missing_secret_vars:
log += f" - {var.id}\n"
if self.missing_public_vars:
log += "Missing public vars:\n"
for var in self.missing_public_vars:
log += f" - {var.id}\n"
if self.unfixed_secret_vars:
log += "Unfixed secret vars:\n"
for var in self.unfixed_secret_vars:
log += f" - {var.id}\n"
if self.invalid_generators:
log += "Invalid generators (outdated invalidation hash):\n"
for gen in self.invalid_generators:
log += f" - {gen}\n"
return log if log else "All vars are present and valid."
def vars_status(
machine_name: str,
@@ -66,15 +87,32 @@ def vars_status(
f"Secret var '{file.name}' for service '{generator.name}' in machine {machine.name} is missing.",
)
missing_secret_vars.append(file)
if (
isinstance(machine.secret_vars_store, sops.SecretStore)
and generator.share
and file.exists
and not machine.secret_vars_store.machine_has_access(
generator=generator,
secret_name=file.name,
machine=machine.name,
)
):
msg = (
f"Secret var '{generator.name}/{file.name}' is marked for deployment to machine '{machine.name}', but the machine does not have access to it.\n"
f"Run 'clan vars generate {machine.name}' to fix this.\n"
)
machine.info(msg)
missing_secret_vars.append(file)
else:
msg = machine.secret_vars_store.health_check(
health_msg = machine.secret_vars_store.health_check(
machine=machine.name,
generators=[generator],
file_name=file.name,
)
if msg:
if health_msg is not None:
machine.info(
f"Secret var '{file.name}' for service '{generator.name}' in machine {machine.name} needs update: {msg}",
f"Secret var '{file.name}' for service '{generator.name}' in machine {machine.name} needs update: {health_msg}",
)
unfixed_secret_vars.append(file)
@@ -106,6 +144,7 @@ def check_vars(
generator_name: None | str = None,
) -> bool:
status = vars_status(machine_name, flake, generator_name=generator_name)
log.info(f"Check results for machine '{machine_name}': \n{status.text()}")
return not (
status.missing_secret_vars
or status.missing_public_vars

View File

@@ -259,6 +259,10 @@ class Generator:
_secret_store=sec_store,
)
# link generator to its files
for file in files:
file.generator(generator)
if share:
# For shared generators, check if we already created it
existing = next(

View File

@@ -98,7 +98,8 @@ class SecretStore(StoreBase):
def machine_has_access(
self, generator: Generator, secret_name: str, machine: str
) -> bool:
self.ensure_machine_key(machine)
if not has_machine(self.flake.path, machine):
return False
key_dir = sops_machines_folder(self.flake.path) / machine
return self.key_has_access(key_dir, generator, secret_name)
@@ -156,8 +157,6 @@ class SecretStore(StoreBase):
else:
continue
if file.secret and self.exists(generator, file.name):
if file.deploy:
self.ensure_machine_has_access(generator, file.name, machine)
needs_update, msg = self.needs_fix(generator, file.name, machine)
if needs_update:
outdated.append((generator.name, file.name, msg))
@@ -283,6 +282,7 @@ class SecretStore(StoreBase):
) -> None:
if self.machine_has_access(generator, name, machine):
return
self.ensure_machine_key(machine)
secret_folder = self.secret_path(generator, name)
add_secret(
self.flake.path,

View File

@@ -119,6 +119,9 @@ def run_machine_hardware_info_init(
if opts.debug:
cmd += ["--debug"]
# Add nix options to nixos-anywhere
cmd.extend(opts.machine.flake.nix_options or [])
cmd += [target_host.target]
cmd = nix_shell(
["nixos-anywhere"],

View File

@@ -5,6 +5,7 @@ from clan_cli.vars import graph
from clan_cli.vars.generator import Generator
from clan_cli.vars.graph import requested_closure
from clan_cli.vars.migration import check_can_migrate, migrate_files
from clan_cli.vars.secret_modules import sops
from clan_lib.api import API
from clan_lib.errors import ClanError
@@ -152,15 +153,15 @@ def run_generators(
if not machines:
msg = "At least one machine must be provided"
raise ClanError(msg)
all_generators = get_generators(machines, full_closure=True)
if isinstance(generators, list):
# List of generator names - use them exactly as provided
if len(generators) == 0:
return
all_generators = get_generators(machines, full_closure=True)
generator_objects = [g for g in all_generators if g.key.name in generators]
generators_to_run = [g for g in all_generators if g.key.name in generators]
else:
# None or single string - use get_generators with closure parameter
generator_objects = get_generators(
generators_to_run = get_generators(
machines,
full_closure=full_closure,
generator_name=generators,
@@ -170,13 +171,30 @@ def run_generators(
# TODO: make this more lazy and ask for every generator on execution
if callable(prompt_values):
prompt_values = {
generator.name: prompt_values(generator) for generator in generator_objects
generator.name: prompt_values(generator) for generator in generators_to_run
}
# execute health check
for machine in machines:
_ensure_healthy(machine=machine)
# ensure all selected machines have access to all selected shared generators
for machine in machines:
# This is only relevant for the sops store
# TODO: improve store abstraction to use Protocols and introduce a proper SecretStore interface
if not isinstance(machine.secret_vars_store, sops.SecretStore):
continue
for generator in all_generators:
if generator.share:
for file in generator.files:
if not file.secret or not file.exists:
continue
machine.secret_vars_store.ensure_machine_has_access(
generator,
file.name,
machine.name,
)
# get the flake via any machine (they are all the same)
flake = machines[0].flake
@@ -188,13 +206,13 @@ def run_generators(
# preheat the select cache, to reduce repeated calls during execution
selectors = []
for generator in generator_objects:
for generator in generators_to_run:
machine = get_generator_machine(generator)
selectors.append(generator.final_script_selector(machine.name))
flake.precache(selectors)
# execute generators
for generator in generator_objects:
for generator in generators_to_run:
machine = get_generator_machine(generator)
if check_can_migrate(machine, generator):
migrate_files(machine, generator)