Compare commits

..

32 Commits

Author SHA1 Message Date
55e343c43e some ai generated work to get services working for macos 2025-11-08 11:48:34 +08:00
8bef2e6b2e Drop macOS-specific remote-program param from nix copy command 2025-11-06 11:11:57 +08:00
clan-bot
8eaca289ad Merge pull request 'Update treefmt-nix' (#5745) from update-treefmt-nix into main 2025-11-05 20:08:44 +00:00
clan-bot
6f2d482187 Merge pull request 'Update treefmt-nix in devFlake' (#5756) from update-devFlake-treefmt-nix into main 2025-11-05 20:08:18 +00:00
clan-bot
4c30418f12 Update treefmt-nix in devFlake 2025-11-05 20:02:31 +00:00
clan-bot
3c66094d89 Update treefmt-nix 2025-11-05 20:02:02 +00:00
clan-bot
a8f180f8da Merge pull request 'Update treefmt-nix in devFlake' (#5753) from update-devFlake-treefmt-nix into main 2025-11-05 15:09:20 +00:00
clan-bot
e22218d589 Merge pull request 'Update nixpkgs-dev in devFlake' (#5752) from update-devFlake-nixpkgs-dev into main 2025-11-05 15:09:02 +00:00
clan-bot
228c60bcf7 Update treefmt-nix in devFlake 2025-11-05 15:02:30 +00:00
clan-bot
ed2b2d9df9 Update nixpkgs-dev in devFlake 2025-11-05 15:02:24 +00:00
Kenji Berthold
7e2a127d11 Merge pull request 'pkgs/clan-vm-manager: wrapGAppsHook -> wrapGAppsHook3' (#5748) from ke-wrap-gapps into main
Reviewed-on: https://git.clan.lol/clan/clan-core/pulls/5748
2025-11-05 12:27:32 +00:00
a-kenji
8c8bacb1ab pkgs/clan-vm-manager: wrapGAppsHook -> wrapGAppsHook3 2025-11-05 12:50:48 +01:00
clan-bot
8ba71144b6 Merge pull request 'Update nix-darwin' (#5744) from update-nix-darwin into main 2025-11-05 10:04:33 +00:00
clan-bot
7f2d15c8a1 Update nix-darwin 2025-11-05 10:01:31 +00:00
clan-bot
486463c793 Merge pull request 'Update treefmt-nix in devFlake' (#5746) from update-devFlake-treefmt-nix into main 2025-11-05 05:16:48 +00:00
clan-bot
071603d688 Update treefmt-nix in devFlake 2025-11-05 05:02:33 +00:00
clan-bot
c612561ec3 Merge pull request 'Update disko' (#5742) from update-disko into main 2025-11-05 00:10:58 +00:00
clan-bot
a88cd2be40 Update disko 2025-11-05 00:01:25 +00:00
clan-bot
7140b417d3 Merge pull request 'Update nixos-facter-modules' (#5738) from update-nixos-facter-modules into main 2025-11-04 20:10:12 +00:00
clan-bot
c7a42cca7f Update nixos-facter-modules 2025-11-04 20:01:33 +00:00
clan-bot
29ca23c629 Merge pull request 'Update nixpkgs-dev in devFlake' (#5740) from update-devFlake-nixpkgs-dev into main 2025-11-04 15:08:00 +00:00
clan-bot
cd7210de1b Update nixpkgs-dev in devFlake 2025-11-04 15:02:30 +00:00
Mic92
c2ebafcf92 Merge pull request 'zfsUnstable -> zfs_unstable' (#5737) from zfs-fix into main
Reviewed-on: https://git.clan.lol/clan/clan-core/pulls/5737
2025-11-04 14:46:19 +00:00
Jörg Thalheim
2a9e4e7860 zfsUnstable -> zfs_unstable
nixpkgs has a new path for this.
2025-11-04 15:41:50 +01:00
hsjobeki
43a7652624 Merge pull request 'App: init delete machine' (#5734) from jpy-scene into main
Reviewed-on: https://git.clan.lol/clan/clan-core/pulls/5734
2025-11-04 11:03:26 +00:00
Johannes Kirschbauer
65fd25bc2e App: init delete machine 2025-11-04 11:37:29 +01:00
Kenji Berthold
f89ea15749 Merge pull request 'pkgs/cli/vars: Add dependency validation' (#5727) from ke-vars-dependency-validation into main
Reviewed-on: https://git.clan.lol/clan/clan-core/pulls/5727
Reviewed-by: Mic92 <joerg@thalheim.io>
2025-11-04 09:55:55 +00:00
hsjobeki
19d4833be8 Merge pull request 'UI: clean up unused scene code' (#5730) from jpy-scene into main
Reviewed-on: https://git.clan.lol/clan/clan-core/pulls/5730
2025-11-04 08:39:04 +00:00
Johannes Kirschbauer
82f12eaf6f UI: clean up unused scene code 2025-11-04 09:34:17 +01:00
clan-bot
0b5a8e98de Merge pull request 'Update nix-darwin' (#5729) from update-nix-darwin into main 2025-11-04 05:05:29 +00:00
clan-bot
c5bddada05 Update nix-darwin 2025-11-04 05:01:02 +00:00
a-kenji
83f78d9f59 pkgs/cli/vars: Add dependency validation
Add explicit dependency validation to vars, so that proper error
messages can be surfaced to the user.

Instead of:
```
Traceback (most recent call last):
  File "/home/lhebendanz/Projects/clan-core/pkgs/clan-cli/clan_lib/async_run/__init__.py", line 154, in run
    self.result = AsyncResult(_result=self.function(*self.args, **self.kwargs))
                                      ~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "/home/lhebendanz/Projects/clan-core/pkgs/clan-cli/clan_cli/machines/update.py", line 62, in run_update_wit
h_network
    run_machine_update(
    ~~~~~~~~~~~~~~~~~~^
        machine=machine,
        ^^^^^^^^^^^^^^^^
    ...<2 lines>...
        upload_inputs=upload_inputs,
        ^^^^^^^^^^^^^^^^^^^^^^^^^^^^
    )
    ^
  File "/home/lhebendanz/Projects/clan-core/pkgs/clan-cli/clan_lib/machines/update.py", line 158, in run_machine_u
pdate
    run_generators([machine], generators=None, full_closure=False)
    ~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "/home/lhebendanz/Projects/clan-core/pkgs/clan-cli/clan_lib/vars/generate.py", line 156, in run_generators
    all_generators = get_generators(machines, full_closure=True)
  File "/home/lhebendanz/Projects/clan-core/pkgs/clan-cli/clan_lib/vars/generate.py", line 50, in get_generators
    all_generators_list = Generator.get_machine_generators(
        all_machines,
        flake,
        include_previous_values=include_previous_values,
    )
  File "/home/lhebendanz/Projects/clan-core/pkgs/clan-cli/clan_cli/vars/generator.py", line 246, in get_machine_ge
nerators
    if generators_data[dep]["share"]
       ~~~~~~~~~~~~~~~^^^^^
KeyError: 'bla'
```

We now get:
```
$> Generator 'my_generator' on machine 'my_machine' depends on generator 'non_existing_generator', but 'non_existing_generator' does not exist
```

Closes: #5698
2025-11-03 14:00:38 +01:00
18 changed files with 377 additions and 107 deletions

105
PLAN.md Normal file
View File

@@ -0,0 +1,105 @@
Title: Add nix-darwin Support to Clan Services (clan.service)
Summary
- Extend clan services so authors can ship a darwinModule alongside nixosModule.
- Wire service results into darwin machines the same way we already do for NixOS.
- Keep full backward compatibility: existing services that only export nixosModule continue to work unchanged.
Goals
- Service authors can return perInstance/perMachine darwinModule similarly to nixosModule.
- Darwin machines import the correct aggregated service module outputs.
- Documentation describes the new result attribute and authoring pattern.
Non-Goals (initial phase)
- No rework of service settings schema or UI beyond documenting darwinModule.
- No OS-specific extraModules handling (we will keep extraModules affecting only nixos aggregation initially to avoid breaking existing users).
- No sweeping updates of all services; we'll add a concrete example (users) and leave others to be migrated incrementally.
Design Overview
- Service result attributes gain darwinModule in both roles.<name>.perInstance and perMachine results.
- The service aggregator composes both nixosModule and darwinModule per machine.
- The machine wiring picks the correct module based on the machine's class (nixos vs darwin).
Changes By File (with anchors)
- lib/inventory/distributed-service/service-module.nix
- Add darwinModule to per-instance return type next to nixosModule.
- Where: lib/inventory/distributed-service/service-module.nix:536 (options.nixosModule = mkOption { … })
- Action: Add sibling options.darwinModule = mkOption { type = types.deferredModule; default = { }; description = "A single nix-darwin module for the instance."; }.
- Add darwinModule to per-machine return type next to nixosModule.
- Where: lib/inventory/distributed-service/service-module.nix:666 (options.nixosModule = mkOption { … })
- Action: Add sibling options.darwinModule = mkOption { type = types.deferredModule; default = { }; description = "A single nix-darwin module for the machine."; }.
- Compose darwinModule per (role, instance, machine) similarly to nixosModule.
- Where: lib/inventory/distributed-service/service-module.nix:878893 (wrapper that builds nixosModule = { imports = [ instanceRes.nixosModule ] ++ extraModules … })
- Action: Build darwinModule = { imports = [ instanceRes.darwinModule ]; }.
Note: Do NOT include roles.*.extraModules here for darwin initially to avoid importing nixos-specific modules into darwin eval.
- Aggregate darwinModules in final result.
- Where: lib/inventory/distributed-service/service-module.nix:958993 (instanceResults builder and final nixosModule = { imports = [ machineResult.nixosModule ] ++ instanceResults.nixosModules; })
- Actions:
- Track instanceResults.darwinModules in parallel to instanceResults.nixosModules.
- Add final darwinModule = { imports = [ machineResult.darwinModule ] ++ instanceResults.darwinModules; }.
- modules/clan/distributed-services.nix
- Feed the right service module to each machine based on machineClass.
- Where: modules/clan/distributed-services.nix:147152
- Current: machineImports = fold over services, collecting serviceModule.result.final.${machineName}.nixosModule
- Change: If inventory.machines.${machineName}.machineClass == "darwin" then collect .darwinModule else .nixosModule.
- modules/clan/module.nix
- Ensure machineImports are included for both nixos and darwin machines.
- Where: modules/clan/module.nix:195 (currently ++ lib.optionals (_class == "nixos") (v.machineImports or [ ]))
- Change: Include machineImports for darwin as well (or remove the conditional and always append v.machineImports).
- docs/site/decisions/01-Clan-Modules.md
- Document darwinModule as a result attribute.
- Where: docs/site/decisions/01-Clan-Modules.md:129146 (Result attributes and perMachine text mentioning only nixosModule)
- Change: Add “darwinModule” to the Result attributes list and examples, mirroring nixosModule.
- Example service update: clanServices/users/default.nix
- Add perInstance.darwinModule and perMachine.darwinModule mirroring nixos behavior where feasible.
- Where: clanServices/users/default.nix:2890 (roles.default.perInstance.nixosModule), 148153 (perMachine.nixosModule)
- Change: Provide minimal darwinModule that sets users.users.<name> (and any safe, cross-platform bits). If some nixos-only settings (e.g., systemd hooks) exist, keep them nixos-only.
Implementation Steps
1) Service API extensions
- Add options.darwinModule to roles.*.perInstance and perMachine (see anchors above).
- Keep defaults to {} so services can omit it safely.
2) Aggregation logic
- result.allRoles: emit darwinModule wrapper from instanceRes.darwinModule.
- result.final:
- Collect instanceResults.darwinModules alongside instanceResults.nixosModules.
- Produce final darwinModule with [ machineResult.darwinModule ] ++ instanceResults.darwinModules.
- Leave exports logic unchanged.
3) Machine wiring
- modules/clan/distributed-services.nix: choose .darwinModule vs .nixosModule based on inventory.machines.<name>.machineClass.
- modules/clan/module.nix: include v.machineImports for both OS classes.
4) Example migration (users)
- Add darwinModule in clanServices/users/default.nix.
- Validate that users service evaluates for a darwin machine and does not reference nixos-specific options.
5) Documentation
- Update ADR docs to mention darwinModule in Result attributes and examples.
- Add a short “Authoring for Darwin” snippet showing perInstance/perMachine returning both modules.
6) Tests and verification
- Unit-level: extend lib/inventory/distributed-service/tests to assert presence of result.final.<machine>.darwinModule when perInstance/perMachine return it.
- Integration-level: evaluate a sample darwin machine (e.g., inventory.json has test-darwin-machine) and assert clan.darwinModules.<machine> includes the aggregated module.
- Sanity: ensure existing nixos-only services still evaluate unchanged.
Backward Compatibility
- Existing services that only return nixosModule continue to work.
- Darwin machines won't import service modules until services provide darwinModule, avoiding accidental breakage.
- extraModules remain applied only to nixos aggregation initially to prevent nixos-only modules from breaking darwin evaluation. We can add OS-specific extraModules in a follow-up (e.g., roles.*.extraModulesDarwin).
Acceptance Criteria
- Services can return darwinModule in perInstance/perMachine without errors.
- Darwin machines import aggregated darwinModule outputs from all participating services.
- nixos behavior remains unchanged for existing services.
- Documentation updated to reflect the new attribute and example.
Rollout Notes
- Start by updating clanServices/users as a working example.
- Encourage service authors to add darwinModule incrementally; no global migration is required.

View File

@@ -120,6 +120,63 @@
share = settings.share;
script =
(
if settings.prompt then
''
prompt_value=$(cat "$prompts"/user-password)
if [[ -n "''${prompt_value-}" ]]; then
echo "$prompt_value" | tr -d "\n" > "$out"/user-password
else
xkcdpass --numwords 4 --delimiter - --count 1 | tr -d "\n" > "$out"/user-password
fi
''
else
''
xkcdpass --numwords 4 --delimiter - --count 1 | tr -d "\n" > "$out"/user-password
''
)
+ ''
mkpasswd -s -m sha-512 < "$out"/user-password | tr -d "\n" > "$out"/user-password-hash
'';
};
};
darwinModule =
{
config,
pkgs,
lib,
...
}:
{
# For darwin, we currently only generate and manage the password secret.
# Hooking into actual macOS account management may be added later.
clan.core.vars.generators."user-password-${settings.user}" = {
files.user-password-hash.neededFor = "users";
files.user-password.deploy = false;
prompts.user-password = lib.mkIf settings.prompt {
display = {
group = settings.user;
label = "password";
required = false;
helperText = ''
Your password will be encrypted and stored securely using the secret store you've configured.
'';
};
type = "hidden";
persist = true;
description = "Leave empty to generate automatically";
};
runtimeInputs = [
pkgs.coreutils
pkgs.xkcdpass
pkgs.mkpasswd
];
share = settings.share;
script =
(
if settings.prompt then
@@ -149,5 +206,7 @@
# Immutable users to ensure that this module has exclusive control over the users.
users.mutableUsers = false;
};
# No-op for darwin by default; can be extended later if needed.
darwinModule = { };
};
}

12
devFlake/flake.lock generated
View File

@@ -105,11 +105,11 @@
},
"nixpkgs-dev": {
"locked": {
"lastModified": 1762168314,
"narHash": "sha256-+DX6mIF47gRGoK0mqkTg1Jmcjcup0CAXJFHVkdUx8YA=",
"lastModified": 1762328495,
"narHash": "sha256-IUZvw5kvLiExApP9+SK/styzEKSqfe0NPclu9/z85OQ=",
"owner": "NixOS",
"repo": "nixpkgs",
"rev": "94fc102d2c15d9c1a861e59de550807c65358e1b",
"rev": "4c621660e393922cf68cdbfc40eb5a2d54d3989a",
"type": "github"
},
"original": {
@@ -208,11 +208,11 @@
"nixpkgs": []
},
"locked": {
"lastModified": 1761311587,
"narHash": "sha256-Msq86cR5SjozQGCnC6H8C+0cD4rnx91BPltZ9KK613Y=",
"lastModified": 1762366246,
"narHash": "sha256-3xc/f/ZNb5ma9Fc9knIzEwygXotA+0BZFQ5V5XovSOQ=",
"owner": "numtide",
"repo": "treefmt-nix",
"rev": "2eddae033e4e74bf581c2d1dfa101f9033dbd2dc",
"rev": "a82c779ca992190109e431d7d680860e6723e048",
"type": "github"
},
"original": {

24
flake.lock generated
View File

@@ -31,11 +31,11 @@
]
},
"locked": {
"lastModified": 1761899396,
"narHash": "sha256-XOpKBp6HLzzMCbzW50TEuXN35zN5WGQREC7n34DcNMM=",
"lastModified": 1762276996,
"narHash": "sha256-TtcPgPmp2f0FAnc+DMEw4ardEgv1SGNR3/WFGH0N19M=",
"owner": "nix-community",
"repo": "disko",
"rev": "6f4cf5abbe318e4cd1e879506f6eeafd83f7b998",
"rev": "af087d076d3860760b3323f6b583f4d828c1ac17",
"type": "github"
},
"original": {
@@ -71,11 +71,11 @@
]
},
"locked": {
"lastModified": 1762039661,
"narHash": "sha256-oM5BwAGE78IBLZn+AqxwH/saqwq3e926rNq5HmOulkc=",
"lastModified": 1762304480,
"narHash": "sha256-ikVIPB/ea/BAODk6aksgkup9k2jQdrwr4+ZRXtBgmSs=",
"owner": "nix-darwin",
"repo": "nix-darwin",
"rev": "c3c8c9f2a5ed43175ac4dc030308756620e6e4e4",
"rev": "b8c7ac030211f18bd1f41eae0b815571853db7a2",
"type": "github"
},
"original": {
@@ -99,11 +99,11 @@
},
"nixos-facter-modules": {
"locked": {
"lastModified": 1761137276,
"narHash": "sha256-4lDjGnWRBLwqKQ4UWSUq6Mvxu9r8DSqCCydodW/Jsi8=",
"lastModified": 1762264948,
"narHash": "sha256-iaRf6n0KPl9hndnIft3blm1YTAyxSREV1oX0MFZ6Tk4=",
"owner": "nix-community",
"repo": "nixos-facter-modules",
"rev": "70bcd64225d167c7af9b475c4df7b5abba5c7de8",
"rev": "fa695bff9ec37fd5bbd7ee3181dbeb5f97f53c96",
"type": "github"
},
"original": {
@@ -181,11 +181,11 @@
]
},
"locked": {
"lastModified": 1761311587,
"narHash": "sha256-Msq86cR5SjozQGCnC6H8C+0cD4rnx91BPltZ9KK613Y=",
"lastModified": 1762366246,
"narHash": "sha256-3xc/f/ZNb5ma9Fc9knIzEwygXotA+0BZFQ5V5XovSOQ=",
"owner": "numtide",
"repo": "treefmt-nix",
"rev": "2eddae033e4e74bf581c2d1dfa101f9033dbd2dc",
"rev": "a82c779ca992190109e431d7d680860e6723e048",
"type": "github"
},
"original": {

View File

@@ -561,6 +561,15 @@ in
```
'';
};
options.darwinModule = mkOption {
type = types.deferredModule;
default = { };
description = ''
A single nix-darwin module for the instance.
This mirrors `nixosModule` but targets darwin machines.
'';
};
})
];
};
@@ -686,6 +695,15 @@ in
```
'';
};
options.darwinModule = mkOption {
type = types.deferredModule;
default = { };
description = ''
A single nix-darwin module for the machine.
This mirrors `nixosModule` but targets darwin machines.
'';
};
})
];
};
@@ -890,6 +908,11 @@ in
lib.setDefaultModuleLocation "via inventory.instances.${instanceName}.roles.${roleName}" s
) instanceCfg.roles.${roleName}.extraModules);
};
darwinModule = {
imports = [
instanceRes.darwinModule
];
};
}
) instanceCfg.roles.${roleName}.machines or { };
@@ -979,11 +1002,24 @@ in
else
instanceAcc.nixosModules
);
darwinModules = (
if instance.allMachines.${machineName}.darwinModule or { } != { } then
instanceAcc.darwinModules
++ [
(lib.setDefaultModuleLocation
"Via instances.${instanceName}.roles.${roleName}.machines.${machineName}"
instance.allMachines.${machineName}.darwinModule
)
]
else
instanceAcc.darwinModules
);
}
) roleAcc role.allInstances
)
{
nixosModules = [ ];
darwinModules = [ ];
# ...
}
config.result.allRoles;
@@ -1021,6 +1057,12 @@ in
]
++ instanceResults.nixosModules;
};
darwinModule = {
imports = [
(lib.setDefaultModuleLocation "Via ${config.manifest.name}.perMachine - machine='${machineName}';" machineResult.darwinModule)
]
++ instanceResults.darwinModules;
};
}
) config.result.allMachines;
};

View File

@@ -145,10 +145,23 @@ in
internal = true;
type = types.raw;
default = lib.mapAttrs (machineName: _: {
# This is the list of nixosModules for each machine
# This is the list of service modules for each machine (nixos or darwin)
machineImports = lib.foldlAttrs (
acc: _module_ident: serviceModule:
acc ++ [ serviceModule.result.final.${machineName}.nixosModule or { } ]
let
modName =
if inventory.machines.${machineName}.machineClass == "darwin" then
"darwinModule"
else
"nixosModule";
finalForMachine = serviceModule.result.final.${machineName} or { };
picked =
if builtins.hasAttr modName finalForMachine then
(builtins.getAttr modName finalForMachine)
else
{ };
in
acc ++ [ picked ]
) [ ] config._services.mappedServices;
}) inventory.machines or { };
};

View File

@@ -192,7 +192,7 @@ in
# - darwinModules (_class = darwin)
(lib.optionalAttrs (clan-core ? "${_class}Modules") clan-core."${_class}Modules".clanCore)
]
++ lib.optionals (_class == "nixos") (v.machineImports or [ ]);
++ (v.machineImports or [ ]);
# default hostname
networking.hostName = lib.mkDefault name;

View File

@@ -5,7 +5,7 @@
}:
{
# If we also need zfs, we can use the unstable version as we otherwise don't have a new enough kernel version
boot.zfs.package = pkgs.zfsUnstable;
boot.zfs.package = pkgs.zfs_unstable or pkgs.zfsUnstable;
# Enable bcachefs support
boot.supportedFilesystems.bcachefs = lib.mkDefault true;

View File

@@ -6,7 +6,7 @@
}:
let
isUnstable = config.boot.zfs.package == pkgs.zfsUnstable;
isUnstable = config.boot.zfs.package == pkgs.zfs_unstable or pkgs.zfsUnstable;
zfsCompatibleKernelPackages = lib.filterAttrs (
name: kernelPackages:
(builtins.match "linux_[0-9]+_[0-9]+" name) != null

View File

@@ -4,6 +4,7 @@
padding: 8px;
flex-direction: column;
align-items: flex-start;
gap: 4px;
border-radius: 5px;
border: 1px solid var(--clr-border-def-2, #d8e8eb);

View File

@@ -1,11 +1,13 @@
import { onCleanup, onMount } from "solid-js";
import styles from "./ContextMenu.module.css";
import { Typography } from "../Typography/Typography";
import { Divider } from "../Divider/Divider";
import Icon from "../Icon/Icon";
export const Menu = (props: {
x: number;
y: number;
onSelect: (option: "move") => void;
onSelect: (option: "move" | "delete") => void;
close: () => void;
intersect: string[];
}) => {
@@ -54,13 +56,31 @@ export const Menu = (props: {
>
<Typography
hierarchy="label"
size="s"
weight="bold"
color={currentMachine() ? "primary" : "quaternary"}
>
Move
</Typography>
</li>
<Divider />
<li
class={styles.item}
aria-disabled={!currentMachine()}
onClick={() => {
console.log("Delete clicked", currentMachine());
props.onSelect("delete");
props.close();
}}
>
<Typography
hierarchy="label"
color={currentMachine() ? "primary" : "quaternary"}
>
<span class="flex items-center gap-2">
Delete
<Icon icon="Trash" font-size="inherit" />
</span>
</Typography>
</li>
</ul>
);
};

View File

@@ -71,7 +71,7 @@ const Machines = () => {
}
const result = ctx.machinesQuery.data;
return Object.keys(result).length > 0 ? result : undefined;
return Object.keys(result).length > 0 ? result : [];
};
return (
@@ -117,7 +117,7 @@ const Machines = () => {
}
>
<nav>
<For each={Object.entries(machines()!)}>
<For each={Object.entries(machines())}>
{([id, machine]) => (
<MachineRoute
clanURI={clanURI}

View File

@@ -206,8 +206,8 @@ const ClanSceneController = (props: RouteSectionProps) => {
<AddMachine
onCreated={async (id) => {
const promise = currentPromise();
await ctx.machinesQuery.refetch();
if (promise) {
await ctx.machinesQuery.refetch();
promise.resolve({ id });
setCurrentPromise(null);
}

View File

@@ -18,12 +18,12 @@ export class MachineManager {
private disposeRoot: () => void;
private machinePositionsSignal: Accessor<SceneData>;
private machinePositionsSignal: Accessor<SceneData | undefined>;
constructor(
scene: THREE.Scene,
registry: ObjectRegistry,
machinePositionsSignal: Accessor<SceneData>,
machinePositionsSignal: Accessor<SceneData | undefined>,
machinesQueryResult: MachinesQueryResult,
selectedIds: Accessor<Set<string>>,
setMachinePos: (id: string, position: [number, number] | null) => void,
@@ -39,8 +39,9 @@ export class MachineManager {
if (!machinesQueryResult.data) return;
const actualIds = Object.keys(machinesQueryResult.data);
const machinePositions = machinePositionsSignal();
// Remove stale
const machinePositions = machinePositionsSignal() || {};
for (const id of Object.keys(machinePositions)) {
if (!actualIds.includes(id)) {
console.log("Removing stale machine", id);
@@ -61,8 +62,7 @@ export class MachineManager {
// Effect 2: sync store → scene
//
createEffect(() => {
const positions = machinePositionsSignal();
if (!positions) return;
const positions = machinePositionsSignal() || {};
// Remove machines from scene
for (const [id, repr] of this.machines) {
@@ -103,7 +103,7 @@ export class MachineManager {
nextGridPos(): [number, number] {
const occupiedPositions = new Set(
Object.values(this.machinePositionsSignal()).map((data) =>
Object.values(this.machinePositionsSignal() || {}).map((data) =>
keyFromPos(data.position),
),
);

View File

@@ -32,6 +32,9 @@ import {
} from "./highlightStore";
import { createMachineMesh } from "./MachineRepr";
import { useClanContext } from "@/src/routes/Clan/Clan";
import client from "@api/clan/client";
import { navigateToClan } from "../hooks/clan";
import { useNavigate } from "@solidjs/router";
function intersectMachines(
event: MouseEvent,
@@ -100,7 +103,7 @@ export function CubeScene(props: {
onCreate: () => Promise<{ id: string }>;
selectedIds: Accessor<Set<string>>;
onSelect: (v: Set<string>) => void;
sceneStore: Accessor<SceneData>;
sceneStore: Accessor<SceneData | undefined>;
setMachinePos: (machineId: string, pos: [number, number] | null) => void;
isLoading: boolean;
clanURI: string;
@@ -131,9 +134,6 @@ export function CubeScene(props: {
let machineManager: MachineManager;
const [positionMode, setPositionMode] = createSignal<"grid" | "circle">(
"grid",
);
// Managed by controls
const [isDragging, setIsDragging] = createSignal(false);
@@ -142,10 +142,6 @@ export function CubeScene(props: {
// TODO: Unify this with actionRepr position
const [cursorPosition, setCursorPosition] = createSignal<[number, number]>();
const [cameraInfo, setCameraInfo] = createSignal({
position: { x: 0, y: 0, z: 0 },
spherical: { radius: 0, theta: 0, phi: 0 },
});
// Context menu state
const [contextOpen, setContextOpen] = createSignal(false);
const [menuPos, setMenuPos] = createSignal<{ x: number; y: number }>();
@@ -157,7 +153,6 @@ export function CubeScene(props: {
const BASE_SIZE = 0.9; // Height of the cube above the ground
const CUBE_SIZE = BASE_SIZE / 1.5; //
const BASE_HEIGHT = 0.05; // Height of the cube above the ground
const CUBE_Y = 0 + CUBE_SIZE / 2 + BASE_HEIGHT / 2; // Y position of the cube above the ground
const CUBE_SEGMENT_HEIGHT = CUBE_SIZE / 1;
const FLOOR_COLOR = 0xcdd8d9;
@@ -201,6 +196,8 @@ export function CubeScene(props: {
const grid = new THREE.GridHelper(1000, 1000 / 1, 0xe1edef, 0xe1edef);
const navigate = useNavigate();
onMount(() => {
// Scene setup
scene = new THREE.Scene();
@@ -311,21 +308,12 @@ export function CubeScene(props: {
bgCamera,
);
// controls.addEventListener("start", (e) => {
// setIsDragging(true);
// });
// controls.addEventListener("end", (e) => {
// setIsDragging(false);
// });
// Lighting
const ambientLight = new THREE.AmbientLight(0xd9f2f7, 0.72);
scene.add(ambientLight);
const directionalLight = new THREE.DirectionalLight(0xffffff, 3.5);
// scene.add(new THREE.DirectionalLightHelper(directionalLight));
// scene.add(new THREE.CameraHelper(camera));
const lightPos = new THREE.Spherical(
15,
initialSphericalCameraPosition.phi - Math.PI / 8,
@@ -412,30 +400,6 @@ export function CubeScene(props: {
actionMachine = createActionMachine();
scene.add(actionMachine);
// const spherical = new THREE.Spherical();
// spherical.setFromVector3(camera.position);
// Function to update camera info
const updateCameraInfo = () => {
const spherical = new THREE.Spherical();
spherical.setFromVector3(camera.position);
setCameraInfo({
position: {
x: Math.round(camera.position.x * 100) / 100,
y: Math.round(camera.position.y * 100) / 100,
z: Math.round(camera.position.z * 100) / 100,
},
spherical: {
radius: Math.round(spherical.radius * 100) / 100,
theta: Math.round(spherical.theta * 100) / 100,
phi: Math.round(spherical.phi * 100) / 100,
},
});
};
// Initial camera info update
updateCameraInfo();
createEffect(
on(ctx.worldMode, (mode) => {
if (mode === "create") {
@@ -661,7 +625,8 @@ export function CubeScene(props: {
});
const snapToGrid = (point: THREE.Vector3) => {
if (!props.sceneStore) return;
const store = props.sceneStore() || {};
// Snap to grid
const snapped = new THREE.Vector3(
Math.round(point.x / GRID_SIZE) * GRID_SIZE,
@@ -670,7 +635,7 @@ export function CubeScene(props: {
);
// Skip snapping if there's already a cube at this position
const positions = Object.entries(props.sceneStore());
const positions = Object.entries(store);
const intersects = positions.some(
([_id, p]) => p.position[0] === snapped.x && p.position[1] === snapped.z,
);
@@ -694,7 +659,6 @@ export function CubeScene(props: {
};
const onAddClick = (event: MouseEvent) => {
setPositionMode("grid");
ctx.setWorldMode("create");
renderLoop.requestRender();
};
@@ -706,9 +670,6 @@ export function CubeScene(props: {
if (!actionRepr) return;
actionRepr.visible = true;
// (actionRepr.material as THREE.MeshPhongMaterial).emissive.set(
// worldMode() === "create" ? CREATE_BASE_EMISSIVE : MOVE_BASE_EMISSIVE,
// );
// Calculate mouse position in normalized device coordinates
// (-1 to +1) for both components
@@ -736,23 +697,38 @@ export function CubeScene(props: {
}
}
};
const handleMenuSelect = (mode: "move") => {
const handleMenuSelect = async (mode: "move" | "delete") => {
const firstId = menuIntersection()[0];
if (!firstId) {
return;
}
const machine = machineManager.machines.get(firstId);
if (mode === "delete") {
console.log("deleting machine", firstId);
await client.post("delete_machine", {
body: {
machine: { flake: { identifier: props.clanURI }, name: firstId },
},
});
navigateToClan(navigate, props.clanURI);
ctx.machinesQuery.refetch();
ctx.serviceInstancesQuery.refetch();
return;
}
// Else "move" mode
ctx.setWorldMode(mode);
setHighlightGroups({ move: new Set(menuIntersection()) });
// Find the position of the first selected machine
// Set the actionMachine position to that
const firstId = menuIntersection()[0];
if (firstId) {
const machine = machineManager.machines.get(firstId);
if (machine && actionMachine) {
actionMachine.position.set(
machine.group.position.x,
0,
machine.group.position.z,
);
setCursorPosition([machine.group.position.x, machine.group.position.z]);
}
if (machine && actionMachine) {
actionMachine.position.set(
machine.group.position.x,
0,
machine.group.position.z,
);
setCursorPosition([machine.group.position.x, machine.group.position.z]);
}
};

View File

@@ -766,6 +766,28 @@ def test_prompt(
assert sops_store.get(my_generator, "prompt_persist").decode() == "prompt_persist"
@pytest.mark.with_core
def test_non_existing_dependency_raises_error(
    monkeypatch: pytest.MonkeyPatch,
    flake_with_sops: ClanFlake,
) -> None:
    """A generator depending on an unknown generator must fail with a clear message."""
    test_flake = flake_with_sops
    machine_config = create_test_machine_config()
    test_flake.machines["my_machine"] = machine_config
    # Declare a generator whose dependency list names a generator that is never defined.
    gen_cfg = machine_config["clan"]["core"]["vars"]["generators"]["my_generator"]
    gen_cfg["files"]["my_value"]["secret"] = False
    gen_cfg["script"] = 'echo "$RANDOM" > "$out"/my_value'
    gen_cfg["dependencies"] = ["non_existing_generator"]
    test_flake.refresh()
    monkeypatch.chdir(test_flake.path)

    expected = (
        "Generator 'my_generator' on machine 'my_machine' depends on generator "
        "'non_existing_generator', but 'non_existing_generator' does not exist"
    )
    with pytest.raises(ClanError, match=expected):
        cli.run(["vars", "generate", "--flake", str(test_flake.path), "my_machine"])
@pytest.mark.with_core
def test_shared_vars_must_never_depend_on_machine_specific_vars(
monkeypatch: pytest.MonkeyPatch,

View File

@@ -66,6 +66,41 @@ class Generator:
_public_store: "StoreBase | None" = None
_secret_store: "StoreBase | None" = None
@staticmethod
def validate_dependencies(
    generator_name: str,
    machine_name: str,
    dependencies: list[str],
    generators_data: dict[str, dict],
) -> list[GeneratorKey]:
    """Resolve a generator's dependency names into GeneratorKey objects.

    Args:
        generator_name: Name of the generator declaring the dependencies.
        machine_name: Machine the generator belongs to.
        dependencies: Names of the generators it depends on.
        generators_data: All generators known for this machine.

    Returns:
        One GeneratorKey per dependency, in the order given.

    Raises:
        ClanError: If a dependency name is not present in generators_data.
    """
    # Fail fast on the first unknown dependency so a precise error reaches the user.
    for dep in dependencies:
        if dep not in generators_data:
            msg = f"Generator '{generator_name}' on machine '{machine_name}' depends on generator '{dep}', but '{dep}' does not exist. Please check your configuration."
            raise ClanError(msg)
    # Shared generators are machine-independent, so their key carries no machine name.
    return [
        GeneratorKey(
            machine=None if generators_data[dep]["share"] else machine_name,
            name=dep,
        )
        for dep in dependencies
    ]
@property
def key(self) -> GeneratorKey:
if self.share:
@@ -240,15 +275,12 @@ class Generator:
name=gen_name,
share=share,
files=files,
dependencies=[
GeneratorKey(
machine=None
if generators_data[dep]["share"]
else machine_name,
name=dep,
)
for dep in gen_data["dependencies"]
],
dependencies=cls.validate_dependencies(
gen_name,
machine_name,
gen_data["dependencies"],
generators_data,
),
migrate_fact=gen_data.get("migrateFact"),
validation_hash=gen_data.get("validationHash"),
prompts=prompts,

View File

@@ -17,7 +17,7 @@
runCommand,
setuptools,
webkitgtk_6_0,
wrapGAppsHook,
wrapGAppsHook3,
python,
lib,
stdenv,
@@ -87,7 +87,7 @@ buildPythonApplication rec {
nativeBuildInputs = [
setuptools
copyDesktopItems
wrapGAppsHook
wrapGAppsHook3
gobject-introspection
];