Compare commits
89 Commits
| SHA1 |
|---|
| 4035c25b3d |
| 23c1ae031f |
| 10f9e5d11b |
| fd07d02d2d |
| 2a3d1efc6f |
| 947e0a5488 |
| 57b5520143 |
| 9fd1031f4d |
| c382e8f1f3 |
| cf92303f31 |
| 80d0dc9805 |
| 4e2cbb188c |
| 155bd36d2b |
| 40ea5bf591 |
| 0cd9c84de0 |
| e1ea44a2cc |
| 7c4865e8b0 |
| b032cd4a29 |
| 61edc1e06f |
| c369f3b5d1 |
| 0cc1f072f7 |
| a2a011a47f |
| 972adc7a7c |
| e1b4f296e3 |
| 1cb2156d87 |
| 84703fa293 |
| 0e10122d54 |
| ecd731024c |
| e0da575201 |
| 3577c689bd |
| 885103bfa4 |
| afc1ca37bd |
| 4aa536a1bf |
| c61dfbf8dd |
| e6785fa1d0 |
| 89ea01fd04 |
| a8a08e21e4 |
| 700f571598 |
| 08c15b3d9b |
| 2848b6d5d6 |
| ddc1059799 |
| b690515dd7 |
| e9cef9c7c1 |
| ca69864a20 |
| 5436f284fb |
| 00df032635 |
| a2c016718a |
| d1abebf068 |
| 9635fb03b7 |
| f48c596617 |
| 0589c71601 |
| a2c2d73e49 |
| 99b22dfcbf |
| cd04686663 |
| 2b3e847c28 |
| d0ec4fd8e6 |
| bb5c523ac8 |
| 4df4f5220b |
| a082fd2ed9 |
| 3161c10aa8 |
| 7ad8ed1af0 |
| 94919dc9b8 |
| 1502cfa4a7 |
| cce0207225 |
| 38f98645ac |
| 74d2ae0619 |
| c122201ff2 |
| e72795904d |
| 32ddb4ffa7 |
| db6220b57b |
| e929f36f80 |
| f71460c4f9 |
| 8302f3ffde |
| bd82de6001 |
| 06613de825 |
| 76af63ee1c |
| 3baa43fd87 |
| a6b8ca06ab |
| f7faf2cd63 |
| bff3908bb1 |
| d0613b4030 |
| 52b711667e |
| 13d6db98d1 |
| 195134dd5e |
| 0670f0ad32 |
| daf843eeab |
| 291b742fd7 |
| f7d6c23aaa |
| 1f26135381 |
@@ -196,7 +196,7 @@ in
 pkgs.xkcdpass
 ];
 script = ''
-ssh-keygen -t ed25519 -N "" -f "$out"/borgbackup.ssh
+ssh-keygen -t ed25519 -N "" -C "" -f "$out"/borgbackup.ssh
 xkcdpass -n 4 -d - > "$out"/borgbackup.repokey
 '';
 };

@@ -7,7 +7,7 @@ The importer module allows users to configure importing modules in a flexible an
 It exposes the `extraModules` functionality of the inventory, without any added configuration.

-## Usage:
+## Usage

 ```nix
 inventory.services = {
@@ -54,7 +54,7 @@ in
 pkgs.openssh
 ];
 script = ''
-ssh-keygen -t ed25519 -N "" -f "$out"/ssh.id_ed25519
+ssh-keygen -t ed25519 -N "" -C "" -f "$out"/ssh.id_ed25519
 '';
 };

@@ -74,7 +74,7 @@ in
 pkgs.openssh
 ];
 script = ''
-ssh-keygen -t rsa -b 4096 -N "" -f "$out"/ssh.id_rsa
+ssh-keygen -t rsa -b 4096 -N "" -C "" -f "$out"/ssh.id_rsa
 '';
 };

@@ -36,7 +36,7 @@
 pkgs.openssh
 ];
 script = ''
-ssh-keygen -t ed25519 -N "" -f "$out"/id_ed25519
+ssh-keygen -t ed25519 -N "" -C "" -f "$out"/id_ed25519
 '';
 };

@@ -256,7 +256,7 @@
 pkgs.xkcdpass
 ];
 script = ''
-ssh-keygen -t ed25519 -N "" -f "$out"/borgbackup.ssh
+ssh-keygen -t ed25519 -N "" -C "" -f "$out"/borgbackup.ssh
 xkcdpass -n 4 -d - > "$out"/borgbackup.repokey
 '';
 };

@@ -1,7 +1,7 @@
 The importer module allows users to configure importing modules in a flexible and structured way.
 It exposes the `extraModules` functionality of the inventory, without any added configuration.

-## Usage:
+## Usage

 ```nix
 inventory.instances = {
clanServices/sshd/README.md (new file, 36 lines)
@@ -0,0 +1,36 @@
The `sshd` Clan service manages SSH to make it easy to securely access your machines over the internet. The service uses `vars` to store the SSH host keys for each machine to ensure they remain stable across deployments.

`sshd` also generates SSH certificates for both servers and clients, allowing for certificate-based authentication for SSH.

The service also disables password-based authentication over SSH; to access your machines you'll need to use public key authentication or certificate-based authentication.

## Usage

```nix
{
  inventory.instances = {
    # By default this service only generates ed25519 host keys
    sshd-basic = {
      module = {
        name = "sshd";
        input = "clan-core";
      };
      roles.server.tags.all = { };
      roles.client.tags.all = { };
    };

    # Also generate RSA host keys for all servers
    sshd-with-rsa = {
      module = {
        name = "sshd";
        input = "clan-core";
      };
      roles.server.tags.all = { };
      roles.server.settings = {
        hostKeys.rsa.enable = true;
      };
      roles.client.tags.all = { };
    };
  };
}
```
@@ -2,7 +2,7 @@
 {
 _class = "clan.service";
 manifest.name = "clan-core/sshd";
-manifest.description = "Enables secure remote access to the machine over ssh.";
+manifest.description = "Enables secure remote access to the machine over SSH";
 manifest.categories = [
 "System"
 "Network"

@@ -49,7 +49,7 @@
 pkgs.openssh
 ];
 script = ''
-ssh-keygen -t ed25519 -N "" -f "$out"/id_ed25519
+ssh-keygen -t ed25519 -N "" -C "" -f "$out"/id_ed25519
 '';
 };

@@ -109,7 +109,7 @@
 pkgs.openssh
 ];
 script = ''
-ssh-keygen -t ed25519 -N "" -f "$out"/id_ed25519
+ssh-keygen -t ed25519 -N "" -C "" -f "$out"/id_ed25519
 '';
 };

@@ -151,7 +151,7 @@
 pkgs.openssh
 ];
 script = ''
-ssh-keygen -t rsa -b 4096 -N "" -f "$out"/ssh.id_rsa
+ssh-keygen -t rsa -b 4096 -N "" -C "" -f "$out"/ssh.id_rsa
 '';
 };

@@ -164,7 +164,7 @@
 pkgs.openssh
 ];
 script = ''
-ssh-keygen -t ed25519 -N "" -f "$out"/ssh.id_ed25519
+ssh-keygen -t ed25519 -N "" -C "" -f "$out"/ssh.id_ed25519
 '';
 };
 };
@@ -1,30 +1,31 @@
|
||||
## Usage
|
||||
|
||||
```
|
||||
inventory.instances = {
|
||||
|
||||
# Deploy user alice on all machines. Don't prompt for password (will be
|
||||
# auto-generated).
|
||||
|
||||
user-alice = {
|
||||
module = {
|
||||
name = "users";
|
||||
input = "clan";
|
||||
```nix
|
||||
{
|
||||
inventory.instances = {
|
||||
# Deploy user alice on all machines. Don't prompt for password (will be
|
||||
# auto-generated).
|
||||
user-alice = {
|
||||
module = {
|
||||
name = "users";
|
||||
input = "clan";
|
||||
};
|
||||
roles.default.tags.all = { };
|
||||
roles.default.settings = {
|
||||
user = "alice";
|
||||
prompt = false;
|
||||
};
|
||||
};
|
||||
roles.default.tags.all = { };
|
||||
roles.default.settings = {
|
||||
user = "alice";
|
||||
prompt = false;
|
||||
|
||||
# Deploy user bob only on his laptop. Prompt for a password.
|
||||
user-bob = {
|
||||
module = {
|
||||
name = "users";
|
||||
input = "clan";
|
||||
};
|
||||
roles.default.machines.bobs-laptop = { };
|
||||
roles.default.settings.user = "bob";
|
||||
};
|
||||
};
|
||||
|
||||
# Deploy user bob only on his laptop. Prompt for a password.
|
||||
user-bob = {
|
||||
module = {
|
||||
name = "users";
|
||||
input = "clan";
|
||||
};
|
||||
roles.default.machines.bobs-laptop = { };
|
||||
roles.default.settings.user = "bob";
|
||||
};
|
||||
}
|
||||
```
|
||||
|
||||
@@ -62,6 +62,7 @@ nav:
 - Vars Backend: guides/vars-backend.md
 - Facts Backend: guides/secrets.md
 - Adding more machines: guides/more-machines.md
+- Target Host: guides/target-host.md
 - Inventory:
 - Inventory: guides/inventory.md
 - Secure Boot: guides/secure-boot.md

@@ -154,6 +155,7 @@ nav:
 - reference/cli/show.md
 - reference/cli/ssh.md
 - reference/cli/state.md
+- reference/cli/templates.md
 - reference/cli/vars.md
 - reference/cli/vms.md
 - NixOS Modules:
@@ -28,7 +28,7 @@ Benefits:
 * Caching mechanism is very simple.


-### Method 2: Direct access:
+### Method 2: Direct access

 Directly calling the evaluator / build sandbox via `nix build` and `nix eval` within the Python code

@@ -122,8 +122,8 @@ CTRL+D
 4. Locally generate ssh host keys. You only need to generate ones for the algorithms you're using in `authorizedKeys`.

 ```bash
-ssh-keygen -q -N "" -t ed25519 -f ./initrd_host_ed25519_key
-ssh-keygen -q -N "" -t rsa -b 4096 -f ./initrd_host_rsa_key
+ssh-keygen -q -N "" -C "" -t ed25519 -f ./initrd_host_ed25519_key
+ssh-keygen -q -N "" -C "" -t rsa -b 4096 -f ./initrd_host_rsa_key
 ```

 5. Securely copy your local initrd ssh host keys to the installer's `/mnt` directory:
docs/site/guides/target-host.md (new file, 82 lines)
@@ -0,0 +1,82 @@
# How to Set `targetHost` for a Machine

The `targetHost` defines where the machine can be reached for operations like SSH or deployment. You can set it in two ways, depending on your use case.

---

## ✅ Option 1: Use the Inventory (Recommended for Static Hosts)

If the hostname is **static**, like `server.example.com`, set it in the **inventory**:

```{.nix title="flake.nix" hl_lines="8"}
{
  # elided
  outputs =
    { self, clan-core, ... }:
    let
      clan = clan-core.lib.clan {
        inventory.machines.jon = {
          deploy.targetHost = "root@server.example.com";
        };
      };
    in
    {
      inherit (clan.config) nixosConfigurations nixosModules clanInternals;
      # elided
    };
}
```

This is fast, simple, and explicit, and doesn’t require evaluating the NixOS config. It can also be displayed in the clan-cli or clan-app.

---

## ✅ Option 2: Use NixOS (Only for Dynamic Hosts)

If your target host depends on a **dynamic expression** (like using the machine’s evaluated FQDN), set it inside the NixOS module:

```{.nix title="flake.nix" hl_lines="8"}
{
  # elided
  outputs =
    { self, clan-core, ... }:
    let
      clan = clan-core.lib.clan {
        machines.jon = {config, ...}: {
          clan.core.networking.targetHost = "jon@${config.networking.fqdn}";
        };
      };
    in
    {
      inherit (clan.config) nixosConfigurations nixosModules clanInternals;
      # elided
    };
}
```

Use this **only if the value cannot be made static**, because it’s slower and won't be displayed in the clan-cli or clan-app yet.

---

## 📝 TL;DR

| Use Case | Use Inventory? | Example |
| ------------------------- | -------------- | -------------------------------- |
| Static hostname | ✅ Yes | `root@server.example.com` |
| Dynamic config expression | ❌ No | `jon@${config.networking.fqdn}` |

---

## 🚀 Coming Soon: Unified Networking Module

We’re working on a new networking module that will automatically do all of this for you.

- Easier to use
- Sane defaults: You’ll always be able to reach the machine, no need to worry about hostnames.
- ✨ Migration from **either method** will be supported and simple.

## Summary

- Ask: *Does this hostname dynamically change based on NixOS config?*
- If **no**, use the inventory.
- If **yes**, then use NixOS config.
flake.lock (generated, 20 lines)
@@ -16,11 +16,11 @@
|
||||
]
|
||||
},
|
||||
"locked": {
|
||||
"lastModified": 1751413887,
|
||||
"narHash": "sha256-+ut7DrSwamExIvaCFdiTYD88NTSYJFG2CEOvCha59vI=",
|
||||
"rev": "246f0d66547d073af6249e4f7852466197e871ed",
|
||||
"lastModified": 1751846468,
|
||||
"narHash": "sha256-h0mpWZIOIAKj4fmLNyI2HDG+c0YOkbYmyJXSj/bQ9s0=",
|
||||
"rev": "a2166c13b0cb3febdaf36391cd2019aa2ccf4366",
|
||||
"type": "tarball",
|
||||
"url": "https://git.clan.lol/api/v1/repos/clan/data-mesher/archive/246f0d66547d073af6249e4f7852466197e871ed.tar.gz"
|
||||
"url": "https://git.clan.lol/api/v1/repos/clan/data-mesher/archive/a2166c13b0cb3febdaf36391cd2019aa2ccf4366.tar.gz"
|
||||
},
|
||||
"original": {
|
||||
"type": "tarball",
|
||||
@@ -34,11 +34,11 @@
|
||||
]
|
||||
},
|
||||
"locked": {
|
||||
"lastModified": 1751607816,
|
||||
"narHash": "sha256-5PtrwjqCIJ4DKQhzYdm8RFePBuwb+yTzjV52wWoGSt4=",
|
||||
"lastModified": 1751854533,
|
||||
"narHash": "sha256-U/OQFplExOR1jazZY4KkaQkJqOl59xlh21HP9mI79Vc=",
|
||||
"owner": "nix-community",
|
||||
"repo": "disko",
|
||||
"rev": "da6109c917b48abc1f76dd5c9bf3901c8c80f662",
|
||||
"rev": "16b74a1e304197248a1bc663280f2548dbfcae3c",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
@@ -164,10 +164,10 @@
|
||||
"nixpkgs": {
|
||||
"locked": {
|
||||
"lastModified": 315532800,
|
||||
"narHash": "sha256-0HRxGUoOMtOYnwlMWY0AkuU88WHaI3Q5GEILmsWpI8U=",
|
||||
"rev": "a48741b083d4f36dd79abd9f760c84da6b4dc0e5",
|
||||
"narHash": "sha256-mUlYenGbsUFP0A3EhfKJXmUl5+MQGJLhoEop2t3g5p4=",
|
||||
"rev": "ceb24d94c6feaa4e8737a8e2bd3cf71c3a7eaaa0",
|
||||
"type": "tarball",
|
||||
"url": "https://releases.nixos.org/nixpkgs/nixpkgs-25.11pre823094.a48741b083d4/nixexprs.tar.xz"
|
||||
"url": "https://releases.nixos.org/nixpkgs/nixpkgs-25.11pre826033.ceb24d94c6fe/nixexprs.tar.xz"
|
||||
},
|
||||
"original": {
|
||||
"type": "tarball",
|
||||
|
||||
@@ -229,8 +229,6 @@ in
|
||||
clanInternals = {
|
||||
inventoryClass =
|
||||
let
|
||||
localModuleSet =
|
||||
lib.filterAttrs (n: _: !inventory._legacyModules ? ${n}) inventory.modules // config.modules;
|
||||
flakeInputs = config.self.inputs;
|
||||
in
|
||||
{
|
||||
@@ -240,7 +238,7 @@ in
|
||||
imports = [
|
||||
../inventoryClass/builder/default.nix
|
||||
(lib.modules.importApply ../inventoryClass/service-list-from-inputs.nix {
|
||||
inherit flakeInputs clanLib localModuleSet;
|
||||
inherit flakeInputs clanLib;
|
||||
})
|
||||
{
|
||||
inherit inventory directory;
|
||||
|
||||
@@ -1,12 +1,9 @@
|
||||
{
|
||||
flakeInputs,
|
||||
clanLib,
|
||||
localModuleSet,
|
||||
}:
|
||||
{ lib, config, ... }:
|
||||
|
||||
let
|
||||
|
||||
inspectModule =
|
||||
inputName: moduleName: module:
|
||||
let
|
||||
@@ -28,16 +25,30 @@ in
|
||||
{
|
||||
options.modulesPerSource = lib.mkOption {
|
||||
# { sourceName :: { moduleName :: {} }}
|
||||
readOnly = true;
|
||||
type = lib.types.raw;
|
||||
default =
|
||||
let
|
||||
inputsWithModules = lib.filterAttrs (_inputName: v: v ? clan.modules) flakeInputs;
|
||||
|
||||
in
|
||||
lib.mapAttrs (
|
||||
inputName: v: lib.mapAttrs (inspectModule inputName) v.clan.modules
|
||||
) inputsWithModules;
|
||||
};
|
||||
options.localModules = lib.mkOption {
|
||||
default = lib.mapAttrs (inspectModule "self") localModuleSet;
|
||||
readOnly = true;
|
||||
type = lib.types.raw;
|
||||
default = config.modulesPerSource.self;
|
||||
};
|
||||
options.templatesPerSource = lib.mkOption {
|
||||
# { sourceName :: { moduleName :: {} }}
|
||||
readOnly = true;
|
||||
type = lib.types.raw;
|
||||
default =
|
||||
let
|
||||
inputsWithTemplates = lib.filterAttrs (_inputName: v: v ? clan.templates) flakeInputs;
|
||||
in
|
||||
lib.mapAttrs (_inputName: v: lib.mapAttrs (_n: t: t) v.clan.templates) inputsWithTemplates;
|
||||
|
||||
};
|
||||
}
|
||||
|
||||
@@ -40,6 +40,18 @@ in
 };

 config = {
+# Check for removed passBackend option usage
+assertions = [
+{
+assertion = config.clan.core.vars.settings.passBackend == null;
+message = ''
+The option `clan.core.vars.settings.passBackend' has been removed.
+Use clan.core.vars.password-store.passPackage instead.
+Set it to pkgs.pass for GPG or pkgs.passage for age encryption.
+'';
+}
+];
+
 # check all that all non-secret files have no owner/group/mode set
 warnings = lib.foldl' (
 warnings: generator:
@@ -54,7 +54,7 @@ in
 {
 _class = "nixos";

-options.clan.vars.password-store = {
+options.clan.core.vars.password-store = {
 secretLocation = lib.mkOption {
 type = lib.types.path;
 default = "/etc/secret-vars";

@@ -62,6 +62,13 @@ in
 location where the tarball with the password-store secrets will be uploaded to and the manifest
 '';
 };
+passPackage = lib.mkOption {
+type = lib.types.package;
+default = pkgs.pass;
+description = ''
+Password store package to use. Can be pkgs.pass for GPG-based storage or pkgs.passage for age-based storage.
+'';
+};
 };
 config = {
 clan.core.vars.settings =
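For migration, a minimal sketch of setting the new option in a machine's NixOS configuration (assuming an age-based store; the option path and packages come from the diff above):

```nix
{ pkgs, ... }:
{
  # Replaces the removed clan.core.vars.settings.passBackend option.
  # Use pkgs.pass for a GPG-based store, or pkgs.passage for an age-based store.
  clan.core.vars.password-store.passPackage = pkgs.passage;
}
```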
@@ -76,7 +83,7 @@ in
|
||||
else if file.config.neededFor == "services" then
|
||||
"/run/secrets/${file.config.generatorName}/${file.config.name}"
|
||||
else if file.config.neededFor == "activation" then
|
||||
"${config.clan.password-store.secretLocation}/activation/${file.config.generatorName}/${file.config.name}"
|
||||
"${config.clan.core.vars.password-store.secretLocation}/activation/${file.config.generatorName}/${file.config.name}"
|
||||
else if file.config.neededFor == "partitioning" then
|
||||
"/run/partitioning-secrets/${file.config.generatorName}/${file.config.name}"
|
||||
else
|
||||
@@ -95,7 +102,7 @@ in
|
||||
]
|
||||
''
|
||||
[ -e /run/current-system ] || echo setting up secrets...
|
||||
${installSecretTarball}/bin/install-secret-tarball ${config.clan.vars.password-store.secretLocation}/secrets_for_users.tar.gz /run/user-secrets
|
||||
${installSecretTarball}/bin/install-secret-tarball ${config.clan.core.vars.password-store.secretLocation}/secrets_for_users.tar.gz /run/user-secrets
|
||||
''
|
||||
// lib.optionalAttrs (config.system ? dryActivationScript) {
|
||||
supportsDryActivation = true;
|
||||
@@ -111,7 +118,7 @@ in
|
||||
]
|
||||
''
|
||||
[ -e /run/current-system ] || echo setting up secrets...
|
||||
${installSecretTarball}/bin/install-secret-tarball ${config.clan.vars.password-store.secretLocation}/secrets.tar.gz /run/secrets
|
||||
${installSecretTarball}/bin/install-secret-tarball ${config.clan.core.vars.password-store.secretLocation}/secrets.tar.gz /run/secrets
|
||||
''
|
||||
// lib.optionalAttrs (config.system ? dryActivationScript) {
|
||||
supportsDryActivation = true;
|
||||
@@ -129,7 +136,7 @@ in
|
||||
serviceConfig = {
|
||||
Type = "oneshot";
|
||||
ExecStart = [
|
||||
"${installSecretTarball}/bin/install-secret-tarball ${config.clan.vars.password-store.secretLocation}/secrets_for_users.tar.gz /run/user-secrets"
|
||||
"${installSecretTarball}/bin/install-secret-tarball ${config.clan.core.vars.password-store.secretLocation}/secrets_for_users.tar.gz /run/user-secrets"
|
||||
];
|
||||
RemainAfterExit = true;
|
||||
};
|
||||
@@ -142,7 +149,7 @@ in
|
||||
serviceConfig = {
|
||||
Type = "oneshot";
|
||||
ExecStart = [
|
||||
"${installSecretTarball}/bin/install-secret-tarball ${config.clan.vars.password-store.secretLocation}/secrets.tar.gz /run/secrets"
|
||||
"${installSecretTarball}/bin/install-secret-tarball ${config.clan.core.vars.password-store.secretLocation}/secrets.tar.gz /run/secrets"
|
||||
];
|
||||
RemainAfterExit = true;
|
||||
};
|
||||
|
||||
@@ -15,17 +15,6 @@
|
||||
'';
|
||||
};
|
||||
|
||||
passBackend = lib.mkOption {
|
||||
type = lib.types.enum [
|
||||
"passage"
|
||||
"pass"
|
||||
];
|
||||
default = "pass";
|
||||
description = ''
|
||||
password-store backend to use. Valid options are `pass` and `passage`
|
||||
'';
|
||||
};
|
||||
|
||||
secretModule = lib.mkOption {
|
||||
type = lib.types.str;
|
||||
internal = true;
|
||||
@@ -65,4 +54,15 @@
|
||||
the python import path to the public module
|
||||
'';
|
||||
};
|
||||
|
||||
# Legacy option that guides migration
|
||||
passBackend = lib.mkOption {
|
||||
type = lib.types.nullOr lib.types.str;
|
||||
default = null;
|
||||
visible = false;
|
||||
description = ''
|
||||
DEPRECATED: This option has been removed. Use clan.vars.password-store.passPackage instead.
|
||||
Set it to pkgs.pass for GPG or pkgs.passage for age encryption.
|
||||
'';
|
||||
};
|
||||
}
|
||||
|
||||
@@ -90,7 +90,7 @@ const handleCancel = async <K extends OperationNames>(
|
||||
orig_task: Promise<BackendReturnType<K>>,
|
||||
) => {
|
||||
console.log("Canceling operation: ", ops_key);
|
||||
const { promise, op_key } = _callApi("cancel_task", { task_id: ops_key });
|
||||
const { promise, op_key } = _callApi("delete_task", { task_id: ops_key });
|
||||
promise.catch((error) => {
|
||||
toast.custom(
|
||||
(t) => (
|
||||
|
||||
@@ -75,7 +75,7 @@ export const MachineListItem = (props: MachineListItemProps) => {
|
||||
}
|
||||
|
||||
setInstalling(true);
|
||||
await callApi("install_machine", {
|
||||
await callApi("run_machine_install", {
|
||||
opts: {
|
||||
machine: {
|
||||
name: name,
|
||||
@@ -163,7 +163,7 @@ export const MachineListItem = (props: MachineListItemProps) => {
|
||||
}
|
||||
|
||||
await callApi(
|
||||
"deploy_machine",
|
||||
"run_machine_deploy",
|
||||
{
|
||||
machine: {
|
||||
name: name,
|
||||
|
||||
@@ -13,7 +13,7 @@ export const clanMetaQuery = (uri: string | undefined = undefined) =>
|
||||
queryFn: async () => {
|
||||
console.log("fetching clan meta", clanURI);
|
||||
|
||||
const result = await callApi("show_clan_meta", {
|
||||
const result = await callApi("get_clan_details", {
|
||||
flake: { identifier: clanURI! },
|
||||
}).promise;
|
||||
|
||||
|
||||
@@ -33,27 +33,6 @@ export const createModulesQuery = (
|
||||
},
|
||||
}));
|
||||
|
||||
export const tagsQuery = (uri: string | undefined) =>
|
||||
useQuery<string[]>(() => ({
|
||||
queryKey: [uri, "tags"],
|
||||
placeholderData: [],
|
||||
queryFn: async () => {
|
||||
if (!uri) return [];
|
||||
|
||||
const response = await callApi("get_inventory", {
|
||||
flake: { identifier: uri },
|
||||
}).promise;
|
||||
if (response.status === "error") {
|
||||
console.error("Failed to fetch data");
|
||||
} else {
|
||||
const machines = response.data.machines || {};
|
||||
const tags = Object.values(machines).flatMap((m) => m.tags || []);
|
||||
return tags;
|
||||
}
|
||||
return [];
|
||||
},
|
||||
}));
|
||||
|
||||
export const machinesQuery = (uri: string | undefined) =>
|
||||
useQuery<string[]>(() => ({
|
||||
queryKey: [uri, "machines"],
|
||||
@@ -61,7 +40,7 @@ export const machinesQuery = (uri: string | undefined) =>
|
||||
queryFn: async () => {
|
||||
if (!uri) return [];
|
||||
|
||||
const response = await callApi("get_inventory", {
|
||||
const response = await callApi("list_machines", {
|
||||
flake: { identifier: uri },
|
||||
}).promise;
|
||||
if (response.status === "error") {
|
||||
|
||||
@@ -66,7 +66,7 @@ export const CreateClan = () => {
|
||||
}
|
||||
|
||||
// Will generate a key if it doesn't exist, and add a user to the clan
|
||||
const k = await callApi("keygen", {
|
||||
const k = await callApi("create_secrets_user", {
|
||||
flake_dir: target_dir[0],
|
||||
}).promise;
|
||||
|
||||
@@ -203,6 +203,6 @@ export const CreateClan = () => {
|
||||
};
|
||||
|
||||
type Meta = Extract<
|
||||
OperationResponse<"show_clan_meta">,
|
||||
OperationResponse<"get_clan_details">,
|
||||
{ status: "success" }
|
||||
>["data"];
|
||||
|
||||
@@ -23,7 +23,7 @@ const EditClanForm = (props: EditClanFormProps) => {
|
||||
const handleSubmit: SubmitHandler<GeneralData> = async (values, event) => {
|
||||
await toast.promise(
|
||||
(async () => {
|
||||
await callApi("update_clan_meta", {
|
||||
await callApi("set_clan_details", {
|
||||
options: {
|
||||
flake: { identifier: props.directory },
|
||||
meta: values,
|
||||
@@ -128,7 +128,7 @@ const EditClanForm = (props: EditClanFormProps) => {
|
||||
);
|
||||
};
|
||||
|
||||
type GeneralData = SuccessQuery<"show_clan_meta">["data"];
|
||||
type GeneralData = SuccessQuery<"get_clan_details">["data"];
|
||||
|
||||
export const ClanDetails = () => {
|
||||
const params = useParams();
|
||||
|
||||
@@ -100,7 +100,7 @@ export const Flash = () => {
|
||||
const deviceQuery = createQuery(() => ({
|
||||
queryKey: ["block_devices"],
|
||||
queryFn: async () => {
|
||||
const result = await callApi("show_block_devices", {}).promise;
|
||||
const result = await callApi("list_block_devices", {}).promise;
|
||||
if (result.status === "error") throw new Error("Failed to fetch data");
|
||||
return result.data;
|
||||
},
|
||||
@@ -110,7 +110,7 @@ export const Flash = () => {
|
||||
const keymapQuery = createQuery(() => ({
|
||||
queryKey: ["list_keymaps"],
|
||||
queryFn: async () => {
|
||||
const result = await callApi("list_possible_keymaps", {}).promise;
|
||||
const result = await callApi("list_keymaps", {}).promise;
|
||||
if (result.status === "error") throw new Error("Failed to fetch data");
|
||||
return result.data;
|
||||
},
|
||||
@@ -120,7 +120,7 @@ export const Flash = () => {
|
||||
const langQuery = createQuery(() => ({
|
||||
queryKey: ["list_languages"],
|
||||
queryFn: async () => {
|
||||
const result = await callApi("list_possible_languages", {}).promise;
|
||||
const result = await callApi("list_languages", {}).promise;
|
||||
if (result.status === "error") throw new Error("Failed to fetch data");
|
||||
return result.data;
|
||||
},
|
||||
@@ -157,7 +157,7 @@ export const Flash = () => {
|
||||
console.log("Confirmed flash:", values);
|
||||
try {
|
||||
await toast.promise(
|
||||
callApi("flash_machine", {
|
||||
callApi("run_machine_flash", {
|
||||
machine: {
|
||||
name: values.machine.devicePath,
|
||||
flake: {
|
||||
|
||||
@@ -4,7 +4,7 @@ import { Button } from "../../components/Button/Button";
|
||||
import Icon from "@/src/components/icon";
|
||||
|
||||
type ServiceModel = Extract<
|
||||
OperationResponse<"show_mdns">,
|
||||
OperationResponse<"list_mdns_services">,
|
||||
{ status: "success" }
|
||||
>["data"]["services"];
|
||||
|
||||
@@ -16,7 +16,7 @@ export const HostList: Component = () => {
|
||||
<div class="" data-tip="Refresh install targets">
|
||||
<Button
|
||||
variant="light"
|
||||
onClick={() => callApi("show_mdns", {})}
|
||||
onClick={() => callApi("list_mdns_services", {})}
|
||||
startIcon={<Icon icon="Update" />}
|
||||
></Button>
|
||||
</div>
|
||||
|
||||
@@ -120,7 +120,7 @@ export function InstallMachine(props: InstallMachineProps) {
|
||||
throw new Error("No target host found for the machine");
|
||||
}
|
||||
|
||||
const installPromise = callApi("install_machine", {
|
||||
const installPromise = callApi("run_machine_install", {
|
||||
opts: {
|
||||
machine: {
|
||||
name: props.name,
|
||||
|
||||
@@ -149,7 +149,7 @@ export function MachineForm(props: MachineFormProps) {
|
||||
|
||||
setIsUpdating(true);
|
||||
const r = await callApi(
|
||||
"deploy_machine",
|
||||
"run_machine_deploy",
|
||||
{
|
||||
machine: {
|
||||
name: machine,
|
||||
|
||||
@@ -71,7 +71,7 @@ export const HWStep = (props: StepProps<HardwareValues>) => {
|
||||
const hwReportQuery = useQuery(() => ({
|
||||
queryKey: [props.dir, props.machine_id, "hw_report"],
|
||||
queryFn: async () => {
|
||||
const result = await callApi("show_machine_hardware_config", {
|
||||
const result = await callApi("get_machine_hardware_summary", {
|
||||
machine: {
|
||||
flake: {
|
||||
identifier: props.dir,
|
||||
@@ -127,7 +127,7 @@ export const HWStep = (props: StepProps<HardwareValues>) => {
|
||||
return;
|
||||
}
|
||||
|
||||
const r = await callApi("generate_machine_hardware_info", {
|
||||
const r = await callApi("run_machine_hardware_info", {
|
||||
opts: {
|
||||
machine: {
|
||||
name: props.machine_id,
|
||||
|
||||
@@ -173,7 +173,7 @@ export const VarsStep = (props: VarsStepProps) => {
|
||||
toast.error("Error fetching data");
|
||||
return;
|
||||
}
|
||||
const result = await callApi("generate_vars_for_machine", {
|
||||
const result = await callApi("run_generators", {
|
||||
machine_name: props.machine_id,
|
||||
base_dir: props.dir,
|
||||
generators: generatorsQuery.data.map((generator) => generator.name),
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
import { BackButton } from "@/src/components/BackButton";
|
||||
import { createModulesQuery, machinesQuery, tagsQuery } from "@/src/queries";
|
||||
import { createModulesQuery, machinesQuery } from "@/src/queries";
|
||||
import { useParams } from "@solidjs/router";
|
||||
import { For, Match, Switch } from "solid-js";
|
||||
import { ModuleInfo } from "./list";
|
||||
@@ -34,28 +34,11 @@ interface AddModuleProps {
|
||||
|
||||
const AddModule = (props: AddModuleProps) => {
|
||||
const { activeClanURI } = useClanContext();
|
||||
const tags = tagsQuery(activeClanURI());
|
||||
const machines = machinesQuery(activeClanURI());
|
||||
return (
|
||||
<div>
|
||||
<div>Add to your clan</div>
|
||||
<Switch fallback="loading">
|
||||
<Match when={tags.data}>
|
||||
{(tags) => (
|
||||
<For each={Object.keys(props.data.roles)}>
|
||||
{(role) => (
|
||||
<>
|
||||
<div class="text-neutral-600">{role}s</div>
|
||||
<RoleForm
|
||||
avilableTags={tags()}
|
||||
availableMachines={machines.data || []}
|
||||
/>
|
||||
</>
|
||||
)}
|
||||
</For>
|
||||
)}
|
||||
</Match>
|
||||
</Switch>
|
||||
<Switch fallback="loading">Removed</Switch>
|
||||
</div>
|
||||
);
|
||||
};
|
||||
|
||||
@@ -62,7 +62,6 @@ const Details = (props: DetailsProps) => {
|
||||
navigate(`/modules/add/${props.id}`);
|
||||
// const uri = activeURI();
|
||||
// if (!uri) return;
|
||||
// const res = await callApi("get_inventory", { base_path: uri });
|
||||
// if (res.status === "error") {
|
||||
// toast.error("Failed to fetch inventory");
|
||||
// return;
|
||||
|
||||
@@ -90,7 +90,7 @@ const handleCancel = async <K extends OperationNames>(
|
||||
orig_task: Promise<BackendReturnType<K>>,
|
||||
) => {
|
||||
console.log("Canceling operation: ", ops_key);
|
||||
const { promise, op_key } = _callApi("cancel_task", { task_id: ops_key });
|
||||
const { promise, op_key } = _callApi("delete_task", { task_id: ops_key });
|
||||
promise.catch((error) => {
|
||||
toast.custom(
|
||||
(t) => (
|
||||
|
||||
pkgs/clan-app/ui/src/components/v2/Alert/Alert.css (new file, 39 lines)
@@ -0,0 +1,39 @@
|
||||
div.alert {
|
||||
@apply flex gap-2.5 px-6 py-4 size-full rounded-md items-start;
|
||||
|
||||
&.has-icon {
|
||||
@apply pl-4;
|
||||
|
||||
svg.icon {
|
||||
@apply relative top-0.5;
|
||||
}
|
||||
}
|
||||
|
||||
&.has-dismiss {
|
||||
@apply pr-4;
|
||||
}
|
||||
|
||||
& > div.content {
|
||||
@apply flex flex-col gap-2 size-full;
|
||||
}
|
||||
|
||||
&.info {
|
||||
@apply bg-semantic-info-1 border border-semantic-info-3 fg-semantic-info-3;
|
||||
}
|
||||
|
||||
&.error {
|
||||
@apply bg-semantic-error-2 border border-semantic-error-3 fg-semantic-error-3;
|
||||
}
|
||||
|
||||
&.warning {
|
||||
@apply bg-semantic-warning-2 border border-semantic-warning-3 fg-semantic-warning-3;
|
||||
}
|
||||
|
||||
&.success {
|
||||
@apply bg-semantic-success-1 border border-semantic-success-3 fg-semantic-success-3;
|
||||
}
|
||||
|
||||
& > button.dismiss-trigger {
|
||||
@apply relative top-0.5;
|
||||
}
|
||||
}
|
||||
pkgs/clan-app/ui/src/components/v2/Alert/Alert.stories.tsx (new file, 138 lines)
@@ -0,0 +1,138 @@
|
||||
import type { Meta, StoryObj } from "@kachurun/storybook-solid";
|
||||
import { Alert, AlertProps } from "@/src/components/v2/Alert/Alert";
|
||||
import { expect, fn } from "storybook/test";
|
||||
import { StoryContext } from "@kachurun/storybook-solid-vite";
|
||||
|
||||
const meta: Meta<AlertProps> = {
|
||||
title: "Components/Alert",
|
||||
component: Alert,
|
||||
decorators: [
|
||||
(Story: StoryObj) => (
|
||||
<div class="w-72">
|
||||
<Story />
|
||||
</div>
|
||||
),
|
||||
],
|
||||
};
|
||||
|
||||
export default meta;
|
||||
|
||||
type Story = StoryObj<AlertProps>;
|
||||
|
||||
export const Info: Story = {
|
||||
args: {
|
||||
type: "info",
|
||||
title: "Headline",
|
||||
description:
|
||||
"Lorem ipsum dolor sit amet, consetetur sadipscing elitr, sed diam nonumy eirmod tempor invidunt ut labore et dolore magna aliquyam erat, sed diam voluptua.",
|
||||
},
|
||||
};
|
||||
|
||||
export const Error: Story = {
|
||||
args: {
|
||||
...Info.args,
|
||||
type: "error",
|
||||
},
|
||||
};
|
||||
|
||||
export const Warning: Story = {
|
||||
args: {
|
||||
...Info.args,
|
||||
type: "warning",
|
||||
},
|
||||
};
|
||||
|
||||
export const Success: Story = {
|
||||
args: {
|
||||
...Info.args,
|
||||
type: "success",
|
||||
},
|
||||
};
|
||||
|
||||
export const InfoIcon: Story = {
|
||||
args: {
|
||||
...Info.args,
|
||||
icon: "Info",
|
||||
},
|
||||
};
|
||||
|
||||
export const ErrorIcon: Story = {
|
||||
args: {
|
||||
...Error.args,
|
||||
icon: "WarningFilled",
|
||||
},
|
||||
};
|
||||
|
||||
export const WarningIcon: Story = {
|
||||
args: {
|
||||
...Warning.args,
|
||||
icon: "WarningFilled",
|
||||
},
|
||||
};
|
||||
|
||||
export const SuccessIcon: Story = {
|
||||
args: {
|
||||
...Success.args,
|
||||
icon: "Checkmark",
|
||||
},
|
||||
};
|
||||
|
||||
export const InfoDismiss: Story = {
|
||||
args: {
|
||||
...Info.args,
|
||||
onDismiss: fn(),
|
||||
play: async ({ canvas, step, userEvent, args }: StoryContext) => {
|
||||
await userEvent.click(canvas.getByRole("button"));
|
||||
await expect(args.onDismiss).toHaveBeenCalled();
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
export const ErrorDismiss: Story = {
|
||||
args: {
|
||||
...InfoDismiss.args,
|
||||
type: "error",
|
||||
},
|
||||
};
|
||||
|
||||
export const WarningDismiss: Story = {
|
||||
args: {
|
||||
...InfoDismiss.args,
|
||||
type: "warning",
|
||||
},
|
||||
};
|
||||
|
||||
export const SuccessDismiss: Story = {
|
||||
args: {
|
||||
...InfoDismiss.args,
|
||||
type: "success",
|
||||
},
|
||||
};
|
||||
|
||||
export const InfoIconDismiss: Story = {
|
||||
args: {
|
||||
...InfoDismiss.args,
|
||||
icon: "Info",
|
||||
},
|
||||
};
|
||||
|
||||
export const ErrorIconDismiss: Story = {
|
||||
args: {
|
||||
...ErrorDismiss.args,
|
||||
icon: "WarningFilled",
|
||||
},
|
||||
};
|
||||
|
||||
export const WarningIconDismiss: Story = {
|
||||
args: {
|
||||
...WarningDismiss.args,
|
||||
icon: "WarningFilled",
|
||||
},
|
||||
};
|
||||
|
||||
export const SuccessIconDismiss: Story = {
|
||||
args: {
|
||||
...SuccessDismiss.args,
|
||||
icon: "Checkmark",
|
||||
},
|
||||
};
|
||||
pkgs/clan-app/ui/src/components/v2/Alert/Alert.tsx (new file, 43 lines)
@@ -0,0 +1,43 @@
|
||||
import "./Alert.css";
|
||||
import cx from "classnames";
|
||||
import Icon, { IconVariant } from "@/src/components/v2/Icon/Icon";
|
||||
import { Typography } from "@/src/components/v2/Typography/Typography";
|
||||
import { Button } from "@kobalte/core/button";
|
||||
import { Alert as KAlert } from "@kobalte/core/alert";
|
||||
|
||||
export interface AlertProps {
|
||||
type: "success" | "error" | "warning" | "info";
|
||||
title: string;
|
||||
description: string;
|
||||
icon?: IconVariant;
|
||||
onDismiss?: () => void;
|
||||
}
|
||||
|
||||
export const Alert = (props: AlertProps) => (
|
||||
<KAlert
|
||||
class={cx("alert", props.type, {
|
||||
"has-icon": props.icon,
|
||||
"has-dismiss": props.onDismiss,
|
||||
})}
|
||||
>
|
||||
{props.icon && <Icon icon={props.icon} color="inherit" size="1rem" />}
|
||||
<div class="content">
|
||||
<Typography hierarchy="body" size="default" weight="bold" color="inherit">
|
||||
{props.title}
|
||||
</Typography>
|
||||
<Typography hierarchy="body" size="xs" color="inherit">
|
||||
{props.description}
|
||||
</Typography>
|
||||
</div>
|
||||
{props.onDismiss && (
|
||||
<Button
|
||||
name="dismiss-alert"
|
||||
class="dismiss-trigger"
|
||||
onClick={props.onDismiss}
|
||||
aria-label={`Dismiss ${props.type} alert`}
|
||||
>
|
||||
<Icon icon="Close" color="primary" size="0.75rem" />
|
||||
</Button>
|
||||
)}
|
||||
</KAlert>
|
||||
);
|
||||
@@ -13,7 +13,7 @@ export const clanMetaQuery = (uri: string | undefined = undefined) =>
|
||||
queryFn: async () => {
|
||||
console.log("fetching clan meta", clanURI);
|
||||
|
||||
const result = await callApi("show_clan_meta", {
|
||||
const result = await callApi("get_clan_details", {
|
||||
flake: { identifier: clanURI! },
|
||||
}).promise;
|
||||
|
||||
|
||||
@@ -49,7 +49,7 @@ export const CreateClan = () => {
|
||||
const r = await callApi("create_clan", {
|
||||
opts: {
|
||||
dest: target_dir[0],
|
||||
template_name: template,
|
||||
template: template,
|
||||
initial: {
|
||||
meta,
|
||||
services: {},
|
||||
@@ -65,7 +65,7 @@ export const CreateClan = () => {
|
||||
}
|
||||
|
||||
// Will generate a key if it doesn't exist, and add a user to the clan
|
||||
const k = await callApi("keygen", {
|
||||
const k = await callApi("create_secrets_user", {
|
||||
flake_dir: target_dir[0],
|
||||
}).promise;
|
||||
|
||||
@@ -202,6 +202,6 @@ export const CreateClan = () => {
|
||||
};
|
||||
|
||||
type Meta = Extract<
|
||||
OperationResponse<"show_clan_meta">,
|
||||
OperationResponse<"get_clan_details">,
|
||||
{ status: "success" }
|
||||
>["data"];
|
||||
|
||||
@@ -23,7 +23,7 @@ const EditClanForm = (props: EditClanFormProps) => {
|
||||
const handleSubmit: SubmitHandler<GeneralData> = async (values, event) => {
|
||||
await toast.promise(
|
||||
(async () => {
|
||||
await callApi("update_clan_meta", {
|
||||
await callApi("set_clan_details", {
|
||||
options: {
|
||||
flake: { identifier: props.directory },
|
||||
meta: values,
|
||||
@@ -128,7 +128,7 @@ const EditClanForm = (props: EditClanFormProps) => {
|
||||
);
|
||||
};
|
||||
|
||||
type GeneralData = SuccessQuery<"show_clan_meta">["data"];
|
||||
type GeneralData = SuccessQuery<"get_clan_details">["data"];
|
||||
|
||||
export const ClanDetails = () => {
|
||||
const params = useParams();
|
||||
|
||||
@@ -4,7 +4,7 @@ import { Button } from "../../components/Button/Button";
|
||||
import Icon from "@/src/components/icon";
|
||||
|
||||
type ServiceModel = Extract<
|
||||
OperationResponse<"show_mdns">,
|
||||
OperationResponse<"list_mdns_services">,
|
||||
{ status: "success" }
|
||||
>["data"]["services"];
|
||||
|
||||
@@ -16,7 +16,7 @@ export const HostList: Component = () => {
|
||||
<div class="" data-tip="Refresh install targets">
|
||||
<Button
|
||||
variant="light"
|
||||
onClick={() => callApi("show_mdns", {})}
|
||||
onClick={() => callApi("list_mdns_services", {})}
|
||||
startIcon={<Icon icon="Update" />}
|
||||
></Button>
|
||||
</div>
|
||||
|
||||
@@ -15,6 +15,7 @@ from . import (
|
||||
clan,
|
||||
secrets,
|
||||
select,
|
||||
templates,
|
||||
state,
|
||||
vms,
|
||||
)
|
||||
@@ -195,6 +196,13 @@ For more detailed information, visit: {help_hyperlink("getting-started", "https:
|
||||
|
||||
clan.register_parser(parser_flake)
|
||||
|
||||
parser_templates = subparsers.add_parser(
|
||||
"templates",
|
||||
help="Subcommands to interact with templates",
|
||||
formatter_class=argparse.RawTextHelpFormatter,
|
||||
)
|
||||
templates.register_parser(parser_templates)
|
||||
|
||||
parser_flash = subparsers.add_parser(
|
||||
"flash",
|
||||
help="Flashes your machine to an USB drive",
|
||||
|
||||
@@ -4,7 +4,6 @@ import argparse
|
||||
from clan_cli.clan.inspect import register_inspect_parser
|
||||
|
||||
from .create import register_create_parser
|
||||
from .list import register_list_parser
|
||||
|
||||
|
||||
# takes a (sub)parser and configures it
|
||||
@@ -19,5 +18,3 @@ def register_parser(parser: argparse.ArgumentParser) -> None:
|
||||
register_create_parser(create_parser)
|
||||
inspect_parser = subparser.add_parser("inspect", help="Inspect a clan ")
|
||||
register_inspect_parser(inspect_parser)
|
||||
list_parser = subparser.add_parser("list", help="List clan templates")
|
||||
register_list_parser(list_parser)
|
||||
|
||||
@@ -4,36 +4,17 @@ import logging
|
||||
from pathlib import Path
|
||||
|
||||
from clan_lib.clan.create import CreateOptions, create_clan
|
||||
from clan_lib.templates import (
|
||||
InputPrio,
|
||||
)
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def register_create_parser(parser: argparse.ArgumentParser) -> None:
|
||||
parser.add_argument(
|
||||
"--input",
|
||||
type=str,
|
||||
help="""Flake input name to use as template source
|
||||
can be specified multiple times, inputs are tried in order of definition
|
||||
Example: --input clan --input clan-core
|
||||
""",
|
||||
action="append",
|
||||
default=[],
|
||||
)
|
||||
|
||||
parser.add_argument(
|
||||
"--no-self",
|
||||
help="Do not look into own flake for templates",
|
||||
action="store_true",
|
||||
default=False,
|
||||
)
|
||||
|
||||
parser.add_argument(
|
||||
"--template",
|
||||
type=str,
|
||||
help="Clan template name",
|
||||
help="""Reference to the template to use for the clan. default="default". In the format '<flake_ref>#template_name' Where <flake_ref> is a flake reference (e.g. github:org/repo) or a local path (e.g. '.' ).
|
||||
Omitting '<flake_ref>#' will use the builtin templates (e.g. just 'default' from clan-core ).
|
||||
""",
|
||||
default="default",
|
||||
)
|
||||
|
||||
@@ -59,19 +40,10 @@ def register_create_parser(parser: argparse.ArgumentParser) -> None:
|
||||
)
|
||||
|
||||
def create_flake_command(args: argparse.Namespace) -> None:
|
||||
if len(args.input) == 0:
|
||||
args.input = ["clan", "clan-core"]
|
||||
|
||||
if args.no_self:
|
||||
input_prio = InputPrio.try_inputs(tuple(args.input))
|
||||
else:
|
||||
input_prio = InputPrio.try_self_then_inputs(tuple(args.input))
|
||||
|
||||
create_clan(
|
||||
CreateOptions(
|
||||
input_prio=input_prio,
|
||||
dest=args.path,
|
||||
template_name=args.template,
|
||||
template=args.template,
|
||||
setup_git=not args.no_git,
|
||||
src_flake=args.flake,
|
||||
update_clan=not args.no_update,
|
||||
|
||||
@@ -1,24 +0,0 @@
|
||||
import argparse
|
||||
import logging
|
||||
|
||||
from clan_lib.templates import list_templates
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def list_command(args: argparse.Namespace) -> None:
|
||||
template_list = list_templates("clan", args.flake)
|
||||
|
||||
print("Available local templates:")
|
||||
for name, template in template_list.self.items():
|
||||
print(f" {name}: {template['description']}")
|
||||
|
||||
print("Available templates from inputs:")
|
||||
for input_name, input_templates in template_list.inputs.items():
|
||||
print(f" {input_name}:")
|
||||
for name, template in input_templates.items():
|
||||
print(f" {name}: {template['description']}")
|
||||
|
||||
|
||||
def register_list_parser(parser: argparse.ArgumentParser) -> None:
|
||||
parser.set_defaults(func=list_command)
|
||||
@@ -1,14 +1,14 @@
|
||||
import argparse
|
||||
import logging
|
||||
|
||||
from clan_lib.clan.get import show_clan_meta
|
||||
from clan_lib.clan.get import get_clan_details
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def show_command(args: argparse.Namespace) -> None:
|
||||
flake_path = args.flake.path
|
||||
meta = show_clan_meta(flake_path)
|
||||
meta = get_clan_details(flake_path)
|
||||
|
||||
print(f"Name: {meta.get('name')}")
|
||||
print(f"Description: {meta.get('description', '-')}")
|
||||
|
||||
@@ -17,7 +17,7 @@ from clan_cli.vars.generate import generate_vars
|
||||
from clan_cli.vars.upload import populate_secret_vars
|
||||
|
||||
from .automount import pause_automounting
|
||||
from .list import list_possible_keymaps, list_possible_languages
|
||||
from .list import list_keymaps, list_languages
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
@@ -37,7 +37,7 @@ class Disk:
|
||||
|
||||
# TODO: unify this with machine install
|
||||
@API.register
|
||||
def flash_machine(
|
||||
def run_machine_flash(
|
||||
machine: Machine,
|
||||
*,
|
||||
mode: str,
|
||||
@@ -59,7 +59,7 @@ def flash_machine(
|
||||
generate_vars([machine])
|
||||
|
||||
if system_config.language:
|
||||
if system_config.language not in list_possible_languages():
|
||||
if system_config.language not in list_languages():
|
||||
msg = (
|
||||
f"Language '{system_config.language}' is not a valid language. "
|
||||
f"Run 'clan flash list languages' to see a list of possible languages."
|
||||
@@ -68,7 +68,7 @@ def flash_machine(
|
||||
system_config_nix["i18n"] = {"defaultLocale": system_config.language}
|
||||
|
||||
if system_config.keymap:
|
||||
if system_config.keymap not in list_possible_keymaps():
|
||||
if system_config.keymap not in list_keymaps():
|
||||
msg = (
|
||||
f"Keymap '{system_config.keymap}' is not a valid keymap. "
|
||||
f"Run 'clan flash list keymaps' to see a list of possible keymaps."
|
||||
|
||||
@@ -11,7 +11,7 @@ from clan_lib.machines.machines import Machine
|
||||
|
||||
from clan_cli.completions import add_dynamic_completer, complete_machines
|
||||
|
||||
from .flash import Disk, SystemConfig, flash_machine
|
||||
from .flash import Disk, SystemConfig, run_machine_flash
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
@@ -84,7 +84,7 @@ def flash_command(args: argparse.Namespace) -> None:
|
||||
if ask != "y":
|
||||
return
|
||||
|
||||
flash_machine(
|
||||
run_machine_flash(
|
||||
machine,
|
||||
mode=opts.mode,
|
||||
disks=opts.disks,
|
||||
|
||||
@@ -2,6 +2,7 @@ import argparse
|
||||
import logging
|
||||
import os
|
||||
from pathlib import Path
|
||||
from typing import TypedDict
|
||||
|
||||
from clan_lib.api import API
|
||||
from clan_lib.cmd import Log, RunOpts, run
|
||||
@@ -11,8 +12,17 @@ from clan_lib.nix import nix_build
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class FlashOptions(TypedDict):
|
||||
languages: list[str]
|
||||
keymaps: list[str]
|
||||
|
||||
|
||||
@API.register
|
||||
def list_possible_languages() -> list[str]:
|
||||
def get_flash_options() -> FlashOptions:
|
||||
return {"languages": list_languages(), "keymaps": list_keymaps()}
|
||||
|
||||
|
||||
def list_languages() -> list[str]:
|
||||
cmd = nix_build(["nixpkgs#glibcLocales"])
|
||||
result = run(cmd, RunOpts(log=Log.STDERR, error_msg="Failed to find glibc locales"))
|
||||
locale_file = Path(result.stdout.strip()) / "share" / "i18n" / "SUPPORTED"
|
||||
@@ -37,8 +47,7 @@ def list_possible_languages() -> list[str]:
|
||||
return languages
|
||||
|
||||
|
||||
@API.register
|
||||
def list_possible_keymaps() -> list[str]:
|
||||
def list_keymaps() -> list[str]:
|
||||
cmd = nix_build(["nixpkgs#kbd"])
|
||||
result = run(cmd, RunOpts(log=Log.STDERR, error_msg="Failed to find kbdinfo"))
|
||||
keymaps_dir = Path(result.stdout.strip()) / "share" / "keymaps"
|
||||
@@ -61,11 +70,11 @@ def list_possible_keymaps() -> list[str]:
|
||||
|
||||
def list_command(args: argparse.Namespace) -> None:
|
||||
if args.cmd == "languages":
|
||||
languages = list_possible_languages()
|
||||
languages = list_languages()
|
||||
for language in languages:
|
||||
print(language)
|
||||
elif args.cmd == "keymaps":
|
||||
keymaps = list_possible_keymaps()
|
||||
keymaps = list_keymaps()
|
||||
for keymap in keymaps:
|
||||
print(keymap)
|
||||
|
||||
|
||||
@@ -1,28 +0,0 @@
|
||||
"""Common argument types and utilities for host key checking in clan CLI commands."""
|
||||
|
||||
import argparse
|
||||
|
||||
from clan_lib.ssh.host_key import HostKeyCheck
|
||||
|
||||
|
||||
def host_key_check_type(value: str) -> HostKeyCheck:
|
||||
"""
|
||||
Argparse type converter for HostKeyCheck enum.
|
||||
"""
|
||||
try:
|
||||
return HostKeyCheck(value)
|
||||
except ValueError:
|
||||
valid_values = [e.value for e in HostKeyCheck]
|
||||
msg = f"Invalid host key check mode: {value}. Valid options: {', '.join(valid_values)}"
|
||||
raise argparse.ArgumentTypeError(msg) from None
|
||||
|
||||
|
||||
def add_host_key_check_arg(
|
||||
parser: argparse.ArgumentParser, default: HostKeyCheck = HostKeyCheck.ASK
|
||||
) -> None:
|
||||
parser.add_argument(
|
||||
"--host-key-check",
|
||||
type=host_key_check_type,
|
||||
default=default,
|
||||
help=f"Host key (.ssh/known_hosts) check mode. Options: {', '.join([e.value for e in HostKeyCheck])}",
|
||||
)
|
||||
@@ -34,7 +34,7 @@ Examples:
 $ clan machines update [MACHINES]
 Will update the specified machines [MACHINES], if [MACHINES] is omitted, the command
 will attempt to update every configured machine.
-To exclude machines being updated `clan.deployment.requireExplicitUpdate = true;`
+To exclude machines being updated `clan.core.deployment.requireExplicitUpdate = true;`
 can be set in the machine config.

 $ clan machines update --tags [TAGS..]
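As a minimal sketch, opting a machine out of implicit updates with the option path given in the updated help text would look like this in its machine config:

```nix
{
  # Skipped by a bare `clan machines update`;
  # only deployed when the machine is named explicitly.
  clan.core.deployment.requireExplicitUpdate = true;
}
```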
@@ -5,14 +5,13 @@ from pathlib import Path
|
||||
from clan_lib.machines.hardware import (
|
||||
HardwareConfig,
|
||||
HardwareGenerateOptions,
|
||||
generate_machine_hardware_info,
|
||||
run_machine_hardware_info,
|
||||
)
|
||||
from clan_lib.machines.machines import Machine
|
||||
from clan_lib.machines.suggestions import validate_machine_names
|
||||
from clan_lib.ssh.remote import Remote
|
||||
|
||||
from clan_cli.completions import add_dynamic_completer, complete_machines
|
||||
from clan_cli.host_key_check import add_host_key_check_arg
|
||||
|
||||
from .types import machine_name_type
|
||||
|
||||
@@ -39,7 +38,7 @@ def update_hardware_config_command(args: argparse.Namespace) -> None:
|
||||
host_key_check=args.host_key_check, private_key=args.identity_file
|
||||
)
|
||||
|
||||
generate_machine_hardware_info(opts, target_host)
|
||||
run_machine_hardware_info(opts, target_host)
|
||||
|
||||
|
||||
def register_update_hardware_config(parser: argparse.ArgumentParser) -> None:
|
||||
@@ -56,7 +55,12 @@ def register_update_hardware_config(parser: argparse.ArgumentParser) -> None:
|
||||
nargs="?",
|
||||
help="ssh address to install to in the form of user@host:2222",
|
||||
)
|
||||
add_host_key_check_arg(parser)
|
||||
parser.add_argument(
|
||||
"--host-key-check",
|
||||
choices=["strict", "ask", "tofu", "none"],
|
||||
default="ask",
|
||||
help="Host key (.ssh/known_hosts) check mode.",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--password",
|
||||
help="Pre-provided password the cli will prompt otherwise if needed.",
|
||||
|
||||
@@ -4,7 +4,7 @@ import sys
|
||||
from pathlib import Path
|
||||
|
||||
from clan_lib.errors import ClanError
|
||||
from clan_lib.machines.install import BuildOn, InstallOptions, install_machine
|
||||
from clan_lib.machines.install import BuildOn, InstallOptions, run_machine_install
|
||||
from clan_lib.machines.machines import Machine
|
||||
from clan_lib.ssh.remote import Remote
|
||||
|
||||
@@ -13,7 +13,6 @@ from clan_cli.completions import (
|
||||
complete_machines,
|
||||
complete_target_host,
|
||||
)
|
||||
from clan_cli.host_key_check import add_host_key_check_arg
|
||||
from clan_cli.machines.hardware import HardwareConfig
|
||||
from clan_cli.ssh.deploy_info import DeployInfo, find_reachable_host, ssh_command_parse
|
||||
|
||||
@@ -66,7 +65,7 @@ def install_command(args: argparse.Namespace) -> None:
|
||||
if ask != "y":
|
||||
return None
|
||||
|
||||
return install_machine(
|
||||
return run_machine_install(
|
||||
InstallOptions(
|
||||
machine=machine,
|
||||
kexec=args.kexec,
|
||||
@@ -98,7 +97,12 @@ def register_install_parser(parser: argparse.ArgumentParser) -> None:
|
||||
help="do not reboot after installation (deprecated)",
|
||||
default=False,
|
||||
)
|
||||
add_host_key_check_arg(parser)
|
||||
parser.add_argument(
|
||||
"--host-key-check",
|
||||
choices=["strict", "ask", "tofu", "none"],
|
||||
default="ask",
|
||||
help="Host key (.ssh/known_hosts) check mode.",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--build-on",
|
||||
choices=[x.value for x in BuildOn],
|
||||
|
||||
@@ -4,10 +4,12 @@ import sys
|
||||
|
||||
from clan_lib.async_run import AsyncContext, AsyncOpts, AsyncRuntime
|
||||
from clan_lib.errors import ClanError
|
||||
from clan_lib.machines.list import list_full_machines, query_machines_by_tags
|
||||
from clan_lib.flake.flake import Flake
|
||||
from clan_lib.machines.actions import list_machines
|
||||
from clan_lib.machines.list import instantiate_inventory_to_machines
|
||||
from clan_lib.machines.machines import Machine
|
||||
from clan_lib.machines.suggestions import validate_machine_names
|
||||
from clan_lib.machines.update import deploy_machine
|
||||
from clan_lib.machines.update import run_machine_deploy
|
||||
from clan_lib.nix import nix_config
|
||||
from clan_lib.ssh.remote import Remote
|
||||
|
||||
@@ -16,129 +18,139 @@ from clan_cli.completions import (
|
||||
complete_machines,
|
||||
complete_tags,
|
||||
)
|
||||
from clan_cli.host_key_check import add_host_key_check_arg
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def requires_explicit_update(m: Machine) -> bool:
|
||||
try:
|
||||
if m.select("config.clan.deployment.requireExplicitUpdate"):
|
||||
return False
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
try:
|
||||
# check if the machine has a target host set
|
||||
m.target_host # noqa: B018
|
||||
except ClanError:
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
|
||||
def get_machines_for_update(
|
||||
flake: Flake,
|
||||
explicit_names: list[str],
|
||||
filter_tags: list[str],
|
||||
) -> list[Machine]:
|
||||
all_machines = list_machines(flake)
|
||||
machines_with_tags = list_machines(flake, {"filter": {"tags": filter_tags}})
|
||||
|
||||
if filter_tags and not machines_with_tags:
|
||||
msg = f"No machines found with tags: {' AND '.join(filter_tags)}"
|
||||
raise ClanError(msg)
|
||||
|
||||
# Implicit update all machines / with tags
|
||||
# Using tags is not an explicit update
|
||||
if not explicit_names:
|
||||
machines_to_update = list(
|
||||
filter(
|
||||
requires_explicit_update,
|
||||
instantiate_inventory_to_machines(flake, machines_with_tags).values(),
|
||||
)
|
||||
)
|
||||
# all machines that are in the clan but not included in the update list
|
||||
machine_names_to_update = [m.name for m in machines_to_update]
|
||||
ignored_machines = {
|
||||
m_name for m_name in all_machines if m_name not in machine_names_to_update
|
||||
}
|
||||
|
||||
if not machines_to_update and ignored_machines:
|
||||
print(
|
||||
"WARNING: No machines to update.\n"
|
||||
"The following defined machines were ignored because they\n"
|
||||
"- Require explicit update (see 'requireExplicitUpdate')\n",
|
||||
file=sys.stderr,
|
||||
)
|
||||
for m in ignored_machines:
|
||||
print(m, file=sys.stderr)
|
||||
|
||||
return machines_to_update
|
||||
|
||||
# Else: Explicit update
|
||||
machines_to_update = []
|
||||
valid_names = validate_machine_names(explicit_names, flake)
|
||||
for name in valid_names:
|
||||
inventory_machine = machines_with_tags.get(name)
|
||||
if not inventory_machine:
|
||||
msg = "This is an internal bug"
|
||||
raise ClanError(msg)
|
||||
|
||||
machines_to_update.append(
|
||||
Machine.from_inventory(name, flake, inventory_machine)
|
||||
)
|
||||
|
||||
return machines_to_update
|
||||
|
||||
|
||||
def update_command(args: argparse.Namespace) -> None:
|
||||
try:
|
||||
if args.flake is None:
|
||||
msg = "Could not find clan flake toplevel directory"
|
||||
raise ClanError(msg)
|
||||
|
||||
all_machines: list[Machine] = []
|
||||
if args.tags:
|
||||
tag_filtered_machines = query_machines_by_tags(args.flake, args.tags)
|
||||
if args.machines:
|
||||
selected_machines = [
|
||||
name for name in args.machines if name in tag_filtered_machines
|
||||
]
|
||||
else:
|
||||
selected_machines = list(tag_filtered_machines.keys())
|
||||
else:
|
||||
selected_machines = (
|
||||
args.machines
|
||||
if args.machines
|
||||
else list(list_full_machines(args.flake).keys())
|
||||
)
|
||||
machines_to_update = get_machines_for_update(
|
||||
args.flake, args.machines, args.tags
|
||||
)
|
||||
|
||||
if args.tags and not selected_machines:
|
||||
msg = f"No machines found with tags: {', '.join(args.tags)}"
|
||||
raise ClanError(msg)
|
||||
|
||||
if args.machines:
|
||||
validate_machine_names(args.machines, args.flake)
|
||||
|
||||
for machine_name in selected_machines:
|
||||
machine = Machine(name=machine_name, flake=args.flake)
|
||||
all_machines.append(machine)
|
||||
|
||||
if args.target_host is not None and len(all_machines) > 1:
|
||||
if args.target_host is not None and len(machines_to_update) > 1:
|
||||
msg = "Target Host can only be set for one machines"
|
||||
raise ClanError(msg)
|
||||
|
||||
def filter_machine(m: Machine) -> bool:
|
||||
try:
|
||||
if m.select("config.clan.deployment.requireExplicitUpdate"):
|
||||
return False
|
||||
except Exception:
|
||||
pass
|
||||
# Prepopulate the cache
|
||||
config = nix_config()
|
||||
system = config["system"]
|
||||
machine_names = [machine.name for machine in machines_to_update]
|
||||
args.flake.precache(
|
||||
[
|
||||
f"clanInternals.machines.{system}.{{{','.join(machine_names)}}}.config.clan.core.vars.generators.*.validationHash",
|
||||
f"clanInternals.machines.{system}.{{{','.join(machine_names)}}}.config.clan.deployment.requireExplicitUpdate",
|
||||
f"clanInternals.machines.{system}.{{{','.join(machine_names)}}}.config.system.clan.deployment.nixosMobileWorkaround",
|
||||
f"clanInternals.machines.{system}.{{{','.join(machine_names)}}}.config.clan.core.facts.secretModule",
|
||||
f"clanInternals.machines.{system}.{{{','.join(machine_names)}}}.config.clan.core.facts.publicModule",
|
||||
f"clanInternals.machines.{system}.{{{','.join(machine_names)}}}.config.clan.core.vars.settings.secretModule",
|
||||
f"clanInternals.machines.{system}.{{{','.join(machine_names)}}}.config.clan.core.vars.settings.publicModule",
|
||||
f"clanInternals.machines.{system}.{{{','.join(machine_names)}}}.config.clan.core.facts.services",
|
||||
f"clanInternals.machines.{system}.{{{','.join(machine_names)}}}.config.clan.core.vars.generators.*.{{share,dependencies,migrateFact,prompts}}",
|
||||
f"clanInternals.machines.{system}.{{{','.join(machine_names)}}}.config.clan.core.vars.generators.*.files.*.{{secret,deploy,owner,group,mode,neededFor}}",
|
||||
f"clanInternals.machines.{system}.{{{','.join(machine_names)}}}.config.clan.core.facts.secretUploadDirectory",
|
||||
]
|
||||
)
|
||||
|
||||
try:
|
||||
# check if the machine has a target host set
|
||||
m.target_host # noqa: B018
|
||||
except ClanError:
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
machines_to_update = all_machines
|
||||
implicit_all: bool = len(args.machines) == 0 and not args.tags
|
||||
if implicit_all:
|
||||
machines_to_update = list(filter(filter_machine, all_machines))
|
||||
|
||||
# machines that are in the list but not included in the update list
|
||||
ignored_machines = {m.name for m in all_machines if m not in machines_to_update}
|
||||
|
||||
if not machines_to_update and ignored_machines:
|
||||
print(
|
||||
"WARNING: No machines to update.\n"
|
||||
"The following defined machines were ignored because they\n"
|
||||
"- Require explicit update (see 'requireExplicitUpdate')\n",
|
||||
"- Might not have the `clan.core.networking.targetHost` nixos option set:\n",
|
||||
file=sys.stderr,
|
||||
)
|
||||
for m in ignored_machines:
|
||||
print(m, file=sys.stderr)
|
||||
|
||||
if machines_to_update:
|
||||
# Prepopulate the cache
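# Fetching these selectors up front fills the evaluation cache so later
# per-machine lookups do not each trigger a separate Nix evaluation.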
|
||||
config = nix_config()
|
||||
system = config["system"]
|
||||
machine_names = [machine.name for machine in machines_to_update]
|
||||
args.flake.precache(
|
||||
[
|
||||
f"clanInternals.machines.{system}.{{{','.join(machine_names)}}}.config.clan.core.vars.generators.*.validationHash",
|
||||
f"clanInternals.machines.{system}.{{{','.join(machine_names)}}}.config.clan.deployment.requireExplicitUpdate",
|
||||
f"clanInternals.machines.{system}.{{{','.join(machine_names)}}}.config.system.clan.deployment.nixosMobileWorkaround",
|
||||
f"clanInternals.machines.{system}.{{{','.join(machine_names)}}}.config.clan.core.facts.secretModule",
|
||||
f"clanInternals.machines.{system}.{{{','.join(machine_names)}}}.config.clan.core.facts.publicModule",
|
||||
f"clanInternals.machines.{system}.{{{','.join(machine_names)}}}.config.clan.core.vars.settings.secretModule",
|
||||
f"clanInternals.machines.{system}.{{{','.join(machine_names)}}}.config.clan.core.vars.settings.publicModule",
|
||||
f"clanInternals.machines.{system}.{{{','.join(machine_names)}}}.config.clan.core.facts.services",
|
||||
f"clanInternals.machines.{system}.{{{','.join(machine_names)}}}.config.clan.core.vars.generators.*.{{share,dependencies,migrateFact,prompts}}",
|
||||
f"clanInternals.machines.{system}.{{{','.join(machine_names)}}}.config.clan.core.vars.generators.*.files.*.{{secret,deploy,owner,group,mode,neededFor}}",
|
||||
f"clanInternals.machines.{system}.{{{','.join(machine_names)}}}.config.clan.core.facts.secretUploadDirectory",
|
||||
f"clanInternals.machines.{system}.{{{','.join(machine_names)}}}.config.clan.vars.password-store.secretLocation",
|
||||
f"clanInternals.machines.{system}.{{{','.join(machine_names)}}}.config.clan.core.vars.settings.passBackend",
|
||||
]
|
||||
)
|
||||
|
||||
host_key_check = args.host_key_check
|
||||
with AsyncRuntime() as runtime:
|
||||
for machine in machines_to_update:
|
||||
if args.target_host:
|
||||
target_host = Remote.from_ssh_uri(
|
||||
machine_name=machine.name,
|
||||
address=args.target_host,
|
||||
).override(host_key_check=host_key_check)
|
||||
else:
|
||||
target_host = machine.target_host().override(
|
||||
host_key_check=host_key_check
|
||||
)
|
||||
runtime.async_run(
|
||||
AsyncOpts(
|
||||
tid=machine.name,
|
||||
async_ctx=AsyncContext(prefix=machine.name),
|
||||
),
|
||||
deploy_machine,
|
||||
machine=machine,
|
||||
target_host=target_host,
|
||||
build_host=machine.build_host(),
|
||||
host_key_check = args.host_key_check
|
||||
with AsyncRuntime() as runtime:
|
||||
for machine in machines_to_update:
|
||||
if args.target_host:
|
||||
target_host = Remote.from_ssh_uri(
|
||||
machine_name=machine.name,
|
||||
address=args.target_host,
|
||||
).override(host_key_check=host_key_check)
|
||||
else:
|
||||
target_host = machine.target_host().override(
|
||||
host_key_check=host_key_check
|
||||
)
|
||||
runtime.join_all()
|
||||
runtime.check_all()
|
||||
runtime.async_run(
|
||||
AsyncOpts(
|
||||
tid=machine.name,
|
||||
async_ctx=AsyncContext(prefix=machine.name),
|
||||
),
|
||||
run_machine_deploy,
|
||||
machine=machine,
|
||||
target_host=target_host,
|
||||
build_host=machine.build_host(),
|
||||
)
|
||||
runtime.join_all()
|
||||
runtime.check_all()
|
||||
|
||||
except KeyboardInterrupt:
|
||||
log.warning("Interrupted by user")
|
||||
@@ -164,7 +176,12 @@ def register_update_parser(parser: argparse.ArgumentParser) -> None:
|
||||
)
|
||||
add_dynamic_completer(tag_parser, complete_tags)
|
||||
|
||||
add_host_key_check_arg(parser)
|
||||
parser.add_argument(
|
||||
"--host-key-check",
|
||||
choices=["strict", "ask", "tofu", "none"],
|
||||
default="ask",
|
||||
help="Host key (.ssh/known_hosts) check mode.",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--target-host",
|
||||
type=str,
|
||||
|
||||
pkgs/clan-cli/clan_cli/machines/update_test.py (new file, 162 lines)
@@ -0,0 +1,162 @@
|
||||
import pytest
|
||||
from clan_lib.flake import Flake
|
||||
|
||||
from clan_cli.machines.update import get_machines_for_update
|
||||
|
||||
# Functions to test
|
||||
from clan_cli.tests.fixtures_flakes import FlakeForTest
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
("test_flake_with_core", "explicit_names", "filter_tags", "expected_names"),
|
||||
[
|
||||
(
|
||||
{
|
||||
"inventory_expr": r"""{
|
||||
machines.jon = { tags = [ "foo" "bar" ]; };
|
||||
machines.sara = { tags = [ "foo" "baz" ]; };
|
||||
}"""
|
||||
},
|
||||
["jon"], # explizit names
|
||||
[], # filter tags
|
||||
["jon"], # expected
|
||||
)
|
||||
],
|
||||
# Important!
|
||||
# tells pytest to pass these values to the fixture
|
||||
# So we can write it to the flake fixtures
|
||||
indirect=["test_flake_with_core"],
|
||||
)
|
||||
@pytest.mark.with_core
|
||||
def test_get_machines_for_update_single_name(
|
||||
test_flake_with_core: FlakeForTest,
|
||||
explicit_names: list[str],
|
||||
filter_tags: list[str],
|
||||
expected_names: list[str],
|
||||
) -> None:
|
||||
selected_for_update = get_machines_for_update(
|
||||
Flake(str(test_flake_with_core.path)),
|
||||
explicit_names=explicit_names,
|
||||
filter_tags=filter_tags,
|
||||
)
|
||||
names = [m.name for m in selected_for_update]
|
||||
|
||||
print(explicit_names, filter_tags)
|
||||
assert names == expected_names
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
("test_flake_with_core", "explicit_names", "filter_tags", "expected_names"),
|
||||
[
|
||||
(
|
||||
{
|
||||
"inventory_expr": r"""{
|
||||
machines.jon = { tags = [ "foo" "bar" ]; };
|
||||
machines.sara = { tags = [ "foo" "baz" ]; };
|
||||
}"""
|
||||
},
|
||||
[], # explicit names
|
||||
["foo"], # filter tags
|
||||
["jon", "sara"], # expected
|
||||
)
|
||||
],
|
||||
# Important!
|
||||
# tells pytest to pass these values to the fixture
|
||||
# So we can write it to the flake fixtures
|
||||
indirect=["test_flake_with_core"],
|
||||
)
|
||||
@pytest.mark.with_core
|
||||
def test_get_machines_for_update_tags(
|
||||
test_flake_with_core: FlakeForTest,
|
||||
explicit_names: list[str],
|
||||
filter_tags: list[str],
|
||||
expected_names: list[str],
|
||||
) -> None:
|
||||
selected_for_update = get_machines_for_update(
|
||||
Flake(str(test_flake_with_core.path)),
|
||||
explicit_names=explicit_names,
|
||||
filter_tags=filter_tags,
|
||||
)
|
||||
names = [m.name for m in selected_for_update]
|
||||
|
||||
print(explicit_names, filter_tags)
|
||||
assert names == expected_names
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
("test_flake_with_core", "explicit_names", "filter_tags", "expected_names"),
|
||||
[
|
||||
(
|
||||
{
|
||||
"inventory_expr": r"""{
|
||||
machines.jon = { tags = [ "foo" "bar" ]; };
|
||||
machines.sara = { tags = [ "foo" "baz" ]; };
|
||||
}"""
|
||||
},
|
||||
["sara"], # explizit names
|
||||
["foo"], # filter tags
|
||||
["sara"], # expected
|
||||
)
|
||||
],
|
||||
# Important!
|
||||
# tells pytest to pass these values to the fixture
|
||||
# So we can write it to the flake fixtures
|
||||
indirect=["test_flake_with_core"],
|
||||
)
|
||||
@pytest.mark.with_core
|
||||
def test_get_machines_for_update_tags_and_name(
|
||||
test_flake_with_core: FlakeForTest,
|
||||
explicit_names: list[str],
|
||||
filter_tags: list[str],
|
||||
expected_names: list[str],
|
||||
) -> None:
|
||||
selected_for_update = get_machines_for_update(
|
||||
Flake(str(test_flake_with_core.path)),
|
||||
explicit_names=explicit_names,
|
||||
filter_tags=filter_tags,
|
||||
)
|
||||
names = [m.name for m in selected_for_update]
|
||||
|
||||
print(explicit_names, filter_tags)
|
||||
assert names == expected_names
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
("test_flake_with_core", "explicit_names", "filter_tags", "expected_names"),
|
||||
[
|
||||
(
|
||||
{
|
||||
"inventory_expr": r"""{
|
||||
machines.jon = { tags = [ "foo" "bar" ]; };
|
||||
machines.sara = { tags = [ "foo" "baz" ]; };
|
||||
}"""
|
||||
},
|
||||
[], # no explicit names
|
||||
[], # no filter tags
|
||||
["jon", "sara", "vm1", "vm2"], # all machines
|
||||
),
|
||||
],
|
||||
# Important!
|
||||
# tells pytest to pass these values to the fixture
|
||||
# So we can write it to the flake fixtures
|
||||
indirect=["test_flake_with_core"],
|
||||
)
|
||||
@pytest.mark.with_core
|
||||
def test_get_machines_for_update_implicit_all(
|
||||
test_flake_with_core: FlakeForTest,
|
||||
explicit_names: list[str],
|
||||
filter_tags: list[str],
|
||||
expected_names: list[str],
|
||||
) -> None:
|
||||
selected_for_update = get_machines_for_update(
|
||||
Flake(str(test_flake_with_core.path)),
|
||||
explicit_names=explicit_names,
|
||||
filter_tags=filter_tags,
|
||||
)
|
||||
names = [m.name for m in selected_for_update]
|
||||
|
||||
print(explicit_names, filter_tags)
|
||||
assert names == expected_names
|
||||
|
||||
|
||||
# TODO: Add more tests for requireExplicitUpdate
|
||||
@@ -13,7 +13,6 @@ from pathlib import Path
|
||||
from tempfile import NamedTemporaryFile
|
||||
from typing import IO, Any
|
||||
|
||||
from clan_lib.api import API
|
||||
from clan_lib.cmd import Log, RunOpts, run
|
||||
from clan_lib.dirs import user_config_dir
|
||||
from clan_lib.errors import ClanError
|
||||
@@ -62,7 +61,7 @@ class KeyType(enum.Enum):
|
||||
|
||||
try:
|
||||
for public_key in get_public_age_keys(content):
|
||||
log.info(
|
||||
log.debug(
|
||||
f"Found age public key from a private key "
|
||||
f"in {key_path}: {public_key}"
|
||||
)
|
||||
@@ -85,7 +84,7 @@ class KeyType(enum.Enum):
|
||||
|
||||
try:
|
||||
for public_key in get_public_age_keys(content):
|
||||
log.info(
|
||||
log.debug(
|
||||
f"Found age public key from a private key "
|
||||
f"in the environment (SOPS_AGE_KEY): {public_key}"
|
||||
)
|
||||
@@ -107,7 +106,7 @@ class KeyType(enum.Enum):
|
||||
if pgp_fingerprints := os.environ.get("SOPS_PGP_FP"):
|
||||
for fp in pgp_fingerprints.strip().split(","):
|
||||
msg = f"Found PGP public key in the environment (SOPS_PGP_FP): {fp}"
|
||||
log.info(msg)
|
||||
log.debug(msg)
|
||||
keyring.append(fp)
|
||||
return keyring
|
||||
|
||||
@@ -398,7 +397,6 @@ def default_admin_private_key_path() -> Path:
|
||||
return user_config_dir() / "sops" / "age" / "keys.txt"
|
||||
|
||||
|
||||
@API.register
|
||||
def maybe_get_admin_public_keys() -> list[SopsKey] | None:
|
||||
keyring = SopsKey.collect_public_keys()
|
||||
|
||||
|
||||
@@ -8,14 +8,12 @@ from typing import Any
|
||||
from clan_lib.cmd import run
|
||||
from clan_lib.errors import ClanError
|
||||
from clan_lib.nix import nix_shell
|
||||
from clan_lib.ssh.host_key import HostKeyCheck
|
||||
from clan_lib.ssh.remote import Remote
|
||||
from clan_lib.ssh.remote import HostKeyCheck, Remote
|
||||
|
||||
from clan_cli.completions import (
|
||||
add_dynamic_completer,
|
||||
complete_machines,
|
||||
)
|
||||
from clan_cli.host_key_check import add_host_key_check_arg
|
||||
from clan_cli.ssh.tor import TorTarget, spawn_tor, ssh_tor_reachable
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
@@ -100,7 +98,7 @@ def find_reachable_host(deploy_info: DeployInfo) -> Remote | None:
|
||||
return deploy_info.addrs[0]
|
||||
|
||||
for addr in deploy_info.addrs:
|
||||
if addr.is_ssh_reachable():
|
||||
if addr.check_machine_ssh_reachable():
|
||||
return addr
|
||||
return None
|
||||
|
||||
@@ -183,5 +181,10 @@ def register_parser(parser: argparse.ArgumentParser) -> None:
|
||||
"--png",
|
||||
help="specify the json file for ssh data as the qrcode image (generated by starting the clan installer)",
|
||||
)
|
||||
add_host_key_check_arg(parser, default=HostKeyCheck.TOFU)
|
||||
parser.add_argument(
|
||||
"--host-key-check",
|
||||
choices=["strict", "ask", "tofu", "none"],
|
||||
default="tofu",
|
||||
help="Host key (.ssh/known_hosts) check mode.",
|
||||
)
|
||||
parser.set_defaults(func=ssh_command)
|
||||
|
||||
@@ -4,7 +4,6 @@ from pathlib import Path
|
||||
import pytest
|
||||
from clan_lib.cmd import RunOpts, run
|
||||
from clan_lib.nix import nix_shell
|
||||
from clan_lib.ssh.host_key import HostKeyCheck
|
||||
from clan_lib.ssh.remote import Remote
|
||||
|
||||
from clan_cli.ssh.deploy_info import DeployInfo, find_reachable_host
|
||||
@@ -24,7 +23,7 @@ def test_qrcode_scan(temp_dir: Path) -> None:
|
||||
run(cmd, RunOpts(input=data.encode()))
|
||||
|
||||
# Call the qrcode_scan function
|
||||
deploy_info = DeployInfo.from_qr_code(img_path, HostKeyCheck.NONE)
|
||||
deploy_info = DeployInfo.from_qr_code(img_path, "none")
|
||||
|
||||
host = deploy_info.addrs[0]
|
||||
assert host.address == "192.168.122.86"
|
||||
@@ -47,7 +46,7 @@ def test_qrcode_scan(temp_dir: Path) -> None:
|
||||
|
||||
def test_from_json() -> None:
|
||||
data = '{"pass":"scabbed-defender-headlock","tor":"qjeerm4r6t55hcfum4pinnvscn5njlw2g3k7ilqfuu7cdt3ahaxhsbid.onion","addrs":["192.168.122.86"]}'
|
||||
deploy_info = DeployInfo.from_json(json.loads(data), HostKeyCheck.NONE)
|
||||
deploy_info = DeployInfo.from_json(json.loads(data), "none")
|
||||
|
||||
host = deploy_info.addrs[0]
|
||||
assert host.password == "scabbed-defender-headlock"
|
||||
@@ -70,9 +69,7 @@ def test_from_json() -> None:
|
||||
@pytest.mark.with_core
|
||||
def test_find_reachable_host(hosts: list[Remote]) -> None:
|
||||
host = hosts[0]
|
||||
deploy_info = DeployInfo.from_hostnames(
|
||||
["172.19.1.2", host.ssh_url()], HostKeyCheck.NONE
|
||||
)
|
||||
deploy_info = DeployInfo.from_hostnames(["172.19.1.2", host.ssh_url()], "none")
|
||||
|
||||
assert deploy_info.addrs[0].address == "172.19.1.2"
|
||||
|
||||
|
||||
pkgs/clan-cli/clan_cli/templates/__init__.py (new file, 15 lines)
@@ -0,0 +1,15 @@
|
||||
#!/usr/bin/env python3
|
||||
import argparse
|
||||
from .list import register_list_parser
|
||||
|
||||
|
||||
# takes a (sub)parser and configures it
|
||||
def register_parser(parser: argparse.ArgumentParser) -> None:
|
||||
subparser = parser.add_subparsers(
|
||||
title="command",
|
||||
description="the command to run",
|
||||
help="the command to run",
|
||||
required=True,
|
||||
)
|
||||
list_parser = subparser.add_parser("list", help="List avilable templates")
|
||||
register_list_parser(list_parser)
|
||||
pkgs/clan-cli/clan_cli/templates/list.py (new file, 60 lines)
@@ -0,0 +1,60 @@
|
||||
import argparse
|
||||
import logging
|
||||
|
||||
from clan_lib.nix_models.clan import TemplateClanType
|
||||
from clan_lib.templates import list_templates
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def list_command(args: argparse.Namespace) -> None:
|
||||
templates = list_templates(args.flake)
|
||||
|
||||
# Display all templates
|
||||
for i, (template_type, _builtin_template_set) in enumerate(
|
||||
templates.builtins.items()
|
||||
):
|
||||
builtin_template_set: TemplateClanType | None = templates.builtins.get(
|
||||
template_type, None
|
||||
) # type: ignore
|
||||
if not builtin_template_set:
|
||||
continue
|
||||
|
||||
print(f"Avilable '{template_type}' templates")
|
||||
print("├── <builtin>")
|
||||
for i, (name, template) in enumerate(builtin_template_set.items()):
|
||||
description = template.get("description", "no description")
|
||||
is_last_template = i == len(builtin_template_set.items()) - 1
|
||||
if not is_last_template:
|
||||
print(f"│ ├── {name}: {description}")
|
||||
else:
|
||||
print(f"│ └── {name}: {description}")
|
||||
|
||||
for i, (input_name, input_templates) in enumerate(templates.custom.items()):
|
||||
custom_templates: TemplateClanType | None = input_templates.get(
|
||||
template_type, None
|
||||
) # type: ignore
|
||||
if not custom_templates:
|
||||
continue
|
||||
|
||||
is_last_input = i == len(templates.custom.items()) - 1
|
||||
prefix = "│" if not is_last_input else " "
|
||||
if not is_last_input:
|
||||
print(f"├── inputs.{input_name}:")
|
||||
else:
|
||||
print(f"└── inputs.{input_name}:")
|
||||
|
||||
for i, (name, template) in enumerate(custom_templates.items()):
|
||||
is_last_template = i == len(custom_templates.items()) - 1
|
||||
if not is_last_template:
|
||||
print(
|
||||
f"{prefix} ├── {name}: {template.get('description', 'no description')}"
|
||||
)
|
||||
else:
|
||||
print(
|
||||
f"{prefix} └── {name}: {template.get('description', 'no description')}"
|
||||
)
|
||||
|
||||
|
||||
def register_list_parser(parser: argparse.ArgumentParser) -> None:
|
||||
parser.set_defaults(func=list_command)
|
||||
@@ -4,7 +4,6 @@ from pathlib import Path
|
||||
|
||||
import pytest
|
||||
from clan_cli.tests.sshd import Sshd
|
||||
from clan_lib.ssh.host_key import HostKeyCheck
|
||||
from clan_lib.ssh.remote import Remote
|
||||
|
||||
|
||||
@@ -17,7 +16,7 @@ def hosts(sshd: Sshd) -> list[Remote]:
|
||||
port=sshd.port,
|
||||
user=login,
|
||||
private_key=Path(sshd.key),
|
||||
host_key_check=HostKeyCheck.NONE,
|
||||
host_key_check="none",
|
||||
command_prefix="local_test",
|
||||
)
|
||||
]
|
||||
|
||||
@@ -1,63 +1,15 @@
|
||||
# mypy: disable-error-code="var-annotated"
|
||||
|
||||
import json
|
||||
from pathlib import Path
|
||||
from typing import Any
|
||||
|
||||
import pytest
|
||||
from pathlib import Path
|
||||
|
||||
from clan_cli.tests.fixtures_flakes import FlakeForTest
|
||||
from clan_lib.cmd import run
|
||||
from clan_lib.flake import Flake
|
||||
from clan_lib.git import commit_file
|
||||
from clan_lib.locked_open import locked_open
|
||||
from clan_lib.nix import nix_command
|
||||
from clan_lib.templates import (
|
||||
ClanExports,
|
||||
InputName,
|
||||
TemplateName,
|
||||
get_clan_nix_attrset,
|
||||
get_template,
|
||||
list_templates,
|
||||
)
|
||||
|
||||
from clan_lib.templates import list_templates
|
||||
from clan_lib.templates.filesystem import copy_from_nixstore
|
||||
|
||||
|
||||
# Function to write clan attributes to a file
|
||||
def write_clan_attr(clan_attrset: dict[str, Any], flake: FlakeForTest) -> None:
|
||||
file = flake.path / "clan_attrs.json"
|
||||
with locked_open(file, "w") as cfile:
|
||||
json.dump(clan_attrset, cfile, indent=2)
|
||||
|
||||
commit_file(file, flake.path, "Add clan attributes")
|
||||
|
||||
|
||||
# Common function to test clan nix attrset
|
||||
def nix_attr_tester(
|
||||
test_flake_with_core: FlakeForTest,
|
||||
injected: dict[str, Any],
|
||||
expected_self: dict[str, Any],
|
||||
test_number: int,
|
||||
) -> ClanExports:
|
||||
write_clan_attr(injected, test_flake_with_core)
|
||||
clan_dir = Flake(str(test_flake_with_core.path))
|
||||
nix_attrset = get_clan_nix_attrset(clan_dir)
|
||||
|
||||
def recursive_sort(item: Any) -> Any:
|
||||
if isinstance(item, dict):
|
||||
return {k: recursive_sort(item[k]) for k in sorted(item)}
|
||||
if isinstance(item, list):
|
||||
return sorted(recursive_sort(elem) for elem in item)
|
||||
return item
|
||||
|
||||
returned_sorted = recursive_sort(nix_attrset["self"])
|
||||
expected_sorted = recursive_sort(expected_self["self"])
|
||||
|
||||
assert json.dumps(returned_sorted, indent=2) == json.dumps(
|
||||
expected_sorted, indent=2
|
||||
)
|
||||
return nix_attrset
|
||||
|
||||
|
||||
@pytest.mark.impure
|
||||
def test_copy_from_nixstore_symlink(
|
||||
monkeypatch: pytest.MonkeyPatch, temporary_home: Path
|
||||
@@ -86,170 +38,37 @@ def test_clan_core_templates(
|
||||
temporary_home: Path,
|
||||
) -> None:
|
||||
clan_dir = Flake(str(test_flake_with_core.path))
|
||||
nix_attrset = get_clan_nix_attrset(clan_dir)
|
||||
|
||||
clan_core_templates = nix_attrset["inputs"][InputName("clan-core")]["templates"][
|
||||
"clan"
|
||||
templates = list_templates(clan_dir)
|
||||
|
||||
assert list(templates.builtins.get("clan", {}).keys()) == [
|
||||
"default",
|
||||
"flake-parts",
|
||||
"minimal",
|
||||
"minimal-flake-parts",
|
||||
]
|
||||
clan_core_template_keys = list(clan_core_templates.keys())
|
||||
|
||||
expected_templates = ["default", "flake-parts", "minimal", "minimal-flake-parts"]
|
||||
assert clan_core_template_keys == expected_templates
|
||||
# clan.default
|
||||
default_template = templates.builtins.get("clan", {}).get("default")
|
||||
assert default_template is not None
|
||||
|
||||
vlist_temps = list_templates("clan", clan_dir)
|
||||
list_template_keys = list(vlist_temps.inputs[InputName("clan-core")].keys())
|
||||
assert list_template_keys == expected_templates
|
||||
|
||||
default_template = get_template(
|
||||
TemplateName("default"),
|
||||
"clan",
|
||||
input_prio=None,
|
||||
clan_dir=clan_dir,
|
||||
)
|
||||
template_path = default_template.get("path", None)
|
||||
assert template_path is not None
|
||||
|
||||
new_clan = temporary_home / "new_clan"
|
||||
|
||||
copy_from_nixstore(
|
||||
Path(default_template.src["path"]),
|
||||
Path(template_path),
|
||||
new_clan,
|
||||
)
|
||||
assert (new_clan / "flake.nix").exists()
|
||||
assert (new_clan / "machines").is_dir()
|
||||
assert (new_clan / "machines" / "jon").is_dir()
|
||||
config_nix_p = new_clan / "machines" / "jon" / "configuration.nix"
|
||||
assert (config_nix_p).is_file()
|
||||
|
||||
# Test if we can write to the configuration.nix file
|
||||
with config_nix_p.open("r+") as f:
|
||||
flake_nix = new_clan / "flake.nix"
|
||||
assert (flake_nix).exists()
|
||||
assert (flake_nix).is_file()
|
||||
|
||||
assert (new_clan / "machines").is_dir()
|
||||
|
||||
# Test if we can write to the flake.nix file
|
||||
with flake_nix.open("r+") as f:
|
||||
data = f.read()
|
||||
f.write(data)
|
||||
|
||||
|
||||
# Test Case 1: Minimal input with empty templates
|
||||
@pytest.mark.with_core
|
||||
def test_clan_get_nix_attrset_case_1(
|
||||
monkeypatch: pytest.MonkeyPatch,
|
||||
temporary_home: Path,
|
||||
test_flake_with_core: FlakeForTest,
|
||||
) -> None:
|
||||
test_number = 1
|
||||
injected = {"templates": {"disko": {}, "machine": {}}}
|
||||
expected = {
|
||||
"inputs": {},
|
||||
"self": {"templates": {"disko": {}, "machine": {}, "clan": {}}},
|
||||
}
|
||||
nix_attr_tester(test_flake_with_core, injected, expected, test_number)
|
||||
|
||||
|
||||
# Test Case 2: Input with one template under 'clan'
|
||||
@pytest.mark.with_core
|
||||
def test_clan_get_nix_attrset_case_2(
|
||||
monkeypatch: pytest.MonkeyPatch,
|
||||
temporary_home: Path,
|
||||
test_flake_with_core: FlakeForTest,
|
||||
) -> None:
|
||||
test_number = 2
|
||||
injected = {
|
||||
"templates": {
|
||||
"clan": {
|
||||
"example_template": {
|
||||
"description": "An example clan template.",
|
||||
"path": "/example/path",
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
expected = {
|
||||
"inputs": {},
|
||||
"self": {
|
||||
"templates": {
|
||||
"clan": {
|
||||
"example_template": {
|
||||
"description": "An example clan template.",
|
||||
"path": "/example/path",
|
||||
}
|
||||
},
|
||||
"disko": {},
|
||||
"machine": {},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
nix_attrset = nix_attr_tester(test_flake_with_core, injected, expected, test_number)
|
||||
|
||||
assert "default" in list(
|
||||
nix_attrset["inputs"][InputName("clan-core")]["templates"]["clan"].keys()
|
||||
)
|
||||
|
||||
|
||||
# Test Case 3: Input with templates under multiple types
|
||||
@pytest.mark.with_core
|
||||
def test_clan_get_nix_attrset_case_3(
|
||||
monkeypatch: pytest.MonkeyPatch,
|
||||
temporary_home: Path,
|
||||
test_flake_with_core: FlakeForTest,
|
||||
) -> None:
|
||||
test_number = 3
|
||||
injected = {
|
||||
"templates": {
|
||||
"clan": {
|
||||
"clan_template": {
|
||||
"description": "A clan template.",
|
||||
"path": "/clan/path",
|
||||
}
|
||||
},
|
||||
"disko": {
|
||||
"disko_template": {
|
||||
"description": "A disko template.",
|
||||
"path": "/disko/path",
|
||||
}
|
||||
},
|
||||
"machine": {
|
||||
"machine_template": {
|
||||
"description": "A machine template.",
|
||||
"path": "/machine/path",
|
||||
}
|
||||
},
|
||||
}
|
||||
}
|
||||
expected = {
|
||||
"inputs": {},
|
||||
"self": {
|
||||
"templates": {
|
||||
"clan": {
|
||||
"clan_template": {
|
||||
"description": "A clan template.",
|
||||
"path": "/clan/path",
|
||||
}
|
||||
},
|
||||
"disko": {
|
||||
"disko_template": {
|
||||
"description": "A disko template.",
|
||||
"path": "/disko/path",
|
||||
}
|
||||
},
|
||||
"machine": {
|
||||
"machine_template": {
|
||||
"description": "A machine template.",
|
||||
"path": "/machine/path",
|
||||
}
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
nix_attr_tester(test_flake_with_core, injected, expected, test_number)
|
||||
|
||||
|
||||
# Test Case 6: Input with missing 'templates' and 'modules' (empty clan attrset)
|
||||
@pytest.mark.with_core
|
||||
def test_clan_get_nix_attrset_case_6(
|
||||
monkeypatch: pytest.MonkeyPatch,
|
||||
temporary_home: Path,
|
||||
test_flake_with_core: FlakeForTest,
|
||||
) -> None:
|
||||
test_number = 6
|
||||
injected = {}
|
||||
expected = {
|
||||
"inputs": {},
|
||||
"self": {"templates": {"disko": {}, "machine": {}, "clan": {}}},
|
||||
}
|
||||
nix_attr_tester(test_flake_with_core, injected, expected, test_number)
|
||||
|
||||
@@ -10,11 +10,11 @@ from clan_cli.tests.helpers import cli
|
||||
from clan_cli.vars.check import check_vars
|
||||
from clan_cli.vars.generate import (
|
||||
Generator,
|
||||
generate_vars_for_machine,
|
||||
generate_vars_for_machine_interactive,
|
||||
get_generators_closure,
|
||||
run_generators,
|
||||
create_machine_vars_interactive,
|
||||
get_generators,
|
||||
)
|
||||
from clan_cli.vars.get import get_var
|
||||
from clan_cli.vars.get import get_machine_var
|
||||
from clan_cli.vars.graph import all_missing_closure, requested_closure
|
||||
from clan_cli.vars.list import stringify_all_vars
|
||||
from clan_cli.vars.public_modules import in_repo
|
||||
@@ -172,25 +172,23 @@ def test_generate_public_and_secret_vars(
|
||||
in commit_message
|
||||
)
|
||||
assert (
|
||||
get_var(
|
||||
get_machine_var(
|
||||
str(machine.flake.path), machine.name, "my_generator/my_value"
|
||||
).printable_value
|
||||
== "public"
|
||||
)
|
||||
assert (
|
||||
get_var(
|
||||
get_machine_var(
|
||||
str(machine.flake.path), machine.name, "my_shared_generator/my_shared_value"
|
||||
).printable_value
|
||||
== "shared"
|
||||
)
|
||||
vars_text = stringify_all_vars(machine)
|
||||
in_repo_store = in_repo.FactStore(
|
||||
Machine(name="my_machine", flake=Flake(str(flake.path)))
|
||||
machine="my_machine", flake=Flake(str(flake.path))
|
||||
)
|
||||
assert not in_repo_store.exists(Generator("my_generator"), "my_secret")
|
||||
sops_store = sops.SecretStore(
|
||||
Machine(name="my_machine", flake=Flake(str(flake.path)))
|
||||
)
|
||||
sops_store = sops.SecretStore(machine="my_machine", flake=Flake(str(flake.path)))
|
||||
assert sops_store.exists(Generator("my_generator"), "my_secret")
|
||||
assert sops_store.get(Generator("my_generator"), "my_secret").decode() == "secret"
|
||||
assert sops_store.exists(Generator("dependent_generator"), "my_secret")
|
||||
@@ -265,12 +263,10 @@ def test_generate_secret_var_sops_with_default_group(
|
||||
cli.run(["secrets", "groups", "add-user", "my_group", sops_setup.user])
|
||||
cli.run(["vars", "generate", "--flake", str(flake.path), "my_machine"])
|
||||
in_repo_store = in_repo.FactStore(
|
||||
Machine(name="my_machine", flake=Flake(str(flake.path)))
|
||||
machine="my_machine", flake=Flake(str(flake.path))
|
||||
)
|
||||
assert not in_repo_store.exists(Generator("first_generator"), "my_secret")
|
||||
sops_store = sops.SecretStore(
|
||||
Machine(name="my_machine", flake=Flake(str(flake.path)))
|
||||
)
|
||||
sops_store = sops.SecretStore(machine="my_machine", flake=Flake(str(flake.path)))
|
||||
assert sops_store.exists(Generator("first_generator"), "my_secret")
|
||||
assert (
|
||||
sops_store.get(Generator("first_generator"), "my_secret").decode() == "hello\n"
|
||||
@@ -355,8 +351,8 @@ def test_generated_shared_secret_sops(
|
||||
cli.run(["vars", "generate", "--flake", str(flake.path), "machine2"])
|
||||
assert check_vars(machine2.name, machine2.flake)
|
||||
assert check_vars(machine2.name, machine2.flake)
|
||||
m1_sops_store = sops.SecretStore(machine1)
|
||||
m2_sops_store = sops.SecretStore(machine2)
|
||||
m1_sops_store = sops.SecretStore(machine1.name, machine1.flake)
|
||||
m2_sops_store = sops.SecretStore(machine2.name, machine2.flake)
|
||||
assert m1_sops_store.exists(
|
||||
Generator("my_shared_generator", share=True), "my_shared_secret"
|
||||
)
|
||||
@@ -403,12 +399,25 @@ def test_generate_secret_var_password_store(
|
||||
shutil.copytree(test_root / "data" / "password-store", password_store_dir)
|
||||
monkeypatch.setenv("PASSWORD_STORE_DIR", str(password_store_dir))
|
||||
|
||||
# Initialize password store as a git repository
|
||||
import subprocess
|
||||
|
||||
subprocess.run(["git", "init"], cwd=password_store_dir, check=True)
|
||||
subprocess.run(
|
||||
["git", "config", "user.email", "test@example.com"],
|
||||
cwd=password_store_dir,
|
||||
check=True,
|
||||
)
|
||||
subprocess.run(
|
||||
["git", "config", "user.name", "Test User"], cwd=password_store_dir, check=True
|
||||
)
|
||||
|
||||
machine = Machine(name="my_machine", flake=Flake(str(flake.path)))
|
||||
assert not check_vars(machine.name, machine.flake)
|
||||
cli.run(["vars", "generate", "--flake", str(flake.path), "my_machine"])
|
||||
assert check_vars(machine.name, machine.flake)
|
||||
store = password_store.SecretStore(
|
||||
Machine(name="my_machine", flake=Flake(str(flake.path)))
|
||||
machine="my_machine", flake=Flake(str(flake.path))
|
||||
)
|
||||
assert store.exists(Generator("my_generator", share=False, files=[]), "my_secret")
|
||||
assert not store.exists(
|
||||
@@ -483,12 +492,8 @@ def test_generate_secret_for_multiple_machines(
|
||||
monkeypatch.chdir(flake.path)
|
||||
cli.run(["vars", "generate", "--flake", str(flake.path)])
|
||||
# check if public vars have been created correctly
|
||||
in_repo_store1 = in_repo.FactStore(
|
||||
Machine(name="machine1", flake=Flake(str(flake.path)))
|
||||
)
|
||||
in_repo_store2 = in_repo.FactStore(
|
||||
Machine(name="machine2", flake=Flake(str(flake.path)))
|
||||
)
|
||||
in_repo_store1 = in_repo.FactStore(machine="machine1", flake=Flake(str(flake.path)))
|
||||
in_repo_store2 = in_repo.FactStore(machine="machine2", flake=Flake(str(flake.path)))
|
||||
assert in_repo_store1.exists(Generator("my_generator"), "my_value")
|
||||
assert in_repo_store2.exists(Generator("my_generator"), "my_value")
|
||||
assert (
|
||||
@@ -500,12 +505,8 @@ def test_generate_secret_for_multiple_machines(
|
||||
== "machine2\n"
|
||||
)
|
||||
# check if secret vars have been created correctly
|
||||
sops_store1 = sops.SecretStore(
|
||||
Machine(name="machine1", flake=Flake(str(flake.path)))
|
||||
)
|
||||
sops_store2 = sops.SecretStore(
|
||||
Machine(name="machine2", flake=Flake(str(flake.path)))
|
||||
)
|
||||
sops_store1 = sops.SecretStore(machine="machine1", flake=Flake(str(flake.path)))
|
||||
sops_store2 = sops.SecretStore(machine="machine2", flake=Flake(str(flake.path)))
|
||||
assert sops_store1.exists(Generator("my_generator"), "my_secret")
|
||||
assert sops_store2.exists(Generator("my_generator"), "my_secret")
|
||||
assert (
|
||||
@@ -550,7 +551,7 @@ def test_prompt(
|
||||
)
|
||||
cli.run(["vars", "generate", "--flake", str(flake.path), "my_machine"])
|
||||
in_repo_store = in_repo.FactStore(
|
||||
Machine(name="my_machine", flake=Flake(str(flake.path)))
|
||||
machine="my_machine", flake=Flake(str(flake.path))
|
||||
)
|
||||
assert in_repo_store.exists(Generator("my_generator"), "line_value")
|
||||
assert (
|
||||
@@ -563,9 +564,7 @@ def test_prompt(
|
||||
in_repo_store.get(Generator("my_generator"), "multiline_value").decode()
|
||||
== "my\nmultiline\ninput\n"
|
||||
)
|
||||
sops_store = sops.SecretStore(
|
||||
Machine(name="my_machine", flake=Flake(str(flake.path)))
|
||||
)
|
||||
sops_store = sops.SecretStore(machine="my_machine", flake=Flake(str(flake.path)))
|
||||
assert sops_store.exists(
|
||||
Generator(name="my_generator", share=False, files=[]), "prompt_persist"
|
||||
)
|
||||
@@ -607,10 +606,10 @@ def test_multi_machine_shared_vars(
|
||||
monkeypatch.chdir(flake.path)
|
||||
machine1 = Machine(name="machine1", flake=Flake(str(flake.path)))
|
||||
machine2 = Machine(name="machine2", flake=Flake(str(flake.path)))
|
||||
sops_store_1 = sops.SecretStore(machine1)
|
||||
sops_store_2 = sops.SecretStore(machine2)
|
||||
in_repo_store_1 = in_repo.FactStore(machine1)
|
||||
in_repo_store_2 = in_repo.FactStore(machine2)
|
||||
sops_store_1 = sops.SecretStore(machine1.name, machine1.flake)
|
||||
sops_store_2 = sops.SecretStore(machine2.name, machine2.flake)
|
||||
in_repo_store_1 = in_repo.FactStore(machine1.name, machine1.flake)
|
||||
in_repo_store_2 = in_repo.FactStore(machine2.name, machine2.flake)
|
||||
generator = Generator("shared_generator", share=True)
|
||||
# generate for machine 1
|
||||
cli.run(["vars", "generate", "--flake", str(flake.path), "machine1"])
|
||||
@@ -655,7 +654,7 @@ def test_api_set_prompts(
|
||||
|
||||
monkeypatch.chdir(flake.path)
|
||||
|
||||
generate_vars_for_machine(
|
||||
run_generators(
|
||||
machine_name="my_machine",
|
||||
base_dir=flake.path,
|
||||
generators=["my_generator"],
|
||||
@@ -666,10 +665,10 @@ def test_api_set_prompts(
|
||||
},
|
||||
)
|
||||
machine = Machine(name="my_machine", flake=Flake(str(flake.path)))
|
||||
store = in_repo.FactStore(machine)
|
||||
store = in_repo.FactStore(machine.name, machine.flake)
|
||||
assert store.exists(Generator("my_generator"), "prompt1")
|
||||
assert store.get(Generator("my_generator"), "prompt1").decode() == "input1"
|
||||
generate_vars_for_machine(
|
||||
run_generators(
|
||||
machine_name="my_machine",
|
||||
base_dir=flake.path,
|
||||
generators=["my_generator"],
|
||||
@@ -681,7 +680,7 @@ def test_api_set_prompts(
|
||||
)
|
||||
assert store.get(Generator("my_generator"), "prompt1").decode() == "input2"
|
||||
|
||||
generators = get_generators_closure(
|
||||
generators = get_generators(
|
||||
machine_name="my_machine",
|
||||
base_dir=flake.path,
|
||||
full_closure=True,
|
||||
@@ -714,11 +713,11 @@ def test_stdout_of_generate(
|
||||
flake_.refresh()
|
||||
monkeypatch.chdir(flake_.path)
|
||||
flake = Flake(str(flake_.path))
|
||||
from clan_cli.vars.generate import generate_vars_for_machine_interactive
|
||||
from clan_cli.vars.generate import create_machine_vars_interactive
|
||||
|
||||
# with capture_output as output:
|
||||
with caplog.at_level(logging.INFO):
|
||||
generate_vars_for_machine_interactive(
|
||||
create_machine_vars_interactive(
|
||||
Machine(name="my_machine", flake=flake),
|
||||
"my_generator",
|
||||
regenerate=False,
|
||||
@@ -731,7 +730,7 @@ def test_stdout_of_generate(
|
||||
|
||||
set_var("my_machine", "my_generator/my_value", b"world", flake)
|
||||
with caplog.at_level(logging.INFO):
|
||||
generate_vars_for_machine_interactive(
|
||||
create_machine_vars_interactive(
|
||||
Machine(name="my_machine", flake=flake),
|
||||
"my_generator",
|
||||
regenerate=True,
|
||||
@@ -742,7 +741,7 @@ def test_stdout_of_generate(
|
||||
caplog.clear()
|
||||
# check the output when nothing gets regenerated
|
||||
with caplog.at_level(logging.INFO):
|
||||
generate_vars_for_machine_interactive(
|
||||
create_machine_vars_interactive(
|
||||
Machine(name="my_machine", flake=flake),
|
||||
"my_generator",
|
||||
regenerate=True,
|
||||
@@ -751,7 +750,7 @@ def test_stdout_of_generate(
|
||||
assert "hello" in caplog.text
|
||||
caplog.clear()
|
||||
with caplog.at_level(logging.INFO):
|
||||
generate_vars_for_machine_interactive(
|
||||
create_machine_vars_interactive(
|
||||
Machine(name="my_machine", flake=flake),
|
||||
"my_secret_generator",
|
||||
regenerate=False,
|
||||
@@ -766,7 +765,7 @@ def test_stdout_of_generate(
|
||||
Flake(str(flake.path)),
|
||||
)
|
||||
with caplog.at_level(logging.INFO):
|
||||
generate_vars_for_machine_interactive(
|
||||
create_machine_vars_interactive(
|
||||
Machine(name="my_machine", flake=flake),
|
||||
"my_secret_generator",
|
||||
regenerate=True,
|
||||
@@ -817,11 +816,9 @@ def test_migration(
|
||||
assert "Migrated var my_generator/my_value" in caplog.text
|
||||
assert "Migrated secret var my_generator/my_secret" in caplog.text
|
||||
in_repo_store = in_repo.FactStore(
|
||||
Machine(name="my_machine", flake=Flake(str(flake.path)))
|
||||
)
|
||||
sops_store = sops.SecretStore(
|
||||
Machine(name="my_machine", flake=Flake(str(flake.path)))
|
||||
machine="my_machine", flake=Flake(str(flake.path))
|
||||
)
|
||||
sops_store = sops.SecretStore(machine="my_machine", flake=Flake(str(flake.path)))
|
||||
assert in_repo_store.exists(Generator("my_generator"), "my_value")
|
||||
assert in_repo_store.get(Generator("my_generator"), "my_value").decode() == "hello"
|
||||
assert sops_store.exists(Generator("my_generator"), "my_secret")
|
||||
@@ -856,7 +853,7 @@ def test_fails_when_files_are_left_from_other_backend(
|
||||
flake.refresh()
|
||||
monkeypatch.chdir(flake.path)
|
||||
for generator in ["my_secret_generator", "my_value_generator"]:
|
||||
generate_vars_for_machine_interactive(
|
||||
create_machine_vars_interactive(
|
||||
Machine(name="my_machine", flake=Flake(str(flake.path))),
|
||||
generator,
|
||||
regenerate=False,
|
||||
@@ -873,13 +870,13 @@ def test_fails_when_files_are_left_from_other_backend(
|
||||
# This should raise an error
|
||||
if generator == "my_secret_generator":
|
||||
with pytest.raises(ClanError):
|
||||
generate_vars_for_machine_interactive(
|
||||
create_machine_vars_interactive(
|
||||
Machine(name="my_machine", flake=Flake(str(flake.path))),
|
||||
generator,
|
||||
regenerate=False,
|
||||
)
|
||||
else:
|
||||
generate_vars_for_machine_interactive(
|
||||
create_machine_vars_interactive(
|
||||
Machine(name="my_machine", flake=Flake(str(flake.path))),
|
||||
generator,
|
||||
regenerate=False,
|
||||
@@ -887,7 +884,9 @@ def test_fails_when_files_are_left_from_other_backend(
|
||||
|
||||
|
||||
@pytest.mark.with_core
|
||||
def test_keygen(monkeypatch: pytest.MonkeyPatch, flake: ClanFlake) -> None:
|
||||
def test_create_sops_age_secrets(
|
||||
monkeypatch: pytest.MonkeyPatch, flake: ClanFlake
|
||||
) -> None:
|
||||
monkeypatch.chdir(flake.path)
|
||||
cli.run(["vars", "keygen", "--flake", str(flake.path), "--user", "user"])
|
||||
# check public key exists
|
||||
@@ -917,12 +916,12 @@ def test_invalidation(
|
||||
monkeypatch.chdir(flake.path)
|
||||
cli.run(["vars", "generate", "--flake", str(flake.path), "my_machine"])
|
||||
machine = Machine(name="my_machine", flake=Flake(str(flake.path)))
|
||||
value1 = get_var(
|
||||
value1 = get_machine_var(
|
||||
str(machine.flake.path), machine.name, "my_generator/my_value"
|
||||
).printable_value
|
||||
# generate again and make sure nothing changes without the invalidation data being set
|
||||
cli.run(["vars", "generate", "--flake", str(flake.path), "my_machine"])
|
||||
value1_new = get_var(
|
||||
value1_new = get_machine_var(
|
||||
str(machine.flake.path), machine.name, "my_generator/my_value"
|
||||
).printable_value
|
||||
assert value1 == value1_new
|
||||
@@ -931,13 +930,13 @@ def test_invalidation(
|
||||
flake.refresh()
|
||||
# generate again and make sure the value changes
|
||||
cli.run(["vars", "generate", "--flake", str(flake.path), "my_machine"])
|
||||
value2 = get_var(
|
||||
value2 = get_machine_var(
|
||||
str(machine.flake.path), machine.name, "my_generator/my_value"
|
||||
).printable_value
|
||||
assert value1 != value2
|
||||
# generate again without changing invalidation data -> value should not change
|
||||
cli.run(["vars", "generate", "--flake", str(flake.path), "my_machine"])
|
||||
value2_new = get_var(
|
||||
value2_new = get_machine_var(
|
||||
str(machine.flake.path), machine.name, "my_generator/my_value"
|
||||
).printable_value
|
||||
assert value2 == value2_new
|
||||
|
||||
@@ -6,7 +6,7 @@ from pathlib import Path
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from clan_lib.errors import ClanError
|
||||
from clan_lib.machines import machines
|
||||
from clan_lib.flake import Flake
|
||||
from clan_lib.ssh.remote import Remote
|
||||
|
||||
if TYPE_CHECKING:
|
||||
@@ -29,8 +29,9 @@ class GeneratorUpdate:
|
||||
|
||||
|
||||
class StoreBase(ABC):
|
||||
def __init__(self, machine: "machines.Machine") -> None:
|
||||
def __init__(self, machine: str, flake: Flake) -> None:
|
||||
self.machine = machine
|
||||
self.flake = flake
|
||||
|
||||
@property
|
||||
@abstractmethod
|
||||
@@ -86,10 +87,10 @@ class StoreBase(ABC):
|
||||
def rel_dir(self, generator: "Generator", var_name: str) -> Path:
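# Shared generators live under "shared/"; everything else is keyed by the machine name.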
|
||||
if generator.share:
|
||||
return Path("shared") / generator.name / var_name
|
||||
return Path("per-machine") / self.machine.name / generator.name / var_name
|
||||
return Path("per-machine") / self.machine / generator.name / var_name
|
||||
|
||||
def directory(self, generator: "Generator", var_name: str) -> Path:
|
||||
return Path(self.machine.flake_dir) / "vars" / self.rel_dir(generator, var_name)
|
||||
return self.flake.path / "vars" / self.rel_dir(generator, var_name)
|
||||
|
||||
def set(
|
||||
self,
|
||||
|
||||
@@ -82,11 +82,6 @@ class Generator:
|
||||
files = []
|
||||
gen_files = files_data.get(gen_name, {})
|
||||
for file_name, file_data in gen_files.items():
|
||||
# Handle mode conversion properly
|
||||
mode = file_data["mode"]
|
||||
if isinstance(mode, str):
|
||||
mode = int(mode, 8)
|
||||
|
||||
var = Var(
|
||||
id=f"{gen_name}/{file_name}",
|
||||
name=file_name,
|
||||
@@ -94,7 +89,11 @@ class Generator:
|
||||
deploy=file_data["deploy"],
|
||||
owner=file_data["owner"],
|
||||
group=file_data["group"],
|
||||
mode=mode,
|
||||
mode=(
|
||||
file_data["mode"]
|
||||
if isinstance(file_data["mode"], int)
|
||||
else int(file_data["mode"], 8)
|
||||
),
|
||||
needed_for=file_data["neededFor"],
|
||||
)
|
||||
files.append(var)
|
||||
@@ -424,7 +423,7 @@ def get_closure(
|
||||
|
||||
|
||||
@API.register
|
||||
def get_generators_closure(
|
||||
def get_generators(
|
||||
machine_name: str,
|
||||
base_dir: Path,
|
||||
full_closure: bool = False,
|
||||
@@ -462,7 +461,7 @@ def _generate_vars_for_machine(
|
||||
|
||||
|
||||
@API.register
|
||||
def generate_vars_for_machine(
|
||||
def run_generators(
|
||||
machine_name: str,
|
||||
generators: list[str],
|
||||
all_prompt_values: dict[str, dict[str, str]],
|
||||
@@ -487,7 +486,7 @@ def generate_vars_for_machine(
|
||||
)
|
||||
|
||||
|
||||
def generate_vars_for_machine_interactive(
|
||||
def create_machine_vars_interactive(
|
||||
machine: "Machine",
|
||||
generator_name: str | None,
|
||||
regenerate: bool,
|
||||
@@ -541,7 +540,7 @@ def generate_vars(
|
||||
for machine in machines:
|
||||
errors = []
|
||||
try:
|
||||
was_regenerated |= generate_vars_for_machine_interactive(
|
||||
was_regenerated |= create_machine_vars_interactive(
|
||||
machine,
|
||||
generator_name,
|
||||
regenerate,
|
||||
|
||||
@@ -3,19 +3,17 @@ import logging
|
||||
import sys
|
||||
|
||||
from clan_cli.completions import add_dynamic_completer, complete_machines
|
||||
from clan_lib.api import API
|
||||
from clan_lib.errors import ClanError
|
||||
from clan_lib.flake import Flake
|
||||
|
||||
from .generate import Var
|
||||
from .list import get_vars
|
||||
from .list import get_machine_vars
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@API.register
|
||||
def get_var(base_dir: str, machine_name: str, var_id: str) -> Var:
|
||||
vars_ = get_vars(base_dir=base_dir, machine_name=machine_name)
|
||||
def get_machine_var(base_dir: str, machine_name: str, var_id: str) -> Var:
|
||||
vars_ = get_machine_vars(base_dir=base_dir, machine_name=machine_name)
|
||||
results = []
|
||||
for var in vars_:
|
||||
if var.id == var_id:
|
||||
@@ -41,7 +39,7 @@ def get_var(base_dir: str, machine_name: str, var_id: str) -> Var:
|
||||
|
||||
|
||||
def get_command(machine_name: str, var_id: str, flake: Flake) -> None:
|
||||
var = get_var(str(flake.path), machine_name, var_id)
|
||||
var = get_machine_var(str(flake.path), machine_name, var_id)
|
||||
if not var.exists:
|
||||
msg = f"Var {var.id} has not been generated yet"
|
||||
raise ClanError(msg)
|
||||
|
||||
@@ -12,12 +12,18 @@ from clan_lib.errors import ClanError
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
# TODO: Unify with "create clan" should be done automatically
|
||||
@API.register
|
||||
def keygen(flake_dir: Path, user: str | None = None, force: bool = False) -> None:
|
||||
def create_secrets_user(
|
||||
flake_dir: Path, user: str | None = None, force: bool = False
|
||||
) -> None:
|
||||
"""
|
||||
initialize sops keys for vars
|
||||
"""
|
||||
if user is None:
|
||||
user = os.getenv("USER", None)
|
||||
if not user:
|
||||
msg = "No user provided and $USER is not set. Please provide a user via --user."
|
||||
msg = "No user provided and environment variable: '$USER' is not set. Please provide an explizit username via argument"
|
||||
raise ClanError(msg)
|
||||
pub_keys = maybe_get_admin_public_keys()
|
||||
if not pub_keys:
|
||||
@@ -34,7 +40,7 @@ def keygen(flake_dir: Path, user: str | None = None, force: bool = False) -> Non
|
||||
def _command(
|
||||
args: argparse.Namespace,
|
||||
) -> None:
|
||||
keygen(
|
||||
create_secrets_user(
|
||||
flake_dir=args.flake.path,
|
||||
user=args.user,
|
||||
force=args.force,
|
||||
|
||||
@@ -2,19 +2,15 @@ import argparse
|
||||
import logging
|
||||
|
||||
from clan_cli.completions import add_dynamic_completer, complete_machines
|
||||
from clan_lib.api import API
|
||||
from clan_lib.errors import ClanError
|
||||
from clan_lib.flake import Flake
|
||||
from clan_lib.machines.machines import Machine
|
||||
|
||||
from ._types import GeneratorUpdate
|
||||
from .generate import Generator, Prompt, Var, execute_generator
|
||||
from .generate import Var
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@API.register
|
||||
def get_vars(base_dir: str, machine_name: str) -> list[Var]:
|
||||
def get_machine_vars(base_dir: str, machine_name: str) -> list[Var]:
|
||||
machine = Machine(name=machine_name, flake=Flake(base_dir))
|
||||
pub_store = machine.public_vars_store
|
||||
sec_store = machine.secret_vars_store
|
||||
@@ -32,70 +28,12 @@ def get_vars(base_dir: str, machine_name: str) -> list[Var]:
|
||||
return all_vars
|
||||
|
||||
|
||||
def _get_previous_value(
|
||||
machine: Machine,
|
||||
generator: Generator,
|
||||
prompt: Prompt,
|
||||
) -> str | None:
|
||||
if not prompt.persist:
|
||||
return None
|
||||
|
||||
pub_store = machine.public_vars_store
|
||||
if pub_store.exists(generator, prompt.name):
|
||||
return pub_store.get(generator, prompt.name).decode()
|
||||
sec_store = machine.secret_vars_store
|
||||
if sec_store.exists(generator, prompt.name):
|
||||
return sec_store.get(generator, prompt.name).decode()
|
||||
return None
|
||||
|
||||
|
||||
@API.register
|
||||
def get_generators(base_dir: str, machine_name: str) -> list[Generator]:
|
||||
from clan_cli.vars.generate import Generator
|
||||
|
||||
machine = Machine(name=machine_name, flake=Flake(base_dir))
|
||||
generators: list[Generator] = Generator.generators_from_flake(
|
||||
machine_name, machine.flake
|
||||
)
|
||||
for generator in generators:
|
||||
for prompt in generator.prompts:
|
||||
prompt.previous_value = _get_previous_value(machine, generator, prompt)
|
||||
return generators
|
||||
|
||||
|
||||
# TODO: Ensure generator dependencies are met (executed in correct order etc.)
|
||||
# TODO: for missing prompts, default to existing values
|
||||
# TODO: raise error if mandatory prompt not provided
|
||||
@API.register
|
||||
def set_prompts(
|
||||
base_dir: str, machine_name: str, updates: list[GeneratorUpdate]
|
||||
) -> None:
|
||||
from clan_cli.vars.generate import Generator
|
||||
|
||||
machine = Machine(name=machine_name, flake=Flake(base_dir))
|
||||
for update in updates:
|
||||
generators = Generator.generators_from_flake(machine_name, machine.flake)
|
||||
for generator in generators:
|
||||
if generator.name == update.generator:
|
||||
break
|
||||
else:
|
||||
msg = f"Generator '{update.generator}' not found in machine {machine.name}"
|
||||
raise ClanError(msg)
|
||||
execute_generator(
|
||||
machine,
|
||||
generator,
|
||||
secret_vars_store=machine.secret_vars_store,
|
||||
public_vars_store=machine.public_vars_store,
|
||||
prompt_values=update.prompt_values,
|
||||
)
|
||||
|
||||
|
||||
def stringify_vars(_vars: list[Var]) -> str:
|
||||
return "\n".join([str(var) for var in _vars])
|
||||
|
||||
|
||||
def stringify_all_vars(machine: Machine) -> str:
|
||||
return stringify_vars(get_vars(str(machine.flake), machine.name))
|
||||
return stringify_vars(get_machine_vars(str(machine.flake), machine.name))
|
||||
|
||||
|
||||
def list_command(args: argparse.Namespace) -> None:
|
||||
|
||||
@@ -5,7 +5,7 @@ from pathlib import Path
|
||||
from clan_cli.vars._types import StoreBase
|
||||
from clan_cli.vars.generate import Generator, Var
|
||||
from clan_lib.errors import ClanError
|
||||
from clan_lib.machines.machines import Machine
|
||||
from clan_lib.flake import Flake
|
||||
from clan_lib.ssh.remote import Remote
|
||||
|
||||
|
||||
@@ -14,8 +14,8 @@ class FactStore(StoreBase):
|
||||
def is_secret_store(self) -> bool:
|
||||
return False
|
||||
|
||||
def __init__(self, machine: Machine) -> None:
|
||||
self.machine = machine
|
||||
def __init__(self, machine: str, flake: Flake) -> None:
|
||||
super().__init__(machine, flake)
|
||||
self.works_remotely = False
|
||||
|
||||
@property
|
||||
@@ -28,8 +28,8 @@ class FactStore(StoreBase):
|
||||
var: Var,
|
||||
value: bytes,
|
||||
) -> Path | None:
|
||||
if not self.machine.flake.is_local:
|
||||
msg = f"in_flake fact storage is only supported for local flakes: {self.machine.flake}"
|
||||
if not self.flake.is_local:
|
||||
msg = f"in_flake fact storage is only supported for local flakes: {self.flake}"
|
||||
raise ClanError(msg)
|
||||
folder = self.directory(generator, var.name)
|
||||
file_path = folder / "value"
|
||||
@@ -62,8 +62,8 @@ class FactStore(StoreBase):
|
||||
return [fact_folder]
|
||||
|
||||
def delete_store(self) -> Iterable[Path]:
|
||||
flake_root = Path(self.machine.flake_dir)
|
||||
store_folder = flake_root / "vars/per-machine" / self.machine.name
|
||||
flake_root = self.flake.path
|
||||
store_folder = flake_root / "vars/per-machine" / self.machine
|
||||
if not store_folder.exists():
|
||||
return []
|
||||
shutil.rmtree(store_folder)
|
||||
|
||||
@@ -7,7 +7,7 @@ from clan_cli.vars._types import StoreBase
|
||||
from clan_cli.vars.generate import Generator, Var
|
||||
from clan_lib.dirs import vm_state_dir
|
||||
from clan_lib.errors import ClanError
|
||||
from clan_lib.machines.machines import Machine
|
||||
from clan_lib.flake import Flake
|
||||
from clan_lib.ssh.remote import Remote
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
@@ -18,11 +18,14 @@ class FactStore(StoreBase):
|
||||
def is_secret_store(self) -> bool:
|
||||
return False
|
||||
|
||||
def __init__(self, machine: Machine) -> None:
|
||||
self.machine = machine
|
||||
def __init__(self, machine: str, flake: Flake) -> None:
|
||||
super().__init__(machine, flake)
|
||||
self.works_remotely = False
|
||||
self.dir = vm_state_dir(machine.flake.identifier, machine.name) / "facts"
|
||||
machine.debug(f"FactStore initialized with dir {self.dir}")
|
||||
self.dir = vm_state_dir(flake.identifier, machine) / "facts"
|
||||
log.debug(
|
||||
f"FactStore initialized with dir {self.dir}",
|
||||
extra={"command_prefix": machine},
|
||||
)
|
||||
|
||||
@property
|
||||
def store_name(self) -> str:
|
||||
|
||||
@@ -4,7 +4,7 @@ from pathlib import Path
|
||||
|
||||
from clan_cli.vars._types import StoreBase
|
||||
from clan_cli.vars.generate import Generator, Var
|
||||
from clan_lib.machines.machines import Machine
|
||||
from clan_lib.flake import Flake
|
||||
from clan_lib.ssh.remote import Remote
|
||||
|
||||
|
||||
@@ -13,8 +13,8 @@ class SecretStore(StoreBase):
|
||||
def is_secret_store(self) -> bool:
|
||||
return True
|
||||
|
||||
def __init__(self, machine: Machine) -> None:
|
||||
self.machine = machine
|
||||
def __init__(self, machine: str, flake: Flake) -> None:
|
||||
super().__init__(machine, flake)
|
||||
self.dir = Path(tempfile.gettempdir()) / "clan_secrets"
|
||||
self.dir.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
@@ -46,6 +46,17 @@ class SecretStore(StoreBase):
|
||||
shutil.copytree(self.dir, output_dir)
|
||||
shutil.rmtree(self.dir)
|
||||
|
||||
def delete(self, generator: Generator, name: str) -> list[Path]:
|
||||
secret_file = self.dir / generator.name / name
|
||||
if secret_file.exists():
|
||||
secret_file.unlink()
|
||||
return []
|
||||
|
||||
def delete_store(self) -> list[Path]:
|
||||
if self.dir.exists():
|
||||
shutil.rmtree(self.dir)
|
||||
return []
|
||||
|
||||
def upload(self, host: Remote, phases: list[str]) -> None:
|
||||
msg = "Cannot upload secrets with FS backend"
|
||||
raise NotImplementedError(msg)
|
||||
|
||||
@@ -1,18 +1,15 @@
|
||||
import io
|
||||
import logging
|
||||
import os
|
||||
import tarfile
|
||||
import subprocess
|
||||
from collections.abc import Iterable
|
||||
from itertools import chain
|
||||
from pathlib import Path
|
||||
from tempfile import TemporaryDirectory
|
||||
|
||||
from clan_cli.ssh.upload import upload
|
||||
from clan_cli.vars._types import StoreBase
|
||||
from clan_cli.vars.generate import Generator, Var
|
||||
from clan_lib.cmd import CmdOut, Log, RunOpts, run
|
||||
from clan_lib.machines.machines import Machine
|
||||
from clan_lib.nix import nix_shell
|
||||
from clan_lib.flake import Flake
|
||||
from clan_lib.ssh.remote import Remote
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
@@ -23,35 +20,78 @@ class SecretStore(StoreBase):
|
||||
def is_secret_store(self) -> bool:
|
||||
return True
|
||||
|
||||
def __init__(self, machine: Machine) -> None:
|
||||
self.machine = machine
|
||||
def __init__(self, machine: str, flake: Flake) -> None:
|
||||
super().__init__(machine, flake)
|
||||
self.entry_prefix = "clan-vars"
|
||||
self._store_dir: Path | None = None
|
||||
|
||||
@property
|
||||
def store_name(self) -> str:
|
||||
return "password_store"
|
||||
|
||||
@property
|
||||
def _store_backend(self) -> str:
|
||||
backend = self.machine.select("config.clan.core.vars.settings.passBackend")
|
||||
return backend
|
||||
def store_dir(self) -> Path:
|
||||
"""Get the password store directory, cached after first access."""
|
||||
if self._store_dir is None:
|
||||
result = self._run_pass("git", "rev-parse", "--show-toplevel", check=False)
|
||||
if result.returncode != 0:
|
||||
msg = "Password store must be a git repository"
|
||||
raise ValueError(msg)
|
||||
self._store_dir = Path(result.stdout.strip().decode())
|
||||
return self._store_dir
|
||||
|
||||
@property
|
||||
def _password_store_dir(self) -> Path:
|
||||
if self._store_backend == "passage":
|
||||
lookup = os.environ.get("PASSAGE_DIR")
|
||||
default = Path.home() / ".passage/store"
|
||||
else:
|
||||
lookup = os.environ.get("PASSWORD_STORE_DIR")
|
||||
default = Path.home() / ".password-store"
|
||||
return Path(lookup) if lookup else default
|
||||
def _pass_command(self) -> str:
|
||||
out_path = self.flake.select_machine(
|
||||
self.machine, "config.clan.core.vars.password-store.passPackage.outPath"
|
||||
)
|
||||
main_program = (
|
||||
self.flake.select_machine(
|
||||
self.machine,
|
||||
"config.clan.core.vars.password-store.passPackage.?meta.?mainProgram",
|
||||
)
|
||||
.get("meta", {})
|
||||
.get("mainProgram")
|
||||
)
|
||||
|
||||
if main_program:
|
||||
binary_path = Path(out_path) / "bin" / main_program
|
||||
if binary_path.exists():
|
||||
return str(binary_path)
|
||||
|
||||
# Look for common password store binaries
|
||||
bin_dir = Path(out_path) / "bin"
|
||||
if bin_dir.exists():
|
||||
for binary in ["pass", "passage"]:
|
||||
binary_path = bin_dir / binary
|
||||
if binary_path.exists():
|
||||
return str(binary_path)
|
||||
|
||||
# If only one binary exists, use it
|
||||
binaries = [f for f in bin_dir.iterdir() if f.is_file()]
|
||||
if len(binaries) == 1:
|
||||
return str(binaries[0])
|
||||
|
||||
msg = "Could not find password store binary in package"
|
||||
raise ValueError(msg)
|
||||
|
||||
def entry_dir(self, generator: Generator, name: str) -> Path:
|
||||
return Path(self.entry_prefix) / self.rel_dir(generator, name)
|
||||
|
||||
def _run_pass(self, *args: str, options: RunOpts | None = None) -> CmdOut:
|
||||
cmd = nix_shell(packages=["pass"], cmd=[self._store_backend, *args])
|
||||
return run(cmd, options)
|
||||
def _run_pass(
|
||||
self, *args: str, input: bytes | None = None, check: bool = True
|
||||
) -> subprocess.CompletedProcess[bytes]:
|
||||
cmd = [self._pass_command, *args]
|
||||
# We need bytes support here, so we cannot use the clan cmd wrapper.
|
||||
# If you change this to use run(), add bytes support to it first,
|
||||
# otherwise we will mangle binary secrets (which is annoying to debug).
|
||||
return subprocess.run(
|
||||
cmd,
|
||||
input=input,
|
||||
stdout=subprocess.PIPE,
|
||||
stderr=subprocess.PIPE,
|
||||
check=check,
|
||||
)
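The new `_run_pass` goes through `subprocess.run` with raw bytes on purpose: decoding stdin/stdout would corrupt non-UTF-8 secret material. A minimal standalone sketch of the same pattern (assuming a `pass` binary on PATH; the entry name is made up):

```python
import subprocess

def run_pass(*args: str, input: bytes | None = None, check: bool = True) -> subprocess.CompletedProcess[bytes]:
    # Keep stdin/stdout as bytes; text mode would mangle binary secrets.
    return subprocess.run(
        ["pass", *args],
        input=input,
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
        check=check,
    )

# Store and read back a binary secret without any decoding step.
run_pass("insert", "-m", "clan-vars/example", input=b"\x00\xff binary payload")
secret: bytes = run_pass("show", "clan-vars/example").stdout
```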
|
||||
|
||||
def _set(
|
||||
self,
|
||||
@@ -60,121 +100,77 @@ class SecretStore(StoreBase):
|
||||
value: bytes,
|
||||
) -> Path | None:
|
||||
pass_call = ["insert", "-m", str(self.entry_dir(generator, var.name))]
|
||||
self._run_pass(*pass_call, options=RunOpts(input=value, check=True))
|
||||
self._run_pass(*pass_call, input=value, check=True)
|
||||
return None # we manage the files outside of the git repo
|
||||
|
||||
def get(self, generator: Generator, name: str) -> bytes:
|
||||
pass_name = str(self.entry_dir(generator, name))
|
||||
return self._run_pass("show", pass_name).stdout.encode()
|
||||
return self._run_pass("show", pass_name).stdout
|
||||
|
||||
def exists(self, generator: Generator, name: str) -> bool:
|
||||
extension = "age" if self._store_backend == "passage" else "gpg"
|
||||
filename = f"{self.entry_dir(generator, name)}.{extension}"
|
||||
return (self._password_store_dir / filename).exists()
|
||||
pass_name = str(self.entry_dir(generator, name))
|
||||
# Check if the file exists with either .age or .gpg extension
|
||||
age_file = self.store_dir / f"{pass_name}.age"
|
||||
gpg_file = self.store_dir / f"{pass_name}.gpg"
|
||||
return age_file.exists() or gpg_file.exists()
|
||||
|
||||
def delete(self, generator: Generator, name: str) -> Iterable[Path]:
|
||||
pass_name = str(self.entry_dir(generator, name))
|
||||
self._run_pass("rm", "--force", pass_name, options=RunOpts(check=True))
|
||||
self._run_pass("rm", "--force", pass_name, check=True)
|
||||
return []
|
||||
|
||||
def delete_store(self) -> Iterable[Path]:
|
||||
machine_dir = Path(self.entry_prefix) / "per-machine" / self.machine.name
|
||||
if not (self._password_store_dir / machine_dir).exists():
|
||||
# The directory may not exist if the machine
|
||||
# has no vars, or they have been deleted already.
|
||||
return []
|
||||
pass_call = ["rm", "--force", "--recursive", str(machine_dir)]
|
||||
self._run_pass(*pass_call, options=RunOpts(check=True))
|
||||
machine_dir = Path(self.entry_prefix) / "per-machine" / self.machine
|
||||
# Check if the directory exists in the password store before trying to delete
|
||||
result = self._run_pass("ls", str(machine_dir), check=False)
|
||||
if result.returncode == 0:
|
||||
self._run_pass("rm", "--force", "--recursive", str(machine_dir), check=True)
|
||||
return []
|
||||
|
||||
def generate_hash(self) -> bytes:
|
||||
hashes = []
|
||||
hashes.append(
|
||||
run(
|
||||
nix_shell(
|
||||
["git"],
|
||||
[
|
||||
"git",
|
||||
"-C",
|
||||
str(self._password_store_dir),
|
||||
"log",
|
||||
"-1",
|
||||
"--format=%H",
|
||||
self.entry_prefix,
|
||||
],
|
||||
),
|
||||
RunOpts(check=False),
|
||||
)
|
||||
.stdout.strip()
|
||||
.encode()
|
||||
result = self._run_pass(
|
||||
"git", "log", "-1", "--format=%H", self.entry_prefix, check=False
|
||||
)
|
||||
shared_dir = self._password_store_dir / self.entry_prefix / "shared"
|
||||
machine_dir = (
|
||||
self._password_store_dir
|
||||
/ self.entry_prefix
|
||||
/ "per-machine"
|
||||
/ self.machine.name
|
||||
)
|
||||
for symlink in chain(shared_dir.glob("**/*"), machine_dir.glob("**/*")):
|
||||
if symlink.is_symlink():
|
||||
hashes.append(
|
||||
run(
|
||||
nix_shell(
|
||||
["git"],
|
||||
[
|
||||
"git",
|
||||
"-C",
|
||||
str(self._password_store_dir),
|
||||
"log",
|
||||
"-1",
|
||||
"--format=%H",
|
||||
str(symlink),
|
||||
],
|
||||
),
|
||||
RunOpts(check=False),
|
||||
)
|
||||
.stdout.strip()
|
||||
.encode()
|
||||
)
|
||||
git_hash = result.stdout.strip()
|
||||
|
||||
# we sort the hashes to make sure that the order is always the same
|
||||
hashes.sort()
|
||||
if not git_hash:
|
||||
return b""
|
||||
|
||||
from clan_cli.vars.generate import Generator
|
||||
|
||||
manifest = []
|
||||
generators = Generator.generators_from_flake(
|
||||
self.machine.name, self.machine.flake
|
||||
)
|
||||
generators = Generator.generators_from_flake(self.machine, self.flake)
|
||||
for generator in generators:
|
||||
for file in generator.files:
|
||||
manifest.append(f"{generator.name}/{file.name}".encode())
|
||||
manifest += hashes
|
||||
|
||||
manifest.append(git_hash)
|
||||
return b"\n".join(manifest)
|
||||
|
||||
def needs_upload(self, host: Remote) -> bool:
|
||||
local_hash = self.generate_hash()
|
||||
if not local_hash:
|
||||
return True
|
||||
|
||||
from clan_lib.cmd import RunOpts, Log
|
||||
|
||||
remote_hash = host.run(
|
||||
# TODO get the path to the secrets from the machine
|
||||
[
|
||||
"cat",
|
||||
f"{self.machine.select('config.clan.vars.password-store.secretLocation')}/.{self._store_backend}_info",
|
||||
f"{self.flake.select_machine(self.machine, 'config.clan.core.vars.password-store.secretLocation')}/.pass_info",
|
||||
],
|
||||
RunOpts(log=Log.STDERR, check=False),
|
||||
).stdout.strip()
|
||||
|
||||
if not remote_hash:
|
||||
print("remote hash is empty")
|
||||
return True
|
||||
|
||||
return local_hash.decode() != remote_hash
|
||||
return local_hash != remote_hash.encode()
|
||||
|
||||
def populate_dir(self, output_dir: Path, phases: list[str]) -> None:
|
||||
from clan_cli.vars.generate import Generator
|
||||
|
||||
vars_generators = Generator.generators_from_flake(
|
||||
self.machine.name, self.machine.flake
|
||||
)
|
||||
vars_generators = Generator.generators_from_flake(self.machine, self.flake)
|
||||
if "users" in phases:
|
||||
with tarfile.open(
|
||||
output_dir / "secrets_for_users.tar.gz", "w:gz"
|
||||
@@ -233,7 +229,9 @@ class SecretStore(StoreBase):
|
||||
out_file.parent.mkdir(parents=True, exist_ok=True)
|
||||
out_file.write_bytes(self.get(generator, file.name))
|
||||
|
||||
(output_dir / f".{self._store_backend}_info").write_bytes(self.generate_hash())
|
||||
hash_data = self.generate_hash()
|
||||
if hash_data:
|
||||
(output_dir / ".pass_info").write_bytes(hash_data)
|
||||
|
||||
def upload(self, host: Remote, phases: list[str]) -> None:
|
||||
if "partitioning" in phases:
|
||||
@@ -246,6 +244,8 @@ class SecretStore(StoreBase):
|
||||
pass_dir = Path(_tempdir).resolve()
|
||||
self.populate_dir(pass_dir, phases)
|
||||
upload_dir = Path(
|
||||
self.machine.select("config.clan.vars.password-store.secretLocation")
|
||||
self.flake.select_machine(
|
||||
self.machine, "config.clan.core.vars.password-store.secretLocation"
|
||||
)
|
||||
)
|
||||
upload(host, pass_dir, upload_dir)
|
||||
|
||||
@@ -27,7 +27,7 @@ from clan_cli.vars._types import StoreBase
|
||||
from clan_cli.vars.generate import Generator
|
||||
from clan_cli.vars.var import Var
|
||||
from clan_lib.errors import ClanError
|
||||
from clan_lib.machines.machines import Machine
|
||||
from clan_lib.flake import Flake
|
||||
from clan_lib.ssh.remote import Remote
|
||||
|
||||
|
||||
@@ -48,15 +48,13 @@ class SecretStore(StoreBase):
|
||||
def is_secret_store(self) -> bool:
|
||||
return True
|
||||
|
||||
def __init__(self, machine: Machine) -> None:
|
||||
self.machine = machine
|
||||
def __init__(self, machine: str, flake: Flake) -> None:
|
||||
super().__init__(machine, flake)
|
||||
|
||||
# no need to generate keys if we don't manage secrets
|
||||
from clan_cli.vars.generate import Generator
|
||||
|
||||
vars_generators = Generator.generators_from_flake(
|
||||
self.machine.name, self.machine.flake
|
||||
)
|
||||
vars_generators = Generator.generators_from_flake(self.machine, self.flake)
|
||||
if not vars_generators:
|
||||
return
|
||||
has_secrets = False
|
||||
@@ -67,18 +65,19 @@ class SecretStore(StoreBase):
|
||||
if not has_secrets:
|
||||
return
|
||||
|
||||
if has_machine(self.machine.flake_dir, self.machine.name):
|
||||
if has_machine(self.flake.path, self.machine):
|
||||
return
|
||||
priv_key, pub_key = sops.generate_private_key()
|
||||
encrypt_secret(
|
||||
self.machine.flake_dir,
|
||||
sops_secrets_folder(self.machine.flake_dir)
|
||||
/ f"{self.machine.name}-age.key",
|
||||
self.flake.path,
|
||||
sops_secrets_folder(self.flake.path) / f"{self.machine}-age.key",
|
||||
priv_key,
|
||||
add_groups=self.machine.select("config.clan.core.sops.defaultGroups"),
|
||||
age_plugins=load_age_plugins(self.machine.flake),
|
||||
add_groups=self.flake.select_machine(
|
||||
self.machine, "config.clan.core.sops.defaultGroups"
|
||||
),
|
||||
age_plugins=load_age_plugins(self.flake),
|
||||
)
|
||||
add_machine(self.machine.flake_dir, self.machine.name, pub_key, False)
|
||||
add_machine(self.flake.path, self.machine, pub_key, False)
|
||||
|
||||
@property
|
||||
def store_name(self) -> str:
|
||||
@@ -87,11 +86,11 @@ class SecretStore(StoreBase):
|
||||
def user_has_access(
|
||||
self, user: str, generator: Generator, secret_name: str
|
||||
) -> bool:
|
||||
key_dir = sops_users_folder(self.machine.flake_dir) / user
|
||||
key_dir = sops_users_folder(self.flake.path) / user
|
||||
return self.key_has_access(key_dir, generator, secret_name)
|
||||
|
||||
def machine_has_access(self, generator: Generator, secret_name: str) -> bool:
|
||||
key_dir = sops_machines_folder(self.machine.flake_dir) / self.machine.name
|
||||
key_dir = sops_machines_folder(self.flake.path) / self.machine
|
||||
return self.key_has_access(key_dir, generator, secret_name)
|
||||
|
||||
def key_has_access(
|
||||
@@ -117,9 +116,7 @@ class SecretStore(StoreBase):
|
||||
if generator is None:
|
||||
from clan_cli.vars.generate import Generator
|
||||
|
||||
generators = Generator.generators_from_flake(
|
||||
self.machine.name, self.machine.flake
|
||||
)
|
||||
generators = Generator.generators_from_flake(self.machine, self.flake)
|
||||
else:
|
||||
generators = [generator]
|
||||
file_found = False
|
||||
@@ -144,7 +141,7 @@ class SecretStore(StoreBase):
|
||||
if outdated:
|
||||
msg = (
|
||||
"The local state of some secret vars is inconsistent and needs to be updated.\n"
|
||||
f"Run 'clan vars fix {self.machine.name}' to apply the necessary changes."
|
||||
f"Run 'clan vars fix {self.machine}' to apply the necessary changes."
|
||||
"Problems to fix:\n"
|
||||
"\n".join(o[2] for o in outdated if o[2])
|
||||
)
|
||||
@@ -162,20 +159,22 @@ class SecretStore(StoreBase):
|
||||
secret_folder.mkdir(parents=True, exist_ok=True)
|
||||
# initialize the secret
|
||||
encrypt_secret(
|
||||
self.machine.flake_dir,
|
||||
self.flake.path,
|
||||
secret_folder,
|
||||
value,
|
||||
add_machines=[self.machine.name] if var.deploy else [],
|
||||
add_groups=self.machine.select("config.clan.core.sops.defaultGroups"),
|
||||
add_machines=[self.machine] if var.deploy else [],
|
||||
add_groups=self.flake.select_machine(
|
||||
self.machine, "config.clan.core.sops.defaultGroups"
|
||||
),
|
||||
git_commit=False,
|
||||
age_plugins=load_age_plugins(self.machine.flake),
|
||||
age_plugins=load_age_plugins(self.flake),
|
||||
)
|
||||
return secret_folder
|
||||
|
||||
def get(self, generator: Generator, name: str) -> bytes:
|
||||
return decrypt_secret(
|
||||
self.secret_path(generator, name),
|
||||
age_plugins=load_age_plugins(self.machine.flake),
|
||||
age_plugins=load_age_plugins(self.flake),
|
||||
).encode("utf-8")
|
||||
|
||||
def delete(self, generator: "Generator", name: str) -> Iterable[Path]:
|
||||
@@ -184,8 +183,8 @@ class SecretStore(StoreBase):
|
||||
return [secret_dir]
|
||||
|
||||
def delete_store(self) -> Iterable[Path]:
|
||||
flake_root = Path(self.machine.flake_dir)
|
||||
store_folder = flake_root / "vars/per-machine" / self.machine.name
|
||||
flake_root = self.flake.path
|
||||
store_folder = flake_root / "vars/per-machine" / self.machine
|
||||
if not store_folder.exists():
|
||||
return []
|
||||
shutil.rmtree(store_folder)
|
||||
@@ -194,17 +193,15 @@ class SecretStore(StoreBase):
|
||||
def populate_dir(self, output_dir: Path, phases: list[str]) -> None:
|
||||
from clan_cli.vars.generate import Generator
|
||||
|
||||
vars_generators = Generator.generators_from_flake(
|
||||
self.machine.name, self.machine.flake
|
||||
)
|
||||
vars_generators = Generator.generators_from_flake(self.machine, self.flake)
|
||||
if "users" in phases or "services" in phases:
|
||||
key_name = f"{self.machine.name}-age.key"
|
||||
if not has_secret(sops_secrets_folder(self.machine.flake_dir) / key_name):
|
||||
key_name = f"{self.machine}-age.key"
|
||||
if not has_secret(sops_secrets_folder(self.flake.path) / key_name):
|
||||
# skip uploading the secret, not managed by us
|
||||
return
|
||||
key = decrypt_secret(
|
||||
sops_secrets_folder(self.machine.flake_dir) / key_name,
|
||||
age_plugins=load_age_plugins(self.machine.flake),
|
||||
sops_secrets_folder(self.flake.path) / key_name,
|
||||
age_plugins=load_age_plugins(self.flake),
|
||||
)
|
||||
(output_dir / "key.txt").touch(mode=0o600)
|
||||
(output_dir / "key.txt").write_text(key)
|
||||
@@ -258,10 +255,10 @@ class SecretStore(StoreBase):
|
||||
return
|
||||
secret_folder = self.secret_path(generator, name)
|
||||
add_secret(
|
||||
self.machine.flake_dir,
|
||||
self.machine.name,
|
||||
self.flake.path,
|
||||
self.machine,
|
||||
secret_folder,
|
||||
age_plugins=load_age_plugins(self.machine.flake),
|
||||
age_plugins=load_age_plugins(self.flake),
|
||||
)
|
||||
|
||||
def collect_keys_for_secret(self, path: Path) -> set[sops.SopsKey]:
|
||||
@@ -271,15 +268,17 @@ class SecretStore(StoreBase):
|
||||
)
|
||||
|
||||
keys = collect_keys_for_path(path)
|
||||
for group in self.machine.select("config.clan.core.sops.defaultGroups"):
|
||||
for group in self.flake.select_machine(
|
||||
self.machine, "config.clan.core.sops.defaultGroups"
|
||||
):
|
||||
keys.update(
|
||||
collect_keys_for_type(
|
||||
self.machine.flake_dir / "sops" / "groups" / group / "machines"
|
||||
self.flake.path / "sops" / "groups" / group / "machines"
|
||||
)
|
||||
)
|
||||
keys.update(
|
||||
collect_keys_for_type(
|
||||
self.machine.flake_dir / "sops" / "groups" / group / "users"
|
||||
self.flake.path / "sops" / "groups" / group / "users"
|
||||
)
|
||||
)
|
||||
|
||||
@@ -296,7 +295,7 @@ class SecretStore(StoreBase):
|
||||
f"One or more recipient keys were added to secret{' shared' if generator.share else ''} var '{var_id}', but it was never re-encrypted.\n"
|
||||
f"This could have been a malicious actor trying to add their keys, please investigate.\n"
|
||||
f"Added keys: {', '.join(f'{r.key_type.name}:{r.pubkey}' for r in recipients_to_add)}\n"
|
||||
f"If this is intended, run 'clan vars fix {self.machine.name}' to re-encrypt the secret."
|
||||
f"If this is intended, run 'clan vars fix {self.machine}' to re-encrypt the secret."
|
||||
)
|
||||
return needs_update, msg
|
||||
|
||||
@@ -309,9 +308,7 @@ class SecretStore(StoreBase):
|
||||
if generator is None:
|
||||
from clan_cli.vars.generate import Generator
|
||||
|
||||
generators = Generator.generators_from_flake(
|
||||
self.machine.name, self.machine.flake
|
||||
)
|
||||
generators = Generator.generators_from_flake(self.machine, self.flake)
|
||||
else:
|
||||
generators = [generator]
|
||||
file_found = False
|
||||
@@ -328,12 +325,14 @@ class SecretStore(StoreBase):
|
||||
|
||||
secret_path = self.secret_path(generator, file.name)
|
||||
|
||||
age_plugins = load_age_plugins(self.machine.flake)
|
||||
age_plugins = load_age_plugins(self.flake)
|
||||
|
||||
for group in self.machine.select("config.clan.core.sops.defaultGroups"):
|
||||
for group in self.flake.select_machine(
|
||||
self.machine, "config.clan.core.sops.defaultGroups"
|
||||
):
|
||||
allow_member(
|
||||
groups_folder(secret_path),
|
||||
sops_groups_folder(self.machine.flake_dir),
|
||||
sops_groups_folder(self.flake.path),
|
||||
group,
|
||||
# we just want to create missing symlinks, we call update_keys below:
|
||||
do_update_keys=False,
|
||||
|
||||
@@ -5,7 +5,7 @@ from pathlib import Path
|
||||
from clan_cli.vars._types import StoreBase
|
||||
from clan_cli.vars.generate import Generator, Var
|
||||
from clan_lib.dirs import vm_state_dir
|
||||
from clan_lib.machines.machines import Machine
|
||||
from clan_lib.flake import Flake
|
||||
from clan_lib.ssh.remote import Remote
|
||||
|
||||
|
||||
@@ -14,9 +14,9 @@ class SecretStore(StoreBase):
|
||||
def is_secret_store(self) -> bool:
|
||||
return True
|
||||
|
||||
def __init__(self, machine: Machine) -> None:
|
||||
self.machine = machine
|
||||
self.dir = vm_state_dir(machine.flake.identifier, machine.name) / "secrets"
|
||||
def __init__(self, machine: str, flake: Flake) -> None:
|
||||
super().__init__(machine, flake)
|
||||
self.dir = vm_state_dir(flake.identifier, machine) / "secrets"
|
||||
self.dir.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
@property
|
||||
|
||||
@@ -3,7 +3,7 @@ import logging
|
||||
import sys
|
||||
|
||||
from clan_cli.completions import add_dynamic_completer, complete_machines
|
||||
from clan_cli.vars.get import get_var
|
||||
from clan_cli.vars.get import get_machine_var
|
||||
from clan_cli.vars.prompt import PromptType
|
||||
from clan_lib.flake import Flake
|
||||
from clan_lib.git import commit_files
|
||||
@@ -21,7 +21,7 @@ def set_var(machine: str | Machine, var: str | Var, value: bytes, flake: Flake)
|
||||
else:
|
||||
_machine = machine
|
||||
if isinstance(var, str):
|
||||
_var = get_var(str(flake.path), _machine.name, var)
|
||||
_var = get_machine_var(str(flake.path), _machine.name, var)
|
||||
else:
|
||||
_var = var
|
||||
path = _var.set(value)
|
||||
@@ -35,7 +35,7 @@ def set_var(machine: str | Machine, var: str | Var, value: bytes, flake: Flake)
|
||||
|
||||
def set_via_stdin(machine_name: str, var_id: str, flake: Flake) -> None:
|
||||
machine = Machine(name=machine_name, flake=flake)
|
||||
var = get_var(str(flake.path), machine_name, var_id)
|
||||
var = get_machine_var(str(flake.path), machine_name, var_id)
|
||||
if sys.stdin.isatty():
|
||||
new_value = ask(
|
||||
var.id,
|
||||
|
||||
@@ -254,6 +254,7 @@ API.register(open_file)
|
||||
"type": "object",
|
||||
"required": ["arguments", "return"],
|
||||
"additionalProperties": False,
|
||||
"description": func.__doc__,
|
||||
"properties": {
|
||||
"return": return_type,
|
||||
"arguments": {
|
||||
|
||||
@@ -122,11 +122,12 @@ def blk_from_dict(data: dict) -> BlkInfo:
|
||||
|
||||
|
||||
@API.register
|
||||
def show_block_devices() -> Blockdevices:
|
||||
def list_block_devices() -> Blockdevices:
|
||||
"""
|
||||
Api method to show local block devices.
|
||||
List local block devices by running `lsblk`.
|
||||
|
||||
It must return a list of block devices.
|
||||
Returns:
|
||||
A list of detected block devices with metadata like size, path, type, etc.
|
||||
"""
|
||||
|
||||
cmd = nix_shell(
|
||||
|
||||
@@ -10,7 +10,7 @@ from clan_lib.api.modules import Frontmatter, extract_frontmatter
|
||||
from clan_lib.dirs import TemplateType, clan_templates
|
||||
from clan_lib.errors import ClanError
|
||||
from clan_lib.git import commit_file
|
||||
from clan_lib.machines.hardware import HardwareConfig, show_machine_hardware_config
|
||||
from clan_lib.machines.hardware import HardwareConfig, get_machine_hardware_config
|
||||
from clan_lib.machines.machines import Machine
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
@@ -137,7 +137,7 @@ def set_machine_disk_schema(
|
||||
Set the disk placeholders of the template
|
||||
"""
|
||||
# Assert the hw-config must exist before setting the disk
|
||||
hw_config = show_machine_hardware_config(machine)
|
||||
hw_config = get_machine_hardware_config(machine)
|
||||
hw_config_path = hw_config.config_path(machine)
|
||||
|
||||
if not hw_config_path.exists():
|
||||
|
||||
@@ -88,7 +88,7 @@ def parse_avahi_output(output: str) -> DNSInfo:
|
||||
|
||||
|
||||
@API.register
|
||||
def show_mdns() -> DNSInfo:
|
||||
def list_mdns_services() -> DNSInfo:
|
||||
cmd = nix_shell(
|
||||
["avahi"],
|
||||
[
|
||||
@@ -107,7 +107,7 @@ def show_mdns() -> DNSInfo:
|
||||
|
||||
|
||||
def mdns_command(args: argparse.Namespace) -> None:
|
||||
dns_info = show_mdns()
|
||||
dns_info = list_mdns_services()
|
||||
for name, info in dns_info.services.items():
|
||||
print(f"Hostname: {name} - ip: {info.ip}")
|
||||
|
||||
|
||||
@@ -168,7 +168,6 @@ def list_modules(base_path: str) -> ModuleLists:
|
||||
modules = flake.select(
|
||||
"clanInternals.inventoryClass.{?modulesPerSource,?localModules}"
|
||||
)
|
||||
print("Modules found:", modules)
|
||||
|
||||
return modules
|
||||
|
||||
|
||||
@@ -17,7 +17,7 @@ BAKEND_THREADS: dict[str, WebThread] | None = None
|
||||
|
||||
|
||||
@API.register_abstract
|
||||
def cancel_task(task_id: str) -> None:
|
||||
def delete_task(task_id: str) -> None:
|
||||
"""Cancel a task by its op_key."""
|
||||
assert BAKEND_THREADS is not None, "Backend threads not initialized"
|
||||
future = BAKEND_THREADS.get(task_id)
|
||||
|
||||
@@ -4,16 +4,12 @@ from pathlib import Path
|
||||
|
||||
from clan_lib.api import API
|
||||
from clan_lib.cmd import RunOpts, run
|
||||
from clan_lib.dirs import clan_templates
|
||||
from clan_lib.errors import ClanError
|
||||
from clan_lib.flake import Flake
|
||||
from clan_lib.nix import nix_command, nix_metadata, nix_shell
|
||||
from clan_lib.persist.inventory_store import InventorySnapshot, InventoryStore
|
||||
from clan_lib.templates import (
|
||||
InputPrio,
|
||||
TemplateName,
|
||||
get_template,
|
||||
)
|
||||
from clan_lib.templates.filesystem import copy_from_nixstore
|
||||
from clan_lib.templates.handler import clan_template
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
@@ -21,9 +17,9 @@ log = logging.getLogger(__name__)
|
||||
@dataclass
|
||||
class CreateOptions:
|
||||
dest: Path
|
||||
template_name: str
|
||||
template: str
|
||||
|
||||
src_flake: Flake | None = None
|
||||
input_prio: InputPrio | None = None
|
||||
setup_git: bool = True
|
||||
initial: InventorySnapshot | None = None
|
||||
update_clan: bool = True
|
||||
@@ -47,44 +43,31 @@ def create_clan(opts: CreateOptions) -> None:
|
||||
log.warning("Setting src_flake to None")
|
||||
opts.src_flake = None
|
||||
|
||||
template = get_template(
|
||||
TemplateName(opts.template_name),
|
||||
"clan",
|
||||
input_prio=opts.input_prio,
|
||||
clan_dir=opts.src_flake,
|
||||
)
|
||||
log.info(f"Found template '{template.name}' in '{template.input_variant}'")
|
||||
if opts.src_flake is None:
|
||||
opts.src_flake = Flake(str(clan_templates()))
|
||||
|
||||
if dest.exists():
|
||||
dest /= template.name
|
||||
with clan_template(
|
||||
opts.src_flake, template_ident=opts.template, dst_dir=opts.dest
|
||||
) as _clan_dir:
|
||||
if opts.setup_git:
|
||||
run(git_command(dest, "init"))
|
||||
run(git_command(dest, "add", "."))
|
||||
|
||||
if dest.exists():
|
||||
msg = f"Destination directory {dest} already exists"
|
||||
raise ClanError(msg)
|
||||
# check if username is set
|
||||
has_username = run(
|
||||
git_command(dest, "config", "user.name"), RunOpts(check=False)
|
||||
)
|
||||
if has_username.returncode != 0:
|
||||
run(git_command(dest, "config", "user.name", "clan-tool"))
|
||||
|
||||
src = Path(template.src["path"])
|
||||
has_username = run(
|
||||
git_command(dest, "config", "user.email"), RunOpts(check=False)
|
||||
)
|
||||
if has_username.returncode != 0:
|
||||
run(git_command(dest, "config", "user.email", "clan@example.com"))
|
||||
|
||||
copy_from_nixstore(src, dest)
|
||||
|
||||
if opts.setup_git:
|
||||
run(git_command(dest, "init"))
|
||||
run(git_command(dest, "add", "."))
|
||||
|
||||
# check if username is set
|
||||
has_username = run(
|
||||
git_command(dest, "config", "user.name"), RunOpts(check=False)
|
||||
)
|
||||
if has_username.returncode != 0:
|
||||
run(git_command(dest, "config", "user.name", "clan-tool"))
|
||||
|
||||
has_username = run(
|
||||
git_command(dest, "config", "user.email"), RunOpts(check=False)
|
||||
)
|
||||
if has_username.returncode != 0:
|
||||
run(git_command(dest, "config", "user.email", "clan@example.com"))
|
||||
|
||||
if opts.update_clan:
|
||||
run(nix_command(["flake", "update"]), RunOpts(cwd=dest))
|
||||
if opts.update_clan:
|
||||
run(nix_command(["flake", "update"]), RunOpts(cwd=dest))
|
||||
|
||||
if opts.initial:
|
||||
inventory_store = InventoryStore(flake=Flake(str(opts.dest)))
|
||||
|
||||
@@ -1,12 +1,12 @@
|
||||
from clan_lib.api import API
|
||||
from clan_lib.errors import ClanError
|
||||
from clan_lib.flake import Flake
|
||||
from clan_lib.nix_models.clan import InventoryMeta as Meta
|
||||
from clan_lib.nix_models.clan import InventoryMeta
|
||||
from clan_lib.persist.inventory_store import InventoryStore
|
||||
|
||||
|
||||
@API.register
|
||||
def show_clan_meta(flake: Flake) -> Meta:
|
||||
def get_clan_details(flake: Flake) -> InventoryMeta:
|
||||
if flake.is_local and not flake.path.exists():
|
||||
msg = f"Path {flake} does not exist"
|
||||
raise ClanError(msg, description="clan directory does not exist")
|
||||
|
||||
@@ -14,7 +14,7 @@ class UpdateOptions:
|
||||
|
||||
|
||||
@API.register
|
||||
def update_clan_meta(options: UpdateOptions) -> InventorySnapshot:
|
||||
def set_clan_details(options: UpdateOptions) -> InventorySnapshot:
|
||||
inventory_store = InventoryStore(options.flake)
|
||||
inventory = inventory_store.read()
|
||||
set_value_by_path(inventory, "meta", options.meta)
|
||||
|
||||
@@ -290,6 +290,7 @@ class RunOpts:
|
||||
# Ask for sudo password in a graphical way.
|
||||
# This is needed for GUI applications
|
||||
graphical_perm: bool = False
|
||||
trace: bool = True
|
||||
|
||||
|
||||
def cmd_with_root(cmd: list[str], graphical: bool = False) -> list[str]:
|
||||
@@ -344,7 +345,7 @@ def run(
|
||||
# Use our sudo ask proxy here as well
|
||||
options.needs_user_terminal = True
|
||||
|
||||
if cmdlog.isEnabledFor(logging.DEBUG):
|
||||
if cmdlog.isEnabledFor(logging.DEBUG) and options.trace:
|
||||
if options.input and isinstance(options.input, bytes):
|
||||
if any(
|
||||
not ch.isprintable() for ch in options.input.decode("ascii", "replace")
|
||||
|
||||
@@ -345,6 +345,23 @@ class FlakeCacheEntry:
|
||||
msg = f"Cannot insert {value} into cache, already have {self.value}"
|
||||
raise TypeError(msg)
|
||||
|
||||
def _check_path_exists(self, path_str: str) -> bool:
|
||||
"""Check if a path exists, handling potential line number suffixes."""
|
||||
path = Path(path_str)
|
||||
if path.exists():
|
||||
return True
|
||||
|
||||
# Try stripping line numbers if the path doesn't exist
|
||||
# Handle format: /path/to/file:123 or /path/to/file:123:456
|
||||
if ":" in path_str:
|
||||
parts = path_str.split(":")
|
||||
if len(parts) >= 2:
|
||||
# Check if all parts after the first colon are numbers
|
||||
if all(part.isdigit() for part in parts[1:]):
|
||||
base_path = parts[0]
|
||||
return Path(base_path).exists()
|
||||
return False
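Pulled out of the class, the same suffix-stripping check looks like this; the example paths are hypothetical:

```python
from pathlib import Path

def check_path_exists(path_str: str) -> bool:
    # Exact match first; otherwise strip a trailing ":<line>" or ":<line>:<col>" suffix.
    if Path(path_str).exists():
        return True
    base, _, suffix = path_str.partition(":")
    if suffix and all(part.isdigit() for part in suffix.split(":")):
        return Path(base).exists()
    return False

# check_path_exists("/nix/store/abc-foo.nix:42")    -> True if /nix/store/abc-foo.nix exists
# check_path_exists("/nix/store/abc-foo.nix:42:10") -> same, line and column are ignored
# check_path_exists("file:with:colons")             -> only True if that literal path exists
```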
|
||||
|
||||
def is_cached(self, selectors: list[Selector]) -> bool:
|
||||
selector: Selector
|
||||
|
||||
@@ -353,12 +370,12 @@ class FlakeCacheEntry:
|
||||
# Check if it's a regular nix store path
|
||||
nix_store_dir = os.environ.get("NIX_STORE_DIR", "/nix/store")
|
||||
if self.value.startswith(nix_store_dir):
|
||||
return Path(self.value).exists()
|
||||
return self._check_path_exists(self.value)
|
||||
|
||||
# Check if it's a test store path
|
||||
test_store = os.environ.get("CLAN_TEST_STORE")
|
||||
if test_store and self.value.startswith(test_store):
|
||||
return Path(self.value).exists()
|
||||
return self._check_path_exists(self.value)
|
||||
|
||||
# if self.value is not a dict but more selectors are requested, we assume we are cached; the select function will raise the error
|
||||
if isinstance(self.value, str | float | int | None):
|
||||
@@ -621,7 +638,9 @@ class Flake:
|
||||
return self._is_local
|
||||
|
||||
def get_input_names(self) -> list[str]:
|
||||
return self.select("inputs", apply="builtins.attrNames")
|
||||
log.debug("flake.get_input_names is deprecated and will be removed")
|
||||
flakes = self.select("inputs.*._type")
|
||||
return list(flakes.keys())
|
||||
|
||||
@property
|
||||
def path(self) -> Path:
|
||||
@@ -710,7 +729,6 @@ class Flake:
|
||||
def get_from_nix(
|
||||
self,
|
||||
selectors: list[str],
|
||||
apply: str = "v: v",
|
||||
) -> None:
|
||||
"""
|
||||
Retrieves specific attributes from a Nix flake using the provided selectors.
|
||||
@@ -729,7 +747,7 @@ class Flake:
|
||||
ClanError: If the number of outputs does not match the number of selectors.
|
||||
AssertionError: If the cache or flake cache path is not properly initialized.
|
||||
"""
|
||||
from clan_lib.cmd import Log, RunOpts, run
|
||||
from clan_lib.cmd import run, RunOpts, Log
|
||||
from clan_lib.dirs import select_source
|
||||
from clan_lib.nix import (
|
||||
nix_build,
|
||||
@@ -772,7 +790,7 @@ class Flake:
|
||||
result = builtins.toJSON [
|
||||
{" ".join(
|
||||
[
|
||||
f"(({apply}) (selectLib.applySelectors (builtins.fromJSON ''{attr}'') flake))"
|
||||
f"(selectLib.applySelectors (builtins.fromJSON ''{attr}'') flake)"
|
||||
for attr in str_selectors
|
||||
]
|
||||
)}
|
||||
@@ -795,11 +813,41 @@ class Flake:
|
||||
];
|
||||
}}
|
||||
"""
|
||||
if len(selectors) > 1:
|
||||
log.debug(f"""
|
||||
selecting: {selectors}
|
||||
to debug run:
|
||||
nix repl --expr 'rec {{
|
||||
flake = builtins.getFlake "self.identifier";
|
||||
selectLib = (builtins.getFlake "path:{select_source()}?narHash={select_hash}").lib;
|
||||
query = [
|
||||
{" ".join(
|
||||
[
|
||||
f"(selectLib.select ''{selector}'' flake)"
|
||||
for selector in selectors
|
||||
]
|
||||
)}
|
||||
];
|
||||
}}'
|
||||
""")
|
||||
# fmt: on
|
||||
elif len(selectors) == 1:
|
||||
log.debug(
|
||||
f"""
|
||||
selecting: {selectors[0]}
|
||||
to debug run:
|
||||
nix repl --expr 'rec {{
|
||||
flake = builtins.getFlake "{self.identifier}";
|
||||
selectLib = (builtins.getFlake "path:{select_source()}?narHash={select_hash}").lib;
|
||||
query = selectLib.select '"''{selectors[0]}''"' flake;
|
||||
}}'
|
||||
"""
|
||||
)
|
||||
|
||||
build_output = Path(
|
||||
run(
|
||||
nix_build(["--expr", nix_code, *nix_options]), RunOpts(log=Log.NONE)
|
||||
nix_build(["--expr", nix_code, *nix_options]),
|
||||
RunOpts(log=Log.NONE, trace=False),
|
||||
).stdout.strip()
|
||||
)
|
||||
|
||||
@@ -840,7 +888,6 @@ class Flake:
|
||||
def select(
|
||||
self,
|
||||
selector: str,
|
||||
apply: str = "v: v",
|
||||
) -> Any:
|
||||
"""
|
||||
Selects a value from the cache based on the provided selector string.
|
||||
@@ -856,6 +903,23 @@ class Flake:
|
||||
|
||||
if not self._cache.is_cached(selector):
|
||||
log.debug(f"Cache miss for {selector}")
|
||||
self.get_from_nix([selector], apply=apply)
|
||||
self.get_from_nix([selector])
|
||||
value = self._cache.select(selector)
|
||||
return value
|
||||
|
||||
def select_machine(self, machine_name: str, selector: str) -> Any:
|
||||
"""
|
||||
Select a nix attribute for a specific machine.
|
||||
|
||||
Args:
|
||||
machine_name: The name of the machine
|
||||
selector: The attribute selector string relative to the machine config
|
||||
|
||||
"""
|
||||
from clan_lib.nix import nix_config
|
||||
|
||||
config = nix_config()
|
||||
system = config["system"]
|
||||
|
||||
full_selector = f'clanInternals.machines."{system}"."{machine_name}".{selector}'
|
||||
return self.select(full_selector)
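`Flake.select_machine` simply prefixes the selector with `clanInternals.machines."<system>"."<name>".` and reuses the cached `select`. A hedged usage sketch (the flake path and machine name are made up):

```python
flake = Flake("/home/user/my-clan")  # hypothetical local clan flake

# Expands to: clanInternals.machines."<system>"."demo".config.clan.core.vars.settings.secretModule
secret_module = flake.select_machine("demo", "config.clan.core.vars.settings.secretModule")

# Subsequent machine-scoped lookups hit the same cache as plain flake.select().
groups = flake.select_machine("demo", "config.clan.core.sops.defaultGroups")
```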
|
||||
|
||||
@@ -296,3 +296,81 @@ def test_cache_gc(tmp_path: Path, monkeypatch: pytest.MonkeyPatch) -> None:
|
||||
assert my_flake._cache.is_cached("testfile") # noqa: SLF001
|
||||
subprocess.run(["nix-collect-garbage"], check=True)
|
||||
assert not my_flake._cache.is_cached("testfile") # noqa: SLF001
|
||||
|
||||
|
||||
def test_cache_path_with_line_numbers(
|
||||
tmp_path: Path, monkeypatch: pytest.MonkeyPatch
|
||||
) -> None:
|
||||
"""Test that is_cached correctly handles store paths with line numbers appended.
|
||||
|
||||
This is a regression test for the bug where cached store paths with line numbers
|
||||
(e.g., /nix/store/path:123) are not properly checked for existence.
|
||||
"""
|
||||
# Create a temporary store
|
||||
test_store = tmp_path / "test-store"
|
||||
test_store.mkdir()
|
||||
|
||||
# Set CLAN_TEST_STORE environment variable
|
||||
monkeypatch.setenv("CLAN_TEST_STORE", str(test_store))
|
||||
|
||||
# Create a fake store path
|
||||
fake_store_path = test_store / "abc123-source-file.nix"
|
||||
fake_store_path.write_text("# nix source file\n{ foo = 123; }")
|
||||
|
||||
# Create cache entries for paths with line numbers
|
||||
cache = FlakeCacheEntry()
|
||||
|
||||
# Test single line number format
|
||||
path_with_line = f"{fake_store_path}:42"
|
||||
selectors = parse_selector("testPath1")
|
||||
cache.insert(path_with_line, selectors)
|
||||
|
||||
# Test line:column format
|
||||
path_with_line_col = f"{fake_store_path}:42:10"
|
||||
selectors2 = parse_selector("testPath2")
|
||||
cache.insert(path_with_line_col, selectors2)
|
||||
|
||||
# Test path with colon but non-numeric suffix (should not be treated as line number)
|
||||
path_with_colon = test_store / "file:with:colons"
|
||||
path_with_colon.write_text("test")
|
||||
selectors3 = parse_selector("testPath3")
|
||||
cache.insert(str(path_with_colon), selectors3)
|
||||
|
||||
# Before the fix: These would return True even though the exact paths don't exist
|
||||
# After the fix: They check the base file path exists
|
||||
assert cache.is_cached(parse_selector("testPath1")), (
|
||||
"Path with line number should be cached when base file exists"
|
||||
)
|
||||
assert cache.is_cached(parse_selector("testPath2")), (
|
||||
"Path with line:column should be cached when base file exists"
|
||||
)
|
||||
assert cache.is_cached(parse_selector("testPath3")), (
|
||||
"Path with colons in filename should be cached when file exists"
|
||||
)
|
||||
|
||||
# Now delete the base file
|
||||
fake_store_path.unlink()
|
||||
|
||||
# After deletion, paths with line numbers should not be cached
|
||||
assert not cache.is_cached(parse_selector("testPath1")), (
|
||||
"Path with line number should not be cached when base file doesn't exist"
|
||||
)
|
||||
assert not cache.is_cached(parse_selector("testPath2")), (
|
||||
"Path with line:column should not be cached when base file doesn't exist"
|
||||
)
|
||||
|
||||
# Path with colons in name still exists
|
||||
assert cache.is_cached(parse_selector("testPath3")), (
|
||||
"Path with colons in filename should still be cached"
|
||||
)
|
||||
|
||||
# Test with regular /nix/store paths
|
||||
monkeypatch.delenv("CLAN_TEST_STORE", raising=False)
|
||||
cache2 = FlakeCacheEntry()
|
||||
nix_path_with_line = "/nix/store/fake-source.nix:123"
|
||||
cache2.insert(nix_path_with_line, parse_selector("nixPath"))
|
||||
|
||||
# Should return False because neither the exact path nor base path exists
|
||||
assert not cache2.is_cached(parse_selector("nixPath")), (
|
||||
"Nix store path with line number should not be cached when file doesn't exist"
|
||||
)
|
||||
|
||||
@@ -10,14 +10,3 @@ Which is an abstraction over the inventory
|
||||
|
||||
Interacting with 'clan_lib.inventory' is NOT recommended and will be removed
|
||||
"""
|
||||
|
||||
from clan_lib.api import API
|
||||
from clan_lib.flake import Flake
|
||||
from clan_lib.persist.inventory_store import InventorySnapshot, InventoryStore
|
||||
|
||||
|
||||
@API.register
|
||||
def get_inventory(flake: Flake) -> InventorySnapshot:
|
||||
inventory_store = InventoryStore(flake)
|
||||
inventory = inventory_store.read()
|
||||
return inventory
|
||||
|
||||
@@ -17,7 +17,7 @@ def example_function() -> None:
|
||||
"""Example function for creating logs."""
|
||||
|
||||
|
||||
def deploy_machine() -> None:
|
||||
def run_machine_deploy() -> None:
|
||||
"""Function for deploying machines."""
|
||||
|
||||
|
||||
@@ -41,7 +41,7 @@ def main() -> None:
|
||||
for repo in repos:
|
||||
for machine in machines:
|
||||
log_manager.create_log_file(
|
||||
deploy_machine,
|
||||
run_machine_deploy,
|
||||
f"deploy_{machine}",
|
||||
["clans", repo, "machines", machine],
|
||||
)
|
||||
|
||||
@@ -17,7 +17,7 @@ from clan_lib.log_manager import (
|
||||
|
||||
|
||||
# Test functions for log creation
|
||||
def deploy_machine() -> None:
|
||||
def run_machine_deploy() -> None:
|
||||
"""Test function for deploying machines."""
|
||||
|
||||
|
||||
@@ -194,13 +194,13 @@ class TestLogFileCreation:
|
||||
for repo in repos:
|
||||
for machine in machines:
|
||||
log_file = configured_log_manager.create_log_file(
|
||||
deploy_machine,
|
||||
run_machine_deploy,
|
||||
f"deploy_{machine}",
|
||||
["clans", repo, "machines", machine],
|
||||
)
|
||||
|
||||
assert log_file.op_key == f"deploy_{machine}"
|
||||
assert log_file.func_name == "deploy_machine"
|
||||
assert log_file.func_name == "run_machine_deploy"
|
||||
assert log_file.get_file_path().exists()
|
||||
|
||||
# Check the group structure includes URL encoding for dynamic parts
|
||||
@@ -241,7 +241,7 @@ class TestFilterFunction:
|
||||
"""Test that empty filter returns top-level groups."""
|
||||
# Create some log files first
|
||||
configured_log_manager.create_log_file(
|
||||
deploy_machine, "test_op", ["clans", "repo1", "machines", "machine1"]
|
||||
run_machine_deploy, "test_op", ["clans", "repo1", "machines", "machine1"]
|
||||
)
|
||||
|
||||
top_level = configured_log_manager.filter([])
|
||||
@@ -258,7 +258,7 @@ class TestFilterFunction:
|
||||
for repo in repos:
|
||||
for machine in machines:
|
||||
configured_log_manager.create_log_file(
|
||||
deploy_machine,
|
||||
run_machine_deploy,
|
||||
f"deploy_{machine}",
|
||||
["clans", repo, "machines", machine],
|
||||
)
|
||||
@@ -281,7 +281,7 @@ class TestFilterFunction:
|
||||
"""Test filtering with specific date."""
|
||||
# Create log file
|
||||
log_file = configured_log_manager.create_log_file(
|
||||
deploy_machine, "test_op", ["clans", "repo1", "machines", "machine1"]
|
||||
run_machine_deploy, "test_op", ["clans", "repo1", "machines", "machine1"]
|
||||
)
|
||||
|
||||
# Filter with the specific date
|
||||
@@ -308,14 +308,16 @@ class TestGetLogFile:
|
||||
"""Test getting log file by operation key."""
|
||||
# Create log file
|
||||
configured_log_manager.create_log_file(
|
||||
deploy_machine, "deploy_wintux", ["clans", "repo1", "machines", "wintux"]
|
||||
run_machine_deploy,
|
||||
"deploy_wintux",
|
||||
["clans", "repo1", "machines", "wintux"],
|
||||
)
|
||||
|
||||
# Find it by op_key
|
||||
found_log_file = configured_log_manager.get_log_file("deploy_wintux")
|
||||
assert found_log_file is not None
|
||||
assert found_log_file.op_key == "deploy_wintux"
|
||||
assert found_log_file.func_name == "deploy_machine"
|
||||
assert found_log_file.func_name == "run_machine_deploy"
|
||||
|
||||
def test_get_log_file_with_selector(
|
||||
self, configured_log_manager: LogManager
|
||||
@@ -323,10 +325,14 @@ class TestGetLogFile:
|
||||
"""Test getting log file with specific selector like example_usage.py."""
|
||||
# Create log files in different locations
|
||||
configured_log_manager.create_log_file(
|
||||
deploy_machine, "deploy_wintux", ["clans", "repo1", "machines", "wintux"]
|
||||
run_machine_deploy,
|
||||
"deploy_wintux",
|
||||
["clans", "repo1", "machines", "wintux"],
|
||||
)
|
||||
configured_log_manager.create_log_file(
|
||||
deploy_machine, "deploy_wintux", ["clans", "repo2", "machines", "wintux"]
|
||||
run_machine_deploy,
|
||||
"deploy_wintux",
|
||||
["clans", "repo2", "machines", "wintux"],
|
||||
)
|
||||
|
||||
# Find specific one using selector
|
||||
@@ -341,7 +347,7 @@ class TestGetLogFile:
|
||||
"""Test getting log file with specific date."""
|
||||
# Create log file
|
||||
log_file = configured_log_manager.create_log_file(
|
||||
deploy_machine, "deploy_demo", ["clans", "repo1", "machines", "demo"]
|
||||
run_machine_deploy, "deploy_demo", ["clans", "repo1", "machines", "demo"]
|
||||
)
|
||||
|
||||
# Find it by op_key and date
|
||||
@@ -378,10 +384,10 @@ class TestListLogDays:
|
||||
"""Test listing log days when logs exist."""
|
||||
# Create log files
|
||||
configured_log_manager.create_log_file(
|
||||
deploy_machine, "op1", ["clans", "repo1", "machines", "machine1"]
|
||||
run_machine_deploy, "op1", ["clans", "repo1", "machines", "machine1"]
|
||||
)
|
||||
configured_log_manager.create_log_file(
|
||||
deploy_machine, "op2", ["clans", "repo2", "machines", "machine2"]
|
||||
run_machine_deploy, "op2", ["clans", "repo2", "machines", "machine2"]
|
||||
)
|
||||
|
||||
days = configured_log_manager.list_log_days()
|
||||
@@ -406,7 +412,7 @@ class TestApiCompatibility:
|
||||
for repo in repos:
|
||||
for machine in machines:
|
||||
configured_log_manager.create_log_file(
|
||||
deploy_machine,
|
||||
run_machine_deploy,
|
||||
f"deploy_{machine}",
|
||||
["clans", repo, "machines", machine],
|
||||
)
|
||||
@@ -741,19 +747,19 @@ class TestLogFileSorting:
|
||||
# This simulates the realistic scenario where the same operation runs on different machines
|
||||
|
||||
configured_log_manager.create_log_file(
|
||||
deploy_machine,
|
||||
run_machine_deploy,
|
||||
"deploy_operation",
|
||||
["clans", "repo1", "machines", "machine1"],
|
||||
)
|
||||
|
||||
configured_log_manager.create_log_file(
|
||||
deploy_machine,
|
||||
run_machine_deploy,
|
||||
"deploy_operation",
|
||||
["clans", "repo1", "machines", "machine2"],
|
||||
)
|
||||
|
||||
configured_log_manager.create_log_file(
|
||||
deploy_machine,
|
||||
run_machine_deploy,
|
||||
"deploy_operation",
|
||||
["clans", "repo2", "machines", "machine1"],
|
||||
)
|
||||
@@ -819,7 +825,7 @@ class TestURLEncoding:
|
||||
|
||||
# Create log file with special characters
|
||||
log_file = configured_log_manager.create_log_file(
|
||||
deploy_machine,
|
||||
run_machine_deploy,
|
||||
"deploy_special",
|
||||
["clans", special_repo, "machines", special_machine],
|
||||
)
|
||||
|
||||
@@ -1,9 +1,9 @@
|
||||
from dataclasses import dataclass
|
||||
from typing import TypedDict
|
||||
|
||||
from clan_lib.api import API
|
||||
from clan_lib.errors import ClanError
|
||||
from clan_lib.flake.flake import Flake
|
||||
from clan_lib.machines.machines import Machine
|
||||
from clan_lib.nix_models.clan import (
|
||||
InventoryMachine,
|
||||
)
|
||||
@@ -65,16 +65,8 @@ def get_machine(flake: Flake, name: str) -> InventoryMachine:
|
||||
return InventoryMachine(**machine_inv)
|
||||
|
||||
|
||||
# TODO: remove this machine, once the Machine class is refactored
|
||||
# We added this now, to allow for dispatching actions. To require only 'name' and 'flake' of a machine.
|
||||
@dataclass(frozen=True)
|
||||
class MachineID:
|
||||
name: str
|
||||
flake: Flake
|
||||
|
||||
|
||||
@API.register
|
||||
def set_machine(machine: MachineID, update: InventoryMachine) -> None:
|
||||
def set_machine(machine: Machine, update: InventoryMachine) -> None:
|
||||
"""
|
||||
Update the machine information in the inventory.
|
||||
"""
|
||||
|
||||
@@ -9,7 +9,7 @@ from clan_cli.secrets.secrets import (
|
||||
list_secrets,
|
||||
)
|
||||
|
||||
from clan_lib import inventory
|
||||
from clan_lib.persist.inventory_store import InventoryStore
|
||||
from clan_lib.api import API
|
||||
from clan_lib.dirs import specific_machine_dir
|
||||
from clan_lib.machines.machines import Machine
|
||||
@@ -19,7 +19,7 @@ log = logging.getLogger(__name__)
|
||||
|
||||
@API.register
|
||||
def delete_machine(machine: Machine) -> None:
|
||||
inventory_store = inventory.InventoryStore(machine.flake)
|
||||
inventory_store = InventoryStore(machine.flake)
|
||||
try:
|
||||
inventory_store.delete(
|
||||
{f"machines.{machine.name}"},
|
||||
|
||||
@@ -3,6 +3,7 @@ import logging
|
||||
from dataclasses import dataclass
|
||||
from enum import Enum
|
||||
from pathlib import Path
|
||||
from typing import TypedDict
|
||||
|
||||
from clan_lib.api import API
|
||||
from clan_lib.cmd import RunOpts, run
|
||||
@@ -40,19 +41,7 @@ class HardwareConfig(Enum):
|
||||
return HardwareConfig.NONE
|
||||
|
||||
|
||||
@API.register
|
||||
def show_machine_hardware_config(machine: Machine) -> HardwareConfig:
|
||||
"""
|
||||
Show hardware information for a machine; returns None if none exists.
|
||||
"""
|
||||
return HardwareConfig.detect_type(machine)
|
||||
|
||||
|
||||
@API.register
|
||||
def show_machine_hardware_platform(machine: Machine) -> str | None:
|
||||
"""
|
||||
Show hardware information for a machine; returns None if none exists.
|
||||
"""
|
||||
def get_machine_target_platform(machine: Machine) -> str | None:
|
||||
config = nix_config()
|
||||
system = config["system"]
|
||||
cmd = nix_eval(
|
||||
@@ -78,7 +67,7 @@ class HardwareGenerateOptions:
|
||||
|
||||
|
||||
@API.register
|
||||
def generate_machine_hardware_info(
|
||||
def run_machine_hardware_info(
|
||||
opts: HardwareGenerateOptions, target_host: Remote
|
||||
) -> HardwareConfig:
|
||||
"""
|
||||
@@ -132,7 +121,7 @@ def generate_machine_hardware_info(
|
||||
f"machines/{opts.machine}/{hw_file.name}: update hardware configuration",
|
||||
)
|
||||
try:
|
||||
show_machine_hardware_platform(opts.machine)
|
||||
get_machine_target_platform(opts.machine)
|
||||
if backup_file:
|
||||
backup_file.unlink(missing_ok=True)
|
||||
except ClanCmdError as e:
|
||||
@@ -150,3 +139,29 @@ def generate_machine_hardware_info(
|
||||
) from e
|
||||
|
||||
return opts.backend
|
||||
|
||||
|
||||
def get_machine_hardware_config(machine: Machine) -> HardwareConfig:
|
||||
"""
|
||||
Detect and return the full hardware configuration for the given machine.
|
||||
|
||||
Returns:
|
||||
HardwareConfig: Structured hardware information, or None if unavailable.
|
||||
"""
|
||||
return HardwareConfig.detect_type(machine)
|
||||
|
||||
|
||||
class MachineHardwareBrief(TypedDict):
|
||||
hardware_config: HardwareConfig
|
||||
platform: str | None
|
||||
|
||||
|
||||
@API.register
|
||||
def get_machine_hardware_summary(machine: Machine) -> MachineHardwareBrief:
|
||||
"""
|
||||
Return a high-level summary of hardware config and platform type.
|
||||
"""
|
||||
return {
|
||||
"hardware_config": get_machine_hardware_config(machine),
|
||||
"platform": get_machine_target_platform(machine),
|
||||
}
|
||||
|
||||
@@ -39,7 +39,7 @@ class InstallOptions:
|
||||
|
||||
|
||||
@API.register
|
||||
def install_machine(opts: InstallOptions, target_host: Remote) -> None:
|
||||
def run_machine_install(opts: InstallOptions, target_host: Remote) -> None:
|
||||
machine = opts.machine
|
||||
|
||||
machine.debug(f"installing {machine.name}")
|
||||
|
||||
@@ -16,12 +16,12 @@ from clan_lib.nix_models.clan import InventoryMachine
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def convert_inventory_to_machines(
|
||||
def instantiate_inventory_to_machines(
|
||||
flake: Flake, machines: dict[str, InventoryMachine]
|
||||
) -> dict[str, Machine]:
|
||||
return {
|
||||
name: Machine.from_inventory(name, flake, inventory_machine)
|
||||
for name, inventory_machine in machines.items()
|
||||
name: Machine.from_inventory(name, flake, _inventory_machine)
|
||||
for name, _inventory_machine in machines.items()
|
||||
}
|
||||
|
||||
|
||||
@@ -31,26 +31,7 @@ def list_full_machines(flake: Flake) -> dict[str, Machine]:
|
||||
"""
|
||||
machines = list_machines(flake)
|
||||
|
||||
return convert_inventory_to_machines(flake, machines)
|
||||
|
||||
|
||||
def query_machines_by_tags(
|
||||
flake: Flake, tags: list[str]
|
||||
) -> dict[str, InventoryMachine]:
|
||||
"""
|
||||
Query machines by their respective tags, if multiple tags are specified
|
||||
then only machines that have those respective tags specified will be listed.
|
||||
It is an intersection of the tags and machines.
|
||||
"""
|
||||
machines = list_machines(flake)
|
||||
|
||||
filtered_machines = {}
|
||||
for machine_name, machine in machines.items():
|
||||
machine_tags = machine.get("tags", [])
|
||||
if all(tag in machine_tags for tag in tags):
|
||||
filtered_machines[machine_name] = machine
|
||||
|
||||
return filtered_machines
|
||||
return instantiate_inventory_to_machines(flake, machines)
|
||||
|
||||
|
||||
@dataclass
|
||||
|
||||
@@ -13,8 +13,6 @@ from clan_cli.vars._types import StoreBase
|
||||
from clan_lib.api import API
|
||||
from clan_lib.errors import ClanCmdError, ClanError
|
||||
from clan_lib.flake import Flake
|
||||
from clan_lib.machines.actions import get_machine
|
||||
from clan_lib.nix import nix_config
|
||||
from clan_lib.nix_models.clan import InventoryMachine
|
||||
from clan_lib.ssh.remote import Remote
|
||||
|
||||
@@ -39,6 +37,9 @@ class Machine:
|
||||
return cls(name=name, flake=flake)
|
||||
|
||||
def get_inv_machine(self) -> "InventoryMachine":
|
||||
# Import on demand to avoid circular imports
|
||||
from clan_lib.machines.actions import get_machine
|
||||
|
||||
return get_machine(self.flake, self.name)
|
||||
|
||||
def get_id(self) -> str:
|
||||
@@ -103,13 +104,13 @@ class Machine:
|
||||
def secret_vars_store(self) -> StoreBase:
|
||||
secret_module = self.select("config.clan.core.vars.settings.secretModule")
|
||||
module = importlib.import_module(secret_module)
|
||||
return module.SecretStore(machine=self)
|
||||
return module.SecretStore(machine=self.name, flake=self.flake)
|
||||
|
||||
@cached_property
|
||||
def public_vars_store(self) -> StoreBase:
|
||||
public_module = self.select("config.clan.core.vars.settings.publicModule")
|
||||
module = importlib.import_module(public_module)
|
||||
return module.FactStore(machine=self)
|
||||
return module.FactStore(machine=self.name, flake=self.flake)
|
||||
|
||||
@property
|
||||
def facts_data(self) -> dict[str, dict[str, Any]]:
|
||||
@@ -127,7 +128,7 @@ class Machine:
|
||||
return self.flake.path
|
||||
|
||||
def target_host(self) -> Remote:
|
||||
remote = get_host(self.name, self.flake, field="targetHost")
|
||||
remote = get_machine_host(self.name, self.flake, field="targetHost")
|
||||
if remote is None:
|
||||
msg = f"'targetHost' is not set for machine '{self.name}'"
|
||||
raise ClanError(
|
||||
@@ -142,7 +143,7 @@ class Machine:
|
||||
The host where the machine is built and deployed from.
|
||||
Can be the same as the target host.
|
||||
"""
|
||||
remote = get_host(self.name, self.flake, field="buildHost")
|
||||
remote = get_machine_host(self.name, self.flake, field="buildHost")
|
||||
|
||||
if remote:
|
||||
data = remote.data
|
||||
@@ -158,13 +159,7 @@ class Machine:
|
||||
Select a nix attribute of the machine
|
||||
@attr: the attribute to get
|
||||
"""
|
||||
|
||||
config = nix_config()
|
||||
system = config["system"]
|
||||
|
||||
return self.flake.select(
|
||||
f'clanInternals.machines."{system}"."{self.name}".{attr}'
|
||||
)
|
||||
return self.flake.select_machine(self.name, attr)
|
||||
|
||||
|
||||
@dataclass(frozen=True)
|
||||
@@ -174,7 +169,7 @@ class RemoteSource:
|
||||
|
||||
|
||||
@API.register
|
||||
def get_host(
|
||||
def get_machine_host(
|
||||
name: str, flake: Flake, field: Literal["targetHost", "buildHost"]
|
||||
) -> RemoteSource | None:
|
||||
"""
|
||||
@@ -187,9 +182,10 @@ def get_host(
|
||||
host_str = inv_machine.get("deploy", {}).get(field)
|
||||
|
||||
if host_str is None:
|
||||
machine.warn(
|
||||
f"'{field}' is not set in `inventory.machines.${name}.deploy.targetHost` - falling back to _slower_ nixos option: `clan.core.networking.targetHost`"
|
||||
machine.info(
|
||||
f"`inventory.machines.{machine.name}.deploy.{field}` is not set — falling back to `clan.core.networking.{field}`. See: https://docs.clan.lol/guides/target-host"
|
||||
)
|
||||
|
||||
host_str = machine.select(f'config.clan.core.networking."{field}"')
|
||||
source = "machine"
|
||||
|
||||
|
||||
@@ -48,9 +48,13 @@ def get_available_machines(flake: Flake) -> list[str]:
|
||||
return list(machines.keys())
|
||||
|
||||
|
||||
def validate_machine_names(machine_names: list[str], flake: Flake) -> None:
|
||||
def validate_machine_names(machine_names: list[str], flake: Flake) -> list[str]:
|
||||
"""
|
||||
Returns a list of valid machine names
|
||||
that are guaranteed to exist in the referenced clan
|
||||
"""
|
||||
if not machine_names:
|
||||
return
|
||||
return []
|
||||
|
||||
available_machines = get_available_machines(flake)
|
||||
invalid_machines = [
|
||||
@@ -70,3 +74,5 @@ def validate_machine_names(machine_names: list[str], flake: Flake) -> None:
|
||||
error_lines.append(f"Machine '{machine_name}' not found. {suggestion_text}")
|
||||
|
||||
raise ClanError("\n".join(error_lines))
|
||||
|
||||
return machine_names
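Since `validate_machine_names` now returns the validated list, callers can chain it directly; a short sketch with made-up machine names:

```python
names = validate_machine_names(["wintux", "demo"], flake)  # raises ClanError for unknown names
machines = [Machine(name=name, flake=flake) for name in names]
```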
|
||||
|
||||
@@ -103,7 +103,7 @@ def upload_sources(machine: Machine, ssh: Remote) -> str:
|
||||
|
||||
|
||||
@API.register
|
||||
def deploy_machine(
|
||||
def run_machine_deploy(
|
||||
machine: Machine, target_host: Remote, build_host: Remote | None
|
||||
) -> None:
|
||||
with ExitStack() as stack:
|
||||
@@ -167,7 +167,11 @@ def deploy_machine(
|
||||
if become_root:
|
||||
host = sudo_host
|
||||
|
||||
remote_env = host.nix_ssh_env(control_master=False)
|
||||
# Disable SSH_ASKPASS when using a build host (headless machines)
|
||||
use_ssh_askpass = build_host is None
|
||||
remote_env = host.nix_ssh_env(
|
||||
control_master=False, use_ssh_askpass=use_ssh_askpass
|
||||
)
|
||||
ret = host.run(
|
||||
switch_cmd,
|
||||
RunOpts(
|
||||
|
||||
@@ -1,15 +1,15 @@
|
||||
# Adapted from https://github.com/numtide/deploykit
|
||||
|
||||
from enum import Enum
|
||||
from typing import Literal
|
||||
|
||||
from clan_lib.errors import ClanError
|
||||
|
||||
|
||||
class HostKeyCheck(Enum):
|
||||
STRICT = "strict" # Strictly check ssh host keys, prompt for unknown ones
|
||||
ASK = "ask" # Ask for confirmation on first use
|
||||
TOFU = "tofu" # Trust on ssh keys on first use
|
||||
NONE = "none" # Do not check ssh host keys
|
||||
HostKeyCheck = Literal[
|
||||
"strict", # Strictly check ssh host keys, prompt for unknown ones
|
||||
"ask", # Ask for confirmation on first use
|
||||
"tofu", # Trust on ssh keys on first use
|
||||
"none", # Do not check ssh host keys
|
||||
]
|
||||
|
||||
|
||||
def hostkey_to_ssh_opts(host_key_check: HostKeyCheck) -> list[str]:
|
||||
@@ -17,13 +17,13 @@ def hostkey_to_ssh_opts(host_key_check: HostKeyCheck) -> list[str]:
|
||||
Convert a HostKeyCheck value to SSH options.
|
||||
"""
|
||||
match host_key_check:
|
||||
case HostKeyCheck.STRICT:
|
||||
case "strict":
|
||||
return ["-o", "StrictHostKeyChecking=yes"]
|
||||
case HostKeyCheck.ASK:
|
||||
case "ask":
|
||||
return []
|
||||
case HostKeyCheck.TOFU:
|
||||
case "tofu":
|
||||
return ["-o", "StrictHostKeyChecking=accept-new"]
|
||||
case HostKeyCheck.NONE:
|
||||
case "none":
|
||||
return [
|
||||
"-o",
|
||||
"StrictHostKeyChecking=no",
|
||||
|
||||
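With the enum replaced by a `Literal`, call sites pass plain strings and the match statement above maps them to SSH options. A small sketch (option lists shown only where the hunk above spells them out):

```python
opts = hostkey_to_ssh_opts("tofu")    # ["-o", "StrictHostKeyChecking=accept-new"]
opts = hostkey_to_ssh_opts("strict")  # ["-o", "StrictHostKeyChecking=yes"]
opts = hostkey_to_ssh_opts("ask")     # [] - ssh prompts interactively
# "none" additionally disables host key checking; the full option list continues above.
```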
51
pkgs/clan-cli/clan_lib/ssh/password_prompt.py
Normal file
@@ -0,0 +1,51 @@
|
||||
"""
|
||||
Password prompt utilities for SSH and sudo operations.
|
||||
|
||||
This module provides functions to create password prompts using either
|
||||
GUI (zenity) or terminal (dialog) interfaces based on the environment.
|
||||
"""
|
||||
|
||||
import os
|
||||
import sys
|
||||
|
||||
from clan_lib.nix import nix_shell
|
||||
|
||||
|
||||
def get_password_command(title: str = "SSH Password", message: str = "") -> list[str]:
|
||||
"""
|
||||
Determine the appropriate password prompt command based on environment.
|
||||
|
||||
This function checks if a GUI environment is available and selects either zenity (for GUI)
|
||||
or dialog (for terminal) to create a password prompt. It then returns a command that will
|
||||
execute the selected tool within a Nix shell with the necessary dependencies.
|
||||
|
||||
Args:
|
||||
title: Title for the password dialog
|
||||
message: Optional message for the dialog (only used by dialog, not zenity)
|
||||
|
||||
Returns:
|
||||
A list of strings representing the shell command to execute the password prompt
|
||||
"""
|
||||
if (
|
||||
os.environ.get("DISPLAY")
|
||||
or os.environ.get("WAYLAND_DISPLAY")
|
||||
or sys.platform == "darwin"
|
||||
):
|
||||
# GUI environment - use zenity
|
||||
cmd = ["zenity", "--password", "--title", title]
|
||||
dependencies = ["zenity"]
|
||||
else:
|
||||
# Terminal environment - use dialog
|
||||
cmd = [
|
||||
"dialog",
|
||||
"--stdout",
|
||||
"--insecure",
|
||||
"--title",
|
||||
title,
|
||||
"--passwordbox",
|
||||
message or "Enter password:",
|
||||
"10",
|
||||
"50",
|
||||
]
|
||||
dependencies = ["dialog"]
|
||||
return nix_shell(dependencies, cmd)
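A sketch of how the returned command might be consumed by a caller; the title text is an assumption:

```python
import subprocess

from clan_lib.ssh.password_prompt import get_password_command

cmd = get_password_command(title="sudo password for demo-machine")
# Runs the zenity/dialog wrapper inside a nix shell and captures the entered password.
proc = subprocess.run(cmd, stdout=subprocess.PIPE, text=True, check=True)
password = proc.stdout.rstrip("\n")
```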
|
||||
Some files were not shown because too many files have changed in this diff.