Compare commits
255 Commits
fix/combob...terst
| Author | SHA1 | Date |
|---|---|---|
| | 5fda2a12c8 | |
| | 295af516f8 | |
| | d4283f170e | |
| | f86fe07b63 | |
| | 84bf9fe3cf | |
| | 9737ce51b1 | |
| | 3c1c6c1942 | |
| | 0904c9da60 | |
| | 73e03c21f7 | |
| | 743f05a1b5 | |
| | e28b8dc944 | |
| | 8d871aafe8 | |
| | 26559ff88b | |
| | b39aead1db | |
| | caae6c7a55 | |
| | df90fb20a0 | |
| | d6577ec05d | |
| | 9a66170aa1 | |
| | 3effd8fd9a | |
| | 86d6b42f4c | |
| | 5e2ffa9491 | |
| | ba58d6d91a | |
| | 38e2d00bbc | |
| | 4f29f2e2ca | |
| | 5c23e24315 | |
| | 1009c61c9f | |
| | 0817f83b0b | |
| | 4f191f3ebe | |
| | 9f48b7a2fa | |
| | b17466c84b | |
| | d2814efcde | |
| | 6a5a83f57a | |
| | 9e9ab22c37 | |
| | 2840d3a5fc | |
| | a305f98586 | |
| | 96fe387399 | |
| | d022f26c2c | |
| | d1f5a8e263 | |
| | a6a5c1e21d | |
| | f1867bdd7a | |
| | ee8e44d255 | |
| | f730f4fa06 | |
| | 567570e89c | |
| | 54797dd5f5 | |
| | c70c588c1c | |
| | bb6fab1168 | |
| | 0859a86ce0 | |
| | 1524dc963e | |
| | eebd3fa4ec | |
| | a1ff794d57 | |
| | 6693cda465 | |
| | bf0691587d | |
| | deecb966ce | |
| | 2d2d9c9dca | |
| | e0e16de144 | |
| | 75c60a6103 | |
| | 1373670dfc | |
| | 03b13e9ed4 | |
| | a79027c312 | |
| | bdcdf4e788 | |
| | e3ed9d7b4b | |
| | ddf2b57b3f | |
| | 5ab3a164c8 | |
| | 073027f7c6 | |
| | d0374c0d7c | |
| | 6137701532 | |
| | 1560c5f8cf | |
| | 5d884cecc2 | |
| | 8a3cade082 | |
| | 10b4389309 | |
| | 2879c72a89 | |
| | 547e912c4e | |
| | 87125f1ff7 | |
| | 804f606384 | |
| | 997c7de942 | |
| | e7323999f2 | |
| | 31d3997358 | |
| | 62b748624d | |
| | 29f440a482 | |
| | f15fd1be52 | |
| | beaacf81c6 | |
| | 1ae023f4bf | |
| | 9becd4e0c4 | |
| | 454b09a67e | |
| | 787781c2ad | |
| | 469c6ba42d | |
| | 21f335fa15 | |
| | d98b76e734 | |
| | e2cb1fd83f | |
| | 91646b323a | |
| | 121548ffb7 | |
| | 966a3ee919 | |
| | d007b0f1b3 | |
| | 034982bff2 | |
| | 7c37bddeea | |
| | c1a87e5c6a | |
| | e5bea3d49a | |
| | 40682972ef | |
| | 6035455cba | |
| | 9be53a9a63 | |
| | 29ec9dbe26 | |
| | a1874c940e | |
| | d115705cb4 | |
| | eceb6eb999 | |
| | d25cace522 | |
| | 3c6567e67d | |
| | 628e45293e | |
| | 954c14513a | |
| | cb8a01d448 | |
| | 8e53e42b74 | |
| | ed596a57f0 | |
| | b6bccd218a | |
| | 1df9b6e97d | |
| | 58fa7ac32b | |
| | 110d1d4921 | |
| | 46aee098c4 | |
| | 9d6735e8c4 | |
| | 47c94c51b6 | |
| | 1eb567682c | |
| | fcd83e7a60 | |
| | af4b00408a | |
| | aaff3b9b38 | |
| | c13741602c | |
| | 6cbe221f44 | |
| | 3cf8f605d5 | |
| | cc07e0ea44 | |
| | ccb9340478 | |
| | df096fe53b | |
| | f248cc91ad | |
| | 1129862293 | |
| | e2cb75784c | |
| | a8d48b22f8 | |
| | c0f2bcf751 | |
| | 20c23fa64b | |
| | 23573e16c4 | |
| | eaee4e8cad | |
| | 10e43a8884 | |
| | dc1cd03717 | |
| | a71a5880c1 | |
| | 6b137f21de | |
| | fbc14bf20f | |
| | 2f2f3b6898 | |
| | 3ae0f37bcb | |
| | e49d432542 | |
| | 76955533cf | |
| | d0ebc75135 | |
| | 40503306d1 | |
| | da99407e74 | |
| | 915178765b | |
| | 518de45d41 | |
| | 7d23189c1c | |
| | eec55f73a2 | |
| | 484d274c3c | |
| | a4b20f9167 | |
| | dc7291c62b | |
| | a814a44bc6 | |
| | 86a6177126 | |
| | 4536a5b4f5 | |
| | a9cfda9acb | |
| | b9f60218d7 | |
| | f69e28a133 | |
| | 1968230c28 | |
| | 9cad074732 | |
| | 4859a9ab7c | |
| | b53ecdc89d | |
| | 19603e1a1c | |
| | 7d20f3a33b | |
| | fa03c190f8 | |
| | 65101ad55a | |
| | e5db3e269b | |
| | 073750e4c5 | |
| | 8bafbcb295 | |
| | dbef6ced77 | |
| | 65e7f9e6ca | |
| | e1062ed97c | |
| | 2eb1a56d8f | |
| | 0f499fc651 | |
| | bcb7a1aa60 | |
| | 273c83ec27 | |
| | c74d7857da | |
| | 11405966c6 | |
| | 220839598d | |
| | 44dcfa7844 | |
| | 98f685f3ca | |
| | 9e43285ba8 | |
| | c0bc0417a6 | |
| | c90b69d499 | |
| | 0240acdf3e | |
| | 92726ecebc | |
| | b8e9546762 | |
| | 2039f034b1 | |
| | 0a329f43a8 | |
| | bde0a2845c | |
| | af3c6282c9 | |
| | 73ab4d2a6e | |
| | cc269c4f58 | |
| | 20021a92ea | |
| | 7b54e9b033 | |
| | 7971eceb74 | |
| | 49a5763f69 | |
| | 10694e58c8 | |
| | 0d919c4fce | |
| | 8cccf757a8 | |
| | 80c8cc8628 | |
| | ab63f0d7a4 | |
| | 06e0461ec9 | |
| | 60ba00dd8f | |
| | 90ef55f040 | |
| | de81a5d810 | |
| | 3fe65f1f12 | |
| | 6bb998f9dd | |
| | af7ce9b8ed | |
| | b74193514d | |
| | c33fd4e504 | |
| | 65f3cb562a | |
| | 355ff648d7 | |
| | f314eb04d6 | |
| | ebe206cdc0 | |
| | 2a138d3248 | |
| | 77810b1d4f | |
| | 77c840c9ba | |
| | 9df7e6df1e | |
| | a5e51f658d | |
| | 98d5b3651b | |
| | 713a1a550e | |
| | d51d656391 | |
| | 0f79af697e | |
| | 0119fc06ca | |
| | 5361261bd5 | |
| | 86e7bcc389 | |
| | 79281aba90 | |
| | dade91c292 | |
| | d285a0e716 | |
| | a97128db17 | |
| | ff7b49be5f | |
| | 0b816a2672 | |
| | e6ec331da0 | |
| | 0b05b0b1ec | |
| | efd9beba15 | |
| | dc03a9183f | |
| | ab3158ca07 | |
| | 75a1f7b67f | |
| | d453720a57 | |
| | a4331cc109 | |
| | 434ce1af49 | |
| | 488ee1ae63 | |
| | fc2e619046 | |
| | cf6c3604ca | |
| | a3ea62caba | |
| | e2e4837b29 | |
| | 96fc3d409a | |
| | 392f244361 | |
| | d2529704d5 | |
| | 62a3503987 | |
| | c39aa89e29 | |
.gitea/test (empty file, 0 lines changed)
@@ -1,28 +0,0 @@
```yaml
name: "Update pinned clan-core for checks"
on:
  repository_dispatch:
  workflow_dispatch:
  schedule:
    - cron: "51 2 * * *"
jobs:
  update-pinned-clan-core:
    runs-on: nix
    steps:
      - uses: actions/checkout@v4
        with:
          submodules: true
      - name: Update clan-core for checks
        run: nix run .#update-clan-core-for-checks
      - name: Create pull request
        env:
          CI_BOT_TOKEN: ${{ secrets.CI_BOT_TOKEN }}
        run: |
          export GIT_AUTHOR_NAME=clan-bot GIT_AUTHOR_EMAIL=clan-bot@clan.lol GIT_COMMITTER_NAME=clan-bot GIT_COMMITTER_EMAIL=clan-bot@clan.lol
          git commit -am "Update pinned clan-core for checks"

          # Use shared PR creation script
          export PR_BRANCH="update-clan-core-for-checks"
          export PR_TITLE="Update Clan Core for Checks"
          export PR_BODY="This PR updates the pinned clan-core flake input that is used for checks."

          ./.gitea/workflows/create-pr.sh
```
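The workflow above delegates the actual pull-request creation to a shared `./.gitea/workflows/create-pr.sh` script that is not part of this comparison. A minimal sketch of what such a helper might do, assuming the exported `PR_*` variables and `CI_BOT_TOKEN` from the step above and Gitea's standard pull-request API (the repository slug and base branch here are assumptions):

```bash
#!/usr/bin/env bash
# Hypothetical sketch -- the real .gitea/workflows/create-pr.sh is not shown in this diff.
# Assumes PR_BRANCH, PR_TITLE, PR_BODY and CI_BOT_TOKEN are exported by the calling workflow.
set -euo pipefail

repo="clan/clan-core"                # assumption: repository slug
api="https://git.clan.lol/api/v1"    # Gitea REST API base URL

# Push the prepared commit to the PR branch (force-push keeps the branch current).
git push --force origin "HEAD:refs/heads/${PR_BRANCH}"

# Open the pull request via Gitea's API; tolerate the error if it already exists.
curl -sSf -X POST "${api}/repos/${repo}/pulls" \
  -H "Authorization: token ${CI_BOT_TOKEN}" \
  -H "Content-Type: application/json" \
  -d "$(jq -n --arg head "$PR_BRANCH" --arg title "$PR_TITLE" --arg body "$PR_BODY" \
        '{head: $head, base: "main", title: $title, body: $body}')" \
  || echo "Pull request may already exist; skipping."
```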
@@ -1,210 +0,0 @@
|
||||
{ self, ... }:
|
||||
{
|
||||
clan.machines.test-backup = {
|
||||
imports = [ self.nixosModules.test-backup ];
|
||||
fileSystems."/".device = "/dev/null";
|
||||
boot.loader.grub.device = "/dev/null";
|
||||
};
|
||||
clan.inventory.services = {
|
||||
borgbackup.test-backup = {
|
||||
roles.client.machines = [ "test-backup" ];
|
||||
roles.server.machines = [ "test-backup" ];
|
||||
};
|
||||
};
|
||||
flake.nixosModules = {
|
||||
test-backup =
|
||||
{
|
||||
pkgs,
|
||||
lib,
|
||||
...
|
||||
}:
|
||||
let
|
||||
dependencies =
|
||||
[
|
||||
pkgs.stdenv.drvPath
|
||||
]
|
||||
++ builtins.map (i: i.outPath) (builtins.attrValues (builtins.removeAttrs self.inputs [ "self" ]));
|
||||
closureInfo = pkgs.closureInfo { rootPaths = dependencies; };
|
||||
in
|
||||
{
|
||||
imports = [
|
||||
# Do not import inventory modules. They should be configured via 'clan.inventory'
|
||||
#
|
||||
# TODO: Configure localbackup via inventory
|
||||
self.clanModules.localbackup
|
||||
];
|
||||
# Borgbackup overrides
|
||||
services.borgbackup.repos.test-backups = {
|
||||
path = "/var/lib/borgbackup/test-backups";
|
||||
authorizedKeys = [ (builtins.readFile ../assets/ssh/pubkey) ];
|
||||
};
|
||||
clan.borgbackup.destinations.test-backup.repo = lib.mkForce "borg@machine:.";
|
||||
|
||||
clan.core.networking.targetHost = "machine";
|
||||
networking.hostName = "machine";
|
||||
|
||||
programs.ssh.knownHosts = {
|
||||
machine.hostNames = [ "machine" ];
|
||||
machine.publicKey = builtins.readFile ../assets/ssh/pubkey;
|
||||
};
|
||||
|
||||
services.openssh = {
|
||||
enable = true;
|
||||
settings.UsePAM = false;
|
||||
settings.UseDns = false;
|
||||
hostKeys = [
|
||||
{
|
||||
path = "/root/.ssh/id_ed25519";
|
||||
type = "ed25519";
|
||||
}
|
||||
];
|
||||
};
|
||||
|
||||
users.users.root.openssh.authorizedKeys.keyFiles = [ ../assets/ssh/pubkey ];
|
||||
|
||||
# This is needed to unlock the user for sshd
|
||||
# Because we use sshd without setuid binaries
|
||||
users.users.borg.initialPassword = "hello";
|
||||
|
||||
systemd.tmpfiles.settings."vmsecrets" = {
|
||||
"/root/.ssh/id_ed25519" = {
|
||||
C.argument = "${../assets/ssh/privkey}";
|
||||
z = {
|
||||
mode = "0400";
|
||||
user = "root";
|
||||
};
|
||||
};
|
||||
"/etc/secrets/ssh.id_ed25519" = {
|
||||
C.argument = "${../assets/ssh/privkey}";
|
||||
z = {
|
||||
mode = "0400";
|
||||
user = "root";
|
||||
};
|
||||
};
|
||||
"/etc/secrets/borgbackup/borgbackup.ssh" = {
|
||||
C.argument = "${../assets/ssh/privkey}";
|
||||
z = {
|
||||
mode = "0400";
|
||||
user = "root";
|
||||
};
|
||||
};
|
||||
"/etc/secrets/borgbackup/borgbackup.repokey" = {
|
||||
C.argument = builtins.toString (pkgs.writeText "repokey" "repokey12345");
|
||||
z = {
|
||||
mode = "0400";
|
||||
user = "root";
|
||||
};
|
||||
};
|
||||
};
|
||||
clan.core.facts.secretStore = "vm";
|
||||
clan.core.vars.settings.secretStore = "vm";
|
||||
|
||||
environment.systemPackages = [ self.packages.${pkgs.system}.clan-cli ];
|
||||
environment.etc.install-closure.source = "${closureInfo}/store-paths";
|
||||
nix.settings = {
|
||||
substituters = lib.mkForce [ ];
|
||||
hashed-mirrors = null;
|
||||
connect-timeout = lib.mkForce 3;
|
||||
flake-registry = pkgs.writeText "flake-registry" ''{"flakes":[],"version":2}'';
|
||||
};
|
||||
system.extraDependencies = dependencies;
|
||||
clan.core.state.test-backups.folders = [ "/var/test-backups" ];
|
||||
|
||||
clan.core.state.test-service = {
|
||||
preBackupScript = ''
|
||||
touch /var/test-service/pre-backup-command
|
||||
'';
|
||||
preRestoreScript = ''
|
||||
touch /var/test-service/pre-restore-command
|
||||
'';
|
||||
postRestoreScript = ''
|
||||
touch /var/test-service/post-restore-command
|
||||
'';
|
||||
folders = [ "/var/test-service" ];
|
||||
};
|
||||
|
||||
fileSystems."/mnt/external-disk" = {
|
||||
device = "/dev/vdb"; # created in tests with virtualisation.emptyDisks
|
||||
autoFormat = true;
|
||||
fsType = "ext4";
|
||||
options = [
|
||||
"defaults"
|
||||
"noauto"
|
||||
];
|
||||
};
|
||||
|
||||
clan.localbackup.targets.hdd = {
|
||||
directory = "/mnt/external-disk";
|
||||
preMountHook = ''
|
||||
touch /run/mount-external-disk
|
||||
'';
|
||||
postUnmountHook = ''
|
||||
touch /run/unmount-external-disk
|
||||
'';
|
||||
};
|
||||
};
|
||||
};
|
||||
perSystem =
|
||||
{ pkgs, ... }:
|
||||
let
|
||||
clanCore = self.checks.x86_64-linux.clan-core-for-checks;
|
||||
in
|
||||
{
|
||||
checks = pkgs.lib.mkIf pkgs.stdenv.isLinux {
|
||||
nixos-test-backups = self.clanLib.test.containerTest {
|
||||
name = "nixos-test-backups";
|
||||
nodes.machine = {
|
||||
imports =
|
||||
[
|
||||
self.nixosModules.clanCore
|
||||
# Some custom overrides for the backup tests
|
||||
self.nixosModules.test-backup
|
||||
]
|
||||
++
|
||||
# import the inventory generated nixosModules
|
||||
self.clan.clanInternals.inventoryClass.machines.test-backup.machineImports;
|
||||
clan.core.settings.directory = ./.;
|
||||
};
|
||||
|
||||
testScript = ''
|
||||
import json
|
||||
start_all()
|
||||
|
||||
# dummy data
|
||||
machine.succeed("mkdir -p /var/test-backups /var/test-service")
|
||||
machine.succeed("echo testing > /var/test-backups/somefile")
|
||||
|
||||
# create
|
||||
machine.succeed("clan backups create --debug --flake ${clanCore} test-backup")
|
||||
machine.wait_until_succeeds("! systemctl is-active borgbackup-job-test-backup >&2")
|
||||
machine.succeed("test -f /run/mount-external-disk")
|
||||
machine.succeed("test -f /run/unmount-external-disk")
|
||||
|
||||
# list
|
||||
backup_id = json.loads(machine.succeed("borg-job-test-backup list --json"))["archives"][0]["archive"]
|
||||
out = machine.succeed("clan backups list --debug --flake ${clanCore} test-backup").strip()
|
||||
print(out)
|
||||
assert backup_id in out, f"backup {backup_id} not found in {out}"
|
||||
localbackup_id = "hdd::/mnt/external-disk/snapshot.0"
|
||||
assert localbackup_id in out, "localbackup not found in {out}"
|
||||
|
||||
## borgbackup restore
|
||||
machine.succeed("rm -f /var/test-backups/somefile")
|
||||
machine.succeed(f"clan backups restore --debug --flake ${clanCore} test-backup borgbackup 'test-backup::borg@machine:.::{backup_id}' >&2")
|
||||
assert machine.succeed("cat /var/test-backups/somefile").strip() == "testing", "restore failed"
|
||||
machine.succeed("test -f /var/test-service/pre-restore-command")
|
||||
machine.succeed("test -f /var/test-service/post-restore-command")
|
||||
machine.succeed("test -f /var/test-service/pre-backup-command")
|
||||
|
||||
## localbackup restore
|
||||
machine.succeed("rm -rf /var/test-backups/somefile /var/test-service/ && mkdir -p /var/test-service")
|
||||
machine.succeed(f"clan backups restore --debug --flake ${clanCore} test-backup localbackup '{localbackup_id}' >&2")
|
||||
assert machine.succeed("cat /var/test-backups/somefile").strip() == "testing", "restore failed"
|
||||
machine.succeed("test -f /var/test-service/pre-restore-command")
|
||||
machine.succeed("test -f /var/test-service/post-restore-command")
|
||||
machine.succeed("test -f /var/test-service/pre-backup-command")
|
||||
'';
|
||||
} { inherit pkgs self; };
|
||||
};
|
||||
};
|
||||
}
|
||||
```diff
@@ -1,6 +1,6 @@
 { fetchgit }:
 fetchgit {
   url = "https://git.clan.lol/clan/clan-core.git";
-  rev = "ba8a80eccf091fc7f99aef3895e31617d3813d20";
-  sha256 = "189srg4mc5y3prapm8day0x0wpibbqc72hrnl61agsmiq7cfmbkd";
+  rev = "5d884cecc2585a29b6a3596681839d081b4de192";
+  sha256 = "09is1afmncamavb2q88qac37vmsijxzsy1iz1vr6gsyjq2rixaxc";
 }
```
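The new rev `5d884cecc2` also appears in the commit list at the top of this comparison. A sketch of how such a pin can be refreshed by hand, assuming `nix-prefetch-git` is available (the repository's own updater is `nix run .#update-clan-core-for-checks`, as referenced in the workflow earlier):

```bash
# Hypothetical manual refresh of the clan-core pin used for checks.
rev=$(git ls-remote https://git.clan.lol/clan/clan-core.git HEAD | cut -f1)
nix-prefetch-git --url https://git.clan.lol/clan/clan-core.git --rev "$rev"
# The printed "rev" and "sha256" fields are what clan-core-for-checks.nix pins.
```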
```diff
@@ -2,6 +2,7 @@
   self,
   lib,
   inputs,
+  privateInputs ? { },
   ...
 }:
 let
@@ -33,7 +34,6 @@ in
   in
   getClanCoreTestModules
   ++ filter pathExists [
-    ./backups/flake-module.nix
     ./devshell/flake-module.nix
     ./flash/flake-module.nix
     ./impure/flake-module.nix
@@ -157,7 +157,7 @@ in
     ''

   clan-core-for-checks = pkgs.runCommand "clan-core-for-checks" { } ''
-    cp -r ${pkgs.callPackage ./clan-core-for-checks.nix { }} $out
+    cp -r ${privateInputs.clan-core-for-checks} $out
     chmod -R +w $out
     cp ${../flake.lock} $out/flake.lock
```
```diff
@@ -2,6 +2,7 @@
   config,
   self,
   lib,
+  privateInputs,
   ...
 }:
 {
@@ -50,7 +51,8 @@
       self.nixosConfigurations."test-flash-machine-${pkgs.hostPlatform.system}".config.system.build.toplevel
       self.nixosConfigurations."test-flash-machine-${pkgs.hostPlatform.system}".config.system.build.diskoScript
       self.nixosConfigurations."test-flash-machine-${pkgs.hostPlatform.system}".config.system.build.diskoScript.drvPath
-    ] ++ builtins.map (i: i.outPath) (builtins.attrValues self.inputs);
+    ]
+    ++ builtins.map (i: i.outPath) (builtins.attrValues self.inputs);
     closureInfo = pkgs.closureInfo { rootPaths = dependencies; };
   in
   {
@@ -60,6 +62,10 @@
       nodes.target = {
         virtualisation.emptyDiskImages = [ 4096 ];
         virtualisation.memorySize = 4096;
+
+        virtualisation.useNixStoreImage = true;
+        virtualisation.writableStore = true;
+
         environment.systemPackages = [ self.packages.${pkgs.system}.clan-cli ];
         environment.etc."install-closure".source = "${closureInfo}/store-paths";

@@ -78,8 +84,8 @@
           start_all()

           # Some distros like to automount disks with spaces
-          machine.succeed('mkdir -p "/mnt/with spaces" && mkfs.ext4 /dev/vdb && mount /dev/vdb "/mnt/with spaces"')
-          machine.succeed("clan flash write --debug --flake ${self.checks.x86_64-linux.clan-core-for-checks} --yes --disk main /dev/vdb test-flash-machine-${pkgs.hostPlatform.system}")
+          machine.succeed('mkdir -p "/mnt/with spaces" && mkfs.ext4 /dev/vdc && mount /dev/vdc "/mnt/with spaces"')
+          machine.succeed("clan flash write --debug --flake ${privateInputs.clan-core-for-checks} --yes --disk main /dev/vdc test-flash-machine-${pkgs.hostPlatform.system}")
         '';
       } { inherit pkgs self; };
     };
```
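`virtualisation.useNixStoreImage` and `virtualisation.writableStore` are standard NixOS test-VM options: the first gives the guest its Nix store as a disk image rather than a 9p share of the host store, and the second layers an overlay on top so the guest can write to it — presumably needed here so the flash test can copy the closure and write the target disk inside the VM.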
@@ -1,6 +1,7 @@
|
||||
{
|
||||
self,
|
||||
lib,
|
||||
privateInputs,
|
||||
|
||||
...
|
||||
}:
|
||||
@@ -151,14 +152,15 @@
|
||||
let
|
||||
closureInfo = pkgs.closureInfo {
|
||||
rootPaths = [
|
||||
self.checks.x86_64-linux.clan-core-for-checks
|
||||
privateInputs.clan-core-for-checks
|
||||
self.clanInternals.machines.${pkgs.hostPlatform.system}.test-install-machine-with-system.config.system.build.toplevel
|
||||
self.clanInternals.machines.${pkgs.hostPlatform.system}.test-install-machine-with-system.config.system.build.initialRamdisk
|
||||
self.clanInternals.machines.${pkgs.hostPlatform.system}.test-install-machine-with-system.config.system.build.diskoScript
|
||||
pkgs.stdenv.drvPath
|
||||
pkgs.bash.drvPath
|
||||
pkgs.buildPackages.xorg.lndir
|
||||
] ++ builtins.map (i: i.outPath) (builtins.attrValues self.inputs);
|
||||
]
|
||||
++ builtins.map (i: i.outPath) (builtins.attrValues self.inputs);
|
||||
};
|
||||
in
|
||||
pkgs.lib.mkIf (pkgs.stdenv.isLinux && !pkgs.stdenv.isAarch64) {
|
||||
@@ -206,7 +208,7 @@
|
||||
# Prepare test flake and Nix store
|
||||
flake_dir = prepare_test_flake(
|
||||
temp_dir,
|
||||
"${self.checks.x86_64-linux.clan-core-for-checks}",
|
||||
"${privateInputs.clan-core-for-checks}",
|
||||
"${closureInfo}"
|
||||
)
|
||||
|
||||
@@ -270,7 +272,7 @@
|
||||
# Prepare test flake and Nix store
|
||||
flake_dir = prepare_test_flake(
|
||||
temp_dir,
|
||||
"${self.checks.x86_64-linux.clan-core-for-checks}",
|
||||
"${privateInputs.clan-core-for-checks}",
|
||||
"${closureInfo}"
|
||||
)
|
||||
|
||||
|
||||
@@ -159,7 +159,8 @@ let
|
||||
pkgs.stdenv.drvPath
|
||||
pkgs.bash.drvPath
|
||||
pkgs.buildPackages.xorg.lndir
|
||||
] ++ builtins.map (i: i.outPath) (builtins.attrValues self.inputs);
|
||||
]
|
||||
++ builtins.map (i: i.outPath) (builtins.attrValues self.inputs);
|
||||
};
|
||||
|
||||
in
|
||||
|
||||
```diff
@@ -1,5 +1,6 @@
 {
   self,
+  privateInputs,
   ...
 }:
 {
@@ -35,7 +36,8 @@
       pkgs.stdenv.drvPath
       pkgs.stdenvNoCC
       self.nixosConfigurations.test-morph-machine.config.system.build.toplevel
-    ] ++ builtins.map (i: i.outPath) (builtins.attrValues self.inputs);
+    ]
+    ++ builtins.map (i: i.outPath) (builtins.attrValues self.inputs);
     closureInfo = pkgs.closureInfo { rootPaths = dependencies; };
   in

@@ -53,7 +55,7 @@
       testScript = ''
         start_all()
         actual.fail("cat /etc/testfile")
-        actual.succeed("env CLAN_DIR=${self.checks.x86_64-linux.clan-core-for-checks} clan machines morph test-morph-template --i-will-be-fired-for-using-this --debug --name test-morph-machine")
+        actual.succeed("env CLAN_DIR=${privateInputs.clan-core-for-checks} clan machines morph test-morph-template --i-will-be-fired-for-using-this --debug --name test-morph-machine")
         assert actual.succeed("cat /etc/testfile") == "morphed"
       '';
     } { inherit pkgs self; };
```
@@ -35,6 +35,13 @@
|
||||
services.openssh.enable = true;
|
||||
services.openssh.settings.PasswordAuthentication = false;
|
||||
users.users.root.openssh.authorizedKeys.keys = [ (builtins.readFile ../assets/ssh/pubkey) ];
|
||||
services.openssh.knownHosts.localhost.publicKeyFile = ../assets/ssh/pubkey;
|
||||
services.openssh.hostKeys = [
|
||||
{
|
||||
path = ../assets/ssh/privkey;
|
||||
type = "ed25519";
|
||||
}
|
||||
];
|
||||
security.sudo.wheelNeedsPassword = false;
|
||||
|
||||
boot.consoleLogLevel = lib.mkForce 100;
|
||||
@@ -99,12 +106,14 @@
|
||||
let
|
||||
closureInfo = pkgs.closureInfo {
|
||||
rootPaths = [
|
||||
self.checks.x86_64-linux.clan-core-for-checks
|
||||
self.packages.${pkgs.system}.clan-cli
|
||||
self.checks.${pkgs.system}.clan-core-for-checks
|
||||
self.clanInternals.machines.${pkgs.hostPlatform.system}.test-update-machine.config.system.build.toplevel
|
||||
pkgs.stdenv.drvPath
|
||||
pkgs.bash.drvPath
|
||||
pkgs.buildPackages.xorg.lndir
|
||||
] ++ builtins.map (i: i.outPath) (builtins.attrValues self.inputs);
|
||||
]
|
||||
++ builtins.map (i: i.outPath) (builtins.attrValues self.inputs);
|
||||
};
|
||||
in
|
||||
self.clanLib.test.containerTest {
|
||||
@@ -150,15 +159,7 @@
|
||||
# Update the machine configuration to add a new file
|
||||
machine_config_path = os.path.join(flake_dir, "machines", "test-update-machine", "configuration.nix")
|
||||
os.makedirs(os.path.dirname(machine_config_path), exist_ok=True)
|
||||
|
||||
with open(machine_config_path, "w") as f:
|
||||
f.write("""
|
||||
{
|
||||
environment.etc."update-successful".text = "ok";
|
||||
}
|
||||
""")
|
||||
|
||||
# Run clan update command
|
||||
# Note: update command doesn't accept -i flag, SSH key must be in ssh-agent
|
||||
# Start ssh-agent and add the key
|
||||
agent_output = subprocess.check_output(["${pkgs.openssh}/bin/ssh-agent", "-s"], text=True)
|
||||
@@ -167,11 +168,86 @@
|
||||
os.environ["SSH_AUTH_SOCK"] = line.split("=", 1)[1].split(";")[0]
|
||||
elif line.startswith("SSH_AGENT_PID="):
|
||||
os.environ["SSH_AGENT_PID"] = line.split("=", 1)[1].split(";")[0]
|
||||
|
||||
|
||||
# Add the SSH key to the agent
|
||||
subprocess.run(["${pkgs.openssh}/bin/ssh-add", ssh_conn.ssh_key], check=True)
|
||||
|
||||
|
||||
##############
|
||||
print("TEST: update with --build-host localhost --target-host localhost")
|
||||
with open(machine_config_path, "w") as f:
|
||||
f.write("""
|
||||
{
|
||||
environment.etc."update-build-local-successful".text = "ok";
|
||||
}
|
||||
""")
|
||||
|
||||
# rsync the flake into the container
|
||||
os.environ["PATH"] = f"{os.environ['PATH']}:${pkgs.openssh}/bin"
|
||||
subprocess.run(
|
||||
[
|
||||
"${pkgs.rsync}/bin/rsync",
|
||||
"-a",
|
||||
"--delete",
|
||||
"-e",
|
||||
"ssh -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no",
|
||||
f"{str(flake_dir)}/",
|
||||
f"root@192.168.1.1:/flake",
|
||||
],
|
||||
check=True
|
||||
)
|
||||
|
||||
# install the clan-cli package into the container's Nix store
|
||||
subprocess.run(
|
||||
[
|
||||
"${pkgs.nix}/bin/nix",
|
||||
"copy",
|
||||
"--to",
|
||||
"ssh://root@192.168.1.1",
|
||||
"--no-check-sigs",
|
||||
f"${self.packages.${pkgs.system}.clan-cli}",
|
||||
"--extra-experimental-features", "nix-command flakes",
|
||||
"--from", f"{os.environ["TMPDIR"]}/store"
|
||||
],
|
||||
check=True,
|
||||
env={
|
||||
**os.environ,
|
||||
"NIX_SSHOPTS": "-o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no",
|
||||
},
|
||||
)
|
||||
|
||||
# Run ssh on the host to run the clan update command via --build-host localhost
|
||||
subprocess.run([
|
||||
"ssh",
|
||||
"-o", "UserKnownHostsFile=/dev/null",
|
||||
"-o", "StrictHostKeyChecking=no",
|
||||
f"root@192.168.1.1",
|
||||
"${self.packages.${pkgs.system}.clan-cli}/bin/clan",
|
||||
"machines",
|
||||
"update",
|
||||
"--debug",
|
||||
"--flake", "/flake",
|
||||
"--host-key-check", "none",
|
||||
"--upload-inputs", # Use local store instead of fetching from network
|
||||
"--build-host", "localhost",
|
||||
"--target-host", "localhost",
|
||||
"test-update-machine",
|
||||
], check=True)
|
||||
|
||||
# Verify the update was successful
|
||||
machine.succeed("test -f /etc/update-build-local-successful")
|
||||
|
||||
|
||||
##############
|
||||
print("TEST: update with --target-host")
|
||||
|
||||
with open(machine_config_path, "w") as f:
|
||||
f.write("""
|
||||
{
|
||||
environment.etc."target-host-update-successful".text = "ok";
|
||||
}
|
||||
""")
|
||||
|
||||
# Run clan update command
|
||||
subprocess.run([
|
||||
"${self.packages.${pkgs.system}.clan-cli-full}/bin/clan",
|
||||
@@ -180,16 +256,18 @@
|
||||
"--debug",
|
||||
"--flake", flake_dir,
|
||||
"--host-key-check", "none",
|
||||
"--fetch-local", # Use local store instead of fetching from network
|
||||
"--upload-inputs", # Use local store instead of fetching from network
|
||||
"test-update-machine",
|
||||
"--target-host", f"root@192.168.1.1:{ssh_conn.host_port}",
|
||||
], check=True)
|
||||
|
||||
# Verify the update was successful
|
||||
machine.succeed("test -f /etc/update-successful")
|
||||
machine.succeed("test -f /etc/target-host-update-successful")
|
||||
|
||||
# Test update with --build-host
|
||||
# Update configuration again to test build-host functionality
|
||||
|
||||
##############
|
||||
print("TEST: update with --build-host")
|
||||
# Update configuration again
|
||||
with open(machine_config_path, "w") as f:
|
||||
f.write("""
|
||||
{
|
||||
@@ -205,24 +283,7 @@
|
||||
"--debug",
|
||||
"--flake", flake_dir,
|
||||
"--host-key-check", "none",
|
||||
"--fetch-local", # Use local store instead of fetching from network
|
||||
"--build-host", f"root@192.168.1.1:{ssh_conn.host_port}",
|
||||
"test-update-machine",
|
||||
"--target-host", f"root@192.168.1.1:{ssh_conn.host_port}",
|
||||
], check=True)
|
||||
|
||||
# Verify the second update was successful
|
||||
machine.succeed("test -f /etc/build-host-update-successful")
|
||||
|
||||
# Run clan update command with --build-host
|
||||
subprocess.run([
|
||||
"${self.packages.${pkgs.system}.clan-cli-full}/bin/clan",
|
||||
"machines",
|
||||
"update",
|
||||
"--debug",
|
||||
"--flake", flake_dir,
|
||||
"--host-key-check", "none",
|
||||
"--fetch-local", # Use local store instead of fetching from network
|
||||
"--upload-inputs", # Use local store instead of fetching from network
|
||||
"--build-host", f"root@192.168.1.1:{ssh_conn.host_port}",
|
||||
"test-update-machine",
|
||||
"--target-host", f"root@192.168.1.1:{ssh_conn.host_port}",
|
||||
|
||||
@@ -112,125 +112,124 @@ in
|
||||
'';
|
||||
in
|
||||
lib.mkIf (cfg.targets != { }) {
|
||||
environment.systemPackages =
|
||||
[
|
||||
(pkgs.writeShellScriptBin "localbackup-create" ''
|
||||
set -efu -o pipefail
|
||||
export PATH=${
|
||||
lib.makeBinPath [
|
||||
pkgs.rsnapshot
|
||||
pkgs.coreutils
|
||||
pkgs.util-linux
|
||||
]
|
||||
}
|
||||
${lib.concatMapStringsSep "\n" (target: ''
|
||||
${mountHook target}
|
||||
echo "Creating backup '${target.name}'"
|
||||
environment.systemPackages = [
|
||||
(pkgs.writeShellScriptBin "localbackup-create" ''
|
||||
set -efu -o pipefail
|
||||
export PATH=${
|
||||
lib.makeBinPath [
|
||||
pkgs.rsnapshot
|
||||
pkgs.coreutils
|
||||
pkgs.util-linux
|
||||
]
|
||||
}
|
||||
${lib.concatMapStringsSep "\n" (target: ''
|
||||
${mountHook target}
|
||||
echo "Creating backup '${target.name}'"
|
||||
|
||||
${lib.optionalString (target.preBackupHook != null) ''
|
||||
(
|
||||
${target.preBackupHook}
|
||||
)
|
||||
''}
|
||||
|
||||
declare -A preCommandErrors
|
||||
${lib.concatMapStringsSep "\n" (
|
||||
state:
|
||||
lib.optionalString (state.preBackupCommand != null) ''
|
||||
echo "Running pre-backup command for ${state.name}"
|
||||
if ! /run/current-system/sw/bin/${state.preBackupCommand}; then
|
||||
preCommandErrors["${state.name}"]=1
|
||||
fi
|
||||
''
|
||||
) (builtins.attrValues config.clan.core.state)}
|
||||
|
||||
rsnapshot -c "${pkgs.writeText "rsnapshot.conf" (rsnapshotConfig target)}" sync
|
||||
rsnapshot -c "${pkgs.writeText "rsnapshot.conf" (rsnapshotConfig target)}" snapshot
|
||||
'') (builtins.attrValues cfg.targets)}'')
|
||||
(pkgs.writeShellScriptBin "localbackup-list" ''
|
||||
set -efu -o pipefail
|
||||
export PATH=${
|
||||
lib.makeBinPath [
|
||||
pkgs.jq
|
||||
pkgs.findutils
|
||||
pkgs.coreutils
|
||||
pkgs.util-linux
|
||||
]
|
||||
}
|
||||
(${
|
||||
lib.concatMapStringsSep "\n" (target: ''
|
||||
(
|
||||
${mountHook target}
|
||||
find ${lib.escapeShellArg target.directory} -mindepth 1 -maxdepth 1 -name "snapshot.*" -print0 -type d \
|
||||
| jq -Rs 'split("\u0000") | .[] | select(. != "") | { "name": ("${target.name}::" + .)}'
|
||||
)
|
||||
'') (builtins.attrValues cfg.targets)
|
||||
}) | jq -s .
|
||||
'')
|
||||
(pkgs.writeShellScriptBin "localbackup-restore" ''
|
||||
set -efu -o pipefail
|
||||
export PATH=${
|
||||
lib.makeBinPath [
|
||||
pkgs.rsync
|
||||
pkgs.coreutils
|
||||
pkgs.util-linux
|
||||
pkgs.gawk
|
||||
]
|
||||
}
|
||||
if [[ "''${NAME:-}" == "" ]]; then
|
||||
echo "No backup name given via NAME environment variable"
|
||||
exit 1
|
||||
fi
|
||||
if [[ "''${FOLDERS:-}" == "" ]]; then
|
||||
echo "No folders given via FOLDERS environment variable"
|
||||
exit 1
|
||||
fi
|
||||
name=$(awk -F'::' '{print $1}' <<< $NAME)
|
||||
backupname=''${NAME#$name::}
|
||||
|
||||
if command -v localbackup-mount-$name; then
|
||||
localbackup-mount-$name
|
||||
fi
|
||||
if command -v localbackup-unmount-$name; then
|
||||
trap "localbackup-unmount-$name" EXIT
|
||||
fi
|
||||
|
||||
if [[ ! -d $backupname ]]; then
|
||||
echo "No backup found $backupname"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
IFS=':' read -ra FOLDER <<< "''$FOLDERS"
|
||||
for folder in "''${FOLDER[@]}"; do
|
||||
mkdir -p "$folder"
|
||||
rsync -a "$backupname/${config.networking.hostName}$folder/" "$folder"
|
||||
done
|
||||
'')
|
||||
]
|
||||
++ (lib.mapAttrsToList (
|
||||
name: target:
|
||||
pkgs.writeShellScriptBin ("localbackup-mount-" + name) ''
|
||||
set -efu -o pipefail
|
||||
${lib.optionalString (target.preMountHook != null) target.preMountHook}
|
||||
${lib.optionalString (target.mountpoint != null) ''
|
||||
if ! ${pkgs.util-linux}/bin/mountpoint -q ${lib.escapeShellArg target.mountpoint}; then
|
||||
${pkgs.util-linux}/bin/mount -o X-mount.mkdir ${lib.escapeShellArg target.mountpoint}
|
||||
fi
|
||||
${lib.optionalString (target.preBackupHook != null) ''
|
||||
(
|
||||
${target.preBackupHook}
|
||||
)
|
||||
''}
|
||||
${lib.optionalString (target.postMountHook != null) target.postMountHook}
|
||||
''
|
||||
) cfg.targets)
|
||||
++ lib.mapAttrsToList (
|
||||
name: target:
|
||||
pkgs.writeShellScriptBin ("localbackup-unmount-" + name) ''
|
||||
set -efu -o pipefail
|
||||
${lib.optionalString (target.preUnmountHook != null) target.preUnmountHook}
|
||||
${lib.optionalString (
|
||||
target.mountpoint != null
|
||||
) "${pkgs.util-linux}/bin/umount ${lib.escapeShellArg target.mountpoint}"}
|
||||
${lib.optionalString (target.postUnmountHook != null) target.postUnmountHook}
|
||||
''
|
||||
) cfg.targets;
|
||||
|
||||
declare -A preCommandErrors
|
||||
${lib.concatMapStringsSep "\n" (
|
||||
state:
|
||||
lib.optionalString (state.preBackupCommand != null) ''
|
||||
echo "Running pre-backup command for ${state.name}"
|
||||
if ! /run/current-system/sw/bin/${state.preBackupCommand}; then
|
||||
preCommandErrors["${state.name}"]=1
|
||||
fi
|
||||
''
|
||||
) (builtins.attrValues config.clan.core.state)}
|
||||
|
||||
rsnapshot -c "${pkgs.writeText "rsnapshot.conf" (rsnapshotConfig target)}" sync
|
||||
rsnapshot -c "${pkgs.writeText "rsnapshot.conf" (rsnapshotConfig target)}" snapshot
|
||||
'') (builtins.attrValues cfg.targets)}'')
|
||||
(pkgs.writeShellScriptBin "localbackup-list" ''
|
||||
set -efu -o pipefail
|
||||
export PATH=${
|
||||
lib.makeBinPath [
|
||||
pkgs.jq
|
||||
pkgs.findutils
|
||||
pkgs.coreutils
|
||||
pkgs.util-linux
|
||||
]
|
||||
}
|
||||
(${
|
||||
lib.concatMapStringsSep "\n" (target: ''
|
||||
(
|
||||
${mountHook target}
|
||||
find ${lib.escapeShellArg target.directory} -mindepth 1 -maxdepth 1 -name "snapshot.*" -print0 -type d \
|
||||
| jq -Rs 'split("\u0000") | .[] | select(. != "") | { "name": ("${target.name}::" + .)}'
|
||||
)
|
||||
'') (builtins.attrValues cfg.targets)
|
||||
}) | jq -s .
|
||||
'')
|
||||
(pkgs.writeShellScriptBin "localbackup-restore" ''
|
||||
set -efu -o pipefail
|
||||
export PATH=${
|
||||
lib.makeBinPath [
|
||||
pkgs.rsync
|
||||
pkgs.coreutils
|
||||
pkgs.util-linux
|
||||
pkgs.gawk
|
||||
]
|
||||
}
|
||||
if [[ "''${NAME:-}" == "" ]]; then
|
||||
echo "No backup name given via NAME environment variable"
|
||||
exit 1
|
||||
fi
|
||||
if [[ "''${FOLDERS:-}" == "" ]]; then
|
||||
echo "No folders given via FOLDERS environment variable"
|
||||
exit 1
|
||||
fi
|
||||
name=$(awk -F'::' '{print $1}' <<< $NAME)
|
||||
backupname=''${NAME#$name::}
|
||||
|
||||
if command -v localbackup-mount-$name; then
|
||||
localbackup-mount-$name
|
||||
fi
|
||||
if command -v localbackup-unmount-$name; then
|
||||
trap "localbackup-unmount-$name" EXIT
|
||||
fi
|
||||
|
||||
if [[ ! -d $backupname ]]; then
|
||||
echo "No backup found $backupname"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
IFS=':' read -ra FOLDER <<< "''$FOLDERS"
|
||||
for folder in "''${FOLDER[@]}"; do
|
||||
mkdir -p "$folder"
|
||||
rsync -a "$backupname/${config.networking.hostName}$folder/" "$folder"
|
||||
done
|
||||
'')
|
||||
]
|
||||
++ (lib.mapAttrsToList (
|
||||
name: target:
|
||||
pkgs.writeShellScriptBin ("localbackup-mount-" + name) ''
|
||||
set -efu -o pipefail
|
||||
${lib.optionalString (target.preMountHook != null) target.preMountHook}
|
||||
${lib.optionalString (target.mountpoint != null) ''
|
||||
if ! ${pkgs.util-linux}/bin/mountpoint -q ${lib.escapeShellArg target.mountpoint}; then
|
||||
${pkgs.util-linux}/bin/mount -o X-mount.mkdir ${lib.escapeShellArg target.mountpoint}
|
||||
fi
|
||||
''}
|
||||
${lib.optionalString (target.postMountHook != null) target.postMountHook}
|
||||
''
|
||||
) cfg.targets)
|
||||
++ lib.mapAttrsToList (
|
||||
name: target:
|
||||
pkgs.writeShellScriptBin ("localbackup-unmount-" + name) ''
|
||||
set -efu -o pipefail
|
||||
${lib.optionalString (target.preUnmountHook != null) target.preUnmountHook}
|
||||
${lib.optionalString (
|
||||
target.mountpoint != null
|
||||
) "${pkgs.util-linux}/bin/umount ${lib.escapeShellArg target.mountpoint}"}
|
||||
${lib.optionalString (target.postUnmountHook != null) target.postUnmountHook}
|
||||
''
|
||||
) cfg.targets;
|
||||
|
||||
clan.core.backups.providers.localbackup = {
|
||||
# TODO list needs to run locally or on the remote machine
|
||||
|
||||
@@ -116,47 +116,45 @@ in
|
||||
};
|
||||
clan.core.postgresql.databases.matrix-synapse.restore.stopOnRestore = [ "matrix-synapse" ];
|
||||
|
||||
clan.core.vars.generators =
|
||||
{
|
||||
"matrix-synapse" = {
|
||||
files."synapse-registration_shared_secret" = { };
|
||||
runtimeInputs = with pkgs; [
|
||||
coreutils
|
||||
pwgen
|
||||
];
|
||||
migrateFact = "matrix-synapse";
|
||||
script = ''
|
||||
echo -n "$(pwgen -s 32 1)" > "$out"/synapse-registration_shared_secret
|
||||
'';
|
||||
};
|
||||
clan.core.vars.generators = {
|
||||
"matrix-synapse" = {
|
||||
files."synapse-registration_shared_secret" = { };
|
||||
runtimeInputs = with pkgs; [
|
||||
coreutils
|
||||
pwgen
|
||||
];
|
||||
migrateFact = "matrix-synapse";
|
||||
script = ''
|
||||
echo -n "$(pwgen -s 32 1)" > "$out"/synapse-registration_shared_secret
|
||||
'';
|
||||
};
|
||||
}
|
||||
// lib.mapAttrs' (
|
||||
name: user:
|
||||
lib.nameValuePair "matrix-password-${user.name}" {
|
||||
files."matrix-password-${user.name}" = { };
|
||||
migrateFact = "matrix-password-${user.name}";
|
||||
runtimeInputs = with pkgs; [ xkcdpass ];
|
||||
script = ''
|
||||
xkcdpass -n 4 -d - > "$out"/${lib.escapeShellArg "matrix-password-${user.name}"}
|
||||
'';
|
||||
}
|
||||
// lib.mapAttrs' (
|
||||
name: user:
|
||||
lib.nameValuePair "matrix-password-${user.name}" {
|
||||
files."matrix-password-${user.name}" = { };
|
||||
migrateFact = "matrix-password-${user.name}";
|
||||
runtimeInputs = with pkgs; [ xkcdpass ];
|
||||
script = ''
|
||||
xkcdpass -n 4 -d - > "$out"/${lib.escapeShellArg "matrix-password-${user.name}"}
|
||||
'';
|
||||
}
|
||||
) cfg.users;
|
||||
) cfg.users;
|
||||
|
||||
systemd.services.matrix-synapse =
|
||||
let
|
||||
usersScript =
|
||||
''
|
||||
while ! ${pkgs.netcat}/bin/nc -z -v ::1 8008; do
|
||||
if ! kill -0 "$MAINPID"; then exit 1; fi
|
||||
sleep 1;
|
||||
done
|
||||
''
|
||||
+ lib.concatMapStringsSep "\n" (user: ''
|
||||
# only create user if it doesn't exist
|
||||
/run/current-system/sw/bin/matrix-synapse-register_new_matrix_user --exists-ok --password-file ${
|
||||
config.clan.core.vars.generators."matrix-password-${user.name}".files."matrix-password-${user.name}".path
|
||||
} --user "${user.name}" ${if user.admin then "--admin" else "--no-admin"}
|
||||
'') (lib.attrValues cfg.users);
|
||||
usersScript = ''
|
||||
while ! ${pkgs.netcat}/bin/nc -z -v ::1 8008; do
|
||||
if ! kill -0 "$MAINPID"; then exit 1; fi
|
||||
sleep 1;
|
||||
done
|
||||
''
|
||||
+ lib.concatMapStringsSep "\n" (user: ''
|
||||
# only create user if it doesn't exist
|
||||
/run/current-system/sw/bin/matrix-synapse-register_new_matrix_user --exists-ok --password-file ${
|
||||
config.clan.core.vars.generators."matrix-password-${user.name}".files."matrix-password-${user.name}".path
|
||||
} --user "${user.name}" ${if user.admin then "--admin" else "--no-admin"}
|
||||
'') (lib.attrValues cfg.users);
|
||||
in
|
||||
{
|
||||
path = [ pkgs.curl ];
|
||||
|
||||
@@ -18,13 +18,12 @@
|
||||
config.clan.core.vars.generators.root-password.files.password-hash.path;
|
||||
|
||||
clan.core.vars.generators.root-password = {
|
||||
files.password-hash =
|
||||
{
|
||||
neededFor = "users";
|
||||
}
|
||||
// (lib.optionalAttrs (_class == "nixos") {
|
||||
restartUnits = lib.optional (config.services.userborn.enable) "userborn.service";
|
||||
});
|
||||
files.password-hash = {
|
||||
neededFor = "users";
|
||||
}
|
||||
// (lib.optionalAttrs (_class == "nixos") {
|
||||
restartUnits = lib.optional (config.services.userborn.enable) "userborn.service";
|
||||
});
|
||||
files.password = {
|
||||
deploy = false;
|
||||
};
|
||||
|
||||
@@ -32,17 +32,16 @@ in
|
||||
cfg.certificate.searchDomains != [ ]
|
||||
) config.clan.core.vars.generators.openssh-cert.files."ssh.id_ed25519-cert.pub".path;
|
||||
|
||||
hostKeys =
|
||||
[
|
||||
{
|
||||
path = config.clan.core.vars.generators.openssh.files."ssh.id_ed25519".path;
|
||||
type = "ed25519";
|
||||
}
|
||||
]
|
||||
++ lib.optional cfg.hostKeys.rsa.enable {
|
||||
path = config.clan.core.vars.generators.openssh-rsa.files."ssh.id_rsa".path;
|
||||
type = "rsa";
|
||||
};
|
||||
hostKeys = [
|
||||
{
|
||||
path = config.clan.core.vars.generators.openssh.files."ssh.id_ed25519".path;
|
||||
type = "ed25519";
|
||||
}
|
||||
]
|
||||
++ lib.optional cfg.hostKeys.rsa.enable {
|
||||
path = config.clan.core.vars.generators.openssh-rsa.files."ssh.id_rsa".path;
|
||||
type = "rsa";
|
||||
};
|
||||
};
|
||||
|
||||
clan.core.vars.generators.openssh = {
|
||||
@@ -62,7 +61,8 @@ in
|
||||
hostNames = [
|
||||
"localhost"
|
||||
config.networking.hostName
|
||||
] ++ (lib.optional (config.networking.domain != null) config.networking.fqdn);
|
||||
]
|
||||
++ (lib.optional (config.networking.domain != null) config.networking.fqdn);
|
||||
publicKey = config.clan.core.vars.generators.openssh.files."ssh.id_ed25519.pub".value;
|
||||
};
|
||||
|
||||
|
||||
````diff
@@ -1,3 +1,27 @@
 ---
-description = "Statically configure syncthing peers through clan"
+description = "DEPRECATED: Statically configure syncthing peers through clan"
 ---
+
+# ⚠️ DEPRECATED
+
+This module has been migrated to the new clanServices system.
+
+Please use the new syncthing service instead:
+
+```nix
+{
+  services.syncthing = {
+    instances.default = {
+      roles.peer.machines = {
+        machine1 = { };
+        machine2 = { };
+        machine3 = {
+          excludeMachines = [ "machine4" ];
+        };
+      };
+    };
+  };
+}
+```
+
+The new service provides the same functionality with better integration into clan's inventory system.
````
@@ -4,6 +4,8 @@
|
||||
pkgs,
|
||||
...
|
||||
}:
|
||||
# DEPRECATED: This module has been migrated to clanServices/syncthing
|
||||
# Please use the syncthing service instead: services.syncthing.instances.default.roles.peer.machines = { ... };
|
||||
let
|
||||
dir = config.clan.core.settings.directory;
|
||||
machineVarDir = "${dir}/vars/per-machine/";
|
||||
@@ -32,14 +34,15 @@ let
|
||||
value = {
|
||||
name = machine;
|
||||
id = (lib.removeSuffix "\n" (builtins.readFile (syncthingPublicKeyPath machine)));
|
||||
addresses =
|
||||
[ "dynamic" ]
|
||||
++ (
|
||||
if (lib.elem machine networkIpMachines) then
|
||||
[ "tcp://[${(lib.removeSuffix "\n" (builtins.readFile (zerotierIpMachinePath machine)))}]:22000" ]
|
||||
else
|
||||
[ ]
|
||||
);
|
||||
addresses = [
|
||||
"dynamic"
|
||||
]
|
||||
++ (
|
||||
if (lib.elem machine networkIpMachines) then
|
||||
[ "tcp://[${(lib.removeSuffix "\n" (builtins.readFile (zerotierIpMachinePath machine)))}]:22000" ]
|
||||
else
|
||||
[ ]
|
||||
);
|
||||
};
|
||||
}) syncthingPublicKeyMachines;
|
||||
in
|
||||
|
||||
@@ -41,25 +41,13 @@
|
||||
};
|
||||
};
|
||||
};
|
||||
|
||||
perInstance =
|
||||
{ settings, ... }:
|
||||
{
|
||||
nixosModule =
|
||||
{ ... }:
|
||||
{
|
||||
imports = [
|
||||
# We don't have a good way to specify dependencies between
|
||||
# clanServices for now. When it get's implemtende, we should just
|
||||
# use the ssh and users modules here.
|
||||
./ssh.nix
|
||||
./root-password.nix
|
||||
];
|
||||
|
||||
_module.args = { inherit settings; };
|
||||
|
||||
users.users.root.openssh.authorizedKeys.keys = builtins.attrValues settings.allowedKeys;
|
||||
};
|
||||
};
|
||||
};
|
||||
|
||||
# We don't have a good way to specify dependencies between
|
||||
# clanServices for now. When it get's implemtende, we should just
|
||||
# use the ssh and users modules here.
|
||||
imports = [
|
||||
./ssh.nix
|
||||
./root-password.nix
|
||||
];
|
||||
}
|
||||
|
||||
@@ -1,39 +1,55 @@
|
||||
# We don't have a way of specifying dependencies between clanServices for now.
|
||||
# When it get's added this file should be removed and the users module used instead.
|
||||
{
|
||||
config,
|
||||
pkgs,
|
||||
...
|
||||
}:
|
||||
{
|
||||
roles.default.perInstance =
|
||||
{ ... }:
|
||||
{
|
||||
nixosModule =
|
||||
{
|
||||
config,
|
||||
pkgs,
|
||||
...
|
||||
}:
|
||||
{
|
||||
|
||||
users.mutableUsers = false;
|
||||
users.users.root.hashedPasswordFile =
|
||||
config.clan.core.vars.generators.root-password.files.password-hash.path;
|
||||
users.mutableUsers = false;
|
||||
users.users.root.hashedPasswordFile =
|
||||
config.clan.core.vars.generators.root-password.files.password-hash.path;
|
||||
|
||||
clan.core.vars.generators.root-password = {
|
||||
files.password-hash.neededFor = "users";
|
||||
clan.core.vars.generators.root-password = {
|
||||
files.password-hash.neededFor = "users";
|
||||
|
||||
files.password.deploy = false;
|
||||
files.password.deploy = false;
|
||||
|
||||
runtimeInputs = [
|
||||
pkgs.coreutils
|
||||
pkgs.mkpasswd
|
||||
pkgs.xkcdpass
|
||||
];
|
||||
runtimeInputs = [
|
||||
pkgs.coreutils
|
||||
pkgs.mkpasswd
|
||||
pkgs.xkcdpass
|
||||
];
|
||||
|
||||
prompts.password.type = "hidden";
|
||||
prompts.password.persist = true;
|
||||
prompts.password.description = "You can autogenerate a password, if you leave this prompt blank.";
|
||||
prompts.password.display = {
|
||||
group = "Root User";
|
||||
label = "Password";
|
||||
required = false;
|
||||
helperText = ''
|
||||
Your password will be encrypted and stored securely using the secret store you've configured.
|
||||
'';
|
||||
};
|
||||
|
||||
script = ''
|
||||
prompt_value="$(cat "$prompts"/password)"
|
||||
if [[ -n "''${prompt_value-}" ]]; then
|
||||
echo "$prompt_value" | tr -d "\n" > "$out"/password
|
||||
else
|
||||
xkcdpass --numwords 5 --delimiter - --count 1 | tr -d "\n" > "$out"/password
|
||||
fi
|
||||
mkpasswd -s -m sha-512 < "$out"/password | tr -d "\n" > "$out"/password-hash
|
||||
'';
|
||||
};
|
||||
prompts.password.type = "hidden";
|
||||
prompts.password.persist = true;
|
||||
prompts.password.description = "Leave empty to generate automatically";
|
||||
|
||||
script = ''
|
||||
prompt_value="$(cat "$prompts"/password)"
|
||||
if [[ -n "''${prompt_value-}" ]]; then
|
||||
echo "$prompt_value" | tr -d "\n" > "$out"/password
|
||||
else
|
||||
xkcdpass --numwords 5 --delimiter - --count 1 | tr -d "\n" > "$out"/password
|
||||
fi
|
||||
mkpasswd -s -m sha-512 < "$out"/password | tr -d "\n" > "$out"/password-hash
|
||||
'';
|
||||
};
|
||||
};
|
||||
};
|
||||
}
|
||||
|
||||
@@ -1,115 +1,124 @@
|
||||
{
|
||||
config,
|
||||
pkgs,
|
||||
lib,
|
||||
settings,
|
||||
...
|
||||
}:
|
||||
let
|
||||
stringSet = list: builtins.attrNames (builtins.groupBy lib.id list);
|
||||
roles.default.perInstance =
|
||||
{ settings, ... }:
|
||||
{
|
||||
nixosModule =
|
||||
|
||||
domains = stringSet settings.certificateSearchDomains;
|
||||
|
||||
in
|
||||
{
|
||||
|
||||
services.openssh = {
|
||||
enable = true;
|
||||
settings.PasswordAuthentication = false;
|
||||
|
||||
settings.HostCertificate = lib.mkIf (
|
||||
settings.certificateSearchDomains != [ ]
|
||||
) config.clan.core.vars.generators.openssh-cert.files."ssh.id_ed25519-cert.pub".path;
|
||||
|
||||
hostKeys =
|
||||
[
|
||||
{
|
||||
path = config.clan.core.vars.generators.openssh.files."ssh.id_ed25519".path;
|
||||
type = "ed25519";
|
||||
}
|
||||
]
|
||||
++ lib.optional settings.rsaHostKey.enable {
|
||||
path = config.clan.core.vars.generators.openssh-rsa.files."ssh.id_rsa".path;
|
||||
type = "rsa";
|
||||
};
|
||||
};
|
||||
config,
|
||||
pkgs,
|
||||
lib,
|
||||
...
|
||||
}:
|
||||
let
|
||||
stringSet = list: builtins.attrNames (builtins.groupBy lib.id list);
|
||||
|
||||
clan.core.vars.generators.openssh = {
|
||||
files."ssh.id_ed25519" = { };
|
||||
files."ssh.id_ed25519.pub".secret = false;
|
||||
migrateFact = "openssh";
|
||||
runtimeInputs = [
|
||||
pkgs.coreutils
|
||||
pkgs.openssh
|
||||
];
|
||||
script = ''
|
||||
ssh-keygen -t ed25519 -N "" -C "" -f "$out"/ssh.id_ed25519
|
||||
'';
|
||||
};
|
||||
domains = stringSet settings.certificateSearchDomains;
|
||||
|
||||
programs.ssh.knownHosts.clan-sshd-self-ed25519 = {
|
||||
hostNames = [
|
||||
"localhost"
|
||||
config.networking.hostName
|
||||
] ++ (lib.optional (config.networking.domain != null) config.networking.fqdn);
|
||||
publicKey = config.clan.core.vars.generators.openssh.files."ssh.id_ed25519.pub".value;
|
||||
};
|
||||
in
|
||||
{
|
||||
|
||||
clan.core.vars.generators.openssh-rsa = lib.mkIf settings.rsaHostKey.enable {
|
||||
files."ssh.id_rsa" = { };
|
||||
files."ssh.id_rsa.pub".secret = false;
|
||||
runtimeInputs = [
|
||||
pkgs.coreutils
|
||||
pkgs.openssh
|
||||
];
|
||||
script = ''
|
||||
ssh-keygen -t rsa -b 4096 -N "" -C "" -f "$out"/ssh.id_rsa
|
||||
'';
|
||||
};
|
||||
users.users.root.openssh.authorizedKeys.keys = builtins.attrValues settings.allowedKeys;
|
||||
|
||||
clan.core.vars.generators.openssh-cert = lib.mkIf (settings.certificateSearchDomains != [ ]) {
|
||||
files."ssh.id_ed25519-cert.pub".secret = false;
|
||||
dependencies = [
|
||||
"openssh"
|
||||
"openssh-ca"
|
||||
];
|
||||
validation = {
|
||||
name = config.clan.core.settings.machine.name;
|
||||
domains = lib.genAttrs settings.certificateSearchDomains lib.id;
|
||||
services.openssh = {
|
||||
enable = true;
|
||||
settings.PasswordAuthentication = false;
|
||||
|
||||
settings.HostCertificate = lib.mkIf (
|
||||
settings.certificateSearchDomains != [ ]
|
||||
) config.clan.core.vars.generators.openssh-cert.files."ssh.id_ed25519-cert.pub".path;
|
||||
|
||||
hostKeys = [
|
||||
{
|
||||
path = config.clan.core.vars.generators.openssh.files."ssh.id_ed25519".path;
|
||||
type = "ed25519";
|
||||
}
|
||||
]
|
||||
++ lib.optional settings.rsaHostKey.enable {
|
||||
path = config.clan.core.vars.generators.openssh-rsa.files."ssh.id_rsa".path;
|
||||
type = "rsa";
|
||||
};
|
||||
};
|
||||
|
||||
clan.core.vars.generators.openssh = {
|
||||
files."ssh.id_ed25519" = { };
|
||||
files."ssh.id_ed25519.pub".secret = false;
|
||||
migrateFact = "openssh";
|
||||
runtimeInputs = [
|
||||
pkgs.coreutils
|
||||
pkgs.openssh
|
||||
];
|
||||
script = ''
|
||||
ssh-keygen -t ed25519 -N "" -C "" -f "$out"/ssh.id_ed25519
|
||||
'';
|
||||
};
|
||||
|
||||
programs.ssh.knownHosts.clan-sshd-self-ed25519 = {
|
||||
hostNames = [
|
||||
"localhost"
|
||||
config.networking.hostName
|
||||
]
|
||||
++ (lib.optional (config.networking.domain != null) config.networking.fqdn);
|
||||
publicKey = config.clan.core.vars.generators.openssh.files."ssh.id_ed25519.pub".value;
|
||||
};
|
||||
|
||||
clan.core.vars.generators.openssh-rsa = lib.mkIf settings.rsaHostKey.enable {
|
||||
files."ssh.id_rsa" = { };
|
||||
files."ssh.id_rsa.pub".secret = false;
|
||||
runtimeInputs = [
|
||||
pkgs.coreutils
|
||||
pkgs.openssh
|
||||
];
|
||||
script = ''
|
||||
ssh-keygen -t rsa -b 4096 -N "" -C "" -f "$out"/ssh.id_rsa
|
||||
'';
|
||||
};
|
||||
|
||||
clan.core.vars.generators.openssh-cert = lib.mkIf (settings.certificateSearchDomains != [ ]) {
|
||||
files."ssh.id_ed25519-cert.pub".secret = false;
|
||||
dependencies = [
|
||||
"openssh"
|
||||
"openssh-ca"
|
||||
];
|
||||
validation = {
|
||||
name = config.clan.core.settings.machine.name;
|
||||
domains = lib.genAttrs settings.certificateSearchDomains lib.id;
|
||||
};
|
||||
runtimeInputs = [
|
||||
pkgs.openssh
|
||||
pkgs.jq
|
||||
];
|
||||
script = ''
|
||||
ssh-keygen \
|
||||
-s $in/openssh-ca/id_ed25519 \
|
||||
-I ${config.clan.core.settings.machine.name} \
|
||||
-h \
|
||||
-n ${lib.concatMapStringsSep "," (d: "${config.clan.core.settings.machine.name}.${d}") domains} \
|
||||
$in/openssh/ssh.id_ed25519.pub
|
||||
mv $in/openssh/ssh.id_ed25519-cert.pub "$out"/ssh.id_ed25519-cert.pub
|
||||
'';
|
||||
};
|
||||
|
||||
clan.core.vars.generators.openssh-ca = lib.mkIf (settings.certificateSearchDomains != [ ]) {
|
||||
share = true;
|
||||
files.id_ed25519.deploy = false;
|
||||
files."id_ed25519.pub" = {
|
||||
deploy = false;
|
||||
secret = false;
|
||||
};
|
||||
runtimeInputs = [
|
||||
pkgs.openssh
|
||||
];
|
||||
script = ''
|
||||
ssh-keygen -t ed25519 -N "" -C "" -f "$out"/id_ed25519
|
||||
'';
|
||||
};
|
||||
|
||||
programs.ssh.knownHosts.ssh-ca = lib.mkIf (settings.certificateSearchDomains != [ ]) {
|
||||
certAuthority = true;
|
||||
extraHostNames = builtins.map (domain: "*.${domain}") settings.certificateSearchDomains;
|
||||
publicKey = config.clan.core.vars.generators.openssh-ca.files."id_ed25519.pub".value;
|
||||
};
|
||||
};
|
||||
};
|
||||
runtimeInputs = [
|
||||
pkgs.openssh
|
||||
pkgs.jq
|
||||
];
|
||||
script = ''
|
||||
ssh-keygen \
|
||||
-s $in/openssh-ca/id_ed25519 \
|
||||
-I ${config.clan.core.settings.machine.name} \
|
||||
-h \
|
||||
-n ${lib.concatMapStringsSep "," (d: "${config.clan.core.settings.machine.name}.${d}") domains} \
|
||||
$in/openssh/ssh.id_ed25519.pub
|
||||
mv $in/openssh/ssh.id_ed25519-cert.pub "$out"/ssh.id_ed25519-cert.pub
|
||||
'';
|
||||
};
|
||||
|
||||
clan.core.vars.generators.openssh-ca = lib.mkIf (settings.certificateSearchDomains != [ ]) {
|
||||
share = true;
|
||||
files.id_ed25519.deploy = false;
|
||||
files."id_ed25519.pub" = {
|
||||
deploy = false;
|
||||
secret = false;
|
||||
};
|
||||
runtimeInputs = [
|
||||
pkgs.openssh
|
||||
];
|
||||
script = ''
|
||||
ssh-keygen -t ed25519 -N "" -C "" -f "$out"/id_ed25519
|
||||
'';
|
||||
};
|
||||
|
||||
programs.ssh.knownHosts.ssh-ca = lib.mkIf (settings.certificateSearchDomains != [ ]) {
|
||||
certAuthority = true;
|
||||
extraHostNames = builtins.map (domain: "*.${domain}") settings.certificateSearchDomains;
|
||||
publicKey = config.clan.core.vars.generators.openssh-ca.files."id_ed25519.pub".value;
|
||||
};
|
||||
}
|
||||
|
||||
@@ -1,29 +0,0 @@
```nix
{
  lib,
  config,
  settings,
  ...
}:
{

  services.data-mesher.initNetwork =
    let
      # for a given machine, read its public key and remove any new lines
      readHostKey =
        machine:
        let
          path = "${config.clan.core.settings.directory}/vars/per-machine/${machine}/data-mesher-host-key/public_key/value";
        in
        builtins.elemAt (lib.splitString "\n" (builtins.readFile path)) 1;
    in
    {
      enable = true;
      keyPath = config.clan.core.vars.generators.data-mesher-network-key.files.private_key.path;

      tld = settings.network.tld;
      hostTTL = settings.network.hostTTL;

      # admin and signer host public keys
      signingKeys = builtins.map readHostKey (builtins.attrNames settings.bootstrapNodes);
    };
}
```
```diff
@@ -5,31 +5,15 @@ let
     {
       options = {
         bootstrapNodes = lib.mkOption {
-          type = lib.types.nullOr (lib.types.attrsOf lib.types.str);
-          # the default bootstrap nodes are any machines with the admin or signers role
-          # we iterate through those machines, determining an IP address for them based on their VPN
-          # currently only supports zerotier
-          # default = builtins.foldl' (
-          #   urls: name:
-          #   let
-          #     ipPath = "${config.clan.core.settings.directory}/vars/per-machine/${name}/zerotier/zerotier-ip/value";
-          #   in
-          #   if builtins.pathExists ipPath then
-          #     let
-          #       ip = builtins.readFile ipPath;
-          #     in
-          #     urls ++ [ "[${ip}]:${builtins.toString settings.network.port}" ]
-          #   else
-          #     urls
-          # ) [ ] (dmLib.machines config).bootstrap;
+          type = lib.types.nullOr (lib.types.listOf lib.types.str);
           description = ''
             A list of bootstrap nodes that act as an initial gateway when joining
             the cluster.
           '';
-          example = {
-            "node1" = "192.168.1.1:7946";
-            "node2" = "192.168.1.2:7946";
-          };
+          example = [
+            "192.168.1.1:7946"
+            "192.168.1.2:7946"
+          ];
         };

         network = {
```
@@ -55,6 +39,59 @@ let
|
||||
};
|
||||
};
|
||||
};
|
||||
|
||||
mkBootstrapNodes =
|
||||
{
|
||||
config,
|
||||
lib,
|
||||
roles,
|
||||
settings,
|
||||
}:
|
||||
lib.mkDefault (
|
||||
builtins.foldl' (
|
||||
urls: name:
|
||||
let
|
||||
ipPath = "${config.clan.core.settings.directory}/vars/per-machine/${name}/zerotier/zerotier-ip/value";
|
||||
in
|
||||
if builtins.pathExists ipPath then
|
||||
let
|
||||
ip = builtins.readFile ipPath;
|
||||
in
|
||||
urls ++ [ "[${ip}]:${builtins.toString settings.network.port}" ]
|
||||
else
|
||||
urls
|
||||
) [ ] (builtins.attrNames ((roles.admin.machines or { }) // (roles.signer.machines or { })))
|
||||
);
|
||||
|
||||
mkDmService = dmSettings: config: {
|
||||
enable = true;
|
||||
openFirewall = true;
|
||||
|
||||
settings = {
|
||||
log_level = "warn";
|
||||
state_dir = "/var/lib/data-mesher";
|
||||
|
||||
# read network id from vars
|
||||
network.id = config.clan.core.vars.generators.data-mesher-network-key.files.public_key.value;
|
||||
|
||||
host = {
|
||||
names = [ config.networking.hostName ];
|
||||
key_path = config.clan.core.vars.generators.data-mesher-host-key.files.private_key.path;
|
||||
};
|
||||
|
||||
cluster = {
|
||||
port = dmSettings.network.port;
|
||||
join_interval = "30s";
|
||||
push_pull_interval = "30s";
|
||||
interface = dmSettings.network.interface;
|
||||
bootstrap_nodes = dmSettings.bootstrapNodes;
|
||||
};
|
||||
|
||||
http.port = 7331;
|
||||
http.interface = "lo";
|
||||
};
|
||||
};
|
||||
|
||||
in
|
||||
{
|
||||
_class = "clan.service";
|
||||
@@ -67,11 +104,9 @@ in
|
||||
interface =
|
||||
{ lib, ... }:
|
||||
{
|
||||
|
||||
imports = [ sharedInterface ];
|
||||
|
||||
options = {
|
||||
|
||||
network = {
|
||||
tld = lib.mkOption {
|
||||
type = lib.types.str;
|
||||
@@ -89,54 +124,117 @@ in
|
||||
};
|
||||
};
|
||||
perInstance =
|
||||
{ settings, roles, ... }:
|
||||
{
|
||||
nixosModule = {
|
||||
imports = [
|
||||
./admin.nix
|
||||
./shared.nix
|
||||
];
|
||||
_module.args = { inherit settings roles; };
|
||||
};
|
||||
extendSettings,
|
||||
roles,
|
||||
lib,
|
||||
...
|
||||
}:
|
||||
{
|
||||
nixosModule =
|
||||
{ config, ... }:
|
||||
let
|
||||
settings = extendSettings {
|
||||
bootstrapNodes = mkBootstrapNodes {
|
||||
inherit
|
||||
config
|
||||
lib
|
||||
roles
|
||||
settings
|
||||
;
|
||||
};
|
||||
};
|
||||
in
|
||||
{
|
||||
imports = [ ./shared.nix ];
|
||||
|
||||
services.data-mesher = (mkDmService settings config) // {
|
||||
initNetwork =
|
||||
let
|
||||
# for a given machine, read it's public key and remove any new lines
|
||||
readHostKey =
|
||||
machine:
|
||||
let
|
||||
path = "${config.clan.core.settings.directory}/vars/per-machine/${machine}/data-mesher-host-key/public_key/value";
|
||||
in
|
||||
builtins.elemAt (lib.splitString "\n" (builtins.readFile path)) 1;
|
||||
in
|
||||
{
|
||||
enable = true;
|
||||
keyPath = config.clan.core.vars.generators.data-mesher-network-key.files.private_key.path;
|
||||
|
||||
tld = settings.network.tld;
|
||||
hostTTL = settings.network.hostTTL;
|
||||
|
||||
# admin and signer host public keys
|
||||
signingKeys = builtins.map readHostKey (
|
||||
builtins.attrNames ((roles.admin.machines or { }) // (roles.signer.machines or { }))
|
||||
);
|
||||
};
|
||||
};
|
||||
};
|
||||
};
|
||||
};
|
||||
|
||||
roles.signer = {
|
||||
interface =
|
||||
{ ... }:
|
||||
{
|
||||
imports = [ sharedInterface ];
|
||||
};
|
||||
interface = sharedInterface;
|
||||
perInstance =
|
||||
{ settings, roles, ... }:
|
||||
{
|
||||
nixosModule = {
|
||||
imports = [
|
||||
./signer.nix
|
||||
./shared.nix
|
||||
];
|
||||
_module.args = { inherit settings roles; };
|
||||
};
|
||||
extendSettings,
|
||||
lib,
|
||||
roles,
|
||||
...
|
||||
}:
|
||||
{
|
||||
nixosModule =
|
||||
{ config, ... }:
|
||||
let
|
||||
settings = extendSettings {
|
||||
bootstrapNodes = mkBootstrapNodes {
|
||||
inherit
|
||||
config
|
||||
lib
|
||||
roles
|
||||
settings
|
||||
;
|
||||
};
|
||||
};
|
||||
in
|
||||
{
|
||||
imports = [ ./shared.nix ];
|
||||
services.data-mesher = (mkDmService settings config);
|
||||
};
|
||||
};
|
||||
};
|
||||
|
||||
roles.peer = {
|
||||
interface =
|
||||
{ ... }:
|
||||
{
|
||||
imports = [ sharedInterface ];
|
||||
};
|
||||
interface = sharedInterface;
|
||||
perInstance =
|
||||
{ settings, roles, ... }:
|
||||
{
|
||||
nixosModule = {
|
||||
imports = [
|
||||
./peer.nix
|
||||
./shared.nix
|
||||
];
|
||||
_module.args = { inherit settings roles; };
|
||||
};
|
||||
extendSettings,
|
||||
lib,
|
||||
roles,
|
||||
...
|
||||
}:
|
||||
{
|
||||
nixosModule =
|
||||
{ config, ... }:
|
||||
let
|
||||
settings = extendSettings {
|
||||
bootstrapNodes = mkBootstrapNodes {
|
||||
inherit
|
||||
config
|
||||
lib
|
||||
roles
|
||||
settings
|
||||
;
|
||||
};
|
||||
};
|
||||
in
|
||||
{
|
||||
imports = [ ./shared.nix ];
|
||||
services.data-mesher = (mkDmService settings config);
|
||||
};
|
||||
};
|
||||
};
|
||||
|
||||
}
|
||||
|
||||
@@ -1,39 +1,9 @@
|
||||
{
|
||||
config,
|
||||
settings,
|
||||
...
|
||||
}:
|
||||
{
|
||||
|
||||
services.data-mesher = {
|
||||
enable = true;
|
||||
openFirewall = true;
|
||||
|
||||
settings = {
|
||||
log_level = "warn";
|
||||
state_dir = "/var/lib/data-mesher";
|
||||
|
||||
# read network id from vars
|
||||
network.id = config.clan.core.vars.generators.data-mesher-network-key.files.public_key.value;
|
||||
|
||||
host = {
|
||||
names = [ config.networking.hostName ];
|
||||
key_path = config.clan.core.vars.generators.data-mesher-host-key.files.private_key.path;
|
||||
};
|
||||
|
||||
cluster = {
|
||||
port = settings.network.port;
|
||||
join_interval = "30s";
|
||||
push_pull_interval = "30s";
|
||||
interface = settings.network.interface;
|
||||
bootstrap_nodes = (builtins.attrValues settings.bootstrapNodes);
|
||||
};
|
||||
|
||||
http.port = 7331;
|
||||
http.interface = "lo";
|
||||
};
|
||||
};
|
||||
|
||||
# Generate host key.
|
||||
clan.core.vars.generators.data-mesher-host-key = {
|
||||
files =
|
||||
|
||||
@@ -16,11 +16,11 @@
|
||||
instances = {
|
||||
data-mesher =
|
||||
let
|
||||
bootstrapNodes = {
|
||||
admin = "[2001:db8:1::1]:7946";
|
||||
peer = "[2001:db8:1::2]:7946";
|
||||
# signer = "2001:db8:1::3:7946";
|
||||
};
|
||||
bootstrapNodes = [
|
||||
"[2001:db8:1::1]:7946" # admin
|
||||
"[2001:db8:1::2]:7946" # peer
|
||||
# "2001:db8:1::3:7946" #signer
|
||||
];
|
||||
in
|
||||
{
|
||||
roles.peer.machines.peer.settings = {
|
||||
|
||||
86
clanServices/dyndns/README.md
Normal file
@@ -0,0 +1,86 @@
|
||||
|
||||
A Dynamic-DNS (DDNS) service continuously keeps one or more DNS records in sync with the current public IP address of your machine.
|
||||
In *clan* this service is backed by [qdm12/ddns-updater](https://github.com/qdm12/ddns-updater).
|
||||
|
||||
> Info
|
||||
> ddns-updater itself is **heavily opinionated and version-specific**. Whenever you need the exhaustive list of flags or
|
||||
> provider-specific fields, refer to its *versioned* documentation – **not** the GitHub README.
|
||||
---
|
||||
|
||||
# 1. Configuration model
|
||||
|
||||
Internally ddns-updater consumes a single file named `config.json`.
|
||||
A minimal configuration for the registrar *Namecheap* looks like:
|
||||
|
||||
```json
|
||||
{
|
||||
"settings": [
|
||||
{
|
||||
"provider": "namecheap",
|
||||
"domain": "sub.example.com",
|
||||
"password": "e5322165c1d74692bfa6d807100c0310"
|
||||
}
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
Another example for *Porkbun*:
|
||||
|
||||
```json
|
||||
{
|
||||
"settings": [
|
||||
{
|
||||
"provider": "porkbun",
|
||||
"domain": "domain.com",
|
||||
"api_key": "sk1_…",
|
||||
"secret_api_key": "pk1_…",
|
||||
"ip_version": "ipv4",
|
||||
"ipv6_suffix": ""
|
||||
}
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
When you write a `clan.nix`, the **common** fields (`provider`, `domain`, `period`, …) are already exposed as typed
|
||||
*Nix options*.
|
||||
Registrar-specific or very new keys can be passed through an open attribute set called **extraSettings**.
|
||||
|
||||
---
|
||||
|
||||
# 2. Full Porkbun example
|
||||
|
||||
Manage three records – `@`, `home` and `test` – of the domain
|
||||
`jon.blog` and refresh them every 15 minutes:
|
||||
|
||||
```nix title="clan.nix" hl_lines="10-11"
|
||||
inventory.instances = {
|
||||
dyndns = {
|
||||
roles.default.machines."jon" = { };
|
||||
roles.default.settings = {
|
||||
period = 15; # minutes
|
||||
settings = {
|
||||
"all-jon-blog" = {
|
||||
provider = "porkbun";
|
||||
domain = "jon.blog";
|
||||
|
||||
# (1) tell the secret-manager which key we are going to store
|
||||
secret_field_name = "secret_api_key";
|
||||
|
||||
# everything below is copied verbatim into config.json
|
||||
extraSettings = {
|
||||
host = "@,home,test"; # (2) comma-separated list of sub-domains
|
||||
ip_version = "ipv4";
|
||||
ipv6_suffix = "";
|
||||
api_key = "pk1_4bb2b231275a02fdc23b7e6f3552s01S213S"; # (3) public – safe to commit
|
||||
};
|
||||
};
|
||||
};
|
||||
};
|
||||
};
|
||||
};
|
||||
```
|
||||
|
||||
1. `secret_field_name` tells the *vars-generator* to store the entered secret under the specified JSON field name in the configuration.
|
||||
2. ddns-updater allows multiple hosts by separating them with a comma.
|
||||
3. The `api_key` above is *public*; the corresponding **private key** is retrieved through `secret_field_name`.
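
For providers that authenticate with a single password-style secret, the default `secret_field_name = "password"` is sufficient. A minimal sketch matching the Namecheap `config.json` from section 1, assuming a machine named `jon`; the record name and host are placeholders:

```nix title="clan.nix"
inventory.instances = {
  dyndns = {
    roles.default.machines."jon" = { };
    roles.default.settings = {
      period = 5; # minutes (the default)
      settings = {
        "home-example-com" = {
          provider = "namecheap";
          domain = "example.com";
          # Namecheap expects the secret in the "password" field, which is the default
          secret_field_name = "password";
          extraSettings = {
            host = "home"; # sub-domain to update
          };
        };
      };
    };
  };
};
```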
|
||||
|
||||
277
clanServices/dyndns/default.nix
Normal file
@@ -0,0 +1,277 @@
|
||||
{ ... }:
|
||||
{
|
||||
_class = "clan.service";
|
||||
manifest.name = "clan-core/dyndns";
|
||||
manifest.description = "A dynamic DNS service to update domain IPs";
|
||||
manifest.categories = [ "Network" ];
|
||||
manifest.readme = builtins.readFile ./README.md;
|
||||
|
||||
roles.default = {
|
||||
interface =
|
||||
{ lib, ... }:
|
||||
{
|
||||
options = {
|
||||
server = {
|
||||
enable = lib.mkEnableOption "dyndns webserver";
|
||||
domain = lib.mkOption {
|
||||
type = lib.types.str;
|
||||
description = "Domain to serve the webservice on";
|
||||
};
|
||||
port = lib.mkOption {
|
||||
type = lib.types.int;
|
||||
default = 54805;
|
||||
description = "Port to listen on";
|
||||
};
|
||||
acmeEmail = lib.mkOption {
|
||||
type = lib.types.str;
|
||||
description = ''
|
||||
Email address for account creation and correspondence from the CA.
|
||||
It is recommended to use the same email for all certs to avoid account
|
||||
creation limits.
|
||||
'';
|
||||
};
|
||||
};
|
||||
|
||||
period = lib.mkOption {
|
||||
type = lib.types.int;
|
||||
default = 5;
|
||||
description = "Domain update period in minutes";
|
||||
};
|
||||
|
||||
settings = lib.mkOption {
|
||||
type = lib.types.attrsOf (
|
||||
lib.types.submodule (
|
||||
{ ... }:
|
||||
{
|
||||
options = {
|
||||
provider = lib.mkOption {
|
||||
example = "namecheap";
|
||||
type = lib.types.str;
|
||||
description = "The dyndns provider to use";
|
||||
};
|
||||
domain = lib.mkOption {
|
||||
type = lib.types.str;
|
||||
example = "example.com";
|
||||
description = "The top level domain to update.";
|
||||
};
|
||||
secret_field_name = lib.mkOption {
|
||||
example = "api_key";
|
||||
|
||||
type = lib.types.enum [
|
||||
"password"
|
||||
"token"
|
||||
"api_key"
|
||||
"secret_api_key"
|
||||
];
|
||||
default = "password";
|
||||
description = "The field name for the secret";
|
||||
};
|
||||
extraSettings = lib.mkOption {
|
||||
type = lib.types.attrsOf lib.types.str;
|
||||
default = { };
|
||||
description = ''
|
||||
Extra settings for the provider.
|
||||
Provider specific settings: https://github.com/qdm12/ddns-updater#configuration
|
||||
'';
|
||||
};
|
||||
};
|
||||
}
|
||||
)
|
||||
);
|
||||
default = { };
|
||||
description = "Configuration for which domains to update";
|
||||
};
|
||||
};
|
||||
};
|
||||
|
||||
perInstance =
|
||||
{ settings, ... }:
|
||||
{
|
||||
nixosModule =
|
||||
{
|
||||
config,
|
||||
lib,
|
||||
pkgs,
|
||||
...
|
||||
}:
|
||||
let
|
||||
name = "dyndns";
|
||||
cfg = settings;
|
||||
|
||||
# We dedup secrets if they have the same provider + base domain
|
||||
secret_id = opt: "${name}-${opt.provider}-${opt.domain}";
|
||||
secret_path =
|
||||
opt: config.clan.core.vars.generators."${secret_id opt}".files."${secret_id opt}".path;
|
||||
|
||||
# We check that a secret has not been set in extraSettings.
|
||||
extraSettingsSafe =
|
||||
opt:
|
||||
if (builtins.hasAttr opt.secret_field_name opt.extraSettings) then
|
||||
throw "Please do not set ${opt.secret_field_name} in extraSettings, it is automatically set by the dyndns module."
|
||||
else
|
||||
opt.extraSettings;
|
||||
|
||||
service_config = {
|
||||
settings = builtins.catAttrs "value" (
|
||||
builtins.attrValues (
|
||||
lib.mapAttrs (_: opt: {
|
||||
value =
|
||||
(extraSettingsSafe opt)
|
||||
// {
|
||||
domain = opt.domain;
|
||||
provider = opt.provider;
|
||||
}
|
||||
// {
|
||||
"${opt.secret_field_name}" = secret_id opt;
|
||||
};
|
||||
}) cfg.settings
|
||||
)
|
||||
);
|
||||
};
|
||||
|
||||
secret_generator = _: opt: {
|
||||
name = secret_id opt;
|
||||
value = {
|
||||
share = true;
|
||||
migrateFact = "${secret_id opt}";
|
||||
prompts.${secret_id opt} = {
|
||||
type = "hidden";
|
||||
persist = true;
|
||||
};
|
||||
};
|
||||
};
|
||||
in
|
||||
{
|
||||
imports = lib.optional cfg.server.enable (
|
||||
lib.modules.importApply ./nginx.nix {
|
||||
inherit config;
|
||||
inherit settings;
|
||||
inherit lib;
|
||||
}
|
||||
);
|
||||
|
||||
clan.core.vars.generators = lib.mkIf (cfg.settings != { }) (
|
||||
lib.mapAttrs' secret_generator cfg.settings
|
||||
);
|
||||
|
||||
users.groups.${name} = lib.mkIf (cfg.settings != { }) { };
|
||||
users.users.${name} = lib.mkIf (cfg.settings != { }) {
|
||||
group = name;
|
||||
isSystemUser = true;
|
||||
description = "User for ${name} service";
|
||||
home = "/var/lib/${name}";
|
||||
createHome = true;
|
||||
};
|
||||
|
||||
services.nginx = lib.mkIf cfg.server.enable {
|
||||
virtualHosts = {
|
||||
"${cfg.server.domain}" = {
|
||||
forceSSL = true;
|
||||
enableACME = true;
|
||||
locations."/" = {
|
||||
proxyPass = "http://localhost:${toString cfg.server.port}";
|
||||
};
|
||||
};
|
||||
};
|
||||
};
|
||||
|
||||
systemd.services.${name} = lib.mkIf (cfg.settings != { }) {
|
||||
path = [ ];
|
||||
description = "Dynamic DNS updater";
|
||||
after = [ "network.target" ];
|
||||
wantedBy = [ "multi-user.target" ];
|
||||
environment = {
|
||||
MYCONFIG = "${builtins.toJSON service_config}";
|
||||
SERVER_ENABLED = if cfg.server.enable then "yes" else "no";
|
||||
PERIOD = "${toString cfg.period}m";
|
||||
LISTENING_ADDRESS = ":${toString cfg.server.port}";
|
||||
GODEBUG = "netdns=go"; # We need to set this untill this has been merged. https://github.com/NixOS/nixpkgs/pull/432758
|
||||
};
|
||||
|
||||
serviceConfig =
|
||||
let
|
||||
pyscript =
|
||||
pkgs.writers.writePython3Bin "generate_secret_config.py"
|
||||
{
|
||||
libraries = [ ];
|
||||
doCheck = false;
|
||||
}
|
||||
''
|
||||
import json
|
||||
from pathlib import Path
|
||||
import os
|
||||
|
||||
cred_dir = Path(os.getenv("CREDENTIALS_DIRECTORY"))
|
||||
config_str = os.getenv("MYCONFIG")
|
||||
|
||||
|
||||
def get_credential(name):
|
||||
secret_p = cred_dir / name
|
||||
with open(secret_p, 'r') as f:
|
||||
return f.read().strip()
|
||||
|
||||
|
||||
config = json.loads(config_str)
|
||||
print(f"Config: {config}")
|
||||
for attrset in config["settings"]:
|
||||
if "password" in attrset:
|
||||
attrset['password'] = get_credential(attrset['password'])
|
||||
elif "token" in attrset:
|
||||
attrset['token'] = get_credential(attrset['token'])
|
||||
elif "secret_api_key" in attrset:
|
||||
attrset['secret_api_key'] = get_credential(attrset['secret_api_key'])
|
||||
elif "api_key" in attrset:
|
||||
attrset['api_key'] = get_credential(attrset['api_key'])
|
||||
else:
|
||||
raise ValueError(f"Missing secret field in {attrset}")
|
||||
|
||||
# create directory data if it does not exist
|
||||
data_dir = Path('data')
|
||||
data_dir.mkdir(mode=0o770, exist_ok=True)
|
||||
|
||||
# Create a temporary config file
|
||||
# with appropriate permissions
|
||||
tmp_config_path = data_dir / '.config.json'
|
||||
tmp_config_path.touch(mode=0o660, exist_ok=False)
|
||||
|
||||
# Write the config with secrets back
|
||||
with open(tmp_config_path, 'w') as f:
|
||||
f.write(json.dumps(config, indent=4))
|
||||
|
||||
# Move config into place
|
||||
config_path = data_dir / 'config.json'
|
||||
tmp_config_path.rename(config_path)
|
||||
|
||||
# Set file permissions to read
|
||||
# and write only by the user and group
|
||||
for file in data_dir.iterdir():
|
||||
file.chmod(0o660)
|
||||
'';
|
||||
in
|
||||
{
|
||||
ExecStartPre = lib.getExe pyscript;
|
||||
ExecStart = lib.getExe pkgs.ddns-updater;
|
||||
LoadCredential = lib.mapAttrsToList (_: opt: "${secret_id opt}:${secret_path opt}") cfg.settings;
|
||||
User = name;
|
||||
Group = name;
|
||||
NoNewPrivileges = true;
|
||||
PrivateTmp = true;
|
||||
ProtectSystem = "strict";
|
||||
ReadOnlyPaths = "/";
|
||||
PrivateDevices = "yes";
|
||||
ProtectKernelModules = "yes";
|
||||
ProtectKernelTunables = "yes";
|
||||
WorkingDirectory = "/var/lib/${name}";
|
||||
ReadWritePaths = [
|
||||
"/proc/self"
|
||||
"/var/lib/${name}"
|
||||
];
|
||||
|
||||
Restart = "always";
|
||||
RestartSec = 60;
|
||||
};
|
||||
};
|
||||
};
|
||||
};
|
||||
};
|
||||
}
|
||||
19
clanServices/dyndns/flake-module.nix
Normal file
@@ -0,0 +1,19 @@
|
||||
{ lib, ... }:
|
||||
let
|
||||
module = lib.modules.importApply ./default.nix { };
|
||||
in
|
||||
{
|
||||
clan.modules = {
|
||||
dyndns = module;
|
||||
};
|
||||
|
||||
perSystem =
|
||||
{ ... }:
|
||||
{
|
||||
clan.nixosTests.dyndns = {
|
||||
imports = [ ./tests/vm/default.nix ];
|
||||
|
||||
clan.modules."@clan/dyndns" = module;
|
||||
};
|
||||
};
|
||||
}
|
||||
50
clanServices/dyndns/nginx.nix
Normal file
@@ -0,0 +1,50 @@
|
||||
{
|
||||
config,
|
||||
lib,
|
||||
settings,
|
||||
...
|
||||
}:
|
||||
{
|
||||
security.acme.acceptTerms = true;
|
||||
security.acme.defaults.email = settings.server.acmeEmail;
|
||||
|
||||
networking.firewall.allowedTCPPorts = [
|
||||
443
|
||||
80
|
||||
];
|
||||
|
||||
services.nginx = {
|
||||
enable = true;
|
||||
|
||||
statusPage = lib.mkDefault true;
|
||||
recommendedBrotliSettings = lib.mkDefault true;
|
||||
recommendedGzipSettings = lib.mkDefault true;
|
||||
recommendedOptimisation = lib.mkDefault true;
|
||||
recommendedProxySettings = lib.mkDefault true;
|
||||
recommendedTlsSettings = lib.mkDefault true;
|
||||
|
||||
# By default, Nginx writes all access logs to /var/log/nginx/access.log
|
||||
# instead of the journal; the line below routes them to syslog instead.
|
||||
commonHttpConfig = "access_log syslog:server=unix:/dev/log;";
|
||||
|
||||
resolver.addresses =
|
||||
let
|
||||
isIPv6 = addr: builtins.match ".*:.*:.*" addr != null;
|
||||
escapeIPv6 = addr: if isIPv6 addr then "[${addr}]" else addr;
|
||||
cloudflare = [
|
||||
"1.1.1.1"
|
||||
"2606:4700:4700::1111"
|
||||
];
|
||||
resolvers =
|
||||
if config.networking.nameservers == [ ] then cloudflare else config.networking.nameservers;
|
||||
in
|
||||
map escapeIPv6 resolvers;
|
||||
|
||||
sslDhparam = config.security.dhparams.params.nginx.path;
|
||||
};
|
||||
|
||||
security.dhparams = {
|
||||
enable = true;
|
||||
params.nginx = { };
|
||||
};
|
||||
}
|
||||
77
clanServices/dyndns/tests/vm/default.nix
Normal file
@@ -0,0 +1,77 @@
|
||||
{
|
||||
pkgs,
|
||||
...
|
||||
}:
|
||||
{
|
||||
name = "service-dyndns";
|
||||
|
||||
clan = {
|
||||
directory = ./.;
|
||||
inventory = {
|
||||
machines.server = { };
|
||||
|
||||
instances = {
|
||||
dyndns-test = {
|
||||
module.name = "@clan/dyndns";
|
||||
module.input = "self";
|
||||
roles.default.machines."server".settings = {
|
||||
server = {
|
||||
enable = true;
|
||||
domain = "test.example.com";
|
||||
port = 54805;
|
||||
acmeEmail = "test@example.com";
|
||||
};
|
||||
period = 1;
|
||||
settings = {
|
||||
"test.example.com" = {
|
||||
provider = "namecheap";
|
||||
domain = "example.com";
|
||||
secret_field_name = "password";
|
||||
extraSettings = {
|
||||
host = "test";
|
||||
server = "dynamicdns.park-your-domain.com";
|
||||
};
|
||||
};
|
||||
};
|
||||
};
|
||||
};
|
||||
};
|
||||
};
|
||||
};
|
||||
|
||||
nodes = {
|
||||
server = {
|
||||
# Disable firewall for testing
|
||||
networking.firewall.enable = false;
|
||||
|
||||
# Mock ACME for testing (avoid real certificate requests)
|
||||
security.acme.defaults.server = "https://localhost:14000/dir";
|
||||
};
|
||||
};
|
||||
|
||||
testScript = ''
|
||||
start_all()
|
||||
|
||||
# Test that dyndns service starts (will fail without secrets, but that's expected)
|
||||
server.wait_for_unit("multi-user.target")
|
||||
|
||||
# Test that nginx service is running
|
||||
server.wait_for_unit("nginx.service")
|
||||
|
||||
# Test that nginx is listening on expected ports
|
||||
server.wait_for_open_port(80)
|
||||
server.wait_for_open_port(443)
|
||||
|
||||
# Test that the dyndns user was created
|
||||
# server.succeed("getent passwd dyndns")
|
||||
# server.succeed("getent group dyndns")
|
||||
#
|
||||
# Test that the home directory was created
|
||||
server.succeed("test -d /var/lib/dyndns")
|
||||
|
||||
# Test that nginx configuration includes our domain
|
||||
server.succeed("${pkgs.nginx}/bin/nginx -t")
|
||||
|
||||
print("All tests passed!")
|
||||
'';
|
||||
}
|
||||
@@ -1,3 +1,9 @@
|
||||
# Example clan service. See https://docs.clan.lol/guides/services/community/
|
||||
# for more details
|
||||
|
||||
# The test for this module in ./tests/vm/default.nix shows an example of how
|
||||
# the service is used.
|
||||
|
||||
{ packages }:
|
||||
{ ... }:
|
||||
{
|
||||
@@ -5,30 +11,94 @@
|
||||
manifest.name = "clan-core/hello-word";
|
||||
manifest.description = "This is a test";
|
||||
|
||||
roles.peer = {
|
||||
# This service provides two roles: "morning" and "evening". Roles can be
|
||||
# defined in this file directly (e.g. the "morning" role) or split up into a
|
||||
# separate file (e.g. the "evening" role)
|
||||
roles.morning = {
|
||||
interface =
|
||||
{ lib, ... }:
|
||||
{
|
||||
options.foo = lib.mkOption {
|
||||
# Here we define the settings for this role. They will be accessible
|
||||
# via `roles.morning.settings` in the role
|
||||
|
||||
options.greeting = lib.mkOption {
|
||||
type = lib.types.str;
|
||||
# default = "";
|
||||
description = "Some option";
|
||||
default = "Good morning";
|
||||
description = "The greeting to use";
|
||||
};
|
||||
};
|
||||
# Maps over all instances and produces one result per instance.
|
||||
perInstance =
|
||||
{
|
||||
# Role settings for this machine/instance
|
||||
settings,
|
||||
|
||||
# The name of this instance of the service
|
||||
instanceName,
|
||||
|
||||
# The current machine
|
||||
machine,
|
||||
|
||||
# All roles of this service, with their assigned machines
|
||||
roles,
|
||||
...
|
||||
}:
|
||||
{
|
||||
# Analogous to 'perSystem' in flake-parts.
|
||||
# For every instance of this service we will add a nixosModule to a morning-machine
|
||||
nixosModule =
|
||||
{ config, ... }:
|
||||
{
|
||||
# Examples of what you could do here:
|
||||
# - Get some settings of this machine
|
||||
# settings.ipRanges
|
||||
#
|
||||
# - Get all evening names:
|
||||
# allEveningNames = lib.attrNames roles.evening.machines
|
||||
#
|
||||
# - Get all roles of the machine:
|
||||
# machine.roles
|
||||
#
|
||||
# - Get the settings that were applied to a specific evening machine:
|
||||
# roles.evening.machines.peer1.settings
|
||||
imports = [ ];
|
||||
environment.etc.hello.text = "${settings.greeting} World!";
|
||||
};
|
||||
};
|
||||
};
|
||||
|
||||
# The implementation of the evening role is in a separate file. We have kept
|
||||
# the interface here, so we can see all settings of the service in one place,
|
||||
# but you can also move it to the respective file
|
||||
roles.evening = {
|
||||
interface =
|
||||
{ lib, ... }:
|
||||
{
|
||||
options.greeting = lib.mkOption {
|
||||
type = lib.types.str;
|
||||
default = "Good evening";
|
||||
description = "The greeting to use";
|
||||
};
|
||||
};
|
||||
};
|
||||
imports = [ ./evening.nix ];
|
||||
|
||||
# This part gets applied to all machines, regardless of their role.
|
||||
perMachine =
|
||||
{ machine, ... }:
|
||||
{
|
||||
nixosModule = {
|
||||
clan.core.vars.generators.hello = {
|
||||
files.hello = {
|
||||
secret = false;
|
||||
};
|
||||
script = ''
|
||||
echo "Hello world from ${machine.name}" > $out/hello
|
||||
'';
|
||||
nixosModule =
|
||||
{ pkgs, ... }:
|
||||
{
|
||||
environment.systemPackages = [
|
||||
(pkgs.writeShellScriptBin "greet-world" ''
|
||||
#!${pkgs.bash}/bin/bash
|
||||
set -euo pipefail
|
||||
|
||||
cat /etc/hello
|
||||
echo " I'm ${machine.name}"
|
||||
'')
|
||||
];
|
||||
};
|
||||
};
|
||||
};
|
||||
}
|
||||
|
||||
12
clanServices/hello-world/evening.nix
Normal file
@@ -0,0 +1,12 @@
|
||||
{
|
||||
roles.evening.perInstance =
|
||||
{ settings, ... }:
|
||||
{
|
||||
nixosModule =
|
||||
{ ... }:
|
||||
{
|
||||
imports = [ ];
|
||||
environment.etc.hello.text = "${settings.greeting} World!";
|
||||
};
|
||||
};
|
||||
}
|
||||
@@ -27,20 +27,10 @@ let
|
||||
module.name = "hello-world";
|
||||
module.input = "self";
|
||||
|
||||
roles.peer.machines.jon = { };
|
||||
roles.evening.machines.jon = { };
|
||||
};
|
||||
};
|
||||
};
|
||||
# NOTE:
|
||||
# If you wonder why 'self-zerotier-redux':
|
||||
# A local module has prefix 'self', otherwise it is the name of the 'input'
|
||||
# The rest is the name of the service as in the instance 'module.name';
|
||||
#
|
||||
# -> ${module.input}-${module.name}
|
||||
# In this case it is 'self-zerotier-redux'
|
||||
# This is usually only used internally, but we can use it to test the evaluation of service module in isolation
|
||||
# evaluatedService =
|
||||
# testFlake.clanInternals.inventoryClass.distributedServices.importedModulesEvaluated.self-zerotier-redux.config;
|
||||
in
|
||||
{
|
||||
test_simple = {
|
||||
|
||||
@@ -5,22 +5,35 @@
|
||||
directory = ./.;
|
||||
inventory = {
|
||||
machines.peer1 = { };
|
||||
machines.peer2 = { };
|
||||
|
||||
instances."test" = {
|
||||
module.name = "hello-service";
|
||||
module.input = "self";
|
||||
roles.peer.machines.peer1 = { };
|
||||
|
||||
# Assign the roles to the two machines
|
||||
roles.morning.machines.peer1 = { };
|
||||
|
||||
roles.evening.machines.peer2 = {
|
||||
# Set roles settings for the peers, where we want to differ from
|
||||
# the role defaults
|
||||
settings = {
|
||||
greeting = "Good night";
|
||||
};
|
||||
};
|
||||
};
|
||||
};
|
||||
};
|
||||
|
||||
testScript =
|
||||
{ nodes, ... }:
|
||||
{ ... }:
|
||||
''
|
||||
start_all()
|
||||
|
||||
# peer1 should have the 'hello' file
|
||||
value = peer1.succeed("cat ${nodes.peer1.clan.core.vars.generators.hello.files.hello.path}")
|
||||
assert value.strip() == "Hello world from peer1", value
|
||||
value = peer1.succeed("greet-world")
|
||||
assert value.strip() == "Good morning World! I'm peer1", value
|
||||
|
||||
value = peer2.succeed("greet-world")
|
||||
assert value.strip() == "Good night World! I'm peer2", value
|
||||
'';
|
||||
}
|
||||
|
||||
35
clanServices/localbackup/README.md
Normal file
@@ -0,0 +1,35 @@
|
||||
## Features
|
||||
|
||||
- Creates incremental snapshots using rsnapshot
|
||||
- Supports multiple backup targets
|
||||
- Mount/unmount hooks for external storage
|
||||
- Pre/post backup hooks for custom scripts
|
||||
- Configurable snapshot retention
|
||||
- Automatic state folder detection
|
||||
|
||||
## Usage
|
||||
|
||||
Enable the localbackup service and configure backup targets:
|
||||
|
||||
```nix
|
||||
instances = {
|
||||
localbackup = {
|
||||
module.name = "@clan/localbackup";
|
||||
module.input = "self";
|
||||
roles.default.machines."machine".settings = {
|
||||
targets.external = {
|
||||
directory = "/mnt/backup";
|
||||
mountpoint = "/mnt/backup";
|
||||
};
|
||||
};
|
||||
};
|
||||
};
|
||||
```
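
Targets can also run hooks around mounting, unmounting and the backup itself, and the number of retained snapshots is configurable. A sketch along the same lines, with placeholder hook commands:

```nix
roles.default.machines."machine".settings = {
  snapshots = 10; # keep the last 10 snapshots instead of the default 20
  targets.external = {
    directory = "/mnt/backup";
    mountpoint = "/mnt/backup";
    # placeholder hook commands
    preMountHook = ''
      echo "about to mount the backup disk"
    '';
    postUnmountHook = ''
      echo "backup disk unmounted again"
    '';
    preBackupHook = ''
      echo "starting backup"
    '';
  };
};
```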
|
||||
|
||||
## Commands
|
||||
|
||||
The service provides these commands:
|
||||
|
||||
- `localbackup-create`: Create a new backup
|
||||
- `localbackup-list`: List available backups
|
||||
- `localbackup-restore`: Restore from backup (requires NAME and FOLDERS environment variables)
|
||||
267
clanServices/localbackup/default.nix
Normal file
@@ -0,0 +1,267 @@
|
||||
{ ... }:
|
||||
{
|
||||
_class = "clan.service";
|
||||
manifest.name = "localbackup";
|
||||
manifest.description = "Automatically backups current machine to local directory.";
|
||||
manifest.categories = [ "System" ];
|
||||
manifest.readme = builtins.readFile ./README.md;
|
||||
|
||||
roles.default = {
|
||||
interface =
|
||||
{ lib, ... }:
|
||||
{
|
||||
|
||||
options = {
|
||||
|
||||
targets = lib.mkOption {
|
||||
type = lib.types.attrsOf (
|
||||
lib.types.submodule (
|
||||
{ name, ... }:
|
||||
{
|
||||
options = {
|
||||
name = lib.mkOption {
|
||||
type = lib.types.strMatching "^[a-zA-Z0-9._-]+$";
|
||||
default = name;
|
||||
description = "the name of the backup job";
|
||||
};
|
||||
directory = lib.mkOption {
|
||||
type = lib.types.str;
|
||||
description = "the directory to backup";
|
||||
};
|
||||
mountpoint = lib.mkOption {
|
||||
type = lib.types.nullOr lib.types.str;
|
||||
default = null;
|
||||
description = "mountpoint of the directory to backup. If set, the directory will be mounted before the backup and unmounted afterwards";
|
||||
};
|
||||
preMountHook = lib.mkOption {
|
||||
type = lib.types.nullOr lib.types.lines;
|
||||
default = null;
|
||||
description = "Shell commands to run before the directory is mounted";
|
||||
};
|
||||
postMountHook = lib.mkOption {
|
||||
type = lib.types.nullOr lib.types.lines;
|
||||
default = null;
|
||||
description = "Shell commands to run after the directory is mounted";
|
||||
};
|
||||
preUnmountHook = lib.mkOption {
|
||||
type = lib.types.nullOr lib.types.lines;
|
||||
default = null;
|
||||
description = "Shell commands to run before the directory is unmounted";
|
||||
};
|
||||
postUnmountHook = lib.mkOption {
|
||||
type = lib.types.nullOr lib.types.lines;
|
||||
default = null;
|
||||
description = "Shell commands to run after the directory is unmounted";
|
||||
};
|
||||
preBackupHook = lib.mkOption {
|
||||
type = lib.types.nullOr lib.types.lines;
|
||||
default = null;
|
||||
description = "Shell commands to run before the backup";
|
||||
};
|
||||
postBackupHook = lib.mkOption {
|
||||
type = lib.types.nullOr lib.types.lines;
|
||||
default = null;
|
||||
description = "Shell commands to run after the backup";
|
||||
};
|
||||
};
|
||||
}
|
||||
)
|
||||
);
|
||||
# default = { };
|
||||
description = "List of directories where backups are stored";
|
||||
};
|
||||
|
||||
snapshots = lib.mkOption {
|
||||
type = lib.types.int;
|
||||
default = 20;
|
||||
description = "Number of snapshots to keep";
|
||||
};
|
||||
};
|
||||
|
||||
};
|
||||
|
||||
perInstance =
|
||||
{
|
||||
settings,
|
||||
...
|
||||
}:
|
||||
{
|
||||
nixosModule =
|
||||
{
|
||||
config,
|
||||
lib,
|
||||
pkgs,
|
||||
...
|
||||
}:
|
||||
|
||||
let
|
||||
mountHook = target: ''
|
||||
if [[ -x /run/current-system/sw/bin/localbackup-mount-${target.name} ]]; then
|
||||
/run/current-system/sw/bin/localbackup-mount-${target.name}
|
||||
fi
|
||||
if [[ -x /run/current-system/sw/bin/localbackup-unmount-${target.name} ]]; then
|
||||
trap "/run/current-system/sw/bin/localbackup-unmount-${target.name}" EXIT
|
||||
fi
|
||||
'';
|
||||
|
||||
uniqueFolders = lib.unique (
|
||||
lib.flatten (lib.mapAttrsToList (_name: state: state.folders) config.clan.core.state)
|
||||
);
|
||||
|
||||
rsnapshotConfig = target: ''
|
||||
config_version 1.2
|
||||
snapshot_root ${target.directory}
|
||||
sync_first 1
|
||||
cmd_cp ${pkgs.coreutils}/bin/cp
|
||||
cmd_rm ${pkgs.coreutils}/bin/rm
|
||||
cmd_rsync ${pkgs.rsync}/bin/rsync
|
||||
cmd_ssh ${pkgs.openssh}/bin/ssh
|
||||
cmd_logger ${pkgs.inetutils}/bin/logger
|
||||
cmd_du ${pkgs.coreutils}/bin/du
|
||||
cmd_rsnapshot_diff ${pkgs.rsnapshot}/bin/rsnapshot-diff
|
||||
|
||||
${lib.optionalString (target.postBackupHook != null) ''
|
||||
cmd_postexec ${pkgs.writeShellScript "postexec.sh" ''
|
||||
set -efu -o pipefail
|
||||
${target.postBackupHook}
|
||||
''}
|
||||
''}
|
||||
retain snapshot ${builtins.toString settings.snapshots}
|
||||
${lib.concatMapStringsSep "\n" (folder: ''
|
||||
backup ${folder} ${config.networking.hostName}/
|
||||
'') uniqueFolders}
|
||||
'';
|
||||
in
|
||||
|
||||
{
|
||||
|
||||
environment.systemPackages = [
|
||||
(pkgs.writeShellScriptBin "localbackup-create" ''
|
||||
set -efu -o pipefail
|
||||
export PATH=${
|
||||
lib.makeBinPath [
|
||||
pkgs.rsnapshot
|
||||
pkgs.coreutils
|
||||
pkgs.util-linux
|
||||
]
|
||||
}
|
||||
${lib.concatMapStringsSep "\n" (target: ''
|
||||
${mountHook target}
|
||||
echo "Creating backup '${target.name}'"
|
||||
|
||||
${lib.optionalString (target.preBackupHook != null) ''
|
||||
(
|
||||
${target.preBackupHook}
|
||||
)
|
||||
''}
|
||||
|
||||
declare -A preCommandErrors
|
||||
${lib.concatMapStringsSep "\n" (
|
||||
state:
|
||||
lib.optionalString (state.preBackupCommand != null) ''
|
||||
echo "Running pre-backup command for ${state.name}"
|
||||
if ! /run/current-system/sw/bin/${state.preBackupCommand}; then
|
||||
preCommandErrors["${state.name}"]=1
|
||||
fi
|
||||
''
|
||||
) (builtins.attrValues config.clan.core.state)}
|
||||
|
||||
rsnapshot -c "${pkgs.writeText "rsnapshot.conf" (rsnapshotConfig target)}" sync
|
||||
rsnapshot -c "${pkgs.writeText "rsnapshot.conf" (rsnapshotConfig target)}" snapshot
|
||||
'') (builtins.attrValues settings.targets)}'')
|
||||
(pkgs.writeShellScriptBin "localbackup-list" ''
|
||||
set -efu -o pipefail
|
||||
export PATH=${
|
||||
lib.makeBinPath [
|
||||
pkgs.jq
|
||||
pkgs.findutils
|
||||
pkgs.coreutils
|
||||
pkgs.util-linux
|
||||
]
|
||||
}
|
||||
(${
|
||||
lib.concatMapStringsSep "\n" (target: ''
|
||||
(
|
||||
${mountHook target}
|
||||
find ${lib.escapeShellArg target.directory} -mindepth 1 -maxdepth 1 -name "snapshot.*" -print0 -type d \
|
||||
| jq -Rs 'split("\u0000") | .[] | select(. != "") | { "name": ("${target.name}::" + .)}'
|
||||
)
|
||||
'') (builtins.attrValues settings.targets)
|
||||
}) | jq -s .
|
||||
'')
|
||||
(pkgs.writeShellScriptBin "localbackup-restore" ''
|
||||
set -efu -o pipefail
|
||||
export PATH=${
|
||||
lib.makeBinPath [
|
||||
pkgs.rsync
|
||||
pkgs.coreutils
|
||||
pkgs.util-linux
|
||||
pkgs.gawk
|
||||
]
|
||||
}
|
||||
if [[ "''${NAME:-}" == "" ]]; then
|
||||
echo "No backup name given via NAME environment variable"
|
||||
exit 1
|
||||
fi
|
||||
if [[ "''${FOLDERS:-}" == "" ]]; then
|
||||
echo "No folders given via FOLDERS environment variable"
|
||||
exit 1
|
||||
fi
|
||||
name=$(awk -F'::' '{print $1}' <<< $NAME)
|
||||
backupname=''${NAME#$name::}
|
||||
|
||||
if command -v localbackup-mount-$name; then
|
||||
localbackup-mount-$name
|
||||
fi
|
||||
if command -v localbackup-unmount-$name; then
|
||||
trap "localbackup-unmount-$name" EXIT
|
||||
fi
|
||||
|
||||
if [[ ! -d $backupname ]]; then
|
||||
echo "No backup found $backupname"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
IFS=':' read -ra FOLDER <<< "''$FOLDERS"
|
||||
for folder in "''${FOLDER[@]}"; do
|
||||
mkdir -p "$folder"
|
||||
rsync -a "$backupname/${config.networking.hostName}$folder/" "$folder"
|
||||
done
|
||||
'')
|
||||
]
|
||||
++ (lib.mapAttrsToList (
|
||||
name: target:
|
||||
pkgs.writeShellScriptBin ("localbackup-mount-" + name) ''
|
||||
set -efu -o pipefail
|
||||
${lib.optionalString (target.preMountHook != null) target.preMountHook}
|
||||
${lib.optionalString (target.mountpoint != null) ''
|
||||
if ! ${pkgs.util-linux}/bin/mountpoint -q ${lib.escapeShellArg target.mountpoint}; then
|
||||
${pkgs.util-linux}/bin/mount -o X-mount.mkdir ${lib.escapeShellArg target.mountpoint}
|
||||
fi
|
||||
''}
|
||||
${lib.optionalString (target.postMountHook != null) target.postMountHook}
|
||||
''
|
||||
) settings.targets)
|
||||
++ lib.mapAttrsToList (
|
||||
name: target:
|
||||
pkgs.writeShellScriptBin ("localbackup-unmount-" + name) ''
|
||||
set -efu -o pipefail
|
||||
${lib.optionalString (target.preUnmountHook != null) target.preUnmountHook}
|
||||
${lib.optionalString (
|
||||
target.mountpoint != null
|
||||
) "${pkgs.util-linux}/bin/umount ${lib.escapeShellArg target.mountpoint}"}
|
||||
${lib.optionalString (target.postUnmountHook != null) target.postUnmountHook}
|
||||
''
|
||||
) settings.targets;
|
||||
|
||||
clan.core.backups.providers.localbackup = {
|
||||
# TODO list needs to run locally or on the remote machine
|
||||
list = "localbackup-list";
|
||||
create = "localbackup-create";
|
||||
restore = "localbackup-restore";
|
||||
};
|
||||
|
||||
};
|
||||
};
|
||||
};
|
||||
}
|
||||
16
clanServices/localbackup/flake-module.nix
Normal file
@@ -0,0 +1,16 @@
|
||||
{ lib, ... }:
|
||||
let
|
||||
module = lib.modules.importApply ./default.nix { };
|
||||
in
|
||||
{
|
||||
clan.modules.localbackup = module;
|
||||
perSystem =
|
||||
{ ... }:
|
||||
{
|
||||
clan.nixosTests.localbackup = {
|
||||
imports = [ ./tests/vm/default.nix ];
|
||||
|
||||
clan.modules."@clan/localbackup" = module;
|
||||
};
|
||||
};
|
||||
}
|
||||
62
clanServices/localbackup/tests/vm/default.nix
Normal file
@@ -0,0 +1,62 @@
|
||||
{ ... }:
|
||||
{
|
||||
name = "service-localbackup";
|
||||
|
||||
clan = {
|
||||
directory = ./.;
|
||||
test.useContainers = true;
|
||||
inventory = {
|
||||
|
||||
machines.machine = { };
|
||||
|
||||
instances = {
|
||||
localbackup = {
|
||||
module.name = "@clan/localbackup";
|
||||
module.input = "self";
|
||||
roles.default.machines."machine".settings = {
|
||||
|
||||
targets.hdd = {
|
||||
directory = "/mnt/external-disk";
|
||||
preMountHook = ''
|
||||
touch /run/mount-external-disk
|
||||
'';
|
||||
postUnmountHook = ''
|
||||
touch /run/unmount-external-disk
|
||||
'';
|
||||
};
|
||||
};
|
||||
};
|
||||
};
|
||||
};
|
||||
};
|
||||
|
||||
nodes.machine = {
|
||||
clan.core.state.test-backups.folders = [ "/var/test-backups" ];
|
||||
};
|
||||
|
||||
testScript = ''
|
||||
import json
|
||||
start_all()
|
||||
|
||||
machine.systemctl("start network-online.target")
|
||||
machine.wait_for_unit("network-online.target")
|
||||
|
||||
# dummy data
|
||||
machine.succeed("mkdir -p /var/test-backups")
|
||||
machine.succeed("echo testing > /var/test-backups/somefile")
|
||||
|
||||
# create
|
||||
machine.succeed("localbackup-create >&2")
|
||||
machine.wait_until_succeeds("! systemctl is-active localbackup-job-serverone >&2")
|
||||
|
||||
# list
|
||||
snapshot_list = machine.succeed("localbackup-list").strip()
|
||||
assert json.loads(snapshot_list)[0]["name"].strip() == "hdd::/mnt/external-disk/snapshot.0"
|
||||
|
||||
# localbackup restore
|
||||
machine.succeed("rm -f /var/test-backups/somefile")
|
||||
|
||||
machine.succeed("NAME=/mnt/external-disk/snapshot.0 FOLDERS=/var/test-backups /run/current-system/sw/bin/localbackup-restore >&2")
|
||||
assert machine.succeed("cat /var/test-backups/somefile").strip() == "testing", "restore failed"
|
||||
'';
|
||||
}
|
||||
@@ -184,24 +184,24 @@
|
||||
settings.certificate.searchDomains != [ ]
|
||||
) config.clan.core.vars.generators.openssh-cert.files."ssh.id_ed25519-cert.pub".path;
|
||||
|
||||
hostKeys =
|
||||
[
|
||||
{
|
||||
path = config.clan.core.vars.generators.openssh.files."ssh.id_ed25519".path;
|
||||
type = "ed25519";
|
||||
}
|
||||
]
|
||||
++ lib.optional settings.hostKeys.rsa.enable {
|
||||
path = config.clan.core.vars.generators.openssh-rsa.files."ssh.id_rsa".path;
|
||||
type = "rsa";
|
||||
};
|
||||
hostKeys = [
|
||||
{
|
||||
path = config.clan.core.vars.generators.openssh.files."ssh.id_ed25519".path;
|
||||
type = "ed25519";
|
||||
}
|
||||
]
|
||||
++ lib.optional settings.hostKeys.rsa.enable {
|
||||
path = config.clan.core.vars.generators.openssh-rsa.files."ssh.id_rsa".path;
|
||||
type = "rsa";
|
||||
};
|
||||
};
|
||||
|
||||
programs.ssh.knownHosts.clan-sshd-self-ed25519 = {
|
||||
hostNames = [
|
||||
"localhost"
|
||||
config.networking.hostName
|
||||
] ++ (lib.optional (config.networking.domain != null) config.networking.fqdn);
|
||||
]
|
||||
++ (lib.optional (config.networking.domain != null) config.networking.fqdn);
|
||||
publicKey = config.clan.core.vars.generators.openssh.files."ssh.id_ed25519.pub".value;
|
||||
};
|
||||
};
|
||||
|
||||
20
clanServices/syncthing/README.md
Normal file
@@ -0,0 +1,20 @@
|
||||
## Usage
|
||||
|
||||
```nix
|
||||
{
|
||||
instances.syncthing = {
|
||||
roles.peer.tags.all = { };
|
||||
roles.peer.settings.folders = {
|
||||
documents = {
|
||||
path = "~/syncthing/documents";
|
||||
};
|
||||
};
|
||||
};
|
||||
}
|
||||
```
|
||||
|
||||
Now the folder `~/syncthing/documents` will be shared with all your machines.
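
Devices that are not managed by clan, such as a phone, can be added through the peer role's `extraDevices` option. A sketch using the placeholder device ID from the option's example:

```nix
{
  instances.syncthing = {
    roles.peer.tags.all = { };
    roles.peer.settings.extraDevices.phone = {
      id = "P56IOI7-MZJNU2Y-IQGDREY-DM2MGTI-MGL3BXN-PQ6W5BM-TBBZ4TJ-XZWICQ2";
      name = "My Phone";
      addresses = [ "dynamic" ];
    };
  };
}
```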
|
||||
|
||||
|
||||
## Documentation
|
||||
Extensive documentation is available on the [Syncthing](https://docs.syncthing.net/) website.
|
||||
243
clanServices/syncthing/default.nix
Normal file
@@ -0,0 +1,243 @@
|
||||
{ ... }:
|
||||
{
|
||||
_class = "clan.service";
|
||||
manifest.name = "clan-core/syncthing";
|
||||
manifest.description = "Syncthing is a continuous file synchronization program with automatic peer discovery";
|
||||
manifest.categories = [
|
||||
"Utility"
|
||||
"System"
|
||||
"Network"
|
||||
];
|
||||
manifest.readme = builtins.readFile ./README.md;
|
||||
|
||||
roles.peer = {
|
||||
interface =
|
||||
{ lib, ... }:
|
||||
{
|
||||
options.openDefaultPorts = lib.mkOption {
|
||||
type = lib.types.bool;
|
||||
default = true;
|
||||
description = ''
|
||||
Whether to open the default syncthing ports in the firewall.
|
||||
'';
|
||||
};
|
||||
|
||||
options.folders = lib.mkOption {
|
||||
type = lib.types.attrsOf (
|
||||
lib.types.submodule {
|
||||
options = {
|
||||
path = lib.mkOption {
|
||||
type = lib.types.str;
|
||||
description = "Path to the folder to sync";
|
||||
};
|
||||
devices = lib.mkOption {
|
||||
type = lib.types.listOf lib.types.str;
|
||||
default = [ ];
|
||||
description = "List of device names to share this folder with. Empty list means all peers and extraDevices.";
|
||||
};
|
||||
ignorePerms = lib.mkOption {
|
||||
type = lib.types.bool;
|
||||
default = false;
|
||||
description = "Ignore permission changes";
|
||||
};
|
||||
rescanIntervalS = lib.mkOption {
|
||||
type = lib.types.int;
|
||||
default = 3600;
|
||||
description = "Rescan interval in seconds";
|
||||
};
|
||||
type = lib.mkOption {
|
||||
type = lib.types.enum [
|
||||
"sendreceive"
|
||||
"sendonly"
|
||||
"receiveonly"
|
||||
];
|
||||
default = "sendreceive";
|
||||
description = "Folder type";
|
||||
};
|
||||
versioning = lib.mkOption {
|
||||
type = lib.types.nullOr (
|
||||
lib.types.submodule {
|
||||
options = {
|
||||
type = lib.mkOption {
|
||||
type = lib.types.enum [
|
||||
"external"
|
||||
"simple"
|
||||
"staggered"
|
||||
"trashcan"
|
||||
];
|
||||
description = "Versioning type";
|
||||
};
|
||||
params = lib.mkOption {
|
||||
type = lib.types.attrsOf lib.types.str;
|
||||
default = { };
|
||||
description = "Versioning parameters";
|
||||
};
|
||||
};
|
||||
}
|
||||
);
|
||||
default = null;
|
||||
description = "Versioning configuration";
|
||||
};
|
||||
};
|
||||
}
|
||||
);
|
||||
default = { };
|
||||
description = "Folders to synchronize between all peers";
|
||||
};
|
||||
|
||||
options.extraDevices = lib.mkOption {
|
||||
type = lib.types.attrsOf (
|
||||
lib.types.submodule {
|
||||
options = {
|
||||
id = lib.mkOption {
|
||||
type = lib.types.str;
|
||||
description = "Device ID of the external syncthing device";
|
||||
example = "P56IOI7-MZJNU2Y-IQGDREY-DM2MGTI-MGL3BXN-PQ6W5BM-TBBZ4TJ-XZWICQ2";
|
||||
};
|
||||
addresses = lib.mkOption {
|
||||
type = lib.types.listOf lib.types.str;
|
||||
default = [ "dynamic" ];
|
||||
description = "List of addresses for the device";
|
||||
example = [
|
||||
"dynamic"
|
||||
"tcp://192.168.1.100:22000"
|
||||
];
|
||||
};
|
||||
name = lib.mkOption {
|
||||
type = lib.types.str;
|
||||
default = "";
|
||||
description = "Human readable name for the device";
|
||||
};
|
||||
};
|
||||
}
|
||||
);
|
||||
default = { };
|
||||
description = "External syncthing devices not managed by clan (e.g., mobile phones)";
|
||||
example = lib.literalExpression ''
|
||||
{
|
||||
phone = {
|
||||
id = "P56IOI7-MZJNU2Y-IQGDREY-DM2MGTI-MGL3BXN-PQ6W5BM-TBBZ4TJ-XZWICQ2";
|
||||
name = "My Phone";
|
||||
addresses = [ "dynamic" ];
|
||||
};
|
||||
}
|
||||
'';
|
||||
};
|
||||
};
|
||||
|
||||
perInstance =
|
||||
{
|
||||
settings,
|
||||
roles,
|
||||
...
|
||||
}:
|
||||
{
|
||||
nixosModule =
|
||||
{
|
||||
config,
|
||||
lib,
|
||||
...
|
||||
}:
|
||||
let
|
||||
allPeerMachines = lib.attrNames roles.peer.machines;
|
||||
|
||||
readMachineVar =
|
||||
machine: varPath: default:
|
||||
let
|
||||
fullPath = "${config.clan.core.settings.directory}/vars/per-machine/${machine}/${varPath}";
|
||||
in
|
||||
if builtins.pathExists fullPath then
|
||||
lib.removeSuffix "\n" (builtins.readFile fullPath)
|
||||
else
|
||||
default;
|
||||
|
||||
peerDevices = lib.listToAttrs (
|
||||
lib.forEach allPeerMachines (machine: {
|
||||
name = machine;
|
||||
value = {
|
||||
name = machine;
|
||||
id = readMachineVar machine "syncthing/id/value" "";
|
||||
addresses = [
|
||||
"dynamic"
|
||||
]
|
||||
++
|
||||
lib.optional (readMachineVar machine "zerotier/zerotier-ip/value" null != null)
|
||||
"tcp://[${readMachineVar machine "zerotier/zerotier-ip/value" ""}]:22000";
|
||||
};
|
||||
})
|
||||
);
|
||||
|
||||
extraDevicesConfig = lib.mapAttrs (deviceName: deviceConfig: {
|
||||
inherit (deviceConfig) id addresses;
|
||||
name = if deviceConfig.name != "" then deviceConfig.name else deviceName;
|
||||
}) settings.extraDevices;
|
||||
|
||||
allDevices = peerDevices // extraDevicesConfig;
|
||||
|
||||
validDevices = lib.filterAttrs (_: device: device.id != "") allDevices;
|
||||
|
||||
syncthingFolders = lib.mapAttrs (
|
||||
_folderName: folderConfig:
|
||||
let
|
||||
targetDevices =
|
||||
if folderConfig.devices == [ ] then lib.attrNames validDevices else folderConfig.devices;
|
||||
in
|
||||
folderConfig
|
||||
// {
|
||||
devices = targetDevices;
|
||||
}
|
||||
) settings.folders;
|
||||
in
|
||||
{
|
||||
imports = [
|
||||
./vars.nix
|
||||
];
|
||||
|
||||
config = lib.mkMerge [
|
||||
{
|
||||
services.syncthing = {
|
||||
enable = true;
|
||||
configDir = "/var/lib/syncthing";
|
||||
group = "syncthing";
|
||||
|
||||
key = lib.mkDefault config.clan.core.vars.generators.syncthing.files.key.path or null;
|
||||
cert = lib.mkDefault config.clan.core.vars.generators.syncthing.files.cert.path or null;
|
||||
|
||||
settings = {
|
||||
devices = validDevices;
|
||||
folders = syncthingFolders;
|
||||
};
|
||||
};
|
||||
}
|
||||
|
||||
# Conditionally open firewall ports
|
||||
(lib.mkIf settings.openDefaultPorts {
|
||||
services.syncthing.openDefaultPorts = true;
|
||||
# Syncthing ports: 8384 for remote access to GUI
|
||||
# 22000 TCP and/or UDP for sync traffic
|
||||
# 21027/UDP for discovery
|
||||
# source: https://docs.syncthing.net/users/firewall.html
|
||||
networking.firewall.interfaces."zt+".allowedTCPPorts = [
|
||||
8384
|
||||
22000
|
||||
];
|
||||
networking.firewall.interfaces."zt+".allowedUDPPorts = [
|
||||
22000
|
||||
21027
|
||||
];
|
||||
})
|
||||
];
|
||||
};
|
||||
};
|
||||
};
|
||||
perMachine = _: {
|
||||
nixosModule =
|
||||
{ lib, ... }:
|
||||
{
|
||||
# Activates inotify compatibility on syncthing
|
||||
# use mkOverride 900 here as it otherwise would collide with the default of the
|
||||
# upstream nixos xserver.nix
|
||||
boot.kernel.sysctl."fs.inotify.max_user_watches" = lib.mkOverride 900 524288;
|
||||
};
|
||||
};
|
||||
}
|
||||
54
clanServices/syncthing/flake-module.nix
Normal file
@@ -0,0 +1,54 @@
|
||||
{
|
||||
self,
|
||||
lib,
|
||||
inputs,
|
||||
...
|
||||
}:
|
||||
let
|
||||
module = lib.modules.importApply ./default.nix {
|
||||
inherit (self) packages;
|
||||
};
|
||||
in
|
||||
{
|
||||
clan.modules = {
|
||||
syncthing = module;
|
||||
};
|
||||
perSystem =
|
||||
let
|
||||
unit-test-module = (
|
||||
self.clanLib.test.flakeModules.makeEvalChecks {
|
||||
inherit module;
|
||||
inherit inputs;
|
||||
fileset = lib.fileset.unions [
|
||||
# The syncthing service being tested
|
||||
../../clanServices/syncthing
|
||||
# Required modules
|
||||
../../nixosModules/clanCore
|
||||
# Dependencies like clan-cli
|
||||
../../pkgs/clan-cli
|
||||
];
|
||||
testName = "syncthing";
|
||||
tests = ./tests/eval-tests.nix;
|
||||
testArgs = { };
|
||||
}
|
||||
);
|
||||
in
|
||||
{ ... }:
|
||||
{
|
||||
imports = [
|
||||
unit-test-module
|
||||
];
|
||||
/**
|
||||
1. Prepare the test vars
|
||||
nix run .#generate-test-vars -- clanServices/syncthing/tests/vm syncthing-service
|
||||
|
||||
2. To run the test
|
||||
nix build .#checks.x86_64-linux.syncthing-service
|
||||
*/
|
||||
clan.nixosTests.syncthing-service = {
|
||||
imports = [ ./tests/vm/default.nix ];
|
||||
|
||||
clan.modules.syncthing-service = module;
|
||||
};
|
||||
};
|
||||
}
|
||||
62
clanServices/syncthing/tests/eval-tests.nix
Normal file
@@ -0,0 +1,62 @@
|
||||
{
|
||||
module,
|
||||
clanLib,
|
||||
lib,
|
||||
...
|
||||
}:
|
||||
let
|
||||
testFlake =
|
||||
(clanLib.clan {
|
||||
self = { };
|
||||
directory = ./vm;
|
||||
|
||||
machines.machine1 = {
|
||||
nixpkgs.hostPlatform = "x86_64-linux";
|
||||
};
|
||||
machines.machine2 = {
|
||||
nixpkgs.hostPlatform = "x86_64-linux";
|
||||
};
|
||||
machines.machine3 = {
|
||||
nixpkgs.hostPlatform = "x86_64-linux";
|
||||
};
|
||||
machines.machine4 = {
|
||||
nixpkgs.hostPlatform = "x86_64-linux";
|
||||
};
|
||||
|
||||
modules.syncthing = module;
|
||||
|
||||
inventory.instances = {
|
||||
default = {
|
||||
module.name = "syncthing";
|
||||
module.input = "self";
|
||||
|
||||
roles.peer.tags.all = { };
|
||||
roles.peer.settings.extraDevices.phone = {
|
||||
id = "P56IOI7-MZJNU2Y-IQGDREY-DM2MGTI-MGL3BXN-PQ6W5BM-TBBZ4TJ-XZWICQ2";
|
||||
};
|
||||
};
|
||||
};
|
||||
}).config;
|
||||
in
|
||||
{
|
||||
test_machine1_peers = {
|
||||
expr = {
|
||||
devices = lib.attrNames testFlake.nixosConfigurations.machine1.config.services.syncthing.settings.devices;
|
||||
machine4_ID =
|
||||
testFlake.nixosConfigurations.machine1.config.services.syncthing.settings.devices.machine1.id;
|
||||
externalPhoneId =
|
||||
testFlake.nixosConfigurations.machine1.config.services.syncthing.settings.devices.phone.id;
|
||||
};
|
||||
expected = {
|
||||
devices = [
|
||||
"machine1"
|
||||
"machine2"
|
||||
"machine3"
|
||||
"machine4"
|
||||
"phone"
|
||||
];
|
||||
machine4_ID = "LJOGYGS-RQPWIHV-HD4B3GK-JZPVPK6-VI3IAY5-CWQWIXK-NJSQMFH-KXHOHA4";
|
||||
externalPhoneId = "P56IOI7-MZJNU2Y-IQGDREY-DM2MGTI-MGL3BXN-PQ6W5BM-TBBZ4TJ-XZWICQ2";
|
||||
};
|
||||
};
|
||||
}
|
||||
95
clanServices/syncthing/tests/vm/default.nix
Normal file
@@ -0,0 +1,95 @@
|
||||
{
|
||||
name = "service-syncthing-service";
|
||||
|
||||
clan = {
|
||||
directory = ./.;
|
||||
test.useContainers = true;
|
||||
inventory = {
|
||||
machines.machine1 = { };
|
||||
machines.machine2 = { };
|
||||
machines.machine3 = { };
|
||||
machines.machine4 = { };
|
||||
|
||||
instances.default = {
|
||||
module.name = "syncthing-service";
|
||||
module.input = "self";
|
||||
roles.peer.tags.all = { };
|
||||
roles.peer.settings.folders = {
|
||||
documents = {
|
||||
path = "/var/lib/syncthing/documents";
|
||||
type = "sendreceive";
|
||||
};
|
||||
partly_shared = {
|
||||
devices = [
|
||||
"machine1"
|
||||
"machine4"
|
||||
];
|
||||
path = "~/music";
|
||||
type = "sendreceive";
|
||||
};
|
||||
};
|
||||
|
||||
};
|
||||
instances.small = {
|
||||
module.name = "syncthing-service";
|
||||
module.input = "self";
|
||||
roles.peer.machines = {
|
||||
machine3 = { };
|
||||
machine4 = { };
|
||||
};
|
||||
roles.peer.settings.folders = {
|
||||
pictures = {
|
||||
path = "~/pictures";
|
||||
type = "sendreceive";
|
||||
};
|
||||
};
|
||||
};
|
||||
};
|
||||
};
|
||||
|
||||
testScript =
|
||||
{ ... }:
|
||||
''
|
||||
start_all()
|
||||
|
||||
machine1.wait_for_unit("syncthing.service")
|
||||
machine2.wait_for_unit("syncthing.service")
|
||||
machine3.wait_for_unit("syncthing.service")
|
||||
machine4.wait_for_unit("syncthing.service")
|
||||
|
||||
machine1.wait_for_open_port(8384)
|
||||
machine2.wait_for_open_port(8384)
|
||||
machine3.wait_for_open_port(8384)
|
||||
machine4.wait_for_open_port(8384)
|
||||
|
||||
machine1.wait_for_open_port(22000)
|
||||
machine2.wait_for_open_port(22000)
|
||||
machine3.wait_for_open_port(22000)
|
||||
machine4.wait_for_open_port(22000)
|
||||
|
||||
# Check that the correct folders are synchronized
|
||||
# documents - all
|
||||
machine1.wait_for_file("/var/lib/syncthing/documents")
|
||||
machine2.wait_for_file("/var/lib/syncthing/documents")
|
||||
machine3.wait_for_file("/var/lib/syncthing/documents")
|
||||
machine4.wait_for_file("/var/lib/syncthing/documents")
|
||||
# music - machine 1 & 4
|
||||
machine1.wait_for_file("/var/lib/syncthing/music")
|
||||
machine4.wait_for_file("/var/lib/syncthing/music")
|
||||
# pictures - machine 3 & 4
|
||||
machine3.wait_for_file("/var/lib/syncthing/pictures")
|
||||
machine4.wait_for_file("/var/lib/syncthing/pictures")
|
||||
|
||||
machine1.succeed("echo document > /var/lib/syncthing/documents/document")
|
||||
machine1.succeed("echo music > /var/lib/syncthing/music/music")
|
||||
machine3.succeed("echo picture > /var/lib/syncthing/pictures/picture")
|
||||
|
||||
machine2.wait_for_file("/var/lib/syncthing/documents/document", 20)
|
||||
machine3.wait_for_file("/var/lib/syncthing/documents/document", 20)
|
||||
machine4.wait_for_file("/var/lib/syncthing/documents/document", 20)
|
||||
|
||||
machine4.wait_for_file("/var/lib/syncthing/music/music", 20)
|
||||
|
||||
machine4.wait_for_file("/var/lib/syncthing/pictures/picture", 20)
|
||||
'';
|
||||
}
|
||||
6
clanServices/syncthing/tests/vm/sops/machines/machine1/key.json
Executable file
@@ -0,0 +1,6 @@
|
||||
[
|
||||
{
|
||||
"publickey": "age1numxr6m52fxrm9a7sdw4vdpkp463mm8qtuf5d0p0jde04wydfgtscwdx78",
|
||||
"type": "age"
|
||||
}
|
||||
]
|
||||
6
clanServices/syncthing/tests/vm/sops/machines/machine2/key.json
Executable file
@@ -0,0 +1,6 @@
|
||||
[
|
||||
{
|
||||
"publickey": "age1aqng9vmlgth5aucu5ty2wa0kk9tvk7erj4s07hq03s6emu72fgxsqkrqql",
|
||||
"type": "age"
|
||||
}
|
||||
]
|
||||
6
clanServices/syncthing/tests/vm/sops/machines/machine3/key.json
Executable file
@@ -0,0 +1,6 @@
|
||||
[
|
||||
{
|
||||
"publickey": "age1kul02mg50nxccsl38nvma0enrgx454wq0qdefllj4l0adqkllvls5wuhfr",
|
||||
"type": "age"
|
||||
}
|
||||
]
|
||||
6
clanServices/syncthing/tests/vm/sops/machines/machine4/key.json
Executable file
@@ -0,0 +1,6 @@
|
||||
[
|
||||
{
|
||||
"publickey": "age1kqgx4elusxx4u8409gml5z6tvrsayqsphewsl93mtqn7pl2p5dwq9lujpj",
|
||||
"type": "age"
|
||||
}
|
||||
]
|
||||
@@ -0,0 +1,15 @@
|
||||
{
|
||||
"data": "ENC[AES256_GCM,data:XqOUvbQOkOUrxQAMrUSRGfOl2eYjGZGI5st1iZl9Re8ymQa0LtokGzknfSiSucwfP50YTo7NoQsNCRsq/nS0RWbby0TQceqSVSw=,iv:GT3UrKdanFy4aMRPTwuOIyvP7pT6MXGRIstgKZmvG/4=,tag:Bg3ZzCPpw3KHXwy2Hazqrg==,type:str]",
|
||||
"sops": {
|
||||
"age": [
|
||||
{
|
||||
"recipient": "age1qm0p4vf9jvcnn43s6l4prk8zn6cx0ep9gzvevxecv729xz540v8qa742eg",
|
||||
"enc": "-----BEGIN AGE ENCRYPTED FILE-----\nYWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSBmWWJuTjBCaUhUeXhSRmZS\nVW9uQ09yOXVJVFFybzJsdmVmaEpOZVdEVVJNCmxxcUU3eG44cTlrY1lnVUFjdlZK\nOXNvR2pDMVJtZXFCbjJlTW1xVGoyTlUKLS0tIDlkMS9FNVJuZGZyODJzZzVRNzJQ\nNWpuaUxpMnRaUWt5bmw1TjBJV0dUOHcKmuHPHwzTUtkl7eZjZ4422C8cxGUnnzg1\nBPwAewDXEgq+clTXpU3UWjSvUNpqIkh9GDRE+qYfYWa7m36TrzACig==\n-----END AGE ENCRYPTED FILE-----\n"
|
||||
}
|
||||
],
|
||||
"lastmodified": "2025-07-31T13:20:56Z",
|
||||
"mac": "ENC[AES256_GCM,data:INCJWY2hsAwF049zFpYUTccJvczfgrPQwu+YUt8s/vfdrAk+tmLqCVFh9idG97l/OIEVPLv1gFE3SlcC+kKCNQV4SAyZA62CTeeNdgSSqDDipjrFn4fr1L8ZenCn56/giW0GIB2bBCa5WUYaHtGDoG8f6HSqNIhjnY9/qmDLUWQ=,iv:Utda22592sXOEEKFRSgfP+yLgi6FQGeEFiT61ZiKcws=,tag:UWyEnSNt0ztxTh0FGRzTtA==,type:str]",
|
||||
"unencrypted_suffix": "_unencrypted",
|
||||
"version": "3.10.2"
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1 @@
|
||||
../../../users/admin
|
||||
@@ -0,0 +1,15 @@
|
||||
{
|
||||
"data": "ENC[AES256_GCM,data:bDwReDwUe9t0lXtTRU5r6WY3OvV1qOvkkByG2PbjXtArF4w6y3X/i6SYCnoys3wMvAY32+uH13ETXDS7LR/6W4/+pCAGAb3ATqQ=,iv:84dC7UP6QkHVkLwSvmb/NjXCA9OJ+of49UfitU0Cuoo=,tag:oO15dzpH7BKjawPGI1+F0Q==,type:str]",
|
||||
"sops": {
|
||||
"age": [
|
||||
{
|
||||
"recipient": "age1qm0p4vf9jvcnn43s6l4prk8zn6cx0ep9gzvevxecv729xz540v8qa742eg",
|
||||
"enc": "-----BEGIN AGE ENCRYPTED FILE-----\nYWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSBBVXpNTkhTU3A1OU5HZm9p\nb2t5YTIxZWU4Z3hZR0ljTUNRT2hvQXEwNjJRCjh4SXY1NWhPVFFTMWswaWo2cFJB\ndUx4V1RiRFQ1UkNQU24zaGlSV3ZYeE0KLS0tIGJ5ak9RS2JnNk0vU3BaMEZLTFI0\nZit5RDlZbWE1QzY0RHQzcU5pZy84TUkK3/HHFBGfA9EpU+WDrlM9w5rbmgOb7ZAi\nVQiO08PVGTOvsEDed5A2oIXImRopcuBATzKoi4DNXpYlpSI/scYuAA==\n-----END AGE ENCRYPTED FILE-----\n"
|
||||
}
|
||||
],
|
||||
"lastmodified": "2025-07-31T13:21:08Z",
|
||||
"mac": "ENC[AES256_GCM,data:qdNz02bNRHaWhTLxHqgvMRokpXZHvijyoP2c+KXMQxWP/7884SWQ4xoPSiUeLOPatLXpjXWQ/OBiDqQAzLkbC0/ujJH+BJQtV75xl6ytOjLbYw+hwatAR9C5GxiuFgKjLSsdaqzRlXJsULbf56edXeT+U/u5G8542WjF4AUat00=,iv:FYQRJQaRR0aPFyHyl0QCVPpjTXePXiZ2LjccPaybWh4=,tag:W/xkIgTUoHiTX9azluinAQ==,type:str]",
|
||||
"unencrypted_suffix": "_unencrypted",
|
||||
"version": "3.10.2"
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1 @@
|
||||
../../../users/admin
|
||||
@@ -0,0 +1,15 @@
|
||||
{
|
||||
"data": "ENC[AES256_GCM,data:2B9BeSPThkGX4sBAtm9c5Uc8vz5q+hSs8y0ibQ520s2EOYNWEEwP0e+9CBUmU1n0TKR4FP3HMHA0N7LKHBEXEb513I5lNHWgREc=,iv:qkmur96UjZ1BoWgvvihWk/7VRDg4snMeDA0s1MWdLb4=,tag:3djouRryDjcOFPv9Z8nLvg==,type:str]",
|
||||
"sops": {
|
||||
"age": [
|
||||
{
|
||||
"recipient": "age1qm0p4vf9jvcnn43s6l4prk8zn6cx0ep9gzvevxecv729xz540v8qa742eg",
|
||||
"enc": "-----BEGIN AGE ENCRYPTED FILE-----\nYWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSBxd2wxQWQwSHl5R0ZlRkxX\nWXBuck04TStoTHRYMDJaQkNRcDQvZFV6U0U0ClROOVpiVXhCVTdzeXpVQlRTYWRP\naWE2R0VtaGwvUk9UcGRWbEZtQlh5ckkKLS0tIDNjdm40UCthbDBwSE9ZYXBwVC9h\nRENrMFV2a21QRjBBYWJ4bFZ3K2NxWDAKr9hgDOuG4lR6dChQCvw/VOQdRNF0Mj1k\nzQRfQaC8nfZGGOJtU09zv5+ZPYJdheRuz91M18qY5uA5qrch5Yrvww==\n-----END AGE ENCRYPTED FILE-----\n"
|
||||
}
|
||||
],
|
||||
"lastmodified": "2025-07-31T13:21:21Z",
|
||||
"mac": "ENC[AES256_GCM,data:F6Ombz312QLl0BAkDbhN37KQ8gxMoukRqz8XkqANcdm8HyqfWL/V5Hxdt5Ve3HW4KzLO1lxb9M0/rQbtCj1hKMSnfM4XjP1R4ClLkU0knxeuLwbOtc4nN1qkaIs5fPcR0G9+gv+FnhZqDzosrhogDSWAayqiArwRWscMY4l716w=,iv:KYlcVQQGcWPIsL81VmTRmEBmC6PPT71i3ywcEbSfc5k=,tag:ogDnTv1KtXVtBDvhkjV1pw==,type:str]",
|
||||
"unencrypted_suffix": "_unencrypted",
|
||||
"version": "3.10.2"
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1 @@
|
||||
../../../users/admin
|
||||
@@ -0,0 +1,15 @@
|
||||
{
|
||||
"data": "ENC[AES256_GCM,data:chE6ezkSGLHwvfqrUPiTLEoB63NhzBJgHQnGLe/0mwuR9iST3SpthUlKv7K8cezOvq7VVqJUoOGQSbqtuA04NUvwShVCBbqPjeA=,iv:50IV6ggQVnQpUpU+jfBB2z5/BZYs3lktGMfsWsCtJUs=,tag:BjtV31lmGOT5FposJA91WA==,type:str]",
|
||||
"sops": {
|
||||
"age": [
|
||||
{
|
||||
"recipient": "age1qm0p4vf9jvcnn43s6l4prk8zn6cx0ep9gzvevxecv729xz540v8qa742eg",
|
||||
"enc": "-----BEGIN AGE ENCRYPTED FILE-----\nYWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSBZSVAzVXcrMWdUQmNCNHo4\neWw5NnJjYkdHbkE1dmJPcDlNb1dpT3kxbFN3Cm1HWVFxblgrNkpaZmtOVHBpdEVF\nWXRMamdSUGxkSk5ic3p3cEZrNG5QZEkKLS0tIDdYOGZRclREL0o1bWZmVTZoMUlu\nQzJPZFlUdFE4NkE0di94QVRzRXgwb1EKkqzL7yOdALLd8I2GnKPG3d61XTbNMs4+\n52M+o+wA8h+mnImbyVOtU9D6AkxvbKywZLmNRukkPqnkLqu5IXoSMQ==\n-----END AGE ENCRYPTED FILE-----\n"
|
||||
}
|
||||
],
|
||||
"lastmodified": "2025-07-31T13:21:34Z",
|
||||
"mac": "ENC[AES256_GCM,data:DI61cHhur+0q+gGtXHA6uFyZyKY/hCTvo2MLhhWPCSUWzYJ+j37fHfH1KUZcol+5prhnUIRrC4AoVt+0MeoXrNj+3SyptTVo1MgqNBayQkBMxKYp++SbqlXnlkLD+XOohpw6+f67rGNecjNc/OwCcfXu7PZmFhAFkwC39hUm7l0=,iv:lyinQFzoXo37Zs1QtbM1Jla+KRSMSUcph7bIR6/X1nw=,tag:nFfq7KtaTwZkQ9joCUOE2w==,type:str]",
|
||||
"unencrypted_suffix": "_unencrypted",
|
||||
"version": "3.10.2"
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1 @@
|
||||
../../../users/admin
|
||||
@@ -0,0 +1,4 @@
|
||||
{
|
||||
"publickey": "age1qm0p4vf9jvcnn43s6l4prk8zn6cx0ep9gzvevxecv729xz540v8qa742eg",
|
||||
"type": "age"
|
||||
}
|
||||
@@ -0,0 +1 @@
|
||||
../../../../../../sops/machines/machine1
|
||||
@@ -0,0 +1,19 @@
|
||||
{
|
||||
"data": "ENC[AES256_GCM,data:H+qlkDBtl4xYPz7X56gamUTQfuDE5+m/BkDPK9963A9F,iv:paPRFOnxW8aSt/NYg4afp+M+8svt0j4G6mZaJms/c7o=,tag:9uxvemHUr6Jm0MzvSf9cxQ==,type:str]",
|
||||
"sops": {
|
||||
"age": [
|
||||
{
|
||||
"recipient": "age1numxr6m52fxrm9a7sdw4vdpkp463mm8qtuf5d0p0jde04wydfgtscwdx78",
|
||||
"enc": "-----BEGIN AGE ENCRYPTED FILE-----\nYWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSBBM1ZUaEwwOGVFSVF0dlRX\nQzdMRjJFZU5kSTNDTzVrZ1JzNHdROXpGNlI0CkZXQ0JkalloT2xNNEh5ZG5NT2Vn\nVlFKZTV6aTYwNmF2bVpHM01YdUtLeGMKLS0tIDRCVDZrNk5UdkVOODdnZFptbDlZ\nVnNNMGc4U0FvOHlsU2dwTE03TUtyL1kKoikATM2w95ca39e7K9mOezI9z4hSgUAA\nUiq4GH9Sn4jtuew2FXK2ZqedzgSQuSOob8iaYBUMGdWL7bfn2gZsHQ==\n-----END AGE ENCRYPTED FILE-----\n"
|
||||
},
|
||||
{
|
||||
"recipient": "age1qm0p4vf9jvcnn43s6l4prk8zn6cx0ep9gzvevxecv729xz540v8qa742eg",
|
||||
"enc": "-----BEGIN AGE ENCRYPTED FILE-----\nYWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSBNMUQxZmxrZDR0OU0vd2tl\ndjlCNEFSeTVFRFRxa01xNXhRTDdFRWJtQkhFCitPV2hDNG9uWXE0a0gyK3NuSnc3\nblBKVjNkMzkyWHVKMzBKV1NzdFVsbXMKLS0tIDdkMTVrSk9pcjhlWkVISWNoQVU4\nTm1sQVlDbUVObFIrRkNObG5nMUdWVE0KWwIpPYL8HwcbUDUS6TG4DyJgAz+xz9lA\nLLRYARWF0zNJlOTHAIEksT14YSU3dplMxunwas2mLBjfYH0DIg2Org==\n-----END AGE ENCRYPTED FILE-----\n"
|
||||
}
|
||||
],
|
||||
"lastmodified": "2025-07-31T13:20:56Z",
|
||||
"mac": "ENC[AES256_GCM,data:vN/zqTJO+szaDV1aJc/ZYAbe0ISc7dJvQg8prjVkYIZA+aAh5Hk2Kec6iz+gH23CqapYcsEYJ/qd24eRrSsxPSJuvYWJAgyrPasNXNuGNrsVnkvTyClTUGiBbhoac6yHu66BHVpH15BJUgeSmO7iVD7HD+KPy0p66agJwWQEcn0=,iv:GLRGWh131n5otLJjBGElmMGxGOL1bljGZ8zzTE/fLn4=,tag:a65wuRUzsBtO2rO+hJN0RA==,type:str]",
|
||||
"unencrypted_suffix": "_unencrypted",
|
||||
"version": "3.10.2"
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1 @@
|
||||
../../../../../../sops/users/admin
|
||||
@@ -0,0 +1 @@
|
||||
../../../../../../sops/machines/machine1
|
||||
@@ -0,0 +1,19 @@
|
||||
{
|
||||
"data": "ENC[AES256_GCM,data:F5FWqPeOBJvuwzuomHd4IkWgGeBBmYmgSVeH/TSJKKG4BHAwgam8dT1oTWiKEd/SfLj6LY33N+s4fKqe3bdCCa5/CPC3P7ZP1RW0/fyAdftlXFpbmXjsFf1rsTcTPMPBxOdwbu6+w6ji16sEkKXsN/GQsk6bhT7gxEgy4ZXY1kiiE08TtIG8p6aIcES/sTXhNqvZvWkJeESh1pkVhfP0OSWp0x05RPvxg2FoU1IMcqmySZwL/Ltqp4I3pKNHmLzuTFfFmK10fC9OptBbPmPtp75k0pJcQ0xbzJAyMM1riGzhOOMO1Qj/DXcSPDVugrMYDZRpEqNGg+LPBuCF1l2e7UitPLY4/1g669lCAPR3stiSW3S2NZIsDsW/ubN2qYeMBSBM8NWpQPgcsrKELCgUExD3MFMO9G0Rl3sU1BsYw+jf+kZNZWDL/w9W6vYt73q3Nmu2DfPpGzwKLrnbBpqjgMJKev/9JG/gMejbMe5AK1leyr5TDi97ku3zTvQfur4YMWABiBswmIbgbjBZAJOIKzy10AJTFQbqNoGGaOz22i2RBJLVHU7CKoKyJL99Iun0MjSlIIv5hTZMXDnHPZxvLq4w0htA5U2VJNKg04YKaPpc5ceDLVt9kgEF6OKG7/5nwDoQ/cgjrwbzl85sWpWQgXxwpSDPN8A0rkyZkgxNch/tUASgUoOExJRd6ht0iJiFM3tW25z8NU1ZWijs287hCkXNac63ZnXCa/2gB2zj+iHHLzgGvsxfhFiM2smjC0CSNbteTYr3sgZszrJSDdyI3Aerx92UwN336Iu0pXAvAGh8SBLBdz1CcmAlldK08Z8tFxplqOaqCQ8AtuklfQQeh8/Lx0VW09HIGkjQp88xotW4bZ803rPb7pqJNTzQmSF/4JWtIJXEG8eRbyHXpTM9p9RYvlY7/yK75EH/vwq6eAu6Cm2AwUxLjqMv9uoAad0gij66DflQ93D3hOfJaF+yNLa+HTce/p0Ymqeo/YM3/UUQ/ZHVbPonULXkylYs8JFFVkf3DBJTGYB85UCZ/ObNyXFtk/GuzQ==,iv:zV2Wm02uMdGmdKR0qiYnTdTlyRec85wXwUJOE3mAeWo=,tag:8fw+x1O1/53ff0qOuoph/A==,type:str]",
|
||||
"sops": {
|
||||
"age": [
|
||||
{
|
||||
"recipient": "age1numxr6m52fxrm9a7sdw4vdpkp463mm8qtuf5d0p0jde04wydfgtscwdx78",
|
||||
"enc": "-----BEGIN AGE ENCRYPTED FILE-----\nYWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSBvbTg5alA5SzJoUTZRSFM0\ndk1XaTlkOXo3NVNxYUJCc1FWWG16Q3IwMERnCkU1S1hlT3ZxcTRhaWs4c1BaVkNi\ndEJ0dTNTV1MxN0QrV2dJUzUvRTlsV3MKLS0tIExSb20zTDhuVExUMG9JZGd2bG00\nL0NTanIwZE92N1IxNWR5NW5PTktpK0EKsBMTH1ln0H/dK2AgUBWiGtdcY2ujvu1H\nTR+X3MZfFHQdOIvW4dkhUJt7BnZ5cEOCUizE5oI1+DifjDqk1dd+VQ==\n-----END AGE ENCRYPTED FILE-----\n"
|
||||
},
|
||||
{
|
||||
"recipient": "age1qm0p4vf9jvcnn43s6l4prk8zn6cx0ep9gzvevxecv729xz540v8qa742eg",
|
||||
"enc": "-----BEGIN AGE ENCRYPTED FILE-----\nYWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSAzNHJsaElGRWc3ZTQrQlBs\nb0NLMmJUNzhyOHFielFZa1VnRDcwQ2JaREU0CnhrR3hGVXF3SnpRZWE5dlc5VFVV\nT1A4SGNoT2s0MU1jdHJ5U0ZvK3l0emcKLS0tIDZULzBXZ294NXBSRVJVRHlwUkpr\nWi90bUpNQlAremhpaWZkVXpRcE5yL1UKp5KUA5g/trDODHcXI7SWjbWR/ozXpxRp\ngHbONszf4y4hjkHOCVvtQ2NmR/cF64nhCswZYlfssUb4aJBVrMlVEw==\n-----END AGE ENCRYPTED FILE-----\n"
|
||||
}
|
||||
],
|
||||
"lastmodified": "2025-07-31T13:20:56Z",
|
||||
"mac": "ENC[AES256_GCM,data:3l51gIfAbCnYvWjp6meJGsWQJPI7x4yswFe3WPLGtDJMPrRdOWW5UtU2Qb/rcoKGoSee5OJblOGvkloWTp++HebS2TxKEQ26qU9ycOq27vFt6mfmu4IVcmTqoVM3BPRh9md590lqdLyIsy/BULFBhe0jGBUQbXfDkfewronDOdU=,iv:JkVkjT9G5Z2+u/ZYDaoM8sRk5cBVWYR8fm48Gwfbr/Y=,tag:L9xZSAP3zQZfy6malNIFDw==,type:str]",
|
||||
"unencrypted_suffix": "_unencrypted",
|
||||
"version": "3.10.2"
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1 @@
|
||||
../../../../../../sops/users/admin
|
||||
@@ -0,0 +1 @@
|
||||
LJOGYGS-RQPWIHV-HD4B3GK-JZPVPK6-VI3IAY5-CWQWIXK-NJSQMFH-KXHOHA4
|
||||
@@ -0,0 +1 @@
|
||||
../../../../../../sops/machines/machine1
|
||||
@@ -0,0 +1,19 @@
|
||||
{
|
||||
"data": "ENC[AES256_GCM,data:oYhwmtmsMpBKoUnC2sze5M7Qx3y9+ukpt7apmicJJOaDNliI8+NrrYntl56X70U4i+DN3Iz7qwPGGBxjveV0z7kIUEvlDiu3GgQmC/34omktyogb8dH/POoO+Z/E1CxKJEIn+rEU6Oqhg4aPKt/JbveW5wlevlQsMwW1oAtS701JJeY6KU6AfDerU0cS4K5+xw+dFg28pIvIIe02uDzTnFEXZYzvPk7UxfOE0IFxs6/zzPFoBbeFo2WIZZBMQYzP4AQMEiFxxaK7qthBwGuCEc7yMpW1uwVe9CZBfkVkg+wIX4DSm+0j8TiyikobsBxkvPUgqYHNyQaINJp82aGPOzFLsJ6ixf4RqgOzPikzdsyU8phi7waiDPDLf2rhZmnR,iv:9zhjW5wBlNtCxV1kBqzUlZaVRjAbr9LpxypJaIk4RPw=,tag:u1qxkOdHIqkelp/bchb4bg==,type:str]",
|
||||
"sops": {
|
||||
"age": [
|
||||
{
|
||||
"recipient": "age1numxr6m52fxrm9a7sdw4vdpkp463mm8qtuf5d0p0jde04wydfgtscwdx78",
|
||||
"enc": "-----BEGIN AGE ENCRYPTED FILE-----\nYWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSBFZGxnQXhHUWhONitsWnRk\nbSsvSHRYbHJLc1BjOGgyb3Fyem5VeXk0MENRCkpkdGVmb01rK2FCV25Oc1JGY3U4\nK0xIb0UvTCsrV3lDRkIwZmhscDN4TUkKLS0tIG1GOTFPZTlvcXVOMElHYWN5ZFBU\nbjB4Y3FmY2ZMMmZ6cUFlZ0NUL2c3Z2MK+Fn2tf9eX6Iy6GMsa5//fzKDAUUCFa4x\neeCStPrxTbmKWb4T2NxXrZYK73kvOAmEQenUasddZ24/SDhnnrlXbw==\n-----END AGE ENCRYPTED FILE-----\n"
|
||||
},
|
||||
{
|
||||
"recipient": "age1qm0p4vf9jvcnn43s6l4prk8zn6cx0ep9gzvevxecv729xz540v8qa742eg",
|
||||
"enc": "-----BEGIN AGE ENCRYPTED FILE-----\nYWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSB0N0V1Q1l1NS9JK1R1RG8z\nVE9rRnVqQW9mRGNvWHViZWlKS21qSkF4RENFCjl0OU1rMUFHUHVDR3NjSXc1Tnpr\ndE1HUGxQYXMzWW11Q2tlNUdjdXpqdk0KLS0tIGtDZGJpQ3FrbnFZVDJuYjN6bFFN\nZGZqUFNKMmpFSUd5QXFtSFVSWFlpelUKFzm5m/+ReOVDHpvgqAKs5Vwq4XVCPH0K\nzmxtSIAwey9lUxnWNkuKOUG07o89ACsVe6pVPLLqHpGLotvne68GRw==\n-----END AGE ENCRYPTED FILE-----\n"
|
||||
}
|
||||
],
|
||||
"lastmodified": "2025-07-31T13:20:56Z",
|
||||
"mac": "ENC[AES256_GCM,data:k92YniWJ8U75G0YJ8yt/Xgi5NtyeoJrSNCCMd+Znj9/JL8mWwevJ9aeR387TK7OjVyKp8TOXTSky3bOqZbr5BTHUQDRr0LlQDO6ozmN3CVOdKpK31hfIotTHNRCUcpq+CoEB4VyjeAAdnzCkvyhckZ/L/CtQpJkSGI02M+mHcwE=,iv:cG5Q75Y0zeGj6EezNY1rRD0ZLtzIxkcKKsSHV8Og1aM=,tag:dsSFoW/XzoeOHmH5sqJzzg==,type:str]",
|
||||
"unencrypted_suffix": "_unencrypted",
|
||||
"version": "3.10.2"
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1 @@
|
||||
../../../../../../sops/users/admin
|
||||
@@ -0,0 +1 @@
|
||||
../../../../../../sops/machines/machine2
|
||||
@@ -0,0 +1,19 @@
|
||||
{
|
||||
"data": "ENC[AES256_GCM,data:ZMxZz2bWSw4lKwXp+kdeblLJxpLxdWyZ9ZPBMkZeZGmf,iv:Dnpmvt8YomnwKeJ9ULWy8iAw9OspZ99glCYDtr2Ymkw=,tag:aBHwYdO8lZmtb8PbjCEeKA==,type:str]",
|
||||
"sops": {
|
||||
"age": [
|
||||
{
|
||||
"recipient": "age1aqng9vmlgth5aucu5ty2wa0kk9tvk7erj4s07hq03s6emu72fgxsqkrqql",
|
||||
"enc": "-----BEGIN AGE ENCRYPTED FILE-----\nYWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSBHSWh4aDdUVnowNDBGRllw\nWitjU1pKQkhSNTdmMjZOUkJ6a1UwY2FkWEhzCjlIeGJvV2JpeTBXcVFCQzY2MXJC\nVlFsY3Uzd3R3cEJuVlZYYXd3dXkxSUEKLS0tIHpnT2Jjb1EzNUpXVlhseTBSdDd6\nYmorNlo3bXY4OGl3WXJQZ2dHbHlUVFUKdVPZd/2QtR/ELpf+vy5sXdGC0tS1N4uE\n5zsRpMBWQptheOUF0tNZAbw264gbYX/fece/myTJLISAPM9wZQd7ug==\n-----END AGE ENCRYPTED FILE-----\n"
|
||||
},
|
||||
{
|
||||
"recipient": "age1qm0p4vf9jvcnn43s6l4prk8zn6cx0ep9gzvevxecv729xz540v8qa742eg",
|
||||
"enc": "-----BEGIN AGE ENCRYPTED FILE-----\nYWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSA5YzdiaDBxYURVcTVLQkF6\nMEVDd04wSXhYV1hKWXlhYVJVaUJtYUxmRGwwClU1SXNpUzJQNXVTTGQ2d2x0QmRV\ndWgyL2hMR2pTZWE5bGsvWmVYWFY5akkKLS0tICtLRHFuVjBlUldZQ2FCWTAza0Yy\nMWRSQTd4WWNjcGtYT1RqcS9ybWp1ZWMKidHF/OqLKVYWo02zHnSLm8BEN+qyJ2+i\nBqg62OqGLt8pbE/r1bxnxia4ZCgfc+xKtsWuARKq/BUBrKlMrBZGEA==\n-----END AGE ENCRYPTED FILE-----\n"
|
||||
}
|
||||
],
|
||||
"lastmodified": "2025-07-31T13:21:08Z",
|
||||
"mac": "ENC[AES256_GCM,data:JInxA9c2UQBDFzetaHU8KYNnp1JS3eBPyAMjsP6sNwOsRJBY8enckQANvhXF4Mo6eksx9RUhdcqd5zqO/uupnqV4AuARxwI8Sq3ak1r1fnH4uzOC/wV5hWIHdOfKrePuE1+ayo/mR0xqM3ipxuUfJ/cM0ktYrTvJypR1brHkJyg=,iv:Qs1M3UMUOUuPVG564xJAJAhRsj7GGrs+jqNbFjupGDg=,tag:WjTJT3N7FkVmdv2giNVECQ==,type:str]",
|
||||
"unencrypted_suffix": "_unencrypted",
|
||||
"version": "3.10.2"
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1 @@
|
||||
../../../../../../sops/users/admin
|
||||
@@ -0,0 +1 @@
|
||||
../../../../../../sops/machines/machine2
|
||||
@@ -0,0 +1,19 @@
|
||||
{
|
||||
"data": "ENC[AES256_GCM,data:AiUO8delOdyU9pl1q9DuSegdy/MMh8q30M1hXuinpKVC9Wdtw/NFUVfKSM6wX5d3hkBnZ+BmG75bMIkZXXJ6Cl3Q7l3M60HcBrDQaUYJ3tfuO5dJGSi9Ab5RtnWKmaA/5wtb71sM08PbmmkPXnVI2KgXwX23VnNPdlMXFRYdWuzbTD7U+S7BPKiINjMCyLlMlNfw7YidGufwCkYb6YbpJ+Z6EkfArkH6pGKlB38vaz0rSiVSBa4fxzD4rgx2lh98yJ8VC80O/PdpvYhWub1aHpK1j6gq6InptvLNIyUh0i/9YXHbjpXc/D7hhphF/tA0JUOFMyN0Ses8cvBPHLOMU3uM82aYk9lBfnn74vKsNk5IWeiPskzN6dwmHr6yJHQmDiFp9ll7Ujf9GU5DzJhQsVUFf1xb2wq05YMomEpDdDo0xz35L/w8hFQg5kKLDvzKCiDQuvYz4KdaDZqobZbJ+ICNx0dZHlBg6NGvYK3Q1TpNkR6qHDnOGYC3dbZzeskuLDnD6VScPrLvf0duOrNwE2QP5Eqtk/5r+yP4SJzNK9UMZhYcXIF2koel6dxhlEmJMBLryR/56+GFIWOHIebKUGJhtumj0cl21QAcZgeQ42oy6mza+K17u+SpRwb+Vl+d10zSauxV6jqHE/qec4ZxKyFJH1TDR4R3FcqY3Xb2323r0Lx3Z6h3QSuXaoSz3lZj1FnXNh5x4Xctb87cNd6u/woxL3X105OwMOxaI4SIt81WWSA7do9WFzfNjOUahyullq5RWNOapLPAmMjuSdJBeImFEbKPUst25vgkzs3x4PZkS02s6R+o3h3PoeTZPDIpNIv6Ye6sRy0txhF4nBmHKFNjoClxaDG6esdDlsyu+eyZKRy6/Hvfe/iimlyb+iQMLlmTLoES9B7J0F1JAwJhetdnfoa3pbgZsehb2b0BmqgHbytgA/nOlNWcbae+aX8HCj4Ju0RnYwObbjaVqHQoST/LjnnVJCo1y0UZ4CnOg7MNEKrtxRb3BV8s9Xp3BR3UPFN/gKWBGh/xyHoR3ZEJ0mxt3vjdymqMtBQ=,iv:4slGBU5rnFPaiFXr6Nzuk54ku76XL9+cFOF3IPWbZn4=,tag:+XOCNKwG2PqzY/PTzruouA==,type:str]",
|
||||
"sops": {
|
||||
"age": [
|
||||
{
|
||||
"recipient": "age1aqng9vmlgth5aucu5ty2wa0kk9tvk7erj4s07hq03s6emu72fgxsqkrqql",
|
||||
"enc": "-----BEGIN AGE ENCRYPTED FILE-----\nYWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSBYbVZ0c2tqSEZJNkxTbGpC\nd1U3YVQwQnhmY21pY0R5SnJHL0lVZC81SFFzCjEyV2VwQng5ejc2L0F0ZUFrWDB6\nY1k0blJQbUhRQ0kweGM5Ty9uQXVGS0EKLS0tIG96S1NOQWlidXZORDlrYWZWcFUy\nb2RLa2pKNEhkSFlwdVFMRXlEOStkNVkKYahEQPOfncD2TGeT1JNPWCOG0ScUCJ0K\n2cYRCBxayd/ssfyEL3jV+TVSxFnKrAhtGJzP2xEAZFYJsHoLG+56sQ==\n-----END AGE ENCRYPTED FILE-----\n"
|
||||
},
|
||||
{
|
||||
"recipient": "age1qm0p4vf9jvcnn43s6l4prk8zn6cx0ep9gzvevxecv729xz540v8qa742eg",
|
||||
"enc": "-----BEGIN AGE ENCRYPTED FILE-----\nYWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSBQcWdtYmpwYmJQV3BNL01x\nY0FURkczRjBKYWlzOFplcDZ4cktGMTNMN0VVCm5FQmorTzRMUzVSd2Y4VS94R050\nR0V5MXUvODh2Q0IxeU9RelJLTUZoWmsKLS0tIFFjT1pyS3NwWi9OY24xTytLN3NZ\nUDdqUEp4ek5tTGxvdFU4SGduR1VBeUkKX013r6b+KL1i3glEcpwgv/KzBEE9N9sj\nqgyZ3S5dhCY1LOquP/xnS7kqZzN+znv61F7577wV1pBy53KPOhl47Q==\n-----END AGE ENCRYPTED FILE-----\n"
|
||||
}
|
||||
],
|
||||
"lastmodified": "2025-07-31T13:21:08Z",
|
||||
"mac": "ENC[AES256_GCM,data:Oiq0mU3K+zgaHhdlniCGgx22Sa1tEOmO+ddfDdD2IsMm1c9vAI4O+/5PsF/DE4h0WoORJsn88qfzPDva90YUJKQAJsflMHNk8jjExGhtVLVjxkpZk2EtpBzfg311riYzKl5vSJxLqp67Ks2dHyl8qHEH71dSgkXxVMIhPDUmJCg=,iv:B+Hi7qoRbBT53wH3cBSLoQocdXCcfKenkwnErTweMwY=,tag:88c4zp8a6WNfnJkRZvNFqw==,type:str]",
|
||||
"unencrypted_suffix": "_unencrypted",
|
||||
"version": "3.10.2"
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1 @@
|
||||
../../../../../../sops/users/admin
|
||||
@@ -0,0 +1 @@
|
||||
K2FAKIB-HUUM7M7-CUPDN5Y-NORZCJP-F43EFQY-UPV4AUO-GAI3VZW-PH5TRQX
|
||||
@@ -0,0 +1 @@
|
||||
../../../../../../sops/machines/machine2
|
||||
@@ -0,0 +1,19 @@
|
||||
{
|
||||
"data": "ENC[AES256_GCM,data:9U+BF9rhGbNCokJoZEl347m1jJm9n+JOJFXSVrqYM+r2A7NPTDGBKIrnVpGdOM+8VmOIzAIROGb1h+wgQoHbr98YtVRx88D0n9bA7ZtSCNiACXzgTl/swaqYCPgJ5FtPvylNagK92SDwjpVSV7jfLA0kdE49HESg9dWIUlPS2CoORhW+FY4wynXdMEsgZmeRgtYFMubqshX/Ep07qnF9HjQmmDy5XKQ4uEm3HydlZOZvW6USwyRYIcqH8x5p1xzif5zYgUsu1iHAmbhtqzULJoe2rzuM3+7Av0LeGcQpXe3gkX/3+aA/SgGN5mJOtLalcwClow5HH0atlOopgy4523Z+jANvc4XwExpJbSYeJuqUTV20q1X16Kyph1O+XYOW,iv:dS7CcP6oPfdByqIW3sz4AUm2BfVInFmDDkxxbGhySmc=,tag:T5ZbP2hDaKNR/c2J/mlS6g==,type:str]",
|
||||
"sops": {
|
||||
"age": [
|
||||
{
|
||||
"recipient": "age1aqng9vmlgth5aucu5ty2wa0kk9tvk7erj4s07hq03s6emu72fgxsqkrqql",
|
||||
"enc": "-----BEGIN AGE ENCRYPTED FILE-----\nYWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSBvcDZLcXpaODVma1RUNHE0\nMElMRkpRSWJwa2Q3dnFEQ0J2a1JpblZuWVFJCk1tUTV1U3VpYUJqajZKd3l3OHNE\nWDk4TzhQSlluOGdnNlcrUTBxaXJrRVEKLS0tIHdiOVhRTHI1emE5TnMxbEZJdm1W\nR2NxMVRhQ0xDT2o0cE1POXRQT2ZwMmMK+3x5+5pVrGPILDpCpqPKuVX5emmfDvFt\n2HjLIwy/2mWcsktXh2gk8KAD4WWL/JjatTuP+EP5zoOFcL5/U3S5Pg==\n-----END AGE ENCRYPTED FILE-----\n"
|
||||
},
|
||||
{
|
||||
"recipient": "age1qm0p4vf9jvcnn43s6l4prk8zn6cx0ep9gzvevxecv729xz540v8qa742eg",
|
||||
"enc": "-----BEGIN AGE ENCRYPTED FILE-----\nYWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSArS3B3SjQ5ejRLU0xUT245\nWGNkaUErdTQ4ODNqSzY2aVFPeUlabHl2MkNnCnlzcUh0ZjlUcFBXbEJtZ2pDUmNr\nYWRvRG1EbkwrZVE3NlU2cWc4VSswQ3MKLS0tIGYvZFZWOXV3U2hMVU1XOHVCSXdp\nSm1vaC83c01TZms3Z0NqMHhqbzRNUGMKKiRAy58NZDz5rjNA3xYv8/dU4H5L0fo/\nQc3WsEXIIwnVHO0X6WN+qUpF8uTPAZW26wecspdsf/5NOJtM7o8+EA==\n-----END AGE ENCRYPTED FILE-----\n"
|
||||
}
|
||||
],
|
||||
"lastmodified": "2025-07-31T13:21:08Z",
|
||||
"mac": "ENC[AES256_GCM,data:QGID6lY6g1qvYXhtXrlNA3E+IjJZRKiB8+CDDLYGwx5vtX32M/WNVfzEs8bUdUDEJTUnfFI6tBHjFbKUVNFmbH2utZ3aiUGVJadVUzuWQf3HO/McyPwRMm47gt29U0iagUTSdVbzSROwdwCfXfZeyJUPXXNZgyA5LqL8puPS+jo=,iv:kZzMQQ+1shjScxxmMVSqK5ckXckgCtxbqa8uBZ9RTZI=,tag:do0RhBzfHmFZc8L90WXTqQ==,type:str]",
|
||||
"unencrypted_suffix": "_unencrypted",
|
||||
"version": "3.10.2"
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1 @@
|
||||
../../../../../../sops/users/admin
|
||||
@@ -0,0 +1 @@
|
||||
../../../../../../sops/machines/machine3
|
||||
@@ -0,0 +1,19 @@
|
||||
{
|
||||
"data": "ENC[AES256_GCM,data:1yqn7/zQsDblJjKK5nK9wLQjmvBTmsr4WAkWvjRJ2FLX,iv:F9+nsOQSgbyr/q498oI1wU/KiOsEvSUec/2VC9bzv8E=,tag:tklcKqwd9MmuJany0pg79g==,type:str]",
|
||||
"sops": {
|
||||
"age": [
|
||||
{
|
||||
"recipient": "age1kul02mg50nxccsl38nvma0enrgx454wq0qdefllj4l0adqkllvls5wuhfr",
|
||||
"enc": "-----BEGIN AGE ENCRYPTED FILE-----\nYWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSBMa054akc5ekFzYVBYZk1Z\ndFoxRGxUcGZaNzF6RjlPRTRXZjh6b2xxTzBjCktuV2Q1WnhnM2NMdGRncFhXWHpG\nRHhlU01PVVNUMEF4K2x0NEhreHRKKzAKLS0tIG5YQ0YzaTlxNGtPTURNY3VtNGhO\ndUxPa1BwdXZ0SVJnZmNqYngwaW1CYjQKngYlXRD28yZ0j7WMmACTRCbi7aJ6xNlN\nor7I530+HUv1s067PRwMiBWbNesiAHHhhZjOsnSiyQ/IYr0R77huyg==\n-----END AGE ENCRYPTED FILE-----\n"
|
||||
},
|
||||
{
|
||||
"recipient": "age1qm0p4vf9jvcnn43s6l4prk8zn6cx0ep9gzvevxecv729xz540v8qa742eg",
|
||||
"enc": "-----BEGIN AGE ENCRYPTED FILE-----\nYWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSBBdnhOU2MwY2ZXM0p5Rlg2\nL2FmaDZYT3pIL3llenV1MVZDT0xiVklXWFdFCko2S1BxMFpPc2QxRjExVnhXTVNp\nZStpdis4RDlHcmNobmpNK1FtbFhaSTAKLS0tIHRJblljNTJxS0ZKdWY0dFF1cURP\nZGFDRGZ6ZnozU3NJT3EvSGNEaTlralEKywGQQwjS3bJ2yI8839ycWeknES+r4oMH\ncBG/zSbYTznyMQq2uEhAe74a8zf3pe48EnwPksiCcFLIG5IP1SE6Iw==\n-----END AGE ENCRYPTED FILE-----\n"
|
||||
}
|
||||
],
|
||||
"lastmodified": "2025-07-31T13:21:21Z",
|
||||
"mac": "ENC[AES256_GCM,data:06hwqU0gL2x8brDOB3y69igVBHwIkgUiuxYin6FPsjAQO84qsT8suQVGHmqPxHXrPM9fMvu9/Xqio0w/ar8NElQLmUlTkOads/pb/ykuC5mU/645q1spvV65i4C/spGrIA4XtSaeY9APuQGDr8wiQKdnvuAXSsuVSQvEhTpbBqs=,iv:UguEbHUFqZoV2g7DLeUpI8dd3tdoWhfIDLnj0h5i1Wg=,tag:CJ5/uOYKhleP2N1oTlvdxQ==,type:str]",
|
||||
"unencrypted_suffix": "_unencrypted",
|
||||
"version": "3.10.2"
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1 @@
|
||||
../../../../../../sops/users/admin
|
||||
@@ -0,0 +1 @@
|
||||
../../../../../../sops/machines/machine3
|
||||
@@ -0,0 +1,19 @@
|
||||
{
|
||||
"data": "ENC[AES256_GCM,data:Az/IU5qbln8kN2rkY9MK4dMtCgaIQDjXw3aqbFs/Kjh3wLy4jYM4XkvQBeZPqgmonbGtJ50eDLS14g6ABYCmpQtDPHUAVllMPlKJMiGL1/+YPXGfsQyPgOXmgWA6hFI24ZPEsQYCRwbXoDZLJMkTxUMlMQqZTWsXkICbrIYzyJUjkbK0uaSjqbu7vLt7HVgb6WBoFtdXdP3rD2BVo6LEZUTd4mQW9PB0FPms7WKsVnoWQpKmowentae/2cdx1KCvEmoJfvnAGB09woHDGJWzs3VSrlenPVOu1dN9NUaGI61WRLJT4/h47PBEt1Pw0ec5YK2h1Po0pZa8LaYNQ9ewx9cIXsvWpKgvi73zSe70Q4uWT7xHglZHhncR6wECXIpClpQW4Z0a7CgMujfMexK6Dh8fJZ72hA5ZN5RwoxuAlhN0HujFBMk77w5t1QrulyJs8+UQIqfNhrK0LGQKRn4AnW1R0Hc7bOzeE3C50oKmyU1kDyoDChce9A6fGXQ758kUxOYTZy0095LIEL5Q0OdCujdz30a1NiORuuuiXSCCIAfH1rUMeO49Q1T3QF8IRJScvD2Q81ggqeewjzt83d8KbCHWOtPiaAAWzNOHPS7N/x9efTqZmf8YxajnSeQDbqqyRayjFPs5AaTyk1pqFiIRKYH9cKJeEXr/CY6j+ykgDXDksAvg3fVXikdt4E5O/HaWxU55CJZmBfshdC8GWEetRtl6pfA5poTT4L79B6edF6TxtX4R9Js322UTTKxOTJ8fkx/0grXUa+KxUreb+jbN826AyL7kWr5Zt2clXbCFetjASoJn+tMQKeUmE8xe2V4qibTmPlkzuhUIVdVG7G8xg8w31lBSErF9/M4AaaWba93s6GCgqdJ5T7Duf9DnfaiQr011eCmKpfJxsUTGm1Gs55+YIDQZEraOgLr9BWjBGy1gCwQIhZtY/udKzFXvqB/KBr2/mzXNzPNmnrcElKkjxpcHF0RkJ5JGo630k2AOab3+KJzHoRFPI8/VzR2dh3rHQ7SjN2cdiJiGO18sD9Xc+9Me6jgVLrMf8NY=,iv:TIaxmvvQUzzM1hLdb2caQJbTTfgeea1LijeGa/5Jid8=,tag:e77+V1Ok2qNkI3MdUntqIw==,type:str]",
|
||||
"sops": {
|
||||
"age": [
|
||||
{
|
||||
"recipient": "age1kul02mg50nxccsl38nvma0enrgx454wq0qdefllj4l0adqkllvls5wuhfr",
|
||||
"enc": "-----BEGIN AGE ENCRYPTED FILE-----\nYWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSAwaGRjSEo3K2xKZE9HSDVX\nU3lRWWlnbm5QcEx6VG5nS1RyUmUxcnBPL0Y0ClE3VWZjclJybEJCZi9NdVJqeTBG\nM0hhV1FPdzVBT0NLRGZvdFdDMFNYN3MKLS0tIForRURHM1NrU2M3ZnhJb0FFWmwy\nUWo4QmJBS293aWtyRHQ3bTR5RWFpZWMKoGjITft9CoH3cVRXRGx02PTFFQMN8bPU\ngxNLcbWyLVSUvUZE1xYiVMh5aGMLlMoGwgwJBUAb/Cm744Yh6gz5Hg==\n-----END AGE ENCRYPTED FILE-----\n"
|
||||
},
|
||||
{
|
||||
"recipient": "age1qm0p4vf9jvcnn43s6l4prk8zn6cx0ep9gzvevxecv729xz540v8qa742eg",
|
||||
"enc": "-----BEGIN AGE ENCRYPTED FILE-----\nYWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSA5RkZxY0E1N0FOd2x4ZFlv\nZW5PbmIwZk9uS200N3N2SS9pU0ZzaU1ibUVVClRYRWlQbDRZNXVjZlp5cXpJRmh6\nUllUT3llVjRHSEpTNC94WjRFeTlIWUEKLS0tIEMvZ2M2VDByeGtpaUp6UFdtSWZC\naGFId2w1OXg4bnd0bFlCNUtSZDJlVTAKFLc3mW7y7M+HOO5TIEPPxznqXkWvUWEl\nVy8DA+hZAAsoRfGseFTD7ny9qJOLwnmpmW2m5dZTuPcRAFk/29DrGw==\n-----END AGE ENCRYPTED FILE-----\n"
|
||||
}
|
||||
],
|
||||
"lastmodified": "2025-07-31T13:21:21Z",
|
||||
"mac": "ENC[AES256_GCM,data:Y+XjgRgXHWrLZlmes1j2C7OLXxttm24aNTUUf7voSCUK/z3iIrqhsRs4OZfpLs1JyHNPYMhRu5kT1cjIQfZPwSNWPfw0QVvMGpgoNV8MGQW+G9mUxIvEy+F5yRGXiMd5bybyDgitADHG/6OxBd43IoEfkCpMs06yaEAievk+P9s=,iv:iZ9jP0cNmwHD/EViXzzmM8/6xIY+T2q+aMnQRH/JC1E=,tag:vT9/XUbuq7pltl5d1zLaew==,type:str]",
|
||||
"unencrypted_suffix": "_unencrypted",
|
||||
"version": "3.10.2"
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1 @@
|
||||
../../../../../../sops/users/admin
|
||||
@@ -0,0 +1 @@
|
||||
UWOQP5L-DE3OCK6-BIQLRGV-JFZ7JTJ-VUYUFDN-F3PJNH4-VS2AJKR-PQJKQQD
|
||||
@@ -0,0 +1 @@
|
||||
../../../../../../sops/machines/machine3
|
||||
@@ -0,0 +1,19 @@
|
||||
{
|
||||
"data": "ENC[AES256_GCM,data:pzKV11EWtow3USkwNwSO0qLAijfG8UbeVvDDkQJqd/S/lWF5gBTt+33uPTsBj2A0tyVglwHrGIuPl/WSxZqt4jIKogjeWrODzN6YCQC8I9/3JgCaCYKlDM2b4GTZxzC35YXD0CBmCcpGUgAvP3Awr5ga4SmAQLVHSR4MHODODRa2cj4TsReLLIf2JgFd1m4Jd6RS9YrWYBSTOp3btFzdJSISqKAHcIcEzMbb2uS8tBqmoWpG56snrzmchrBifB5jGe2RgN/HLN6+gp8p1MStEVWSBbmGszDtUH4k2hMKYpY+HEqHL2onbc+LIOo4ntpXagwD034Iw/mEfmGkgrf+AmnX8Gm6j/6CW8ILlypRYAvDucKvKRNOnWWrkR6XmiPq,iv:Zp/B5it4q49i3B1XT1F5II8Ajc9xjGpJ35SJQtLsr60=,tag:tqc3YsaNIPrvTR/6AVH8ww==,type:str]",
|
||||
"sops": {
|
||||
"age": [
|
||||
{
|
||||
"recipient": "age1kul02mg50nxccsl38nvma0enrgx454wq0qdefllj4l0adqkllvls5wuhfr",
|
||||
"enc": "-----BEGIN AGE ENCRYPTED FILE-----\nYWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSA4SVJ4S1gvQjJYVTJjTVk4\nUkRrTXBhekhDUWlZQmtJWHJyQkk0Q1Nxcmh3CjdnazFxdmZGaEoxRCtkVnRwOGFW\nMnMzSHZOWUF3aWtmTGk3cTNlWjlpNkEKLS0tIE95VVFDQjRTRTdFNUNud2g5c21z\neHBHeHVMUTYvOHdFVW9kVWtSeFpodzQKXndWkTpSrJbzUpHStyWb4Q5p4p9ASjTc\nGQ4wcd3RLjsox/LfFw7NPXuRLQh7kRFgWxx3akjzeaVCbNOg7N2fKg==\n-----END AGE ENCRYPTED FILE-----\n"
|
||||
},
|
||||
{
|
||||
"recipient": "age1qm0p4vf9jvcnn43s6l4prk8zn6cx0ep9gzvevxecv729xz540v8qa742eg",
|
||||
"enc": "-----BEGIN AGE ENCRYPTED FILE-----\nYWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSBuQjBmeFAwQmNiVGhUTmtk\neDhaNTBaRmMyNHdaRVVJanorWE1Mc3kwVDFzCkxEMTFMRjdxYytLRk9HaWlaaTJv\nYTR5ZXRvMllYWHRDTlNXWFZlaXBwRmsKLS0tIDkxRVI3bXdKaXNMUGNWNTBLcVpw\nSXc4WHZDOUg0dFlDZnE3Mi9uMGFMcWMKsdxeoutdG3nwU24n2/qy9oTkhOueXKJ1\nbEuSY7OsQWp2tc6bVwx05G/R9bI9sYtA00FjJLLIR0Xk9M3ngor5/w==\n-----END AGE ENCRYPTED FILE-----\n"
|
||||
}
|
||||
],
|
||||
"lastmodified": "2025-07-31T13:21:21Z",
|
||||
"mac": "ENC[AES256_GCM,data:GKWyUqU/FeOQ3um7h+82ZYh4aylxII+li8F55iRvFXRLK4A8J6z37AiDI7TOxqQ2XZIh6b8HZA0BpFN6HlkL+q4DpiuV8eTmke+hLq4H74M/m3Kgv4rQtrBupk2OUdXdsUu6ERJiIxW7XKvAhX6Hvm6GEDSSttbba5L5esi/xQg=,iv:qIy/MQXUpTzcDF6VvY7ERUDLs2XJUK/dNxTIFgfu9Wc=,tag:D2ocRXrxvEtTHX/t85EtpA==,type:str]",
|
||||
"unencrypted_suffix": "_unencrypted",
|
||||
"version": "3.10.2"
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1 @@
|
||||
../../../../../../sops/users/admin
|
||||
@@ -0,0 +1 @@
|
||||
../../../../../../sops/machines/machine4
|
||||
@@ -0,0 +1,19 @@
|
||||
{
|
||||
"data": "ENC[AES256_GCM,data:5Bb2+eeTSoCvrJUFHeEL/ugPADUqkgxzBebyDHrg8v6G,iv:1TWwRiIm0u+zrydC+mlz51Dtz59FfX2fi6QjjO9+jdA=,tag:mkLCH2kbShRc6RdtAFaFtA==,type:str]",
|
||||
"sops": {
|
||||
"age": [
|
||||
{
|
||||
"recipient": "age1kqgx4elusxx4u8409gml5z6tvrsayqsphewsl93mtqn7pl2p5dwq9lujpj",
|
||||
"enc": "-----BEGIN AGE ENCRYPTED FILE-----\nYWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSBUam1sYWxDT3FQZy9ZNlln\nc0o4ZkdVVG1zNlRDbVpxdUJ6NFlUVzZsc1VnCjlobzF0dmtUZlp2U1IwdDdMMlVk\nVUt0ZFc3Ty9UQVNTb2dJc01ZbXFlUEEKLS0tIEVGcnNoSXd5b2NoS1hZNWEyMGdM\nZXgrNGhFMGp5LzlrdE4yOG15b0g3NVUKX46oVCCn8eQ60raWVtxRMVazQUJaPD5Q\n0e95w2IXfcN7YPn+CdIcWEoWJ5dbmGQ1DB9VBWrZQONdi1SHzyHx8g==\n-----END AGE ENCRYPTED FILE-----\n"
|
||||
},
|
||||
{
|
||||
"recipient": "age1qm0p4vf9jvcnn43s6l4prk8zn6cx0ep9gzvevxecv729xz540v8qa742eg",
|
||||
"enc": "-----BEGIN AGE ENCRYPTED FILE-----\nYWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSB6QUZuWG1Ga1JpUUszVmFI\nL1BxTXk0M0hKLzdKVUQ5Tnp3Y2t1bnEreFRFClcxVmFvTWJQNVg4YkxXa0RmcHFk\nNW4yYTRRWFpJeFhTSVc4eEJzSFNFRFUKLS0tIGlhenZuRFcyamZNQmIya3BEWGhu\nckhsUE1QS2tFTUNUcFBqclN2Z3A1SGsK7e3Eoc7J0P5pyn4dqQAn67iBfcgokdNr\nSJqoyukHst9ZDJ04+n19mr02njxMz4MTt1Qw76q9s2sadU8iHsHnog==\n-----END AGE ENCRYPTED FILE-----\n"
|
||||
}
|
||||
],
|
||||
"lastmodified": "2025-07-31T13:21:34Z",
|
||||
"mac": "ENC[AES256_GCM,data:9pDg1EWTuPtIs+hfwtgQkX6HrIaI+IWyVY16SVq2bBrEd5uxkXQ2fL9P1tjIwo7iO8mZ6Ji3uTWZa/mqPXwJjdGOV5Jehg3y1S3Y0mPD4yN/Xy/eWrieIly3md+EVcDndFLyKkcnpd78VjJhjvdia2OC64MBqylGoZ1M7sWmj6U=,iv:VrUEQeFg+JeDuQznYgeNZiml8HTcfWv3gy4/mEMlXO8=,tag:bxALn8GfEJY7aQQP2FmSsw==,type:str]",
|
||||
"unencrypted_suffix": "_unencrypted",
|
||||
"version": "3.10.2"
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1 @@
|
||||
../../../../../../sops/users/admin
|
||||
@@ -0,0 +1 @@
|
||||
../../../../../../sops/machines/machine4
|
||||
@@ -0,0 +1,19 @@
|
||||
{
|
||||
"data": "ENC[AES256_GCM,data:WK99B0F4/xudxleTboafseIybqtOaaF1j1Y/e0/DP2XB/Wsr6x+1S89ZaAsEh10221d+n+iz+X7eo2uPpEVqpP68qyLJDw3fGli3ilUaMZ+xxZMtboW4lGjg4q3UlGzD6XApFKCRRBbzqGHRbQAHD7RaBJOmz3TmXSsCXJDMyv9ac9ijf+8ko/mAi2yXEgZwOzaU9vU541Mqtux61GJ9RS+C+VJ2iKg+he+mMFMdbqRRAMB7QMqbeXoOKt5cF6mwUQJBlHqotF4/yy/gqCMlThAVSdLqc5McHTdv/oOQEfWH3UwZFxCP0T8mHxAkgI0ISlSzpESYf/GO1HYZLxggY546TWdJ4Mczq8j4Q118YPVi77++ZsqUuozVHN776pBdJHcLuo37hp/6MNqB2AOeCa8dwiF5bylHRiU+sfFXNNk/J3eO5NDwt3wujAiRsuLPdVQ0u3v8InUPIfbxaBr7FTksi1FLbm6oPBPBAJSIVl7L/MC8HBZZjywP1MCbO1kVpPbyP3C30PyKjvvhkWxHkv4YWbosuG0s7TxSxI2ngRjoPi7hbgmm4zpgiujfDthx93KEEqTxCf+/LBzxuKRuN+eP2y+5qAsRJL+opaETeyi3nap3hEKl1PTg9Vdt6PUW7sUDLWzhkNvAFqW4CbZ5HyKEsvZskdKP1anHXoLahlAzme9ag+aOxEgmlMN7Tdikmo7LxDrtU2GysH2miSj1ezQKkpq9kYkAogA9qBbr8N4o8jn8KZUDkqZXsD3W/WfF+xXVLWoKS5sbOmBvBu4QWGI2IDEBIwl/JRDwp12BKOraKNtG9v2oVm7rq/8Ulb52Z5MpBwHCwT2Yts7Y7RKLnZb+0Z4FWG4lqMz3gVGplZeXEjzbqj8J2YV2hd1Puk0B1OTwadQ2f6mFZRmxUmRLoaTMjGkkVKbaPb4g8G7zQEgJZj2y4FXZx/TqUm3ls04o5kgTuGML0wFrkzjtf3gQ0APwjqqax8/dqUXKWQOr3XeB5Be8OtDMfxws52M9pHjXXZxbPrWxxpqkcC/8fWoNNi0Umz3WKw==,iv:uyy4whMZL8gCqN5UESWxxK7o8THSAkOriyKqUzSvI6A=,tag:gXdVxcdc6nMmD10Y6QxEgw==,type:str]",
|
||||
"sops": {
|
||||
"age": [
|
||||
{
|
||||
"recipient": "age1kqgx4elusxx4u8409gml5z6tvrsayqsphewsl93mtqn7pl2p5dwq9lujpj",
|
||||
"enc": "-----BEGIN AGE ENCRYPTED FILE-----\nYWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSBlakt2TzR5b0NHdjY5YW94\nM1NMcC96Q1RxWjM1aGFybHBFZ0NvOFdhdjB3CkR0dE45aFZ4ck40R0xsTjRXOGt2\ncWpSK0lqV1FITWQ2RnFwS2JDYmNrMHMKLS0tIEpJcm8xK3dOYWNKb0JRTHNncjdr\nUkVLV2tSMGhyamgzVi9IcE9yNDQ1NFEK5HbycP8g2i9tHVbAfqzeXh0Krqsus3xP\n+Ta6lmWW4vP0fvA9IgZcGJXY5gCpEaum6GkkvaB2zhxs7Uddk5bEsQ==\n-----END AGE ENCRYPTED FILE-----\n"
|
||||
},
|
||||
{
|
||||
"recipient": "age1qm0p4vf9jvcnn43s6l4prk8zn6cx0ep9gzvevxecv729xz540v8qa742eg",
|
||||
"enc": "-----BEGIN AGE ENCRYPTED FILE-----\nYWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSBTTlJWaGQvekR4WUtEODkz\nYlFFMjhTVmZxdGNmU1lUbCtMNFdGeU50Um5RCnF2NTlsVm55VUNxc3k4dTc1c280\nYjVqV3dMZmxxR09VU0dSb0wzQ2NHczgKLS0tIGVFVjBPRFJTbWNjU3g4alJHeHM0\nQThYWlNQVHQ3K3lvS1Y1Si80RWlKSk0KJRpWoQj+VrMGfqlppNj9IHe/o5JzqQUW\nG+HiWay1SgQj14LM7G29fuYt39I9pdwuUMRYXJ4MwVq/wr0raHYoaw==\n-----END AGE ENCRYPTED FILE-----\n"
|
||||
}
|
||||
],
|
||||
"lastmodified": "2025-07-31T13:21:34Z",
|
||||
"mac": "ENC[AES256_GCM,data:Rkek09R889QqIfKa11bF1olCmyj9l9xM5bgTBpcSqaJyFUPg4bA8I3PnnAMtIeEGtG8ioh51jHRpbjsp5BuZM+GhrhW0X5NWF3ZlWS+Gx4hyoJSZJGgNVFzvr5Wj3QkS+9mDLrxE/UfLJZU1/raleAV89r2BiXOHhJugNPCSmnw=,iv:NUQZG+DzYOVsVhJnDaqcfAIOmnfRg6GuBamp61e+CoA=,tag:voEZOktdGpScZQrJnwlKnQ==,type:str]",
|
||||
"unencrypted_suffix": "_unencrypted",
|
||||
"version": "3.10.2"
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1 @@
|
||||
../../../../../../sops/users/admin
|
||||
@@ -0,0 +1 @@
|
||||
OORI2OC-364PI2W-LFOFX4X-7PB752I-QVLZWG3-EC4XJ4C-RPJQWTV-XYPRQA6
|
||||
@@ -0,0 +1 @@
|
||||
../../../../../../sops/machines/machine4
|
||||
@@ -0,0 +1,19 @@
|
||||
{
|
||||
"data": "ENC[AES256_GCM,data:e8f43yOO08ww3x9QY4D9K/51v2NdxzxnzKrPCkl0f/ElZv9iWdon2NyknjAA9sJEuy86eY8N0LkgqIodLoc3FoxmfADHU8BbhyFHjtoIpuTNV61eajW7vnVJA4zTtRxrEReNXe02muuHM/1sfXA+ocGKUK4r3pz6nSkKaE5VJFZl7e6gev1H2w6rSq3XMMp9V3AgRvyLtSQziKj3fRFFHhet+AopqU9uMb5t6zHa3Sj8sn8dw0HPrEQYelZyS0iAi0g7OHBMasHrDQYrtfd71SWoARuMN7JzSxE2/F+yeqGmwRdf1EMJf/1Xdrgta/f0Fzmt4+UzuciN6SsREGRBk+nGYQZrqsN+wk5bPet4P68+T2V1EFuCh4X8t25N36L0,iv:TmXHWEb3jgULdlIVOxmfUHL/ZrY4NneVkeJWJUOxzpg=,tag:lGor/T44A4yRCz+VZIVM6Q==,type:str]",
|
||||
"sops": {
|
||||
"age": [
|
||||
{
|
||||
"recipient": "age1kqgx4elusxx4u8409gml5z6tvrsayqsphewsl93mtqn7pl2p5dwq9lujpj",
|
||||
"enc": "-----BEGIN AGE ENCRYPTED FILE-----\nYWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSBsbEhCZ0JGYkVCTEwyeDF2\nYjU3cFNTb2pna2xjSzdHYlVrNDdDZGRCM0RvCmpJbHVsblBsbG1CQkh3NTVHWTEv\nR25HdFpkYktKNWZpZ2tvcEorTWRJZDgKLS0tIG9MQWpERTJnb2dDWlZ1NVgwMkdL\nMWYwamM3bC9pcHRXSk8yYnN4UmhhQnMKyJAUuClhEPLBoq8hDeopp0xkMbtekJrg\nfZXUAA6Wr/cwISLJHAFMa8DdBGBj9ICU6v/AMyo3Zfkv9gV/NTDmfw==\n-----END AGE ENCRYPTED FILE-----\n"
|
||||
},
|
||||
{
|
||||
"recipient": "age1qm0p4vf9jvcnn43s6l4prk8zn6cx0ep9gzvevxecv729xz540v8qa742eg",
|
||||
"enc": "-----BEGIN AGE ENCRYPTED FILE-----\nYWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSBvbU1DMFRoS1Fjc3FwWVJD\nUUdQQm5HelRYT3U1SXNJaFhkd3ZsYmFFQ3hnCnJaa2RpbWQ5WnZwWlJBZnZENnRG\nRENkUWorcFgzZ2hYMjI5Z2o0cHNWcTAKLS0tIGhMQVRmdjB0cWlJOTI2Y1N6dWRr\nSjVIVnpTV1VRS0NaZUwyeUt5ZTlVSEkKfyL4gE731vmdqDjWjPaZiSYkoZVkj4fj\nqKJMwG8VN58NnL06oExzqVGgFKR4CTas0auWQ76cs2gdv5n7Gm2NMw==\n-----END AGE ENCRYPTED FILE-----\n"
|
||||
}
|
||||
],
|
||||
"lastmodified": "2025-07-31T13:21:34Z",
|
||||
"mac": "ENC[AES256_GCM,data:KVewXUwkR68EyGj7Ld8JfxWu10+rbLn74Pv8y0Vkh6ZFDmo76XtuamLrH74uNOtDGwGhBM53xzNVf7LHfiWmT3S1dLifrusV4BWXa7XX37ACxGEq2FYg3HuUKG6bYgykKfxc3vLv50NK8cZMSxsAyuP5z7efas6joKHuzA93ZSw=,iv:xnHbVGa67SjGMHiGsjvteOQRtA66kfsWY04ZfdV6Cw0=,tag:9AFqKEn6Hs/LweyQYVDJ0w==,type:str]",
|
||||
"unencrypted_suffix": "_unencrypted",
|
||||
"version": "3.10.2"
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1 @@
|
||||
../../../../../../sops/users/admin
|
||||
21
clanServices/syncthing/vars.nix
Normal file
@@ -0,0 +1,21 @@
{ pkgs, ... }:
{
  clan.core.vars.generators.syncthing = {
    files.key = { };
    files.cert = { };
    files.api = { };
    files.id.secret = false;
    runtimeInputs = [
      pkgs.coreutils
      pkgs.gnugrep
      pkgs.syncthing
    ];
    script = ''
      syncthing generate --config "$out"
      mv "$out"/key.pem "$out"/key
      mv "$out"/cert.pem "$out"/cert
      cat "$out"/config.xml | grep -oP '(?<=<device id=")[^"]+' | uniq > "$out"/id
      cat "$out"/config.xml | grep -oP '<apikey>\K[^<]+' | uniq > "$out"/api
    '';
  };
}
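The generator above produces four per-machine files (key, cert, api, and a non-secret id). As a minimal sketch only, assuming the same per-machine vars layout that the zerotier change further down in this diff reads from, and using a hypothetical machine name "machine1", another module could pick up the generated device id roughly like this:

{ config, lib, ... }:
let
  # Assumption: the id var lands under vars/per-machine/<machine>/syncthing/id/value,
  # mirroring the zerotier-network-id path used elsewhere in this diff.
  idPath = "${config.clan.core.settings.directory}/vars/per-machine/machine1/syncthing/id/value";
  deviceId = if builtins.pathExists idPath then builtins.readFile idPath else null;
in
{
  # Illustrative consumer: register the peer once its id has been generated.
  services.syncthing.settings.devices = lib.optionalAttrs (deviceId != null) {
    machine1.id = deviceId;
  };
}

This is not part of the change set; it only illustrates how the id file (generated with secret = false) can be read at evaluation time.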
@@ -88,9 +88,17 @@
files.user-password.deploy = false;

prompts.user-password = lib.mkIf settings.prompt {
  display = {
    group = settings.user;
    label = "password";
    required = false;
    helperText = ''
      Your password will be encrypted and stored securely using the secret store you've configured.
    '';
  };
  type = "hidden";
  persist = true;
  description = "You can autogenerate a password, if you leave this prompt blank.";
  description = "Leave empty to generate automatically";
};

runtimeInputs = [
@@ -9,14 +9,7 @@
let
  controllerMachine = builtins.head (lib.attrNames roles.controller.machines or { });
  networkIdPath = "${config.clan.core.settings.directory}/vars/per-machine/${controllerMachine}/zerotier/zerotier-network-id/value";
  networkId =
    if builtins.pathExists networkIdPath then
      builtins.readFile networkIdPath
    else
      builtins.throw ''
        No zerotier network id found for ${controllerMachine}.
        Please run `clan vars generate ${controllerMachine}` first.
      '';
  networkId = if builtins.pathExists networkIdPath then builtins.readFile networkIdPath else null;
  moons = lib.attrNames (roles.moon.machines or { });
  moonIps = builtins.foldl' (
    ips: name:
299
devFlake/flake-compat.nix
Normal file
@@ -0,0 +1,299 @@
# Compatibility function to allow flakes to be used by
# non-flake-enabled Nix versions. Given a source tree containing a
# 'flake.nix' and 'flake.lock' file, it fetches the flake inputs and
# calls the flake's 'outputs' function. It then returns an attrset
# containing 'defaultNix' (to be used in 'default.nix'), 'shellNix'
# (to be used in 'shell.nix').

{
  src,
  system ? builtins.currentSystem or "unknown-system",
}:

let

  lockFilePath = src + "/flake.lock";

  lockFile = builtins.fromJSON (builtins.readFile lockFilePath);

  fetchTree =
    builtins.fetchTree or (
      info:
      if info.type == "github" then
        {
          outPath = fetchTarball (
            {
              url = "https://api.${info.host or "github.com"}/repos/${info.owner}/${info.repo}/tarball/${info.rev}";
            }
            // (if info ? narHash then { sha256 = info.narHash; } else { })
          );
          rev = info.rev;
          shortRev = builtins.substring 0 7 info.rev;
          lastModified = info.lastModified;
          lastModifiedDate = formatSecondsSinceEpoch info.lastModified;
          narHash = info.narHash;
        }
      else if info.type == "git" then
        {
          outPath = builtins.fetchGit (
            {
              url = info.url;
            }
            // (if info ? rev then { inherit (info) rev; } else { })
            // (if info ? ref then { inherit (info) ref; } else { })
            // (if info ? submodules then { inherit (info) submodules; } else { })
          );
          lastModified = info.lastModified;
          lastModifiedDate = formatSecondsSinceEpoch info.lastModified;
          narHash = info.narHash;
          revCount = info.revCount or 0;
        }
        // (
          if info ? rev then
            {
              rev = info.rev;
              shortRev = builtins.substring 0 7 info.rev;
            }
          else
            { }
        )
      else if info.type == "path" then
        {
          outPath = builtins.path {
            path = info.path;
            sha256 = info.narHash;
          };
          narHash = info.narHash;
        }
      else if info.type == "tarball" then
        {
          outPath = fetchTarball (
            { inherit (info) url; } // (if info ? narHash then { sha256 = info.narHash; } else { })
          );
        }
      else if info.type == "gitlab" then
        {
          inherit (info) rev narHash lastModified;
          outPath = fetchTarball (
            {
              url = "https://${info.host or "gitlab.com"}/api/v4/projects/${info.owner}%2F${info.repo}/repository/archive.tar.gz?sha=${info.rev}";
            }
            // (if info ? narHash then { sha256 = info.narHash; } else { })
          );
          shortRev = builtins.substring 0 7 info.rev;
        }
      else if info.type == "sourcehut" then
        {
          inherit (info) rev narHash lastModified;
          outPath = fetchTarball (
            {
              url = "https://${info.host or "git.sr.ht"}/${info.owner}/${info.repo}/archive/${info.rev}.tar.gz";
            }
            // (if info ? narHash then { sha256 = info.narHash; } else { })
          );
          shortRev = builtins.substring 0 7 info.rev;
        }
      else
        # FIXME: add Mercurial, tarball inputs.
        throw "flake input has unsupported input type '${info.type}'"
    );

  callFlake4 =
    flakeSrc: locks:
    let
      flake = import (flakeSrc + "/flake.nix");

      inputs = builtins.mapAttrs (
        _n: v:
        if v.flake or true then
          callFlake4 (fetchTree (v.locked // v.info)) v.inputs
        else
          fetchTree (v.locked // v.info)
      ) locks;

      outputs = flakeSrc // (flake.outputs (inputs // { self = outputs; }));
    in
    assert flake.edition == 201909;
    outputs;

  callLocklessFlake =
    flakeSrc:
    let
      flake = import (flakeSrc + "/flake.nix");
      outputs = flakeSrc // (flake.outputs ({ self = outputs; }));
    in
    outputs;

  rootSrc =
    let
      # Try to clean the source tree by using fetchGit, if this source
      # tree is a valid git repository.
      tryFetchGit =
        src:
        if isGit && !isShallow then
          let
            res = builtins.fetchGit src;
          in
          if res.rev == "0000000000000000000000000000000000000000" then
            removeAttrs res [
              "rev"
              "shortRev"
            ]
          else
            res
        else
          {
            outPath =
              # Massage `src` into a store path.
              if builtins.isPath src then
                if
                  dirOf (toString src) == builtins.storeDir
                  # `builtins.storePath` is not available in pure-eval mode.
                  && builtins ? currentSystem
                then
                  # If it's already a store path, don't copy it again.
                  builtins.storePath src
                else
                  "${src}"
              else
                src;
          };
      # NB git worktrees have a file for .git, so we don't check the type of .git
      isGit = builtins.pathExists (src + "/.git");
      isShallow = builtins.pathExists (src + "/.git/shallow");

    in
    {
      lastModified = 0;
      lastModifiedDate = formatSecondsSinceEpoch 0;
    }
    // (if src ? outPath then src else tryFetchGit src);

  # Format number of seconds in the Unix epoch as %Y%m%d%H%M%S.
  formatSecondsSinceEpoch =
    t:
    let
      rem = x: y: x - x / y * y;
      days = t / 86400;
      secondsInDay = rem t 86400;
      hours = secondsInDay / 3600;
      minutes = (rem secondsInDay 3600) / 60;
      seconds = rem t 60;

      # Courtesy of https://stackoverflow.com/a/32158604.
      z = days + 719468;
      era = (if z >= 0 then z else z - 146096) / 146097;
      doe = z - era * 146097;
      yoe = (doe - doe / 1460 + doe / 36524 - doe / 146096) / 365;
      y = yoe + era * 400;
      doy = doe - (365 * yoe + yoe / 4 - yoe / 100);
      mp = (5 * doy + 2) / 153;
      d = doy - (153 * mp + 2) / 5 + 1;
      m = mp + (if mp < 10 then 3 else -9);
      y' = y + (if m <= 2 then 1 else 0);

      pad = s: if builtins.stringLength s < 2 then "0" + s else s;
    in
    "${toString y'}${pad (toString m)}${pad (toString d)}${pad (toString hours)}${pad (toString minutes)}${pad (toString seconds)}";

  allNodes = builtins.mapAttrs (
    key: node:
    let
      sourceInfo =
        if key == lockFile.root then
          rootSrc
        else
          fetchTree (node.info or { } // removeAttrs node.locked [ "dir" ]);

      subdir = if key == lockFile.root then "" else node.locked.dir or "";

      outPath = sourceInfo + ((if subdir == "" then "" else "/") + subdir);

      flake = import (outPath + "/flake.nix");

      inputs = builtins.mapAttrs (_inputName: inputSpec: allNodes.${resolveInput inputSpec}) (
        node.inputs or { }
      );

      # Resolve a input spec into a node name. An input spec is
      # either a node name, or a 'follows' path from the root
      # node.
      resolveInput =
        inputSpec: if builtins.isList inputSpec then getInputByPath lockFile.root inputSpec else inputSpec;

      # Follow an input path (e.g. ["dwarffs" "nixpkgs"]) from the
      # root node, returning the final node.
      getInputByPath =
        nodeName: path:
        if path == [ ] then
          nodeName
        else
          getInputByPath
            # Since this could be a 'follows' input, call resolveInput.
            (resolveInput lockFile.nodes.${nodeName}.inputs.${builtins.head path})
            (builtins.tail path);

      outputs = flake.outputs (inputs // { self = result; });

      result =
        outputs
        # We add the sourceInfo attribute for its metadata, as they are
        # relevant metadata for the flake. However, the outPath of the
        # sourceInfo does not necessarily match the outPath of the flake,
        # as the flake may be in a subdirectory of a source.
        # This is shadowed in the next //
        // sourceInfo
        // {
          # This shadows the sourceInfo.outPath
          inherit outPath;

          inherit inputs;
          inherit outputs;
          inherit sourceInfo;
          _type = "flake";
        };

    in
    if node.flake or true then
      assert builtins.isFunction flake.outputs;
      result
    else
      sourceInfo
  ) lockFile.nodes;

  result =
    if !(builtins.pathExists lockFilePath) then
      callLocklessFlake rootSrc
    else if lockFile.version == 4 then
      callFlake4 rootSrc (lockFile.inputs)
    else if lockFile.version >= 5 && lockFile.version <= 7 then
      allNodes.${lockFile.root}
    else
      throw "lock file '${lockFilePath}' has unsupported version ${toString lockFile.version}";

in
rec {
  outputs = result;

  defaultNix =
    builtins.removeAttrs result [ "__functor" ]
    // (
      if result ? defaultPackage.${system} then { default = result.defaultPackage.${system}; } else { }
    )
    // (
      if result ? packages.${system}.default then
        { default = result.packages.${system}.default; }
      else
        { }
    );

  shellNix =
    defaultNix
    // (if result ? devShell.${system} then { default = result.devShell.${system}; } else { })
    // (
      if result ? devShells.${system}.default then
        { default = result.devShells.${system}.default; }
      else
        { }
    );
}
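The header comment in the file above describes the returned attrset: defaultNix is meant to back a default.nix and shellNix a shell.nix. As a hedged sketch only (the repository's actual default.nix and shell.nix wrappers are not shown in this diff), a consumer could look roughly like:

# default.nix (sketch): evaluate the flake at the repository root without flakes enabled.
(import ./devFlake/flake-compat.nix { src = ./.; }).defaultNix

# shell.nix (sketch): same idea, but exposing the dev shell.
(import ./devFlake/flake-compat.nix { src = ./.; }).shellNix

The src argument here assumes the flake.nix to evaluate sits at the repository root; pass a different path if it lives elsewhere.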
25
devFlake/private/flake.lock → devFlake/flake.lock
generated
@@ -1,5 +1,23 @@
{
  "nodes": {
    "clan-core-for-checks": {
      "flake": false,
      "locked": {
        "lastModified": 1754973208,
        "narHash": "sha256-K/abuL/G6TtwV6Oo/C5EloDfRd2lAbPhCxQ/KnIDI9k=",
        "ref": "main",
        "rev": "caae6c7a559d918de06636febc317e6c0a59e0cb",
        "shallow": true,
        "type": "git",
        "url": "https://git.clan.lol/clan/clan-core"
      },
      "original": {
        "ref": "main",
        "shallow": true,
        "type": "git",
        "url": "https://git.clan.lol/clan/clan-core"
      }
    },
    "flake-utils": {
      "inputs": {
        "systems": [
@@ -104,6 +122,7 @@
    },
    "root": {
      "inputs": {
        "clan-core-for-checks": "clan-core-for-checks",
        "flake-utils": "flake-utils",
        "nixpkgs-dev": "nixpkgs-dev",
        "nuschtos": "nuschtos",
@@ -146,11 +165,11 @@
        "nixpkgs": []
      },
      "locked": {
        "lastModified": 1752055615,
        "narHash": "sha256-19m7P4O/Aw/6+CzncWMAJu89JaKeMh3aMle1CNQSIwM=",
        "lastModified": 1754847726,
        "narHash": "sha256-2vX8QjO5lRsDbNYvN9hVHXLU6oMl+V/PsmIiJREG4rE=",
        "owner": "numtide",
        "repo": "treefmt-nix",
        "rev": "c9d477b5d5bd7f26adddd3f96cfd6a904768d4f9",
        "rev": "7d81f6fb2e19bf84f1c65135d1060d829fae2408",
        "type": "github"
      },
      "original": {
Some files were not shown because too many files have changed in this diff.