Merge branch 'main' into DavHau-dave
This commit is contained in:
3
.envrc
3
.envrc
@@ -1,3 +1,4 @@
|
|||||||
|
# shellcheck shell=bash
|
||||||
if ! has nix_direnv_version || ! nix_direnv_version 3.0.4; then
|
if ! has nix_direnv_version || ! nix_direnv_version 3.0.4; then
|
||||||
source_url "https://raw.githubusercontent.com/nix-community/nix-direnv/3.0.4/direnvrc" "sha256-DzlYZ33mWF/Gs8DDeyjr8mnVmQGx7ASYqA5WlxwvBG4="
|
source_url "https://raw.githubusercontent.com/nix-community/nix-direnv/3.0.4/direnvrc" "sha256-DzlYZ33mWF/Gs8DDeyjr8mnVmQGx7ASYqA5WlxwvBG4="
|
||||||
fi
|
fi
|
||||||
@@ -5,7 +6,7 @@ fi
|
|||||||
watch_file .direnv/selected-shell
|
watch_file .direnv/selected-shell
|
||||||
|
|
||||||
if [ -e .direnv/selected-shell ]; then
|
if [ -e .direnv/selected-shell ]; then
|
||||||
use flake .#$(cat .direnv/selected-shell)
|
use flake ".#$(cat .direnv/selected-shell)"
|
||||||
else
|
else
|
||||||
use flake
|
use flake
|
||||||
fi
|
fi
|
||||||
|
|||||||
@@ -1,100 +0,0 @@
|
|||||||
From bc199a27f23b0fcf175b116f7cf606c0d22b422a Mon Sep 17 00:00:00 2001
|
|
||||||
From: =?UTF-8?q?J=C3=B6rg=20Thalheim?= <joerg@thalheim.io>
|
|
||||||
Date: Tue, 11 Jun 2024 11:40:47 +0200
|
|
||||||
Subject: [PATCH 1/2] register_new_matrix_user: add password-file flag
|
|
||||||
MIME-Version: 1.0
|
|
||||||
Content-Type: text/plain; charset=UTF-8
|
|
||||||
Content-Transfer-Encoding: 8bit
|
|
||||||
|
|
||||||
getpass in python expects stdin to be a tty, hence we cannot just pipe
|
|
||||||
into register_new_matrix_user. --password-file instead works better and
|
|
||||||
it would also allow the use of stdin if /dev/stdin is passed.
|
|
||||||
|
|
||||||
Co-authored-by: Andrew Morgan <1342360+anoadragon453@users.noreply.github.com>
|
|
||||||
Signed-off-by: Jörg Thalheim <joerg@thalheim.io>
|
|
||||||
---
|
|
||||||
changelog.d/17294.feature | 2 ++
|
|
||||||
debian/register_new_matrix_user.ronn | 9 +++++++--
|
|
||||||
synapse/_scripts/register_new_matrix_user.py | 20 +++++++++++++++-----
|
|
||||||
3 files changed, 24 insertions(+), 7 deletions(-)
|
|
||||||
create mode 100644 changelog.d/17294.feature
|
|
||||||
|
|
||||||
diff --git a/changelog.d/17294.feature b/changelog.d/17294.feature
|
|
||||||
new file mode 100644
|
|
||||||
index 000000000..33aac7b0b
|
|
||||||
--- /dev/null
|
|
||||||
+++ b/changelog.d/17294.feature
|
|
||||||
@@ -0,0 +1,2 @@
|
|
||||||
+`register_new_matrix_user` now supports a --password-file flag, which
|
|
||||||
+is useful for scripting.
|
|
||||||
diff --git a/debian/register_new_matrix_user.ronn b/debian/register_new_matrix_user.ronn
|
|
||||||
index 0410b1f4c..d99e9215a 100644
|
|
||||||
--- a/debian/register_new_matrix_user.ronn
|
|
||||||
+++ b/debian/register_new_matrix_user.ronn
|
|
||||||
@@ -31,8 +31,13 @@ A sample YAML file accepted by `register_new_matrix_user` is described below:
|
|
||||||
Local part of the new user. Will prompt if omitted.
|
|
||||||
|
|
||||||
* `-p`, `--password`:
|
|
||||||
- New password for user. Will prompt if omitted. Supplying the password
|
|
||||||
- on the command line is not recommended. Use the STDIN instead.
|
|
||||||
+ New password for user. Will prompt if this option and `--password-file` are omitted.
|
|
||||||
+ Supplying the password on the command line is not recommended.
|
|
||||||
+ Use `--password-file` if possible.
|
|
||||||
+
|
|
||||||
+ * `--password-file`:
|
|
||||||
+ File containing the new password for user. If set, overrides `--password`.
|
|
||||||
+ This is a more secure alternative to specifying the password on the command line.
|
|
||||||
|
|
||||||
* `-a`, `--admin`:
|
|
||||||
Register new user as an admin. Will prompt if omitted.
|
|
||||||
diff --git a/synapse/_scripts/register_new_matrix_user.py b/synapse/_scripts/register_new_matrix_user.py
|
|
||||||
index 77a7129ee..972b35e2d 100644
|
|
||||||
--- a/synapse/_scripts/register_new_matrix_user.py
|
|
||||||
+++ b/synapse/_scripts/register_new_matrix_user.py
|
|
||||||
@@ -173,11 +173,18 @@ def main() -> None:
|
|
||||||
default=None,
|
|
||||||
help="Local part of the new user. Will prompt if omitted.",
|
|
||||||
)
|
|
||||||
- parser.add_argument(
|
|
||||||
+ password_group = parser.add_mutually_exclusive_group()
|
|
||||||
+ password_group.add_argument(
|
|
||||||
"-p",
|
|
||||||
"--password",
|
|
||||||
default=None,
|
|
||||||
- help="New password for user. Will prompt if omitted.",
|
|
||||||
+ help="New password for user. Will prompt for a password if "
|
|
||||||
+ "this flag and `--password-file` are both omitted.",
|
|
||||||
+ )
|
|
||||||
+ password_group.add_argument(
|
|
||||||
+ "--password-file",
|
|
||||||
+ default=None,
|
|
||||||
+ help="File containing the new password for user. If set, will override `--password`.",
|
|
||||||
)
|
|
||||||
parser.add_argument(
|
|
||||||
"-t",
|
|
||||||
@@ -247,6 +254,11 @@ def main() -> None:
|
|
||||||
print(_NO_SHARED_SECRET_OPTS_ERROR, file=sys.stderr)
|
|
||||||
sys.exit(1)
|
|
||||||
|
|
||||||
+ if args.password_file:
|
|
||||||
+ password = _read_file(args.password_file, "password-file").strip()
|
|
||||||
+ else:
|
|
||||||
+ password = args.password
|
|
||||||
+
|
|
||||||
if args.server_url:
|
|
||||||
server_url = args.server_url
|
|
||||||
elif config is not None:
|
|
||||||
@@ -269,9 +281,7 @@ def main() -> None:
|
|
||||||
if args.admin or args.no_admin:
|
|
||||||
admin = args.admin
|
|
||||||
|
|
||||||
- register_new_user(
|
|
||||||
- args.user, args.password, server_url, secret, admin, args.user_type
|
|
||||||
- )
|
|
||||||
+ register_new_user(args.user, password, server_url, secret, admin, args.user_type)
|
|
||||||
|
|
||||||
|
|
||||||
def _read_file(file_path: Any, config_path: str) -> str:
|
|
||||||
--
|
|
||||||
2.44.1
|
|
||||||
|
|
||||||
@@ -1,94 +0,0 @@
|
|||||||
From 1789416df425d22693b0055a6688d8686e0ee4a1 Mon Sep 17 00:00:00 2001
|
|
||||||
From: =?UTF-8?q?J=C3=B6rg=20Thalheim?= <joerg@thalheim.io>
|
|
||||||
Date: Thu, 13 Jun 2024 14:38:19 +0200
|
|
||||||
Subject: [PATCH 2/2] register-new-matrix-user: add a flag to ignore already
|
|
||||||
existing users
|
|
||||||
MIME-Version: 1.0
|
|
||||||
Content-Type: text/plain; charset=UTF-8
|
|
||||||
Content-Transfer-Encoding: 8bit
|
|
||||||
|
|
||||||
This allows to register users in a more declarative and stateless way.
|
|
||||||
|
|
||||||
Signed-off-by: Jörg Thalheim <joerg@thalheim.io>
|
|
||||||
---
|
|
||||||
synapse/_scripts/register_new_matrix_user.py | 22 ++++++++++++++++++--
|
|
||||||
1 file changed, 20 insertions(+), 2 deletions(-)
|
|
||||||
|
|
||||||
diff --git a/synapse/_scripts/register_new_matrix_user.py b/synapse/_scripts/register_new_matrix_user.py
|
|
||||||
index 972b35e2d..233e7267d 100644
|
|
||||||
--- a/synapse/_scripts/register_new_matrix_user.py
|
|
||||||
+++ b/synapse/_scripts/register_new_matrix_user.py
|
|
||||||
@@ -52,6 +52,7 @@ def request_registration(
|
|
||||||
user_type: Optional[str] = None,
|
|
||||||
_print: Callable[[str], None] = print,
|
|
||||||
exit: Callable[[int], None] = sys.exit,
|
|
||||||
+ exists_ok: bool = False,
|
|
||||||
) -> None:
|
|
||||||
url = "%s/_synapse/admin/v1/register" % (server_location.rstrip("/"),)
|
|
||||||
|
|
||||||
@@ -97,6 +98,10 @@ def request_registration(
|
|
||||||
r = requests.post(url, json=data)
|
|
||||||
|
|
||||||
if r.status_code != 200:
|
|
||||||
+ response = r.json()
|
|
||||||
+ if exists_ok and response["errcode"] == "M_USER_IN_USE":
|
|
||||||
+ _print("User already exists. Skipping.")
|
|
||||||
+ return
|
|
||||||
_print("ERROR! Received %d %s" % (r.status_code, r.reason))
|
|
||||||
if 400 <= r.status_code < 500:
|
|
||||||
try:
|
|
||||||
@@ -115,6 +120,7 @@ def register_new_user(
|
|
||||||
shared_secret: str,
|
|
||||||
admin: Optional[bool],
|
|
||||||
user_type: Optional[str],
|
|
||||||
+ exists_ok: bool = False,
|
|
||||||
) -> None:
|
|
||||||
if not user:
|
|
||||||
try:
|
|
||||||
@@ -154,7 +160,13 @@ def register_new_user(
|
|
||||||
admin = False
|
|
||||||
|
|
||||||
request_registration(
|
|
||||||
- user, password, server_location, shared_secret, bool(admin), user_type
|
|
||||||
+ user,
|
|
||||||
+ password,
|
|
||||||
+ server_location,
|
|
||||||
+ shared_secret,
|
|
||||||
+ bool(admin),
|
|
||||||
+ user_type,
|
|
||||||
+ exists_ok=exists_ok,
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
@@ -173,6 +185,11 @@ def main() -> None:
|
|
||||||
default=None,
|
|
||||||
help="Local part of the new user. Will prompt if omitted.",
|
|
||||||
)
|
|
||||||
+ parser.add_argument(
|
|
||||||
+ "--exists-ok",
|
|
||||||
+ action="store_true",
|
|
||||||
+ help="Do not fail if user already exists.",
|
|
||||||
+ )
|
|
||||||
password_group = parser.add_mutually_exclusive_group()
|
|
||||||
password_group.add_argument(
|
|
||||||
"-p",
|
|
||||||
@@ -192,6 +209,7 @@ def main() -> None:
|
|
||||||
default=None,
|
|
||||||
help="User type as specified in synapse.api.constants.UserTypes",
|
|
||||||
)
|
|
||||||
+
|
|
||||||
admin_group = parser.add_mutually_exclusive_group()
|
|
||||||
admin_group.add_argument(
|
|
||||||
"-a",
|
|
||||||
@@ -281,7 +299,7 @@ def main() -> None:
|
|
||||||
if args.admin or args.no_admin:
|
|
||||||
admin = args.admin
|
|
||||||
|
|
||||||
- register_new_user(args.user, password, server_url, secret, admin, args.user_type)
|
|
||||||
+ register_new_user(args.user, password, server_url, secret, admin, args.user_type, exists_ok=args.exists_ok)
|
|
||||||
|
|
||||||
|
|
||||||
def _read_file(file_path: Any, config_path: str) -> str:
|
|
||||||
--
|
|
||||||
2.44.1
|
|
||||||
|
|
||||||
@@ -17,19 +17,8 @@ let
|
|||||||
ln -s $out/config.json $out/config.${nginx-vhost}.json
|
ln -s $out/config.json $out/config.${nginx-vhost}.json
|
||||||
'';
|
'';
|
||||||
|
|
||||||
# FIXME: This was taken from upstream. Drop this when our patch is upstream
|
|
||||||
synapseCfg = config.services.matrix-synapse;
|
|
||||||
wantedExtras =
|
|
||||||
synapseCfg.extras
|
|
||||||
++ lib.optional (synapseCfg.settings ? oidc_providers) "oidc"
|
|
||||||
++ lib.optional (synapseCfg.settings ? jwt_config) "jwt"
|
|
||||||
++ lib.optional (synapseCfg.settings ? saml2_config) "saml2"
|
|
||||||
++ lib.optional (synapseCfg.settings ? redis) "redis"
|
|
||||||
++ lib.optional (synapseCfg.settings ? sentry) "sentry"
|
|
||||||
++ lib.optional (synapseCfg.settings ? user_directory) "user-search"
|
|
||||||
++ lib.optional (synapseCfg.settings.url_preview_enabled) "url-preview"
|
|
||||||
++ lib.optional (synapseCfg.settings.database.name == "psycopg2") "postgres";
|
|
||||||
in
|
in
|
||||||
|
# FIXME: This was taken from upstream. Drop this when our patch is upstream
|
||||||
{
|
{
|
||||||
options.services.matrix-synapse.package = lib.mkOption { readOnly = false; };
|
options.services.matrix-synapse.package = lib.mkOption { readOnly = false; };
|
||||||
options.clan.matrix-synapse = {
|
options.clan.matrix-synapse = {
|
||||||
@@ -78,21 +67,6 @@ in
|
|||||||
];
|
];
|
||||||
config = {
|
config = {
|
||||||
services.matrix-synapse = {
|
services.matrix-synapse = {
|
||||||
package = lib.mkForce (
|
|
||||||
pkgs.matrix-synapse.override {
|
|
||||||
matrix-synapse-unwrapped = pkgs.matrix-synapse.unwrapped.overrideAttrs (_old: {
|
|
||||||
doInstallCheck = false; # too slow, nixpkgs maintainer already run this.
|
|
||||||
patches = [
|
|
||||||
# see: https://github.com/element-hq/synapse/pull/17304
|
|
||||||
./0001-register_new_matrix_user-add-password-file-flag.patch
|
|
||||||
./0002-register-new-matrix-user-add-a-flag-to-ignore-alread.patch
|
|
||||||
];
|
|
||||||
});
|
|
||||||
extras = wantedExtras;
|
|
||||||
plugins = synapseCfg.plugins;
|
|
||||||
}
|
|
||||||
);
|
|
||||||
|
|
||||||
enable = true;
|
enable = true;
|
||||||
settings = {
|
settings = {
|
||||||
server_name = cfg.domain;
|
server_name = cfg.domain;
|
||||||
|
|||||||
@@ -30,6 +30,16 @@ in
|
|||||||
default = [ config.clan.core.machineName ];
|
default = [ config.clan.core.machineName ];
|
||||||
description = "Hosts that should be excluded";
|
description = "Hosts that should be excluded";
|
||||||
};
|
};
|
||||||
|
networkIps = lib.mkOption {
|
||||||
|
type = lib.types.listOf lib.types.str;
|
||||||
|
default = [ ];
|
||||||
|
description = "Extra zerotier network Ips that should be accepted";
|
||||||
|
};
|
||||||
|
networkIds = lib.mkOption {
|
||||||
|
type = lib.types.listOf lib.types.str;
|
||||||
|
default = [ ];
|
||||||
|
description = "Extra zerotier network Ids that should be accepted";
|
||||||
|
};
|
||||||
};
|
};
|
||||||
|
|
||||||
config.systemd.services.zerotier-static-peers-autoaccept =
|
config.systemd.services.zerotier-static-peers-autoaccept =
|
||||||
@@ -56,6 +66,7 @@ in
|
|||||||
lib.nameValuePair (builtins.readFile fullPath) [ machine ]
|
lib.nameValuePair (builtins.readFile fullPath) [ machine ]
|
||||||
) filteredMachines
|
) filteredMachines
|
||||||
);
|
);
|
||||||
|
allHostIPs = config.clan.zerotier-static-peers.networkIps ++ hosts;
|
||||||
in
|
in
|
||||||
lib.mkIf (config.clan.core.networking.zerotier.controller.enable) {
|
lib.mkIf (config.clan.core.networking.zerotier.controller.enable) {
|
||||||
wantedBy = [ "multi-user.target" ];
|
wantedBy = [ "multi-user.target" ];
|
||||||
@@ -65,7 +76,10 @@ in
|
|||||||
#!/bin/sh
|
#!/bin/sh
|
||||||
${lib.concatMapStringsSep "\n" (host: ''
|
${lib.concatMapStringsSep "\n" (host: ''
|
||||||
${config.clan.core.clanPkgs.zerotier-members}/bin/zerotier-members allow --member-ip ${host}
|
${config.clan.core.clanPkgs.zerotier-members}/bin/zerotier-members allow --member-ip ${host}
|
||||||
'') hosts}
|
'') allHostIPs}
|
||||||
|
${lib.concatMapStringsSep "\n" (host: ''
|
||||||
|
${config.clan.core.clanPkgs.zerotier-members}/bin/zerotier-members allow ${host}
|
||||||
|
'') config.clan.zerotier-static-peers.networkIds}
|
||||||
'';
|
'';
|
||||||
};
|
};
|
||||||
|
|
||||||
|
|||||||
@@ -1,6 +1,8 @@
|
|||||||
|
# shellcheck shell=bash
|
||||||
source_up
|
source_up
|
||||||
|
|
||||||
watch_file $(find ./nix -name "*.nix" -printf '%p ')
|
mapfile -d '' -t nix_files < <(find ./nix -name "*.nix" -print0)
|
||||||
|
watch_file "${nix_files[@]}"
|
||||||
|
|
||||||
# Because we depend on nixpkgs sources, uploading to builders takes a long time
|
# Because we depend on nixpkgs sources, uploading to builders takes a long time
|
||||||
use flake .#docs --builders ''
|
use flake .#docs --builders ''
|
||||||
|
|||||||
@@ -14,6 +14,7 @@ markdown_extensions:
|
|||||||
- attr_list
|
- attr_list
|
||||||
- footnotes
|
- footnotes
|
||||||
- md_in_html
|
- md_in_html
|
||||||
|
- def_list
|
||||||
- meta
|
- meta
|
||||||
- plantuml_markdown
|
- plantuml_markdown
|
||||||
- pymdownx.emoji:
|
- pymdownx.emoji:
|
||||||
@@ -49,6 +50,8 @@ nav:
|
|||||||
- Mesh VPN: getting-started/mesh-vpn.md
|
- Mesh VPN: getting-started/mesh-vpn.md
|
||||||
- Backup & Restore: getting-started/backups.md
|
- Backup & Restore: getting-started/backups.md
|
||||||
- Flake-parts: getting-started/flake-parts.md
|
- Flake-parts: getting-started/flake-parts.md
|
||||||
|
- Concepts:
|
||||||
|
- Configuration: concepts/configuration.md
|
||||||
- Reference:
|
- Reference:
|
||||||
- Clan Modules:
|
- Clan Modules:
|
||||||
- reference/clanModules/borgbackup-static.md
|
- reference/clanModules/borgbackup-static.md
|
||||||
|
|||||||
@@ -26,6 +26,7 @@ pkgs.stdenv.mkDerivation {
|
|||||||
mkdocs-material
|
mkdocs-material
|
||||||
mkdocs-rss-plugin
|
mkdocs-rss-plugin
|
||||||
mkdocs-macros
|
mkdocs-macros
|
||||||
|
filelock # FIXME: this should be already provided by mkdocs-rss-plugin
|
||||||
]);
|
]);
|
||||||
configurePhase = ''
|
configurePhase = ''
|
||||||
mkdir -p ./site/reference/cli
|
mkdir -p ./site/reference/cli
|
||||||
|
|||||||
@@ -38,7 +38,7 @@
|
|||||||
patchShebangs --build $out
|
patchShebangs --build $out
|
||||||
|
|
||||||
ruff format --check --diff $out
|
ruff format --check --diff $out
|
||||||
ruff --line-length 88 $out
|
ruff check --line-length 88 $out
|
||||||
mypy --strict $out
|
mypy --strict $out
|
||||||
'';
|
'';
|
||||||
|
|
||||||
|
|||||||
114
docs/site/concepts/configuration.md
Normal file
114
docs/site/concepts/configuration.md
Normal file
@@ -0,0 +1,114 @@
|
|||||||
|
# Configuration
|
||||||
|
|
||||||
|
## Introduction
|
||||||
|
|
||||||
|
When managing machine configuration this can be done through many possible ways.
|
||||||
|
Ranging from writing `nix` expression in a `flake.nix` file; placing `autoincluded` files into your machine directory; or configuring everything in a simple UI (upcomming).
|
||||||
|
|
||||||
|
clan currently offers the following methods to configure machines:
|
||||||
|
|
||||||
|
!!! Success "Recommended for nix people"
|
||||||
|
|
||||||
|
- flake.nix (i.e. via `buildClan`)
|
||||||
|
- `machine` argument
|
||||||
|
- `inventory` argument
|
||||||
|
|
||||||
|
- machines/`machine_name`/configuration.nix (`autoincluded` if it exists)
|
||||||
|
|
||||||
|
???+ Note "Used by CLI & UI"
|
||||||
|
|
||||||
|
- inventory.json
|
||||||
|
- machines/`machine_name`/hardware-configuration.nix (`autoincluded` if it exists)
|
||||||
|
|
||||||
|
|
||||||
|
!!! Warning "Deprecated"
|
||||||
|
|
||||||
|
machines/`machine_name`/settings.json
|
||||||
|
|
||||||
|
## BuildClan
|
||||||
|
|
||||||
|
The core function that produces a clan. It returns a set of consistent configurations for all machines with ready-to-use secrets, backups and other services.
|
||||||
|
|
||||||
|
### Inputs
|
||||||
|
|
||||||
|
`directory`
|
||||||
|
: The directory containing the machines subdirectory
|
||||||
|
|
||||||
|
`machines`
|
||||||
|
: Allows to include machine-specific modules i.e. machines.${name} = { ... }
|
||||||
|
|
||||||
|
`meta`
|
||||||
|
: An optional set
|
||||||
|
|
||||||
|
: `{ name :: string, icon :: string, description :: string }`
|
||||||
|
|
||||||
|
`inventory`
|
||||||
|
: Service set for easily configuring distributed services, such as backups
|
||||||
|
|
||||||
|
: For more details see [Inventory](#inventory)
|
||||||
|
|
||||||
|
`specialArgs`
|
||||||
|
: Extra arguments to pass to nixosSystem i.e. useful to make self available
|
||||||
|
|
||||||
|
`pkgsForSystem`
|
||||||
|
: A function that maps from architecture to pkgs, if specified this nixpkgs will be only imported once for each system.
|
||||||
|
This improves performance, but all nipxkgs.* options will be ignored.
|
||||||
|
`(string -> pkgs )`
|
||||||
|
|
||||||
|
## Inventory
|
||||||
|
|
||||||
|
`Inventory` is an abstract service layer for consistently configuring distributed services across machine boundaries.
|
||||||
|
|
||||||
|
The following is the specification of the inventory in `cuelang`
|
||||||
|
|
||||||
|
```cue
|
||||||
|
{
|
||||||
|
meta: {
|
||||||
|
// A name of the clan (primarily shown by the UI)
|
||||||
|
name: string
|
||||||
|
// A description of the clan
|
||||||
|
description?: string
|
||||||
|
// The icon path
|
||||||
|
icon?: string
|
||||||
|
}
|
||||||
|
|
||||||
|
// A map of services
|
||||||
|
services: [string]: [string]: {
|
||||||
|
// Required meta fields
|
||||||
|
meta: {
|
||||||
|
name: string,
|
||||||
|
icon?: string
|
||||||
|
description?: string,
|
||||||
|
},
|
||||||
|
// Machines are added via the avilable roles
|
||||||
|
// Membership depends only on this field
|
||||||
|
roles: [string]: {
|
||||||
|
machines: [...string],
|
||||||
|
tags: [...string],
|
||||||
|
}
|
||||||
|
machines?: {
|
||||||
|
[string]: {
|
||||||
|
config?: {
|
||||||
|
...
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
|
||||||
|
// Global Configuration for the service
|
||||||
|
// Applied to all machines.
|
||||||
|
config?: {
|
||||||
|
// Schema depends on the module.
|
||||||
|
// It declares the interface how the service can be configured.
|
||||||
|
...
|
||||||
|
}
|
||||||
|
}
|
||||||
|
// A map of machines, extends the machines of `buildClan`
|
||||||
|
machines: [string]: {
|
||||||
|
name: string,
|
||||||
|
description?: string,
|
||||||
|
icon?: string
|
||||||
|
tags: [...string]
|
||||||
|
system: string
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
60
flake.lock
generated
60
flake.lock
generated
@@ -7,11 +7,11 @@
|
|||||||
]
|
]
|
||||||
},
|
},
|
||||||
"locked": {
|
"locked": {
|
||||||
"lastModified": 1718846788,
|
"lastModified": 1720056646,
|
||||||
"narHash": "sha256-9dtXYtEkmXoUJV+PGLqscqF7qTn4AIhAKpFWRFU2NYs=",
|
"narHash": "sha256-BymcV4HWtx2VFuabDCM4/nEJcfivCx0S02wUCz11mAY=",
|
||||||
"owner": "nix-community",
|
"owner": "nix-community",
|
||||||
"repo": "disko",
|
"repo": "disko",
|
||||||
"rev": "e1174d991944a01eaaa04bc59c6281edca4c0e6e",
|
"rev": "64679cd7f318c9b6595902b47d4585b1d51d5f9e",
|
||||||
"type": "github"
|
"type": "github"
|
||||||
},
|
},
|
||||||
"original": {
|
"original": {
|
||||||
@@ -27,11 +27,11 @@
|
|||||||
]
|
]
|
||||||
},
|
},
|
||||||
"locked": {
|
"locked": {
|
||||||
"lastModified": 1717285511,
|
"lastModified": 1719994518,
|
||||||
"narHash": "sha256-iKzJcpdXih14qYVcZ9QC9XuZYnPc6T8YImb6dX166kw=",
|
"narHash": "sha256-pQMhCCHyQGRzdfAkdJ4cIWiw+JNuWsTX7f0ZYSyz0VY=",
|
||||||
"owner": "hercules-ci",
|
"owner": "hercules-ci",
|
||||||
"repo": "flake-parts",
|
"repo": "flake-parts",
|
||||||
"rev": "2a55567fcf15b1b1c7ed712a2c6fadaec7412ea8",
|
"rev": "9227223f6d922fee3c7b190b2cc238a99527bbb7",
|
||||||
"type": "github"
|
"type": "github"
|
||||||
},
|
},
|
||||||
"original": {
|
"original": {
|
||||||
@@ -40,29 +40,6 @@
|
|||||||
"type": "github"
|
"type": "github"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"nixos-generators": {
|
|
||||||
"inputs": {
|
|
||||||
"nixlib": [
|
|
||||||
"nixpkgs"
|
|
||||||
],
|
|
||||||
"nixpkgs": [
|
|
||||||
"nixpkgs"
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"locked": {
|
|
||||||
"lastModified": 1718025593,
|
|
||||||
"narHash": "sha256-WZ1gdKq/9u1Ns/oXuNsDm+W0salonVA0VY1amw8urJ4=",
|
|
||||||
"owner": "nix-community",
|
|
||||||
"repo": "nixos-generators",
|
|
||||||
"rev": "35c20ba421dfa5059e20e0ef2343c875372bdcf3",
|
|
||||||
"type": "github"
|
|
||||||
},
|
|
||||||
"original": {
|
|
||||||
"owner": "nix-community",
|
|
||||||
"repo": "nixos-generators",
|
|
||||||
"type": "github"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"nixos-images": {
|
"nixos-images": {
|
||||||
"inputs": {
|
"inputs": {
|
||||||
"nixos-stable": [],
|
"nixos-stable": [],
|
||||||
@@ -71,11 +48,11 @@
|
|||||||
]
|
]
|
||||||
},
|
},
|
||||||
"locked": {
|
"locked": {
|
||||||
"lastModified": 1718845599,
|
"lastModified": 1720055024,
|
||||||
"narHash": "sha256-HbQ0iKohKJC5grC95HNjLxGPdgsc/BJgoENDYNbzkLo=",
|
"narHash": "sha256-c5rsiI1R7tnCDpcgfsa7ouSdn6wpctbme9TUp53CFyU=",
|
||||||
"owner": "nix-community",
|
"owner": "nix-community",
|
||||||
"repo": "nixos-images",
|
"repo": "nixos-images",
|
||||||
"rev": "c1e6a5f7b08f1c9993de1cfc5f15f838bf783b88",
|
"rev": "f8650460d37d9d1820a93ebb7f0db5b6c3621946",
|
||||||
"type": "github"
|
"type": "github"
|
||||||
},
|
},
|
||||||
"original": {
|
"original": {
|
||||||
@@ -86,11 +63,11 @@
|
|||||||
},
|
},
|
||||||
"nixpkgs": {
|
"nixpkgs": {
|
||||||
"locked": {
|
"locked": {
|
||||||
"lastModified": 1719451888,
|
"lastModified": 1720340162,
|
||||||
"narHash": "sha256-Ky0sgEEJMcBmNEJztY6KcVn+6bq74EKM7pd1CR1wnPQ=",
|
"narHash": "sha256-iVLH0Ygtw/Iw9Q1cFFX7OhNnoPbc7/ZWW6J3c0zbiZw=",
|
||||||
"owner": "NixOS",
|
"owner": "NixOS",
|
||||||
"repo": "nixpkgs",
|
"repo": "nixpkgs",
|
||||||
"rev": "3664857c48feacb35770c00abfdc671e55849be5",
|
"rev": "60a94e515488e335bd5bce096431d490486915e3",
|
||||||
"type": "github"
|
"type": "github"
|
||||||
},
|
},
|
||||||
"original": {
|
"original": {
|
||||||
@@ -104,7 +81,6 @@
|
|||||||
"inputs": {
|
"inputs": {
|
||||||
"disko": "disko",
|
"disko": "disko",
|
||||||
"flake-parts": "flake-parts",
|
"flake-parts": "flake-parts",
|
||||||
"nixos-generators": "nixos-generators",
|
|
||||||
"nixos-images": "nixos-images",
|
"nixos-images": "nixos-images",
|
||||||
"nixpkgs": "nixpkgs",
|
"nixpkgs": "nixpkgs",
|
||||||
"sops-nix": "sops-nix",
|
"sops-nix": "sops-nix",
|
||||||
@@ -119,11 +95,11 @@
|
|||||||
"nixpkgs-stable": []
|
"nixpkgs-stable": []
|
||||||
},
|
},
|
||||||
"locked": {
|
"locked": {
|
||||||
"lastModified": 1719111739,
|
"lastModified": 1720321395,
|
||||||
"narHash": "sha256-kr2QzRrplzlCP87ddayCZQS+dhGW98kw2zy7+jUXtF4=",
|
"narHash": "sha256-kcI8q9Nh8/CSj0ygfWq1DLckHl8IHhFarL8ie6g7OEk=",
|
||||||
"owner": "Mic92",
|
"owner": "Mic92",
|
||||||
"repo": "sops-nix",
|
"repo": "sops-nix",
|
||||||
"rev": "5e2e9421e9ed2b918be0a441c4535cfa45e04811",
|
"rev": "c184aca4db5d71c3db0c8cbfcaaec337a5d065ea",
|
||||||
"type": "github"
|
"type": "github"
|
||||||
},
|
},
|
||||||
"original": {
|
"original": {
|
||||||
@@ -139,11 +115,11 @@
|
|||||||
]
|
]
|
||||||
},
|
},
|
||||||
"locked": {
|
"locked": {
|
||||||
"lastModified": 1718522839,
|
"lastModified": 1720436211,
|
||||||
"narHash": "sha256-ULzoKzEaBOiLRtjeY3YoGFJMwWSKRYOic6VNw2UyTls=",
|
"narHash": "sha256-/cKXod0oGLl+vH4bKBZnTV3qxrw4jgOLnyQ8KXey5J8=",
|
||||||
"owner": "numtide",
|
"owner": "numtide",
|
||||||
"repo": "treefmt-nix",
|
"repo": "treefmt-nix",
|
||||||
"rev": "68eb1dc333ce82d0ab0c0357363ea17c31ea1f81",
|
"rev": "6fc8bded78715cdd43a3278a14ded226eb3a239e",
|
||||||
"type": "github"
|
"type": "github"
|
||||||
},
|
},
|
||||||
"original": {
|
"original": {
|
||||||
|
|||||||
@@ -13,9 +13,6 @@
|
|||||||
sops-nix.url = "github:Mic92/sops-nix";
|
sops-nix.url = "github:Mic92/sops-nix";
|
||||||
sops-nix.inputs.nixpkgs.follows = "nixpkgs";
|
sops-nix.inputs.nixpkgs.follows = "nixpkgs";
|
||||||
sops-nix.inputs.nixpkgs-stable.follows = "";
|
sops-nix.inputs.nixpkgs-stable.follows = "";
|
||||||
nixos-generators.url = "github:nix-community/nixos-generators";
|
|
||||||
nixos-generators.inputs.nixpkgs.follows = "nixpkgs";
|
|
||||||
nixos-generators.inputs.nixlib.follows = "nixpkgs";
|
|
||||||
nixos-images.url = "github:nix-community/nixos-images";
|
nixos-images.url = "github:nix-community/nixos-images";
|
||||||
nixos-images.inputs.nixos-unstable.follows = "nixpkgs";
|
nixos-images.inputs.nixos-unstable.follows = "nixpkgs";
|
||||||
# unused input
|
# unused input
|
||||||
|
|||||||
@@ -1,4 +1,4 @@
|
|||||||
{ lib, inputs, ... }:
|
{ inputs, ... }:
|
||||||
{
|
{
|
||||||
imports = [ inputs.treefmt-nix.flakeModule ];
|
imports = [ inputs.treefmt-nix.flakeModule ];
|
||||||
perSystem =
|
perSystem =
|
||||||
@@ -8,43 +8,19 @@
|
|||||||
treefmt.programs.shellcheck.enable = true;
|
treefmt.programs.shellcheck.enable = true;
|
||||||
|
|
||||||
treefmt.programs.mypy.enable = true;
|
treefmt.programs.mypy.enable = true;
|
||||||
|
treefmt.programs.nixfmt.enable = true;
|
||||||
|
treefmt.programs.nixfmt.package = pkgs.nixfmt-rfc-style;
|
||||||
|
treefmt.programs.deadnix.enable = true;
|
||||||
|
|
||||||
treefmt.programs.mypy.directories = {
|
treefmt.programs.mypy.directories = {
|
||||||
"pkgs/clan-cli".extraPythonPackages = self'.packages.clan-cli.testDependencies;
|
"pkgs/clan-cli".extraPythonPackages = self'.packages.clan-cli.testDependencies;
|
||||||
"pkgs/clan-app".extraPythonPackages =
|
"pkgs/clan-app".extraPythonPackages =
|
||||||
# clan-app currently only exists on linux
|
# clan-app currently only exists on linux
|
||||||
(self'.packages.clan-app.externalTestDeps or [ ]) ++ self'.packages.clan-cli.testDependencies;
|
(self'.packages.clan-app.externalTestDeps or [ ]) ++ self'.packages.clan-cli.testDependencies;
|
||||||
};
|
};
|
||||||
|
treefmt.programs.ruff.check = true;
|
||||||
|
treefmt.programs.ruff.format = true;
|
||||||
|
|
||||||
treefmt.settings.formatter.nix = {
|
|
||||||
command = "sh";
|
|
||||||
options = [
|
|
||||||
"-eucx"
|
|
||||||
''
|
|
||||||
# First deadnix
|
|
||||||
${lib.getExe pkgs.deadnix} --edit "$@"
|
|
||||||
# Then nixpkgs-fmt
|
|
||||||
${lib.getExe pkgs.nixfmt-rfc-style} "$@"
|
|
||||||
''
|
|
||||||
"--" # this argument is ignored by bash
|
|
||||||
];
|
|
||||||
includes = [ "*.nix" ];
|
|
||||||
excludes = [
|
|
||||||
# Was copied from nixpkgs. Keep diff minimal to simplify upstreaming.
|
|
||||||
"pkgs/builders/script-writers.nix"
|
|
||||||
];
|
|
||||||
};
|
|
||||||
treefmt.settings.formatter.python = {
|
|
||||||
command = "sh";
|
|
||||||
options = [
|
|
||||||
"-eucx"
|
|
||||||
''
|
|
||||||
${lib.getExe pkgs.ruff} check --fix "$@"
|
|
||||||
${lib.getExe pkgs.ruff} format "$@"
|
|
||||||
''
|
|
||||||
"--" # this argument is ignored by bash
|
|
||||||
];
|
|
||||||
includes = [ "*.py" ];
|
|
||||||
};
|
|
||||||
# FIXME: currently broken in CI
|
# FIXME: currently broken in CI
|
||||||
#treefmt.settings.formatter.vale =
|
#treefmt.settings.formatter.vale =
|
||||||
# let
|
# let
|
||||||
|
|||||||
@@ -1,4 +1,7 @@
|
|||||||
{
|
{
|
||||||
|
"meta": {
|
||||||
|
"name": "Minimal inventory"
|
||||||
|
},
|
||||||
"machines": {
|
"machines": {
|
||||||
"minimal-inventory-machine": {
|
"minimal-inventory-machine": {
|
||||||
"name": "foo",
|
"name": "foo",
|
||||||
|
|||||||
@@ -152,6 +152,13 @@ let
|
|||||||
in
|
in
|
||||||
(machineImports settings)
|
(machineImports settings)
|
||||||
++ [
|
++ [
|
||||||
|
{
|
||||||
|
# Autoinclude configuration.nix and hardware-configuration.nix
|
||||||
|
imports = builtins.filter (p: builtins.pathExists p) [
|
||||||
|
"${directory}/machines/${name}/configuration.nix"
|
||||||
|
"${directory}/machines/${name}/hardware-configuration.nix"
|
||||||
|
];
|
||||||
|
}
|
||||||
settings
|
settings
|
||||||
clan-core.nixosModules.clanCore
|
clan-core.nixosModules.clanCore
|
||||||
extraConfig
|
extraConfig
|
||||||
|
|||||||
@@ -1,3 +1,4 @@
|
|||||||
|
# shellcheck shell=bash
|
||||||
source_up
|
source_up
|
||||||
|
|
||||||
watch_file flake-module.nix
|
watch_file flake-module.nix
|
||||||
|
|||||||
@@ -5,6 +5,7 @@ package schema
|
|||||||
description?: string,
|
description?: string,
|
||||||
icon?: string
|
icon?: string
|
||||||
tags: [...string]
|
tags: [...string]
|
||||||
|
system?: string
|
||||||
}
|
}
|
||||||
|
|
||||||
#role: string
|
#role: string
|
||||||
|
|||||||
@@ -1,5 +1,26 @@
|
|||||||
{ lib, ... }:
|
|
||||||
{
|
{
|
||||||
|
lib,
|
||||||
|
config,
|
||||||
|
pkgs,
|
||||||
|
...
|
||||||
|
}:
|
||||||
|
let
|
||||||
|
inherit (lib.types) submoduleWith;
|
||||||
|
submodule =
|
||||||
|
module:
|
||||||
|
submoduleWith {
|
||||||
|
specialArgs.pkgs = pkgs;
|
||||||
|
modules = [ module ];
|
||||||
|
};
|
||||||
|
in
|
||||||
|
{
|
||||||
|
imports = [
|
||||||
|
./public/in_repo.nix
|
||||||
|
# ./public/vm.nix
|
||||||
|
# ./secret/password-store.nix
|
||||||
|
./secret/sops.nix
|
||||||
|
# ./secret/vm.nix
|
||||||
|
];
|
||||||
options.clan.core.vars = lib.mkOption {
|
options.clan.core.vars = lib.mkOption {
|
||||||
visible = false;
|
visible = false;
|
||||||
description = ''
|
description = ''
|
||||||
@@ -11,6 +32,20 @@
|
|||||||
- generate secrets like private keys automatically when they are needed
|
- generate secrets like private keys automatically when they are needed
|
||||||
- output multiple values like private and public keys simultaneously
|
- output multiple values like private and public keys simultaneously
|
||||||
'';
|
'';
|
||||||
type = lib.types.submoduleWith { modules = [ ./interface.nix ]; };
|
type = submodule { imports = [ ./interface.nix ]; };
|
||||||
|
};
|
||||||
|
|
||||||
|
config.system.clan.deployment.data = {
|
||||||
|
vars = {
|
||||||
|
generators = lib.flip lib.mapAttrs config.clan.core.vars.generators (
|
||||||
|
_name: generator: {
|
||||||
|
inherit (generator) finalScript;
|
||||||
|
files = lib.flip lib.mapAttrs generator.files (_name: file: { inherit (file) secret; });
|
||||||
|
}
|
||||||
|
);
|
||||||
|
inherit (config.clan.core.vars.settings) secretUploadDirectory secretModule publicModule;
|
||||||
|
};
|
||||||
|
inherit (config.clan.networking) targetHost buildHost;
|
||||||
|
inherit (config.clan.deployment) requireExplicitUpdate;
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -54,21 +54,6 @@ in
|
|||||||
};
|
};
|
||||||
};
|
};
|
||||||
|
|
||||||
# Ensure that generators.imports works
|
|
||||||
# This allows importing generators from third party projects without providing
|
|
||||||
# them access to other settings.
|
|
||||||
test_generator_modules =
|
|
||||||
let
|
|
||||||
generator_module = {
|
|
||||||
my-generator.files.password = { };
|
|
||||||
};
|
|
||||||
config = eval { generators.imports = [ generator_module ]; };
|
|
||||||
in
|
|
||||||
{
|
|
||||||
expr = config.generators ? my-generator;
|
|
||||||
expected = true;
|
|
||||||
};
|
|
||||||
|
|
||||||
# script can be text
|
# script can be text
|
||||||
test_script_text =
|
test_script_text =
|
||||||
let
|
let
|
||||||
|
|||||||
@@ -1,8 +1,12 @@
|
|||||||
{ lib, ... }:
|
{
|
||||||
|
lib,
|
||||||
|
config,
|
||||||
|
pkgs,
|
||||||
|
...
|
||||||
|
}:
|
||||||
let
|
let
|
||||||
inherit (lib) mkOption;
|
inherit (lib) mkOption;
|
||||||
inherit (lib.types)
|
inherit (lib.types)
|
||||||
anything
|
|
||||||
attrsOf
|
attrsOf
|
||||||
bool
|
bool
|
||||||
either
|
either
|
||||||
@@ -14,30 +18,27 @@ let
|
|||||||
submoduleWith
|
submoduleWith
|
||||||
;
|
;
|
||||||
# the original types.submodule has strange behavior
|
# the original types.submodule has strange behavior
|
||||||
submodule = module: submoduleWith { modules = [ module ]; };
|
submodule =
|
||||||
|
module:
|
||||||
|
submoduleWith {
|
||||||
|
specialArgs.pkgs = pkgs;
|
||||||
|
modules = [ module ];
|
||||||
|
};
|
||||||
options = lib.mapAttrs (_: mkOption);
|
options = lib.mapAttrs (_: mkOption);
|
||||||
subOptions = opts: submodule { options = options opts; };
|
|
||||||
in
|
in
|
||||||
{
|
{
|
||||||
options = options {
|
options = {
|
||||||
settings = {
|
settings = import ./settings-opts.nix { inherit lib; };
|
||||||
|
generators = lib.mkOption {
|
||||||
description = ''
|
description = ''
|
||||||
Settings for the generated variables.
|
A set of generators that can be used to generate files.
|
||||||
|
Generators are scripts that produce files based on the values of other generators and user input.
|
||||||
|
Each generator is expected to produce a set of files under a directory.
|
||||||
'';
|
'';
|
||||||
type = submodule {
|
default = { };
|
||||||
freeformType = anything;
|
type = attrsOf (submodule {
|
||||||
imports = [ ./settings.nix ];
|
imports = [ ./generator.nix ];
|
||||||
};
|
options = options {
|
||||||
};
|
|
||||||
generators = {
|
|
||||||
default = {
|
|
||||||
imports = [
|
|
||||||
# implementation of the generator
|
|
||||||
./generator.nix
|
|
||||||
];
|
|
||||||
};
|
|
||||||
type = submodule {
|
|
||||||
freeformType = attrsOf (subOptions {
|
|
||||||
dependencies = {
|
dependencies = {
|
||||||
description = ''
|
description = ''
|
||||||
A list of other generators that this generator depends on.
|
A list of other generators that this generator depends on.
|
||||||
@@ -52,32 +53,45 @@ in
|
|||||||
A set of files to generate.
|
A set of files to generate.
|
||||||
The generator 'script' is expected to produce exactly these files under $out.
|
The generator 'script' is expected to produce exactly these files under $out.
|
||||||
'';
|
'';
|
||||||
type = attrsOf (subOptions {
|
type = attrsOf (
|
||||||
secret = {
|
submodule (file: {
|
||||||
description = ''
|
imports = [ config.settings.fileModule ];
|
||||||
Whether the file should be treated as a secret.
|
options = options {
|
||||||
'';
|
name = {
|
||||||
type = bool;
|
type = lib.types.str;
|
||||||
default = true;
|
description = ''
|
||||||
};
|
name of the public fact
|
||||||
path = {
|
'';
|
||||||
description = ''
|
readOnly = true;
|
||||||
The path to the file containing the content of the generated value.
|
default = file.config._module.args.name;
|
||||||
This will be set automatically
|
};
|
||||||
'';
|
secret = {
|
||||||
type = str;
|
description = ''
|
||||||
readOnly = true;
|
Whether the file should be treated as a secret.
|
||||||
};
|
'';
|
||||||
value = {
|
type = bool;
|
||||||
description = ''
|
default = true;
|
||||||
The content of the generated value.
|
};
|
||||||
Only available if the file is not secret.
|
path = {
|
||||||
'';
|
description = ''
|
||||||
type = str;
|
The path to the file containing the content of the generated value.
|
||||||
default = throw "Cannot access value of secret file";
|
This will be set automatically
|
||||||
defaultText = "Throws error because the value of a secret file is not accessible";
|
'';
|
||||||
};
|
type = str;
|
||||||
});
|
readOnly = true;
|
||||||
|
};
|
||||||
|
value = {
|
||||||
|
description = ''
|
||||||
|
The content of the generated value.
|
||||||
|
Only available if the file is not secret.
|
||||||
|
'';
|
||||||
|
type = str;
|
||||||
|
default = throw "Cannot access value of secret file";
|
||||||
|
defaultText = "Throws error because the value of a secret file is not accessible";
|
||||||
|
};
|
||||||
|
};
|
||||||
|
})
|
||||||
|
);
|
||||||
};
|
};
|
||||||
prompts = {
|
prompts = {
|
||||||
description = ''
|
description = ''
|
||||||
@@ -85,28 +99,30 @@ in
|
|||||||
Prompts are available to the generator script as files.
|
Prompts are available to the generator script as files.
|
||||||
For example, a prompt named 'prompt1' will be available via $prompts/prompt1
|
For example, a prompt named 'prompt1' will be available via $prompts/prompt1
|
||||||
'';
|
'';
|
||||||
type = attrsOf (subOptions {
|
type = attrsOf (submodule {
|
||||||
description = {
|
options = {
|
||||||
description = ''
|
description = {
|
||||||
The description of the prompted value
|
description = ''
|
||||||
'';
|
The description of the prompted value
|
||||||
type = str;
|
'';
|
||||||
example = "SSH private key";
|
type = str;
|
||||||
};
|
example = "SSH private key";
|
||||||
type = {
|
};
|
||||||
description = ''
|
type = {
|
||||||
The input type of the prompt.
|
description = ''
|
||||||
The following types are available:
|
The input type of the prompt.
|
||||||
- hidden: A hidden text (e.g. password)
|
The following types are available:
|
||||||
- line: A single line of text
|
- hidden: A hidden text (e.g. password)
|
||||||
- multiline: A multiline text
|
- line: A single line of text
|
||||||
'';
|
- multiline: A multiline text
|
||||||
type = enum [
|
'';
|
||||||
"hidden"
|
type = enum [
|
||||||
"line"
|
"hidden"
|
||||||
"multiline"
|
"line"
|
||||||
];
|
"multiline"
|
||||||
default = "line";
|
];
|
||||||
|
default = "line";
|
||||||
|
};
|
||||||
};
|
};
|
||||||
});
|
});
|
||||||
};
|
};
|
||||||
@@ -140,8 +156,8 @@ in
|
|||||||
internal = true;
|
internal = true;
|
||||||
visible = false;
|
visible = false;
|
||||||
};
|
};
|
||||||
});
|
};
|
||||||
};
|
});
|
||||||
};
|
};
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|||||||
12
nixosModules/clanCore/vars/public/in_repo.nix
Normal file
12
nixosModules/clanCore/vars/public/in_repo.nix
Normal file
@@ -0,0 +1,12 @@
|
|||||||
|
{ config, lib, ... }:
|
||||||
|
{
|
||||||
|
config.clan.core.vars.settings =
|
||||||
|
lib.mkIf (config.clan.core.vars.settings.publicStore == "in_repo")
|
||||||
|
{
|
||||||
|
publicModule = "clan_cli.vars.public_modules.in_repo";
|
||||||
|
fileModule = file: {
|
||||||
|
path =
|
||||||
|
config.clan.core.clanDir + "/machines/${config.clan.core.machineName}/vars/${file.config.name}";
|
||||||
|
};
|
||||||
|
};
|
||||||
|
}
|
||||||
61
nixosModules/clanCore/vars/secret/sops.nix
Normal file
61
nixosModules/clanCore/vars/secret/sops.nix
Normal file
@@ -0,0 +1,61 @@
|
|||||||
|
{
|
||||||
|
config,
|
||||||
|
lib,
|
||||||
|
pkgs,
|
||||||
|
...
|
||||||
|
}:
|
||||||
|
let
|
||||||
|
secretsDir = config.clan.core.clanDir + "/sops/secrets";
|
||||||
|
groupsDir = config.clan.core.clanDir + "/sops/groups";
|
||||||
|
|
||||||
|
# My symlink is in the nixos module detected as a directory also it works in the repl. Is this because of pure evaluation?
|
||||||
|
containsSymlink =
|
||||||
|
path:
|
||||||
|
builtins.pathExists path
|
||||||
|
&& (builtins.readFileType path == "directory" || builtins.readFileType path == "symlink");
|
||||||
|
|
||||||
|
containsMachine =
|
||||||
|
parent: name: type:
|
||||||
|
type == "directory" && containsSymlink "${parent}/${name}/machines/${config.clan.core.machineName}";
|
||||||
|
|
||||||
|
containsMachineOrGroups =
|
||||||
|
name: type:
|
||||||
|
(containsMachine secretsDir name type)
|
||||||
|
|| lib.any (
|
||||||
|
group: type == "directory" && containsSymlink "${secretsDir}/${name}/groups/${group}"
|
||||||
|
) groups;
|
||||||
|
|
||||||
|
filterDir =
|
||||||
|
filter: dir:
|
||||||
|
lib.optionalAttrs (builtins.pathExists dir) (lib.filterAttrs filter (builtins.readDir dir));
|
||||||
|
|
||||||
|
groups = builtins.attrNames (filterDir (containsMachine groupsDir) groupsDir);
|
||||||
|
secrets = filterDir containsMachineOrGroups secretsDir;
|
||||||
|
in
|
||||||
|
{
|
||||||
|
config.clan.core.vars.settings = lib.mkIf (config.clan.core.vars.settings.secretStore == "sops") {
|
||||||
|
# Before we generate a secret we cannot know the path yet, so we need to set it to an empty string
|
||||||
|
fileModule = file: {
|
||||||
|
path =
|
||||||
|
lib.mkIf file.secret
|
||||||
|
config.sops.secrets.${"${config.clan.core.machineName}-${file.config.name}"}.path
|
||||||
|
or "/no-such-path";
|
||||||
|
};
|
||||||
|
secretModule = "clan_cli.vars.secret_modules.sops";
|
||||||
|
secretUploadDirectory = lib.mkDefault "/var/lib/sops-nix";
|
||||||
|
};
|
||||||
|
|
||||||
|
config.sops = lib.mkIf (config.clan.core.vars.settings.secretStore == "sops") {
|
||||||
|
secrets = builtins.mapAttrs (name: _: {
|
||||||
|
sopsFile = config.clan.core.clanDir + "/sops/secrets/${name}/secret";
|
||||||
|
format = "binary";
|
||||||
|
}) secrets;
|
||||||
|
# To get proper error messages about missing secrets we need a dummy secret file that is always present
|
||||||
|
defaultSopsFile = lib.mkIf config.sops.validateSopsFiles (
|
||||||
|
lib.mkDefault (builtins.toString (pkgs.writeText "dummy.yaml" ""))
|
||||||
|
);
|
||||||
|
age.keyFile = lib.mkIf (builtins.pathExists (
|
||||||
|
config.clan.core.clanDir + "/sops/secrets/${config.clan.core.machineName}-age.key/secret"
|
||||||
|
)) (lib.mkDefault "/var/lib/sops-nix/key.txt");
|
||||||
|
};
|
||||||
|
}
|
||||||
71
nixosModules/clanCore/vars/settings-opts.nix
Normal file
71
nixosModules/clanCore/vars/settings-opts.nix
Normal file
@@ -0,0 +1,71 @@
|
|||||||
|
{ lib, ... }:
|
||||||
|
{
|
||||||
|
secretStore = lib.mkOption {
|
||||||
|
type = lib.types.enum [
|
||||||
|
"sops"
|
||||||
|
"password-store"
|
||||||
|
"vm"
|
||||||
|
"custom"
|
||||||
|
];
|
||||||
|
default = "sops";
|
||||||
|
description = ''
|
||||||
|
method to store secret facts
|
||||||
|
custom can be used to define a custom secret fact store.
|
||||||
|
'';
|
||||||
|
};
|
||||||
|
|
||||||
|
secretModule = lib.mkOption {
|
||||||
|
type = lib.types.str;
|
||||||
|
internal = true;
|
||||||
|
description = ''
|
||||||
|
the python import path to the secret module
|
||||||
|
'';
|
||||||
|
};
|
||||||
|
|
||||||
|
secretUploadDirectory = lib.mkOption {
|
||||||
|
type = lib.types.nullOr lib.types.path;
|
||||||
|
default = null;
|
||||||
|
description = ''
|
||||||
|
The directory where secrets are uploaded into, This is backend specific.
|
||||||
|
'';
|
||||||
|
};
|
||||||
|
|
||||||
|
fileModule = lib.mkOption {
|
||||||
|
type = lib.types.deferredModule;
|
||||||
|
internal = true;
|
||||||
|
description = ''
|
||||||
|
A module to be imported in every vars.files.<name> submodule.
|
||||||
|
Used by backends to define the `path` attribute.
|
||||||
|
'';
|
||||||
|
default = { };
|
||||||
|
};
|
||||||
|
|
||||||
|
publicStore = lib.mkOption {
|
||||||
|
type = lib.types.enum [
|
||||||
|
"in_repo"
|
||||||
|
"vm"
|
||||||
|
"custom"
|
||||||
|
];
|
||||||
|
default = "in_repo";
|
||||||
|
description = ''
|
||||||
|
method to store public facts.
|
||||||
|
custom can be used to define a custom public fact store.
|
||||||
|
'';
|
||||||
|
};
|
||||||
|
|
||||||
|
publicModule = lib.mkOption {
|
||||||
|
type = lib.types.str;
|
||||||
|
internal = true;
|
||||||
|
description = ''
|
||||||
|
the python import path to the public module
|
||||||
|
'';
|
||||||
|
};
|
||||||
|
|
||||||
|
publicDirectory = lib.mkOption {
|
||||||
|
type = lib.types.nullOr lib.types.path;
|
||||||
|
default = null;
|
||||||
|
description = ''
|
||||||
|
The directory where public facts are stored.
|
||||||
|
'';
|
||||||
|
};
|
||||||
|
}
|
||||||
@@ -1,72 +0,0 @@
|
|||||||
{ lib, ... }:
|
|
||||||
{
|
|
||||||
options = {
|
|
||||||
secretStore = lib.mkOption {
|
|
||||||
type = lib.types.enum [
|
|
||||||
"sops"
|
|
||||||
"password-store"
|
|
||||||
"vm"
|
|
||||||
"custom"
|
|
||||||
];
|
|
||||||
default = "sops";
|
|
||||||
description = ''
|
|
||||||
method to store secret facts
|
|
||||||
custom can be used to define a custom secret fact store.
|
|
||||||
'';
|
|
||||||
};
|
|
||||||
|
|
||||||
secretModule = lib.mkOption {
|
|
||||||
type = lib.types.str;
|
|
||||||
internal = true;
|
|
||||||
description = ''
|
|
||||||
the python import path to the secret module
|
|
||||||
'';
|
|
||||||
};
|
|
||||||
|
|
||||||
secretUploadDirectory = lib.mkOption {
|
|
||||||
type = lib.types.nullOr lib.types.path;
|
|
||||||
default = null;
|
|
||||||
description = ''
|
|
||||||
The directory where secrets are uploaded into, This is backend specific.
|
|
||||||
'';
|
|
||||||
};
|
|
||||||
|
|
||||||
secretPathFunction = lib.mkOption {
|
|
||||||
type = lib.types.raw;
|
|
||||||
description = ''
|
|
||||||
The function to use to generate the path for a secret.
|
|
||||||
The default function will use the path attribute of the secret.
|
|
||||||
The function will be called with the secret submodule as an argument.
|
|
||||||
'';
|
|
||||||
};
|
|
||||||
|
|
||||||
publicStore = lib.mkOption {
|
|
||||||
type = lib.types.enum [
|
|
||||||
"in_repo"
|
|
||||||
"vm"
|
|
||||||
"custom"
|
|
||||||
];
|
|
||||||
default = "in_repo";
|
|
||||||
description = ''
|
|
||||||
method to store public facts.
|
|
||||||
custom can be used to define a custom public fact store.
|
|
||||||
'';
|
|
||||||
};
|
|
||||||
|
|
||||||
publicModule = lib.mkOption {
|
|
||||||
type = lib.types.str;
|
|
||||||
internal = true;
|
|
||||||
description = ''
|
|
||||||
the python import path to the public module
|
|
||||||
'';
|
|
||||||
};
|
|
||||||
|
|
||||||
publicDirectory = lib.mkOption {
|
|
||||||
type = lib.types.nullOr lib.types.path;
|
|
||||||
default = null;
|
|
||||||
description = ''
|
|
||||||
The directory where public facts are stored.
|
|
||||||
'';
|
|
||||||
};
|
|
||||||
};
|
|
||||||
}
|
|
||||||
@@ -27,107 +27,127 @@ rec {
|
|||||||
# Examples:
|
# Examples:
|
||||||
# writeBash = makeScriptWriter { interpreter = "${pkgs.bash}/bin/bash"; }
|
# writeBash = makeScriptWriter { interpreter = "${pkgs.bash}/bin/bash"; }
|
||||||
# makeScriptWriter { interpreter = "${pkgs.dash}/bin/dash"; } "hello" "echo hello world"
|
# makeScriptWriter { interpreter = "${pkgs.dash}/bin/dash"; } "hello" "echo hello world"
|
||||||
makeScriptWriter = { interpreter, check ? "", makeWrapperArgs ? [], }: nameOrPath: content:
|
makeScriptWriter =
|
||||||
assert lib.or (types.path.check nameOrPath) (builtins.match "([0-9A-Za-z._])[0-9A-Za-z._-]*" nameOrPath != null);
|
{
|
||||||
|
interpreter,
|
||||||
|
check ? "",
|
||||||
|
makeWrapperArgs ? [ ],
|
||||||
|
}:
|
||||||
|
nameOrPath: content:
|
||||||
|
assert lib.or (types.path.check nameOrPath) (
|
||||||
|
builtins.match "([0-9A-Za-z._])[0-9A-Za-z._-]*" nameOrPath != null
|
||||||
|
);
|
||||||
assert lib.or (types.path.check content) (types.str.check content);
|
assert lib.or (types.path.check content) (types.str.check content);
|
||||||
let
|
let
|
||||||
name = last (builtins.split "/" nameOrPath);
|
name = last (builtins.split "/" nameOrPath);
|
||||||
in
|
in
|
||||||
|
|
||||||
pkgs.runCommandLocal name (
|
pkgs.runCommandLocal name
|
||||||
{
|
(
|
||||||
inherit makeWrapperArgs;
|
{
|
||||||
nativeBuildInputs = [
|
inherit makeWrapperArgs;
|
||||||
makeWrapper
|
nativeBuildInputs = [ makeWrapper ];
|
||||||
];
|
|
||||||
}
|
|
||||||
// lib.optionalAttrs (nameOrPath == "/bin/${name}") {
|
|
||||||
meta.mainProgram = name;
|
|
||||||
}
|
|
||||||
// (
|
|
||||||
if (types.str.check content) then {
|
|
||||||
inherit content interpreter;
|
|
||||||
passAsFile = [ "content" ];
|
|
||||||
} else {
|
|
||||||
inherit interpreter;
|
|
||||||
contentPath = content;
|
|
||||||
}
|
}
|
||||||
|
// lib.optionalAttrs (nameOrPath == "/bin/${name}") { meta.mainProgram = name; }
|
||||||
|
// (
|
||||||
|
if (types.str.check content) then
|
||||||
|
{
|
||||||
|
inherit content interpreter;
|
||||||
|
passAsFile = [ "content" ];
|
||||||
|
}
|
||||||
|
else
|
||||||
|
{
|
||||||
|
inherit interpreter;
|
||||||
|
contentPath = content;
|
||||||
|
}
|
||||||
|
)
|
||||||
)
|
)
|
||||||
)
|
''
|
||||||
''
|
# On darwin a script cannot be used as an interpreter in a shebang but
|
||||||
# On darwin a script cannot be used as an interpreter in a shebang but
|
# there doesn't seem to be a limit to the size of shebang and multiple
|
||||||
# there doesn't seem to be a limit to the size of shebang and multiple
|
# arguments to the interpreter are allowed.
|
||||||
# arguments to the interpreter are allowed.
|
if [[ -n "${toString pkgs.stdenvNoCC.isDarwin}" ]] && isScript $interpreter
|
||||||
if [[ -n "${toString pkgs.stdenvNoCC.isDarwin}" ]] && isScript $interpreter
|
|
||||||
then
|
|
||||||
wrapperInterpreterLine=$(head -1 "$interpreter" | tail -c+3)
|
|
||||||
# Get first word from the line (note: xargs echo remove leading spaces)
|
|
||||||
wrapperInterpreter=$(echo "$wrapperInterpreterLine" | xargs echo | cut -d " " -f1)
|
|
||||||
|
|
||||||
if isScript $wrapperInterpreter
|
|
||||||
then
|
then
|
||||||
echo "error: passed interpreter ($interpreter) is a script which has another script ($wrapperInterpreter) as an interpreter, which is not supported."
|
wrapperInterpreterLine=$(head -1 "$interpreter" | tail -c+3)
|
||||||
exit 1
|
# Get first word from the line (note: xargs echo remove leading spaces)
|
||||||
|
wrapperInterpreter=$(echo "$wrapperInterpreterLine" | xargs echo | cut -d " " -f1)
|
||||||
|
|
||||||
|
if isScript $wrapperInterpreter
|
||||||
|
then
|
||||||
|
echo "error: passed interpreter ($interpreter) is a script which has another script ($wrapperInterpreter) as an interpreter, which is not supported."
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
# This should work as long as wrapperInterpreter is a shell, which is
|
||||||
|
# the case for programs wrapped with makeWrapper, like
|
||||||
|
# python3.withPackages etc.
|
||||||
|
interpreterLine="$wrapperInterpreterLine $interpreter"
|
||||||
|
else
|
||||||
|
interpreterLine=$interpreter
|
||||||
fi
|
fi
|
||||||
|
|
||||||
# This should work as long as wrapperInterpreter is a shell, which is
|
echo "#! $interpreterLine" > $out
|
||||||
# the case for programs wrapped with makeWrapper, like
|
cat "$contentPath" >> $out
|
||||||
# python3.withPackages etc.
|
${optionalString (check != "") ''
|
||||||
interpreterLine="$wrapperInterpreterLine $interpreter"
|
${check} $out
|
||||||
else
|
''}
|
||||||
interpreterLine=$interpreter
|
chmod +x $out
|
||||||
fi
|
|
||||||
|
|
||||||
echo "#! $interpreterLine" > $out
|
# Relocate executable if path was specified instead of name.
|
||||||
cat "$contentPath" >> $out
|
# Only in this case wrapProgram is applied, as it wouldn't work with a
|
||||||
${optionalString (check != "") ''
|
# single executable file under $out.
|
||||||
${check} $out
|
${optionalString (types.path.check nameOrPath) ''
|
||||||
''}
|
mv $out tmp
|
||||||
chmod +x $out
|
mkdir -p $out/$(dirname "${nameOrPath}")
|
||||||
|
mv tmp $out/${nameOrPath}
|
||||||
# Relocate executable if path was specified instead of name.
|
wrapProgram $out/${nameOrPath} ''${makeWrapperArgs[@]}
|
||||||
# Only in this case wrapProgram is applied, as it wouldn't work with a
|
''}
|
||||||
# single executable file under $out.
|
'';
|
||||||
${optionalString (types.path.check nameOrPath) ''
|
|
||||||
mv $out tmp
|
|
||||||
mkdir -p $out/$(dirname "${nameOrPath}")
|
|
||||||
mv tmp $out/${nameOrPath}
|
|
||||||
wrapProgram $out/${nameOrPath} ''${makeWrapperArgs[@]}
|
|
||||||
''}
|
|
||||||
'';
|
|
||||||
|
|
||||||
# Base implementation for compiled executables.
|
# Base implementation for compiled executables.
|
||||||
# Takes a compile script, which in turn takes the name as an argument.
|
# Takes a compile script, which in turn takes the name as an argument.
|
||||||
#
|
#
|
||||||
# Examples:
|
# Examples:
|
||||||
# writeSimpleC = makeBinWriter { compileScript = name: "gcc -o $out $contentPath"; }
|
# writeSimpleC = makeBinWriter { compileScript = name: "gcc -o $out $contentPath"; }
|
||||||
makeBinWriter = { compileScript, strip ? true }: nameOrPath: content:
|
makeBinWriter =
|
||||||
assert lib.or (types.path.check nameOrPath) (builtins.match "([0-9A-Za-z._])[0-9A-Za-z._-]*" nameOrPath != null);
|
{
|
||||||
|
compileScript,
|
||||||
|
strip ? true,
|
||||||
|
}:
|
||||||
|
nameOrPath: content:
|
||||||
|
assert lib.or (types.path.check nameOrPath) (
|
||||||
|
builtins.match "([0-9A-Za-z._])[0-9A-Za-z._-]*" nameOrPath != null
|
||||||
|
);
|
||||||
assert lib.or (types.path.check content) (types.str.check content);
|
assert lib.or (types.path.check content) (types.str.check content);
|
||||||
let
|
let
|
||||||
name = last (builtins.split "/" nameOrPath);
|
name = last (builtins.split "/" nameOrPath);
|
||||||
in
|
in
|
||||||
pkgs.runCommand name ((if (types.str.check content) then {
|
pkgs.runCommand name
|
||||||
inherit content;
|
(
|
||||||
passAsFile = [ "content" ];
|
(
|
||||||
} else {
|
if (types.str.check content) then
|
||||||
contentPath = content;
|
{
|
||||||
}) // lib.optionalAttrs (nameOrPath == "/bin/${name}") {
|
inherit content;
|
||||||
meta.mainProgram = name;
|
passAsFile = [ "content" ];
|
||||||
}) ''
|
}
|
||||||
${compileScript}
|
else
|
||||||
${lib.optionalString strip
|
{ contentPath = content; }
|
||||||
"${lib.getBin buildPackages.bintools-unwrapped}/bin/${buildPackages.bintools-unwrapped.targetPrefix}strip -S $out"}
|
)
|
||||||
# Sometimes binaries produced for darwin (e. g. by GHC) won't be valid
|
// lib.optionalAttrs (nameOrPath == "/bin/${name}") { meta.mainProgram = name; }
|
||||||
# mach-o executables from the get-go, but need to be corrected somehow
|
)
|
||||||
# which is done by fixupPhase.
|
''
|
||||||
${lib.optionalString pkgs.stdenvNoCC.hostPlatform.isDarwin "fixupPhase"}
|
${compileScript}
|
||||||
${optionalString (types.path.check nameOrPath) ''
|
${lib.optionalString strip "${lib.getBin buildPackages.bintools-unwrapped}/bin/${buildPackages.bintools-unwrapped.targetPrefix}strip -S $out"}
|
||||||
mv $out tmp
|
# Sometimes binaries produced for darwin (e. g. by GHC) won't be valid
|
||||||
mkdir -p $out/$(dirname "${nameOrPath}")
|
# mach-o executables from the get-go, but need to be corrected somehow
|
||||||
mv tmp $out/${nameOrPath}
|
# which is done by fixupPhase.
|
||||||
''}
|
${lib.optionalString pkgs.stdenvNoCC.hostPlatform.isDarwin "fixupPhase"}
|
||||||
'';
|
${optionalString (types.path.check nameOrPath) ''
|
||||||
|
mv $out tmp
|
||||||
|
mkdir -p $out/$(dirname "${nameOrPath}")
|
||||||
|
mv tmp $out/${nameOrPath}
|
||||||
|
''}
|
||||||
|
'';
|
||||||
|
|
||||||
# Like writeScript but the first line is a shebang to bash
|
# Like writeScript but the first line is a shebang to bash
|
||||||
#
|
#
|
||||||
@@ -135,13 +155,10 @@ rec {
|
|||||||
# writeBash "example" ''
|
# writeBash "example" ''
|
||||||
# echo hello world
|
# echo hello world
|
||||||
# ''
|
# ''
|
||||||
writeBash = makeScriptWriter {
|
writeBash = makeScriptWriter { interpreter = "${lib.getExe pkgs.bash}"; };
|
||||||
interpreter = "${lib.getExe pkgs.bash}";
|
|
||||||
};
|
|
||||||
|
|
||||||
# Like writeScriptBin but the first line is a shebang to bash
|
# Like writeScriptBin but the first line is a shebang to bash
|
||||||
writeBashBin = name:
|
writeBashBin = name: writeBash "/bin/${name}";
|
||||||
writeBash "/bin/${name}";
|
|
||||||
|
|
||||||
# Like writeScript but the first line is a shebang to dash
|
# Like writeScript but the first line is a shebang to dash
|
||||||
#
|
#
|
||||||
@@ -149,13 +166,10 @@ rec {
|
|||||||
# writeDash "example" ''
|
# writeDash "example" ''
|
||||||
# echo hello world
|
# echo hello world
|
||||||
# ''
|
# ''
|
||||||
writeDash = makeScriptWriter {
|
writeDash = makeScriptWriter { interpreter = "${lib.getExe pkgs.dash}"; };
|
||||||
interpreter = "${lib.getExe pkgs.dash}";
|
|
||||||
};
|
|
||||||
|
|
||||||
# Like writeScriptBin but the first line is a shebang to dash
|
# Like writeScriptBin but the first line is a shebang to dash
|
||||||
writeDashBin = name:
|
writeDashBin = name: writeDash "/bin/${name}";
|
||||||
writeDash "/bin/${name}";
|
|
||||||
|
|
||||||
# Like writeScript but the first line is a shebang to fish
|
# Like writeScript but the first line is a shebang to fish
|
||||||
#
|
#
|
||||||
@@ -165,12 +179,11 @@ rec {
|
|||||||
# ''
|
# ''
|
||||||
writeFish = makeScriptWriter {
|
writeFish = makeScriptWriter {
|
||||||
interpreter = "${lib.getExe pkgs.fish} --no-config";
|
interpreter = "${lib.getExe pkgs.fish} --no-config";
|
||||||
check = "${lib.getExe pkgs.fish} --no-config --no-execute"; # syntax check only
|
check = "${lib.getExe pkgs.fish} --no-config --no-execute"; # syntax check only
|
||||||
};
|
};
|
||||||
|
|
||||||
# Like writeScriptBin but the first line is a shebang to fish
|
# Like writeScriptBin but the first line is a shebang to fish
|
||||||
writeFishBin = name:
|
writeFishBin = name: writeFish "/bin/${name}";
|
||||||
writeFish "/bin/${name}";
|
|
||||||
|
|
||||||
# writeHaskell takes a name, an attrset with libraries and haskell version (both optional)
|
# writeHaskell takes a name, an attrset with libraries and haskell version (both optional)
|
||||||
# and some haskell source code and returns an executable.
|
# and some haskell source code and returns an executable.
|
||||||
@@ -181,29 +194,31 @@ rec {
|
|||||||
#
|
#
|
||||||
# main = launchMissiles
|
# main = launchMissiles
|
||||||
# '';
|
# '';
|
||||||
writeHaskell = name: {
|
writeHaskell =
|
||||||
libraries ? [],
|
name:
|
||||||
ghc ? pkgs.ghc,
|
{
|
||||||
ghcArgs ? [],
|
libraries ? [ ],
|
||||||
threadedRuntime ? true,
|
ghc ? pkgs.ghc,
|
||||||
strip ? true
|
ghcArgs ? [ ],
|
||||||
}:
|
threadedRuntime ? true,
|
||||||
|
strip ? true,
|
||||||
|
}:
|
||||||
let
|
let
|
||||||
appendIfNotSet = el: list: if elem el list then list else list ++ [ el ];
|
appendIfNotSet = el: list: if elem el list then list else list ++ [ el ];
|
||||||
ghcArgs' = if threadedRuntime then appendIfNotSet "-threaded" ghcArgs else ghcArgs;
|
ghcArgs' = if threadedRuntime then appendIfNotSet "-threaded" ghcArgs else ghcArgs;
|
||||||
|
|
||||||
in makeBinWriter {
|
in
|
||||||
|
makeBinWriter {
|
||||||
compileScript = ''
|
compileScript = ''
|
||||||
cp $contentPath tmp.hs
|
cp $contentPath tmp.hs
|
||||||
${(ghc.withPackages (_: libraries ))}/bin/ghc ${lib.escapeShellArgs ghcArgs'} tmp.hs
|
${(ghc.withPackages (_: libraries))}/bin/ghc ${lib.escapeShellArgs ghcArgs'} tmp.hs
|
||||||
mv tmp $out
|
mv tmp $out
|
||||||
'';
|
'';
|
||||||
inherit strip;
|
inherit strip;
|
||||||
} name;
|
} name;
|
||||||
|
|
||||||
# writeHaskellBin takes the same arguments as writeHaskell but outputs a directory (like writeScriptBin)
|
# writeHaskellBin takes the same arguments as writeHaskell but outputs a directory (like writeScriptBin)
|
||||||
writeHaskellBin = name:
|
writeHaskellBin = name: writeHaskell "/bin/${name}";
|
||||||
writeHaskell "/bin/${name}";
|
|
||||||
|
|
||||||
# Like writeScript but the first line is a shebang to nu
|
# Like writeScript but the first line is a shebang to nu
|
||||||
#
|
#
|
||||||
@@ -211,30 +226,30 @@ rec {
|
|||||||
# writeNu "example" ''
|
# writeNu "example" ''
|
||||||
# echo hello world
|
# echo hello world
|
||||||
# ''
|
# ''
|
||||||
writeNu = makeScriptWriter {
|
writeNu = makeScriptWriter { interpreter = "${lib.getExe pkgs.nushell} --no-config-file"; };
|
||||||
interpreter = "${lib.getExe pkgs.nushell} --no-config-file";
|
|
||||||
};
|
|
||||||
|
|
||||||
# Like writeScriptBin but the first line is a shebang to nu
|
# Like writeScriptBin but the first line is a shebang to nu
|
||||||
writeNuBin = name:
|
writeNuBin = name: writeNu "/bin/${name}";
|
||||||
writeNu "/bin/${name}";
|
|
||||||
|
|
||||||
# makeRubyWriter takes ruby and compatible rubyPackages and produces ruby script writer,
|
# makeRubyWriter takes ruby and compatible rubyPackages and produces ruby script writer,
|
||||||
# If any libraries are specified, ruby.withPackages is used as interpreter, otherwise the "bare" ruby is used.
|
# If any libraries are specified, ruby.withPackages is used as interpreter, otherwise the "bare" ruby is used.
|
||||||
makeRubyWriter = ruby: rubyPackages: buildRubyPackages: name: { libraries ? [], ... } @ args:
|
makeRubyWriter =
|
||||||
makeScriptWriter (
|
ruby: _rubyPackages: _buildRubyPackages: name:
|
||||||
(builtins.removeAttrs args ["libraries"])
|
{
|
||||||
// {
|
libraries ? [ ],
|
||||||
interpreter =
|
...
|
||||||
if libraries == []
|
}@args:
|
||||||
then "${ruby}/bin/ruby"
|
makeScriptWriter (
|
||||||
else "${(ruby.withPackages (ps: libraries))}/bin/ruby";
|
(builtins.removeAttrs args [ "libraries" ])
|
||||||
# Rubocop doesnt seem to like running in this fashion.
|
// {
|
||||||
#check = (writeDash "rubocop.sh" ''
|
interpreter =
|
||||||
# exec ${lib.getExe buildRubyPackages.rubocop} "$1"
|
if libraries == [ ] then "${ruby}/bin/ruby" else "${(ruby.withPackages (_ps: libraries))}/bin/ruby";
|
||||||
#'');
|
# Rubocop doesnt seem to like running in this fashion.
|
||||||
}
|
#check = (writeDash "rubocop.sh" ''
|
||||||
) name;
|
# exec ${lib.getExe buildRubyPackages.rubocop} "$1"
|
||||||
|
#'');
|
||||||
|
}
|
||||||
|
) name;
|
||||||
|
|
||||||
# Like writeScript but the first line is a shebang to ruby
|
# Like writeScript but the first line is a shebang to ruby
|
||||||
#
|
#
|
||||||
@@ -244,26 +259,29 @@ rec {
|
|||||||
# ''
|
# ''
|
||||||
writeRuby = makeRubyWriter pkgs.ruby pkgs.rubyPackages buildPackages.rubyPackages;
|
writeRuby = makeRubyWriter pkgs.ruby pkgs.rubyPackages buildPackages.rubyPackages;
|
||||||
|
|
||||||
writeRubyBin = name:
|
writeRubyBin = name: writeRuby "/bin/${name}";
|
||||||
writeRuby "/bin/${name}";
|
|
||||||
|
|
||||||
# makeLuaWriter takes lua and compatible luaPackages and produces lua script writer,
|
# makeLuaWriter takes lua and compatible luaPackages and produces lua script writer,
|
||||||
# which validates the script with luacheck at build time. If any libraries are specified,
|
# which validates the script with luacheck at build time. If any libraries are specified,
|
||||||
# lua.withPackages is used as interpreter, otherwise the "bare" lua is used.
|
# lua.withPackages is used as interpreter, otherwise the "bare" lua is used.
|
||||||
makeLuaWriter = lua: luaPackages: buildLuaPackages: name: { libraries ? [], ... } @ args:
|
makeLuaWriter =
|
||||||
makeScriptWriter (
|
lua: _luaPackages: buildLuaPackages: name:
|
||||||
(builtins.removeAttrs args ["libraries"])
|
{ ... }@args:
|
||||||
// {
|
makeScriptWriter (
|
||||||
interpreter = lua.interpreter;
|
(builtins.removeAttrs args [ "libraries" ])
|
||||||
|
// {
|
||||||
|
interpreter = lua.interpreter;
|
||||||
# if libraries == []
|
# if libraries == []
|
||||||
# then lua.interpreter
|
# then lua.interpreter
|
||||||
# else (lua.withPackages (ps: libraries)).interpreter
|
# else (lua.withPackages (ps: libraries)).interpreter
|
||||||
# This should support packages! I just cant figure out why some dependency collision happens whenever I try to run this.
|
# This should support packages! I just cant figure out why some dependency collision happens whenever I try to run this.
|
||||||
check = (writeDash "luacheck.sh" ''
|
check = (
|
||||||
exec ${buildLuaPackages.luacheck}/bin/luacheck "$1"
|
writeDash "luacheck.sh" ''
|
||||||
'');
|
exec ${buildLuaPackages.luacheck}/bin/luacheck "$1"
|
||||||
}
|
''
|
||||||
) name;
|
);
|
||||||
|
}
|
||||||
|
) name;
|
||||||
|
|
||||||
# writeLua takes a name an attributeset with libraries and some lua source code and
|
# writeLua takes a name an attributeset with libraries and some lua source code and
|
||||||
# returns an executable (should also work with luajit)
|
# returns an executable (should also work with luajit)
|
||||||
@@ -287,27 +305,27 @@ rec {
|
|||||||
# ''
|
# ''
|
||||||
writeLua = makeLuaWriter pkgs.lua pkgs.luaPackages buildPackages.luaPackages;
|
writeLua = makeLuaWriter pkgs.lua pkgs.luaPackages buildPackages.luaPackages;
|
||||||
|
|
||||||
writeLuaBin = name:
|
writeLuaBin = name: writeLua "/bin/${name}";
|
||||||
writeLua "/bin/${name}";
|
|
||||||
|
|
||||||
writeRust = name: {
|
writeRust =
|
||||||
|
name:
|
||||||
|
{
|
||||||
rustc ? pkgs.rustc,
|
rustc ? pkgs.rustc,
|
||||||
rustcArgs ? [],
|
rustcArgs ? [ ],
|
||||||
strip ? true
|
strip ? true,
|
||||||
}:
|
}:
|
||||||
let
|
let
|
||||||
darwinArgs = lib.optionals stdenv.isDarwin [ "-L${lib.getLib libiconv}/lib" ];
|
darwinArgs = lib.optionals stdenv.isDarwin [ "-L${lib.getLib libiconv}/lib" ];
|
||||||
in
|
in
|
||||||
makeBinWriter {
|
makeBinWriter {
|
||||||
compileScript = ''
|
compileScript = ''
|
||||||
cp "$contentPath" tmp.rs
|
cp "$contentPath" tmp.rs
|
||||||
PATH=${lib.makeBinPath [pkgs.gcc]} ${rustc}/bin/rustc ${lib.escapeShellArgs rustcArgs} ${lib.escapeShellArgs darwinArgs} -o "$out" tmp.rs
|
PATH=${lib.makeBinPath [ pkgs.gcc ]} ${rustc}/bin/rustc ${lib.escapeShellArgs rustcArgs} ${lib.escapeShellArgs darwinArgs} -o "$out" tmp.rs
|
||||||
'';
|
'';
|
||||||
inherit strip;
|
inherit strip;
|
||||||
} name;
|
} name;
|
||||||
|
|
||||||
writeRustBin = name:
|
writeRustBin = name: writeRust "/bin/${name}";
|
||||||
writeRust "/bin/${name}";
|
|
||||||
|
|
||||||
# writeJS takes a name an attributeset with libraries and some JavaScript sourcecode and
|
# writeJS takes a name an attributeset with libraries and some JavaScript sourcecode and
|
||||||
# returns an executable
|
# returns an executable
|
||||||
@@ -319,23 +337,26 @@ rec {
|
|||||||
# var result = UglifyJS.minify(code);
|
# var result = UglifyJS.minify(code);
|
||||||
# console.log(result.code);
|
# console.log(result.code);
|
||||||
# ''
|
# ''
|
||||||
writeJS = name: { libraries ? [] }: content:
|
writeJS =
|
||||||
let
|
name:
|
||||||
node-env = pkgs.buildEnv {
|
{
|
||||||
name = "node";
|
libraries ? [ ],
|
||||||
paths = libraries;
|
}:
|
||||||
pathsToLink = [
|
content:
|
||||||
"/lib/node_modules"
|
let
|
||||||
];
|
node-env = pkgs.buildEnv {
|
||||||
};
|
name = "node";
|
||||||
in writeDash name ''
|
paths = libraries;
|
||||||
export NODE_PATH=${node-env}/lib/node_modules
|
pathsToLink = [ "/lib/node_modules" ];
|
||||||
exec ${lib.getExe pkgs.nodejs} ${pkgs.writeText "js" content} "$@"
|
};
|
||||||
'';
|
in
|
||||||
|
writeDash name ''
|
||||||
|
export NODE_PATH=${node-env}/lib/node_modules
|
||||||
|
exec ${lib.getExe pkgs.nodejs} ${pkgs.writeText "js" content} "$@"
|
||||||
|
'';
|
||||||
|
|
||||||
# writeJSBin takes the same arguments as writeJS but outputs a directory (like writeScriptBin)
|
# writeJSBin takes the same arguments as writeJS but outputs a directory (like writeScriptBin)
|
||||||
writeJSBin = name:
|
writeJSBin = name: writeJS "/bin/${name}";
|
||||||
writeJS "/bin/${name}";
|
|
||||||
|
|
||||||
awkFormatNginx = builtins.toFile "awkFormat-nginx.awk" ''
|
awkFormatNginx = builtins.toFile "awkFormat-nginx.awk" ''
|
||||||
awk -f
|
awk -f
|
||||||
@@ -343,18 +364,22 @@ rec {
|
|||||||
/\{/{ctx++;idx=1}
|
/\{/{ctx++;idx=1}
|
||||||
/\}/{ctx--}
|
/\}/{ctx--}
|
||||||
{id="";for(i=idx;i<ctx;i++)id=sprintf("%s%s", id, "\t");printf "%s%s\n", id, $0}
|
{id="";for(i=idx;i<ctx;i++)id=sprintf("%s%s", id, "\t");printf "%s%s\n", id, $0}
|
||||||
'';
|
|
||||||
|
|
||||||
writeNginxConfig = name: text: pkgs.runCommandLocal name {
|
|
||||||
inherit text;
|
|
||||||
passAsFile = [ "text" ];
|
|
||||||
nativeBuildInputs = [ gixy ];
|
|
||||||
} /* sh */ ''
|
|
||||||
# nginx-config-formatter has an error - https://github.com/1connect/nginx-config-formatter/issues/16
|
|
||||||
awk -f ${awkFormatNginx} "$textPath" | sed '/^\s*$/d' > $out
|
|
||||||
gixy $out
|
|
||||||
'';
|
'';
|
||||||
|
|
||||||
|
writeNginxConfig =
|
||||||
|
name: text:
|
||||||
|
pkgs.runCommandLocal name
|
||||||
|
{
|
||||||
|
inherit text;
|
||||||
|
passAsFile = [ "text" ];
|
||||||
|
nativeBuildInputs = [ gixy ];
|
||||||
|
} # sh
|
||||||
|
''
|
||||||
|
# nginx-config-formatter has an error - https://github.com/1connect/nginx-config-formatter/issues/16
|
||||||
|
awk -f ${awkFormatNginx} "$textPath" | sed '/^\s*$/d' > $out
|
||||||
|
gixy $out
|
||||||
|
'';
|
||||||
|
|
||||||
# writePerl takes a name an attributeset with libraries and some perl sourcecode and
|
# writePerl takes a name an attributeset with libraries and some perl sourcecode and
|
||||||
# returns an executable
|
# returns an executable
|
||||||
#
|
#
|
||||||
@@ -363,42 +388,55 @@ rec {
|
|||||||
# use boolean;
|
# use boolean;
|
||||||
# print "Howdy!\n" if true;
|
# print "Howdy!\n" if true;
|
||||||
# ''
|
# ''
|
||||||
writePerl = name: { libraries ? [], ... } @ args:
|
writePerl =
|
||||||
|
name:
|
||||||
|
{
|
||||||
|
libraries ? [ ],
|
||||||
|
...
|
||||||
|
}@args:
|
||||||
makeScriptWriter (
|
makeScriptWriter (
|
||||||
(builtins.removeAttrs args ["libraries"])
|
(builtins.removeAttrs args [ "libraries" ])
|
||||||
// {
|
// {
|
||||||
interpreter = "${lib.getExe (pkgs.perl.withPackages (p: libraries))}";
|
interpreter = "${lib.getExe (pkgs.perl.withPackages (_p: libraries))}";
|
||||||
}
|
}
|
||||||
) name;
|
) name;
|
||||||
|
|
||||||
# writePerlBin takes the same arguments as writePerl but outputs a directory (like writeScriptBin)
|
# writePerlBin takes the same arguments as writePerl but outputs a directory (like writeScriptBin)
|
||||||
writePerlBin = name:
|
writePerlBin = name: writePerl "/bin/${name}";
|
||||||
writePerl "/bin/${name}";
|
|
||||||
|
|
||||||
# makePythonWriter takes python and compatible pythonPackages and produces python script writer,
|
# makePythonWriter takes python and compatible pythonPackages and produces python script writer,
|
||||||
# which validates the script with flake8 at build time. If any libraries are specified,
|
# which validates the script with flake8 at build time. If any libraries are specified,
|
||||||
# python.withPackages is used as interpreter, otherwise the "bare" python is used.
|
# python.withPackages is used as interpreter, otherwise the "bare" python is used.
|
||||||
makePythonWriter = python: pythonPackages: buildPythonPackages: name: { libraries ? [], flakeIgnore ? [], ... } @ args:
|
makePythonWriter =
|
||||||
let
|
python: pythonPackages: buildPythonPackages: name:
|
||||||
ignoreAttribute = optionalString (flakeIgnore != []) "--ignore ${concatMapStringsSep "," escapeShellArg flakeIgnore}";
|
{
|
||||||
in
|
libraries ? [ ],
|
||||||
makeScriptWriter
|
flakeIgnore ? [ ],
|
||||||
(
|
...
|
||||||
(builtins.removeAttrs args ["libraries" "flakeIgnore"])
|
}@args:
|
||||||
|
let
|
||||||
|
ignoreAttribute =
|
||||||
|
optionalString (flakeIgnore != [ ])
|
||||||
|
"--ignore ${concatMapStringsSep "," escapeShellArg flakeIgnore}";
|
||||||
|
in
|
||||||
|
makeScriptWriter (
|
||||||
|
(builtins.removeAttrs args [
|
||||||
|
"libraries"
|
||||||
|
"flakeIgnore"
|
||||||
|
])
|
||||||
// {
|
// {
|
||||||
interpreter =
|
interpreter =
|
||||||
if pythonPackages != pkgs.pypy2Packages || pythonPackages != pkgs.pypy3Packages then
|
if pythonPackages != pkgs.pypy2Packages || pythonPackages != pkgs.pypy3Packages then
|
||||||
if libraries == []
|
if libraries == [ ] then python.interpreter else (python.withPackages (_ps: libraries)).interpreter
|
||||||
then python.interpreter
|
else
|
||||||
else (python.withPackages (ps: libraries)).interpreter
|
python.interpreter;
|
||||||
else python.interpreter
|
check = optionalString python.isPy3k (
|
||||||
;
|
writeDash "pythoncheck.sh" ''
|
||||||
check = optionalString python.isPy3k (writeDash "pythoncheck.sh" ''
|
exec ${buildPythonPackages.flake8}/bin/flake8 --show-source ${ignoreAttribute} "$1"
|
||||||
exec ${buildPythonPackages.flake8}/bin/flake8 --show-source ${ignoreAttribute} "$1"
|
''
|
||||||
'');
|
);
|
||||||
}
|
}
|
||||||
)
|
) name;
|
||||||
name;
|
|
||||||
|
|
||||||
# writePyPy2 takes a name an attributeset with libraries and some pypy2 sourcecode and
|
# writePyPy2 takes a name an attributeset with libraries and some pypy2 sourcecode and
|
||||||
# returns an executable
|
# returns an executable
|
||||||
@@ -415,8 +453,7 @@ rec {
|
|||||||
writePyPy2 = makePythonWriter pkgs.pypy2 pkgs.pypy2Packages buildPackages.pypy2Packages;
|
writePyPy2 = makePythonWriter pkgs.pypy2 pkgs.pypy2Packages buildPackages.pypy2Packages;
|
||||||
|
|
||||||
# writePyPy2Bin takes the same arguments as writePyPy2 but outputs a directory (like writeScriptBin)
|
# writePyPy2Bin takes the same arguments as writePyPy2 but outputs a directory (like writeScriptBin)
|
||||||
writePyPy2Bin = name:
|
writePyPy2Bin = name: writePyPy2 "/bin/${name}";
|
||||||
writePyPy2 "/bin/${name}";
|
|
||||||
|
|
||||||
# writePython3 takes a name an attributeset with libraries and some python3 sourcecode and
|
# writePython3 takes a name an attributeset with libraries and some python3 sourcecode and
|
||||||
# returns an executable
|
# returns an executable
|
||||||
@@ -433,8 +470,7 @@ rec {
|
|||||||
writePython3 = makePythonWriter pkgs.python3 pkgs.python3Packages buildPackages.python3Packages;
|
writePython3 = makePythonWriter pkgs.python3 pkgs.python3Packages buildPackages.python3Packages;
|
||||||
|
|
||||||
# writePython3Bin takes the same arguments as writePython3 but outputs a directory (like writeScriptBin)
|
# writePython3Bin takes the same arguments as writePython3 but outputs a directory (like writeScriptBin)
|
||||||
writePython3Bin = name:
|
writePython3Bin = name: writePython3 "/bin/${name}";
|
||||||
writePython3 "/bin/${name}";
|
|
||||||
|
|
||||||
# writePyPy3 takes a name an attributeset with libraries and some pypy3 sourcecode and
|
# writePyPy3 takes a name an attributeset with libraries and some pypy3 sourcecode and
|
||||||
# returns an executable
|
# returns an executable
|
||||||
@@ -451,47 +487,61 @@ rec {
|
|||||||
writePyPy3 = makePythonWriter pkgs.pypy3 pkgs.pypy3Packages buildPackages.pypy3Packages;
|
writePyPy3 = makePythonWriter pkgs.pypy3 pkgs.pypy3Packages buildPackages.pypy3Packages;
|
||||||
|
|
||||||
# writePyPy3Bin takes the same arguments as writePyPy3 but outputs a directory (like writeScriptBin)
|
# writePyPy3Bin takes the same arguments as writePyPy3 but outputs a directory (like writeScriptBin)
|
||||||
writePyPy3Bin = name:
|
writePyPy3Bin = name: writePyPy3 "/bin/${name}";
|
||||||
writePyPy3 "/bin/${name}";
|
|
||||||
|
|
||||||
|
makeFSharpWriter =
|
||||||
|
{
|
||||||
|
dotnet-sdk ? pkgs.dotnet-sdk,
|
||||||
|
fsi-flags ? "",
|
||||||
|
libraries ? _: [ ],
|
||||||
|
...
|
||||||
|
}@args:
|
||||||
|
nameOrPath:
|
||||||
|
let
|
||||||
|
fname = last (builtins.split "/" nameOrPath);
|
||||||
|
path = if strings.hasSuffix ".fsx" nameOrPath then nameOrPath else "${nameOrPath}.fsx";
|
||||||
|
_nugetDeps = mkNugetDeps {
|
||||||
|
name = "${fname}-nuget-deps";
|
||||||
|
nugetDeps = libraries;
|
||||||
|
};
|
||||||
|
|
||||||
makeFSharpWriter = { dotnet-sdk ? pkgs.dotnet-sdk, fsi-flags ? "", libraries ? _: [], ... } @ args: nameOrPath:
|
nuget-source = mkNugetSource {
|
||||||
let
|
name = "${fname}-nuget-source";
|
||||||
fname = last (builtins.split "/" nameOrPath);
|
description = "A Nuget source with the dependencies for ${fname}";
|
||||||
path = if strings.hasSuffix ".fsx" nameOrPath then nameOrPath else "${nameOrPath}.fsx";
|
deps = [ _nugetDeps ];
|
||||||
_nugetDeps = mkNugetDeps { name = "${fname}-nuget-deps"; nugetDeps = libraries; };
|
};
|
||||||
|
|
||||||
nuget-source = mkNugetSource {
|
fsi = writeBash "fsi" ''
|
||||||
name = "${fname}-nuget-source";
|
export HOME=$NIX_BUILD_TOP/.home
|
||||||
description = "A Nuget source with the dependencies for ${fname}";
|
export DOTNET_SKIP_FIRST_TIME_EXPERIENCE=1
|
||||||
deps = [ _nugetDeps ];
|
export DOTNET_CLI_TELEMETRY_OPTOUT=1
|
||||||
};
|
export DOTNET_NOLOGO=1
|
||||||
|
script="$1"; shift
|
||||||
|
${lib.getExe dotnet-sdk} fsi --quiet --nologo --readline- ${fsi-flags} "$@" < "$script"
|
||||||
|
'';
|
||||||
|
|
||||||
fsi = writeBash "fsi" ''
|
in
|
||||||
export HOME=$NIX_BUILD_TOP/.home
|
content:
|
||||||
export DOTNET_SKIP_FIRST_TIME_EXPERIENCE=1
|
makeScriptWriter
|
||||||
export DOTNET_CLI_TELEMETRY_OPTOUT=1
|
(
|
||||||
export DOTNET_NOLOGO=1
|
(builtins.removeAttrs args [
|
||||||
script="$1"; shift
|
"dotnet-sdk"
|
||||||
${lib.getExe dotnet-sdk} fsi --quiet --nologo --readline- ${fsi-flags} "$@" < "$script"
|
"fsi-flags"
|
||||||
'';
|
"libraries"
|
||||||
|
])
|
||||||
|
// {
|
||||||
|
interpreter = fsi;
|
||||||
|
}
|
||||||
|
)
|
||||||
|
path
|
||||||
|
''
|
||||||
|
#i "nuget: ${nuget-source}/lib"
|
||||||
|
${content}
|
||||||
|
exit 0
|
||||||
|
'';
|
||||||
|
|
||||||
in content: makeScriptWriter (
|
writeFSharp = makeFSharpWriter { };
|
||||||
(builtins.removeAttrs args ["dotnet-sdk" "fsi-flags" "libraries"])
|
|
||||||
// {
|
|
||||||
interpreter = fsi;
|
|
||||||
}
|
|
||||||
) path
|
|
||||||
''
|
|
||||||
#i "nuget: ${nuget-source}/lib"
|
|
||||||
${ content }
|
|
||||||
exit 0
|
|
||||||
'';
|
|
||||||
|
|
||||||
writeFSharp =
|
writeFSharpBin = name: writeFSharp "/bin/${name}";
|
||||||
makeFSharpWriter {};
|
|
||||||
|
|
||||||
writeFSharpBin = name:
|
|
||||||
writeFSharp "/bin/${name}";
|
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,3 +1,4 @@
|
|||||||
|
# shellcheck shell=bash
|
||||||
source_up
|
source_up
|
||||||
|
|
||||||
watch_file flake-module.nix shell.nix default.nix
|
watch_file flake-module.nix shell.nix default.nix
|
||||||
|
|||||||
@@ -6,7 +6,7 @@
|
|||||||
pygobject3,
|
pygobject3,
|
||||||
wrapGAppsHook,
|
wrapGAppsHook,
|
||||||
gtk4,
|
gtk4,
|
||||||
gnome,
|
adwaita-icon-theme,
|
||||||
pygobject-stubs,
|
pygobject-stubs,
|
||||||
gobject-introspection,
|
gobject-introspection,
|
||||||
clan-cli,
|
clan-cli,
|
||||||
@@ -39,7 +39,7 @@ let
|
|||||||
gtk4
|
gtk4
|
||||||
libadwaita
|
libadwaita
|
||||||
webkitgtk_6_0
|
webkitgtk_6_0
|
||||||
gnome.adwaita-icon-theme
|
adwaita-icon-theme
|
||||||
];
|
];
|
||||||
|
|
||||||
# Deps including python packages from the local project
|
# Deps including python packages from the local project
|
||||||
|
|||||||
@@ -1,3 +1,4 @@
|
|||||||
|
# shellcheck shell=bash
|
||||||
source_up
|
source_up
|
||||||
|
|
||||||
watch_file flake-module.nix shell.nix default.nix
|
watch_file flake-module.nix shell.nix default.nix
|
||||||
|
|||||||
@@ -23,6 +23,7 @@ from . import (
|
|||||||
machines,
|
machines,
|
||||||
secrets,
|
secrets,
|
||||||
state,
|
state,
|
||||||
|
vars,
|
||||||
vms,
|
vms,
|
||||||
)
|
)
|
||||||
from .clan_uri import FlakeId
|
from .clan_uri import FlakeId
|
||||||
@@ -272,6 +273,43 @@ For more detailed information, visit: {help_hyperlink("secrets", "https://docs.c
|
|||||||
)
|
)
|
||||||
facts.register_parser(parser_facts)
|
facts.register_parser(parser_facts)
|
||||||
|
|
||||||
|
# like facts but with vars instead of facts
|
||||||
|
parser_vars = subparsers.add_parser(
|
||||||
|
"vars",
|
||||||
|
help="WIP: manage vars",
|
||||||
|
description="WIP: manage vars",
|
||||||
|
epilog=(
|
||||||
|
f"""
|
||||||
|
This subcommand provides an interface to vars of clan machines.
|
||||||
|
Vars are variables that a service can generate.
|
||||||
|
There are public and secret vars.
|
||||||
|
Public vars can be referenced by other machines directly.
|
||||||
|
Public vars can include: ip addresses, public keys.
|
||||||
|
Secret vars can include: passwords, private keys.
|
||||||
|
|
||||||
|
A service is an included clan-module that implements vars generation functionality.
|
||||||
|
For example the zerotier module will generate private and public vars.
|
||||||
|
In this case the public var will be the resulting zerotier-ip of the machine.
|
||||||
|
The secret var will be the zerotier-identity-secret, which is used by zerotier
|
||||||
|
to prove the machine has control of the zerotier-ip.
|
||||||
|
|
||||||
|
Examples:
|
||||||
|
|
||||||
|
$ clan vars generate
|
||||||
|
Will generate vars for all machines.
|
||||||
|
|
||||||
|
$ clan vars generate --service [SERVICE] --regenerate
|
||||||
|
Will regenerate vars, if they are already generated for a specific service.
|
||||||
|
This is especially useful for resetting certain passwords while leaving the rest
|
||||||
|
of the vars for a machine in place.
|
||||||
|
|
||||||
|
For more detailed information, visit: {help_hyperlink("secrets", "https://docs.clan.lol/getting-started/secrets")}
|
||||||
|
"""
|
||||||
|
),
|
||||||
|
formatter_class=argparse.RawTextHelpFormatter,
|
||||||
|
)
|
||||||
|
vars.register_parser(parser_vars)
|
||||||
|
|
||||||
parser_machine = subparsers.add_parser(
|
parser_machine = subparsers.add_parser(
|
||||||
"machines",
|
"machines",
|
||||||
help="manage machines and their configuration",
|
help="manage machines and their configuration",
|
||||||
|
|||||||
@@ -76,6 +76,7 @@ def type_to_dict(t: Any, scope: str = "", type_map: dict[TypeVar, type] = {}) ->
|
|||||||
properties = {
|
properties = {
|
||||||
f.name: type_to_dict(f.type, f"{scope} {t.__name__}.{f.name}", type_map)
|
f.name: type_to_dict(f.type, f"{scope} {t.__name__}.{f.name}", type_map)
|
||||||
for f in fields
|
for f in fields
|
||||||
|
if not f.name.startswith("_")
|
||||||
}
|
}
|
||||||
|
|
||||||
required = set()
|
required = set()
|
||||||
@@ -127,7 +128,7 @@ def type_to_dict(t: Any, scope: str = "", type_map: dict[TypeVar, type] = {}) ->
|
|||||||
if origin is None:
|
if origin is None:
|
||||||
# Non-generic user-defined or built-in type
|
# Non-generic user-defined or built-in type
|
||||||
# TODO: handle custom types
|
# TODO: handle custom types
|
||||||
raise JSchemaTypeError("Unhandled Type: ", origin)
|
raise JSchemaTypeError(f"{scope} Unhandled Type: ", origin)
|
||||||
|
|
||||||
elif origin is Literal:
|
elif origin is Literal:
|
||||||
# Handle Literal values for enums in JSON Schema
|
# Handle Literal values for enums in JSON Schema
|
||||||
@@ -172,7 +173,7 @@ def type_to_dict(t: Any, scope: str = "", type_map: dict[TypeVar, type] = {}) ->
|
|||||||
new_map.update(inspect_dataclass_fields(t))
|
new_map.update(inspect_dataclass_fields(t))
|
||||||
return type_to_dict(origin, scope, new_map)
|
return type_to_dict(origin, scope, new_map)
|
||||||
|
|
||||||
raise JSchemaTypeError(f"Error api type not yet supported {t!s}")
|
raise JSchemaTypeError(f"{scope} - Error api type not yet supported {t!s}")
|
||||||
|
|
||||||
elif isinstance(t, type):
|
elif isinstance(t, type):
|
||||||
if t is str:
|
if t is str:
|
||||||
@@ -187,7 +188,7 @@ def type_to_dict(t: Any, scope: str = "", type_map: dict[TypeVar, type] = {}) ->
|
|||||||
return {"type": "object"}
|
return {"type": "object"}
|
||||||
if t is Any:
|
if t is Any:
|
||||||
raise JSchemaTypeError(
|
raise JSchemaTypeError(
|
||||||
f"Usage of the Any type is not supported for API functions. In: {scope}"
|
f"{scope} - Usage of the Any type is not supported for API functions. In: {scope}"
|
||||||
)
|
)
|
||||||
if t is pathlib.Path:
|
if t is pathlib.Path:
|
||||||
return {
|
return {
|
||||||
@@ -196,13 +197,13 @@ def type_to_dict(t: Any, scope: str = "", type_map: dict[TypeVar, type] = {}) ->
|
|||||||
}
|
}
|
||||||
if t is dict:
|
if t is dict:
|
||||||
raise JSchemaTypeError(
|
raise JSchemaTypeError(
|
||||||
"Error: generic dict type not supported. Use dict[str, Any] instead"
|
f"{scope} - Generic 'dict' type not supported. Use dict[str, Any] or any more expressive type."
|
||||||
)
|
)
|
||||||
|
|
||||||
# Optional[T] gets internally transformed Union[T,NoneType]
|
# Optional[T] gets internally transformed Union[T,NoneType]
|
||||||
if t is NoneType:
|
if t is NoneType:
|
||||||
return {"type": "null"}
|
return {"type": "null"}
|
||||||
|
|
||||||
raise JSchemaTypeError(f"Error primitive type not supported {t!s}")
|
raise JSchemaTypeError(f"{scope} - Error primitive type not supported {t!s}")
|
||||||
else:
|
else:
|
||||||
raise JSchemaTypeError(f"Error type not supported {t!s}")
|
raise JSchemaTypeError(f"{scope} - Error type not supported {t!s}")
|
||||||
|
|||||||
@@ -81,11 +81,11 @@ def cast(value: Any, input_type: Any, opt_description: str) -> Any:
|
|||||||
else:
|
else:
|
||||||
raise ClanError(f"Invalid value {value} for boolean")
|
raise ClanError(f"Invalid value {value} for boolean")
|
||||||
# handle lists
|
# handle lists
|
||||||
elif get_origin(input_type) == list:
|
elif get_origin(input_type) is list:
|
||||||
subtype = input_type.__args__[0]
|
subtype = input_type.__args__[0]
|
||||||
return [cast([x], subtype, opt_description) for x in value]
|
return [cast([x], subtype, opt_description) for x in value]
|
||||||
# handle dicts
|
# handle dicts
|
||||||
elif get_origin(input_type) == dict:
|
elif get_origin(input_type) is dict:
|
||||||
if not isinstance(value, dict):
|
if not isinstance(value, dict):
|
||||||
raise ClanError(
|
raise ClanError(
|
||||||
f"Cannot set {opt_description} directly. Specify a suboption like {opt_description}.<name>"
|
f"Cannot set {opt_description} directly. Specify a suboption like {opt_description}.<name>"
|
||||||
|
|||||||
@@ -20,7 +20,7 @@ class Machine:
|
|||||||
name: str
|
name: str
|
||||||
flake: FlakeId
|
flake: FlakeId
|
||||||
nix_options: list[str] = field(default_factory=list)
|
nix_options: list[str] = field(default_factory=list)
|
||||||
cached_deployment: None | dict = None
|
cached_deployment: None | dict[str, Any] = None
|
||||||
|
|
||||||
_eval_cache: dict[str, str] = field(default_factory=dict)
|
_eval_cache: dict[str, str] = field(default_factory=dict)
|
||||||
_build_cache: dict[str, Path] = field(default_factory=dict)
|
_build_cache: dict[str, Path] = field(default_factory=dict)
|
||||||
@@ -69,12 +69,26 @@ class Machine:
|
|||||||
def public_facts_module(self) -> str:
|
def public_facts_module(self) -> str:
|
||||||
return self.deployment["facts"]["publicModule"]
|
return self.deployment["facts"]["publicModule"]
|
||||||
|
|
||||||
|
@property
|
||||||
|
def secret_vars_module(self) -> str:
|
||||||
|
return self.deployment["vars"]["secretModule"]
|
||||||
|
|
||||||
|
@property
|
||||||
|
def public_vars_module(self) -> str:
|
||||||
|
return self.deployment["vars"]["publicModule"]
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def facts_data(self) -> dict[str, dict[str, Any]]:
|
def facts_data(self) -> dict[str, dict[str, Any]]:
|
||||||
if self.deployment["facts"]["services"]:
|
if self.deployment["facts"]["services"]:
|
||||||
return self.deployment["facts"]["services"]
|
return self.deployment["facts"]["services"]
|
||||||
return {}
|
return {}
|
||||||
|
|
||||||
|
@property
|
||||||
|
def vars_generators(self) -> dict[str, dict[str, Any]]:
|
||||||
|
if self.deployment["vars"]["generators"]:
|
||||||
|
return self.deployment["vars"]["generators"]
|
||||||
|
return {}
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def secrets_upload_directory(self) -> str:
|
def secrets_upload_directory(self) -> str:
|
||||||
return self.deployment["facts"]["secretUploadDirectory"]
|
return self.deployment["facts"]["secretUploadDirectory"]
|
||||||
|
|||||||
132
pkgs/clan-cli/clan_cli/vars/__init__.py
Normal file
132
pkgs/clan-cli/clan_cli/vars/__init__.py
Normal file
@@ -0,0 +1,132 @@
|
|||||||
|
# !/usr/bin/env python3
|
||||||
|
import argparse
|
||||||
|
|
||||||
|
from ..hyperlink import help_hyperlink
|
||||||
|
from .check import register_check_parser
|
||||||
|
from .generate import register_generate_parser
|
||||||
|
from .list import register_list_parser
|
||||||
|
from .upload import register_upload_parser
|
||||||
|
|
||||||
|
|
||||||
|
# takes a (sub)parser and configures it
|
||||||
|
def register_parser(parser: argparse.ArgumentParser) -> None:
|
||||||
|
subparser = parser.add_subparsers(
|
||||||
|
title="command",
|
||||||
|
description="the command to run",
|
||||||
|
help="the command to run",
|
||||||
|
required=True,
|
||||||
|
)
|
||||||
|
|
||||||
|
check_parser = subparser.add_parser(
|
||||||
|
"check",
|
||||||
|
help="check if facts are up to date",
|
||||||
|
epilog=(
|
||||||
|
f"""
|
||||||
|
This subcommand allows checking if all facts are up to date.
|
||||||
|
|
||||||
|
Examples:
|
||||||
|
|
||||||
|
$ clan facts check [MACHINE]
|
||||||
|
Will check facts for the specified machine.
|
||||||
|
|
||||||
|
|
||||||
|
For more detailed information, visit: {help_hyperlink("secrets", "https://docs.clan.lol/getting-started/secrets")}
|
||||||
|
"""
|
||||||
|
),
|
||||||
|
formatter_class=argparse.RawTextHelpFormatter,
|
||||||
|
)
|
||||||
|
register_check_parser(check_parser)
|
||||||
|
|
||||||
|
list_parser = subparser.add_parser(
|
||||||
|
"list",
|
||||||
|
help="list all facts",
|
||||||
|
epilog=(
|
||||||
|
f"""
|
||||||
|
This subcommand allows listing all public facts for a specific machine.
|
||||||
|
|
||||||
|
The resulting list will be a json string with the name of the fact as its key
|
||||||
|
and the fact itself as it's value.
|
||||||
|
|
||||||
|
This is how an example output might look like:
|
||||||
|
```
|
||||||
|
\u007b
|
||||||
|
"[FACT_NAME]": "[FACT]"
|
||||||
|
\u007d
|
||||||
|
```
|
||||||
|
|
||||||
|
Examples:
|
||||||
|
|
||||||
|
$ clan facts list [MACHINE]
|
||||||
|
Will list facts for the specified machine.
|
||||||
|
|
||||||
|
|
||||||
|
For more detailed information, visit: {help_hyperlink("secrets", "https://docs.clan.lol/getting-started/secrets")}
|
||||||
|
"""
|
||||||
|
),
|
||||||
|
formatter_class=argparse.RawTextHelpFormatter,
|
||||||
|
)
|
||||||
|
register_list_parser(list_parser)
|
||||||
|
|
||||||
|
parser_generate = subparser.add_parser(
|
||||||
|
"generate",
|
||||||
|
help="generate public and secret facts for machines",
|
||||||
|
epilog=(
|
||||||
|
f"""
|
||||||
|
This subcommand allows control of the generation of facts.
|
||||||
|
Often this function will be invoked automatically on deploying machines,
|
||||||
|
but there are situations the user may want to have more granular control,
|
||||||
|
especially for the regeneration of certain services.
|
||||||
|
|
||||||
|
A service is an included clan-module that implements facts generation functionality.
|
||||||
|
For example the zerotier module will generate private and public facts.
|
||||||
|
In this case the public fact will be the resulting zerotier-ip of the machine.
|
||||||
|
The secret fact will be the zerotier-identity-secret, which is used by zerotier
|
||||||
|
to prove the machine has control of the zerotier-ip.
|
||||||
|
|
||||||
|
|
||||||
|
Examples:
|
||||||
|
|
||||||
|
$ clan facts generate
|
||||||
|
Will generate facts for all machines.
|
||||||
|
|
||||||
|
$ clan facts generate [MACHINE]
|
||||||
|
Will generate facts for the specified machine.
|
||||||
|
|
||||||
|
$ clan facts generate [MACHINE] --service [SERVICE]
|
||||||
|
Will generate facts for the specified machine for the specified service.
|
||||||
|
|
||||||
|
$ clan facts generate --service [SERVICE] --regenerate
|
||||||
|
Will regenerate facts, if they are already generated for a specific service.
|
||||||
|
This is especially useful for resetting certain passwords while leaving the rest
|
||||||
|
of the facts for a machine in place.
|
||||||
|
|
||||||
|
For more detailed information, visit: {help_hyperlink("secrets", "https://docs.clan.lol/getting-started/secrets")}
|
||||||
|
"""
|
||||||
|
),
|
||||||
|
formatter_class=argparse.RawTextHelpFormatter,
|
||||||
|
)
|
||||||
|
register_generate_parser(parser_generate)
|
||||||
|
|
||||||
|
parser_upload = subparser.add_parser(
|
||||||
|
"upload",
|
||||||
|
help="upload secrets for machines",
|
||||||
|
epilog=(
|
||||||
|
f"""
|
||||||
|
This subcommand allows uploading secrets to remote machines.
|
||||||
|
|
||||||
|
If using sops as a secret backend it will upload the private key to the machine.
|
||||||
|
If using password store it uploads all the secrets you manage to the machine.
|
||||||
|
|
||||||
|
The default backend is sops.
|
||||||
|
|
||||||
|
Examples:
|
||||||
|
|
||||||
|
$ clan facts upload [MACHINE]
|
||||||
|
Will upload secrets to a specific machine.
|
||||||
|
|
||||||
|
For more detailed information, visit: {help_hyperlink("secrets", "https://docs.clan.lol/getting-started/secrets")}
|
||||||
|
"""
|
||||||
|
),
|
||||||
|
formatter_class=argparse.RawTextHelpFormatter,
|
||||||
|
)
|
||||||
|
register_upload_parser(parser_upload)
|
||||||
64
pkgs/clan-cli/clan_cli/vars/check.py
Normal file
64
pkgs/clan-cli/clan_cli/vars/check.py
Normal file
@@ -0,0 +1,64 @@
|
|||||||
|
import argparse
|
||||||
|
import importlib
|
||||||
|
import logging
|
||||||
|
|
||||||
|
from ..completions import add_dynamic_completer, complete_machines
|
||||||
|
from ..machines.machines import Machine
|
||||||
|
|
||||||
|
log = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
def check_secrets(machine: Machine, generator_name: None | str = None) -> bool:
|
||||||
|
secret_vars_module = importlib.import_module(machine.secret_vars_module)
|
||||||
|
secret_vars_store = secret_vars_module.SecretStore(machine=machine)
|
||||||
|
public_vars_module = importlib.import_module(machine.public_vars_module)
|
||||||
|
public_vars_store = public_vars_module.FactStore(machine=machine)
|
||||||
|
|
||||||
|
missing_secret_vars = []
|
||||||
|
missing_public_vars = []
|
||||||
|
if generator_name:
|
||||||
|
services = [generator_name]
|
||||||
|
else:
|
||||||
|
services = list(machine.vars_generators.keys())
|
||||||
|
for generator_name in services:
|
||||||
|
for name, file in machine.vars_generators[generator_name]["files"].items():
|
||||||
|
if file["secret"] and not secret_vars_store.exists(generator_name, name):
|
||||||
|
log.info(
|
||||||
|
f"Secret fact '{name}' for service '{generator_name}' in machine {machine.name} is missing."
|
||||||
|
)
|
||||||
|
missing_secret_vars.append((generator_name, name))
|
||||||
|
if not file["secret"] and not public_vars_store.exists(
|
||||||
|
generator_name, name
|
||||||
|
):
|
||||||
|
log.info(
|
||||||
|
f"Public fact '{name}' for service '{generator_name}' in machine {machine.name} is missing."
|
||||||
|
)
|
||||||
|
missing_public_vars.append((generator_name, name))
|
||||||
|
|
||||||
|
log.debug(f"missing_secret_vars: {missing_secret_vars}")
|
||||||
|
log.debug(f"missing_public_vars: {missing_public_vars}")
|
||||||
|
if missing_secret_vars or missing_public_vars:
|
||||||
|
return False
|
||||||
|
return True
|
||||||
|
|
||||||
|
|
||||||
|
def check_command(args: argparse.Namespace) -> None:
|
||||||
|
machine = Machine(
|
||||||
|
name=args.machine,
|
||||||
|
flake=args.flake,
|
||||||
|
)
|
||||||
|
check_secrets(machine, generator_name=args.service)
|
||||||
|
|
||||||
|
|
||||||
|
def register_check_parser(parser: argparse.ArgumentParser) -> None:
    """Attach the arguments of the `check` subcommand to *parser*."""
    machine_arg = parser.add_argument(
        "machine",
        help="The machine to check secrets for",
    )
    add_dynamic_completer(machine_arg, complete_machines)
    parser.add_argument("--service", help="the service to check")
    parser.set_defaults(func=check_command)
|
||||||
240
pkgs/clan-cli/clan_cli/vars/generate.py
Normal file
240
pkgs/clan-cli/clan_cli/vars/generate.py
Normal file
@@ -0,0 +1,240 @@
|
|||||||
|
import argparse
|
||||||
|
import importlib
|
||||||
|
import logging
|
||||||
|
import os
|
||||||
|
import subprocess
|
||||||
|
import sys
|
||||||
|
from collections.abc import Callable
|
||||||
|
from pathlib import Path
|
||||||
|
from tempfile import TemporaryDirectory
|
||||||
|
|
||||||
|
from clan_cli.cmd import run
|
||||||
|
|
||||||
|
from ..completions import (
|
||||||
|
add_dynamic_completer,
|
||||||
|
complete_machines,
|
||||||
|
complete_services_for_machine,
|
||||||
|
)
|
||||||
|
from ..errors import ClanError
|
||||||
|
from ..git import commit_files
|
||||||
|
from ..machines.inventory import get_all_machines, get_selected_machines
|
||||||
|
from ..machines.machines import Machine
|
||||||
|
from ..nix import nix_shell
|
||||||
|
from .check import check_secrets
|
||||||
|
from .public_modules import FactStoreBase
|
||||||
|
from .secret_modules import SecretStoreBase
|
||||||
|
|
||||||
|
log = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
def read_multiline_input(prompt: str = "Finish with Ctrl-D") -> str:
    """
    Read multi-line input from stdin.

    Prints *prompt*, then consumes stdin until EOF (Ctrl-D) and returns
    everything that was read.
    """
    print(prompt, flush=True)
    # FIX: read stdin directly instead of spawning an external `cat`
    # process, which was wasteful and non-portable.
    data = sys.stdin.read()
    log.info("Input received. Processing...")
    return data
|
||||||
|
|
||||||
|
|
||||||
|
def bubblewrap_cmd(generator: str, generator_dir: Path) -> list[str]:
    """Wrap *generator* (a bash script) in a bubblewrap sandbox command.

    The sandbox exposes only /nix/store (read-only), /dev, and the
    generator's output directory; all namespaces are unshared and the
    script runs as uid 1000.
    """
    sandbox = [
        "bwrap",
        "--ro-bind", "/nix/store", "/nix/store",
        "--tmpfs", "/usr/lib/systemd",
        "--dev", "/dev",
        "--bind", str(generator_dir), str(generator_dir),
        "--unshare-all",
        "--unshare-user",
        "--uid", "1000",
        "--",
        "bash", "-c", generator,
    ]
    return nix_shell(["nixpkgs#bash", "nixpkgs#bubblewrap"], sandbox)
|
||||||
|
|
||||||
|
|
||||||
|
def execute_generator(
    machine: Machine,
    generator_name: str,
    regenerate: bool,
    secret_vars_store: SecretStoreBase,
    public_vars_store: FactStoreBase,
    tmpdir: Path,
    prompt: Callable[[str], str],
) -> bool:
    """Run one vars generator for *machine* and persist its output files.

    Returns True when the generator was executed (because at least one of
    its declared files was missing, or *regenerate* was requested),
    False otherwise.

    Raises:
        ClanError: when the flake is not a local path, or when the
            generator did not produce one of its declared files.
    """
    generator_dir = tmpdir / generator_name
    # check if all secrets exist and generate them if at least one is missing
    needs_regeneration = not check_secrets(machine, generator_name=generator_name)
    log.debug(f"{generator_name} needs_regeneration: {needs_regeneration}")
    if not (needs_regeneration or regenerate):
        return False
    if not isinstance(machine.flake, Path):
        msg = f"flake is not a Path: {machine.flake}"
        msg += "fact/secret generation is only supported for local flakes"
        # BUG FIX: the message above was constructed but never raised, so
        # non-local flakes silently fell through into generation and failed
        # later with an obscure error.
        raise ClanError(msg)

    env = os.environ.copy()
    generator_dir.mkdir(parents=True)
    env["out"] = str(generator_dir)
    # compatibility for old outputs.nix users
    generator = machine.vars_generators[generator_name]["finalScript"]
    # if machine.vars_data[generator_name]["generator"]["prompt"]:
    #     prompt_value = prompt(machine.vars_data[generator_name]["generator"]["prompt"])
    #     env["prompt_value"] = prompt_value
    # Sandbox the generator on Linux; other platforms run it directly.
    if sys.platform == "linux":
        cmd = bubblewrap_cmd(generator, generator_dir)
    else:
        cmd = ["bash", "-c", generator]
    run(
        cmd,
        env=env,
    )
    files_to_commit = []
    # store secrets
    files = machine.vars_generators[generator_name]["files"]
    for file_name, file in files.items():
        groups = file.get("groups", [])

        secret_file = generator_dir / file_name
        if not secret_file.is_file():
            msg = f"did not generate a file for '{file_name}' when running the following command:\n"
            msg += generator
            raise ClanError(msg)
        if file["secret"]:
            file_path = secret_vars_store.set(
                generator_name, file_name, secret_file.read_bytes(), groups
            )
        else:
            file_path = public_vars_store.set(
                generator_name, file_name, secret_file.read_bytes()
            )
        if file_path:
            files_to_commit.append(file_path)
    commit_files(
        files_to_commit,
        machine.flake_dir,
        f"Update facts/secrets for service {generator_name} in machine {machine.name}",
    )
    return True
|
||||||
|
|
||||||
|
|
||||||
|
def prompt_func(text: str) -> str:
    """Default prompt handler: display *text* and read multi-line stdin."""
    print(f"{text}: ")
    return read_multiline_input()
|
||||||
|
|
||||||
|
|
||||||
|
def _generate_vars_for_machine(
    machine: Machine,
    generator_name: str | None,
    regenerate: bool,
    tmpdir: Path,
    prompt: Callable[[str], str] = prompt_func,
) -> bool:
    """Run the requested generator(s) for a single machine.

    Returns True when at least one generator actually produced new output.

    Raises:
        ClanError: if *generator_name* is not declared on the machine.
    """
    local_temp = tmpdir / machine.name
    local_temp.mkdir()
    # Resolve the machine's configured storage backends by module path.
    secret_vars_store = importlib.import_module(
        machine.secret_vars_module
    ).SecretStore(machine=machine)
    public_vars_store = importlib.import_module(
        machine.public_vars_module
    ).FactStore(machine=machine)

    if generator_name and generator_name not in machine.vars_generators:
        generators = list(machine.vars_generators.keys())
        raise ClanError(
            f"Could not find generator with name: {generator_name}. The following generators are available: {generators}"
        )

    if generator_name:
        selected_generators = {
            generator_name: machine.vars_generators[generator_name]
        }
    else:
        selected_generators = machine.vars_generators

    machine_updated = False
    for gen_name in selected_generators:
        machine_updated |= execute_generator(
            machine=machine,
            generator_name=gen_name,
            regenerate=regenerate,
            secret_vars_store=secret_vars_store,
            public_vars_store=public_vars_store,
            tmpdir=local_temp,
            prompt=prompt,
        )
    if machine_updated:
        # flush caches to make sure the new secrets are available in evaluation
        machine.flush_caches()
    return machine_updated
|
||||||
|
|
||||||
|
|
||||||
|
def generate_vars(
    machines: list[Machine],
    generator_name: str | None,
    regenerate: bool,
    prompt: Callable[[str], str] = prompt_func,
) -> bool:
    """Generate vars for *machines*, collecting per-machine failures.

    Returns True when at least one machine was (re)generated.

    Raises:
        ClanError: after all machines were processed, if any of them failed.
    """
    was_regenerated = False
    with TemporaryDirectory() as tmp:
        tmpdir = Path(tmp)
        # BUG FIX: the error counter appeared to be reset and checked inside
        # the per-machine loop, so processing aborted on the first failing
        # machine and the reported count was always 1. Accumulate across all
        # machines and raise once at the end.
        errors = 0
        for machine in machines:
            try:
                was_regenerated |= _generate_vars_for_machine(
                    machine, generator_name, regenerate, tmpdir, prompt
                )
            except Exception as exc:
                log.error(f"Failed to generate facts for {machine.name}: {exc}")
                errors += 1
        if errors > 0:
            raise ClanError(
                f"Failed to generate facts for {errors} hosts. Check the logs above"
            )

    if not was_regenerated:
        print("All secrets and facts are already up to date")
    return was_regenerated
|
||||||
|
|
||||||
|
|
||||||
|
def generate_command(args: argparse.Namespace) -> None:
    """CLI entry point: generate vars for the selected (or all) machines."""
    if args.machines:
        machines = get_selected_machines(args.flake, args.option, args.machines)
    else:
        machines = get_all_machines(args.flake, args.option)
    generate_vars(machines, args.service, args.regenerate)
|
||||||
|
|
||||||
|
|
||||||
|
def register_generate_parser(parser: argparse.ArgumentParser) -> None:
    """Attach the arguments of the `generate` subcommand to *parser*."""
    machines_parser = parser.add_argument(
        "machines",
        type=str,
        help="machine to generate facts for. if empty, generate facts for all machines",
        nargs="*",
        default=[],
    )
    add_dynamic_completer(machines_parser, complete_machines)

    service_parser = parser.add_argument(
        "--service",
        type=str,
        help="service to generate facts for, if empty, generate facts for every service",
        default=None,
    )
    add_dynamic_completer(service_parser, complete_services_for_machine)

    parser.add_argument(
        "--regenerate",
        # FIX: dropped `type=bool` — with BooleanOptionalAction the flag
        # takes no value, and `type=bool` is a known argparse footgun
        # (bool("false") is True).
        action=argparse.BooleanOptionalAction,
        help="whether to regenerate facts for the specified machine",
        default=None,
    )
    parser.set_defaults(func=generate_command)
|
||||||
47
pkgs/clan-cli/clan_cli/vars/list.py
Normal file
47
pkgs/clan-cli/clan_cli/vars/list.py
Normal file
@@ -0,0 +1,47 @@
|
|||||||
|
import argparse
|
||||||
|
import importlib
|
||||||
|
import json
|
||||||
|
import logging
|
||||||
|
|
||||||
|
from ..completions import add_dynamic_completer, complete_machines
|
||||||
|
from ..machines.machines import Machine
|
||||||
|
|
||||||
|
log = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
# TODO get also secret facts
def get_all_facts(machine: Machine) -> dict:
    """Return all public facts of *machine* from its configured fact store."""
    store_module = importlib.import_module(machine.public_facts_module)
    store = store_module.FactStore(machine=machine)
    return store.get_all()
|
||||||
|
|
||||||
|
|
||||||
|
def get_command(args: argparse.Namespace) -> None:
    """CLI entry point: print all public facts of a machine as JSON."""
    machine = Machine(name=args.machine, flake=args.flake)
    # The raw facts are bytestrings, which are not JSON serializable;
    # decode them before dumping.
    raw_facts = get_all_facts(machine)
    facts = {
        key: value.decode("utf8") for key, value in raw_facts["TODO"].items()
    }
    print(json.dumps(facts, indent=4))
|
||||||
|
|
||||||
|
|
||||||
|
def register_list_parser(parser: argparse.ArgumentParser) -> None:
    """Attach the arguments of the `list` subcommand to *parser*."""
    machine_arg = parser.add_argument(
        "machine",
        help="The machine to print facts for",
    )
    add_dynamic_completer(machine_arg, complete_machines)
    parser.set_defaults(func=get_command)
|
||||||
28
pkgs/clan-cli/clan_cli/vars/public_modules/__init__.py
Normal file
28
pkgs/clan-cli/clan_cli/vars/public_modules/__init__.py
Normal file
@@ -0,0 +1,28 @@
|
|||||||
|
from abc import ABC, abstractmethod
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
from clan_cli.machines.machines import Machine
|
||||||
|
|
||||||
|
|
||||||
|
class FactStoreBase(ABC):
    """Abstract interface for storing and retrieving public (non-secret) vars."""

    @abstractmethod
    def __init__(self, machine: Machine) -> None:
        pass

    @abstractmethod
    def exists(self, service: str, name: str) -> bool:
        """Return True when the fact *name* of *service* is present."""

    @abstractmethod
    def set(self, service: str, name: str, value: bytes) -> Path | None:
        """Store *value*; return a path to commit, or None if unmanaged."""

    # get a single fact
    @abstractmethod
    def get(self, service: str, name: str) -> bytes:
        """Return the raw value of one fact."""

    # get all facts
    @abstractmethod
    def get_all(self) -> dict[str, dict[str, bytes]]:
        """Return all facts, grouped as {service: {name: value}}."""
||||||
64
pkgs/clan-cli/clan_cli/vars/public_modules/in_repo.py
Normal file
64
pkgs/clan-cli/clan_cli/vars/public_modules/in_repo.py
Normal file
@@ -0,0 +1,64 @@
|
|||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
from clan_cli.errors import ClanError
|
||||||
|
from clan_cli.machines.machines import Machine
|
||||||
|
|
||||||
|
from . import FactStoreBase
|
||||||
|
|
||||||
|
|
||||||
|
class FactStore(FactStoreBase):
    """Public fact store that keeps facts as plain files inside the flake repo.

    Layout: <flake>/machines/<machine>/vars/<generator>/<name>
    """

    def __init__(self, machine: Machine) -> None:
        self.machine = machine
        # Files live in the local repo, so this store cannot operate remotely.
        self.works_remotely = False

    def _fact_path(self, generator_name: str, name: str) -> Path:
        """Location of one fact inside the machine's vars tree."""
        return (
            self.machine.flake_dir
            / "machines"
            / self.machine.name
            / "vars"
            / generator_name
            / name
        )

    def set(self, generator_name: str, name: str, value: bytes) -> Path | None:
        # Guard clause instead of if/else nesting.
        if not self.machine.flake.is_local():
            raise ClanError(
                f"in_flake fact storage is only supported for local flakes: {self.machine.flake}"
            )
        # NOTE(review): set() resolves via machine.flake.path while the
        # readers use machine.flake_dir — presumably these agree; confirm.
        fact_path = (
            self.machine.flake.path
            / "machines"
            / self.machine.name
            / "vars"
            / generator_name
            / name
        )
        fact_path.parent.mkdir(parents=True, exist_ok=True)
        # FIX: dropped the redundant touch() — write_bytes creates the file.
        fact_path.write_bytes(value)
        return fact_path

    def exists(self, generator_name: str, name: str) -> bool:
        return self._fact_path(generator_name, name).exists()

    # get a single fact
    def get(self, generator_name: str, name: str) -> bytes:
        return self._fact_path(generator_name, name).read_bytes()

    # get all public vars
    def get_all(self) -> dict[str, dict[str, bytes]]:
        """Return all public vars; proper grouping by generator is still TODO."""
        facts_folder = self.machine.flake_dir / "machines" / self.machine.name / "vars"
        facts: dict[str, dict[str, bytes]] = {"TODO": {}}
        if facts_folder.exists():
            for fact_path in facts_folder.iterdir():
                facts["TODO"][fact_path.name] = fact_path.read_bytes()
        return facts
|
||||||
46
pkgs/clan-cli/clan_cli/vars/public_modules/vm.py
Normal file
46
pkgs/clan-cli/clan_cli/vars/public_modules/vm.py
Normal file
@@ -0,0 +1,46 @@
|
|||||||
|
import logging
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
from clan_cli.dirs import vm_state_dir
|
||||||
|
from clan_cli.errors import ClanError
|
||||||
|
from clan_cli.machines.machines import Machine
|
||||||
|
|
||||||
|
from . import FactStoreBase
|
||||||
|
|
||||||
|
log = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
class FactStore(FactStoreBase):
    """Public fact store backed by the VM state directory (for test VMs)."""

    def __init__(self, machine: Machine) -> None:
        self.machine = machine
        self.works_remotely = False
        self.dir = vm_state_dir(str(machine.flake), machine.name) / "facts"
        log.debug(f"FactStore initialized with dir {self.dir}")

    def _path(self, service: str, name: str) -> Path:
        """On-disk location of one fact."""
        return self.dir / service / name

    def exists(self, service: str, name: str) -> bool:
        return self._path(service, name).exists()

    def set(self, service: str, name: str, value: bytes) -> Path | None:
        target = self._path(service, name)
        target.parent.mkdir(parents=True, exist_ok=True)
        target.write_bytes(value)
        return None

    # get a single fact
    def get(self, service: str, name: str) -> bytes:
        target = self._path(service, name)
        if not target.exists():
            raise ClanError(f"Fact {name} for service {service} not found")
        return target.read_bytes()

    # get all facts
    def get_all(self) -> dict[str, dict[str, bytes]]:
        result: dict[str, dict[str, bytes]] = {}
        if self.dir.exists():
            for service_dir in self.dir.iterdir():
                result[service_dir.name] = {
                    fact.name: fact.read_bytes() for fact in service_dir.iterdir()
                }
        return result
|
||||||
31
pkgs/clan-cli/clan_cli/vars/secret_modules/__init__.py
Normal file
31
pkgs/clan-cli/clan_cli/vars/secret_modules/__init__.py
Normal file
@@ -0,0 +1,31 @@
|
|||||||
|
from abc import ABC, abstractmethod
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
from clan_cli.machines.machines import Machine
|
||||||
|
|
||||||
|
|
||||||
|
class SecretStoreBase(ABC):
    """Abstract interface for storing, retrieving and uploading secret vars."""

    @abstractmethod
    def __init__(self, machine: Machine) -> None:
        pass

    @abstractmethod
    def set(
        self, service: str, name: str, value: bytes, groups: list[str]
    ) -> Path | None:
        """Store *value*; return a path to commit, or None if unmanaged."""

    @abstractmethod
    def get(self, service: str, name: str) -> bytes:
        """Return the raw value of one secret."""

    @abstractmethod
    def exists(self, service: str, name: str) -> bool:
        """Return True when the secret is present in the store."""

    def update_check(self) -> bool:
        """Return True when the target host's secrets are already up to date."""
        return False

    @abstractmethod
    def upload(self, output_dir: Path) -> None:
        """Materialize all secrets into *output_dir* for transfer to the host."""
|
||||||
117
pkgs/clan-cli/clan_cli/vars/secret_modules/password_store.py
Normal file
117
pkgs/clan-cli/clan_cli/vars/secret_modules/password_store.py
Normal file
@@ -0,0 +1,117 @@
|
|||||||
|
import os
|
||||||
|
import subprocess
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
from clan_cli.machines.machines import Machine
|
||||||
|
from clan_cli.nix import nix_shell
|
||||||
|
|
||||||
|
from . import SecretStoreBase
|
||||||
|
|
||||||
|
|
||||||
|
class SecretStore(SecretStoreBase):
    """Secret store backed by `pass` (password-store) under machines/<name>/."""

    def __init__(self, machine: Machine) -> None:
        self.machine = machine

    @property
    def _store_dir(self) -> str:
        """Root of the password store on disk (was duplicated in several methods)."""
        return os.environ.get(
            "PASSWORD_STORE_DIR", f"{os.environ['HOME']}/.password-store"
        )

    def set(
        self, service: str, name: str, value: bytes, groups: list[str]
    ) -> Path | None:
        subprocess.run(
            nix_shell(
                ["nixpkgs#pass"],
                ["pass", "insert", "-m", f"machines/{self.machine.name}/{name}"],
            ),
            input=value,
            check=True,
        )
        return None  # we manage the files outside of the git repo

    def get(self, service: str, name: str) -> bytes:
        return subprocess.run(
            nix_shell(
                ["nixpkgs#pass"],
                ["pass", "show", f"machines/{self.machine.name}/{name}"],
            ),
            check=True,
            stdout=subprocess.PIPE,
        ).stdout

    def exists(self, service: str, name: str) -> bool:
        secret_path = (
            Path(self._store_dir) / f"machines/{self.machine.name}/{name}.gpg"
        )
        return secret_path.exists()

    def _git_last_commit(self, path: str) -> bytes:
        """Hash of the last git commit that touched *path* in the store repo.

        FIX: this command construction was duplicated verbatim for the
        machine directory and for every symlink; factored out here.
        """
        return subprocess.run(
            nix_shell(
                ["nixpkgs#git"],
                ["git", "-C", self._store_dir, "log", "-1", "--format=%H", path],
            ),
            stdout=subprocess.PIPE,
        ).stdout.strip()

    def generate_hash(self) -> bytes:
        """Stable fingerprint of this machine's secrets in the password store.

        Combines the last commit hash of the machine directory with the last
        commit hash of every symlinked entry (symlinks may point at files
        whose history changes independently).
        """
        hashes = [self._git_last_commit(f"machines/{self.machine.name}")]
        for symlink in Path(self._store_dir).glob(
            f"machines/{self.machine.name}/**/*"
        ):
            if symlink.is_symlink():
                hashes.append(self._git_last_commit(str(symlink)))
        # we sort the hashes to make sure that the order is always the same
        hashes.sort()
        return b"\n".join(hashes)

    # FIXME: add this when we switch to python3.12
    # @override
    def update_check(self) -> bool:
        """Compare the local store fingerprint with the one on the target host."""
        local_hash = self.generate_hash()
        remote_hash = self.machine.target_host.run(
            # TODO get the path to the secrets from the machine
            ["cat", f"{self.machine.secrets_upload_directory}/.pass_info"],
            check=False,
            stdout=subprocess.PIPE,
        ).stdout.strip()

        if not remote_hash:
            print("remote hash is empty")
            return False

        return local_hash.decode() == remote_hash

    def upload(self, output_dir: Path) -> None:
        """Export all of this machine's secrets plus the fingerprint file."""
        # NOTE(review): this iterates facts_data, not vars_generators —
        # presumably a leftover from the facts implementation; confirm.
        for service in self.machine.facts_data:
            for secret in self.machine.facts_data[service]["secret"]:
                if isinstance(secret, dict):
                    secret_name = secret["name"]
                else:
                    # TODO: drop old format soon
                    secret_name = secret
                (output_dir / secret_name).write_bytes(self.get(service, secret_name))
        (output_dir / ".pass_info").write_bytes(self.generate_hash())
|
||||||
69
pkgs/clan-cli/clan_cli/vars/secret_modules/sops.py
Normal file
69
pkgs/clan-cli/clan_cli/vars/secret_modules/sops.py
Normal file
@@ -0,0 +1,69 @@
|
|||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
from clan_cli.machines.machines import Machine
|
||||||
|
from clan_cli.secrets.folders import sops_secrets_folder
|
||||||
|
from clan_cli.secrets.machines import add_machine, has_machine
|
||||||
|
from clan_cli.secrets.secrets import decrypt_secret, encrypt_secret, has_secret
|
||||||
|
from clan_cli.secrets.sops import generate_private_key
|
||||||
|
|
||||||
|
from . import SecretStoreBase
|
||||||
|
|
||||||
|
|
||||||
|
class SecretStore(SecretStoreBase):
    """Secret store backed by sops-encrypted files inside the flake repo."""

    def __init__(self, machine: Machine) -> None:
        self.machine = machine

        # no need to generate keys if we don't manage secrets
        if not hasattr(self.machine, "vars_data") or not self.machine.vars_generators:
            return
        for generator in self.machine.vars_generators.values():
            if "files" in generator:
                for file in generator["files"].values():
                    if file["secret"]:
                        # NOTE(review): this returns (skipping key
                        # generation) as soon as a secret file is declared,
                        # which reads backwards relative to the comment
                        # above — confirm the intended condition.
                        return

        if has_machine(self.machine.flake_dir, self.machine.name):
            return
        # Generate an age key for the machine and register it as a recipient.
        priv_key, pub_key = generate_private_key()
        encrypt_secret(
            self.machine.flake_dir,
            sops_secrets_folder(self.machine.flake_dir)
            / f"{self.machine.name}-age.key",
            priv_key,
        )
        add_machine(self.machine.flake_dir, self.machine.name, pub_key, False)

    def set(
        self, generator_name: str, name: str, value: bytes, groups: list[str]
    ) -> Path | None:
        path = (
            sops_secrets_folder(self.machine.flake_dir)
            / f"{self.machine.name}-{generator_name}-{name}"
        )
        encrypt_secret(
            self.machine.flake_dir,
            path,
            value,
            add_machines=[self.machine.name],
            add_groups=groups,
        )
        return path

    def get(self, service: str, name: str) -> bytes:
        # BUG FIX: read back the same path that set() writes
        # (<machine>-<service>-<name>); the service segment was missing, so
        # secrets stored by set() could never be read back.
        return decrypt_secret(
            self.machine.flake_dir, f"{self.machine.name}-{service}-{name}"
        ).encode("utf-8")

    def exists(self, service: str, name: str) -> bool:
        # BUG FIX: same path mismatch as get() — without the service segment
        # exists() never matched, forcing regeneration on every run.
        return has_secret(
            self.machine.flake_dir,
            f"{self.machine.name}-{service}-{name}",
        )

    def upload(self, output_dir: Path) -> None:
        """Upload this machine's age key so the host can decrypt its secrets."""
        key_name = f"{self.machine.name}-age.key"
        if not has_secret(self.machine.flake_dir, key_name):
            # skip uploading the secret, not managed by us
            return
        key = decrypt_secret(self.machine.flake_dir, key_name)
        (output_dir / "key.txt").write_text(key)
|
||||||
35
pkgs/clan-cli/clan_cli/vars/secret_modules/vm.py
Normal file
35
pkgs/clan-cli/clan_cli/vars/secret_modules/vm.py
Normal file
@@ -0,0 +1,35 @@
|
|||||||
|
import os
|
||||||
|
import shutil
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
from clan_cli.dirs import vm_state_dir
|
||||||
|
from clan_cli.machines.machines import Machine
|
||||||
|
|
||||||
|
from . import SecretStoreBase
|
||||||
|
|
||||||
|
|
||||||
|
class SecretStore(SecretStoreBase):
    """Secret store for test VMs: plain files under the VM state directory."""

    def __init__(self, machine: Machine) -> None:
        self.machine = machine
        self.dir = vm_state_dir(str(machine.flake), machine.name) / "secrets"
        self.dir.mkdir(parents=True, exist_ok=True)

    def set(
        self, service: str, name: str, value: bytes, groups: list[str]
    ) -> Path | None:
        target = self.dir / service / name
        target.parent.mkdir(parents=True, exist_ok=True)
        target.write_bytes(value)
        return None  # we manage the files outside of the git repo

    def get(self, service: str, name: str) -> bytes:
        return (self.dir / service / name).read_bytes()

    def exists(self, service: str, name: str) -> bool:
        return (self.dir / service / name).exists()

    def upload(self, output_dir: Path) -> None:
        """Mirror the whole secret tree into *output_dir*, replacing it."""
        if os.path.exists(output_dir):
            shutil.rmtree(output_dir)
        shutil.copytree(self.dir, output_dir)
|
||||||
58
pkgs/clan-cli/clan_cli/vars/upload.py
Normal file
58
pkgs/clan-cli/clan_cli/vars/upload.py
Normal file
@@ -0,0 +1,58 @@
|
|||||||
|
import argparse
|
||||||
|
import importlib
|
||||||
|
import logging
|
||||||
|
from pathlib import Path
|
||||||
|
from tempfile import TemporaryDirectory
|
||||||
|
|
||||||
|
from ..cmd import Log, run
|
||||||
|
from ..completions import add_dynamic_completer, complete_machines
|
||||||
|
from ..machines.machines import Machine
|
||||||
|
from ..nix import nix_shell
|
||||||
|
|
||||||
|
log = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
def upload_secrets(machine: Machine) -> None:
    """Rsync all of *machine*'s secrets to its configured upload directory.

    Skips the transfer entirely when the store reports the host is already
    up to date.
    """
    # NOTE(review): resolves the *facts* secret module here, not the vars
    # one — presumably intentional for compatibility; confirm.
    store_module = importlib.import_module(machine.secret_facts_module)
    secret_store = store_module.SecretStore(machine=machine)

    if secret_store.update_check():
        log.info("Secrets already up to date")
        return
    with TemporaryDirectory() as tempdir:
        secret_store.upload(Path(tempdir))
        host = machine.target_host
        ssh_cmd = host.ssh_cmd()
        rsync_cmd = [
            "rsync",
            "-e",
            " ".join(["ssh"] + ssh_cmd[2:]),
            "-az",
            "--delete",
            "--chown=root:root",
            "--chmod=D700,F600",
            f"{tempdir!s}/",
            f"{host.user}@{host.host}:{machine.secrets_upload_directory}/",
        ]
        run(nix_shell(["nixpkgs#rsync"], rsync_cmd), log=Log.BOTH)
|
||||||
|
|
||||||
|
|
||||||
|
def upload_command(args: argparse.Namespace) -> None:
    """CLI entry point: upload secrets for one machine."""
    upload_secrets(Machine(name=args.machine, flake=args.flake))
|
||||||
|
|
||||||
|
|
||||||
|
def register_upload_parser(parser: argparse.ArgumentParser) -> None:
    """Attach the arguments of the `upload` subcommand to *parser*."""
    machine_arg = parser.add_argument(
        "machine",
        help="The machine to upload secrets to",
    )
    add_dynamic_completer(machine_arg, complete_machines)
    parser.set_defaults(func=upload_command)
|
||||||
@@ -254,6 +254,9 @@ def collect_commands() -> list[Category]:
|
|||||||
if isinstance(action, argparse._SubParsersAction):
|
if isinstance(action, argparse._SubParsersAction):
|
||||||
subparsers: dict[str, argparse.ArgumentParser] = action.choices
|
subparsers: dict[str, argparse.ArgumentParser] = action.choices
|
||||||
for name, subparser in subparsers.items():
|
for name, subparser in subparsers.items():
|
||||||
|
if str(subparser.description).startswith("WIP"):
|
||||||
|
print(f"Excluded {name} from documentation as it is marked as WIP")
|
||||||
|
continue
|
||||||
(_options, _positionals, _subcommands) = get_subcommands(
|
(_options, _positionals, _subcommands) = get_subcommands(
|
||||||
subparser, to=result, level=2, prefix=[name]
|
subparser, to=result, level=2, prefix=[name]
|
||||||
)
|
)
|
||||||
|
|||||||
@@ -58,6 +58,7 @@
|
|||||||
python docs.py reference
|
python docs.py reference
|
||||||
mkdir -p $out
|
mkdir -p $out
|
||||||
cp -r out/* $out
|
cp -r out/* $out
|
||||||
|
ls -lah $out
|
||||||
'';
|
'';
|
||||||
};
|
};
|
||||||
clan-ts-api = pkgs.stdenv.mkDerivation {
|
clan-ts-api = pkgs.stdenv.mkDerivation {
|
||||||
|
|||||||
142
pkgs/clan-cli/tests/test_api_dataclass_compat.py
Normal file
142
pkgs/clan-cli/tests/test_api_dataclass_compat.py
Normal file
@@ -0,0 +1,142 @@
|
|||||||
|
import ast
|
||||||
|
import importlib.util
|
||||||
|
import os
|
||||||
|
import sys
|
||||||
|
from dataclasses import is_dataclass
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
from clan_cli.api.util import JSchemaTypeError, type_to_dict
|
||||||
|
from clan_cli.errors import ClanError
|
||||||
|
|
||||||
|
|
||||||
|
def find_dataclasses_in_directory(
|
||||||
|
directory: Path, exclude_paths: list[str] = []
|
||||||
|
) -> list[tuple[str, str]]:
|
||||||
|
"""
|
||||||
|
Find all dataclass classes in all Python files within a nested directory.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
directory (str): The root directory to start searching from.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
List[Tuple[str, str]]: A list of tuples containing the file path and the dataclass name.
|
||||||
|
"""
|
||||||
|
dataclass_files = []
|
||||||
|
|
||||||
|
excludes = [os.path.join(directory, d) for d in exclude_paths]
|
||||||
|
|
||||||
|
for root, _, files in os.walk(directory, topdown=False):
|
||||||
|
for file in files:
|
||||||
|
if not file.endswith(".py"):
|
||||||
|
continue
|
||||||
|
|
||||||
|
file_path = os.path.join(root, file)
|
||||||
|
|
||||||
|
if file_path in excludes:
|
||||||
|
print(f"Skipping dataclass check for file: {file_path}")
|
||||||
|
continue
|
||||||
|
|
||||||
|
with open(file_path, encoding="utf-8") as f:
|
||||||
|
try:
|
||||||
|
tree = ast.parse(f.read(), filename=file_path)
|
||||||
|
for node in ast.walk(tree):
|
||||||
|
if isinstance(node, ast.ClassDef):
|
||||||
|
for deco in node.decorator_list:
|
||||||
|
if (
|
||||||
|
isinstance(deco, ast.Name)
|
||||||
|
and deco.id == "dataclass"
|
||||||
|
):
|
||||||
|
dataclass_files.append((file_path, node.name))
|
||||||
|
elif (
|
||||||
|
isinstance(deco, ast.Call)
|
||||||
|
and isinstance(deco.func, ast.Name)
|
||||||
|
and deco.func.id == "dataclass"
|
||||||
|
):
|
||||||
|
dataclass_files.append((file_path, node.name))
|
||||||
|
except (SyntaxError, UnicodeDecodeError) as e:
|
||||||
|
print(f"Error parsing {file_path}: {e}")
|
||||||
|
|
||||||
|
return dataclass_files
|
||||||
|
|
||||||
|
|
||||||
|
def load_dataclass_from_file(
|
||||||
|
file_path: str, class_name: str, root_dir: str
|
||||||
|
) -> type | None:
|
||||||
|
"""
|
||||||
|
Load a dataclass from a given file path.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
file_path (str): Path to the file.
|
||||||
|
class_name (str): Name of the class to load.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
List[Type]: The dataclass type if found, else an empty list.
|
||||||
|
"""
|
||||||
|
module_name = (
|
||||||
|
os.path.relpath(file_path, root_dir).replace(os.path.sep, ".").rstrip(".py")
|
||||||
|
)
|
||||||
|
try:
|
||||||
|
sys.path.insert(0, root_dir)
|
||||||
|
spec = importlib.util.spec_from_file_location(module_name, file_path)
|
||||||
|
if not spec:
|
||||||
|
raise ClanError(f"Could not load spec from file: {file_path}")
|
||||||
|
|
||||||
|
module = importlib.util.module_from_spec(spec)
|
||||||
|
if not module:
|
||||||
|
raise ClanError(f"Could not create module: {file_path}")
|
||||||
|
|
||||||
|
if not spec.loader:
|
||||||
|
raise ClanError(f"Could not load loader from spec: {spec}")
|
||||||
|
|
||||||
|
spec.loader.exec_module(module)
|
||||||
|
|
||||||
|
finally:
|
||||||
|
sys.path.pop(0)
|
||||||
|
dataclass_type = getattr(module, class_name, None)
|
||||||
|
|
||||||
|
if dataclass_type and is_dataclass(dataclass_type):
|
||||||
|
return dataclass_type
|
||||||
|
|
||||||
|
raise ClanError(f"Could not load dataclass {class_name} from file: {file_path}")
|
||||||
|
|
||||||
|
|
||||||
|
def test_all_dataclasses() -> None:
    """
    This Test ensures that all dataclasses are compatible with the API.

    It will load all dataclasses from the clan_cli directory and
    generate a JSON schema for each of them.

    It will fail if any dataclass cannot be converted to JSON schema.
    This means the dataclass in its current form is not compatible with the API.
    """

    # Excludes:
    # - API includes Type Generic wrappers, that are not known in the init file.
    excludes = ["api/__init__.py"]

    # NOTE(review): resolves "clan_cli" against the current working directory —
    # assumes the test runner is invoked from the package root; confirm in CI.
    cli_path = Path("clan_cli").resolve()
    dataclasses = find_dataclasses_in_directory(cli_path, excludes)

    for file, dataclass in dataclasses:
        print(f"checking dataclass {dataclass} in file: {file}")
        try:
            # Import the module, fetch the class, and check that the API's
            # schema converter accepts it.
            dclass = load_dataclass_from_file(file, dataclass, str(cli_path.parent))
            type_to_dict(dclass)
        except JSchemaTypeError as e:
            # Wrap the conversion failure in a ClanError with a human-readable
            # hint on how to make the dataclass API-compatible.
            print(f"Error loading dataclass {dataclass} from {file}: {e}")
            raise ClanError(
                f"""
--------------------------------------------------------------------------------
Error converting dataclass 'class {dataclass}()' from {file}

Details:
{e}

Help:
- Converting public fields to PRIVATE by prefixing them with underscore ('_')
- Ensure all private fields are initialized the API wont provide initial values for them.
--------------------------------------------------------------------------------
""",
                location=__file__,
            )
|
||||||
@@ -108,7 +108,7 @@ def test_type_from_schema_path_simple() -> None:
|
|||||||
schema = dict(
|
schema = dict(
|
||||||
type="boolean",
|
type="boolean",
|
||||||
)
|
)
|
||||||
assert parsing.type_from_schema_path(schema, []) == bool
|
assert parsing.type_from_schema_path(schema, []) is bool
|
||||||
|
|
||||||
|
|
||||||
def test_type_from_schema_path_nested() -> None:
|
def test_type_from_schema_path_nested() -> None:
|
||||||
@@ -125,8 +125,8 @@ def test_type_from_schema_path_nested() -> None:
|
|||||||
age=dict(type="integer"),
|
age=dict(type="integer"),
|
||||||
),
|
),
|
||||||
)
|
)
|
||||||
assert parsing.type_from_schema_path(schema, ["age"]) == int
|
assert parsing.type_from_schema_path(schema, ["age"]) is int
|
||||||
assert parsing.type_from_schema_path(schema, ["name", "first"]) == str
|
assert parsing.type_from_schema_path(schema, ["name", "first"]) is str
|
||||||
|
|
||||||
|
|
||||||
def test_type_from_schema_path_dynamic_attrs() -> None:
|
def test_type_from_schema_path_dynamic_attrs() -> None:
|
||||||
@@ -140,16 +140,16 @@ def test_type_from_schema_path_dynamic_attrs() -> None:
|
|||||||
),
|
),
|
||||||
),
|
),
|
||||||
)
|
)
|
||||||
assert parsing.type_from_schema_path(schema, ["age"]) == int
|
assert parsing.type_from_schema_path(schema, ["age"]) is int
|
||||||
assert parsing.type_from_schema_path(schema, ["users", "foo"]) == str
|
assert parsing.type_from_schema_path(schema, ["users", "foo"]) is str
|
||||||
|
|
||||||
|
|
||||||
def test_map_type() -> None:
|
def test_map_type() -> None:
|
||||||
with pytest.raises(ClanError):
|
with pytest.raises(ClanError):
|
||||||
config.map_type("foo")
|
config.map_type("foo")
|
||||||
assert config.map_type("string") == str
|
assert config.map_type("string") is str
|
||||||
assert config.map_type("integer") == int
|
assert config.map_type("integer") is int
|
||||||
assert config.map_type("boolean") == bool
|
assert config.map_type("boolean") is bool
|
||||||
assert config.map_type("attribute set of string") == dict[str, str]
|
assert config.map_type("attribute set of string") == dict[str, str]
|
||||||
assert config.map_type("attribute set of integer") == dict[str, int]
|
assert config.map_type("attribute set of integer") == dict[str, int]
|
||||||
assert config.map_type("null or string") == str | None
|
assert config.map_type("null or string") == str | None
|
||||||
|
|||||||
49
pkgs/clan-cli/tests/test_vars.py
Normal file
49
pkgs/clan-cli/tests/test_vars.py
Normal file
@@ -0,0 +1,49 @@
|
|||||||
|
from pathlib import Path
|
||||||
|
from typing import TYPE_CHECKING
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
from fixtures_flakes import generate_flake
|
||||||
|
from helpers.cli import Cli
|
||||||
|
from root import CLAN_CORE
|
||||||
|
|
||||||
|
if TYPE_CHECKING:
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.impure
def test_generate_secret(
    monkeypatch: pytest.MonkeyPatch,
    temporary_home: Path,
    # age_keys: list["KeyPair"],
) -> None:
    """Running `vars generate` materializes a non-secret generator file on disk."""
    # One machine with a single generator producing one public (non-secret) file.
    machine_configs = {
        "my_machine": {
            "clan": {
                "core": {
                    "vars": {
                        "generators": {
                            "my_generator": {
                                "files": {"my_secret": {"secret": False}},
                                "script": "echo hello > $out/my_secret",
                            }
                        }
                    }
                }
            }
        }
    }
    flake = generate_flake(
        temporary_home,
        flake_template=CLAN_CORE / "templates" / "minimal",
        machine_configs=machine_configs,
    )
    monkeypatch.chdir(flake.path)
    cli = Cli()
    cli.run(["vars", "generate", "--flake", str(flake.path), "my_machine"])
    generated = (
        flake.path / "machines" / "my_machine" / "vars" / "my_generator" / "my_secret"
    )
    assert generated.is_file()
|
||||||
@@ -19,29 +19,6 @@ let
|
|||||||
};
|
};
|
||||||
};
|
};
|
||||||
};
|
};
|
||||||
installerModule =
|
|
||||||
{ config, modulesPath, ... }:
|
|
||||||
{
|
|
||||||
imports = [
|
|
||||||
wifiModule
|
|
||||||
self.nixosModules.installer
|
|
||||||
self.inputs.nixos-generators.nixosModules.all-formats
|
|
||||||
(modulesPath + "/installer/cd-dvd/iso-image.nix")
|
|
||||||
];
|
|
||||||
|
|
||||||
isoImage.squashfsCompression = "zstd";
|
|
||||||
|
|
||||||
system.stateVersion = config.system.nixos.version;
|
|
||||||
nixpkgs.pkgs = self.inputs.nixpkgs.legacyPackages.x86_64-linux;
|
|
||||||
};
|
|
||||||
|
|
||||||
installerSystem = lib.nixosSystem {
|
|
||||||
modules = [
|
|
||||||
self.inputs.disko.nixosModules.default
|
|
||||||
installerModule
|
|
||||||
{ disko.memSize = 4096; } # FIXME: otherwise the image builder goes OOM
|
|
||||||
];
|
|
||||||
};
|
|
||||||
|
|
||||||
flashInstallerModule =
|
flashInstallerModule =
|
||||||
{ config, ... }:
|
{ config, ... }:
|
||||||
@@ -98,14 +75,6 @@ let
|
|||||||
in
|
in
|
||||||
{
|
{
|
||||||
clan = {
|
clan = {
|
||||||
# To build a generic installer image (without ssh pubkeys),
|
|
||||||
# use the following command:
|
|
||||||
# $ nix build .#iso-installer
|
|
||||||
machines.iso-installer = {
|
|
||||||
imports = [ installerModule ];
|
|
||||||
fileSystems."/".device = lib.mkDefault "/dev/null";
|
|
||||||
};
|
|
||||||
|
|
||||||
# To directly flash the installer to a disk, use the following command:
|
# To directly flash the installer to a disk, use the following command:
|
||||||
# $ clan flash flash-installer --disk main /dev/sdX --yes
|
# $ clan flash flash-installer --disk main /dev/sdX --yes
|
||||||
# This will include your ssh public keys in the installer.
|
# This will include your ssh public keys in the installer.
|
||||||
@@ -114,7 +83,4 @@ in
|
|||||||
boot.loader.grub.enable = lib.mkDefault true;
|
boot.loader.grub.enable = lib.mkDefault true;
|
||||||
};
|
};
|
||||||
};
|
};
|
||||||
flake.packages.x86_64-linux.iso-installer = installerSystem.config.formats.iso;
|
|
||||||
flake.apps.x86_64-linux.install-vm.program = installerSystem.config.formats.vm.outPath;
|
|
||||||
flake.apps.x86_64-linux.install-vm-nogui.program = installerSystem.config.formats.vm-nogui.outPath;
|
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,3 +1,4 @@
|
|||||||
|
# shellcheck shell=bash
|
||||||
source_up
|
source_up
|
||||||
|
|
||||||
watch_file flake-module.nix default.nix
|
watch_file flake-module.nix default.nix
|
||||||
|
|||||||
Reference in New Issue
Block a user