Compare commits
125 Commits
| SHA1 |
|---|
| 3d4b7902e6 |
| e6b494a849 |
| cde72f3710 |
| 5047b6686d |
| b77ffac4d4 |
| b2d3ff4431 |
| f70879aa63 |
| 31190ed8e5 |
| 36dbb8fafd |
| 47ae5981f6 |
| 11c3b6f353 |
| 191562a84e |
| 06a54c21c3 |
| 359ad22c90 |
| 754e0ca9e8 |
| 8290660f20 |
| 78a50c5d74 |
| 496555b405 |
| 216e5a53d4 |
| a1af14db57 |
| 976b4a2c3a |
| c6a2db15a7 |
| 6f80cee971 |
| f17cf41093 |
| 483e2c05ea |
| da34bd7199 |
| 3478dea8b2 |
| ce3fc6973b |
| c228d72da2 |
| 127009b303 |
| ed653fa8b9 |
| b8da149453 |
| a23c251b09 |
| bf214011cf |
| a1dcddf9b4 |
| f500aee786 |
| 4cfd580447 |
| b1a4b4de96 |
| 108a37b0a3 |
| 8c7db195ab |
| f7bb5d7aaf |
| 8e9053cf80 |
| 9ec66195eb |
| 93475ab4b3 |
| d1e8b1ed96 |
| 3acc4b4d25 |
| 7932517b4a |
| 5f1191148e |
| d079bc85a8 |
| df6683a0bd |
| 4b3b573e8c |
| e930e14238 |
| 2ccf32c36b |
| 398a61acbc |
| fdedf40e27 |
| 45fd64a930 |
| 31722d9dc0 |
| d804c6059d |
| 4d1437b5cc |
| 58bc8d162d |
| d12019d290 |
| 1918cfd707 |
| 067da45082 |
| 0a8b8713d9 |
| 4993b98258 |
| 183c1f4235 |
| ea7b0c8b90 |
| 27b9c8915b |
| 36771f3ecd |
| 52fcc91479 |
| 65d2a4e081 |
| 9dc362437c |
| 6eb8fe47c4 |
| 7208d63e78 |
| 01f1a6900a |
| 12ce8238f1 |
| c5071bc212 |
| 81fc60eef8 |
| bb25e136c3 |
| a1e2a4f64a |
| 943c19939a |
| 17d7eec0ae |
| 7b4e76df29 |
| 1cb33a5c6c |
| cd11f6ad10 |
| 67ceba6637 |
| 1330c60190 |
| e8d4cd9936 |
| 537a1ae87f |
| 0aa876a06c |
| 457e45d989 |
| 1356ca9b8c |
| df8074100d |
| d441f1d60c |
| a0097dab66 |
| 6c17fa648f |
| 51b087f7ae |
| c340831edd |
| c3dc315576 |
| ff3a1dc928 |
| 3695a5adf2 |
| 4d404cfc50 |
| 7091b09fa7 |
| 77c84e7471 |
| 413e172cbd |
| 3b975ed993 |
| 36baec8d48 |
| eb8d5167e7 |
| b358089488 |
| 36b20f18d4 |
| 52c6ad548d |
| 57e9b27ff8 |
| 661004972b |
| 714f3b0378 |
| 87f301122e |
| 53d658a3c0 |
| 9257f140ba |
| b68e39e8fa |
| c566872f05 |
| 446039b02b |
| 5a69bbe93e |
| a715364338 |
| 7bf1c0e42a |
| 81545766a0 |
| 4e0ae54471 |
1 .env.template Normal file
@@ -0,0 +1 @@
export OPENAI_API_KEY=$(rbw get openai-api-key)
12 .envrc
@@ -2,4 +2,14 @@ if ! has nix_direnv_version || ! nix_direnv_version 3.0.4; then
  source_url "https://raw.githubusercontent.com/nix-community/nix-direnv/3.0.4/direnvrc" "sha256-DzlYZ33mWF/Gs8DDeyjr8mnVmQGx7ASYqA5WlxwvBG4="
fi

use flake
watch_file .direnv/selected-shell

if [ -e .env ]; then
  source .env
fi

if [ -e .direnv/selected-shell ]; then
  use flake .#$(cat .direnv/selected-shell)
else
  use flake
fi
@@ -1,12 +0,0 @@
name: checks-impure
on:
  pull_request:
  push:
    branches: main
jobs:
  test:
    if: ${{ github.actor != 'ui-asset-bot' }}
    runs-on: nix
    steps:
      - uses: actions/checkout@v3
      - run: nix run .#impure-checks
@@ -2,11 +2,21 @@ name: checks
on:
  pull_request:
  push:
    branches: main
    branches:
      - main
jobs:
  test:
    if: ${{ github.actor != 'ui-asset-bot' }}
  checks:
    runs-on: nix
    steps:
      - uses: actions/checkout@v3
      - run: nix run --refresh github:Mic92/nix-fast-build -- --no-nom --eval-workers 20
  check-links:
    runs-on: nix
    steps:
      - uses: actions/checkout@v3
      - run: nix run --refresh --inputs-from .# nixpkgs#lychee .
  checks-impure:
    runs-on: nix
    steps:
      - uses: actions/checkout@v3
      - run: nix run .#impure-checks
11 README.md
@@ -6,18 +6,23 @@ Welcome to the cLAN Core Repository, the heart of the [clan.lol](https://clan.lol)

If you're new to cLAN and eager to dive in, start with our quickstart guide:

- **Quickstart Guide**: Check out [quickstart.md](docs/quickstart.md) to get up and running with cLAN in no time.
- **Quickstart Guide**: Check out [quickstart.md](docs/admins/quickstart.md) to get up and running with cLAN in no time.

## Managing Secrets

Security is paramount, and cLAN provides guidelines for handling secrets effectively:

- **Secrets Management**: Learn how to manage secrets securely by reading [secrets-management.md](docs/secrets-management.md).
- **Secrets Management**: Learn how to manage secrets securely by reading [secrets-management.md](docs/admins/secrets-management.md).

## Contributing to cLAN

We welcome contributions from the community, and we've prepared a comprehensive guide to help you get started:

- **Contribution Guidelines**: Find out how to contribute and make a meaningful impact on the cLAN project by reading [contributing.md](docs/contributing.md).
- **Contribution Guidelines**: Find out how to contribute and make a meaningful impact on the cLAN project by reading [contributing.md](docs/contributing/contributing.md).

Whether you're a newcomer or a seasoned developer, we look forward to your contributions and collaboration on the cLAN project. Let's build amazing things together!

### Development environment

Setup `direnv` and `nix-direnv` and execute `direnv allow`.
To switch between different dev environments execute `select-shell`.
@@ -46,6 +46,25 @@ in
      users.users.root.openssh.authorizedKeys.keyFiles = [
        ../lib/ssh/pubkey
      ];

      systemd.tmpfiles.settings."vmsecrets" = {
        "/etc/secrets/borgbackup.ssh" = {
          C.argument = "${../lib/ssh/privkey}";
          z = {
            mode = "0400";
            user = "root";
          };
        };
        "/etc/secrets/borgbackup.repokey" = {
          C.argument = builtins.toString (pkgs.writeText "repokey" "repokey12345");
          z = {
            mode = "0400";
            user = "root";
          };
        };
      };
      clanCore.secretStore = "vm";

      environment.systemPackages = [ self.packages.${pkgs.system}.clan-cli ];
      environment.etc."install-closure".source = "${closureInfo}/store-paths";
      nix.settings = {
@@ -58,10 +77,7 @@ in
      clanCore.state.test-backups.folders = [ "/var/test-backups" ];
      clan.borgbackup = {
        enable = true;
        destinations.test_backup_server = {
          repo = "borg@server:.";
          rsh = "ssh -i /root/.ssh/id_ed25519 -o StrictHostKeyChecking=no";
        };
        destinations.test_backup_server.repo = "borg@server:.";
      };
    };
  };
@@ -1,7 +1,7 @@
(import ../lib/test-base.nix) ({ ... }: {
  name = "borgbackup";

  nodes.machine = { self, ... }: {
  nodes.machine = { self, pkgs, ... }: {
    imports = [
      self.clanModules.borgbackup
      self.nixosModules.clanCore
@@ -18,21 +18,27 @@
      clanCore.clanDir = ./.;
      clanCore.state.testState.folders = [ "/etc/state" ];
      environment.etc.state.text = "hello world";
      systemd.tmpfiles.settings = {
        "ssh-key"."/root/.ssh/id_ed25519" = {
      systemd.tmpfiles.settings."vmsecrets" = {
        "/etc/secrets/borgbackup.ssh" = {
          C.argument = "${../lib/ssh/privkey}";
          z = {
            mode = "0400";
            user = "root";
          };
        };
        "/etc/secrets/borgbackup.repokey" = {
          C.argument = builtins.toString (pkgs.writeText "repokey" "repokey12345");
          z = {
            mode = "0400";
            user = "root";
          };
        };
      };
      clanCore.secretStore = "vm";

      clan.borgbackup = {
        enable = true;
        destinations.test = {
          repo = "borg@localhost:.";
          rsh = "ssh -i /root/.ssh/id_ed25519 -o StrictHostKeyChecking=no";
        };
        destinations.test.repo = "borg@localhost:.";
      };
    }
  ];
@@ -19,8 +19,8 @@ test_driver = ["py.typed"]
target-version = "py311"
line-length = 88

select = [ "E", "F", "I", "U", "N", "RUF", "ANN", "A" ]
ignore = ["E501", "ANN101", "ANN401", "A003"]
lint.select = [ "E", "F", "I", "U", "N", "RUF", "ANN", "A" ]
lint.ignore = ["E501", "ANN101", "ANN401", "A003"]

[tool.mypy]
python_version = "3.11"
@@ -34,18 +34,21 @@ in
  services.borgbackup.jobs = lib.mapAttrs
    (_: dest: {
      paths = lib.flatten (map (state: state.folders) (lib.attrValues config.clanCore.state));
      exclude = [
        "*.pyc"
      ];
      exclude = [ "*.pyc" ];
      repo = dest.repo;
      environment.BORG_RSH = dest.rsh;
      encryption.mode = "none";
      compression = "auto,zstd";
      startAt = "*-*-* 01:00:00";
      persistentTimer = true;
      preHook = ''
        set -x
      '';

      encryption = {
        mode = "repokey";
        passCommand = "cat ${config.clanCore.secrets.borgbackup.secrets."borgbackup.repokey".path}";
      };

      prune.keep = {
        within = "1d"; # Keep all archives from the last day
        daily = 7;
@@ -58,20 +61,21 @@ in
  clanCore.secrets.borgbackup = {
    facts."borgbackup.ssh.pub" = { };
    secrets."borgbackup.ssh" = { };
    generator.path = [ pkgs.openssh pkgs.coreutils ];
    secrets."borgbackup.repokey" = { };
    generator.path = [ pkgs.openssh pkgs.coreutils pkgs.xkcdpass ];
    generator.script = ''
      ssh-keygen -t ed25519 -N "" -f "$secrets"/borgbackup.ssh
      mv "$secrets"/borgbackup.ssh.pub "$facts"/borgbackup.ssh.pub
      xkcdpass -n 4 -d - > "$secrets"/borgbackup.repokey
    '';
  };

  clanCore.backups.providers.borgbackup = {
    # TODO list needs to run locally or on the remote machine
    list = ''
      ${lib.concatMapStringsSep "\n" (dest: ''
        # we need yes here to skip the changed url verification
        yes y | borg-job-${dest.name} list --json | jq -r '. + {"job-name": "${dest.name}"}'
      '') (lib.attrValues cfg.destinations)}
      # we need yes here to skip the changed url verification
      ${lib.concatMapStringsSep "\n" (dest: ''yes y | borg-job-${dest.name} list --json | jq -r '. + {"job-name": "${dest.name}"}' '')
        (lib.attrValues cfg.destinations)}
    '';
    create = ''
      ${lib.concatMapStringsSep "\n" (dest: ''
106 devShell-python.nix Normal file
@@ -0,0 +1,106 @@
{
  perSystem =
    { pkgs
    , self'
    , lib
    , ...
    }:
    let
      python3 = pkgs.python3;
      pypkgs = python3.pkgs;
      clan-cli = self'.packages.clan-cli;
      clan-vm-manager = self'.packages.clan-vm-manager;
      pythonWithDeps = python3.withPackages (
        ps:
        clan-cli.propagatedBuildInputs
        ++ clan-cli.devDependencies
        ++ [
          ps.pip
          # clan-vm-manager deps
          ps.pygobject3
        ]
      );
      linuxOnlyPackages = lib.optionals pkgs.stdenv.isLinux [
        pkgs.xdg-utils
      ];
    in
    {
      devShells.python = pkgs.mkShell {
        inputsFrom = [ self'.devShells.default ];
        packages =
          [
            pythonWithDeps
            pypkgs.mypy
            pypkgs.ipdb
            pkgs.desktop-file-utils
            pkgs.gtk4.dev
            pkgs.ruff
            pkgs.libadwaita.devdoc # has the demo called 'adwaita-1-demo'
          ]
          ++ linuxOnlyPackages
          ++ clan-vm-manager.nativeBuildInputs
          ++ clan-vm-manager.buildInputs
          ++ clan-cli.nativeBuildInputs;

        PYTHONBREAKPOINT = "ipdb.set_trace";

        shellHook = ''
          ln -sfT ${clan-cli.nixpkgs} ./pkgs/clan-cli/clan_cli/nixpkgs

          ## PYTHON

          tmp_path=$(realpath ./.direnv)
          repo_root=$(realpath .)
          mkdir -p "$tmp_path/python/${pythonWithDeps.sitePackages}"

          # local dependencies
          localPackages=(
            $repo_root/pkgs/clan-cli
            $repo_root/pkgs/clan-vm-manager
          )

          # Install the package in editable mode
          # This allows executing `clan` from within the dev-shell using the current
          # version of the code and its dependencies.
          # TODO: this is slow. get rid of pip or add better caching
          echo "==== Installing local python packages in editable mode ===="
          for package in "''${localPackages[@]}"; do
            ${pythonWithDeps}/bin/pip install \
              --quiet \
              --disable-pip-version-check \
              --no-index \
              --no-build-isolation \
              --prefix "$tmp_path/python" \
              --editable "$package"
          done

          export PATH="$tmp_path/python/bin:$PATH"
          export PYTHONPATH="''${PYTHONPATH:+$PYTHONPATH:}$tmp_path/python/${pythonWithDeps.sitePackages}"

          for package in "''${localPackages[@]}"; do
            export PYTHONPATH="$package:$PYTHONPATH"
          done

          if ! command -v xdg-mime &> /dev/null; then
            echo "Warning: 'xdg-mime' is not available. The desktop file cannot be installed."
          fi

          # install desktop file
          set -eou pipefail
          DESKTOP_FILE_NAME=org.clan.vm-manager.desktop
          DESKTOP_DST=~/.local/share/applications/$DESKTOP_FILE_NAME
          DESKTOP_SRC=${clan-vm-manager.desktop-file}/share/applications/$DESKTOP_FILE_NAME
          UI_BIN="clan-vm-manager"

          cp -f $DESKTOP_SRC $DESKTOP_DST
          sleep 2
          sed -i "s|Exec=.*clan-vm-manager|Exec=$UI_BIN|" $DESKTOP_DST
          xdg-mime default $DESKTOP_FILE_NAME x-scheme-handler/clan
          echo "==== Validating desktop file installation ===="
          set -x
          desktop-file-validate $DESKTOP_DST
          set +xeou pipefail
        '';
      };
    };
}
22 devShell.nix
@@ -4,9 +4,27 @@
, self'
, config
, ...
}: {
}:
let
  writers = pkgs.callPackage ./pkgs/builders/script-writers.nix { };

  ansiEscapes = {
    reset = ''\033[0m'';
    green = ''\033[32m'';
  };

  # A python program to switch between dev-shells
  # usage: select-shell shell-name
  # the currently enabled dev-shell gets stored in ./.direnv/selected-shell
  select-shell = writers.writePython3Bin "select-shell"
    {
      flakeIgnore = [ "E501" ];
    } ./pkgs/scripts/select-shell.py;
in
{
  devShells.default = pkgs.mkShell {
    packages = [
      select-shell
      pkgs.tea
      self'.packages.tea-create-pr
      self'.packages.merge-after-ci
@@ -17,6 +35,8 @@
    shellHook = ''
      # no longer used
      rm -f "$(git rev-parse --show-toplevel)/.git/hooks/pre-commit"

      echo -e "${ansiEscapes.green}switch to another dev-shell using: select-shell${ansiEscapes.reset}"
    '';
  };
};
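For orientation, the helper referenced above only needs to record the chosen shell name where `.envrc` expects it. The sketch below is hypothetical; the real `pkgs/scripts/select-shell.py` is not part of this diff, so the argument handling and output message are assumptions.

```python
# Hypothetical sketch, not the actual pkgs/scripts/select-shell.py:
# persist the chosen dev-shell name so that .envrc can pick it up via
# `use flake .#$(cat .direnv/selected-shell)` on the next reload.
import sys
from pathlib import Path

shell = sys.argv[1] if len(sys.argv) > 1 else "default"
selected = Path(".direnv/selected-shell")
selected.parent.mkdir(parents=True, exist_ok=True)
selected.write_text(shell)
print(f"selected dev-shell: {shell}; run `direnv reload` to activate it")
```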
@@ -1,138 +0,0 @@
# API Guidelines

This issue serves to collect our common understanding of how to design our API so that it is extensible, usable, and understandable.

## Resource oriented

A resource-oriented API is generally modeled as a resource hierarchy, where each node is either a simple resource or a collection resource. For convenience, they are often called a resource and a collection, respectively.

Examples of resource nouns:

`machine`
`user`
`flake`

Often resources have sub-resources. Even if it is not foreseen, it is recommended to use the plural (trailing `s`) on resources to allow them to be collections of sub-resources.

e.g.,

`users`
->
`users/*/profile`

## Verbs

Verbs should not be part of the URL.

Bad:
`/api/create-products`

Good:
`/api/products`

Only resources are part of the URL; verbs are described via the HTTP method.

Exception:

If a different HTTP method must be used for technical reasons, it is okay to terminate the path with a (short) verb / action.

Okay-ish:
`/api/products/create`

## Usually the following HTTP methods exist to interact with a resource

- POST (create an order for a resource)
- GET (retrieve the information)
- PUT (update and replace information)
- PATCH (update and modify information) **(Not used yet)**
- DELETE (delete the item)
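A minimal sketch of how these conventions map onto FastAPI routes; the `machines` resource and its response shape are illustrative, not part of the actual API:

```python
# Illustrative sketch only: resource nouns in the URL, verbs via HTTP methods.
from fastapi import FastAPI

app = FastAPI()


@app.get("/api/machines")           # Read the collection
def list_machines() -> dict:
    return {"machines": []}


@app.post("/api/machines")          # Create a new entry in the collection
def create_machine(machine: dict) -> dict:
    return {"machine": machine}
```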
## Every resource should be CRUD compatible

All API resources MUST be designed in a way that allows the typical CRUD operations.

Where CRUD stands for:

C - Create
R - Read
U - Update
D - Delete

Resources should implement at least a "Read" operation.

## Body

Use JSON as an exchange format.

All responses MUST be JSON parseable.

Bad:
`bare string`

Better:
`"quoted string"`

Best: (enveloped, see next section)
`{ name: "quoted string" }`

Errors should have a consistent JSON format, such that it is clear which field to look at for displaying error messages.

## Envelope all data collections

Response data should be wrapped in a JSON object `{}`.
Lists `[]` should also contain objects `{}`.
This allows everything to be extensible without breaking backwards compatibility. (Adding fields is trivial, since the schema doesn't change.)

Example:

```
{
  "users": [{
    first_name: "John",
    last_name: "Doe",
    …
  }, {
    first_name: "Jane",
    last_name: "Doe",
    …
  }
  ....
  ],
  "skip": 0,
  "limit": 20,
  ....
}
```

Bad example of a breaking change:
`GET /api/flakes`
`old`

```
[
  "dream2nix"
  "disko"
]
```

`new`

```
[
  {
    name: "dream2nix",
    url: "github/...."
  },
  {
    name: "disko",
    url: "github/...."
  }
]
```

This kind of breaking change can be avoided by using an object from the beginning.
Even if the object only contains one key, it is extensible without breaking.
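A sketch of what an enveloped list endpoint could return, written in Python; the field names `skip` and `limit` follow the example above, and the endpoint itself is illustrative:

```python
# Illustrative sketch: envelope the list so new fields can be added later
# without changing the response schema for existing clients.
def list_flakes() -> dict:
    flakes = [
        {"name": "dream2nix", "url": "github/...."},
        {"name": "disko", "url": "github/...."},
    ]
    return {"flakes": flakes, "skip": 0, "limit": 20}
```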
## More will follow.

...maybe
@@ -1,10 +1,6 @@
# Contributing

**Frontend**: Our frontend is powered by [React NextJS](https://nextjs.org/), a popular and versatile framework for building web applications.

**Backend**: For the backend, we use Python along with the [FastAPI framework](https://fastapi.tiangolo.com/). To ensure seamless communication between the frontend and backend, we generate an `openapi.json` file from the Python code, which defines the REST API. This file is then used with [Orval](https://orval.dev/) to generate TypeScript bindings for the REST API. We're committed to code correctness, so we use [mypy](https://mypy-lang.org/) to ensure that our Python code is statically typed correctly. For backend testing, we rely on [pytest](https://docs.pytest.org/en/7.4.x/).

**Continuous Integration (CI)**: We've set up a CI bot that rigorously checks your code using the quality assurance (QA) tools mentioned above. If any errors are detected, it will block pull requests until they're resolved.
**Continuous Integration (CI)**: Each pull request gets automatically tested by Gitea. If any errors are detected, it will block pull requests until they're resolved.

**Dependency Management**: We use the [Nix package manager](https://nixos.org/) to manage dependencies and ensure reproducibility, making your development process more robust.

@@ -34,7 +30,7 @@ Let's get your development environment up and running:
3. **Add direnv to your shell**:

   - Direnv needs to [hook into your shell](https://direnv.net/docs/hook.html) to work.
     You can do this by executing the following command:
     You can do this by executing the following command. The example below will set up direnv for `zsh` and `bash`:

     ```bash
     echo 'eval "$(direnv hook zsh)"' >> ~/.zshrc && echo 'eval "$(direnv hook bash)"' >> ~/.bashrc && eval "$SHELL"
@@ -52,39 +48,6 @@ Let's get your development environment up and running:
     ```
   - Execute `direnv allow` to automatically execute the shell script `.envrc` when entering the directory.

6. **Build the Backend**:

   - Go to the `pkgs/clan-cli` directory and execute:
     ```bash
     direnv allow
     ```
   - Wait for the backend to build.

7. **Start the Backend Server**:

   - To start the backend server, execute:
     ```bash
     clan webui --reload --no-open --log-level debug
     ```
   - The server will automatically restart if any Python files change.

8. **Build the Frontend**:

   - In a different shell, navigate to the `pkgs/ui` directory and execute:
     ```bash
     direnv allow
     ```
   - Wait for the frontend to build.

NOTE: If you get the error "@clan/colors.json", you executed `npm install`; please do not do that. `direnv reload` will handle dependency management. Please delete node_modules with `rm -rf node_modules`.

9. **Start the Frontend**:
   - To start the frontend, execute:
     ```bash
     npm run dev
     ```
   - Access the website by going to [http://localhost:3000](http://localhost:3000).

# Setting Up Your Git Workflow

Let's set up your Git workflow to collaborate effectively:
@@ -96,7 +59,7 @@ Let's set up your Git workflow to collaborate effectively:
   tea login add
   ```
   - Fill out the prompt as follows:
     - URL of Gitea instance: `https://gitea.gchq.icu`
     - URL of Gitea instance: `https://git.clan.lol`
     - Name of new Login [gitea.gchq.icu]: `gitea.gchq.icu:7171`
     - Do you have an access token? No
     - Username: YourUsername
@@ -125,7 +88,7 @@ Let's set up your Git workflow to collaborate effectively:

4. **Review Your Pull Request**:

   - Visit https://gitea.gchq.icu and go to the project page. Check under "Pull Requests" for any issues with your pull request.
   - Visit https://git.clan.lol and go to the project page. Check under "Pull Requests" for any issues with your pull request.

5. **Push Your Changes**:
   - If there are issues, fix them and redo step 2. Afterward, execute:
@@ -136,21 +99,22 @@ Let's set up your Git workflow to collaborate effectively:

# Debugging

When working on the backend of your project, debugging is an essential part of the development process. Here are some methods for debugging and testing the backend of your application:
Here are some methods for debugging and testing the clan-cli:

## Test Backend Locally in Devshell with Breakpoints
## Test Locally in Devshell with Breakpoints

To test the backend locally in a development environment and set breakpoints for debugging, follow these steps:
To test the CLI locally in a development environment and set breakpoints for debugging, follow these steps:

1. Run the following command to execute your tests and allow for debugging with breakpoints:
   ```bash
   pytest -n0 -s --maxfail=1
   cd ./pkgs/clan-cli
   pytest -n0 -s --maxfail=1 ./tests/test_nameofthetest.py
   ```
   You can place `breakpoint()` in your Python code where you want to trigger a breakpoint for debugging.
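For example, a test might look like this while you are debugging (test name and body are illustrative):

```python
# Illustrative example: running `pytest -n0 -s` stops at the breakpoint
# and opens an interactive debugger at that point in the test.
def test_something() -> None:
    result = 1 + 1
    breakpoint()  # inspect `result` interactively here
    assert result == 2
```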
## Test Backend Locally in a Nix Sandbox
## Test Locally in a Nix Sandbox

To run your backend tests in a Nix sandbox, you have two options depending on whether your test functions have been marked as impure or not:
To run tests in a Nix sandbox, you have two options depending on whether your test functions have been marked as impure or not:

### Running Tests Marked as Impure

@@ -189,29 +153,3 @@ If you need to inspect the Nix sandbox while running tests, follow these steps:
   cntr exec -w your_sandbox_name
   psgrep -a -x your_python_process_name
   ```

These debugging and testing methods will help you identify and fix issues in your backend code efficiently, ensuring the reliability and robustness of your application.

For more information on testing, read [property and contract based testing](testing.md).

# Using this Template

To make the most of this template:

1. Set up a new Gitea account named `ui-asset-bot`. Generate an access token with all access permissions and set it under `settings/actions/secrets` as a secret called `BOT_ACCESS_TOKEN`.

   - Also, edit the file `.gitea/workflows/ui_assets.yaml` and change the `BOT_EMAIL` variable to match the email you set for that account. Gitea matches commits to accounts by their email address, so this step is essential.

2. Create a second Gitea account named `merge-bot`. Edit the file `pkgs/merge-after-ci/default.nix` if the name should be different. Under "Branches," set the main branch to be protected and add `merge-bot` to the whitelisted users for pushing. Set the unprotected file pattern to `**/ui-assets.nix`.

   - Enable the status check for "build / test (pull_request)."

3. Add both `merge-bot` and `ui-asset-bot` as collaborators.
   - Set the option to "Delete pull request branch after merge by default."
   - Also, set the default merge style to "Rebase then create merge commit."

With this template, you're well-equipped to build and collaborate on high-quality websites efficiently. Happy coding!

# API guidelines

see [./api-guidelines](./api-guidelines)
@@ -1,111 +0,0 @@
# Property vs Contract based testing

In this section, we'll explore the importance of testing the backend of your FastAPI application, specifically focusing on the advantages of using contract-based testing with property-based testing frameworks.

## Why Use Property-Based Testing?

Property-based testing is a powerful approach to test your APIs, offering several key benefits:

### 1. Scope

Instead of having to write numerous test cases for various input arguments, property-based testing enables you to test a range of arguments for each parameter using a single test. This approach significantly enhances the robustness of your test suite while reducing redundancy in your testing code. In short, your test code becomes cleaner, more DRY (Don't Repeat Yourself), and more efficient. It also becomes more effective, as you can easily test numerous edge cases.

### 2. Reproducibility

Property-based testing tools retain test cases and their results, allowing you to reproduce and replay tests in case of failure. This feature is invaluable for debugging and ensuring the stability of your application over time.

## Frameworks for Property-Based Testing

To implement property-based testing in FastAPI, you can use the following frameworks:

- [Hypothesis: Property-Based Testing](https://hypothesis.readthedocs.io/en/latest/quickstart.html)
- [Schemathesis](https://schemathesis.readthedocs.io/en/stable/#id2)
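As a quick orientation, this is what a plain Hypothesis property test looks like; the property is generic and illustrative, not one of our API tests:

```python
from hypothesis import given, strategies as st


# Hypothesis generates many lists of integers and checks the property for each.
@given(st.lists(st.integers()))
def test_sorting_twice_changes_nothing(xs: list[int]) -> None:
    assert sorted(sorted(xs)) == sorted(xs)
```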
## Example

Running the Schemathesis fuzzer on GET requests:

```bash
nix run .#runSchemaTests
```

If you want to test more request types, edit the file [flake-module.nix](../checks/impure/flake-module.nix).

After a run it will upload the results to `schemathesis.io` and give you a link to the report.
The credentials to the account are `Username: schemathesis@qube.email` and `Password:6tv4eP96WXsarF`

## Why Schemas Are Not Contracts

A schema is a description of the data structure of your API, whereas a contract defines not only the structure but also the expected behavior and constraints. The following resource explains why schemas are not contracts in more detail:

- [Why Schemas Are Not Contracts](https://pactflow.io/blog/schemas-are-not-contracts/)

In a nutshell, schemas may define the data structure but often fail to capture complex constraints and the expected interactions between different API endpoints. Contracts fill this gap by specifying both the structure and behavior of your API.

## Why Use Contract-Driven Testing?

Contract-driven testing combines the benefits of type annotations and property-based testing, providing a robust approach to ensuring the correctness of your APIs.

- Contracts become an integral part of the function signature and can be checked statically, ensuring that the API adheres to the defined contract.
- Contracts, like property-based tests, allow you to specify conditions and constraints, with the testing framework automatically generating test cases and verifying call results.

### Frameworks for Contract-Driven Testing

To implement contract-driven testing in FastAPI, consider the following framework and extension:

- [Deal: Contract Driven Development](https://deal.readthedocs.io/)
  By adopting contract-driven testing, you can ensure that your FastAPI application not only has a well-defined structure but also behaves correctly, making it more robust and reliable.
- [Whitepaper: Python by contract](https://users.ece.utexas.edu/~gligoric/papers/ZhangETAL22PythonByContractDataset.pdf) This paper goes into more detail about how it works.

## Examples

You can annotate functions with `@deal.raises(ClanError)` to say that they can _only_ raise a ClanError exception.

```python
import deal


@deal.raises(ClanError)
def get_task(uuid: UUID) -> BaseTask:
    global POOL
    return POOL[uuid]
```

To say that it can raise multiple exceptions, just add them after one another, separated with a `,`:

```python
import deal


@deal.raises(ClanError, IndexError, ZeroDivisionError)
def get_task(uuid: UUID) -> BaseTask:
    global POOL
    return POOL[uuid]
```

### Adding deal annotated functions to pytest

```python
from clan_cli.task_manager import get_task
import deal


@deal.cases(get_task)  # <--- Add function get_task to testing corpus
def test_get_task(case: deal.TestCase) -> None:
    case()  # <--- Call testing framework with function
```

### Adding example input for deeper testing

You can combine hypothesis annotations with deal annotations to add example inputs to the function so that the verifier can reach deeper parts of the function.

```python
import deal


@deal.example(lambda: get_task(UUID("5c2061e0-4512-4b30-aa8e-7be4a75b8b45")))  # type: ignore
@deal.example(lambda: get_task(UUID("7c2061e6-4512-4b30-aa8e-7be4a75b8b45")))  # type: ignore
@deal.raises(ClanError)
def get_task(uuid: UUID) -> BaseTask:
    global POOL
    return POOL[uuid]
```

You can also add `pre` and `post` conditions. A `pre` condition must be true before the function is executed. A `post` condition must be true after the function was executed. For more information read the [Writing Contracts section](https://deal.readthedocs.io/basic/values.html).
Or read the [API doc of Deal](https://deal.readthedocs.io/details/api.html).
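A small sketch of how `pre` and `post` conditions look with deal; the function itself is illustrative:

```python
import deal


@deal.pre(lambda count: count > 0)       # must hold before the call
@deal.post(lambda result: result >= 0)   # must hold for the return value
def countdown(count: int) -> int:
    return count - 1
```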
36 flake.lock generated
@@ -7,11 +7,11 @@
      ]
    },
    "locked": {
      "lastModified": 1707524024,
      "narHash": "sha256-HmumZ8FuWAAYZrWUKm3N4G4h8nmZ5VUVX+vXLmCJNKM=",
      "lastModified": 1708564520,
      "narHash": "sha256-juduDTYBhGN6jNfQ5RMDpbQF+MkO0pj3k7XGDSTjAbs=",
      "owner": "nix-community",
      "repo": "disko",
      "rev": "d07de570ba05cec2807d058daaa044f6955720c7",
      "rev": "23d308f0059955e3719efc81a34d1fc0369fbb74",
      "type": "github"
    },
    "original": {
@@ -42,11 +42,11 @@
    },
    "nixlib": {
      "locked": {
        "lastModified": 1693701915,
        "narHash": "sha256-waHPLdDYUOHSEtMKKabcKIMhlUOHPOOPQ9UyFeEoovs=",
        "lastModified": 1708217146,
        "narHash": "sha256-nGfEv7k78slqIR5E0zzWSx214d/4/ZPKDkObLJqVLVw=",
        "owner": "nix-community",
        "repo": "nixpkgs.lib",
        "rev": "f5af57d3ef9947a70ac86e42695231ac1ad00c25",
        "rev": "e623008d8a46517470e6365505f1a3ce171fa46a",
        "type": "github"
      },
      "original": {
@@ -63,11 +63,11 @@
      ]
    },
    "locked": {
      "lastModified": 1707405218,
      "narHash": "sha256-ZQ366Oo8WJbCqXAZET7N0Sz6RQ3G2IbqVtxQRSa3SXc=",
      "lastModified": 1708563055,
      "narHash": "sha256-FaojUZNu+YPFi3eCI7mL4kxPKQ51DoySa7mqmllUOuc=",
      "owner": "nix-community",
      "repo": "nixos-generators",
      "rev": "843e2f04c716092797ffa4ce14c446adce2f09ef",
      "rev": "f4631dee1a0fd56c0db89860e83e3588a28c7631",
      "type": "github"
    },
    "original": {
@@ -78,11 +78,11 @@
    },
    "nixpkgs": {
      "locked": {
        "lastModified": 1707639604,
        "narHash": "sha256-J5ipSdfkbYcYaH3Js2dUf3Of94BWStapdmxpW5wwH1U=",
        "lastModified": 1708847675,
        "narHash": "sha256-RUZ7KEs/a4EzRELYDGnRB6i7M1Izii3JD/LyzH0c6Tg=",
        "owner": "NixOS",
        "repo": "nixpkgs",
        "rev": "bdc57436da855500d44e9c1ce7450c0772e1cfa1",
        "rev": "2a34566b67bef34c551f204063faeecc444ae9da",
        "type": "github"
      },
      "original": {
@@ -110,11 +110,11 @@
      "nixpkgs-stable": []
    },
    "locked": {
      "lastModified": 1707620614,
      "narHash": "sha256-gfAoB9dGzBu62NoAoM945aok7+6M+LFu+nvnGwAsTp4=",
      "lastModified": 1708830076,
      "narHash": "sha256-Cjh2xdjxC6S6nW6Whr2dxSeh8vjodzhTmQdI4zPJ4RA=",
      "owner": "Mic92",
      "repo": "sops-nix",
      "rev": "2eb7c4ba3aa75e2660fd217eb1ab64d5b793608e",
      "rev": "2874fbbe4a65bd2484b0ad757d27a16107f6bc17",
      "type": "github"
    },
    "original": {
@@ -130,11 +130,11 @@
      ]
    },
    "locked": {
      "lastModified": 1707300477,
      "narHash": "sha256-qQF0fEkHlnxHcrKIMRzOETnRBksUK048MXkX0SOmxvA=",
      "lastModified": 1708897213,
      "narHash": "sha256-QECZB+Hgz/2F/8lWvHNk05N6NU/rD9bWzuNn6Cv8oUk=",
      "owner": "numtide",
      "repo": "treefmt-nix",
      "rev": "ac599dab59a66304eb511af07b3883114f061b9d",
      "rev": "e497a9ddecff769c2a7cbab51e1ed7a8501e7a3a",
      "type": "github"
    },
    "original": {
@@ -30,6 +30,7 @@
  imports = [
    ./checks/flake-module.nix
    ./devShell.nix
    ./devShell-python.nix
    ./formatter.nix
    ./templates/flake-module.nix
    ./clanModules/flake-module.nix
@@ -30,6 +30,10 @@
      "--" # this argument is ignored by bash
    ];
    includes = [ "*.nix" ];
    excludes = [
      # Was copied from nixpkgs. Keep diff minimal to simplify upstreaming.
      "pkgs/builders/script-writers.nix"
    ];
  };
  treefmt.settings.formatter.python = {
    command = "sh";
@@ -4,6 +4,8 @@
, machines ? { } # allows to include machine-specific modules i.e. machines.${name} = { ... }
, clanName # Needs to be (globally) unique, as this determines the folder name where the flake gets downloaded to.
, clanIcon ? null # A path to an icon to be used for the clan, should be the same for all machines
, pkgsForSystem ? (_system: null) # A map from arch to pkgs, if specified this nixpkgs will be only imported once for each system.
  # This improves performance, but all nixpkgs.* options will be ignored.
}:
let
  machinesDirs = lib.optionalAttrs (builtins.pathExists "${directory}/machines") (builtins.readDir (directory + /machines));
@@ -78,13 +80,23 @@ let
  configsPerSystem = builtins.listToAttrs
    (builtins.map
      (system: lib.nameValuePair system
        (lib.mapAttrs (name: _: nixosConfiguration { inherit name system; }) allMachines))
        (lib.mapAttrs
          (name: _: nixosConfiguration {
            inherit name system;
            pkgs = pkgsForSystem system;
          })
          allMachines))
      supportedSystems);

  configsFuncPerSystem = builtins.listToAttrs
    (builtins.map
      (system: lib.nameValuePair system
        (lib.mapAttrs (name: _: args: nixosConfiguration (args // { inherit name system; })) allMachines))
        (lib.mapAttrs
          (name: _: args: nixosConfiguration (args // {
            inherit name system;
            pkgs = pkgsForSystem system;
          }))
          allMachines))
      supportedSystems);
in
{
@@ -62,13 +62,7 @@
        description = ''
          secret data as json for the generator
        '';
        default = pkgs.writers.writeJSON "secrets.json" (lib.mapAttrs
          (_name: secret: {
            secrets = builtins.attrNames secret.secrets;
            facts = lib.mapAttrs (_: secret: secret.path) secret.facts;
            generator = secret.generator.finalScript;
          })
          config.clanCore.secrets);
        default = pkgs.writers.writeJSON "secrets.json" config.clanCore.secrets;
      };
      vm.create = lib.mkOption {
        type = lib.types.path;
@@ -35,13 +35,13 @@
  options.clanCore.secrets = lib.mkOption {
    default = { };
    type = lib.types.attrsOf
      (lib.types.submodule (secret: {
      (lib.types.submodule (service: {
        options = {
          name = lib.mkOption {
            type = lib.types.str;
            default = secret.config._module.args.name;
            default = service.config._module.args.name;
            description = ''
              Namespace of the secret
              Namespace of the service
            '';
          };
          generator = lib.mkOption {
@@ -54,6 +54,14 @@
              Extra paths to add to the PATH environment variable when running the generator.
            '';
          };
          prompt = lib.mkOption {
            type = lib.types.nullOr lib.types.str;
            default = null;
            description = ''
              prompt text to ask for a value.
              This value will be passed to the script as the environment variable $prompt_value.
            '';
          };
          script = lib.mkOption {
            type = lib.types.str;
            description = ''
@@ -92,14 +100,14 @@
              config' = config;
            in
            lib.mkOption {
              type = lib.types.attrsOf (lib.types.submodule ({ config, ... }: {
              type = lib.types.attrsOf (lib.types.submodule ({ config, name, ... }: {
                options = {
                  name = lib.mkOption {
                    type = lib.types.str;
                    description = ''
                      name of the secret
                    '';
                    default = config._module.args.name;
                    default = name;
                  };
                  path = lib.mkOption {
                    type = lib.types.str;
@@ -108,6 +116,14 @@
                    '';
                    default = "${config'.clanCore.secretsDirectory}/${config'.clanCore.secretsPrefix}${config.name}";
                  };
                } // lib.optionalAttrs (config'.clanCore.secretStore == "sops") {
                  groups = lib.mkOption {
                    type = lib.types.listOf lib.types.str;
                    default = config'.clanCore.sops.defaultGroups;
                    description = ''
                      Groups to decrypt the secret for. By default we always use the user's key.
                    '';
                  };
                };
              }));
              description = ''
@@ -22,6 +22,14 @@ let
  secrets = filterDir containsMachineOrGroups secretsDir;
in
{
  options = {
    clanCore.sops.defaultGroups = lib.mkOption {
      type = lib.types.listOf lib.types.str;
      default = [ ];
      example = [ "admins" ];
      description = "The default groups to use for encryption when no groups are specified.";
    };
  };
  config = lib.mkIf (config.clanCore.secretStore == "sops") {
    clanCore.secretsDirectory = "/run/secrets";
    clanCore.secretsPrefix = config.clanCore.machineName + "-";
497 pkgs/builders/script-writers.nix Normal file
@@ -0,0 +1,497 @@
|
||||
{
|
||||
buildPackages,
|
||||
gixy,
|
||||
lib,
|
||||
libiconv,
|
||||
makeWrapper,
|
||||
mkNugetDeps,
|
||||
mkNugetSource,
|
||||
pkgs,
|
||||
stdenv,
|
||||
}:
|
||||
let
|
||||
inherit (lib)
|
||||
concatMapStringsSep
|
||||
elem
|
||||
escapeShellArg
|
||||
last
|
||||
optionalString
|
||||
strings
|
||||
types
|
||||
;
|
||||
in
|
||||
rec {
|
||||
# Base implementation for non-compiled executables.
|
||||
# Takes an interpreter, for example `${lib.getExe pkgs.bash}`
|
||||
#
|
||||
# Examples:
|
||||
# writeBash = makeScriptWriter { interpreter = "${pkgs.bash}/bin/bash"; }
|
||||
# makeScriptWriter { interpreter = "${pkgs.dash}/bin/dash"; } "hello" "echo hello world"
|
||||
makeScriptWriter = { interpreter, check ? "", makeWrapperArgs ? [], }: nameOrPath: content:
|
||||
assert lib.or (types.path.check nameOrPath) (builtins.match "([0-9A-Za-z._])[0-9A-Za-z._-]*" nameOrPath != null);
|
||||
assert lib.or (types.path.check content) (types.str.check content);
|
||||
let
|
||||
name = last (builtins.split "/" nameOrPath);
|
||||
in
|
||||
|
||||
pkgs.runCommandLocal name (
|
||||
{
|
||||
inherit makeWrapperArgs;
|
||||
nativeBuildInputs = [
|
||||
makeWrapper
|
||||
];
|
||||
}
|
||||
// lib.optionalAttrs (nameOrPath == "/bin/${name}") {
|
||||
meta.mainProgram = name;
|
||||
}
|
||||
// (
|
||||
if (types.str.check content) then {
|
||||
inherit content interpreter;
|
||||
passAsFile = [ "content" ];
|
||||
} else {
|
||||
inherit interpreter;
|
||||
contentPath = content;
|
||||
}
|
||||
)
|
||||
)
|
||||
''
|
||||
# On darwin a script cannot be used as an interpreter in a shebang but
|
||||
# there doesn't seem to be a limit to the size of shebang and multiple
|
||||
# arguments to the interpreter are allowed.
|
||||
if [[ -n "${toString pkgs.stdenvNoCC.isDarwin}" ]] && isScript $interpreter
|
||||
then
|
||||
wrapperInterpreterLine=$(head -1 "$interpreter" | tail -c+3)
|
||||
# Get first word from the line (note: xargs echo remove leading spaces)
|
||||
wrapperInterpreter=$(echo "$wrapperInterpreterLine" | xargs echo | cut -d " " -f1)
|
||||
|
||||
if isScript $wrapperInterpreter
|
||||
then
|
||||
echo "error: passed interpreter ($interpreter) is a script which has another script ($wrapperInterpreter) as an interpreter, which is not supported."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# This should work as long as wrapperInterpreter is a shell, which is
|
||||
# the case for programs wrapped with makeWrapper, like
|
||||
# python3.withPackages etc.
|
||||
interpreterLine="$wrapperInterpreterLine $interpreter"
|
||||
else
|
||||
interpreterLine=$interpreter
|
||||
fi
|
||||
|
||||
echo "#! $interpreterLine" > $out
|
||||
cat "$contentPath" >> $out
|
||||
${optionalString (check != "") ''
|
||||
${check} $out
|
||||
''}
|
||||
chmod +x $out
|
||||
|
||||
# Relocate executable if path was specified instead of name.
|
||||
# Only in this case wrapProgram is applied, as it wouldn't work with a
|
||||
# single executable file under $out.
|
||||
${optionalString (types.path.check nameOrPath) ''
|
||||
mv $out tmp
|
||||
mkdir -p $out/$(dirname "${nameOrPath}")
|
||||
mv tmp $out/${nameOrPath}
|
||||
wrapProgram $out/${nameOrPath} ''${makeWrapperArgs[@]}
|
||||
''}
|
||||
'';
|
||||
|
||||
# Base implementation for compiled executables.
|
||||
# Takes a compile script, which in turn takes the name as an argument.
|
||||
#
|
||||
# Examples:
|
||||
# writeSimpleC = makeBinWriter { compileScript = name: "gcc -o $out $contentPath"; }
|
||||
makeBinWriter = { compileScript, strip ? true }: nameOrPath: content:
|
||||
assert lib.or (types.path.check nameOrPath) (builtins.match "([0-9A-Za-z._])[0-9A-Za-z._-]*" nameOrPath != null);
|
||||
assert lib.or (types.path.check content) (types.str.check content);
|
||||
let
|
||||
name = last (builtins.split "/" nameOrPath);
|
||||
in
|
||||
pkgs.runCommand name ((if (types.str.check content) then {
|
||||
inherit content;
|
||||
passAsFile = [ "content" ];
|
||||
} else {
|
||||
contentPath = content;
|
||||
}) // lib.optionalAttrs (nameOrPath == "/bin/${name}") {
|
||||
meta.mainProgram = name;
|
||||
}) ''
|
||||
${compileScript}
|
||||
${lib.optionalString strip
|
||||
"${lib.getBin buildPackages.bintools-unwrapped}/bin/${buildPackages.bintools-unwrapped.targetPrefix}strip -S $out"}
|
||||
# Sometimes binaries produced for darwin (e. g. by GHC) won't be valid
|
||||
# mach-o executables from the get-go, but need to be corrected somehow
|
||||
# which is done by fixupPhase.
|
||||
${lib.optionalString pkgs.stdenvNoCC.hostPlatform.isDarwin "fixupPhase"}
|
||||
${optionalString (types.path.check nameOrPath) ''
|
||||
mv $out tmp
|
||||
mkdir -p $out/$(dirname "${nameOrPath}")
|
||||
mv tmp $out/${nameOrPath}
|
||||
''}
|
||||
'';
|
||||
|
||||
# Like writeScript but the first line is a shebang to bash
|
||||
#
|
||||
# Example:
|
||||
# writeBash "example" ''
|
||||
# echo hello world
|
||||
# ''
|
||||
writeBash = makeScriptWriter {
|
||||
interpreter = "${lib.getExe pkgs.bash}";
|
||||
};
|
||||
|
||||
# Like writeScriptBin but the first line is a shebang to bash
|
||||
writeBashBin = name:
|
||||
writeBash "/bin/${name}";
|
||||
|
||||
# Like writeScript but the first line is a shebang to dash
|
||||
#
|
||||
# Example:
|
||||
# writeDash "example" ''
|
||||
# echo hello world
|
||||
# ''
|
||||
writeDash = makeScriptWriter {
|
||||
interpreter = "${lib.getExe pkgs.dash}";
|
||||
};
|
||||
|
||||
# Like writeScriptBin but the first line is a shebang to dash
|
||||
writeDashBin = name:
|
||||
writeDash "/bin/${name}";
|
||||
|
||||
# Like writeScript but the first line is a shebang to fish
|
||||
#
|
||||
# Example:
|
||||
# writeFish "example" ''
|
||||
# echo hello world
|
||||
# ''
|
||||
writeFish = makeScriptWriter {
|
||||
interpreter = "${lib.getExe pkgs.fish} --no-config";
|
||||
check = "${lib.getExe pkgs.fish} --no-config --no-execute"; # syntax check only
|
||||
};
|
||||
|
||||
# Like writeScriptBin but the first line is a shebang to fish
|
||||
writeFishBin = name:
|
||||
writeFish "/bin/${name}";
|
||||
|
||||
# writeHaskell takes a name, an attrset with libraries and haskell version (both optional)
|
||||
# and some haskell source code and returns an executable.
|
||||
#
|
||||
# Example:
|
||||
# writeHaskell "missiles" { libraries = [ pkgs.haskellPackages.acme-missiles ]; } ''
|
||||
# import Acme.Missiles
|
||||
#
|
||||
# main = launchMissiles
|
||||
# '';
|
||||
writeHaskell = name: {
|
||||
libraries ? [],
|
||||
ghc ? pkgs.ghc,
|
||||
ghcArgs ? [],
|
||||
threadedRuntime ? true,
|
||||
strip ? true
|
||||
}:
|
||||
let
|
||||
appendIfNotSet = el: list: if elem el list then list else list ++ [ el ];
|
||||
ghcArgs' = if threadedRuntime then appendIfNotSet "-threaded" ghcArgs else ghcArgs;
|
||||
|
||||
in makeBinWriter {
|
||||
compileScript = ''
|
||||
cp $contentPath tmp.hs
|
||||
${(ghc.withPackages (_: libraries ))}/bin/ghc ${lib.escapeShellArgs ghcArgs'} tmp.hs
|
||||
mv tmp $out
|
||||
'';
|
||||
inherit strip;
|
||||
} name;
|
||||
|
||||
# writeHaskellBin takes the same arguments as writeHaskell but outputs a directory (like writeScriptBin)
|
||||
writeHaskellBin = name:
|
||||
writeHaskell "/bin/${name}";
|
||||
|
||||
# Like writeScript but the first line is a shebang to nu
|
||||
#
|
||||
# Example:
|
||||
# writeNu "example" ''
|
||||
# echo hello world
|
||||
# ''
|
||||
writeNu = makeScriptWriter {
|
||||
interpreter = "${lib.getExe pkgs.nushell} --no-config-file";
|
||||
};
|
||||
|
||||
# Like writeScriptBin but the first line is a shebang to nu
|
||||
writeNuBin = name:
|
||||
writeNu "/bin/${name}";
|
||||
|
||||
# makeRubyWriter takes ruby and compatible rubyPackages and produces ruby script writer,
|
||||
# If any libraries are specified, ruby.withPackages is used as interpreter, otherwise the "bare" ruby is used.
|
||||
makeRubyWriter = ruby: rubyPackages: buildRubyPackages: name: { libraries ? [], ... } @ args:
|
||||
makeScriptWriter (
|
||||
(builtins.removeAttrs args ["libraries"])
|
||||
// {
|
||||
interpreter =
|
||||
if libraries == []
|
||||
then "${ruby}/bin/ruby"
|
||||
else "${(ruby.withPackages (ps: libraries))}/bin/ruby";
|
||||
# Rubocop doesnt seem to like running in this fashion.
|
||||
#check = (writeDash "rubocop.sh" ''
|
||||
# exec ${lib.getExe buildRubyPackages.rubocop} "$1"
|
||||
#'');
|
||||
}
|
||||
) name;
|
||||
|
||||
# Like writeScript but the first line is a shebang to ruby
|
||||
#
|
||||
# Example:
|
||||
# writeRuby "example" ''
|
||||
# puts "hello world"
|
||||
# ''
|
||||
writeRuby = makeRubyWriter pkgs.ruby pkgs.rubyPackages buildPackages.rubyPackages;
|
||||
|
||||
writeRubyBin = name:
|
||||
writeRuby "/bin/${name}";
|
||||
|
||||
# makeLuaWriter takes lua and compatible luaPackages and produces lua script writer,
|
||||
# which validates the script with luacheck at build time. If any libraries are specified,
|
||||
# lua.withPackages is used as interpreter, otherwise the "bare" lua is used.
|
||||
makeLuaWriter = lua: luaPackages: buildLuaPackages: name: { libraries ? [], ... } @ args:
|
||||
makeScriptWriter (
|
||||
(builtins.removeAttrs args ["libraries"])
|
||||
// {
|
||||
interpreter = lua.interpreter;
|
||||
# if libraries == []
|
||||
# then lua.interpreter
|
||||
# else (lua.withPackages (ps: libraries)).interpreter
|
||||
# This should support packages! I just cant figure out why some dependency collision happens whenever I try to run this.
|
||||
check = (writeDash "luacheck.sh" ''
|
||||
exec ${buildLuaPackages.luacheck}/bin/luacheck "$1"
|
||||
'');
|
||||
}
|
||||
) name;
|
||||
|
||||
# writeLua takes a name an attributeset with libraries and some lua source code and
|
||||
# returns an executable (should also work with luajit)
|
||||
#
|
||||
# Example:
|
||||
# writeLua "test_lua" { libraries = [ pkgs.luaPackages.say ]; } ''
|
||||
# s = require("say")
|
||||
# s:set_namespace("en")
|
||||
#
|
||||
# s:set('money', 'I have %s dollars')
|
||||
# s:set('wow', 'So much money!')
|
||||
#
|
||||
# print(s('money', {1000})) -- I have 1000 dollars
|
||||
#
|
||||
# s:set_namespace("fr") -- switch to french!
|
||||
# s:set('wow', "Tant d'argent!")
|
||||
#
|
||||
# print(s('wow')) -- Tant d'argent!
|
||||
# s:set_namespace("en") -- switch back to english!
|
||||
# print(s('wow')) -- So much money!
|
||||
# ''
|
||||
writeLua = makeLuaWriter pkgs.lua pkgs.luaPackages buildPackages.luaPackages;
|
||||
|
||||
writeLuaBin = name:
|
||||
writeLua "/bin/${name}";
|
||||
|
||||
writeRust = name: {
|
||||
rustc ? pkgs.rustc,
|
||||
rustcArgs ? [],
|
||||
strip ? true
|
||||
}:
|
||||
let
|
||||
darwinArgs = lib.optionals stdenv.isDarwin [ "-L${lib.getLib libiconv}/lib" ];
|
||||
in
|
||||
makeBinWriter {
|
||||
compileScript = ''
|
||||
cp "$contentPath" tmp.rs
|
||||
PATH=${lib.makeBinPath [pkgs.gcc]} ${rustc}/bin/rustc ${lib.escapeShellArgs rustcArgs} ${lib.escapeShellArgs darwinArgs} -o "$out" tmp.rs
|
||||
'';
|
||||
inherit strip;
|
||||
} name;
|
||||
|
||||
writeRustBin = name:
|
||||
writeRust "/bin/${name}";
|
||||
|
||||
# writeJS takes a name an attributeset with libraries and some JavaScript sourcecode and
|
||||
# returns an executable
|
||||
#
|
||||
# Example:
|
||||
# writeJS "example" { libraries = [ pkgs.nodePackages.uglify-js ]; } ''
|
||||
# var UglifyJS = require("uglify-js");
|
||||
# var code = "function add(first, second) { return first + second; }";
|
||||
# var result = UglifyJS.minify(code);
|
||||
# console.log(result.code);
|
||||
# ''
|
||||
writeJS = name: { libraries ? [] }: content:
|
||||
let
|
||||
node-env = pkgs.buildEnv {
|
||||
name = "node";
|
||||
paths = libraries;
|
||||
pathsToLink = [
|
||||
"/lib/node_modules"
|
||||
];
|
||||
};
|
||||
in writeDash name ''
|
||||
export NODE_PATH=${node-env}/lib/node_modules
|
||||
exec ${lib.getExe pkgs.nodejs} ${pkgs.writeText "js" content} "$@"
|
||||
'';
|
||||
|
||||
# writeJSBin takes the same arguments as writeJS but outputs a directory (like writeScriptBin)
|
||||
writeJSBin = name:
|
||||
writeJS "/bin/${name}";
|
||||
|
||||
awkFormatNginx = builtins.toFile "awkFormat-nginx.awk" ''
|
||||
awk -f
|
||||
{sub(/^[ \t]+/,"");idx=0}
|
||||
/\{/{ctx++;idx=1}
|
||||
/\}/{ctx--}
|
||||
{id="";for(i=idx;i<ctx;i++)id=sprintf("%s%s", id, "\t");printf "%s%s\n", id, $0}
|
||||
'';
|
||||
|
||||
writeNginxConfig = name: text: pkgs.runCommandLocal name {
|
||||
inherit text;
|
||||
passAsFile = [ "text" ];
|
||||
nativeBuildInputs = [ gixy ];
|
||||
} /* sh */ ''
|
||||
# nginx-config-formatter has an error - https://github.com/1connect/nginx-config-formatter/issues/16
|
||||
awk -f ${awkFormatNginx} "$textPath" | sed '/^\s*$/d' > $out
|
||||
gixy $out
|
||||
'';
|
||||
|
||||
# writePerl takes a name an attributeset with libraries and some perl sourcecode and
|
||||
# returns an executable
|
||||
#
|
||||
# Example:
|
||||
# writePerl "example" { libraries = [ pkgs.perlPackages.boolean ]; } ''
|
||||
# use boolean;
|
||||
# print "Howdy!\n" if true;
|
||||
# ''
|
||||
writePerl = name: { libraries ? [], ... } @ args:
|
||||
makeScriptWriter (
|
||||
(builtins.removeAttrs args ["libraries"])
|
||||
// {
|
||||
interpreter = "${lib.getExe (pkgs.perl.withPackages (p: libraries))}";
|
||||
}
|
||||
) name;
|
||||
|
||||
# writePerlBin takes the same arguments as writePerl but outputs a directory (like writeScriptBin)
|
||||
writePerlBin = name:
|
||||
writePerl "/bin/${name}";
|
||||
|
||||
# makePythonWriter takes python and compatible pythonPackages and produces python script writer,
|
||||
# which validates the script with flake8 at build time. If any libraries are specified,
|
||||
# python.withPackages is used as interpreter, otherwise the "bare" python is used.
|
||||
makePythonWriter = python: pythonPackages: buildPythonPackages: name: { libraries ? [], flakeIgnore ? [], ... } @ args:
|
||||
let
|
||||
ignoreAttribute = optionalString (flakeIgnore != []) "--ignore ${concatMapStringsSep "," escapeShellArg flakeIgnore}";
|
||||
in
|
||||
makeScriptWriter
|
||||
(
|
||||
(builtins.removeAttrs args ["libraries" "flakeIgnore"])
|
||||
// {
|
||||
interpreter =
|
||||
if pythonPackages != pkgs.pypy2Packages || pythonPackages != pkgs.pypy3Packages then
|
||||
if libraries == []
|
||||
then python.interpreter
|
||||
else (python.withPackages (ps: libraries)).interpreter
|
||||
else python.interpreter
|
||||
;
|
||||
check = optionalString python.isPy3k (writeDash "pythoncheck.sh" ''
|
||||
exec ${buildPythonPackages.flake8}/bin/flake8 --show-source ${ignoreAttribute} "$1"
|
||||
'');
|
||||
}
|
||||
)
|
||||
name;
|
||||
|
||||
# writePyPy2 takes a name an attributeset with libraries and some pypy2 sourcecode and
|
||||
# returns an executable
|
||||
#
|
||||
# Example:
|
||||
# writePyPy2 "test_pypy2" { libraries = [ pkgs.pypy2Packages.enum ]; } ''
|
||||
# from enum import Enum
|
||||
#
|
||||
# class Test(Enum):
|
||||
# a = "success"
|
||||
#
|
||||
# print Test.a
|
||||
# ''
|
||||
writePyPy2 = makePythonWriter pkgs.pypy2 pkgs.pypy2Packages buildPackages.pypy2Packages;
|
||||
|
||||
# writePyPy2Bin takes the same arguments as writePyPy2 but outputs a directory (like writeScriptBin)
|
||||
writePyPy2Bin = name:
|
||||
writePyPy2 "/bin/${name}";
|
||||
|
||||
# writePython3 takes a name an attributeset with libraries and some python3 sourcecode and
|
||||
# returns an executable
|
||||
#
|
||||
# Example:
|
||||
# writePython3 "test_python3" { libraries = [ pkgs.python3Packages.pyyaml ]; } ''
|
||||
# import yaml
|
||||
#
|
||||
# y = yaml.load("""
|
||||
# - test: success
|
||||
# """)
|
||||
# print(y[0]['test'])
|
||||
# ''
|
||||
writePython3 = makePythonWriter pkgs.python3 pkgs.python3Packages buildPackages.python3Packages;
|
||||
|
||||
# writePython3Bin takes the same arguments as writePython3 but outputs a directory (like writeScriptBin)
|
||||
writePython3Bin = name:
|
||||
writePython3 "/bin/${name}";
|
||||
|
||||
# writePyPy3 takes a name an attributeset with libraries and some pypy3 sourcecode and
|
||||
# returns an executable
|
||||
#
|
||||
# Example:
|
||||
# writePyPy3 "test_pypy3" { libraries = [ pkgs.pypy3Packages.pyyaml ]; } ''
|
||||
# import yaml
|
||||
#
|
||||
# y = yaml.load("""
|
||||
# - test: success
|
||||
# """)
|
||||
# print(y[0]['test'])
|
||||
# ''
|
||||
writePyPy3 = makePythonWriter pkgs.pypy3 pkgs.pypy3Packages buildPackages.pypy3Packages;
|
||||
|
||||
# writePyPy3Bin takes the same arguments as writePyPy3 but outputs a directory (like writeScriptBin)
|
||||
writePyPy3Bin = name:
|
||||
writePyPy3 "/bin/${name}";
|
||||
|
||||
|
||||
makeFSharpWriter = { dotnet-sdk ? pkgs.dotnet-sdk, fsi-flags ? "", libraries ? _: [], ... } @ args: nameOrPath:
|
||||
let
|
||||
fname = last (builtins.split "/" nameOrPath);
|
||||
path = if strings.hasSuffix ".fsx" nameOrPath then nameOrPath else "${nameOrPath}.fsx";
|
||||
_nugetDeps = mkNugetDeps { name = "${fname}-nuget-deps"; nugetDeps = libraries; };
|
||||
|
||||
nuget-source = mkNugetSource {
|
||||
name = "${fname}-nuget-source";
|
||||
description = "A Nuget source with the dependencies for ${fname}";
|
||||
deps = [ _nugetDeps ];
|
||||
};
|
||||
|
||||
fsi = writeBash "fsi" ''
|
||||
export HOME=$NIX_BUILD_TOP/.home
|
||||
export DOTNET_SKIP_FIRST_TIME_EXPERIENCE=1
|
||||
export DOTNET_CLI_TELEMETRY_OPTOUT=1
|
||||
export DOTNET_NOLOGO=1
|
||||
script="$1"; shift
|
||||
${lib.getExe dotnet-sdk} fsi --quiet --nologo --readline- ${fsi-flags} "$@" < "$script"
|
||||
'';
|
||||
|
||||
in content: makeScriptWriter (
|
||||
(builtins.removeAttrs args ["dotnet-sdk" "fsi-flags" "libraries"])
|
||||
// {
|
||||
interpreter = fsi;
|
||||
}
|
||||
) path
|
||||
''
|
||||
#i "nuget: ${nuget-source}/lib"
|
||||
${ content }
|
||||
exit 0
|
||||
'';
|
||||
|
||||
writeFSharp =
|
||||
makeFSharpWriter {};
|
||||
|
||||
writeFSharpBin = name:
|
||||
writeFSharp "/bin/${name}";
|
||||
|
||||
}
|
||||
@@ -6,6 +6,8 @@ from pathlib import Path
|
||||
from types import ModuleType
|
||||
from typing import Any
|
||||
|
||||
from clan_cli import clana
|
||||
|
||||
from . import backups, config, facts, flakes, flash, history, machines, secrets, vms
|
||||
from .custom_logger import setup_logging
|
||||
from .dirs import get_clan_flake_toplevel
|
||||
@@ -110,6 +112,11 @@ def create_parser(prog: str | None = None) -> argparse.ArgumentParser:
|
||||
)
|
||||
flash.register_parser(parser_flash)
|
||||
|
||||
parser_clana = subparsers.add_parser(
|
||||
"clana", help="Describe a VM with natural language and launch it"
|
||||
)
|
||||
clana.register_parser(parser_clana)
|
||||
|
||||
if argcomplete:
|
||||
argcomplete.autocomplete(parser)
|
||||
|
||||
|
||||
@@ -58,7 +58,8 @@ def list_backups(machine: Machine, provider: str | None = None) -> list[Backup]:
|
||||
def list_command(args: argparse.Namespace) -> None:
|
||||
machine = Machine(name=args.machine, flake=args.flake)
|
||||
backups = list_backups(machine=machine, provider=args.provider)
|
||||
print(backups)
|
||||
for backup in backups:
|
||||
print(backup.archive_id)
|
||||
|
||||
|
||||
def register_list_parser(parser: argparse.ArgumentParser) -> None:
|
||||
|
||||
51
pkgs/clan-cli/clan_cli/clan_openai.py
Normal file
@@ -0,0 +1,51 @@
import json
import os
import urllib.request
from typing import Any

# Your OpenAI API key
api_key: str = os.environ["OPENAI_API_KEY"]

# The URL to which the request is sent
url: str = "https://api.openai.com/v1/chat/completions"

# The header includes the content type and the authorization with your API key
headers: dict[str, str] = {
    "Content-Type": "application/json",
    "Authorization": f"Bearer {api_key}",
}


def complete(
    messages: list[dict[str, Any]],
    model: str = "gpt-3.5-turbo",
    temperature: float = 1.0,
) -> str:
    # Data to be sent in the request
    data = {
        "model": model,
        "messages": messages,
        "temperature": temperature,
    }

    # Create a request object with the URL and the headers
    req = urllib.request.Request(url, json.dumps(data).encode("utf-8"), headers)

    # Make the request and read the response
    with urllib.request.urlopen(req) as response:
        response_body = response.read()
        resp_data = json.loads(response_body)
        return resp_data["choices"][0]["message"]["content"]


def complete_prompt(
    prompt: str,
    system: str = "",
    model: str = "gpt-3.5-turbo",
    temperature: float = 1.0,
) -> str:
    return complete(
        [{"role": "system", "content": system}, {"role": "user", "content": prompt}],
        model,
        temperature,
    )
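This helper module talks to the OpenAI chat completions endpoint with plain urllib. A minimal usage sketch, assuming OPENAI_API_KEY is set in the environment (the module reads it at import time); the prompt text here is only an example:

```python
from clan_cli import clan_openai

# Single-turn completion: one system message plus one user prompt.
answer = clan_openai.complete_prompt(
    "Suggest a NixOS option that enables OpenSSH.",
    system="Answer with a single line of NixOS configuration.",
    model="gpt-3.5-turbo",
    temperature=0.2,
)
print(answer)
```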
114
pkgs/clan-cli/clan_cli/clana/__init__.py
Normal file
@@ -0,0 +1,114 @@
|
||||
#!/usr/bin/env python3
# A subcommand that interfaces with OpenAI to generate NixOS configurations and launch VMs with them.
# The `clan clana` command lets the user enter a prompt describing the desired VM, then generates a NixOS configuration and launches a VM with it.
# For now this POC is stateless: a configuration.nix is generated in a temporary directory and launched directly.
# There are no additional arguments.
# The prompt is read from stdin.
|
||||
|
||||
import argparse
|
||||
import os
|
||||
from pathlib import Path
|
||||
|
||||
from clan_cli import clan_openai
|
||||
from clan_cli.errors import ClanCmdError
|
||||
from clan_cli.vms.run import run_command
|
||||
|
||||
base_config = Path(__file__).parent.joinpath("base-config.nix").read_text()
|
||||
|
||||
system_msg = f"""
|
||||
Your name is clana, an assistant for creating NixOS configurations.
|
||||
Your task is to generate a NixOS configuration.nix file.
|
||||
Do not output any explanations or comments, not even when the user asks a question or provides feedback.
|
||||
Always provide only the content of the configuration.nix file.
|
||||
Don't use any nixos options for which you are not sure about their syntax.
|
||||
Generate a configuration.nix which has a very high probability of just working.
|
||||
The user who provides the prompt might have technical expertise, or none at all.
|
||||
Even a grandmother who has no idea about computers should be able to use this.
|
||||
Translate the users requirements to a working configuration.nix file.
|
||||
Don't set any options under `nix.`.
|
||||
The user should not have a password and log in automatically.
|
||||
|
||||
Take care specifically about:
|
||||
- specify every option only once within the same file. Otherwise it will lead to an error like this: error: attribute 'environment.systemPackages' already defined at [...]/configuration.nix:X:X
|
||||
- don't set a password for the user. it's already set in the base config
|
||||
|
||||
|
||||
Assume the following base config is already imported. Any option set in there is already configured and doesn't need to be specified anymore:
|
||||
|
||||
```nix
|
||||
{base_config}
|
||||
```
|
||||
|
||||
The base config will be imported by the system. No need to import it anymore.
|
||||
"""
|
||||
|
||||
|
||||
# takes a (sub)parser and configures it
|
||||
def register_parser(parser: argparse.ArgumentParser) -> None:
|
||||
parser.add_argument("--show", action="store_true", help="show the configuration")
|
||||
parser.set_defaults(func=clana_command)
|
||||
|
||||
|
||||
def clana_command(args: argparse.Namespace) -> None:
|
||||
print("Please enter your wishes for the new computer: ")
|
||||
prompt = input()
|
||||
# prompt = "I want to email my grandchildren and watch them on facebook"
|
||||
print("Thank you. Generating your computer...")
|
||||
# config = clan_openai.complete(messages, model="gpt-4-turbo-preview").strip()
|
||||
config = Path(".direnv/configuration.nix").read_text()
|
||||
messages = [
|
||||
{"role": "system", "content": system_msg},
|
||||
{"role": "user", "content": prompt},
|
||||
]
|
||||
conf_dir = Path("/tmp/clana")
|
||||
conf_dir.mkdir(exist_ok=True)
|
||||
for f in conf_dir.iterdir():
|
||||
f.unlink()
|
||||
(conf_dir / "flake.nix").write_bytes(
|
||||
Path(__file__).parent.joinpath("flake.nix.template").read_bytes()
|
||||
)
|
||||
with open(conf_dir / "base-config.nix", "w") as f:
|
||||
f.write(base_config)
|
||||
with open(conf_dir / "hardware-configuration.nix", "w") as f:
|
||||
f.write("{}")
|
||||
with open(conf_dir / "configuration.nix", "w") as f:
|
||||
f.write(
|
||||
"""
|
||||
{
|
||||
imports = [
|
||||
./base-config.nix
|
||||
./ai-config.nix
|
||||
];
|
||||
}
|
||||
"""
|
||||
)
|
||||
while True:
|
||||
config_orig = clan_openai.complete(
|
||||
messages, model="gpt-4-turbo-preview"
|
||||
).strip()
|
||||
# remove code blocks
|
||||
lines = config_orig.split("\n")
|
||||
if lines[0].startswith("```"):
|
||||
lines = lines[1:-1]
|
||||
config = "\n".join(lines)
|
||||
if args.show:
|
||||
print("Configuration generated:")
|
||||
print(config)
|
||||
print("Configuration generated. Launching...")
|
||||
with open(conf_dir / "ai-config.nix", "w") as f:
|
||||
f.write(config)
|
||||
|
||||
os.environ["NIXPKGS_ALLOW_UNFREE"] = "1"
|
||||
try:
|
||||
run_command(
|
||||
machine="clana-machine", flake=conf_dir, nix_options=["--impure"]
|
||||
)
|
||||
break
|
||||
except ClanCmdError as e:
|
||||
messages += [
|
||||
{"role": "assistant", "content": config_orig},
|
||||
{
|
||||
"role": "system",
|
||||
"content": f"There was a problem that needs to be fixed:\n{e.cmd.stderr}",
|
||||
},
|
||||
]
|
||||
60
pkgs/clan-cli/clan_cli/clana/base-config.nix
Normal file
@@ -0,0 +1,60 @@
|
||||
{ config, ... }:
|
||||
|
||||
{
|
||||
imports =
|
||||
[
|
||||
# Include the results of the hardware scan.
|
||||
./hardware-configuration.nix
|
||||
];
|
||||
|
||||
# Ensure that software properties (e.g., being unfree) are respected.
|
||||
nixpkgs.config = {
|
||||
allowUnfree = true;
|
||||
};
|
||||
|
||||
# Use the systemd-boot EFI boot loader.
|
||||
boot.loader.systemd-boot.enable = true;
|
||||
boot.loader.efi.canTouchEfiVariables = true;
|
||||
|
||||
networking.hostName = "clana"; # Define your hostname.
|
||||
networking.networkmanager.enable = true;
|
||||
|
||||
# Enable the X11 windowing system.
|
||||
services.xserver.enable = true;
|
||||
services.xserver.layout = "us";
|
||||
services.xserver.xkbOptions = "eurosign:e";
|
||||
|
||||
# Enable touchpad support.
|
||||
services.xserver.libinput.enable = true;
|
||||
|
||||
# Enable the KDE Desktop Environment.
|
||||
services.xserver.displayManager.sddm.enable = true;
|
||||
services.xserver.desktopManager.plasma5.enable = true;
|
||||
|
||||
# Enable sound.
|
||||
sound.enable = true;
|
||||
hardware.pulseaudio.enable = true;
|
||||
|
||||
# Autologin settings.
|
||||
services.xserver.displayManager.autoLogin.enable = true;
|
||||
services.xserver.displayManager.autoLogin.user = "user";
|
||||
|
||||
# User settings.
|
||||
users.users.user = {
|
||||
isNormalUser = true;
|
||||
extraGroups = [ "wheel" ]; # Enable sudo for the user.
|
||||
uid = 1000;
|
||||
password = "hello";
|
||||
openssh.authorizedKeys.keys = [ ];
|
||||
};
|
||||
|
||||
# Enable firewall.
|
||||
networking.firewall.enable = true;
|
||||
networking.firewall.allowedTCPPorts = [ 80 443 ]; # HTTP and HTTPS
|
||||
|
||||
# Set time zone.
|
||||
time.timeZone = "UTC";
|
||||
|
||||
# System-wide settings.
|
||||
system.stateVersion = "22.05"; # Edit this to your NixOS release version.
|
||||
}
|
||||
30
pkgs/clan-cli/clan_cli/clana/flake.nix.template
Normal file
@@ -0,0 +1,30 @@
|
||||
{
|
||||
description = "<Put your description here>";
|
||||
|
||||
inputs.clan-core.url = "git+https://git.clan.lol/clan/clan-core";
|
||||
|
||||
outputs = { self, clan-core, ... }:
|
||||
let
|
||||
system = "x86_64-linux";
|
||||
pkgs = clan-core.inputs.nixpkgs.legacyPackages.${system};
|
||||
clan = clan-core.lib.buildClan {
|
||||
directory = self;
|
||||
clanName = "clana-clan";
|
||||
machines.clana-machine = {
|
||||
imports = [
|
||||
./configuration.nix
|
||||
];
|
||||
};
|
||||
};
|
||||
in
|
||||
{
|
||||
# all machines managed by cLAN
|
||||
inherit (clan) nixosConfigurations clanInternals;
|
||||
# add the cLAN cli tool to the dev shell
|
||||
devShells.${system}.default = pkgs.mkShell {
|
||||
packages = [
|
||||
clan-core.packages.${system}.clan-cli
|
||||
];
|
||||
};
|
||||
};
|
||||
}
|
||||
@@ -17,12 +17,18 @@ def get_formatter(color: str) -> Callable[[logging.LogRecord, bool], logging.For
    record: logging.LogRecord, with_location: bool
) -> logging.Formatter:
    reset = "\x1b[0m"
    filepath = Path(record.pathname).resolve()

    try:
        filepath = Path(record.pathname).resolve()
        filepath = Path("~", filepath.relative_to(Path.home()))
    except Exception:
        filepath = Path(record.pathname)

    if not with_location:
        return logging.Formatter(f"{color}%(levelname)s{reset}: %(message)s")

    return logging.Formatter(
        f"{color}%(levelname)s{reset}: %(message)s\n  {filepath}:%(lineno)d::%(funcName)s\n"
        f"{color}%(levelname)s{reset}: %(message)s\nLocation: {filepath}:%(lineno)d::%(funcName)s\n"
    )

    return myformatter
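The new try/except shortens paths under the user's home directory to a ~-relative form for log output and falls back to the raw path otherwise. A standalone sketch of the same idea (the sample path is made up):

```python
from pathlib import Path


def pretty_path(pathname: str) -> Path:
    # Show paths under $HOME as ~/..., otherwise keep them unchanged.
    try:
        resolved = Path(pathname).resolve()
        return Path("~", resolved.relative_to(Path.home()))
    except Exception:  # not under $HOME, or resolution failed
        return Path(pathname)


print(pretty_path(str(Path.home() / "src" / "clan" / "cli.py")))  # ~/src/clan/cli.py
```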
|
||||
@@ -62,7 +68,14 @@ def get_caller() -> str:
|
||||
if caller_frame is None:
|
||||
return "unknown"
|
||||
frame_info = inspect.getframeinfo(caller_frame)
|
||||
ret = f"{frame_info.filename}:{frame_info.lineno}::{frame_info.function}"
|
||||
|
||||
try:
|
||||
filepath = Path(frame_info.filename).resolve()
|
||||
filepath = Path("~", filepath.relative_to(Path.home()))
|
||||
except Exception:
|
||||
filepath = Path(frame_info.filename)
|
||||
|
||||
ret = f"{filepath}:{frame_info.lineno}::{frame_info.function}"
|
||||
return ret
|
||||
|
||||
|
||||
@@ -76,7 +89,7 @@ def setup_logging(level: Any, root_log_name: str = __name__.split(".")[0]) -> No
|
||||
|
||||
# Create and add your custom handler
|
||||
default_handler.setLevel(level)
|
||||
default_handler.setFormatter(CustomFormatter(level == logging.DEBUG))
|
||||
default_handler.setFormatter(CustomFormatter(str(level) == str(logging.DEBUG)))
|
||||
main_logger.addHandler(default_handler)
|
||||
|
||||
# Set logging level for other modules used by this module
|
||||
|
||||
@@ -0,0 +1,28 @@
from abc import ABC, abstractmethod
from pathlib import Path

from clan_cli.machines.machines import Machine


class FactStoreBase(ABC):
    @abstractmethod
    def __init__(self, machine: Machine) -> None:
        pass

    @abstractmethod
    def exists(self, service: str, name: str) -> bool:
        pass

    @abstractmethod
    def set(self, service: str, name: str, value: bytes) -> Path | None:
        pass

    # get a single fact
    @abstractmethod
    def get(self, service: str, name: str) -> bytes:
        pass

    # get all facts
    @abstractmethod
    def get_all(self) -> dict[str, dict[str, bytes]]:
        pass
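The concrete fact stores in the following hunks (the in_flake and vm backends) now subclass this interface. Purely as an illustration of the contract, a hypothetical in-memory store could look like this; it is not part of the change set:

```python
from pathlib import Path

from clan_cli.facts.modules import FactStoreBase
from clan_cli.machines.machines import Machine


class MemoryFactStore(FactStoreBase):
    """Illustrative only: keeps facts in a dict instead of the flake or VM state dir."""

    def __init__(self, machine: Machine) -> None:
        self.machine = machine
        self._facts: dict[str, dict[str, bytes]] = {}

    def exists(self, service: str, name: str) -> bool:
        return name in self._facts.get(service, {})

    def set(self, service: str, name: str, value: bytes) -> Path | None:
        self._facts.setdefault(service, {})[name] = value
        return None  # nothing written to disk, so nothing to commit

    def get(self, service: str, name: str) -> bytes:
        return self._facts[service][name]

    def get_all(self) -> dict[str, dict[str, bytes]]:
        return self._facts
```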
|
||||
|
||||
@@ -3,13 +3,15 @@ from pathlib import Path
|
||||
from clan_cli.errors import ClanError
|
||||
from clan_cli.machines.machines import Machine
|
||||
|
||||
from . import FactStoreBase
|
||||
|
||||
class FactStore:
|
||||
|
||||
class FactStore(FactStoreBase):
|
||||
def __init__(self, machine: Machine) -> None:
|
||||
self.machine = machine
|
||||
self.works_remotely = False
|
||||
|
||||
def set(self, _service: str, name: str, value: bytes) -> Path | None:
|
||||
def set(self, service: str, name: str, value: bytes) -> Path | None:
|
||||
if isinstance(self.machine.flake, Path):
|
||||
fact_path = (
|
||||
self.machine.flake / "machines" / self.machine.name / "facts" / name
|
||||
@@ -23,14 +25,14 @@ class FactStore:
|
||||
f"in_flake fact storage is only supported for local flakes: {self.machine.flake}"
|
||||
)
|
||||
|
||||
def exists(self, _service: str, name: str) -> bool:
|
||||
def exists(self, service: str, name: str) -> bool:
|
||||
fact_path = (
|
||||
self.machine.flake_dir / "machines" / self.machine.name / "facts" / name
|
||||
)
|
||||
return fact_path.exists()
|
||||
|
||||
# get a single fact
|
||||
def get(self, _service: str, name: str) -> bytes:
|
||||
def get(self, service: str, name: str) -> bytes:
|
||||
fact_path = (
|
||||
self.machine.flake_dir / "machines" / self.machine.name / "facts" / name
|
||||
)
|
||||
|
||||
@@ -5,10 +5,12 @@ from clan_cli.dirs import vm_state_dir
|
||||
from clan_cli.errors import ClanError
|
||||
from clan_cli.machines.machines import Machine
|
||||
|
||||
from . import FactStoreBase
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class FactStore:
|
||||
class FactStore(FactStoreBase):
|
||||
def __init__(self, machine: Machine) -> None:
|
||||
self.machine = machine
|
||||
self.works_remotely = False
|
||||
|
||||
@@ -28,12 +28,10 @@ def commit_files(
|
||||
repo_dir: Path,
|
||||
commit_message: str | None = None,
|
||||
) -> None:
|
||||
# check that the file is in the git repository and exists
|
||||
# check that the file is in the git repository
|
||||
for file_path in file_paths:
|
||||
if not Path(file_path).resolve().is_relative_to(repo_dir.resolve()):
|
||||
raise ClanError(f"File {file_path} is not in the git repository {repo_dir}")
|
||||
if not file_path.exists():
|
||||
raise ClanError(f"File {file_path} does not exist")
|
||||
# generate commit message if not provided
|
||||
if commit_message is None:
|
||||
commit_message = ""
|
||||
|
||||
@@ -35,14 +35,14 @@ class HistoryEntry:
        self.flake = FlakeConfig(**self.flake)


def merge_dicts(d1: dict, d2: dict) -> dict:
def _merge_dicts(d1: dict, d2: dict) -> dict:
    # create a new dictionary that copies d1
    merged = dict(d1)
    # iterate over the keys and values of d2
    for key, value in d2.items():
        # if the key is in d1 and both values are dictionaries, merge them recursively
        if key in d1 and isinstance(d1[key], dict) and isinstance(value, dict):
            merged[key] = merge_dicts(d1[key], value)
            merged[key] = _merge_dicts(d1[key], value)
        # otherwise, update the value of the key in the merged dictionary
        else:
            merged[key] = value
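The rename to a private helper keeps the merge semantics: nested dicts are merged recursively, and scalar values from the second argument win. A small example with made-up values:

```python
d1 = {"flake": {"flake_url": "git+https://example.org/clan", "icon": None}, "last_used": "old"}
d2 = {"flake": {"icon": "custom.png"}, "last_used": "new"}

merged = _merge_dicts(d1, d2)
# {'flake': {'flake_url': 'git+https://example.org/clan', 'icon': 'custom.png'}, 'last_used': 'new'}
print(merged)
```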
|
||||
@@ -59,7 +59,7 @@ def list_history() -> list[HistoryEntry]:
|
||||
parsed = read_history_file()
|
||||
for i, p in enumerate(parsed.copy()):
|
||||
# Everything from the settings dict is merged into the flake dict, and can override existing values
|
||||
parsed[i] = merge_dicts(p, p.get("settings", {}))
|
||||
parsed[i] = _merge_dicts(p, p.get("settings", {}))
|
||||
logs = [HistoryEntry(**p) for p in parsed]
|
||||
except (json.JSONDecodeError, TypeError) as ex:
|
||||
raise ClanError(f"History file at {user_history_file()} is corrupted") from ex
|
||||
@@ -76,40 +76,47 @@ def new_history_entry(url: str, machine: str) -> HistoryEntry:
|
||||
)
|
||||
|
||||
|
||||
def add_history(uri: ClanURI, *, all_machines: bool) -> list[HistoryEntry]:
|
||||
def add_all_to_history(uri: ClanURI) -> list[HistoryEntry]:
|
||||
history = list_history()
|
||||
new_entries: list[HistoryEntry] = []
|
||||
for machine in list_machines(uri.get_internal()):
|
||||
new_entry = _add_maschine_to_history_list(uri.get_internal(), machine, history)
|
||||
new_entries.append(new_entry)
|
||||
write_history_file(history)
|
||||
return new_entries
|
||||
|
||||
|
||||
def add_history(uri: ClanURI) -> HistoryEntry:
|
||||
user_history_file().parent.mkdir(parents=True, exist_ok=True)
|
||||
history = list_history()
|
||||
if not all_machines:
|
||||
add_maschine_to_history(uri.get_internal(), uri.params.flake_attr, history)
|
||||
|
||||
if all_machines:
|
||||
for machine in list_machines(uri.get_internal()):
|
||||
add_maschine_to_history(uri.get_internal(), machine, history)
|
||||
|
||||
new_entry = _add_maschine_to_history_list(
|
||||
uri.get_internal(), uri.params.flake_attr, history
|
||||
)
|
||||
write_history_file(history)
|
||||
return history
|
||||
return new_entry
|
||||
|
||||
|
||||
def add_maschine_to_history(
|
||||
uri_path: str, uri_machine: str, logs: list[HistoryEntry]
|
||||
) -> None:
|
||||
found = False
|
||||
|
||||
for entry in logs:
|
||||
def _add_maschine_to_history_list(
|
||||
uri_path: str, uri_machine: str, entries: list[HistoryEntry]
|
||||
) -> HistoryEntry:
|
||||
for new_entry in entries:
|
||||
if (
|
||||
entry.flake.flake_url == str(uri_path)
|
||||
and entry.flake.flake_attr == uri_machine
|
||||
new_entry.flake.flake_url == str(uri_path)
|
||||
and new_entry.flake.flake_attr == uri_machine
|
||||
):
|
||||
found = True
|
||||
entry.last_used = datetime.datetime.now().isoformat()
|
||||
new_entry.last_used = datetime.datetime.now().isoformat()
|
||||
return new_entry
|
||||
|
||||
if not found:
|
||||
history = new_history_entry(uri_path, uri_machine)
|
||||
logs.append(history)
|
||||
new_entry = new_history_entry(uri_path, uri_machine)
|
||||
entries.append(new_entry)
|
||||
return new_entry
|
||||
|
||||
|
||||
def add_history_command(args: argparse.Namespace) -> None:
|
||||
add_history(args.uri, all_machines=args.all)
|
||||
if args.all:
|
||||
add_all_to_history(args.uri)
|
||||
else:
|
||||
add_history(args.uri)
|
||||
|
||||
|
||||
# takes a (sub)parser and configures it
|
||||
|
||||
@@ -4,6 +4,7 @@ from collections.abc import Generator
|
||||
from contextlib import contextmanager
|
||||
from pathlib import Path
|
||||
from tempfile import NamedTemporaryFile
|
||||
from typing import Any
|
||||
|
||||
from clan_cli.dirs import vm_state_dir
|
||||
from qemu.qmp import QEMUMonitorProtocol
|
||||
@@ -24,23 +25,18 @@ class VMAttr:
|
||||
# the symlink will be dangling.
|
||||
self._qmp_socket: Path = state_dir / "qmp.sock"
|
||||
self._qga_socket: Path = state_dir / "qga.sock"
|
||||
self._qmp: QEMUMonitorProtocol | None = None
|
||||
|
||||
@contextmanager
|
||||
def qmp_ctx(self) -> Generator[QEMUMonitorProtocol, None, None]:
|
||||
if self._qmp is None:
|
||||
log.debug(f"qmp_socket: {self._qmp_socket}")
|
||||
rpath = self._qmp_socket.resolve()
|
||||
if not rpath.exists():
|
||||
raise ClanError(
|
||||
f"qmp socket {rpath} does not exist. Is the VM running?"
|
||||
)
|
||||
self._qmp = QEMUMonitorProtocol(str(rpath))
|
||||
self._qmp.connect()
|
||||
rpath = self._qmp_socket.resolve()
|
||||
if not rpath.exists():
|
||||
raise ClanError(f"qmp socket {rpath} does not exist. Is the VM running?")
|
||||
qmp = QEMUMonitorProtocol(str(rpath))
|
||||
qmp.connect()
|
||||
try:
|
||||
yield self._qmp
|
||||
yield qmp
|
||||
finally:
|
||||
self._qmp.close()
|
||||
qmp.close()
|
||||
|
||||
|
||||
class Machine:
|
||||
@@ -107,7 +103,7 @@ class Machine:
|
||||
return self.deployment_info["factsModule"]
|
||||
|
||||
@property
|
||||
def secrets_data(self) -> dict:
|
||||
def secrets_data(self) -> dict[str, dict[str, Any]]:
|
||||
if self.deployment_info["secretsData"]:
|
||||
try:
|
||||
return json.loads(Path(self.deployment_info["secretsData"]).read_text())
|
||||
@@ -129,7 +125,7 @@ class Machine:
|
||||
if hasattr(self, "flake_path"):
|
||||
return Path(self.flake_path)
|
||||
|
||||
self.flake_path = nix_metadata(self.flake)["path"]
|
||||
self.flake_path: str = nix_metadata(self.flake)["path"]
|
||||
return Path(self.flake_path)
|
||||
|
||||
@property
|
||||
@@ -170,6 +166,7 @@ class Machine:
|
||||
config = nix_config()
|
||||
system = config["system"]
|
||||
|
||||
file_info = dict()
|
||||
with NamedTemporaryFile(mode="w") as config_json:
|
||||
if extra_config is not None:
|
||||
json.dump(extra_config, config_json, indent=2)
|
||||
@@ -177,66 +174,66 @@ class Machine:
|
||||
json.dump({}, config_json)
|
||||
config_json.flush()
|
||||
|
||||
nar_hash = json.loads(
|
||||
file_info = json.loads(
|
||||
run(
|
||||
nix_eval(
|
||||
[
|
||||
"--impure",
|
||||
"--expr",
|
||||
f'(builtins.fetchTree {{ type = "file"; url = "file://{config_json.name}"; }}).narHash',
|
||||
f'let x = (builtins.fetchTree {{ type = "file"; url = "file://{config_json.name}"; }}); in {{ narHash = x.narHash; path = x.outPath; }}',
|
||||
]
|
||||
)
|
||||
).stdout.strip()
|
||||
)
|
||||
|
||||
args = []
|
||||
args = []
|
||||
|
||||
# get git commit from flake
|
||||
if extra_config is not None:
|
||||
metadata = nix_metadata(self.flake_dir)
|
||||
url = metadata["url"]
|
||||
if "dirtyRevision" in metadata:
|
||||
# if not impure:
|
||||
# raise ClanError(
|
||||
# "The machine has a dirty revision, and impure mode is not allowed"
|
||||
# )
|
||||
# else:
|
||||
# args += ["--impure"]
|
||||
args += ["--impure"]
|
||||
# get git commit from flake
|
||||
if extra_config is not None:
|
||||
metadata = nix_metadata(self.flake_dir)
|
||||
url = metadata["url"]
|
||||
if "dirtyRevision" in metadata:
|
||||
# if not impure:
|
||||
# raise ClanError(
|
||||
# "The machine has a dirty revision, and impure mode is not allowed"
|
||||
# )
|
||||
# else:
|
||||
# args += ["--impure"]
|
||||
args += ["--impure"]
|
||||
|
||||
args += [
|
||||
"--expr",
|
||||
f"""
|
||||
((builtins.getFlake "{url}").clanInternals.machinesFunc."{system}"."{self.name}" {{
|
||||
extraConfig = builtins.fromJSON (builtins.readFile (builtins.fetchTree {{
|
||||
type = "file";
|
||||
url = if (builtins.compareVersions builtins.nixVersion "2.19") == -1 then "{config_json.name}" else "file:{config_json.name}";
|
||||
narHash = "{nar_hash}";
|
||||
}}));
|
||||
}}).{attr}
|
||||
""",
|
||||
]
|
||||
else:
|
||||
if isinstance(self.flake, Path):
|
||||
if (self.flake / ".git").exists():
|
||||
flake = f"git+file://{self.flake}"
|
||||
else:
|
||||
flake = f"path:{self.flake}"
|
||||
args += [
|
||||
"--expr",
|
||||
f"""
|
||||
((builtins.getFlake "{url}").clanInternals.machinesFunc."{system}"."{self.name}" {{
|
||||
extraConfig = builtins.fromJSON (builtins.readFile (builtins.fetchTree {{
|
||||
type = "file";
|
||||
url = if (builtins.compareVersions builtins.nixVersion "2.19") == -1 then "{file_info["path"]}" else "file:{file_info["path"]}";
|
||||
narHash = "{file_info["narHash"]}";
|
||||
}}));
|
||||
}}).{attr}
|
||||
""",
|
||||
]
|
||||
else:
|
||||
if isinstance(self.flake, Path):
|
||||
if (self.flake / ".git").exists():
|
||||
flake = f"git+file://{self.flake}"
|
||||
else:
|
||||
flake = self.flake
|
||||
args += [
|
||||
f'{flake}#clanInternals.machines."{system}".{self.name}.{attr}',
|
||||
*nix_options,
|
||||
]
|
||||
|
||||
if method == "eval":
|
||||
output = run(nix_eval(args)).stdout.strip()
|
||||
return output
|
||||
elif method == "build":
|
||||
outpath = run(nix_build(args)).stdout.strip()
|
||||
return Path(outpath)
|
||||
flake = f"path:{self.flake}"
|
||||
else:
|
||||
raise ValueError(f"Unknown method {method}")
|
||||
flake = self.flake
|
||||
args += [
|
||||
f'{flake}#clanInternals.machines."{system}".{self.name}.{attr}',
|
||||
*nix_options,
|
||||
]
|
||||
|
||||
if method == "eval":
|
||||
output = run(nix_eval(args)).stdout.strip()
|
||||
return output
|
||||
elif method == "build":
|
||||
outpath = run(nix_build(args)).stdout.strip()
|
||||
return Path(outpath)
|
||||
else:
|
||||
raise ValueError(f"Unknown method {method}")
|
||||
|
||||
def eval_nix(
|
||||
self,
|
||||
|
||||
@@ -7,7 +7,7 @@ from ..machines.machines import Machine
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def check_secrets(machine: Machine) -> bool:
|
||||
def check_secrets(machine: Machine, service: None | str = None) -> bool:
|
||||
secrets_module = importlib.import_module(machine.secrets_module)
|
||||
secret_store = secrets_module.SecretStore(machine=machine)
|
||||
facts_module = importlib.import_module(machine.facts_module)
|
||||
@@ -15,11 +15,19 @@ def check_secrets(machine: Machine) -> bool:
|
||||
|
||||
missing_secrets = []
|
||||
missing_facts = []
|
||||
for service in machine.secrets_data:
|
||||
if service:
|
||||
services = [service]
|
||||
else:
|
||||
services = list(machine.secrets_data.keys())
|
||||
for service in services:
|
||||
for secret in machine.secrets_data[service]["secrets"]:
|
||||
if not secret_store.exists(service, secret):
|
||||
if isinstance(secret, str):
|
||||
secret_name = secret
|
||||
else:
|
||||
secret_name = secret["name"]
|
||||
if not secret_store.exists(service, secret_name):
|
||||
log.info(f"Secret {secret} for service {service} is missing")
|
||||
missing_secrets.append((service, secret))
|
||||
missing_secrets.append((service, secret_name))
|
||||
|
||||
for fact in machine.secrets_data[service]["facts"]:
|
||||
if not fact_store.exists(service, fact):
|
||||
@@ -34,8 +42,11 @@ def check_secrets(machine: Machine) -> bool:
|
||||
|
||||
|
||||
def check_command(args: argparse.Namespace) -> None:
|
||||
machine = Machine(name=args.machine, flake=args.flake)
|
||||
check_secrets(machine)
|
||||
machine = Machine(
|
||||
name=args.machine,
|
||||
flake=args.flake,
|
||||
)
|
||||
check_secrets(machine, service=args.service)
|
||||
|
||||
|
||||
def register_check_parser(parser: argparse.ArgumentParser) -> None:
|
||||
@@ -43,4 +54,8 @@ def register_check_parser(parser: argparse.ArgumentParser) -> None:
|
||||
"machine",
|
||||
help="The machine to check secrets for",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--service",
|
||||
help="the service to check",
|
||||
)
|
||||
parser.set_defaults(func=check_command)
|
||||
|
||||
@@ -2,99 +2,145 @@ import argparse
|
||||
import importlib
|
||||
import logging
|
||||
import os
|
||||
from collections.abc import Callable
|
||||
from pathlib import Path
|
||||
from tempfile import TemporaryDirectory
|
||||
|
||||
from clan_cli.cmd import run
|
||||
|
||||
from ..errors import ClanError
|
||||
from ..facts.modules import FactStoreBase
|
||||
from ..git import commit_files
|
||||
from ..machines.machines import Machine
|
||||
from ..nix import nix_shell
|
||||
from .check import check_secrets
|
||||
from .modules import SecretStoreBase
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def generate_secrets(machine: Machine) -> None:
|
||||
def generate_service_secrets(
|
||||
machine: Machine,
|
||||
service: str,
|
||||
secret_store: SecretStoreBase,
|
||||
fact_store: FactStoreBase,
|
||||
tmpdir: Path,
|
||||
prompt: Callable[[str], str],
|
||||
) -> None:
|
||||
service_dir = tmpdir / service
|
||||
# check if all secrets exist and generate them if at least one is missing
|
||||
needs_regeneration = not check_secrets(machine, service=service)
|
||||
log.debug(f"{service} needs_regeneration: {needs_regeneration}")
|
||||
if needs_regeneration:
|
||||
if not isinstance(machine.flake, Path):
|
||||
msg = f"flake is not a Path: {machine.flake}"
|
||||
msg += "fact/secret generation is only supported for local flakes"
|
||||
|
||||
env = os.environ.copy()
|
||||
facts_dir = service_dir / "facts"
|
||||
facts_dir.mkdir(parents=True)
|
||||
env["facts"] = str(facts_dir)
|
||||
secrets_dir = service_dir / "secrets"
|
||||
secrets_dir.mkdir(parents=True)
|
||||
env["secrets"] = str(secrets_dir)
|
||||
# compatibility for old outputs.nix users
|
||||
if isinstance(machine.secrets_data[service]["generator"], str):
|
||||
generator = machine.secrets_data[service]["generator"]
|
||||
else:
|
||||
generator = machine.secrets_data[service]["generator"]["finalScript"]
|
||||
if machine.secrets_data[service]["generator"]["prompt"]:
|
||||
prompt_value = prompt(
|
||||
machine.secrets_data[service]["generator"]["prompt"]
|
||||
)
|
||||
env["prompt_value"] = prompt_value
|
||||
# fmt: off
|
||||
cmd = nix_shell(
|
||||
[
|
||||
"nixpkgs#bash",
|
||||
"nixpkgs#bubblewrap",
|
||||
],
|
||||
[
|
||||
"bwrap",
|
||||
"--ro-bind", "/nix/store", "/nix/store",
|
||||
"--tmpfs", "/usr/lib/systemd",
|
||||
"--dev", "/dev",
|
||||
"--bind", str(facts_dir), str(facts_dir),
|
||||
"--bind", str(secrets_dir), str(secrets_dir),
|
||||
"--unshare-all",
|
||||
"--unshare-user",
|
||||
"--uid", "1000",
|
||||
"--",
|
||||
"bash", "-c", generator
|
||||
],
|
||||
)
|
||||
# fmt: on
|
||||
run(
|
||||
cmd,
|
||||
env=env,
|
||||
)
|
||||
files_to_commit = []
|
||||
# store secrets
|
||||
for secret in machine.secrets_data[service]["secrets"]:
|
||||
if isinstance(secret, str):
|
||||
# TODO: This is the old NixOS module, can be dropped everyone has updated.
|
||||
secret_name = secret
|
||||
groups = []
|
||||
else:
|
||||
secret_name = secret["name"]
|
||||
groups = secret.get("groups", [])
|
||||
|
||||
secret_file = secrets_dir / secret_name
|
||||
if not secret_file.is_file():
|
||||
msg = f"did not generate a file for '{secret_name}' when running the following command:\n"
|
||||
msg += generator
|
||||
raise ClanError(msg)
|
||||
secret_path = secret_store.set(
|
||||
service, secret_name, secret_file.read_bytes(), groups
|
||||
)
|
||||
if secret_path:
|
||||
files_to_commit.append(secret_path)
|
||||
|
||||
# store facts
|
||||
for name in machine.secrets_data[service]["facts"]:
|
||||
fact_file = facts_dir / name
|
||||
if not fact_file.is_file():
|
||||
msg = f"did not generate a file for '{name}' when running the following command:\n"
|
||||
msg += machine.secrets_data[service]["generator"]
|
||||
raise ClanError(msg)
|
||||
fact_file = fact_store.set(service, name, fact_file.read_bytes())
|
||||
if fact_file:
|
||||
files_to_commit.append(fact_file)
|
||||
commit_files(
|
||||
files_to_commit,
|
||||
machine.flake_dir,
|
||||
f"Update facts/secrets for service {service} in machine {machine.name}",
|
||||
)
|
||||
|
||||
|
||||
def generate_secrets(
|
||||
machine: Machine,
|
||||
prompt: None | Callable[[str], str] = None,
|
||||
) -> None:
|
||||
secrets_module = importlib.import_module(machine.secrets_module)
|
||||
secret_store = secrets_module.SecretStore(machine=machine)
|
||||
|
||||
facts_module = importlib.import_module(machine.facts_module)
|
||||
fact_store = facts_module.FactStore(machine=machine)
|
||||
|
||||
with TemporaryDirectory() as d:
|
||||
if prompt is None:
|
||||
prompt = lambda text: input(f"{text}: ")
|
||||
|
||||
with TemporaryDirectory() as tmp:
|
||||
tmpdir = Path(tmp)
|
||||
for service in machine.secrets_data:
|
||||
tmpdir = Path(d) / service
|
||||
# check if all secrets exist and generate them if at least one is missing
|
||||
needs_regeneration = not check_secrets(machine)
|
||||
log.debug(f"{service} needs_regeneration: {needs_regeneration}")
|
||||
if needs_regeneration:
|
||||
if not isinstance(machine.flake, Path):
|
||||
msg = f"flake is not a Path: {machine.flake}"
|
||||
msg += "fact/secret generation is only supported for local flakes"
|
||||
|
||||
env = os.environ.copy()
|
||||
facts_dir = tmpdir / "facts"
|
||||
facts_dir.mkdir(parents=True)
|
||||
env["facts"] = str(facts_dir)
|
||||
secrets_dir = tmpdir / "secrets"
|
||||
secrets_dir.mkdir(parents=True)
|
||||
env["secrets"] = str(secrets_dir)
|
||||
# fmt: off
|
||||
cmd = nix_shell(
|
||||
[
|
||||
"nixpkgs#bash",
|
||||
"nixpkgs#bubblewrap",
|
||||
],
|
||||
[
|
||||
"bwrap",
|
||||
"--ro-bind", "/nix/store", "/nix/store",
|
||||
"--tmpfs", "/usr/lib/systemd",
|
||||
"--dev", "/dev",
|
||||
"--bind", str(facts_dir), str(facts_dir),
|
||||
"--bind", str(secrets_dir), str(secrets_dir),
|
||||
"--unshare-all",
|
||||
"--unshare-user",
|
||||
"--uid", "1000",
|
||||
"--",
|
||||
"bash", "-c", machine.secrets_data[service]["generator"]
|
||||
],
|
||||
)
|
||||
# fmt: on
|
||||
run(
|
||||
cmd,
|
||||
env=env,
|
||||
)
|
||||
files_to_commit = []
|
||||
# store secrets
|
||||
for secret in machine.secrets_data[service]["secrets"]:
|
||||
secret_file = secrets_dir / secret
|
||||
if not secret_file.is_file():
|
||||
msg = f"did not generate a file for '{secret}' when running the following command:\n"
|
||||
msg += machine.secrets_data[service]["generator"]
|
||||
raise ClanError(msg)
|
||||
secret_path = secret_store.set(
|
||||
service, secret, secret_file.read_bytes()
|
||||
)
|
||||
if secret_path:
|
||||
files_to_commit.append(secret_path)
|
||||
|
||||
# store facts
|
||||
for name in machine.secrets_data[service]["facts"]:
|
||||
fact_file = facts_dir / name
|
||||
if not fact_file.is_file():
|
||||
msg = f"did not generate a file for '{name}' when running the following command:\n"
|
||||
msg += machine.secrets_data[service]["generator"]
|
||||
raise ClanError(msg)
|
||||
fact_file = fact_store.set(service, name, fact_file.read_bytes())
|
||||
if fact_file:
|
||||
files_to_commit.append(fact_file)
|
||||
commit_files(
|
||||
files_to_commit,
|
||||
machine.flake_dir,
|
||||
f"Update facts/secrets for service {service} in machine {machine.name}",
|
||||
)
|
||||
generate_service_secrets(
|
||||
machine=machine,
|
||||
service=service,
|
||||
secret_store=secret_store,
|
||||
fact_store=fact_store,
|
||||
tmpdir=tmpdir,
|
||||
prompt=prompt,
|
||||
)
|
||||
|
||||
print("successfully generated secrets")
|
||||
|
||||
|
||||
@@ -2,6 +2,7 @@ import argparse
|
||||
from pathlib import Path
|
||||
|
||||
from ..errors import ClanError
|
||||
from ..git import commit_files
|
||||
from ..machines.types import machine_name_type, validate_hostname
|
||||
from . import secrets
|
||||
from .folders import list_objects, remove_object, sops_machines_folder
|
||||
@@ -10,7 +11,13 @@ from .types import public_or_private_age_key_type, secret_name_type
|
||||
|
||||
|
||||
def add_machine(flake_dir: Path, name: str, key: str, force: bool) -> None:
|
||||
write_key(sops_machines_folder(flake_dir) / name, key, force)
|
||||
path = sops_machines_folder(flake_dir) / name
|
||||
write_key(path, key, force)
|
||||
commit_files(
|
||||
[path],
|
||||
flake_dir,
|
||||
f"Add machine {name} to secrets",
|
||||
)
|
||||
|
||||
|
||||
def remove_machine(flake_dir: Path, name: str) -> None:
|
||||
@@ -35,11 +42,16 @@ def list_machines(flake_dir: Path) -> list[str]:
|
||||
|
||||
|
||||
def add_secret(flake_dir: Path, machine: str, secret: str) -> None:
|
||||
secrets.allow_member(
|
||||
path = secrets.allow_member(
|
||||
secrets.machines_folder(flake_dir, secret),
|
||||
sops_machines_folder(flake_dir),
|
||||
machine,
|
||||
)
|
||||
commit_files(
|
||||
[path],
|
||||
flake_dir,
|
||||
f"Add {machine} to secret",
|
||||
)
|
||||
|
||||
|
||||
def remove_secret(flake_dir: Path, machine: str, secret: str) -> None:
|
||||
|
||||
@@ -0,0 +1,31 @@
from abc import ABC, abstractmethod
from pathlib import Path

from clan_cli.machines.machines import Machine


class SecretStoreBase(ABC):
    @abstractmethod
    def __init__(self, machine: Machine) -> None:
        pass

    @abstractmethod
    def set(
        self, service: str, name: str, value: bytes, groups: list[str]
    ) -> Path | None:
        pass

    @abstractmethod
    def get(self, service: str, name: str) -> bytes:
        pass

    @abstractmethod
    def exists(self, service: str, name: str) -> bool:
        pass

    def update_check(self) -> bool:
        return False

    @abstractmethod
    def upload(self, output_dir: Path) -> None:
        pass
|
||||
|
||||
@@ -5,12 +5,16 @@ from pathlib import Path
|
||||
from clan_cli.machines.machines import Machine
|
||||
from clan_cli.nix import nix_shell
|
||||
|
||||
from . import SecretStoreBase
|
||||
|
||||
class SecretStore:
|
||||
|
||||
class SecretStore(SecretStoreBase):
|
||||
def __init__(self, machine: Machine) -> None:
|
||||
self.machine = machine
|
||||
|
||||
def set(self, _service: str, name: str, value: bytes) -> Path | None:
|
||||
def set(
|
||||
self, service: str, name: str, value: bytes, groups: list[str]
|
||||
) -> Path | None:
|
||||
subprocess.run(
|
||||
nix_shell(
|
||||
["nixpkgs#pass"],
|
||||
@@ -21,7 +25,7 @@ class SecretStore:
|
||||
)
|
||||
return None # we manage the files outside of the git repo
|
||||
|
||||
def get(self, _service: str, name: str) -> bytes:
|
||||
def get(self, service: str, name: str) -> bytes:
|
||||
return subprocess.run(
|
||||
nix_shell(
|
||||
["nixpkgs#pass"],
|
||||
@@ -31,7 +35,7 @@ class SecretStore:
|
||||
stdout=subprocess.PIPE,
|
||||
).stdout
|
||||
|
||||
def exists(self, _service: str, name: str) -> bool:
|
||||
def exists(self, service: str, name: str) -> bool:
|
||||
password_store = os.environ.get(
|
||||
"PASSWORD_STORE_DIR", f"{os.environ['HOME']}/.password-store"
|
||||
)
|
||||
@@ -84,6 +88,8 @@ class SecretStore:
|
||||
hashes.sort()
|
||||
return b"\n".join(hashes)
|
||||
|
||||
# FIXME: add this when we switch to python3.12
|
||||
# @override
|
||||
def update_check(self) -> bool:
|
||||
local_hash = self.generate_hash()
|
||||
remote_hash = self.machine.target_host.run(
|
||||
@@ -102,5 +108,10 @@ class SecretStore:
|
||||
def upload(self, output_dir: Path) -> None:
|
||||
for service in self.machine.secrets_data:
|
||||
for secret in self.machine.secrets_data[service]["secrets"]:
|
||||
(output_dir / secret).write_bytes(self.get(service, secret))
|
||||
if isinstance(secret, dict):
|
||||
secret_name = secret["name"]
|
||||
else:
|
||||
# TODO: drop old format soon
|
||||
secret_name = secret
|
||||
(output_dir / secret_name).write_bytes(self.get(service, secret_name))
|
||||
(output_dir / ".pass_info").write_bytes(self.generate_hash())
|
||||
|
||||
@@ -6,8 +6,10 @@ from clan_cli.secrets.machines import add_machine, has_machine
|
||||
from clan_cli.secrets.secrets import decrypt_secret, encrypt_secret, has_secret
|
||||
from clan_cli.secrets.sops import generate_private_key
|
||||
|
||||
from . import SecretStoreBase
|
||||
|
||||
class SecretStore:
|
||||
|
||||
class SecretStore(SecretStoreBase):
|
||||
def __init__(self, machine: Machine) -> None:
|
||||
self.machine = machine
|
||||
|
||||
@@ -28,7 +30,9 @@ class SecretStore:
|
||||
)
|
||||
add_machine(self.machine.flake_dir, self.machine.name, pub_key, False)
|
||||
|
||||
def set(self, _service: str, name: str, value: bytes) -> Path | None:
|
||||
def set(
|
||||
self, service: str, name: str, value: bytes, groups: list[str]
|
||||
) -> Path | None:
|
||||
path = (
|
||||
sops_secrets_folder(self.machine.flake_dir) / f"{self.machine.name}-{name}"
|
||||
)
|
||||
@@ -37,13 +41,14 @@ class SecretStore:
|
||||
path,
|
||||
value.decode(),
|
||||
add_machines=[self.machine.name],
|
||||
add_groups=groups,
|
||||
)
|
||||
return path
|
||||
|
||||
def get(self, _service: str, _name: str) -> bytes:
|
||||
def get(self, service: str, name: str) -> bytes:
|
||||
raise NotImplementedError()
|
||||
|
||||
def exists(self, _service: str, name: str) -> bool:
|
||||
def exists(self, service: str, name: str) -> bool:
|
||||
return has_secret(
|
||||
self.machine.flake_dir,
|
||||
f"{self.machine.name}-{name}",
|
||||
|
||||
@@ -5,14 +5,18 @@ from pathlib import Path
|
||||
from clan_cli.dirs import vm_state_dir
|
||||
from clan_cli.machines.machines import Machine
|
||||
|
||||
from . import SecretStoreBase
|
||||
|
||||
class SecretStore:
|
||||
|
||||
class SecretStore(SecretStoreBase):
|
||||
def __init__(self, machine: Machine) -> None:
|
||||
self.machine = machine
|
||||
self.dir = vm_state_dir(str(machine.flake), machine.name) / "secrets"
|
||||
self.dir.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
def set(self, service: str, name: str, value: bytes) -> Path | None:
|
||||
def set(
|
||||
self, service: str, name: str, value: bytes, groups: list[str]
|
||||
) -> Path | None:
|
||||
secret_file = self.dir / service / name
|
||||
secret_file.parent.mkdir(parents=True, exist_ok=True)
|
||||
secret_file.write_bytes(value)
|
||||
|
||||
@@ -3,11 +3,13 @@ import getpass
|
||||
import os
|
||||
import shutil
|
||||
import sys
|
||||
from dataclasses import dataclass
|
||||
from pathlib import Path
|
||||
from typing import IO
|
||||
|
||||
from .. import tty
|
||||
from ..errors import ClanError
|
||||
from ..git import commit_files
|
||||
from .folders import (
|
||||
list_objects,
|
||||
sops_groups_folder,
|
||||
@@ -63,42 +65,58 @@ def encrypt_secret(
|
||||
key = ensure_sops_key(flake_dir)
|
||||
keys = set([])
|
||||
|
||||
files_to_commit = []
|
||||
for user in add_users:
|
||||
allow_member(
|
||||
users_folder(flake_dir, secret.name),
|
||||
sops_users_folder(flake_dir),
|
||||
user,
|
||||
False,
|
||||
files_to_commit.append(
|
||||
allow_member(
|
||||
users_folder(flake_dir, secret.name),
|
||||
sops_users_folder(flake_dir),
|
||||
user,
|
||||
False,
|
||||
)
|
||||
)
|
||||
|
||||
for machine in add_machines:
|
||||
allow_member(
|
||||
machines_folder(flake_dir, secret.name),
|
||||
sops_machines_folder(flake_dir),
|
||||
machine,
|
||||
False,
|
||||
files_to_commit.append(
|
||||
allow_member(
|
||||
machines_folder(flake_dir, secret.name),
|
||||
sops_machines_folder(flake_dir),
|
||||
machine,
|
||||
False,
|
||||
)
|
||||
)
|
||||
|
||||
for group in add_groups:
|
||||
allow_member(
|
||||
groups_folder(flake_dir, secret.name),
|
||||
sops_groups_folder(flake_dir),
|
||||
group,
|
||||
False,
|
||||
files_to_commit.append(
|
||||
allow_member(
|
||||
groups_folder(flake_dir, secret.name),
|
||||
sops_groups_folder(flake_dir),
|
||||
group,
|
||||
False,
|
||||
)
|
||||
)
|
||||
|
||||
keys = collect_keys_for_path(secret)
|
||||
|
||||
if key.pubkey not in keys:
|
||||
keys.add(key.pubkey)
|
||||
allow_member(
|
||||
users_folder(flake_dir, secret.name),
|
||||
sops_users_folder(flake_dir),
|
||||
key.username,
|
||||
False,
|
||||
files_to_commit.append(
|
||||
allow_member(
|
||||
users_folder(flake_dir, secret.name),
|
||||
sops_users_folder(flake_dir),
|
||||
key.username,
|
||||
False,
|
||||
)
|
||||
)
|
||||
|
||||
encrypt_file(secret / "secret", value, list(sorted(keys)))
|
||||
secret_path = secret / "secret"
|
||||
encrypt_file(secret_path, value, list(sorted(keys)))
|
||||
files_to_commit.append(secret_path)
|
||||
commit_files(
|
||||
files_to_commit,
|
||||
flake_dir,
|
||||
f"Update secret {secret.name}",
|
||||
)
|
||||
|
||||
|
||||
def remove_secret(flake_dir: Path, secret: str) -> None:
|
||||
@@ -106,6 +124,11 @@ def remove_secret(flake_dir: Path, secret: str) -> None:
|
||||
if not path.exists():
|
||||
raise ClanError(f"Secret '{secret}' does not exist")
|
||||
shutil.rmtree(path)
|
||||
commit_files(
|
||||
[path],
|
||||
flake_dir,
|
||||
f"Remove secret {secret}",
|
||||
)
|
||||
|
||||
|
||||
def remove_command(args: argparse.Namespace) -> None:
|
||||
@@ -139,10 +162,10 @@ def list_directory(directory: Path) -> str:
|
||||
|
||||
def allow_member(
|
||||
group_folder: Path, source_folder: Path, name: str, do_update_keys: bool = True
|
||||
) -> None:
|
||||
) -> Path:
|
||||
source = source_folder / name
|
||||
if not source.exists():
|
||||
msg = f"{name} does not exist in {source_folder}: "
|
||||
msg = f"Cannot encrypt {group_folder.parent.name} for '{name}' group. '{name}' group does not exist in {source_folder}: "
|
||||
msg += list_directory(source_folder)
|
||||
raise ClanError(msg)
|
||||
group_folder.mkdir(parents=True, exist_ok=True)
|
||||
@@ -150,7 +173,7 @@ def allow_member(
|
||||
if user_target.exists():
|
||||
if not user_target.is_symlink():
|
||||
raise ClanError(
|
||||
f"Cannot add user {name}. {user_target} exists but is not a symlink"
|
||||
f"Cannot add user '{name}' to {group_folder.parent.name} secret. {user_target} exists but is not a symlink"
|
||||
)
|
||||
os.remove(user_target)
|
||||
|
||||
@@ -160,6 +183,7 @@ def allow_member(
|
||||
group_folder.parent,
|
||||
list(sorted(collect_keys_for_path(group_folder.parent))),
|
||||
)
|
||||
return user_target
|
||||
|
||||
|
||||
def disallow_member(group_folder: Path, name: str) -> None:
|
||||
@@ -192,17 +216,31 @@ def has_secret(flake_dir: Path, secret: str) -> bool:
|
||||
return (sops_secrets_folder(flake_dir) / secret / "secret").exists()
|
||||
|
||||
|
||||
def list_secrets(flake_dir: Path) -> list[str]:
def list_secrets(flake_dir: Path, pattern: str | None = None) -> list[str]:
    path = sops_secrets_folder(flake_dir)

    def validate(name: str) -> bool:
        return VALID_SECRET_NAME.match(name) is not None and has_secret(flake_dir, name)
        return (
            VALID_SECRET_NAME.match(name) is not None
            and has_secret(flake_dir, name)
            and (pattern is None or pattern in name)
        )

    return list_objects(path, validate)


@dataclass
class ListSecretsOptions:
    flake: Path
    pattern: str | None


def list_command(args: argparse.Namespace) -> None:
    lst = list_secrets(Path(args.flake))
    options = ListSecretsOptions(
        flake=args.flake,
        pattern=args.pattern,
    )
    lst = list_secrets(options.flake, options.pattern)
    if len(lst) > 0:
        print("\n".join(lst))
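With the optional pattern, listing can now filter secrets by substring. A quick illustration with hypothetical secret names:

```python
from pathlib import Path

flake_dir = Path("/path/to/my-clan")  # hypothetical flake checkout
# Hypothetical secrets in the flake: "vm1-age.key", "vm1-ssh.id_ed25519", "web-token".
list_secrets(flake_dir)                 # -> ["vm1-age.key", "vm1-ssh.id_ed25519", "web-token"]
list_secrets(flake_dir, pattern="vm1")  # -> ["vm1-age.key", "vm1-ssh.id_ed25519"]
list_secrets(flake_dir, pattern="ssh")  # -> ["vm1-ssh.id_ed25519"]
```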
|
||||
|
||||
@@ -239,17 +277,28 @@ def set_command(args: argparse.Namespace) -> None:
|
||||
|
||||
|
||||
def rename_command(args: argparse.Namespace) -> None:
|
||||
old_path = sops_secrets_folder(Path(args.flake)) / args.secret
|
||||
new_path = sops_secrets_folder(Path(args.flake)) / args.new_name
|
||||
flake_dir = Path(args.flake)
|
||||
old_path = sops_secrets_folder(flake_dir) / args.secret
|
||||
new_path = sops_secrets_folder(flake_dir) / args.new_name
|
||||
if not old_path.exists():
|
||||
raise ClanError(f"Secret '{args.secret}' does not exist")
|
||||
if new_path.exists():
|
||||
raise ClanError(f"Secret '{args.new_name}' already exists")
|
||||
os.rename(old_path, new_path)
|
||||
commit_files(
|
||||
[old_path, new_path],
|
||||
flake_dir,
|
||||
f"Rename secret {args.secret} to {args.new_name}",
|
||||
)
|
||||
|
||||
|
||||
def register_secrets_parser(subparser: argparse._SubParsersAction) -> None:
|
||||
parser_list = subparser.add_parser("list", help="list secrets")
|
||||
parser_list.add_argument(
|
||||
"pattern",
|
||||
nargs="?",
|
||||
help="a pattern to filter the secrets. All secrets containing the pattern will be listed.",
|
||||
)
|
||||
parser_list.set_defaults(func=list_command)
|
||||
|
||||
parser_get = subparser.add_parser("get", help="get a secret")
|
||||
|
||||
@@ -144,7 +144,9 @@ def encrypt_file(
|
||||
args = ["sops", "--config", str(manifest)]
|
||||
args.extend([str(secret_path)])
|
||||
cmd = nix_shell(["nixpkgs#sops"], args)
|
||||
p = run(cmd, log=Log.BOTH, check=False)
|
||||
# Don't use our `run` here, because it breaks editor integration.
|
||||
# We never need this in our UI.
|
||||
p = subprocess.run(cmd, check=False)
|
||||
# returns 200 if the file is changed
|
||||
if p.returncode != 0 and p.returncode != 200:
|
||||
raise ClanError(
|
||||
|
||||
@@ -15,11 +15,9 @@ def upload_secrets(machine: Machine) -> None:
|
||||
secrets_module = importlib.import_module(machine.secrets_module)
|
||||
secret_store = secrets_module.SecretStore(machine=machine)
|
||||
|
||||
update_check = getattr(secret_store, "update_check", None)
|
||||
if callable(update_check):
|
||||
if update_check():
|
||||
log.info("Secrets already up to date")
|
||||
return
|
||||
if secret_store.update_check():
|
||||
log.info("Secrets already up to date")
|
||||
return
|
||||
with TemporaryDirectory() as tempdir:
|
||||
secret_store.upload(Path(tempdir))
|
||||
host = machine.target_host
|
||||
|
||||
@@ -2,6 +2,7 @@ import argparse
|
||||
from pathlib import Path
|
||||
|
||||
from ..errors import ClanError
|
||||
from ..git import commit_files
|
||||
from . import secrets
|
||||
from .folders import list_objects, remove_object, sops_users_folder
|
||||
from .sops import read_key, write_key
|
||||
@@ -14,7 +15,13 @@ from .types import (
|
||||
|
||||
|
||||
def add_user(flake_dir: Path, name: str, key: str, force: bool) -> None:
|
||||
write_key(sops_users_folder(flake_dir) / name, key, force)
|
||||
path = sops_users_folder(flake_dir) / name
|
||||
write_key(path, key, force)
|
||||
commit_files(
|
||||
[path],
|
||||
flake_dir,
|
||||
f"Add user {name} to secrets",
|
||||
)
|
||||
|
||||
|
||||
def remove_user(flake_dir: Path, name: str) -> None:
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
import os
|
||||
import random
|
||||
from dataclasses import dataclass
|
||||
from pathlib import Path
|
||||
@@ -24,24 +25,38 @@ def graphics_options(vm: VmConfig) -> GraphicOptions:
|
||||
return GraphicOptions([
|
||||
*common,
|
||||
"-nographic",
|
||||
"-vga", "none",
|
||||
"-device", f"vhost-vsock-pci,id=vhost-vsock-pci0,guest-cid={cid}",
|
||||
# TODO: vgpu
|
||||
"-vga", "none",
|
||||
#"-display", "egl-headless,gl=core",
|
||||
#"-device", "virtio-vga,blob=true",
|
||||
|
||||
# this would make the gpu part of the hypervisor
|
||||
#"-device", "virtio-vga-gl,blob=true",
|
||||
|
||||
# This is for an external gpu process
|
||||
#"-device", "virtio-serial-pci",
|
||||
#"-device", "vhost-user-vga,chardev=vgpu",
|
||||
#"-chardev", "socket,id=vgpu,path=/tmp/vgpu.sock",
|
||||
], cid)
|
||||
# fmt: on
|
||||
else:
|
||||
if not os.path.exists("/run/opengl-driver"):
|
||||
display_options = [
|
||||
"-vga",
|
||||
"none",
|
||||
"-display",
|
||||
"gtk,gl=on",
|
||||
"-device",
|
||||
"virtio-gpu-gl",
|
||||
"-display",
|
||||
"spice-app,gl=on",
|
||||
]
|
||||
else:
|
||||
display_options = ["-display", "spice-app"]
|
||||
|
||||
# fmt: off
|
||||
return GraphicOptions([
|
||||
*common,
|
||||
"-vga", "none",
|
||||
"-display", "gtk,gl=on",
|
||||
"-device", "virtio-gpu-gl",
|
||||
"-display", "spice-app,gl=on",
|
||||
*display_options,
|
||||
"-device", "virtio-serial-pci",
|
||||
"-chardev", "spicevmc,id=vdagent0,name=vdagent",
|
||||
"-device", "virtserialport,chardev=vdagent0,name=com.redhat.spice.0",
|
||||
|
||||
@@ -3,7 +3,6 @@ import importlib
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
from dataclasses import dataclass, field
|
||||
from pathlib import Path
|
||||
from tempfile import TemporaryDirectory
|
||||
|
||||
@@ -37,8 +36,9 @@ def facts_to_nixos_config(facts: dict[str, dict[str, bytes]]) -> dict:
|
||||
|
||||
# TODO move this to the Machines class
|
||||
def build_vm(
|
||||
machine: Machine, vm: VmConfig, tmpdir: Path, nix_options: list[str] = []
|
||||
machine: Machine, tmpdir: Path, nix_options: list[str] = []
|
||||
) -> dict[str, str]:
|
||||
# TODO pass prompt here for the GTK gui
|
||||
secrets_dir = get_secrets(machine, tmpdir)
|
||||
|
||||
facts_module = importlib.import_module(machine.facts_module)
|
||||
@@ -68,7 +68,6 @@ def get_secrets(
|
||||
secrets_module = importlib.import_module(machine.secrets_module)
|
||||
secret_store = secrets_module.SecretStore(machine=machine)
|
||||
|
||||
# TODO Only generate secrets for local clans
|
||||
generate_secrets(machine)
|
||||
|
||||
secret_store.upload(secrets_dir)
|
||||
@@ -113,7 +112,7 @@ def run_vm(vm: VmConfig, nix_options: list[str] = []) -> None:
|
||||
tmpdir = Path(cachedir)
|
||||
|
||||
# TODO: We should get this from the vm argument
|
||||
nixos_config = build_vm(machine, vm, tmpdir, nix_options)
|
||||
nixos_config = build_vm(machine, tmpdir, nix_options)
|
||||
|
||||
state_dir = vm_state_dir(str(vm.flake_url), machine.name)
|
||||
state_dir.mkdir(parents=True, exist_ok=True)
|
||||
@@ -177,29 +176,20 @@ def run_vm(vm: VmConfig, nix_options: list[str] = []) -> None:
|
||||
)
|
||||
|
||||
|
||||
@dataclass
|
||||
class RunOptions:
|
||||
machine: str
|
||||
flake: Path
|
||||
nix_options: list[str] = field(default_factory=list)
|
||||
waypipe: bool = False
|
||||
def run_command(
|
||||
machine: str, flake: Path, option: list[str] = [], **args: argparse.Namespace
|
||||
) -> None:
|
||||
machine_obj: Machine = Machine(machine, flake)
|
||||
|
||||
vm: VmConfig = inspect_vm(machine=machine_obj)
|
||||
|
||||
run_vm(vm, option)
|
||||
|
||||
|
||||
def run_command(args: argparse.Namespace) -> None:
|
||||
run_options = RunOptions(
|
||||
machine=args.machine,
|
||||
flake=args.flake,
|
||||
nix_options=args.option,
|
||||
)
|
||||
|
||||
machine = Machine(run_options.machine, run_options.flake)
|
||||
|
||||
vm = inspect_vm(machine=machine)
|
||||
|
||||
run_vm(vm, run_options.nix_options)
|
||||
def _run_command(args: argparse.Namespace) -> None:
|
||||
run_command(**args.vars())
|
||||
|
||||
|
||||
def register_run_parser(parser: argparse.ArgumentParser) -> None:
|
||||
parser.add_argument("machine", type=str, help="machine in the flake to run")
|
||||
parser.add_argument("--flake-url", type=str, help="flake url")
|
||||
parser.set_defaults(func=run_command)
|
||||
parser.set_defaults(func=_run_command)
|
||||
|
||||
@@ -9,7 +9,7 @@ dynamic = ["version"]
|
||||
scripts = { clan = "clan_cli:main" }
|
||||
|
||||
[tool.setuptools.packages.find]
|
||||
exclude = ["clan_cli.nixpkgs*"]
|
||||
exclude = ["clan_cli.nixpkgs*", "result"]
|
||||
|
||||
[tool.setuptools.package-data]
|
||||
clan_cli = ["config/jsonschema/*", "webui/assets/**/*", "vms/mimetypes/**/*"]
|
||||
@@ -55,5 +55,5 @@ ignore_missing_imports = true
|
||||
[tool.ruff]
|
||||
target-version = "py311"
|
||||
line-length = 88
|
||||
select = [ "E", "F", "I", "U", "N", "RUF", "ANN", "A" ]
|
||||
ignore = ["E501", "E402", "ANN101", "ANN401", "A003"]
|
||||
lint.select = [ "E", "F", "I", "U", "N", "RUF", "ANN", "A" ]
|
||||
lint.ignore = ["E501", "E402", "E731", "ANN101", "ANN401", "A003"]
|
||||
|
||||
@@ -138,7 +138,7 @@ class QEMUMonitorProtocol:
|
||||
self.__sock.settimeout(wait)
|
||||
try:
|
||||
ret = self.__json_read(only_event=True)
|
||||
except socket.timeout:
|
||||
except TimeoutError:
|
||||
raise QMPTimeoutError("Timeout waiting for event")
|
||||
except Exception:
|
||||
raise QMPConnectError("Error while reading from socket")
|
||||
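A note on this hunk (an inference from the code, not stated in the diff): since Python 3.10, `socket.timeout` is an alias of the builtin `TimeoutError`, so catching `TimeoutError` also covers socket read timeouts without referencing the `socket` module. A minimal sketch:

```
import socket

# On Python 3.10+, socket.timeout is an alias of the builtin TimeoutError,
# so a handler for TimeoutError also catches socket timeouts.
assert socket.timeout is TimeoutError

try:
    raise socket.timeout("read timed out")
except TimeoutError as err:
    print(f"caught: {err}")
```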
|
||||
@@ -16,6 +16,7 @@
|
||||
system.stateVersion = lib.version;
|
||||
sops.age.keyFile = "__CLAN_SOPS_KEY_PATH__";
|
||||
clanCore.secretsUploadDirectory = "__CLAN_SOPS_KEY_DIR__";
|
||||
clanCore.sops.defaultGroups = [ "admins" ];
|
||||
clan.virtualisation.graphics = false;
|
||||
|
||||
clan.networking.zerotier.controller.enable = true;
|
||||
|
||||
@@ -275,6 +275,14 @@ def test_secrets(
|
||||
cli.run(["--flake", str(test_flake.path), "secrets", "list"])
|
||||
assert capsys.readouterr().out == "key\n"
|
||||
|
||||
capsys.readouterr() # empty the buffer
|
||||
cli.run(["--flake", str(test_flake.path), "secrets", "list", "nonexisting"])
|
||||
assert capsys.readouterr().out == ""
|
||||
|
||||
capsys.readouterr() # empty the buffer
|
||||
cli.run(["--flake", str(test_flake.path), "secrets", "list", "key"])
|
||||
assert capsys.readouterr().out == "key\n"
|
||||
|
||||
cli.run(
|
||||
[
|
||||
"--flake",
|
||||
|
||||
@@ -33,6 +33,17 @@ def test_generate_secret(
|
||||
age_keys[0].pubkey,
|
||||
]
|
||||
)
|
||||
cli.run(
|
||||
[
|
||||
"--flake",
|
||||
str(test_flake_with_core.path),
|
||||
"secrets",
|
||||
"groups",
|
||||
"add-user",
|
||||
"admins",
|
||||
"user1",
|
||||
]
|
||||
)
|
||||
cmd = ["--flake", str(test_flake_with_core.path), "secrets", "generate", "vm1"]
|
||||
cli.run(cmd)
|
||||
has_secret(test_flake_with_core.path, "vm1-age.key")
|
||||
|
||||
@@ -110,6 +110,15 @@ def test_run(
|
||||
age_keys[0].pubkey,
|
||||
]
|
||||
)
|
||||
cli.run(
|
||||
[
|
||||
"secrets",
|
||||
"groups",
|
||||
"add-user",
|
||||
"admins",
|
||||
"user1",
|
||||
]
|
||||
)
|
||||
cli.run(["vms", "run", "vm1"])
|
||||
|
||||
|
||||
|
||||
@@ -37,3 +37,9 @@ gsettings set org.gtk.Settings.Debug enable-inspector-keybinding true
|
||||
# Start the application with the debugger attached
|
||||
GTK_DEBUG=interactive ./bin/clan-vm-manager --debug
|
||||
```
|
||||
|
||||
## Profiling
|
||||
To activate profiling execute:
|
||||
```
|
||||
PERF=1 ./bin/clan-vm-manager
|
||||
```
|
||||
@@ -1,13 +1,13 @@
|
||||
import logging
|
||||
import sys
|
||||
|
||||
from .app import MainApplication
|
||||
from clan_vm_manager.app import MainApplication
|
||||
from clan_vm_manager.components.profiler import profile
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
# TODO: Trayicon support
|
||||
# https://github.com/nicotine-plus/nicotine-plus/blob/b08552584eb6f35782ad77da93ae4aae3362bf64/pynicotine/gtkgui/widgets/trayicon.py#L982
|
||||
def main() -> None:
|
||||
@profile
|
||||
def main() -> int:
|
||||
app = MainApplication()
|
||||
return app.run(sys.argv)
|
||||
|
||||
@@ -12,17 +12,21 @@ gi.require_version("Adw", "1")
|
||||
from clan_cli.custom_logger import setup_logging
|
||||
from gi.repository import Adw, Gdk, Gio, Gtk
|
||||
|
||||
from clan_vm_manager.models.interfaces import ClanConfig
|
||||
from clan_vm_manager.models.use_join import GLib, GObject
|
||||
from clan_vm_manager.models.use_vms import VMs
|
||||
from clan_vm_manager.components.interfaces import ClanConfig
|
||||
from clan_vm_manager.singletons.use_join import GLib, GObject
|
||||
|
||||
from .trayicon import TrayIcon
|
||||
from .windows.main_window import MainWindow
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class MainApplication(Adw.Application):
|
||||
"""
|
||||
This class is initialized every time the app is started
|
||||
Only the Adw.ApplicationWindow is a singleton.
|
||||
So don't use any singletons in the Adw.Application class.
|
||||
"""
|
||||
|
||||
__gsignals__: ClassVar = {
|
||||
"join_request": (GObject.SignalFlags.RUN_FIRST, None, [str]),
|
||||
}
|
||||
@@ -30,11 +34,10 @@ class MainApplication(Adw.Application):
|
||||
def __init__(self, *args: Any, **kwargs: Any) -> None:
|
||||
super().__init__(
|
||||
*args,
|
||||
application_id="lol.clan.vm.manager",
|
||||
application_id="org.clan.vm-manager",
|
||||
flags=Gio.ApplicationFlags.HANDLES_COMMAND_LINE,
|
||||
**kwargs,
|
||||
)
|
||||
self.tray_icon: TrayIcon | None = None
|
||||
|
||||
self.add_main_option(
|
||||
"debug",
|
||||
@@ -44,10 +47,8 @@ class MainApplication(Adw.Application):
|
||||
"enable debug mode",
|
||||
None,
|
||||
)
|
||||
self.vms = VMs.use()
|
||||
log.debug(f"VMS object: {self.vms}")
|
||||
|
||||
self.window: Adw.ApplicationWindow | None = None
|
||||
self.connect("shutdown", self.on_shutdown)
|
||||
self.connect("activate", self.show_window)
|
||||
|
||||
def do_command_line(self, command_line: Any) -> int:
|
||||
@@ -56,10 +57,10 @@ class MainApplication(Adw.Application):
|
||||
options = options.end().unpack()
|
||||
|
||||
if "debug" in options:
|
||||
setup_logging("DEBUG", root_log_name=__name__.split(".")[0])
|
||||
setup_logging("DEBUG", root_log_name="clan_cli")
|
||||
setup_logging(logging.DEBUG, root_log_name=__name__.split(".")[0])
|
||||
setup_logging(logging.DEBUG, root_log_name="clan_cli")
|
||||
else:
|
||||
setup_logging("INFO", root_log_name=__name__.split(".")[0])
|
||||
setup_logging(logging.INFO, root_log_name=__name__.split(".")[0])
|
||||
log.debug("Debug logging enabled")
|
||||
|
||||
args = command_line.get_arguments()
|
||||
@@ -72,14 +73,6 @@ class MainApplication(Adw.Application):
|
||||
self.emit("join_request", uri)
|
||||
return 0
|
||||
|
||||
def on_shutdown(self, app: Gtk.Application) -> None:
|
||||
log.debug("Shutting down")
|
||||
|
||||
self.vms.kill_all()
|
||||
|
||||
if self.tray_icon is not None:
|
||||
self.tray_icon.destroy()
|
||||
|
||||
def on_window_hide_unhide(self, *_args: Any) -> None:
|
||||
assert self.window is not None
|
||||
if self.window.is_visible():
|
||||
@@ -90,15 +83,12 @@ class MainApplication(Adw.Application):
|
||||
def dummy_menu_entry(self) -> None:
|
||||
log.info("Dummy menu entry called")
|
||||
|
||||
def do_activate(self) -> None:
|
||||
self.show_window()
|
||||
|
||||
def show_window(self, app: Any = None) -> None:
|
||||
def show_window(self, *_args: Any) -> None:
|
||||
if not self.window:
|
||||
self.init_style()
|
||||
self.window = MainWindow(config=ClanConfig(initial_view="list"))
|
||||
self.window.set_application(self)
|
||||
self.tray_icon = TrayIcon(self)
|
||||
|
||||
self.window.present()
|
||||
|
||||
# TODO: For css styling
|
||||
|
||||
@@ -3,6 +3,7 @@ import os
|
||||
import signal
|
||||
import sys
|
||||
import traceback
|
||||
from datetime import datetime
|
||||
from pathlib import Path
|
||||
from typing import Any
|
||||
|
||||
@@ -63,6 +64,7 @@ def _init_proc(
|
||||
out_file: Path,
|
||||
proc_name: str,
|
||||
on_except: Callable[[Exception, mp.process.BaseProcess], None] | None,
|
||||
tstart: datetime,
|
||||
**kwargs: Any,
|
||||
) -> None:
|
||||
# Create a new process group
|
||||
@@ -76,7 +78,6 @@ def _init_proc(
|
||||
# Print some information
|
||||
pid = os.getpid()
|
||||
gpid = os.getpgid(pid=pid)
|
||||
print(f"Started new process pid={pid} gpid={gpid}", file=sys.stderr)
|
||||
|
||||
# Set the process name
|
||||
_set_proc_name(proc_name)
|
||||
@@ -84,19 +85,25 @@ def _init_proc(
|
||||
# Close stdin
|
||||
sys.stdin.close()
|
||||
|
||||
linebreak = "=" * 5
|
||||
# Execute the main function
|
||||
print(f"Executing function {func.__name__} now", file=sys.stderr)
|
||||
print(linebreak + f" {func.__name__}:{pid} " + linebreak, file=sys.stderr)
|
||||
print(f"Spawn overhead time: {datetime.now() - tstart}s", file=sys.stderr)
|
||||
try:
|
||||
func(**kwargs)
|
||||
except Exception as ex:
|
||||
traceback.print_exc()
|
||||
if on_except is not None:
|
||||
on_except(ex, mp.current_process())
|
||||
finally:
|
||||
pid = os.getpid()
|
||||
gpid = os.getpgid(pid=pid)
|
||||
print(f"Killing process group pid={pid} gpid={gpid}", file=sys.stderr)
|
||||
os.killpg(gpid, signal.SIGTERM)
|
||||
|
||||
# Kill the new process and all its children by sending a SIGTERM signal to the process group
|
||||
pid = os.getpid()
|
||||
gpid = os.getpgid(pid=pid)
|
||||
print(f"Killing process group pid={pid} gpid={gpid}", file=sys.stderr)
|
||||
os.killpg(gpid, signal.SIGTERM)
|
||||
sys.exit(1)
|
||||
# Don't use a finally block here, because we want the exitcode to be set to
|
||||
# 0 if the function returns normally
|
||||
|
||||
|
||||
def spawn(
|
||||
@@ -106,6 +113,8 @@ def spawn(
|
||||
func: Callable,
|
||||
**kwargs: Any,
|
||||
) -> MPProcess:
|
||||
tstart = datetime.now()
|
||||
|
||||
# Decouple the process from the parent
|
||||
if mp.get_start_method(allow_none=True) is None:
|
||||
mp.set_start_method(method="forkserver")
|
||||
@@ -116,16 +125,12 @@ def spawn(
|
||||
# Start the process
|
||||
proc = mp.Process(
|
||||
target=_init_proc,
|
||||
args=(func, out_file, proc_name, on_except),
|
||||
args=(func, out_file, proc_name, on_except, tstart),
|
||||
name=proc_name,
|
||||
kwargs=kwargs,
|
||||
)
|
||||
proc.start()
|
||||
|
||||
# Print some information
|
||||
cmd = f"tail -f {out_file}"
|
||||
log.info(f"Connect to stdout with: {cmd}")
|
||||
|
||||
# Return the process
|
||||
mp_proc = MPProcess(name=proc_name, proc=proc, out_file=out_file)
|
||||
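For orientation, a hedged sketch of how this `spawn` helper is invoked; it mirrors the call sites in `vmobj.py` further down in this diff. The log directory is illustrative and the `machine` object is assumed to exist in the caller's scope:

```
from pathlib import Path

from clan_cli import vms

log_dir = Path("/tmp/clan-vm-logs")  # illustrative location only

proc = spawn(
    on_except=None,                   # no custom exception callback
    out_file=log_dir / "build.log",   # child stdout/stderr is redirected here
    func=vms.run.build_vm,            # runs inside the new forkserver process
    machine=machine,                  # forwarded to func(**kwargs)
    tmpdir=log_dir,
)
proc.proc.join()                      # MPProcess exposes the underlying mp.Process
print(proc.out_file.read_text())      # and the log file the child wrote to
```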
|
||||
216
pkgs/clan-vm-manager/clan_vm_manager/components/gkvstore.py
Normal file
@@ -0,0 +1,216 @@
|
||||
import logging
|
||||
from collections.abc import Callable
|
||||
from typing import Any, Generic, TypeVar
|
||||
|
||||
import gi
|
||||
|
||||
gi.require_version("Gio", "2.0")
|
||||
from gi.repository import Gio, GObject
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
# Define type variables for key and value types
|
||||
K = TypeVar("K") # Key type
|
||||
V = TypeVar(
|
||||
"V", bound=GObject.Object
|
||||
) # Value type, bound to GObject.GObject or its subclasses
|
||||
|
||||
|
||||
class GKVStore(GObject.GObject, Gio.ListModel, Generic[K, V]):
|
||||
"""
|
||||
A simple key-value store that implements the Gio.ListModel interface, with generic types for keys and values.
|
||||
Only use self[key] and del self[key] for accessing the items for better performance.
|
||||
This class could be optimized by having the objects remember their position in the list.
|
||||
"""
|
||||
|
||||
def __init__(self, gtype: type[V], key_gen: Callable[[V], K]) -> None:
|
||||
super().__init__()
|
||||
self.gtype = gtype
|
||||
self.key_gen = key_gen
|
||||
# From Python 3.7 onwards dictionaries are ordered by default
|
||||
self._items: "dict[K, V]" = dict()
|
||||
|
||||
##################################
|
||||
# #
|
||||
# Gio.ListStore Interface #
|
||||
# #
|
||||
##################################
|
||||
@classmethod
|
||||
def new(cls: Any, gtype: type[V]) -> "GKVStore":
|
||||
return cls.__new__(cls, gtype)
|
||||
|
||||
def append(self, item: V) -> None:
|
||||
key = self.key_gen(item)
|
||||
self[key] = item
|
||||
|
||||
def find(self, item: V) -> tuple[bool, int]:
|
||||
log.warning("Finding is O(n) in GKVStore. Better use indexing")
|
||||
for i, v in enumerate(self.values()):
|
||||
if v == item:
|
||||
return True, i
|
||||
return False, -1
|
||||
|
||||
def find_with_equal_func(
|
||||
self, item: V, equal_func: Callable[[V, V], bool]
|
||||
) -> tuple[bool, int]:
|
||||
log.warning("Finding is O(n) in GKVStore. Better use indexing")
|
||||
for i, v in enumerate(self.values()):
|
||||
if equal_func(v, item):
|
||||
return True, i
|
||||
return False, -1
|
||||
|
||||
def find_with_equal_func_full(
|
||||
self, item: V, equal_func: Callable[[V, V, Any], bool], user_data: Any
|
||||
) -> tuple[bool, int]:
|
||||
log.warning("Finding is O(n) in GKVStore. Better use indexing")
|
||||
for i, v in enumerate(self.values()):
|
||||
if equal_func(v, item, user_data):
|
||||
return True, i
|
||||
return False, -1
|
||||
|
||||
def insert(self, position: int, item: V) -> None:
|
||||
log.warning("Inserting is O(n) in GKVStore. Better use append")
|
||||
log.warning(
|
||||
"This functions may have incorrect items_changed signal behavior. Please test it"
|
||||
)
|
||||
key = self.key_gen(item)
|
||||
if key in self._items:
|
||||
raise ValueError("Key already exists in the dictionary")
|
||||
if position < 0 or position > len(self._items):
|
||||
raise IndexError("Index out of range")
|
||||
|
||||
# Temporary storage for items to be reinserted
|
||||
temp_list = [(k, self._items[k]) for k in list(self.keys())[position:]]
|
||||
|
||||
# Delete items from the original dict
|
||||
for k in list(self.keys())[position:]:
|
||||
del self._items[k]
|
||||
|
||||
# Insert the new key-value pair
|
||||
self._items[key] = item
|
||||
|
||||
# Reinsert the items
|
||||
for i, (k, v) in enumerate(temp_list):
|
||||
self._items[k] = v
|
||||
|
||||
# Notify the model of the changes
|
||||
self.items_changed(position, 0, 1)
|
||||
|
||||
def insert_sorted(
|
||||
self, item: V, compare_func: Callable[[V, V, Any], int], user_data: Any
|
||||
) -> None:
|
||||
raise NotImplementedError("insert_sorted is not implemented in GKVStore")
|
||||
|
||||
def remove(self, position: int) -> None:
|
||||
if position < 0 or position >= self.get_n_items():
|
||||
return
|
||||
key = self.keys()[position]
|
||||
del self[key]
|
||||
self.items_changed(position, 1, 0)
|
||||
|
||||
def remove_all(self) -> None:
|
||||
self._items.clear()
|
||||
self.items_changed(0, len(self._items), 0)
|
||||
|
||||
def sort(self, compare_func: Callable[[V, V, Any], int], user_data: Any) -> None:
|
||||
raise NotImplementedError("sort is not implemented in GKVStore")
|
||||
|
||||
def splice(self, position: int, n_removals: int, additions: list[V]) -> None:
|
||||
raise NotImplementedError("splice is not implemented in GKVStore")
|
||||
|
||||
##################################
|
||||
# #
|
||||
# Gio.ListModel Interface #
|
||||
# #
|
||||
##################################
|
||||
def get_item(self, position: int) -> V | None:
|
||||
if position < 0 or position >= self.get_n_items():
|
||||
return None
|
||||
# Access items by index since OrderedDict does not support direct indexing
|
||||
key = list(self._items.keys())[position]
|
||||
return self._items[key]
|
||||
|
||||
def do_get_item(self, position: int) -> V | None:
|
||||
return self.get_item(position)
|
||||
|
||||
def get_item_type(self) -> GObject.GType:
|
||||
return self.gtype.__gtype__
|
||||
|
||||
def do_get_item_type(self) -> GObject.GType:
|
||||
return self.get_item_type()
|
||||
|
||||
def get_n_items(self) -> int:
|
||||
return len(self._items)
|
||||
|
||||
def do_get_n_items(self) -> int:
|
||||
return self.get_n_items()
|
||||
|
||||
##################################
|
||||
# #
|
||||
# Dict Interface #
|
||||
# #
|
||||
##################################
|
||||
def keys(self) -> list[K]:
|
||||
return list(self._items.keys())
|
||||
|
||||
def values(self) -> list[V]:
|
||||
return list(self._items.values())
|
||||
|
||||
def items(self) -> list[tuple[K, V]]:
|
||||
return list(self._items.items())
|
||||
|
||||
def get(self, key: K, default: V | None = None) -> V | None:
|
||||
return self._items.get(key, default)
|
||||
|
||||
# O(1) operation if the key does not exist, O(n) if it does
|
||||
def __setitem__(self, key: K, value: V) -> None:
|
||||
# If the key already exists, remove it O(n)
|
||||
# TODO: We have to check if updating an existing key is working correctly
|
||||
if key in self._items:
|
||||
log.warning("Updating an existing key in GKVStore is O(n)")
|
||||
position = self.keys().index(key)
|
||||
self._items[key] = value
|
||||
self.items_changed(position, 1, 1)
|
||||
else:
|
||||
# Add the new key-value pair
|
||||
self._items[key] = value
|
||||
position = max(len(self._items) - 1, 0)
|
||||
self.items_changed(position, 0, 1)
|
||||
|
||||
# O(n) operation
|
||||
def __delitem__(self, key: K) -> None:
|
||||
position = self.keys().index(key)
|
||||
del self._items[key]
|
||||
self.items_changed(position, 1, 0)
|
||||
|
||||
def __len__(self) -> int:
|
||||
return len(self._items)
|
||||
|
||||
# O(1) operation
|
||||
def __getitem__(self, key: K) -> V:
|
||||
return self._items[key]
|
||||
|
||||
def __contains__(self, key: K) -> bool:
|
||||
return key in self._items
|
||||
|
||||
def __str__(self) -> str:
|
||||
resp = "GKVStore(\n"
|
||||
for k, v in self._items.items():
|
||||
resp += f"{k}: {v}\n"
|
||||
resp += ")"
|
||||
return resp
|
||||
|
||||
def __repr__(self) -> str:
|
||||
return self._items.__str__()
|
||||
|
||||
##################################
|
||||
# #
|
||||
# Custom Methods #
|
||||
# #
|
||||
##################################
|
||||
def first(self) -> V:
|
||||
return self.values()[0]
|
||||
|
||||
def last(self) -> V:
|
||||
return self.values()[-1]
|
||||
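A small usage sketch of this store, assuming it is driven through the dict-style interface above (the `VMStore` subclass later in this diff keys `VMObject`s by `flake_attr`); the `Item` class and the keys here are illustrative:

```
import gi

gi.require_version("Gio", "2.0")
from gi.repository import GObject

from clan_vm_manager.components.gkvstore import GKVStore


class Item(GObject.Object):
    # Minimal GObject value type for illustration.
    def __init__(self, name: str) -> None:
        super().__init__()
        self.name = name


store: GKVStore[str, Item] = GKVStore(Item, key_gen=lambda item: item.name)
store.append(Item("vm1"))   # keyed as "vm1" via key_gen
store["vm2"] = Item("vm2")  # dict-style insert, emits items_changed

print(store.get_n_items())  # 2 -- Gio.ListModel view over the ordered dict
print(store["vm1"].name)    # O(1) lookup by key
del store["vm2"]            # removal also emits items_changed
```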
119
pkgs/clan-vm-manager/clan_vm_manager/components/profiler.py
Normal file
@@ -0,0 +1,119 @@
|
||||
import cProfile
|
||||
import io
|
||||
import logging
|
||||
import os
|
||||
import pstats
|
||||
import re
|
||||
import weakref
|
||||
from collections.abc import Callable
|
||||
from typing import Any
|
||||
|
||||
# Ensure you have a logger set up for logging exceptions
|
||||
log = logging.getLogger(__name__)
|
||||
explanation = """
|
||||
cProfile Output Columns Explanation:
|
||||
|
||||
- ncalls: The number of calls to the function. This includes both direct and indirect (recursive) calls.
|
||||
|
||||
- tottime: The total time spent in the given function alone, excluding time spent in calls to sub-functions.
|
||||
This measures the function's own overhead and execution time.
|
||||
|
||||
- percall (first instance): Represents the average time spent in the function per call, calculated as tottime divided by ncalls.
|
||||
This value excludes time spent in sub-function calls, focusing on the function's own processing time.
|
||||
|
||||
- cumtime: The cumulative time spent in this function and all the sub-functions it calls (both directly and indirectly).
|
||||
This includes all execution time within the function, from the start of its invocation to its return,
|
||||
including all calls to other functions and the time those calls take.
|
||||
|
||||
- percall (second instance): Represents the average time per call, including time spent in this function and in all sub-function calls.
|
||||
It is calculated as cumtime divided by ncalls, providing an average over all calls that includes all nested function calls.
|
||||
"""
|
||||
|
||||
|
||||
def print_profile(profiler: cProfile.Profile, sortkey: pstats.SortKey) -> None:
|
||||
s = io.StringIO()
|
||||
ps = pstats.Stats(profiler, stream=s)
|
||||
ps.sort_stats(sortkey)
|
||||
ps.print_stats(12)
|
||||
|
||||
# Process the output to trim file paths
|
||||
output_lines = s.getvalue().split("\n")
|
||||
for line in output_lines:
|
||||
try:
|
||||
parts = re.split(r"\s+", line)[
|
||||
1:
|
||||
] # Split on the first space to separate the time from the path
|
||||
fqpath = parts[-1]
|
||||
fpath, line_num = fqpath.split(":")
|
||||
if os.path.sep in fpath: # Check if this looks like it includes a path
|
||||
fpath = trim_path_to_three_levels(fpath)
|
||||
prefix = f"{parts[0]:>7}"
|
||||
prefix += f"{parts[1]:>9}"
|
||||
prefix += f"{parts[2]:>9}"
|
||||
prefix += f"{parts[3]:>9}"
|
||||
prefix += f"{parts[4]:>9}"
|
||||
new_line = f"{prefix:} {fpath}:{line_num}"
|
||||
else:
|
||||
new_line = line
|
||||
except (ValueError, IndexError):
|
||||
new_line = line # If there's no path, leave the line as is
|
||||
print(new_line)
|
||||
|
||||
|
||||
# TODO: Add an RLock for every profiler, currently not thread safe
|
||||
class ProfilerStore:
|
||||
profilers: dict[str, cProfile.Profile]
|
||||
|
||||
def __init__(self) -> None:
|
||||
self.profilers = {}
|
||||
self._exit_callback = weakref.finalize(self, self.on_exit)
|
||||
|
||||
def __getitem__(self, func: Callable) -> cProfile.Profile:
|
||||
key = f"{func.__module__}:{func.__name__}"
|
||||
if key not in self.profilers:
|
||||
self.profilers[key] = cProfile.Profile()
|
||||
return self.profilers[key]
|
||||
|
||||
def on_exit(self) -> None:
|
||||
for key, profiler in self.profilers.items():
|
||||
print("=" * 7 + key + "=" * 7)
|
||||
print_profile(profiler, pstats.SortKey.TIME)
|
||||
print_profile(profiler, pstats.SortKey.CUMULATIVE)
|
||||
print(explanation)
|
||||
|
||||
|
||||
def trim_path_to_three_levels(path: str) -> str:
|
||||
parts = path.split(os.path.sep)
|
||||
if len(parts) > 4:
|
||||
return os.path.sep.join(parts[-4:])
|
||||
else:
|
||||
return path
|
||||
|
||||
|
||||
PROFS = ProfilerStore()
|
||||
|
||||
|
||||
def profile(func: Callable) -> Callable:
|
||||
"""
|
||||
A decorator that profiles the decorated function, printing out the profiling
|
||||
results with paths trimmed to three directories deep.
|
||||
"""
|
||||
|
||||
def wrapper(*args: Any, **kwargs: Any) -> Any:
|
||||
global PROFS
|
||||
profiler = PROFS[func]
|
||||
|
||||
try:
|
||||
profiler.enable()
|
||||
res = func(*args, **kwargs)
|
||||
profiler.disable()
|
||||
except Exception as ex:
|
||||
log.exception(ex)
|
||||
profiler.disable()
|
||||
return None
|
||||
return res
|
||||
|
||||
if os.getenv("PERF", "0") == "1":
|
||||
return wrapper
|
||||
else:
|
||||
return func
|
||||
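A hedged usage sketch of the decorator above: profiling only takes effect when the `PERF` environment variable is `1` at the moment `@profile` is applied, otherwise the function is returned unwrapped. The workload below is illustrative:

```
import os

os.environ["PERF"] = "1"  # must be set before @profile is applied

from clan_vm_manager.components.profiler import profile


@profile
def busy_work() -> int:
    # CPU-bound work so the profile has something to report.
    return sum(i * i for i in range(200_000))


busy_work()
# The collected profiles are printed when the ProfilerStore is finalized
# (typically at interpreter exit), sorted by total and by cumulative time,
# followed by the column explanation defined above.
```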
@@ -559,7 +559,7 @@ class StatusNotifierImplementation(BaseImplementation):
|
||||
)
|
||||
self.tray_icon.register()
|
||||
|
||||
from .assets import loc
|
||||
from clan_vm_manager.assets import loc
|
||||
|
||||
icon_path = str(loc / "clan_white_notext.png")
|
||||
self.set_icon(icon_path)
|
||||
310
pkgs/clan-vm-manager/clan_vm_manager/components/vmobj.py
Normal file
@@ -0,0 +1,310 @@
|
||||
import logging
|
||||
import multiprocessing as mp
|
||||
import os
|
||||
import tempfile
|
||||
import threading
|
||||
import time
|
||||
import weakref
|
||||
from collections.abc import Generator
|
||||
from contextlib import contextmanager
|
||||
from datetime import datetime
|
||||
from pathlib import Path
|
||||
from typing import IO, ClassVar
|
||||
|
||||
import gi
|
||||
from clan_cli import vms
|
||||
from clan_cli.clan_uri import ClanScheme, ClanURI
|
||||
from clan_cli.errors import ClanError
|
||||
from clan_cli.history.add import HistoryEntry
|
||||
from clan_cli.machines.machines import Machine
|
||||
|
||||
from clan_vm_manager.components.executor import MPProcess, spawn
|
||||
|
||||
gi.require_version("GObject", "2.0")
|
||||
gi.require_version("Gtk", "4.0")
|
||||
from gi.repository import GLib, GObject, Gtk
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class VMObject(GObject.Object):
|
||||
# Define a custom signal "vm_status_changed" that is emitted with this VMObject as its argument
|
||||
__gsignals__: ClassVar = {
|
||||
"vm_status_changed": (GObject.SignalFlags.RUN_FIRST, None, [GObject.Object])
|
||||
}
|
||||
|
||||
def vm_status_changed_task(self) -> bool:
|
||||
self.emit("vm_status_changed", self)
|
||||
return GLib.SOURCE_REMOVE
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
icon: Path,
|
||||
data: HistoryEntry,
|
||||
) -> None:
|
||||
super().__init__()
|
||||
|
||||
# Store the data from the history entry
|
||||
self.data = data
|
||||
|
||||
# Create a process object to store the VM process
|
||||
self.vm_process = MPProcess("vm_dummy", mp.Process(), Path("./dummy"))
|
||||
self.build_process = MPProcess("build_dummy", mp.Process(), Path("./dummy"))
|
||||
self._start_thread: threading.Thread = threading.Thread()
|
||||
self.machine: Machine | None = None
|
||||
|
||||
# Watcher to stop the VM
|
||||
self.KILL_TIMEOUT = 20 # seconds
|
||||
self._stop_thread: threading.Thread = threading.Thread()
|
||||
|
||||
# Build progress bar vars
|
||||
self.progress_bar: Gtk.ProgressBar = Gtk.ProgressBar()
|
||||
self.progress_bar.hide()
|
||||
self.progress_bar.set_hexpand(True) # Horizontally expand
|
||||
self.prog_bar_id: int = 0
|
||||
|
||||
# Create a temporary directory to store the logs
|
||||
self.log_dir = tempfile.TemporaryDirectory(
|
||||
prefix="clan_vm-", suffix=f"-{self.data.flake.flake_attr}"
|
||||
)
|
||||
self._logs_id: int = 0
|
||||
self._log_file: IO[str] | None = None
|
||||
|
||||
# To be able to set the switch state programmatically
|
||||
# we need to store the handler id returned by the connect method
|
||||
# and block the signal while we change the state. This is cursed.
|
||||
self.switch = Gtk.Switch()
|
||||
self.switch_handler_id: int = self.switch.connect(
|
||||
"notify::active", self.on_switch_toggle
|
||||
)
|
||||
self.connect("vm_status_changed", self.on_vm_status_changed)
|
||||
|
||||
# Make sure the VM is killed when the reference to this object is dropped
|
||||
self._finalizer = weakref.finalize(self, self.kill_ref_drop)
|
||||
|
||||
def on_vm_status_changed(self, vm: "VMObject", _vm: "VMObject") -> None:
|
||||
self.switch.set_state(self.is_running() and not self.is_building())
|
||||
if self.switch.get_sensitive() is False and not self.is_building():
|
||||
self.switch.set_sensitive(True)
|
||||
|
||||
exit_vm = self.vm_process.proc.exitcode
|
||||
exit_build = self.build_process.proc.exitcode
|
||||
exitc = exit_vm or exit_build
|
||||
if not self.is_running() and exitc != 0:
|
||||
self.switch.handler_block(self.switch_handler_id)
|
||||
self.switch.set_active(False)
|
||||
self.switch.handler_unblock(self.switch_handler_id)
|
||||
log.error(f"VM exited with error. Exitcode: {exitc}")
|
||||
|
||||
def on_switch_toggle(self, switch: Gtk.Switch, user_state: bool) -> None:
|
||||
if switch.get_active():
|
||||
switch.set_state(False)
|
||||
self.start()
|
||||
else:
|
||||
switch.set_state(True)
|
||||
self.shutdown()
|
||||
switch.set_sensitive(False)
|
||||
|
||||
# We use a context manager to create the machine object
|
||||
# and make sure it is destroyed when the context is exited
|
||||
@contextmanager
|
||||
def create_machine(self) -> Generator[Machine, None, None]:
|
||||
uri = ClanURI.from_str(
|
||||
url=self.data.flake.flake_url, flake_attr=self.data.flake.flake_attr
|
||||
)
|
||||
match uri.scheme:
|
||||
case ClanScheme.LOCAL.value(path):
|
||||
self.machine = Machine(
|
||||
name=self.data.flake.flake_attr,
|
||||
flake=path, # type: ignore
|
||||
)
|
||||
case ClanScheme.REMOTE.value(url):
|
||||
self.machine = Machine(
|
||||
name=self.data.flake.flake_attr,
|
||||
flake=url, # type: ignore
|
||||
)
|
||||
yield self.machine
|
||||
self.machine = None
|
||||
|
||||
def _pulse_progress_bar_task(self) -> bool:
|
||||
if self.progress_bar.is_visible():
|
||||
self.progress_bar.pulse()
|
||||
return GLib.SOURCE_CONTINUE
|
||||
else:
|
||||
return GLib.SOURCE_REMOVE
|
||||
|
||||
def __start(self) -> None:
|
||||
with self.create_machine() as machine:
|
||||
# Start building VM
|
||||
tstart = datetime.now()
|
||||
log.info(f"Building VM {self.get_id()}")
|
||||
log_dir = Path(str(self.log_dir.name))
|
||||
self.build_process = spawn(
|
||||
on_except=None,
|
||||
out_file=log_dir / "build.log",
|
||||
func=vms.run.build_vm,
|
||||
machine=machine,
|
||||
tmpdir=log_dir,
|
||||
)
|
||||
GLib.idle_add(self.vm_status_changed_task)
|
||||
|
||||
# Start the logs watcher
|
||||
self._logs_id = GLib.timeout_add(
|
||||
50, self._get_logs_task, self.build_process
|
||||
)
|
||||
if self._logs_id == 0:
|
||||
log.error("Failed to start VM log watcher")
|
||||
log.debug(f"Starting logs watcher on file: {self.build_process.out_file}")
|
||||
|
||||
# Start the progress bar and show it
|
||||
self.progress_bar.show()
|
||||
self.prog_bar_id = GLib.timeout_add(100, self._pulse_progress_bar_task)
|
||||
if self.prog_bar_id == 0:
|
||||
log.error("Couldn't spawn a progess bar task")
|
||||
|
||||
# Wait for the build to finish then hide the progress bar
|
||||
self.build_process.proc.join()
|
||||
tend = datetime.now()
|
||||
log.info(f"VM {self.get_id()} build took {tend - tstart}s")
|
||||
self.progress_bar.hide()
|
||||
|
||||
# Check if the VM was built successfully
|
||||
if self.build_process.proc.exitcode != 0:
|
||||
log.error(f"Failed to build VM {self.get_id()}")
|
||||
GLib.idle_add(self.vm_status_changed_task)
|
||||
return
|
||||
log.info(f"Successfully built VM {self.get_id()}")
|
||||
|
||||
# Start the VM
|
||||
self.vm_process = spawn(
|
||||
on_except=None,
|
||||
out_file=Path(str(self.log_dir.name)) / "vm.log",
|
||||
func=vms.run.run_vm,
|
||||
vm=self.data.flake.vm,
|
||||
)
|
||||
log.debug(f"Started VM {self.get_id()}")
|
||||
GLib.idle_add(self.vm_status_changed_task)
|
||||
|
||||
# Start the logs watcher
|
||||
self._logs_id = GLib.timeout_add(50, self._get_logs_task, self.vm_process)
|
||||
if self._logs_id == 0:
|
||||
log.error("Failed to start VM log watcher")
|
||||
log.debug(f"Starting logs watcher on file: {self.vm_process.out_file}")
|
||||
|
||||
# Wait for the VM to stop
|
||||
self.vm_process.proc.join()
|
||||
log.debug(f"VM {self.get_id()} has stopped")
|
||||
GLib.idle_add(self.vm_status_changed_task)
|
||||
|
||||
def start(self) -> None:
|
||||
if self.is_running():
|
||||
log.warn("VM is already running. Ignoring start request")
|
||||
self.emit("vm_status_changed", self)
|
||||
return
|
||||
log.debug(f"VM state dir {self.log_dir.name}")
|
||||
self._start_thread = threading.Thread(target=self.__start)
|
||||
self._start_thread.start()
|
||||
|
||||
def _get_logs_task(self, proc: MPProcess) -> bool:
|
||||
if not proc.out_file.exists():
|
||||
return GLib.SOURCE_CONTINUE
|
||||
|
||||
if not self._log_file:
|
||||
try:
|
||||
self._log_file = open(proc.out_file)
|
||||
except Exception as ex:
|
||||
log.exception(ex)
|
||||
self._log_file = None
|
||||
return GLib.SOURCE_REMOVE
|
||||
|
||||
line = os.read(self._log_file.fileno(), 4096)
|
||||
if len(line) != 0:
|
||||
print(line.decode("utf-8"), end="", flush=True)
|
||||
|
||||
if not proc.proc.is_alive():
|
||||
log.debug("Removing logs watcher")
|
||||
self._log_file = None
|
||||
return GLib.SOURCE_REMOVE
|
||||
|
||||
return GLib.SOURCE_CONTINUE
|
||||
|
||||
def is_running(self) -> bool:
|
||||
return self._start_thread.is_alive()
|
||||
|
||||
def is_building(self) -> bool:
|
||||
return self.build_process.proc.is_alive()
|
||||
|
||||
def is_shutting_down(self) -> bool:
|
||||
return self._stop_thread.is_alive()
|
||||
|
||||
def get_id(self) -> str:
|
||||
return f"{self.data.flake.flake_url}#{self.data.flake.flake_attr}"
|
||||
|
||||
def __stop(self) -> None:
|
||||
log.info(f"Stopping VM {self.get_id()}")
|
||||
|
||||
start_time = datetime.now()
|
||||
while self.is_running():
|
||||
diff = datetime.now() - start_time
|
||||
if diff.seconds > self.KILL_TIMEOUT:
|
||||
log.error(
|
||||
f"VM {self.get_id()} has not stopped after {self.KILL_TIMEOUT}s. Killing it"
|
||||
)
|
||||
self.vm_process.kill_group()
|
||||
return
|
||||
if self.is_building():
|
||||
log.info(f"VM {self.get_id()} is still building. Killing it")
|
||||
self.build_process.kill_group()
|
||||
return
|
||||
if not self.machine:
|
||||
log.error(f"Machine object is None. Killing VM {self.get_id()}")
|
||||
self.vm_process.kill_group()
|
||||
return
|
||||
|
||||
# Try to shutdown the VM gracefully using QMP
|
||||
try:
|
||||
with self.machine.vm.qmp_ctx() as qmp:
|
||||
qmp.command("system_powerdown")
|
||||
except (OSError, ClanError) as ex:
|
||||
log.debug(f"QMP command 'system_powerdown' ignored. Error: {ex}")
|
||||
|
||||
# Try 20 times to stop the VM
|
||||
time.sleep(self.KILL_TIMEOUT / 20)
|
||||
GLib.idle_add(self.vm_status_changed_task)
|
||||
log.debug(f"VM {self.get_id()} has stopped")
|
||||
|
||||
def shutdown(self) -> None:
|
||||
if not self.is_running():
|
||||
log.warning("VM not running. Ignoring shutdown request.")
|
||||
self.emit("vm_status_changed", self)
|
||||
return
|
||||
if self.is_shutting_down():
|
||||
log.warning("Shutdown already in progress")
|
||||
self.emit("vm_status_changed", self)
|
||||
return
|
||||
self._stop_thread = threading.Thread(target=self.__stop)
|
||||
self._stop_thread.start()
|
||||
|
||||
def kill_ref_drop(self) -> None:
|
||||
if self.is_running():
|
||||
log.warning("Killing VM due to reference drop")
|
||||
self.kill()
|
||||
|
||||
def kill(self) -> None:
|
||||
if not self.is_running():
|
||||
log.warning(f"Tried to kill VM {self.get_id()} is not running")
|
||||
return
|
||||
log.info(f"Killing VM {self.get_id()} now")
|
||||
self.vm_process.kill_group()
|
||||
|
||||
def read_whole_log(self) -> str:
|
||||
if not self.vm_process.out_file.exists():
|
||||
log.error(f"Log file {self.vm_process.out_file} does not exist")
|
||||
return ""
|
||||
return self.vm_process.out_file.read_text()
|
||||
|
||||
def __str__(self) -> str:
|
||||
return f"VM({self.get_id()})"
|
||||
|
||||
def __repr__(self) -> str:
|
||||
return self.__str__()
|
||||
@@ -1,23 +0,0 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
import logging
|
||||
from typing import Literal
|
||||
|
||||
import gi
|
||||
|
||||
gi.require_version("Gtk", "4.0")
|
||||
gi.require_version("Adw", "1")
|
||||
from clan_cli.errors import ClanError
|
||||
from gi.repository import Adw
|
||||
|
||||
Severity = Literal["Error"] | Literal["Warning"] | Literal["Info"] | str
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def show_error_dialog(error: ClanError, severity: Severity | None = "Error") -> None:
|
||||
message = str(error)
|
||||
dialog = Adw.MessageDialog(parent=None, heading=severity, body=message)
|
||||
log.error(message)
|
||||
dialog.add_response("ok", "ok")
|
||||
dialog.choose()
|
||||
@@ -1,356 +0,0 @@
|
||||
import os
|
||||
import tempfile
|
||||
import weakref
|
||||
from datetime import datetime
|
||||
from pathlib import Path
|
||||
from typing import IO, Any, ClassVar
|
||||
|
||||
import gi
|
||||
from clan_cli import vms
|
||||
from clan_cli.clan_uri import ClanScheme, ClanURI
|
||||
from clan_cli.errors import ClanError
|
||||
from clan_cli.history.add import HistoryEntry
|
||||
from clan_cli.history.list import list_history
|
||||
|
||||
from clan_vm_manager import assets
|
||||
from clan_vm_manager.errors.show_error import show_error_dialog
|
||||
|
||||
from .executor import MPProcess, spawn
|
||||
|
||||
gi.require_version("Gtk", "4.0")
|
||||
import logging
|
||||
import multiprocessing as mp
|
||||
import threading
|
||||
|
||||
from clan_cli.machines.machines import Machine
|
||||
from gi.repository import Gio, GLib, GObject, Gtk
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class ClanGroup(GObject.Object):
|
||||
def __init__(self, url: str | Path, vms: list["VM"]) -> None:
|
||||
super().__init__()
|
||||
self.url = url
|
||||
self.vms = vms
|
||||
self.clan_name = vms[0].data.flake.clan_name
|
||||
self.list_store = Gio.ListStore.new(VM)
|
||||
|
||||
for vm in vms:
|
||||
self.list_store.append(vm)
|
||||
|
||||
|
||||
def init_grp_store(list_store: Gio.ListStore) -> None:
|
||||
groups: dict[str | Path, list["VM"]] = {}
|
||||
for vm in get_saved_vms():
|
||||
ll = groups.get(vm.data.flake.flake_url, [])
|
||||
ll.append(vm)
|
||||
groups[vm.data.flake.flake_url] = ll
|
||||
|
||||
for url, vm_list in groups.items():
|
||||
grp = ClanGroup(url, vm_list)
|
||||
list_store.append(grp)
|
||||
|
||||
|
||||
class Clans:
|
||||
list_store: Gio.ListStore
|
||||
_instance: "None | ClanGroup" = None
|
||||
|
||||
# Make sure the VMS class is used as a singleton
|
||||
def __init__(self) -> None:
|
||||
raise RuntimeError("Call use() instead")
|
||||
|
||||
@classmethod
|
||||
def use(cls: Any) -> "ClanGroup":
|
||||
if cls._instance is None:
|
||||
cls._instance = cls.__new__(cls)
|
||||
cls.list_store = Gio.ListStore.new(ClanGroup)
|
||||
init_grp_store(cls.list_store)
|
||||
|
||||
return cls._instance
|
||||
|
||||
def filter_by_name(self, text: str) -> None:
|
||||
if text:
|
||||
filtered_list = self.list_store
|
||||
filtered_list.remove_all()
|
||||
|
||||
groups: dict[str | Path, list["VM"]] = {}
|
||||
for vm in get_saved_vms():
|
||||
ll = groups.get(vm.data.flake.flake_url, [])
|
||||
print(text, vm.data.flake.vm.machine_name)
|
||||
if text.lower() in vm.data.flake.vm.machine_name.lower():
|
||||
ll.append(vm)
|
||||
groups[vm.data.flake.flake_url] = ll
|
||||
|
||||
for url, vm_list in groups.items():
|
||||
grp = ClanGroup(url, vm_list)
|
||||
filtered_list.append(grp)
|
||||
|
||||
else:
|
||||
self.refresh()
|
||||
|
||||
def refresh(self) -> None:
|
||||
self.list_store.remove_all()
|
||||
init_grp_store(self.list_store)
|
||||
|
||||
|
||||
class VM(GObject.Object):
|
||||
# Define a custom signal with the name "vm_stopped" and a string argument for the message
|
||||
__gsignals__: ClassVar = {
|
||||
"vm_status_changed": (GObject.SignalFlags.RUN_FIRST, None, [GObject.Object]),
|
||||
"build_vm": (GObject.SignalFlags.RUN_FIRST, None, [GObject.Object, bool]),
|
||||
}
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
icon: Path,
|
||||
data: HistoryEntry,
|
||||
) -> None:
|
||||
super().__init__()
|
||||
self.KILL_TIMEOUT = 6 # seconds
|
||||
self.data = data
|
||||
self.process = MPProcess("dummy", mp.Process(), Path("./dummy"))
|
||||
self._watcher_id: int = 0
|
||||
self._stop_watcher_id: int = 0
|
||||
self._stop_timer_init: datetime | None = None
|
||||
self._logs_id: int = 0
|
||||
self._log_file: IO[str] | None = None
|
||||
self.progress_bar: Gtk.ProgressBar = Gtk.ProgressBar()
|
||||
self.progress_bar.hide()
|
||||
self.progress_bar.set_hexpand(True) # Horizontally expand
|
||||
self.prog_bar_id: int = 0
|
||||
self.log_dir = tempfile.TemporaryDirectory(
|
||||
prefix="clan_vm-", suffix=f"-{self.data.flake.flake_attr}"
|
||||
)
|
||||
self._finalizer = weakref.finalize(self, self.kill)
|
||||
self.connect("build_vm", self.build_vm)
|
||||
uri = ClanURI.from_str(
|
||||
url=self.data.flake.flake_url, flake_attr=self.data.flake.flake_attr
|
||||
)
|
||||
match uri.scheme:
|
||||
case ClanScheme.LOCAL.value(path):
|
||||
self.machine = Machine(
|
||||
name=self.data.flake.flake_attr,
|
||||
flake=path, # type: ignore
|
||||
)
|
||||
case ClanScheme.REMOTE.value(url):
|
||||
self.machine = Machine(
|
||||
name=self.data.flake.flake_attr,
|
||||
flake=url, # type: ignore
|
||||
)
|
||||
|
||||
def _pulse_progress_bar(self) -> bool:
|
||||
self.progress_bar.pulse()
|
||||
return GLib.SOURCE_CONTINUE
|
||||
|
||||
def build_vm(self, vm: "VM", _vm: "VM", building: bool) -> None:
|
||||
if building:
|
||||
log.info("Building VM")
|
||||
self.progress_bar.show()
|
||||
self.prog_bar_id = GLib.timeout_add(100, self._pulse_progress_bar)
|
||||
if self.prog_bar_id == 0:
|
||||
raise ClanError("Couldn't spawn a progess bar task")
|
||||
else:
|
||||
self.progress_bar.hide()
|
||||
if not GLib.Source.remove(self.prog_bar_id):
|
||||
log.error("Failed to remove progress bar task")
|
||||
log.info("VM built")
|
||||
|
||||
def __start(self) -> None:
|
||||
log.info(f"Starting VM {self.get_id()}")
|
||||
vm = vms.run.inspect_vm(self.machine)
|
||||
|
||||
# GLib.idle_add(self.emit, "build_vm", self, True)
|
||||
# self.process = spawn(
|
||||
# on_except=None,
|
||||
# log_dir=Path(str(self.log_dir.name)),
|
||||
# func=vms.run.build_vm,
|
||||
# machine=self.machine,
|
||||
# vm=vm,
|
||||
# )
|
||||
# self.process.proc.join()
|
||||
|
||||
# GLib.idle_add(self.emit, "build_vm", self, False)
|
||||
|
||||
# if self.process.proc.exitcode != 0:
|
||||
# log.error(f"Failed to build VM {self.get_id()}")
|
||||
# return
|
||||
|
||||
self.process = spawn(
|
||||
on_except=None,
|
||||
out_file=Path(str(self.log_dir.name)) / "vm.log",
|
||||
func=vms.run.run_vm,
|
||||
vm=vm,
|
||||
)
|
||||
log.debug(f"Started VM {self.get_id()}")
|
||||
GLib.idle_add(self.emit, "vm_status_changed", self)
|
||||
log.debug(f"Starting logs watcher on file: {self.process.out_file}")
|
||||
self._logs_id = GLib.timeout_add(50, self._get_logs_task)
|
||||
if self._logs_id == 0:
|
||||
raise ClanError("Failed to add logs watcher")
|
||||
|
||||
log.debug(f"Starting VM watcher for: {self.machine.name}")
|
||||
self._watcher_id = GLib.timeout_add(50, self._vm_watcher_task)
|
||||
if self._watcher_id == 0:
|
||||
raise ClanError("Failed to add watcher")
|
||||
|
||||
def start(self) -> None:
|
||||
if self.is_running():
|
||||
log.warn("VM is already running")
|
||||
return
|
||||
threading.Thread(target=self.__start).start()
|
||||
|
||||
def _vm_watcher_task(self) -> bool:
|
||||
if not self.is_running():
|
||||
self.emit("vm_status_changed", self)
|
||||
log.debug("Removing VM watcher")
|
||||
return GLib.SOURCE_REMOVE
|
||||
|
||||
return GLib.SOURCE_CONTINUE
|
||||
|
||||
def _get_logs_task(self) -> bool:
|
||||
if not self.process.out_file.exists():
|
||||
return GLib.SOURCE_CONTINUE
|
||||
|
||||
if not self._log_file:
|
||||
try:
|
||||
self._log_file = open(self.process.out_file)
|
||||
except Exception as ex:
|
||||
log.exception(ex)
|
||||
self._log_file = None
|
||||
return GLib.SOURCE_REMOVE
|
||||
|
||||
line = os.read(self._log_file.fileno(), 4096)
|
||||
if len(line) != 0:
|
||||
print(line.decode("utf-8"), end="", flush=True)
|
||||
|
||||
if not self.is_running():
|
||||
log.debug("Removing logs watcher")
|
||||
self._log_file = None
|
||||
return GLib.SOURCE_REMOVE
|
||||
|
||||
return GLib.SOURCE_CONTINUE
|
||||
|
||||
def is_running(self) -> bool:
|
||||
return self.process.proc.is_alive()
|
||||
|
||||
def get_id(self) -> str:
|
||||
return f"{self.data.flake.flake_url}#{self.data.flake.flake_attr}"
|
||||
|
||||
def __shutdown_watchdog(self) -> None:
|
||||
if self.is_running():
|
||||
assert self._stop_timer_init is not None
|
||||
diff = datetime.now() - self._stop_timer_init
|
||||
if diff.seconds > self.KILL_TIMEOUT:
|
||||
log.error(f"VM {self.get_id()} has not stopped. Killing it")
|
||||
self.process.kill_group()
|
||||
return GLib.SOURCE_CONTINUE
|
||||
else:
|
||||
log.info(f"VM {self.get_id()} has stopped")
|
||||
return GLib.SOURCE_REMOVE
|
||||
|
||||
def __stop(self) -> None:
|
||||
log.info(f"Stopping VM {self.get_id()}")
|
||||
|
||||
try:
|
||||
with self.machine.vm.qmp_ctx() as qmp:
|
||||
qmp.command("system_powerdown")
|
||||
except ClanError as e:
|
||||
log.debug(e)
|
||||
|
||||
self._stop_timer_init = datetime.now()
|
||||
self._stop_watcher_id = GLib.timeout_add(100, self.__shutdown_watchdog)
|
||||
if self._stop_watcher_id == 0:
|
||||
raise ClanError("Failed to add stop watcher")
|
||||
|
||||
def shutdown(self) -> None:
|
||||
if not self.is_running():
|
||||
return
|
||||
log.info(f"Stopping VM {self.get_id()}")
|
||||
threading.Thread(target=self.__stop).start()
|
||||
|
||||
def kill(self) -> None:
|
||||
if not self.is_running():
|
||||
log.warning(f"Tried to kill VM {self.get_id()} is not running")
|
||||
return
|
||||
log.info(f"Killing VM {self.get_id()} now")
|
||||
self.process.kill_group()
|
||||
|
||||
def read_whole_log(self) -> str:
|
||||
if not self.process.out_file.exists():
|
||||
log.error(f"Log file {self.process.out_file} does not exist")
|
||||
return ""
|
||||
return self.process.out_file.read_text()
|
||||
|
||||
|
||||
class VMs:
|
||||
list_store: Gio.ListStore
|
||||
_instance: "None | VMs" = None
|
||||
|
||||
# Make sure the VMS class is used as a singleton
|
||||
def __init__(self) -> None:
|
||||
raise RuntimeError("Call use() instead")
|
||||
|
||||
@classmethod
|
||||
def use(cls: Any) -> "VMs":
|
||||
if cls._instance is None:
|
||||
cls._instance = cls.__new__(cls)
|
||||
cls.list_store = Gio.ListStore.new(VM)
|
||||
|
||||
for vm in get_saved_vms():
|
||||
cls.list_store.append(vm)
|
||||
return cls._instance
|
||||
|
||||
def filter_by_name(self, text: str) -> None:
|
||||
if text:
|
||||
filtered_list = self.list_store
|
||||
filtered_list.remove_all()
|
||||
for vm in get_saved_vms():
|
||||
if text.lower() in vm.data.flake.vm.machine_name.lower():
|
||||
filtered_list.append(vm)
|
||||
else:
|
||||
self.refresh()
|
||||
|
||||
def get_by_id(self, ident: str) -> None | VM:
|
||||
for vm in self.list_store:
|
||||
if ident == vm.get_id():
|
||||
return vm
|
||||
|
||||
return None
|
||||
|
||||
def get_running_vms(self) -> list[VM]:
|
||||
return list(filter(lambda vm: vm.is_running(), self.list_store))
|
||||
|
||||
def kill_all(self) -> None:
|
||||
log.debug(f"Running vms: {self.get_running_vms()}")
|
||||
for vm in self.get_running_vms():
|
||||
vm.kill()
|
||||
|
||||
def refresh(self) -> None:
|
||||
log.error("NEVER FUCKING DO THIS")
|
||||
return
|
||||
self.list_store.remove_all()
|
||||
for vm in get_saved_vms():
|
||||
self.list_store.append(vm)
|
||||
|
||||
|
||||
def get_saved_vms() -> list[VM]:
|
||||
vm_list = []
|
||||
log.info("=====CREATING NEW VM OBJ====")
|
||||
try:
|
||||
# Execute `clan flakes add <path>` to democlan for this to work
|
||||
for entry in list_history():
|
||||
if entry.flake.icon is None:
|
||||
icon = assets.loc / "placeholder.jpeg"
|
||||
else:
|
||||
icon = entry.flake.icon
|
||||
|
||||
base = VM(
|
||||
icon=Path(icon),
|
||||
data=entry,
|
||||
)
|
||||
vm_list.append(base)
|
||||
except ClanError as e:
|
||||
show_error_dialog(e)
|
||||
|
||||
return vm_list
|
||||
@@ -4,12 +4,10 @@ from collections.abc import Callable
|
||||
from typing import Any, ClassVar
|
||||
|
||||
import gi
|
||||
from clan_cli import ClanError
|
||||
from clan_cli.clan_uri import ClanURI
|
||||
from clan_cli.history.add import add_history
|
||||
from clan_cli.history.add import HistoryEntry, add_history
|
||||
|
||||
from clan_vm_manager.errors.show_error import show_error_dialog
|
||||
from clan_vm_manager.models.use_vms import Clans
|
||||
from clan_vm_manager.singletons.use_vms import ClanStore
|
||||
|
||||
gi.require_version("Gtk", "4.0")
|
||||
gi.require_version("Adw", "1")
|
||||
@@ -26,29 +24,33 @@ class JoinValue(GObject.Object):
|
||||
}
|
||||
|
||||
url: ClanURI
|
||||
entry: HistoryEntry | None
|
||||
|
||||
def __init__(
|
||||
self, url: ClanURI, on_join: Callable[["JoinValue", Any], None]
|
||||
) -> None:
|
||||
def _join_finished_task(self) -> bool:
|
||||
self.emit("join_finished", self)
|
||||
return GLib.SOURCE_REMOVE
|
||||
|
||||
def __init__(self, url: ClanURI) -> None:
|
||||
super().__init__()
|
||||
self.url = url
|
||||
self.connect("join_finished", on_join)
|
||||
self.entry = None
|
||||
|
||||
def __join(self) -> None:
|
||||
add_history(self.url, all_machines=False)
|
||||
GLib.idle_add(lambda: self.emit("join_finished", self))
|
||||
new_entry = add_history(self.url)
|
||||
self.entry = new_entry
|
||||
GLib.idle_add(self._join_finished_task)
|
||||
|
||||
def join(self) -> None:
|
||||
threading.Thread(target=self.__join).start()
|
||||
|
||||
|
||||
class Join:
|
||||
class JoinList:
|
||||
"""
|
||||
This is a singleton.
|
||||
It is initialized with the first call of use()
|
||||
"""
|
||||
|
||||
_instance: "None | Join" = None
|
||||
_instance: "None | JoinList" = None
|
||||
list_store: Gio.ListStore
|
||||
|
||||
# Make sure the VMS class is used as a singleton
|
||||
@@ -56,40 +58,39 @@ class Join:
|
||||
raise RuntimeError("Call use() instead")
|
||||
|
||||
@classmethod
|
||||
def use(cls: Any) -> "Join":
|
||||
def use(cls: Any) -> "JoinList":
|
||||
if cls._instance is None:
|
||||
cls._instance = cls.__new__(cls)
|
||||
cls.list_store = Gio.ListStore.new(JoinValue)
|
||||
|
||||
return cls._instance
|
||||
|
||||
def push(self, url: ClanURI, on_join: Callable[[JoinValue], None]) -> None:
|
||||
def is_empty(self) -> bool:
|
||||
return self.list_store.get_n_items() == 0
|
||||
|
||||
def push(
|
||||
self, value: JoinValue, after_join: Callable[[JoinValue, JoinValue], None]
|
||||
) -> None:
|
||||
"""
|
||||
Add a join request.
|
||||
This method can add multiple join requests if called subsequently for each request.
|
||||
"""
|
||||
|
||||
if url.get_id() in [item.url.get_id() for item in self.list_store]:
|
||||
log.info(f"Join request already exists: {url}")
|
||||
if value.url.get_id() in [item.url.get_id() for item in self.list_store]:
|
||||
log.info(f"Join request already exists: {value.url}. Ignoring.")
|
||||
return
|
||||
|
||||
def after_join(item: JoinValue, _: Any) -> None:
|
||||
self.discard(item)
|
||||
Clans.use().refresh()
|
||||
# VMS.use().refresh()
|
||||
print("Refreshed list after join")
|
||||
on_join(item)
|
||||
value.connect("join_finished", self._on_join_finished)
|
||||
value.connect("join_finished", after_join)
|
||||
|
||||
self.list_store.append(JoinValue(url, after_join))
|
||||
self.list_store.append(value)
|
||||
|
||||
def join(self, item: JoinValue) -> None:
|
||||
try:
|
||||
log.info(f"trying to join: {item.url}")
|
||||
item.join()
|
||||
except ClanError as e:
|
||||
show_error_dialog(e)
|
||||
def _on_join_finished(self, _source: GObject.Object, value: JoinValue) -> None:
|
||||
log.info(f"Join finished: {value.url}")
|
||||
self.discard(value)
|
||||
ClanStore.use().push_history_entry(value.entry)
|
||||
|
||||
def discard(self, item: JoinValue) -> None:
|
||||
(has, idx) = self.list_store.find(item)
|
||||
def discard(self, value: JoinValue) -> None:
|
||||
(has, idx) = self.list_store.find(value)
|
||||
if has:
|
||||
self.list_store.remove(idx)
|
||||
@@ -7,34 +7,30 @@ gi.require_version("Adw", "1")
|
||||
from gi.repository import Adw
|
||||
|
||||
|
||||
class Views:
|
||||
class ViewStack:
|
||||
"""
|
||||
This is a singleton.
|
||||
It is initialized with the first call of use()
|
||||
|
||||
Usage:
|
||||
|
||||
Views.use().set_visible()
|
||||
ViewStack.use().set_visible()
|
||||
|
||||
Views.use() can also be called before the data is needed. e.g. to eliminate/reduce waiting time.
|
||||
ViewStack.use() can also be called before the data is needed. e.g. to eliminate/reduce waiting time.
|
||||
|
||||
"""
|
||||
|
||||
_instance: "None | Views" = None
|
||||
_instance: "None | ViewStack" = None
|
||||
view: Adw.ViewStack
|
||||
main_window: Adw.ApplicationWindow = None
|
||||
|
||||
# Make sure the VMS class is used as a singleton
|
||||
def __init__(self) -> None:
|
||||
raise RuntimeError("Call use() instead")
|
||||
|
||||
@classmethod
|
||||
def use(cls: Any) -> "Views":
|
||||
def use(cls: Any) -> "ViewStack":
|
||||
if cls._instance is None:
|
||||
cls._instance = cls.__new__(cls)
|
||||
cls.view = Adw.ViewStack()
|
||||
|
||||
return cls._instance
|
||||
|
||||
def set_main_window(self, window: Adw.ApplicationWindow) -> None:
|
||||
self.main_window = window
|
||||
97
pkgs/clan-vm-manager/clan_vm_manager/singletons/use_vms.py
Normal file
@@ -0,0 +1,97 @@
|
||||
import logging
|
||||
from pathlib import Path
|
||||
from typing import Any
|
||||
|
||||
import gi
|
||||
from clan_cli.history.add import HistoryEntry
|
||||
|
||||
from clan_vm_manager import assets
|
||||
from clan_vm_manager.components.gkvstore import GKVStore
|
||||
from clan_vm_manager.components.vmobj import VMObject
|
||||
|
||||
gi.require_version("GObject", "2.0")
|
||||
gi.require_version("Gtk", "4.0")
|
||||
from gi.repository import GLib
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class VMStore(GKVStore):
|
||||
def __init__(self) -> None:
|
||||
super().__init__(VMObject, lambda vm: vm.data.flake.flake_attr)
|
||||
|
||||
|
||||
class ClanStore:
|
||||
_instance: "None | ClanStore" = None
|
||||
_clan_store: GKVStore[str, VMStore]
|
||||
|
||||
# Make sure the ClanStore class is used as a singleton
|
||||
def __init__(self) -> None:
|
||||
raise RuntimeError("Call use() instead")
|
||||
|
||||
@classmethod
|
||||
def use(cls: Any) -> "ClanStore":
|
||||
if cls._instance is None:
|
||||
cls._instance = cls.__new__(cls)
|
||||
cls._clan_store = GKVStore(
|
||||
VMStore, lambda store: store.first().data.flake.flake_url
|
||||
)
|
||||
|
||||
return cls._instance
|
||||
|
||||
@property
|
||||
def clan_store(self) -> GKVStore[str, VMStore]:
|
||||
return self._clan_store
|
||||
|
||||
def create_vm_task(self, vm: HistoryEntry) -> bool:
|
||||
self.push_history_entry(vm)
|
||||
return GLib.SOURCE_REMOVE
|
||||
|
||||
def push_history_entry(self, entry: HistoryEntry) -> None:
|
||||
# TODO: We shouldn't do this here but in the list view
|
||||
if entry.flake.icon is None:
|
||||
icon = assets.loc / "placeholder.jpeg"
|
||||
else:
|
||||
icon = entry.flake.icon
|
||||
|
||||
vm = VMObject(
|
||||
icon=Path(icon),
|
||||
data=entry,
|
||||
)
|
||||
self.push(vm)
|
||||
|
||||
def push(self, vm: VMObject) -> None:
|
||||
url = vm.data.flake.flake_url
|
||||
|
||||
# Only write to the store if the VM is not already in it
|
||||
# Every write to the KVStore rerenders bound widgets to the clan_store
|
||||
if url not in self.clan_store:
|
||||
log.debug(f"Creating new VMStore for {url}")
|
||||
vm_store = VMStore()
|
||||
vm_store.append(vm)
|
||||
self.clan_store[url] = vm_store
|
||||
else:
|
||||
log.debug(f"Appending VM {vm.data.flake.flake_attr} to store")
|
||||
vm_store = self.clan_store[url]
|
||||
vm_store.append(vm)
|
||||
|
||||
def remove(self, vm: VMObject) -> None:
|
||||
del self.clan_store[vm.data.flake.flake_url][vm.data.flake.flake_attr]
|
||||
|
||||
def get_vm(self, flake_url: str, flake_attr: str) -> None | VMObject:
|
||||
clan = self.clan_store.get(flake_url)
|
||||
if clan is None:
|
||||
return None
|
||||
return clan.get(flake_attr, None)
|
||||
|
||||
def get_running_vms(self) -> list[VMObject]:
|
||||
return [
|
||||
vm
|
||||
for clan in self.clan_store.values()
|
||||
for vm in clan.values()
|
||||
if vm.is_running()
|
||||
]
|
||||
|
||||
def kill_all(self) -> None:
|
||||
for vm in self.get_running_vms():
|
||||
vm.kill()
|
||||
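A short sketch of how this store is fed from history entries, following the push logic above; the flake URL and machine name are illustrative, and `list_history` is imported as elsewhere in this diff:

```
from clan_cli.history.list import list_history

from clan_vm_manager.singletons.use_vms import ClanStore

store = ClanStore.use()              # singleton, created on first use
for entry in list_history():
    store.push_history_entry(entry)  # grouped per flake_url into a VMStore

vm = store.get_vm("git+https://example.org/myclan", "vm1")
if vm is not None and not vm.is_running():
    vm.start()
```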
@@ -4,18 +4,17 @@ from functools import partial
from typing import Any

import gi
from clan_cli import ClanError, history, machines
from clan_cli import history, machines
from clan_cli.clan_uri import ClanURI

from clan_vm_manager.models.interfaces import ClanConfig
from clan_vm_manager.models.use_join import Join, JoinValue
from clan_vm_manager.models.use_views import Views
from clan_vm_manager.components.interfaces import ClanConfig
from clan_vm_manager.components.vmobj import VMObject
from clan_vm_manager.singletons.use_join import JoinList, JoinValue
from clan_vm_manager.singletons.use_vms import ClanStore, VMStore

gi.require_version("Adw", "1")
from gi.repository import Adw, Gdk, Gio, GLib, GObject, Gtk

from clan_vm_manager.models.use_vms import VM, ClanGroup, Clans

log = logging.getLogger(__name__)


@@ -51,41 +50,30 @@ class ClanList(Gtk.Box):
self.app = Gio.Application.get_default()
self.app.connect("join_request", self.on_join_request)

groups = Clans.use()
join = Join.use()

self.log_label: Gtk.Label = Gtk.Label()
self.__init_machines = history.add.list_history()

# Add join list
self.join_boxed_list = create_boxed_list(
model=join.list_store, render_row=self.render_join_row
model=JoinList.use().list_store, render_row=self.render_join_row
)
self.join_boxed_list.add_css_class("join-list")
self.append(self.join_boxed_list)

self.group_list = create_boxed_list(
model=groups.list_store, render_row=self.render_group_row
model=ClanStore.use().clan_store, render_row=self.render_group_row
)
self.group_list.add_css_class("group-list")

search_bar = Gtk.SearchBar()
# This widget will typically be the top-level window
search_bar.set_key_capture_widget(Views.use().main_window)
entry = Gtk.SearchEntry()
entry.set_placeholder_text("Search cLan")
entry.connect("search-changed", self.on_search_changed)
entry.add_css_class("search-entry")
search_bar.set_child(entry)

self.append(search_bar)
self.append(self.join_boxed_list)
self.append(self.group_list)

def render_group_row(self, boxed_list: Gtk.ListBox, group: ClanGroup) -> Gtk.Widget:
# if boxed_list.has_css_class("no-shadow"):
# boxed_list.remove_css_class("no-shadow")

def render_group_row(
self, boxed_list: Gtk.ListBox, vm_store: VMStore
) -> Gtk.Widget:
vm = vm_store.first()
log.debug("Rendering group row for %s", vm.data.flake.flake_url)
grp = Adw.PreferencesGroup()
grp.set_title(group.clan_name)
grp.set_description(group.url)
grp.set_title(vm.data.flake.clan_name)
grp.set_description(vm.data.flake.flake_url)

add_action = Gio.SimpleAction.new("add", GLib.VariantType.new("s"))
add_action.connect("activate", self.on_add)
@@ -93,8 +81,8 @@ class ClanList(Gtk.Box):
app.add_action(add_action)

menu_model = Gio.Menu()
for vm in machines.list.list_machines(flake_url=group.url):
if vm not in [item.data.flake.flake_attr for item in group.list_store]:
for vm in machines.list.list_machines(flake_url=vm.data.flake.flake_url):
if vm not in vm_store:
menu_model.append(vm, f"app.add::{vm}")

box = Gtk.Box(orientation=Gtk.Orientation.HORIZONTAL, spacing=5)
@@ -108,10 +96,7 @@ class ClanList(Gtk.Box):

grp.set_header_suffix(box)

vm_list = create_boxed_list(
model=group.list_store, render_row=self.render_vm_row
)

vm_list = create_boxed_list(model=vm_store, render_row=self.render_vm_row)
grp.add(vm_list)

return grp
@@ -120,13 +105,7 @@ class ClanList(Gtk.Box):
target = parameter.get_string()
print("Adding new machine", target)

def on_search_changed(self, entry: Gtk.SearchEntry) -> None:
Clans.use().filter_by_name(entry.get_text())
# Disable the shadow if the list is empty
if not self.app.vms.list_store.get_n_items():
self.group_list.add_css_class("no-shadow")

def render_vm_row(self, boxed_list: Gtk.ListBox, vm: VM) -> Gtk.Widget:
def render_vm_row(self, boxed_list: Gtk.ListBox, vm: VMObject) -> Gtk.Widget:
# Remove no-shadow class if attached
if boxed_list.has_css_class("no-shadow"):
boxed_list.remove_css_class("no-shadow")
@@ -135,8 +114,10 @@ class ClanList(Gtk.Box):

# ====== Display Avatar ======
avatar = Adw.Avatar()

machine_icon = flake.vm.machine_icon

# If there is a machine icon, display it else
# display the clan icon
if machine_icon:
avatar.set_custom_image(Gdk.Texture.new_from_filename(str(machine_icon)))
elif flake.icon:
@@ -150,10 +131,11 @@ class ClanList(Gtk.Box):

# ====== Display Name And Url =====
row.set_title(flake.flake_attr)

row.set_title_lines(1)
row.set_title_selectable(True)

# If there is a machine description, display it else
# display the clan name
if flake.vm.machine_description:
row.set_subtitle(flake.vm.machine_description)
else:
@@ -161,53 +143,42 @@ class ClanList(Gtk.Box):
row.set_subtitle_lines(1)

# ==== Display build progress bar ====
box = Gtk.Box(orientation=Gtk.Orientation.HORIZONTAL, spacing=5)
box.set_valign(Gtk.Align.CENTER)
box.append(vm.progress_bar)
box.set_homogeneous(False)
row.add_suffix(box) # This allows children to have different sizes
build_box = Gtk.Box(orientation=Gtk.Orientation.HORIZONTAL, spacing=5)
build_box.set_valign(Gtk.Align.CENTER)
build_box.append(vm.progress_bar)
build_box.set_homogeneous(False)
row.add_suffix(build_box) # This allows children to have different sizes

# ==== Action buttons ====
switch = Gtk.Switch()

switch_box = Gtk.Box(orientation=Gtk.Orientation.VERTICAL)
switch_box.set_valign(Gtk.Align.CENTER)
switch_box.append(switch)

box = Gtk.Box(orientation=Gtk.Orientation.HORIZONTAL, spacing=5)
box.set_valign(Gtk.Align.CENTER)
button_box = Gtk.Box(orientation=Gtk.Orientation.HORIZONTAL, spacing=5)
button_box.set_valign(Gtk.Align.CENTER)

## Drop down menu
open_action = Gio.SimpleAction.new("edit", GLib.VariantType.new("s"))
open_action.connect("activate", self.on_edit)

app = Gio.Application.get_default()
app.add_action(open_action)

menu_model = Gio.Menu()
menu_model.append("Edit", f"app.edit::{vm.get_id()}")
pref_button = Gtk.MenuButton()
pref_button.set_icon_name("open-menu-symbolic")
pref_button.set_menu_model(menu_model)
button_box.append(pref_button)

box.append(switch_box)
box.append(pref_button)
## VM switch button
switch_box = Gtk.Box(orientation=Gtk.Orientation.VERTICAL)
switch_box.set_valign(Gtk.Align.CENTER)
switch_box.append(vm.switch)
button_box.append(switch_box)

switch.connect("notify::active", partial(self.on_row_toggle, vm))
vm.connect("vm_status_changed", partial(self.vm_status_changed, switch))

# suffix.append(box)
row.add_suffix(box)
row.add_suffix(button_box)

return row

def on_edit(self, action: Any, parameter: Any) -> None:
target = parameter.get_string()
vm = self.app.vms.get_by_id(target)

if not vm:
raise ClanError("Something went wrong. Please restart the app.")

print("Editing settings for machine", vm)
print("Editing settings for machine", target)

def render_join_row(self, boxed_list: Gtk.ListBox, item: JoinValue) -> Gtk.Widget:
if boxed_list.has_css_class("no-shadow"):
@@ -219,9 +190,7 @@ class ClanList(Gtk.Box):
row.set_subtitle(item.url.get_internal())
row.add_css_class("trust")

# TODO: figure out how to detect that
exist = self.app.vms.use().get_by_id(item.url.get_id())
if exist:
if item.url.params.flake_attr in ClanStore.use().clan_store:
sub = row.get_subtitle()
row.set_subtitle(
sub + "\nClan already exists. Joining again will update it"
@@ -251,52 +220,23 @@ class ClanList(Gtk.Box):

return row

def show_error_dialog(self, error: str) -> None:
p = Views.use().main_window

dialog = Adw.MessageDialog(heading="Error")
dialog.add_response("ok", "ok")
dialog.set_body(error)
dialog.set_transient_for(p) # set the parent window of the dialog
dialog.choose()

def on_join_request(self, widget: Any, url: str) -> None:
log.debug("Join request: %s", url)
clan_uri = ClanURI.from_str(url)
Join.use().push(clan_uri, self.after_join)
value = JoinValue(url=clan_uri)
JoinList.use().push(value, self.on_after_join)

def after_join(self, item: JoinValue) -> None:
def on_after_join(self, source: JoinValue, item: JoinValue) -> None:
# If the join request list is empty disable the shadow artefact
if not Join.use().list_store.get_n_items():
if JoinList.use().is_empty():
self.join_boxed_list.add_css_class("no-shadow")
print("after join in list")

def on_trust_clicked(self, item: JoinValue, widget: Gtk.Widget) -> None:
def on_trust_clicked(self, value: JoinValue, widget: Gtk.Widget) -> None:
widget.set_sensitive(False)
self.cancel_button.set_sensitive(False)
value.join()

# TODO(@hsjobeki): Confirm and edit details
# Views.use().view.set_visible_child_name("details")

Join.use().join(item)

def on_discard_clicked(self, item: JoinValue, widget: Gtk.Widget) -> None:
Join.use().discard(item)
if not Join.use().list_store.get_n_items():
def on_discard_clicked(self, value: JoinValue, widget: Gtk.Widget) -> None:
JoinList.use().discard(value)
if JoinList.use().is_empty():
self.join_boxed_list.add_css_class("no-shadow")

def on_row_toggle(self, vm: VM, row: Adw.SwitchRow, state: bool) -> None:
if row.get_active():
row.set_state(False)
vm.start()

if not row.get_active():
row.set_state(True)
vm.shutdown()

def vm_status_changed(self, switch: Gtk.Switch, vm: VM, _vm: VM) -> None:
switch.set_active(vm.is_running())
switch.set_state(vm.is_running())
exitc = vm.process.proc.exitcode
if not vm.is_running() and exitc != 0:
log.error(f"VM exited with error. Exitcode: {exitc}")

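The hunks above replace the old Join and Clans helpers with JoinList and ClanStore from the new singletons package, each reached through a use() classmethod. A stripped-down sketch of that accessor pattern, assuming the real classes only layer Gio/GObject state on top of it; the class name below is made up for illustration:

# Sketch of the use() singleton accessor assumed by JoinList.use() / ClanStore.use().
from typing import ClassVar


class ClanStoreSketch:
    _instance: ClassVar["ClanStoreSketch | None"] = None

    def __init__(self) -> None:
        self.clan_store: dict[str, dict] = {}

    @classmethod
    def use(cls) -> "ClanStoreSketch":
        # Create the shared instance lazily on first access.
        if cls._instance is None:
            cls._instance = cls()
        return cls._instance


# Every caller gets the same instance, so widgets bound to it stay in sync.
assert ClanStoreSketch.use() is ClanStoreSketch.use()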
@@ -1,13 +1,23 @@
import gi
import logging
import threading
from typing import Any

from clan_vm_manager.models.interfaces import ClanConfig
from clan_vm_manager.models.use_views import Views
import gi
from clan_cli.history.list import list_history

from clan_vm_manager.components.interfaces import ClanConfig
from clan_vm_manager.singletons.use_views import ViewStack
from clan_vm_manager.singletons.use_vms import ClanStore
from clan_vm_manager.views.details import Details
from clan_vm_manager.views.list import ClanList

gi.require_version("Adw", "1")

from gi.repository import Adw, Gtk
from gi.repository import Adw, Gio, GLib, Gtk

from clan_vm_manager.components.trayicon import TrayIcon

log = logging.getLogger(__name__)


class MainWindow(Adw.ApplicationWindow):
@@ -22,9 +32,14 @@ class MainWindow(Adw.ApplicationWindow):
header = Adw.HeaderBar()
view.add_top_bar(header)

app = Gio.Application.get_default()
self.tray_icon: TrayIcon = TrayIcon(app)

# Initialize all ClanStore
threading.Thread(target=self._populate_vms).start()

# Initialize all views
stack_view = Views.use().view
Views.use().set_main_window(self)
stack_view = ViewStack.use().view

scroll = Gtk.ScrolledWindow()
scroll.set_propagate_natural_height(True)
@@ -41,3 +56,15 @@ class MainWindow(Adw.ApplicationWindow):
clamp.set_maximum_size(1000)

view.set_content(clamp)

self.connect("destroy", self.on_destroy)

def _populate_vms(self) -> None:
# Execute `clan flakes add <path>` to democlan for this to work
# TODO: Make list_history a generator function
for entry in list_history():
GLib.idle_add(ClanStore.use().create_vm_task, entry)

def on_destroy(self, *_args: Any) -> None:
self.tray_icon.destroy()
ClanStore.use().kill_all()

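_populate_vms runs on a worker thread but hands every history entry back to the GTK main loop through GLib.idle_add, since widgets and the models bound to ClanStore should only be touched from the main thread. A minimal sketch of that hand-off, assuming PyGObject is installed; the entry list and the print call are placeholders for the real create_vm_task:

# Sketch of the worker-thread to main-loop hand-off used by _populate_vms above.
import threading

from gi.repository import GLib

loop = GLib.MainLoop()


def create_vm_task(entry: str) -> bool:
    # Runs on the main loop; the real code would update ClanStore here.
    print("creating VM for", entry)
    loop.quit()
    return GLib.SOURCE_REMOVE  # run the idle callback only once


def populate() -> None:
    for entry in ["clan://demo#machine1"]:  # placeholder entries
        GLib.idle_add(create_vm_task, entry)


threading.Thread(target=populate).start()
loop.run()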
@@ -13,6 +13,14 @@
}:
let
source = ./.;
desktop-file = makeDesktopItem {
name = "org.clan.vm-manager";
exec = "clan-vm-manager %u";
icon = ./clan_vm_manager/assets/clan_white.png;
desktopName = "cLAN Manager";
startupWMClass = "clan";
mimeTypes = [ "x-scheme-handler/clan" ];
};
in
python3.pkgs.buildPythonApplication {
name = "clan-vm-manager";
@@ -36,15 +44,18 @@ python3.pkgs.buildPythonApplication {
propagatedBuildInputs = [ pygobject3 clan-cli ];

# also re-expose dependencies so we test them in CI
passthru.tests = {
clan-vm-manager-no-breakpoints = runCommand "clan-vm-manager-no-breakpoints" { } ''
if grep --include \*.py -Rq "breakpoint()" ${source}; then
echo "breakpoint() found in ${source}:"
grep --include \*.py -Rn "breakpoint()" ${source}
exit 1
fi
touch $out
'';
passthru = {
inherit desktop-file;
tests = {
clan-vm-manager-no-breakpoints = runCommand "clan-vm-manager-no-breakpoints" { } ''
if grep --include \*.py -Rq "breakpoint()" ${source}; then
echo "breakpoint() found in ${source}:"
grep --include \*.py -Rn "breakpoint()" ${source}
exit 1
fi
touch $out
'';
};
};

# Don't leak python packages into a devshell.
@@ -56,13 +67,6 @@ python3.pkgs.buildPythonApplication {
PYTHONPATH= $out/bin/clan-vm-manager --help
'';
desktopItems = [
(makeDesktopItem {
name = "lol.clan.vm.manager";
exec = "clan-vm-manager %u";
icon = ./clan_vm_manager/assets/clan_white.png;
desktopName = "cLAN Manager";
startupWMClass = "clan";
mimeTypes = [ "x-scheme-handler/clan" ];
})
desktop-file
];
}

@@ -1,8 +1,36 @@
#!/usr/bin/env bash

set -eux -o pipefail
set -e -o pipefail

check_git_tag() {
local repo_path="$1"
local target_tag="$2"

# Change directory to the specified Git repository
pushd "$repo_path" > /dev/null 2>&1
# shellcheck disable=SC2181
if [ $? -ne 0 ]; then
echo "Error: Failed to change directory to $repo_path"
return 1
fi

# Get the current Git tag
local current_tag
current_tag=$(git describe --tags --exact-match 2>/dev/null)

# Restore the original directory
popd > /dev/null 2>&1

# Check if the current tag is 2.0
if [ "$current_tag" = "$target_tag" ]; then
echo "Current Git tag in $repo_path is $target_tag"
else
echo "Error: Current Git tag in $repo_path is not $target_tag"
exit 1
fi
}


rm -r ~/.config/clan

if [ -z "$1" ]; then
echo "Usage: $0 <democlan>"
@@ -11,19 +39,16 @@ fi

democlan="$1"

clan history add "clan://$democlan#syncthing-peer1"
clan history add "clan://$democlan#syncthing-peer2"
check_git_tag "$democlan" "v2.2"

clan history add "clan://$democlan#moonlight-peer1"
clan history add "clan://$democlan#moonlight-peer2"
check_git_tag "." "v2.3"

rm -rf ~/.config/clan

clan history add "clan://$democlan#localsend-wayland1"

clear
cat << EOF
Open up this link in a browser:
"clan://$democlan#syncthing-introducer"
EOF

cat << EOF
Execute this command to show waypipe windows:
$ clan --flake $democlan vms run wayland
"clan://$democlan#localsend-wayland2"
EOF

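The check_git_tag helper above pins the demo to a known democlan revision by comparing git describe --tags --exact-match against an expected tag. A rough Python equivalent of the same check, for illustration only; the trailing call uses placeholder arguments:

# Rough Python sketch of the check_git_tag helper above, not part of the diff.
import subprocess
import sys


def check_git_tag(repo_path: str, target_tag: str) -> None:
    result = subprocess.run(
        ["git", "-C", repo_path, "describe", "--tags", "--exact-match"],
        capture_output=True,
        text=True,
    )
    current_tag = result.stdout.strip()
    if current_tag == target_tag:
        print(f"Current Git tag in {repo_path} is {target_tag}")
    else:
        print(f"Error: Current Git tag in {repo_path} is not {target_tag}")
        sys.exit(1)


check_git_tag(".", "v2.3")  # placeholder path and tag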
@@ -9,6 +9,9 @@ name = "clan-vm-manager"
dynamic = ["version"]
scripts = { clan-vm-manager = "clan_vm_manager:main" }

[tool.setuptools.packages.find]
exclude = ["result"]

[tool.setuptools.package-data]
clan_vm_manager = ["**/assets/*"]

@@ -30,5 +33,5 @@ ignore_missing_imports = true
[tool.ruff]
target-version = "py311"
line-length = 88
select = [ "E", "F", "I", "U", "N", "RUF", "ANN", "A" ]
ignore = ["E501", "E402", "N802", "ANN101", "ANN401", "A003"]
lint.select = [ "E", "F", "I", "U", "N", "RUF", "ANN", "A" ]
lint.ignore = ["E501", "E402", "N802", "ANN101", "ANN401", "A003"]

@@ -1,47 +1,59 @@
{ lib, stdenv, clan-vm-manager, gtk4, libadwaita, clan-cli, mkShell, ruff, desktop-file-utils, xdg-utils, mypy, python3Packages }:
mkShell {
inherit (clan-vm-manager) propagatedBuildInputs buildInputs;
{ lib, runCommand, makeWrapper, stdenv, clan-vm-manager, gdb, gtk4, libadwaita, clan-cli, mkShell, ruff, desktop-file-utils, xdg-utils, mypy, python3, python3Packages }:
mkShell (
let
pygdb = runCommand "pygdb" { buildInputs = [ gdb python3 makeWrapper ]; } ''
mkdir -p "$out/bin"
makeWrapper "${gdb}/bin/gdb" "$out/bin/pygdb" \
--add-flags '-ex "source ${python3}/share/gdb/libpython.py"'
'';
in
{
inherit (clan-vm-manager) propagatedBuildInputs buildInputs;

linuxOnlyPackages = lib.optionals stdenv.isLinux [
xdg-utils
];

nativeBuildInputs = [
ruff
desktop-file-utils
mypy
python3Packages.ipdb
gtk4.dev
libadwaita.devdoc # has the demo called 'adwaita-1-demo'
] ++ clan-vm-manager.nativeBuildInputs;

PYTHONBREAKPOINT = "ipdb.set_trace";

shellHook = ''
ln -sfT ${clan-cli.nixpkgs} ../clan-cli/clan_cli/nixpkgs

# prepend clan-cli for development
export PYTHONPATH=../clan-cli:$PYTHONPATH
linuxOnlyPackages = lib.optionals stdenv.isLinux [
xdg-utils
pygdb
];


if ! command -v xdg-mime &> /dev/null; then
echo "Warning: 'xdg-mime' is not available. The desktop file cannot be installed."
fi
# To debug clan-vm-manger execute pygdb --args python ./bin/clan-vm-manager
nativeBuildInputs = [
ruff
desktop-file-utils
mypy
python3Packages.ipdb
gtk4.dev
libadwaita.devdoc # has the demo called 'adwaita-1-demo'
] ++ clan-vm-manager.nativeBuildInputs;

# install desktop file
set -eou pipefail
DESKTOP_FILE_NAME=lol.clan.vm.manager.desktop
DESKTOP_DST=~/.local/share/applications/$DESKTOP_FILE_NAME
DESKTOP_SRC=${clan-vm-manager}/share/applications/$DESKTOP_FILE_NAME
UI_BIN="${clan-vm-manager}/bin/clan-vm-manager"
PYTHONBREAKPOINT = "ipdb.set_trace";

cp -f $DESKTOP_SRC $DESKTOP_DST
sleep 2
sed -i "s|Exec=.*clan-vm-manager|Exec=$UI_BIN|" $DESKTOP_DST
xdg-mime default $DESKTOP_FILE_NAME x-scheme-handler/clan
echo "==== Validating desktop file installation ===="
set -x
desktop-file-validate $DESKTOP_DST
set +xeou pipefail
'';
}
shellHook = ''
ln -sfT ${clan-cli.nixpkgs} ../clan-cli/clan_cli/nixpkgs

# prepend clan-cli for development
export PYTHONPATH=../clan-cli:$PYTHONPATH


if ! command -v xdg-mime &> /dev/null; then
echo "Warning: 'xdg-mime' is not available. The desktop file cannot be installed."
fi

# install desktop file
set -eou pipefail
DESKTOP_FILE_NAME=org.clan.vm-manager.desktop
DESKTOP_DST=~/.local/share/applications/$DESKTOP_FILE_NAME
DESKTOP_SRC=${clan-vm-manager}/share/applications/$DESKTOP_FILE_NAME
UI_BIN="${clan-vm-manager}/bin/clan-vm-manager"

cp -f $DESKTOP_SRC $DESKTOP_DST
sleep 2
sed -i "s|Exec=.*clan-vm-manager|Exec=$UI_BIN|" $DESKTOP_DST
xdg-mime default $DESKTOP_FILE_NAME x-scheme-handler/clan
echo "==== Validating desktop file installation ===="
set -x
desktop-file-validate $DESKTOP_DST
set +xeou pipefail
'';
}
)

@@ -1,26 +1,33 @@
{ writeShellApplication
{ bash
, callPackage
, coreutils
, bash
, git
, tea
, lib
, nix
, openssh
, tea
, tea-create-pr
, ...
}:
writeShellApplication {
name = "merge-after-ci";
runtimeInputs = [
bash
coreutils
git
tea
openssh
tea-create-pr
let
writers = callPackage ../builders/script-writers.nix { };
in
writers.writePython3Bin "merge-after-ci"
{
makeWrapperArgs = [
"--prefix"
"PATH"
":"
(lib.makeBinPath [
bash
coreutils
git
nix
openssh
tea
tea-create-pr
])
];
text = ''
remoteName="''${1:-origin}"
targetBranch="''${2:-main}"
shift && shift
tea-create-pr "$remoteName" "$targetBranch" --assignees clan-bot "$@"
'';
}
./merge-after-ci.py

24
pkgs/merge-after-ci/merge-after-ci.py
Normal file
@@ -0,0 +1,24 @@
import argparse
import subprocess

parser = argparse.ArgumentParser()
parser.add_argument("--reviewers", nargs="*")
parser.add_argument("--no-review", action="store_true")
parser.add_argument("args", nargs="*")
args = parser.parse_args()

# complain if neither --reviewers nor --no-review is given
if not args.reviewers and not args.no_review:
    parser.error("either --reviewers or --no-review must be given")

subprocess.run(
    [
        "tea-create-pr",
        "origin",
        "main",
        "--assignees",
        "clan-bot",
        *([*args.reviewers] if args.reviewers else []),
        *args.args,
    ]
)
39
pkgs/scripts/select-shell.py
Normal file
@@ -0,0 +1,39 @@
import argparse
import json
import pathlib
import subprocess
import sys

parser = argparse.ArgumentParser(description="Select a devshell")
parser.add_argument("shell", help="the name of the devshell to select", nargs="?")
parser.add_argument("--list", action="store_true", help="list available devshells")
args = parser.parse_args()

selected_shell_file = pathlib.Path(".direnv/selected-shell")

if not args.list and not args.shell:
    parser.print_help()
    exit(0)
if args.list:
    flake_show = subprocess.run(
        ["nix", "flake", "show", "--json", "--no-write-lock-file"],
        stdout=subprocess.PIPE,
    )
    data = json.loads(flake_show.stdout.decode())
    print("Available devshells:")
    print("\n".join(data["devShells"]["x86_64-linux"].keys()))
    exit(0)
if selected_shell_file.exists():
    with open(selected_shell_file) as f:
        current_shell = f.read().strip()
else:
    current_shell = ""

if current_shell == args.shell:
    print(f"{args.shell} devshell already selected. No changes made.")
    sys.exit(0)

with open(selected_shell_file, "w") as f:
    f.write(args.shell)

print(f"{args.shell} devshell selected")
@@ -10,5 +10,5 @@ exclude = "clan_cli.nixpkgs"
[tool.ruff]
line-length = 88
target-version = "py311"
select = [ "E", "F", "I", "U", "N", "RUF", "ANN", "A" ]
ignore = [ "E501", "ANN101", "ANN401", "A003"]
lint.select = [ "E", "F", "I", "U", "N", "RUF", "ANN", "A" ]
lint.ignore = [ "E501", "ANN101", "ANN401", "A003"]