Merge pull request 'zerotierone: fix with recent nixpkgs update' (#1902) from zerotierone into main

flake.lock (generated, 6 lines changed)

@@ -63,11 +63,11 @@
     },
     "nixpkgs": {
       "locked": {
-        "lastModified": 1723316219,
-        "narHash": "sha256-2B9qh8QBvw3kV/8cHc7ZJcrbVsRwP8wKjkwPXTSz76Y=",
+        "lastModified": 1723764322,
+        "narHash": "sha256-1V4C7f7sUjslxreqbpvBzitl9I2nY7nOrR17DjnatcU=",
         "owner": "NixOS",
         "repo": "nixpkgs",
-        "rev": "bef98989a27429e1cb9e3d9c25701ba2da742af2",
+        "rev": "03bd12dcf88b6aabc047fbf909bda4c7e344416c",
         "type": "github"
       },
       "original": {

nixosModules/bcachefs.nix (file header not captured in this view; path taken from the module list below)

@@ -1,17 +1,16 @@
-{ lib, pkgs, ... }:
-
 {
-  # use latest kernel we can support to get more hardware support
-  boot.kernelPackages =
-    lib.mkForce
-      (pkgs.zfs.override { removeLinuxDRM = pkgs.hostPlatform.isAarch64; }).latestCompatibleLinuxPackages;
-  boot.zfs.removeLinuxDRM = lib.mkDefault pkgs.hostPlatform.isAarch64;
+  lib,
+  pkgs,
+  config,
+  ...
+}:
+{
+  # If we also need zfs, we can use the unstable version as we otherwise don't have a new enough kernel version
+  boot.zfs.package = pkgs.zfsUnstable;
+  boot.kernelPackages = lib.mkIf config.boot.zfs.enabled (
+    lib.mkForce config.boot.zfs.package.latestCompatibleLinuxPackages
+  );
 
   # Enable bcachefs support
   boot.supportedFilesystems.bcachefs = lib.mkDefault true;
-
-  environment.systemPackages = with pkgs; [
-    bcachefs-tools
-    keyutils
-  ];
 }
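
The kernel override is now gated on ZFS actually being in use: config.boot.zfs.enabled is a read-only NixOS option that, roughly speaking, becomes true once ZFS support is pulled in, for example via a ZFS entry in fileSystems or boot.supportedFilesystems.zfs. A minimal sketch of a downstream machine configuration that would activate the override above (the pool and dataset names are hypothetical):

{
  # Declaring a ZFS filesystem enables ZFS support, which sets
  # config.boot.zfs.enabled and thereby activates the lib.mkIf
  # kernel override in the module above.
  fileSystems."/" = {
    device = "zpool/root"; # hypothetical dataset
    fsType = "zfs";
  };
}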

clanCore zerotier module (file path not captured in this view)

@@ -89,11 +89,7 @@ in
     ({
       # Override license so that we can build zerotierone without
       # having to re-import nixpkgs.
-      services.zerotierone.package = lib.mkDefault (
-        pkgs.zerotierone.overrideAttrs (_old: {
-          meta = { };
-        })
-      );
+      services.zerotierone.package = lib.mkDefault (pkgs.callPackage ../../../pkgs/zerotierone { });
     })
     (lib.mkIf ((facts.zerotier-ip.value or null) != null) {
       environment.etc."zerotier/ip".text = facts.zerotier-ip.value;
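
The package is still set with lib.mkDefault, so a machine configuration can override it without any mkForce gymnastics. A minimal sketch (hypothetical downstream config) that opts back into the plain nixpkgs build:

{ pkgs, ... }:
{
  # A normal assignment takes precedence over the mkDefault set by the module above.
  services.zerotierone.package = pkgs.zerotierone;
}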

nixosModules flake-module (file path not captured in this view)

@@ -3,10 +3,12 @@
   flake.nixosModules = {
     hidden-ssh-announce.imports = [ ./hidden-ssh-announce.nix ];
     bcachefs.imports = [ ./bcachefs.nix ];
+    zfs.imports = [ ./zfs.nix ];
     installer.imports = [
       ./installer
       self.nixosModules.hidden-ssh-announce
       self.nixosModules.bcachefs
+      self.nixosModules.zfs
     ];
     clanCore.imports = [
       inputs.sops-nix.nixosModules.sops
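
The new ZFS module is both exported as a flake output and baked into the installer image. A minimal sketch of how a consuming configuration could import it, assuming this flake is available as an input named clan-core passed through specialArgs (the input name is an assumption, not part of this commit):

{ clan-core, ... }:
{
  imports = [ clan-core.nixosModules.zfs ];
}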

nixosModules/zfs.nix (new file, 16 lines)

@@ -0,0 +1,16 @@
{ lib, config, ... }:
{
  # Use the same default hostID as the NixOS install ISO and nixos-anywhere.
  # This allows us to import zfs pool without using a force import.
  # ZFS has this as a safety mechanism for networked block storage (ISCSI), but
  # in practice we found it causes more breakages like unbootable machines,
  # while people using ZFS on ISCSI is quite rare.
  networking.hostId = lib.mkDefault "8425e349";

  services.zfs = lib.mkIf (config.boot.zfs.enabled) {
    autoSnapshot.enable = true;
    # defaults to 12, which is a bit much given how much data is written
    autoSnapshot.monthly = lib.mkDefault 1;
    autoScrub.enable = true;
  };
}
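
Both the hostID and the snapshot settings use lib.mkDefault, so a machine that needs a unique hostID (for example because it really does import pools over networked storage) can still set its own. A minimal sketch with a hypothetical per-machine value:

{
  # Overrides the mkDefault "8425e349" from the module above. A unique ID is
  # usually generated once, e.g. with: head -c4 /dev/urandom | od -A none -t x4
  networking.hostId = "deadbeef"; # hypothetical value
}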

pkgs flake-module (file path not captured in this view)

@@ -20,31 +20,18 @@
   };
 
   perSystem =
-    {
-      pkgs,
-      config,
-      lib,
-      ...
-    }:
+    { pkgs, config, ... }:
     {
-      packages =
-        {
-          tea-create-pr = pkgs.callPackage ./tea-create-pr { };
-          zerotier-members = pkgs.callPackage ./zerotier-members { };
-          zt-tcp-relay = pkgs.callPackage ./zt-tcp-relay { };
-          moonlight-sunshine-accept = pkgs.callPackage ./moonlight-sunshine-accept { };
-          merge-after-ci = pkgs.callPackage ./merge-after-ci { inherit (config.packages) tea-create-pr; };
-          pending-reviews = pkgs.callPackage ./pending-reviews { };
-          editor = pkgs.callPackage ./editor/clan-edit-codium.nix { };
-          classgen = pkgs.callPackage ./classgen { };
-        }
-        // lib.optionalAttrs pkgs.stdenv.isLinux {
-          # halalify zerotierone
-          zerotierone = pkgs.zerotierone.overrideAttrs (_old: {
-            meta = _old.meta // {
-              license = lib.licenses.apsl20;
-            };
-          });
-        };
+      packages = {
+        tea-create-pr = pkgs.callPackage ./tea-create-pr { };
+        zerotier-members = pkgs.callPackage ./zerotier-members { };
+        zt-tcp-relay = pkgs.callPackage ./zt-tcp-relay { };
+        moonlight-sunshine-accept = pkgs.callPackage ./moonlight-sunshine-accept { };
+        merge-after-ci = pkgs.callPackage ./merge-after-ci { inherit (config.packages) tea-create-pr; };
+        pending-reviews = pkgs.callPackage ./pending-reviews { };
+        editor = pkgs.callPackage ./editor/clan-edit-codium.nix { };
+        classgen = pkgs.callPackage ./classgen { };
+        zerotierone = pkgs.callPackage ./zerotierone { };
+      };
     };
 }
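
Two things fall out of this restructuring: the package set is now a plain attribute set (the lib.optionalAttrs pkgs.stdenv.isLinux wrapper is gone, since the new derivation carries its own Darwin preBuild), and because flake-parts maps perSystem packages to regular flake outputs, the patched build can be tested on its own with something like nix build .#zerotierone from the repository root.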

pkgs/zerotierone/Cargo.lock (generated, new file, 3562 lines)

File diff suppressed because it is too large.

pkgs/zerotierone/default.nix (new file, 53 lines)

@@ -0,0 +1,53 @@
{
  callPackage,
  zerotierone,
  fetchFromGitHub,
  lib,
  stdenv,
}:
let
  importCargoLock = callPackage ./import-cargo-lock.nix { };
in
zerotierone.overrideAttrs (old: {
  name = "zerotierone-1.14.0-unstable-2024-07-31";
  src = fetchFromGitHub {
    owner = "zerotier";
    repo = "ZeroTierOne";
    rev = "f176e2539e10e8c0f61eb1d2e1f0e690a267a646";
    hash = "sha256-pGozwaBy9eMA8izYtGhhmJeHzGjHFLID7WC01977XxQ=";
  };
  cargoDeps = importCargoLock {
    lockFile = ./Cargo.lock;
    outputHashes = {
      "jwt-0.16.0" = "sha256-P5aJnNlcLe9sBtXZzfqHdRvxNfm6DPBcfcKOVeLZxcM=";
      "rustfsm-0.1.0" = "sha256-AYMk31QuwB1R/yr1wNl9MSWL52ERJMtkR4aSPf2waWs=";
    };
  };
  patches = [ ];
  postPatch = "cp ${./Cargo.lock} Cargo.lock";

  preBuild =
    if stdenv.isDarwin then
      ''
        makeFlagsArray+=("ARCH_FLAGS=") # disable multi-arch build
        if ! grep -q MACOS_VERSION_MIN=10.13 make-mac.mk; then
          echo "You may need to update MACOSX_DEPLOYMENT_TARGET to match the value in make-mac.mk"
          exit 1
        fi
        (cd rustybits && MACOSX_DEPLOYMENT_TARGET=10.13 cargo build -p zeroidc --release)

        cp \
          ./rustybits/target/${stdenv.hostPlatform.rust.rustcTarget}/release/libzeroidc.a \
          ./rustybits/target

        # zerotier uses the "FORCE" target as a phony target to force rebuilds.
        # We don't want to rebuild libzeroidc.a as we build want to build this library ourself for a single architecture
        touch FORCE
      ''
    else
      old.preBuild;
  meta = old.meta // {
    # halalify zerotierone
    license = lib.licenses.apsl20;
  };
})
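
This keeps the nixpkgs build recipe (via overrideAttrs) but swaps in a pinned upstream source plus a vendored Cargo dependency set, so the package keeps building even while the zerotierone expression in nixpkgs lags behind. A minimal sketch, assuming it is evaluated from the repository root against some nixpkgs, of building this expression on its own:

# sketch.nix: hypothetical standalone evaluation of the package above
let
  pkgs = import <nixpkgs> { };
in
pkgs.callPackage ./pkgs/zerotierone { }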

pkgs/zerotierone/import-cargo-lock.nix (new file, 309 lines)

@@ -0,0 +1,309 @@
{
  fetchgit,
  fetchurl,
  lib,
  writers,
  python3Packages,
  runCommand,
  cargo,
  jq,
}:

{
  # Cargo lock file
  lockFile ? null,

  # Cargo lock file contents as string
  lockFileContents ? null,

  # Allow `builtins.fetchGit` to be used to not require hashes for git dependencies
  allowBuiltinFetchGit ? false,

  # Additional registries to pull sources from
  #   { "https://<registry index URL>" = "https://<registry download URL>"; }
  # or if the registry is using the new sparse protocol
  #   { "sparse+https://<registry download URL>" = "https://<registry download URL>"; }
  # where:
  # - "index URL" is the "index" value of the configuration entry for that registry
  #   https://doc.rust-lang.org/cargo/reference/registries.html#using-an-alternate-registry
  # - "download URL" is the "dl" value of its associated index configuration
  #   https://doc.rust-lang.org/cargo/reference/registry-index.html#index-configuration
  extraRegistries ? { },

  # Hashes for git dependencies.
  outputHashes ? { },
}@args:

assert (lockFile == null) != (lockFileContents == null);

let
  # Parse a git source into different components.
  parseGit =
    src:
    let
      parts = builtins.match ''git\+([^?]+)(\?(rev|tag|branch)=(.*))?#(.*)'' src;
      type = builtins.elemAt parts 2; # rev, tag or branch
      value = builtins.elemAt parts 3;
    in
    if parts == null then
      null
    else
      {
        url = builtins.elemAt parts 0;
        sha = builtins.elemAt parts 4;
      }
      // lib.optionalAttrs (type != null) { inherit type value; };

  # shadows args.lockFileContents
  lockFileContents = if lockFile != null then builtins.readFile lockFile else args.lockFileContents;

  parsedLockFile = builtins.fromTOML lockFileContents;

  packages = parsedLockFile.package;

  # There is no source attribute for the source package itself. But
  # since we do not want to vendor the source package anyway, we can
  # safely skip it.
  depPackages = builtins.filter (p: p ? "source") packages;

  # Create dependent crates from packages.
  #
  # Force evaluation of the git SHA -> hash mapping, so that an error is
  # thrown if there are stale hashes. We cannot rely on gitShaOutputHash
  # being evaluated otherwise, since there could be no git dependencies.
  depCrates = builtins.deepSeq gitShaOutputHash (builtins.map mkCrate depPackages);

  # Map package name + version to git commit SHA for packages with a git source.
  namesGitShas = builtins.listToAttrs (
    builtins.map nameGitSha (builtins.filter (pkg: lib.hasPrefix "git+" pkg.source) depPackages)
  );

  nameGitSha =
    pkg:
    let
      gitParts = parseGit pkg.source;
    in
    {
      name = "${pkg.name}-${pkg.version}";
      value = gitParts.sha;
    };

  # Convert the attrset provided through the `outputHashes` argument to a
  # a mapping from git commit SHA -> output hash.
  #
  # There may be multiple different packages with different names
  # originating from the same git repository (typically a Cargo
  # workspace). By using the git commit SHA as a universal identifier,
  # the user does not have to specify the output hash for every package
  # individually.
  gitShaOutputHash = lib.mapAttrs' (
    nameVer: hash:
    let
      unusedHash = throw "A hash was specified for ${nameVer}, but there is no corresponding git dependency.";
      rev = namesGitShas.${nameVer} or unusedHash;
    in
    {
      name = rev;
      value = hash;
    }
  ) outputHashes;

  # We can't use the existing fetchCrate function, since it uses a
  # recursive hash of the unpacked crate.
  fetchCrate =
    pkg: downloadUrl:
    let
      checksum =
        pkg.checksum or parsedLockFile.metadata."checksum ${pkg.name} ${pkg.version} (${pkg.source})";
    in
    assert lib.assertMsg (checksum != null) ''
      Package ${pkg.name} does not have a checksum.
    '';
    fetchurl {
      name = "crate-${pkg.name}-${pkg.version}.tar.gz";
      url = "${downloadUrl}/${pkg.name}/${pkg.version}/download";
      sha256 = checksum;
    };

  registries = {
    "https://github.com/rust-lang/crates.io-index" = "https://crates.io/api/v1/crates";
  } // extraRegistries;

  # Replaces values inherited by workspace members.
  replaceWorkspaceValues = writers.writePython3 "replace-workspace-values" {
    libraries = with python3Packages; [
      tomli
      tomli-w
    ];
    flakeIgnore = [
      "E501"
      "W503"
    ];
  } (builtins.readFile ./replace-workspace-values.py);

  # Fetch and unpack a crate.
  mkCrate =
    pkg:
    let
      gitParts = parseGit pkg.source;
      registryIndexUrl = lib.removePrefix "registry+" pkg.source;
    in
    if
      (lib.hasPrefix "registry+" pkg.source || lib.hasPrefix "sparse+" pkg.source)
      && builtins.hasAttr registryIndexUrl registries
    then
      let
        crateTarball = fetchCrate pkg registries.${registryIndexUrl};
      in
      runCommand "${pkg.name}-${pkg.version}" { } ''
        mkdir $out
        tar xf "${crateTarball}" -C $out --strip-components=1

        # Cargo is happy with largely empty metadata.
        printf '{"files":{},"package":"${crateTarball.outputHash}"}' > "$out/.cargo-checksum.json"
      ''
    else if gitParts != null then
      let
        missingHash = throw ''
          No hash was found while vendoring the git dependency ${pkg.name}-${pkg.version}. You can add
          a hash through the `outputHashes` argument of `importCargoLock`:

          outputHashes = {
            "${pkg.name}-${pkg.version}" = "<hash>";
          };

          If you use `buildRustPackage`, you can add this attribute to the `cargoLock`
          attribute set.
        '';
        tree =
          if gitShaOutputHash ? ${gitParts.sha} then
            fetchgit {
              inherit (gitParts) url;
              rev = gitParts.sha; # The commit SHA is always available.
              sha256 = gitShaOutputHash.${gitParts.sha};
            }
          else if allowBuiltinFetchGit then
            builtins.fetchGit {
              inherit (gitParts) url;
              rev = gitParts.sha;
              allRefs = true;
              submodules = true;
            }
          else
            missingHash;
      in
      runCommand "${pkg.name}-${pkg.version}" { } ''
        tree=${tree}

        # If the target package is in a workspace, or if it's the top-level
        # crate, we should find the crate path using `cargo metadata`.
        # Some packages do not have a Cargo.toml at the top-level,
        # but only in nested directories.
        # Only check the top-level Cargo.toml, if it actually exists
        if [[ -f $tree/Cargo.toml ]]; then
          crateCargoTOML=$(${cargo}/bin/cargo metadata --format-version 1 --no-deps --manifest-path $tree/Cargo.toml | \
          ${jq}/bin/jq -r '.packages[] | select(.name == "${pkg.name}") | .manifest_path')
        fi

        # If the repository is not a workspace the package might be in a subdirectory.
        if [[ -z $crateCargoTOML ]]; then
          for manifest in $(find $tree -name "Cargo.toml"); do
            echo Looking at $manifest
            crateCargoTOML=$(${cargo}/bin/cargo metadata --format-version 1 --no-deps --manifest-path "$manifest" | ${jq}/bin/jq -r '.packages[] | select(.name == "${pkg.name}") | .manifest_path' || :)
            if [[ ! -z $crateCargoTOML ]]; then
              break
            fi
          done

          if [[ -z $crateCargoTOML ]]; then
            >&2 echo "Cannot find path for crate '${pkg.name}-${pkg.version}' in the tree in: $tree"
            exit 1
          fi
        fi

        echo Found crate ${pkg.name} at $crateCargoTOML
        tree=$(dirname $crateCargoTOML)

        cp -prvL "$tree/" $out
        chmod u+w $out

        if grep -q workspace "$out/Cargo.toml"; then
          chmod u+w "$out/Cargo.toml"
          ${replaceWorkspaceValues} "$out/Cargo.toml" "$(${cargo}/bin/cargo metadata --format-version 1 --no-deps --manifest-path $crateCargoTOML | ${jq}/bin/jq -r .workspace_root)/Cargo.toml"
        fi

        # Cargo is happy with empty metadata.
        printf '{"files":{},"package":null}' > "$out/.cargo-checksum.json"

        # Set up configuration for the vendor directory.
        cat > $out/.cargo-config <<EOF
        [source."${gitParts.url}${
          lib.optionalString (gitParts ? type) "?${gitParts.type}=${gitParts.value}"
        }"]
        git = "${gitParts.url}"
        ${lib.optionalString (gitParts ? type) "${gitParts.type} = \"${gitParts.value}\""}
        replace-with = "vendored-sources"
        EOF
      ''
    else
      throw "Cannot handle crate source: ${pkg.source}";

  vendorDir =
    runCommand "cargo-vendor-dir"
      (
        if lockFile == null then
          {
            inherit lockFileContents;
            passAsFile = [ "lockFileContents" ];
          }
        else
          {
            passthru = {
              inherit lockFile;
            };
          }
      )
      ''
        mkdir -p $out/.cargo

        ${
          if lockFile != null then
            "ln -s ${lockFile} $out/Cargo.lock"
          else
            "cp $lockFileContentsPath $out/Cargo.lock"
        }

        cat > $out/.cargo/config <<EOF
        [source.crates-io]
        replace-with = "vendored-sources"

        [source.vendored-sources]
        directory = "cargo-vendor-dir"
        EOF

        declare -A keysSeen

        for registry in ${toString (builtins.attrNames extraRegistries)}; do
          cat >> $out/.cargo/config <<EOF

        [source."$registry"]
        registry = "$registry"
        replace-with = "vendored-sources"
        EOF
        done

        for crate in ${toString depCrates}; do
          # Link the crate directory, removing the output path hash from the destination.
          ln -s "$crate" $out/$(basename "$crate" | cut -c 34-)

          if [ -e "$crate/.cargo-config" ]; then
            key=$(sed 's/\[source\."\(.*\)"\]/\1/; t; d' < "$crate/.cargo-config")
            if [[ -z ''${keysSeen[$key]} ]]; then
              keysSeen[$key]=1
              cat "$crate/.cargo-config" >> $out/.cargo/config
            fi
          fi
        done
      '';
in
vendorDir
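
This helper appears to be a vendored copy of the importCargoLock function from nixpkgs (rustPlatform.importCargoLock): it turns a Cargo.lock into a directory of fixed-output fetches that Cargo can use as a vendored source, with git dependencies pinned through outputHashes (keyed by "name-version" and mapped internally to the git commit SHA). A minimal usage sketch with a hypothetical crate and git dependency, assuming the sketch file sits next to import-cargo-lock.nix:

{
  pkgs ? import <nixpkgs> { },
}:
let
  importCargoLock = pkgs.callPackage ./import-cargo-lock.nix { };
in
importCargoLock {
  lockFile = ./Cargo.lock; # hypothetical: any Cargo.lock works
  outputHashes = {
    # one entry per git dependency; on mismatch Nix reports the hash to use
    "some-git-dep-0.1.0" = pkgs.lib.fakeHash;
  };
}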

pkgs/zerotierone/replace-workspace-values.py (new file, 128 lines)

@@ -0,0 +1,128 @@
# This script implements the workspace inheritance mechanism described
# here: https://doc.rust-lang.org/cargo/reference/workspaces.html#the-package-table
#
# Please run `mypy --strict`, `black`, and `isort --profile black` on this after editing, thanks!

import sys
from typing import Any, Literal, assert_type

import tomli
import tomli_w


def load_file(path: str) -> dict[str, Any]:
    with open(path, "rb") as f:
        return tomli.load(f)


# This replicates the dependency merging logic from Cargo.
# See `inner_dependency_inherit_with`:
# https://github.com/rust-lang/cargo/blob/4de0094ac78743d2c8ff682489e35c8a7cafe8e4/src/cargo/util/toml/mod.rs#L982
def replace_key(
    workspace_manifest: dict[str, Any],
    table: dict[str, Any],
    section: Literal["package", "dependencies"],
    key: str,
) -> bool:
    if not isinstance(table[key], dict) or table[key].get("workspace") is not True:
        return False
    print("replacing " + key)

    local_dep = table[key]
    del local_dep["workspace"]

    workspace_dep: str | dict[str, Any] = workspace_manifest[section][key]

    if section == "package":
        table[key] = workspace_dep
        return True

    _ = assert_type(section, Literal["dependencies"])

    if isinstance(workspace_dep, str):
        workspace_dep = {"version": workspace_dep}

    final: dict[str, Any] = workspace_dep.copy()

    merged_features = local_dep.pop("features", []) + workspace_dep.get("features", [])
    if merged_features:
        final["features"] = merged_features

    local_default_features = local_dep.pop("default-features", None)
    workspace_default_features = workspace_dep.get("default-features")

    if not workspace_default_features and local_default_features:
        final["default-features"] = True

    optional = local_dep.pop("optional", False)
    if optional:
        final["optional"] = True

    if local_dep:
        raise Exception(f"Unhandled keys in inherited dependency {key}: {local_dep}")

    table[key] = final

    # crate_features = local_dep.get("features", [])

    # local_dep.update(workspace_copy)
    # merged_features = crate_features + workspace_copy.get("features", [])
    # if len(merged_features) > len(crate_features):
    #     local_dep["features"] = list(dict.fromkeys(merged_features))

    return True


def replace_dependencies(
    workspace_manifest: dict[str, Any], root: dict[str, Any]
) -> bool:
    changed = False

    for key in ["dependencies", "dev-dependencies", "build-dependencies"]:
        deps = root.get(key, {})
        for k in deps:
            changed |= replace_key(workspace_manifest, deps, "dependencies", k)

    return changed


def main() -> None:
    top_cargo_toml = load_file(sys.argv[2])

    if "workspace" not in top_cargo_toml:
        # If top_cargo_toml is not a workspace manifest, then this script was probably
        # ran on something that does not actually use workspace dependencies
        raise Exception(f"{sys.argv[2]} is not a workspace manifest.")

    crate_manifest = load_file(sys.argv[1])
    workspace_manifest = top_cargo_toml["workspace"]

    if "workspace" in crate_manifest:
        print(f"{sys.argv[1]} is a workspace manifest, skipping", file=sys.stderr)
        return

    changed = False

    for key in crate_manifest["package"].keys():
        changed |= replace_key(
            workspace_manifest, crate_manifest["package"], "package", key
        )

    changed |= replace_dependencies(workspace_manifest, crate_manifest)

    for value in crate_manifest.get("target", {}).values():
        changed |= replace_dependencies(workspace_manifest, value)

    if crate_manifest.get("lints", {}).get("workspace") is True:
        changed = True
        crate_manifest["lints"] = workspace_manifest["lints"]

    if not changed:
        return print(f"{sys.argv[1]} is unchanged, skipping", file=sys.stderr)

    with open(sys.argv[1], "wb") as f:
        tomli_w.dump(crate_manifest, f)


if __name__ == "__main__":
    main()
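
import-cargo-lock.nix invokes this script with the crate's Cargo.toml as the first argument and the workspace root's Cargo.toml as the second, rewriting `workspace = true` entries in place. For example, a workspace entry of `serde = { version = "1.0", features = ["std"] }` combined with a member entry of `serde = { workspace = true, features = ["derive"] }` is rewritten to `serde = { version = "1.0", features = ["derive", "std"] }` in the member manifest.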