Merge pull request 'add PT linting' (#2018) from type-checking into main

This commit is contained in:
clan-bot
2024-09-02 14:16:18 +00:00
41 changed files with 218 additions and 4309 deletions

View File

@@ -104,7 +104,6 @@ class GObjApi:
for m_name, m_signature in fn_signatures.items():
if m_name not in overwrite_fns:
continue
else:
# check if the signature of the overriden method matches
# the implementation signature
exp_args = []
@@ -146,11 +145,10 @@ class GObjApi:
self.thread = MethodExecutor(plain_fn, *args, **kwargs)
self.thread.start()
return GLib.SOURCE_CONTINUE
elif self.thread.finished:
if self.thread.finished:
result = self.thread.result
self.returns(method_name=fn_name, result=result)
return GLib.SOURCE_REMOVE
else:
return GLib.SOURCE_CONTINUE
return cast(type[ImplFunc], GenericFnRuntime)

View File

@@ -67,9 +67,7 @@ class WebExecutor(GObject.Object):
if self.content_uri.startswith("http://") and uri.startswith(self.content_uri):
log.debug(f"Allow navigation request: {uri}")
return False
elif self.content_uri.startswith("file://") and uri.startswith(
self.content_uri
):
if self.content_uri.startswith("file://") and uri.startswith(self.content_uri):
log.debug(f"Allow navigation request: {uri}")
return False
log.warning(

View File

@@ -19,7 +19,7 @@ def wayland_compositor() -> Generator[Popen, None, None]:
GtkProc = NewType("GtkProc", Popen)
@pytest.fixture(scope="function")
@pytest.fixture
def app() -> Generator[GtkProc, None, None]:
rapp = Popen([sys.executable, "-m", "clan_app"], text=True)
yield GtkProc(rapp)

View File

@@ -56,7 +56,8 @@ def get_directory(current_path: str) -> Directory:
directory = Directory(path=str(curr_dir))
if not curr_dir.is_dir():
raise ClanError()
msg = f"Path {curr_dir} is not a directory"
raise ClanError(msg)
with os.scandir(curr_dir.resolve()) as it:
for entry in it:

View File

@@ -71,15 +71,14 @@ def dataclass_to_dict(obj: Any, *, use_alias: bool = True) -> Any:
if not field.name.startswith("_")
and getattr(obj, field.name) is not None # type: ignore
}
elif isinstance(obj, list | tuple):
if isinstance(obj, list | tuple):
return [_to_dict(item) for item in obj]
elif isinstance(obj, dict):
if isinstance(obj, dict):
return {sanitize_string(k): _to_dict(v) for k, v in obj.items()}
elif isinstance(obj, Path):
if isinstance(obj, Path):
return sanitize_string(str(obj))
elif isinstance(obj, str):
if isinstance(obj, str):
return sanitize_string(obj)
else:
return obj
return _to_dict(obj)
@@ -144,7 +143,7 @@ def construct_value(
# If the field expects a path
# Field_value must be a string
elif is_type_in_union(t, Path):
if is_type_in_union(t, Path):
if not isinstance(field_value, str):
msg = (
f"Expected string, cannot construct pathlib.Path() from: {field_value} "
@@ -157,22 +156,22 @@ def construct_value(
return Path(field_value)
# Trivial values
elif t is str:
if t is str:
if not isinstance(field_value, str):
msg = f"Expected string, got {field_value}"
raise ClanError(msg, location=f"{loc}")
return field_value
elif t is int and not isinstance(field_value, str):
if t is int and not isinstance(field_value, str):
return int(field_value) # type: ignore
elif t is float and not isinstance(field_value, str):
if t is float and not isinstance(field_value, str):
return float(field_value) # type: ignore
elif t is bool and isinstance(field_value, bool):
if t is bool and isinstance(field_value, bool):
return field_value # type: ignore
# Union types construct the first non-None type
elif is_union_type(t):
if is_union_type(t):
# Unwrap the union type
inner = unwrap_none_type(t)
# Construct the field value
@@ -181,32 +180,31 @@ def construct_value(
# Nested types
# list
# dict
elif get_origin(t) is list:
if get_origin(t) is list:
if not isinstance(field_value, list):
msg = f"Expected list, got {field_value}"
raise ClanError(msg, location=f"{loc}")
return [construct_value(get_args(t)[0], item) for item in field_value]
elif get_origin(t) is dict and isinstance(field_value, dict):
if get_origin(t) is dict and isinstance(field_value, dict):
return {
key: construct_value(get_args(t)[1], value)
for key, value in field_value.items()
}
elif get_origin(t) is Literal:
if get_origin(t) is Literal:
valid_values = get_args(t)
if field_value not in valid_values:
msg = f"Expected one of {valid_values}, got {field_value}"
raise ClanError(msg, location=f"{loc}")
return field_value
elif get_origin(t) is Annotated:
if get_origin(t) is Annotated:
(base_type,) = get_args(t)
return construct_value(base_type, field_value)
# elif get_origin(t) is Union:
# Unhandled
else:
msg = f"Unhandled field type {t} with value {field_value}"
raise ClanError(msg)
@@ -274,5 +272,4 @@ def from_dict(
msg = f"{data} is not a dict. Expected {t}"
raise ClanError(msg)
return construct_dataclass(t, data, path) # type: ignore
else:
return construct_value(t, data, path)

View File

@@ -112,7 +112,7 @@ def type_to_dict(
"additionalProperties": False,
}
elif type(t) is UnionType:
if type(t) is UnionType:
return {
"oneOf": [type_to_dict(arg, scope, type_map) for arg in t.__args__],
}
@@ -126,7 +126,7 @@ def type_to_dict(
raise JSchemaTypeError(msg)
return type_to_dict(type_map.get(t), scope, type_map)
elif hasattr(t, "__origin__"): # Check if it's a generic type
if hasattr(t, "__origin__"): # Check if it's a generic type
origin = get_origin(t)
args = get_args(t)
@@ -136,41 +136,40 @@ def type_to_dict(
msg = f"{scope} Unhandled Type: "
raise JSchemaTypeError(msg, origin)
elif origin is Literal:
if origin is Literal:
# Handle Literal values for enums in JSON Schema
return {
"type": "string",
"enum": list(args), # assumes all args are strings
}
elif origin is Annotated:
if origin is Annotated:
base_type, *metadata = get_args(t)
schema = type_to_dict(base_type, scope) # Generate schema for the base type
return apply_annotations(schema, metadata)
elif origin is Union:
if origin is Union:
union_types = [type_to_dict(arg, scope, type_map) for arg in t.__args__]
return {
"oneOf": union_types,
}
elif origin in {list, set, frozenset}:
if origin in {list, set, frozenset}:
return {
"type": "array",
"items": type_to_dict(t.__args__[0], scope, type_map),
}
elif issubclass(origin, dict):
if issubclass(origin, dict):
value_type = t.__args__[1]
if value_type is Any:
return {"type": "object", "additionalProperties": True}
else:
return {
"type": "object",
"additionalProperties": type_to_dict(value_type, scope, type_map),
}
# Generic dataclass with type parameters
elif dataclasses.is_dataclass(origin):
if dataclasses.is_dataclass(origin):
# This behavior should mimic the scoping of typeVars in dataclasses
# Once type_to_dict() encounters a TypeVar, it will look up the type in the type_map
# When type_to_dict() returns the map goes out of scope.
@@ -182,7 +181,7 @@ def type_to_dict(
msg = f"{scope} - Error api type not yet supported {t!s}"
raise JSchemaTypeError(msg)
elif isinstance(t, type):
if isinstance(t, type):
if t is str:
return {"type": "string"}
if t is int:
@@ -211,6 +210,5 @@ def type_to_dict(
msg = f"{scope} - Error primitive type not supported {t!s}"
raise JSchemaTypeError(msg)
else:
msg = f"{scope} - Error type not supported {t!s}"
raise JSchemaTypeError(msg)

View File

@@ -24,7 +24,6 @@ def create_backup(machine: Machine, provider: str | None = None) -> None:
if proc.returncode != 0:
msg = "failed to start backup"
raise ClanError(msg)
else:
print("successfully started backup")
else:
if provider not in backup_scripts["providers"]:
@@ -36,7 +35,6 @@ def create_backup(machine: Machine, provider: str | None = None) -> None:
if proc.returncode != 0:
msg = "failed to start backup"
raise ClanError(msg)
else:
print("successfully started backup")

View File

@@ -30,12 +30,9 @@ def list_provider(machine: Machine, provider: str) -> list[Backup]:
# TODO this should be a warning, only raise exception if no providers succeed
msg = f"failed to list backups for provider {provider}: {proc.stdout}"
raise ClanError(msg)
else:
parsed_json = json.loads(proc.stdout)
for archive in parsed_json:
results.append(
Backup(name=archive["name"], job_name=archive.get("job_name"))
)
results.append(Backup(name=archive["name"], job_name=archive.get("job_name")))
return results

View File

@@ -53,7 +53,6 @@ def show_clan_meta(uri: str | Path) -> Meta:
description="Icon path must be a URL or a relative path.",
)
else:
icon_path = str((Path(uri) / meta_icon).resolve())
else:
msg = "Invalid schema"

View File

@@ -171,6 +171,5 @@ def run_no_stdout(
cwd = Path.cwd()
if logging.getLogger(__name__.split(".")[0]).isEnabledFor(logging.DEBUG):
return run(cmd, env=env, log=log, check=check, error_msg=error_msg)
else:
log = Log.NONE
return run(cmd, env=env, log=log, check=check, error_msg=error_msg)

View File

@@ -22,24 +22,23 @@ log = logging.getLogger(__name__)
def map_type(nix_type: str) -> Any:
if nix_type == "boolean":
return bool
elif nix_type in [
if nix_type in [
"integer",
"signed integer",
"16 bit unsigned integer; between 0 and 65535 (both inclusive)",
]:
return int
elif nix_type.startswith("string"):
if nix_type.startswith("string"):
return str
elif nix_type.startswith("null or "):
if nix_type.startswith("null or "):
subtype = nix_type.removeprefix("null or ")
return map_type(subtype) | None
elif nix_type.startswith("attribute set of"):
if nix_type.startswith("attribute set of"):
subtype = nix_type.removeprefix("attribute set of ")
return dict[str, map_type(subtype)] # type: ignore
elif nix_type.startswith("list of"):
if nix_type.startswith("list of"):
subtype = nix_type.removeprefix("list of ")
return list[map_type(subtype)] # type: ignore
else:
msg = f"Unknown type {nix_type}"
raise ClanError(msg)
@@ -77,27 +76,25 @@ def cast(value: Any, input_type: Any, opt_description: str) -> Any:
if isinstance(input_type, bool):
if value[0] in ["true", "True", "yes", "y", "1"]:
return True
elif value[0] in ["false", "False", "no", "n", "0"]:
if value[0] in ["false", "False", "no", "n", "0"]:
return False
else:
msg = f"Invalid value {value} for boolean"
raise ClanError(msg)
# handle lists
elif get_origin(input_type) is list:
if get_origin(input_type) is list:
subtype = input_type.__args__[0]
return [cast([x], subtype, opt_description) for x in value]
# handle dicts
elif get_origin(input_type) is dict:
if get_origin(input_type) is dict:
if not isinstance(value, dict):
msg = f"Cannot set {opt_description} directly. Specify a suboption like {opt_description}.<name>"
raise ClanError(msg)
subtype = input_type.__args__[1]
return {k: cast(v, subtype, opt_description) for k, v in value.items()}
elif str(input_type) == "str | None":
if str(input_type) == "str | None":
if value[0] in ["null", "None"]:
return None
return value[0]
else:
if len(value) > 1:
msg = f"Too many values for {opt_description}"
raise ClanError(msg)

View File

@@ -41,19 +41,17 @@ def subtype_from_schema(schema: dict[str, Any]) -> type:
if "additionalProperties" in schema:
sub_type = subtype_from_schema(schema["additionalProperties"])
return dict[str, sub_type] # type: ignore
elif "properties" in schema:
if "properties" in schema:
msg = "Nested dicts are not supported"
raise ClanError(msg)
else:
msg = "Unknown object type"
raise ClanError(msg)
elif schema["type"] == "array":
if schema["type"] == "array":
if "items" not in schema:
msg = "Untyped arrays are not supported"
raise ClanError(msg)
sub_type = subtype_from_schema(schema["items"])
return list[sub_type] # type: ignore
else:
return type_map[schema["type"]]
@@ -66,17 +64,15 @@ def type_from_schema_path(
full_path = path
if len(path) == 0:
return subtype_from_schema(schema)
elif schema["type"] == "object":
if schema["type"] == "object":
if "properties" in schema:
subtype = type_from_schema_path(schema["properties"][path[0]], path[1:])
return subtype
elif "additionalProperties" in schema:
if "additionalProperties" in schema:
subtype = type_from_schema_path(schema["additionalProperties"], path[1:])
return subtype
else:
msg = f"Unknown type for path {path}"
raise ClanError(msg)
else:
msg = f"Unknown type for path {path}"
raise ClanError(msg)
@@ -100,7 +96,7 @@ def options_types_from_schema(schema: dict[str, Any]) -> dict[str, type]:
for sub_name, sub_type in sub_result.items():
result[f"{name}.{sub_name}"] = sub_type
continue
elif type_ == "array":
if type_ == "array":
if "items" not in value:
msg = f"Untyped arrays are not supported (field: {name})"
raise ClanError(msg)

View File

@@ -47,7 +47,6 @@ def clan_templates() -> Path:
template_path = module_root().parent.parent.parent / "templates"
if template_path.exists():
return template_path
else:
template_path = module_root() / "templates"
if not template_path.exists():
msg = f"BUG! clan core not found at {template_path}. This is an issue with packaging the cli"
@@ -58,9 +57,8 @@ def clan_templates() -> Path:
def user_config_dir() -> Path:
if sys.platform == "win32":
return Path(os.getenv("APPDATA", os.path.expanduser("~\\AppData\\Roaming\\")))
elif sys.platform == "darwin":
if sys.platform == "darwin":
return Path(os.path.expanduser("~/Library/Application Support/"))
else:
return Path(os.getenv("XDG_CONFIG_HOME", os.path.expanduser("~/.config")))
@@ -69,9 +67,8 @@ def user_data_dir() -> Path:
return Path(
os.getenv("LOCALAPPDATA", os.path.expanduser("~\\AppData\\Local\\"))
)
elif sys.platform == "darwin":
if sys.platform == "darwin":
return Path(os.path.expanduser("~/Library/Application Support/"))
else:
return Path(os.getenv("XDG_DATA_HOME", os.path.expanduser("~/.local/share")))
@@ -80,9 +77,8 @@ def user_cache_dir() -> Path:
return Path(
os.getenv("LOCALAPPDATA", os.path.expanduser("~\\AppData\\Local\\"))
)
elif sys.platform == "darwin":
if sys.platform == "darwin":
return Path(os.path.expanduser("~/Library/Caches/"))
else:
return Path(os.getenv("XDG_CACHE_HOME", os.path.expanduser("~/.cache")))

View File

@@ -24,7 +24,6 @@ class FactStore(FactStoreBase):
fact_path.touch()
fact_path.write_bytes(value)
return fact_path
else:
msg = f"in_flake fact storage is only supported for local flakes: {self.machine.flake}"
raise ClanError(msg)

View File

@@ -30,7 +30,6 @@ def help_hyperlink(description: str, url: str) -> str:
if sys.argv[0].__contains__("docs.py"):
return docs_hyperlink(description, url)
else:
return hyperlink_same_text_and_url(url)

View File

@@ -9,7 +9,7 @@ class ClanJSONEncoder(json.JSONEncoder):
if hasattr(o, "to_json") and callable(o.to_json):
return o.to_json()
# Check if the object is a dataclass
elif dataclasses.is_dataclass(o):
if dataclasses.is_dataclass(o):
return dataclasses.asdict(o)
# Otherwise, use the default serialization
return super().default(o)

View File

@@ -145,7 +145,7 @@ def install_command(args: argparse.Namespace) -> None:
if not args.yes:
ask = input(f"Install {args.machine} to {target_host}? [y/N] ")
if ask != "y":
return
return None
return install_machine(
InstallOptions(

View File

@@ -110,9 +110,8 @@ class Machine:
def flake_dir(self) -> Path:
if self.flake.is_local():
return self.flake.path
elif self.flake.is_remote():
if self.flake.is_remote():
return Path(nix_metadata(self.flake.url)["path"])
else:
msg = f"Unsupported flake url: {self.flake}"
raise ClanError(msg)
@@ -218,10 +217,9 @@ class Machine:
if method == "eval":
output = run_no_stdout(nix_eval(args)).stdout.strip()
return output
elif method == "build":
if method == "build":
outpath = run_no_stdout(nix_build(args)).stdout.strip()
return Path(outpath)
else:
msg = f"Unknown method {method}"
raise ValueError(msg)
@@ -246,7 +244,6 @@ class Machine:
if isinstance(output, str):
self._eval_cache[attr] = output
return output
else:
msg = "eval_nix returned not a string"
raise ClanError(msg)
@@ -272,6 +269,5 @@ class Machine:
if isinstance(output, Path):
self._build_cache[attr] = output
return output
else:
msg = "build_nix returned not a Path"
raise ClanError(msg)

View File

@@ -40,7 +40,6 @@ def nix_build(flags: list[str], gcroot: Path | None = None) -> list[str]:
)
+ flags
)
else:
return (
nix_command(
[

View File

@@ -88,7 +88,6 @@ def trim_path_to_three_levels(path: str) -> str:
parts = path.split(os.path.sep)
if len(parts) > 4:
return os.path.sep.join(parts[-4:])
else:
return path
@@ -116,5 +115,4 @@ def profile(func: Callable) -> Callable:
if os.getenv("PERF", "0") == "1":
return wrapper
else:
return func

View File

@@ -96,7 +96,6 @@ def default_sops_key_path() -> Path:
raw_path = os.environ.get("SOPS_AGE_KEY_FILE")
if raw_path:
return Path(raw_path)
else:
return user_config_dir() / "sops" / "age" / "keys.txt"
@@ -107,7 +106,6 @@ def ensure_sops_key(flake_dir: Path) -> SopsKey:
path = default_sops_key_path()
if path.exists():
return ensure_user_or_machine(flake_dir, get_public_key(path.read_text()))
else:
msg = "No sops key found. Please generate one with 'clan secrets key generate'."
raise ClanError(msg)

View File

@@ -348,7 +348,6 @@ class Host:
raise subprocess.CalledProcessError(
ret, cmd=cmd, output=stdout_data, stderr=stderr_data
)
else:
cmdlog.warning(
f"[Command failed: {ret}] {displayed_cmd}",
extra={"command_prefix": self.command_prefix},

View File

@@ -18,7 +18,6 @@ class FactStore(FactStoreBase):
def _var_path(self, generator_name: str, name: str, shared: bool) -> Path:
if shared:
return self.shared_folder / generator_name / name
else:
return self.per_machine_folder / generator_name / name
def set(
@@ -30,7 +29,6 @@ class FactStore(FactStoreBase):
fact_path.touch()
fact_path.write_bytes(value)
return fact_path
else:
msg = f"in_flake fact storage is only supported for local flakes: {self.machine.flake}"
raise ClanError(msg)

View File

@@ -21,7 +21,6 @@ class SecretStore(SecretStoreBase):
def _var_path(self, generator_name: str, name: str, shared: bool) -> Path:
if shared:
return Path(f"shared/{generator_name}/{name}")
else:
return Path(f"machines/{self.machine.name}/{generator_name}/{name}")
def set(

View File

@@ -43,7 +43,6 @@ def graphics_options(vm: VmConfig) -> GraphicOptions:
#"-chardev", "socket,id=vgpu,path=/tmp/vgpu.sock",
], cid)
# fmt: on
else:
if not os.path.exists("/run/opengl-driver"):
display_options = [
"-vga",

View File

@@ -246,7 +246,7 @@ def test_flake_with_core(
@pytest.fixture
def test_local_democlan(
monkeypatch: pytest.MonkeyPatch, temporary_home: Path
) -> Iterator[FlakeForTest]:
) -> FlakeForTest:
democlan = os.getenv(key="DEMOCLAN_ROOT")
if democlan is None:
msg = (
@@ -258,7 +258,7 @@ def test_local_democlan(
msg = f"DEMOCLAN_ROOT ({democlan_p}) is not a directory. This test requires the democlan directory to be present"
raise Exception(msg)
yield FlakeForTest(democlan_p)
return FlakeForTest(democlan_p)
@pytest.fixture

View File

@@ -10,7 +10,6 @@ def is_valid_age_key(secret_key: str) -> bool:
if result.returncode == 0:
return True
else:
msg = f"Invalid age key: {secret_key}"
raise ValueError(msg)
@@ -30,6 +29,5 @@ def is_valid_ssh_key(secret_key: str, ssh_pub: str) -> bool:
msg = f"Expected '{ssh_pub}' got '{result.stdout}' for ssh key: {secret_key}"
raise ValueError(msg)
return True
else:
msg = f"Invalid ssh key: {secret_key}"
raise ValueError(msg)

View File

@@ -1,11 +1,10 @@
import types
import pytest
from pytest import CaptureFixture
class CaptureOutput:
def __init__(self, capsys: CaptureFixture) -> None:
def __init__(self, capsys: pytest.CaptureFixture) -> None:
self.capsys = capsys
self.capsys_disabled = capsys.disabled()
self.capsys_disabled.__enter__()
@@ -31,5 +30,5 @@ class CaptureOutput:
@pytest.fixture
def capture_output(capsys: CaptureFixture) -> CaptureOutput:
def capture_output(capsys: pytest.CaptureFixture) -> CaptureOutput:
return CaptureOutput(capsys)

View File

@@ -143,7 +143,7 @@ def test_cast() -> None:
@pytest.mark.parametrize(
"option,value,options,expected",
("option", "value", "options", "expected"),
[
("foo.bar", ["baz"], {"foo.bar": {"type": "str"}}, ("foo.bar", ["baz"])),
("foo.bar", ["baz"], {"foo": {"type": "attrs"}}, ("foo", {"bar": ["baz"]})),

View File

@@ -28,10 +28,8 @@ def test_commit_file(git_repo: Path) -> None:
def test_commit_file_outside_git_raises_error(git_repo: Path) -> None:
# create a file outside the git (a temporary file)
with tempfile.NamedTemporaryFile() as tmp:
# commit the file
with pytest.raises(ClanError):
git.commit_file(Path(tmp.name), git_repo, "test commit")
# this should not fail but skip the commit
with pytest.raises(ClanError):
git.commit_file(Path(tmp.name), git_repo, "test commit")

View File

@@ -175,7 +175,6 @@ class VMObject(GObject.Object):
if self.progress_bar.is_visible():
self.progress_bar.pulse()
return GLib.SOURCE_CONTINUE
else:
return GLib.SOURCE_REMOVE
def __start(self) -> None:

View File

@@ -19,7 +19,7 @@ def wayland_compositor() -> Generator[Popen, None, None]:
GtkProc = NewType("GtkProc", Popen)
@pytest.fixture(scope="function")
@pytest.fixture
def app() -> Generator[GtkProc, None, None]:
rapp = Popen([sys.executable, "-m", "clan_vm_manager"], text=True)
yield GtkProc(rapp)

View File

@@ -19,21 +19,20 @@ def map_json_type(
return res
if isinstance(json_type, dict):
return map_json_type(json_type.get("type"))
elif json_type == "string":
if json_type == "string":
return {"str"}
elif json_type == "integer":
if json_type == "integer":
return {"int"}
elif json_type == "boolean":
if json_type == "boolean":
return {"bool"}
elif json_type == "array":
if json_type == "array":
assert nested_types, f"Array type not found for {parent}"
return {f"""list[{" | ".join(nested_types)}]"""}
elif json_type == "object":
if json_type == "object":
assert nested_types, f"dict type not found for {parent}"
return {f"""dict[str, {" | ".join(nested_types)}]"""}
elif json_type == "null":
if json_type == "null":
return {"None"}
else:
msg = f"Python type not found for {json_type}"
raise ValueError(msg)
@@ -116,12 +115,12 @@ def field_def_from_default_value(
field_types=field_types | {"None"},
default="None",
)
elif isinstance(default_value, list):
if isinstance(default_value, list):
return finalize_field(
field_types=field_types,
default_factory="list",
)
elif isinstance(default_value, dict):
if isinstance(default_value, dict):
serialised_types = " | ".join(field_types)
if serialised_types == nested_class_name:
return finalize_field(
@@ -129,25 +128,23 @@ def field_def_from_default_value(
default_factory=nested_class_name,
)
elif "dict[str," in serialised_types:
if "dict[str," in serialised_types:
return finalize_field(
field_types=field_types,
default_factory="dict",
)
else:
return finalize_field(
field_types=field_types,
default_factory="dict",
type_apendix=" | dict[str,Any]",
)
elif default_value == "name":
if default_value == "name":
return None
elif isinstance(default_value, str):
if isinstance(default_value, str):
return finalize_field(
field_types=field_types,
default=f"'{default_value}'",
)
else:
# Other default values unhandled yet.
msg = f"Unhandled default value for field '{field_name}' - default value: {default_value}"
raise ValueError(msg)

View File

@@ -20,7 +20,7 @@
};
perSystem =
{ pkgs, config, ... }:
{ config, pkgs, ... }:
{
packages =
{
@@ -34,6 +34,8 @@
classgen = pkgs.callPackage ./classgen { };
zerotierone = pkgs.callPackage ./zerotierone { };
}
// lib.optionalAttrs (pkgs.stdenv.isLinux) { nixos-facter = pkgs.callPackage ./nixos-facter { }; };
// lib.optionalAttrs pkgs.stdenv.isLinux {
nixos-facter = pkgs.callPackage ./nixos-facter { };
};
};
}

View File

@@ -39,6 +39,7 @@ def convert_bytearray_to_string(byte_array: str) -> str:
if byte_array.startswith('"@ByteArray(') and byte_array.endswith(')"'):
byte_array = byte_array[12:-2]
return byte_array.replace("\\n", "\n")
return byte_array
# this must be created before moonlight is first run

File diff suppressed because it is too large Load Diff

View File

@@ -1,53 +1,7 @@
{
callPackage,
zerotierone,
fetchFromGitHub,
lib,
stdenv,
}:
let
importCargoLock = callPackage ./import-cargo-lock.nix { };
in
zerotierone.overrideAttrs (old: {
name = "zerotierone-1.14.0-unstable-2024-07-31";
src = fetchFromGitHub {
owner = "zerotier";
repo = "ZeroTierOne";
rev = "f176e2539e10e8c0f61eb1d2e1f0e690a267a646";
hash = "sha256-pGozwaBy9eMA8izYtGhhmJeHzGjHFLID7WC01977XxQ=";
};
cargoDeps = importCargoLock {
lockFile = ./Cargo.lock;
outputHashes = {
"jwt-0.16.0" = "sha256-P5aJnNlcLe9sBtXZzfqHdRvxNfm6DPBcfcKOVeLZxcM=";
"rustfsm-0.1.0" = "sha256-AYMk31QuwB1R/yr1wNl9MSWL52ERJMtkR4aSPf2waWs=";
};
};
patches = [ ];
postPatch = "cp ${./Cargo.lock} Cargo.lock";
preBuild =
if stdenv.isDarwin then
''
makeFlagsArray+=("ARCH_FLAGS=") # disable multi-arch build
if ! grep -q MACOS_VERSION_MIN=10.13 make-mac.mk; then
echo "You may need to update MACOSX_DEPLOYMENT_TARGET to match the value in make-mac.mk"
exit 1
fi
(cd rustybits && MACOSX_DEPLOYMENT_TARGET=10.13 cargo build -p zeroidc --release)
cp \
./rustybits/target/${stdenv.hostPlatform.rust.rustcTarget}/release/libzeroidc.a \
./rustybits/target
# zerotier uses the "FORCE" target as a phony target to force rebuilds.
# We don't want to rebuild libzeroidc.a as we build want to build this library ourself for a single architecture
touch FORCE
''
else
old.preBuild;
meta = old.meta // {
# halalify zerotierone
{ zerotierone, lib }:
# halalify zerotierone
zerotierone.overrideAttrs (_old: {
meta = _old.meta // {
license = lib.licenses.apsl20;
};
})

View File

@@ -1,309 +0,0 @@
{
fetchgit,
fetchurl,
lib,
writers,
python3Packages,
runCommand,
cargo,
jq,
}:
{
# Cargo lock file
lockFile ? null,
# Cargo lock file contents as string
lockFileContents ? null,
# Allow `builtins.fetchGit` to be used to not require hashes for git dependencies
allowBuiltinFetchGit ? false,
# Additional registries to pull sources from
# { "https://<registry index URL>" = "https://<registry download URL>"; }
# or if the registry is using the new sparse protocol
# { "sparse+https://<registry download URL>" = "https://<registry download URL>"; }
# where:
# - "index URL" is the "index" value of the configuration entry for that registry
# https://doc.rust-lang.org/cargo/reference/registries.html#using-an-alternate-registry
# - "download URL" is the "dl" value of its associated index configuration
# https://doc.rust-lang.org/cargo/reference/registry-index.html#index-configuration
extraRegistries ? { },
# Hashes for git dependencies.
outputHashes ? { },
}@args:
assert (lockFile == null) != (lockFileContents == null);
let
# Parse a git source into different components.
parseGit =
src:
let
parts = builtins.match ''git\+([^?]+)(\?(rev|tag|branch)=(.*))?#(.*)'' src;
type = builtins.elemAt parts 2; # rev, tag or branch
value = builtins.elemAt parts 3;
in
if parts == null then
null
else
{
url = builtins.elemAt parts 0;
sha = builtins.elemAt parts 4;
}
// lib.optionalAttrs (type != null) { inherit type value; };
# shadows args.lockFileContents
lockFileContents = if lockFile != null then builtins.readFile lockFile else args.lockFileContents;
parsedLockFile = builtins.fromTOML lockFileContents;
packages = parsedLockFile.package;
# There is no source attribute for the source package itself. But
# since we do not want to vendor the source package anyway, we can
# safely skip it.
depPackages = builtins.filter (p: p ? "source") packages;
# Create dependent crates from packages.
#
# Force evaluation of the git SHA -> hash mapping, so that an error is
# thrown if there are stale hashes. We cannot rely on gitShaOutputHash
# being evaluated otherwise, since there could be no git dependencies.
depCrates = builtins.deepSeq gitShaOutputHash (builtins.map mkCrate depPackages);
# Map package name + version to git commit SHA for packages with a git source.
namesGitShas = builtins.listToAttrs (
builtins.map nameGitSha (builtins.filter (pkg: lib.hasPrefix "git+" pkg.source) depPackages)
);
nameGitSha =
pkg:
let
gitParts = parseGit pkg.source;
in
{
name = "${pkg.name}-${pkg.version}";
value = gitParts.sha;
};
# Convert the attrset provided through the `outputHashes` argument to a
# a mapping from git commit SHA -> output hash.
#
# There may be multiple different packages with different names
# originating from the same git repository (typically a Cargo
# workspace). By using the git commit SHA as a universal identifier,
# the user does not have to specify the output hash for every package
# individually.
gitShaOutputHash = lib.mapAttrs' (
nameVer: hash:
let
unusedHash = throw "A hash was specified for ${nameVer}, but there is no corresponding git dependency.";
rev = namesGitShas.${nameVer} or unusedHash;
in
{
name = rev;
value = hash;
}
) outputHashes;
# We can't use the existing fetchCrate function, since it uses a
# recursive hash of the unpacked crate.
fetchCrate =
pkg: downloadUrl:
let
checksum =
pkg.checksum or parsedLockFile.metadata."checksum ${pkg.name} ${pkg.version} (${pkg.source})";
in
assert lib.assertMsg (checksum != null) ''
Package ${pkg.name} does not have a checksum.
'';
fetchurl {
name = "crate-${pkg.name}-${pkg.version}.tar.gz";
url = "${downloadUrl}/${pkg.name}/${pkg.version}/download";
sha256 = checksum;
};
registries = {
"https://github.com/rust-lang/crates.io-index" = "https://crates.io/api/v1/crates";
} // extraRegistries;
# Replaces values inherited by workspace members.
replaceWorkspaceValues = writers.writePython3 "replace-workspace-values" {
libraries = with python3Packages; [
tomli
tomli-w
];
flakeIgnore = [
"E501"
"W503"
];
} (builtins.readFile ./replace-workspace-values.py);
# Fetch and unpack a crate.
mkCrate =
pkg:
let
gitParts = parseGit pkg.source;
registryIndexUrl = lib.removePrefix "registry+" pkg.source;
in
if
(lib.hasPrefix "registry+" pkg.source || lib.hasPrefix "sparse+" pkg.source)
&& builtins.hasAttr registryIndexUrl registries
then
let
crateTarball = fetchCrate pkg registries.${registryIndexUrl};
in
runCommand "${pkg.name}-${pkg.version}" { } ''
mkdir $out
tar xf "${crateTarball}" -C $out --strip-components=1
# Cargo is happy with largely empty metadata.
printf '{"files":{},"package":"${crateTarball.outputHash}"}' > "$out/.cargo-checksum.json"
''
else if gitParts != null then
let
missingHash = throw ''
No hash was found while vendoring the git dependency ${pkg.name}-${pkg.version}. You can add
a hash through the `outputHashes` argument of `importCargoLock`:
outputHashes = {
"${pkg.name}-${pkg.version}" = "<hash>";
};
If you use `buildRustPackage`, you can add this attribute to the `cargoLock`
attribute set.
'';
tree =
if gitShaOutputHash ? ${gitParts.sha} then
fetchgit {
inherit (gitParts) url;
rev = gitParts.sha; # The commit SHA is always available.
sha256 = gitShaOutputHash.${gitParts.sha};
}
else if allowBuiltinFetchGit then
builtins.fetchGit {
inherit (gitParts) url;
rev = gitParts.sha;
allRefs = true;
submodules = true;
}
else
missingHash;
in
runCommand "${pkg.name}-${pkg.version}" { } ''
tree=${tree}
# If the target package is in a workspace, or if it's the top-level
# crate, we should find the crate path using `cargo metadata`.
# Some packages do not have a Cargo.toml at the top-level,
# but only in nested directories.
# Only check the top-level Cargo.toml, if it actually exists
if [[ -f $tree/Cargo.toml ]]; then
crateCargoTOML=$(${cargo}/bin/cargo metadata --format-version 1 --no-deps --manifest-path $tree/Cargo.toml | \
${jq}/bin/jq -r '.packages[] | select(.name == "${pkg.name}") | .manifest_path')
fi
# If the repository is not a workspace the package might be in a subdirectory.
if [[ -z $crateCargoTOML ]]; then
for manifest in $(find $tree -name "Cargo.toml"); do
echo Looking at $manifest
crateCargoTOML=$(${cargo}/bin/cargo metadata --format-version 1 --no-deps --manifest-path "$manifest" | ${jq}/bin/jq -r '.packages[] | select(.name == "${pkg.name}") | .manifest_path' || :)
if [[ ! -z $crateCargoTOML ]]; then
break
fi
done
if [[ -z $crateCargoTOML ]]; then
>&2 echo "Cannot find path for crate '${pkg.name}-${pkg.version}' in the tree in: $tree"
exit 1
fi
fi
echo Found crate ${pkg.name} at $crateCargoTOML
tree=$(dirname $crateCargoTOML)
cp -prvL "$tree/" $out
chmod u+w $out
if grep -q workspace "$out/Cargo.toml"; then
chmod u+w "$out/Cargo.toml"
${replaceWorkspaceValues} "$out/Cargo.toml" "$(${cargo}/bin/cargo metadata --format-version 1 --no-deps --manifest-path $crateCargoTOML | ${jq}/bin/jq -r .workspace_root)/Cargo.toml"
fi
# Cargo is happy with empty metadata.
printf '{"files":{},"package":null}' > "$out/.cargo-checksum.json"
# Set up configuration for the vendor directory.
cat > $out/.cargo-config <<EOF
[source."${gitParts.url}${
lib.optionalString (gitParts ? type) "?${gitParts.type}=${gitParts.value}"
}"]
git = "${gitParts.url}"
${lib.optionalString (gitParts ? type) "${gitParts.type} = \"${gitParts.value}\""}
replace-with = "vendored-sources"
EOF
''
else
throw "Cannot handle crate source: ${pkg.source}";
vendorDir =
runCommand "cargo-vendor-dir"
(
if lockFile == null then
{
inherit lockFileContents;
passAsFile = [ "lockFileContents" ];
}
else
{
passthru = {
inherit lockFile;
};
}
)
''
mkdir -p $out/.cargo
${
if lockFile != null then
"ln -s ${lockFile} $out/Cargo.lock"
else
"cp $lockFileContentsPath $out/Cargo.lock"
}
cat > $out/.cargo/config <<EOF
[source.crates-io]
replace-with = "vendored-sources"
[source.vendored-sources]
directory = "cargo-vendor-dir"
EOF
declare -A keysSeen
for registry in ${toString (builtins.attrNames extraRegistries)}; do
cat >> $out/.cargo/config <<EOF
[source."$registry"]
registry = "$registry"
replace-with = "vendored-sources"
EOF
done
for crate in ${toString depCrates}; do
# Link the crate directory, removing the output path hash from the destination.
ln -s "$crate" $out/$(basename "$crate" | cut -c 34-)
if [ -e "$crate/.cargo-config" ]; then
key=$(sed 's/\[source\."\(.*\)"\]/\1/; t; d' < "$crate/.cargo-config")
if [[ -z ''${keysSeen[$key]} ]]; then
keysSeen[$key]=1
cat "$crate/.cargo-config" >> $out/.cargo/config
fi
fi
done
'';
in
vendorDir

View File

@@ -1,130 +0,0 @@
# This script implements the workspace inheritance mechanism described
# here: https://doc.rust-lang.org/cargo/reference/workspaces.html#the-package-table
#
# Please run `mypy --strict`, `black`, and `isort --profile black` on this after editing, thanks!
import sys
from typing import Any, Literal, assert_type
import tomli
import tomli_w
def load_file(path: str) -> dict[str, Any]:
    """Read and parse the TOML manifest at *path*."""
    with open(path, "rb") as toml_file:
        return tomli.load(toml_file)
# This replicates the dependency merging logic from Cargo.
# See `inner_dependency_inherit_with`:
# https://github.com/rust-lang/cargo/blob/4de0094ac78743d2c8ff682489e35c8a7cafe8e4/src/cargo/util/toml/mod.rs#L982
def replace_key(
workspace_manifest: dict[str, Any],
table: dict[str, Any],
section: Literal["package", "dependencies"],
key: str,
) -> bool:
if not isinstance(table[key], dict) or table[key].get("workspace") is not True:
return False
print("replacing " + key)
local_dep = table[key]
del local_dep["workspace"]
workspace_dep: str | dict[str, Any] = workspace_manifest[section][key]
if section == "package":
table[key] = workspace_dep
return True
_ = assert_type(section, Literal["dependencies"])
if isinstance(workspace_dep, str):
workspace_dep = {"version": workspace_dep}
final: dict[str, Any] = workspace_dep.copy()
merged_features = local_dep.pop("features", []) + workspace_dep.get("features", [])
if merged_features:
final["features"] = merged_features
local_default_features = local_dep.pop("default-features", None)
workspace_default_features = workspace_dep.get("default-features")
if not workspace_default_features and local_default_features:
final["default-features"] = True
optional = local_dep.pop("optional", False)
if optional:
final["optional"] = True
if local_dep:
msg = f"Unhandled keys in inherited dependency {key}: {local_dep}"
raise Exception(msg)
table[key] = final
# crate_features = local_dep.get("features", [])
# local_dep.update(workspace_copy)
# merged_features = crate_features + workspace_copy.get("features", [])
# if len(merged_features) > len(crate_features):
# local_dep["features"] = list(dict.fromkeys(merged_features))
return True
def replace_dependencies(
    workspace_manifest: dict[str, Any], root: dict[str, Any]
) -> bool:
    """Apply workspace inheritance to every dependency table under *root*.

    Covers [dependencies], [dev-dependencies] and [build-dependencies].
    Returns True when at least one entry was rewritten.
    """
    changed = False
    for section in ("dependencies", "dev-dependencies", "build-dependencies"):
        table = root.get(section, {})
        for dep_name in table:
            if replace_key(workspace_manifest, table, "dependencies", dep_name):
                changed = True
    return changed
def main() -> None:
    """Rewrite the crate manifest (argv[1]) in place, resolving every
    ``workspace = true`` reference against the workspace manifest (argv[2]).
    """
    top_cargo_toml = load_file(sys.argv[2])

    if "workspace" not in top_cargo_toml:
        # If top_cargo_toml is not a workspace manifest, then this script was
        # probably ran on something that does not actually use workspace
        # dependencies.
        msg = f"{sys.argv[2]} is not a workspace manifest."
        raise Exception(msg)

    crate_manifest = load_file(sys.argv[1])
    workspace_manifest = top_cargo_toml["workspace"]

    if "workspace" in crate_manifest:
        print(f"{sys.argv[1]} is a workspace manifest, skipping", file=sys.stderr)
        return

    changed = False
    # Iterate the dict directly; replace_key only rewrites values, never keys.
    for key in crate_manifest["package"]:
        changed |= replace_key(
            workspace_manifest, crate_manifest["package"], "package", key
        )
    changed |= replace_dependencies(workspace_manifest, crate_manifest)
    # Platform-specific tables: [target.'cfg(...)'.dependencies] and friends.
    for target_table in crate_manifest.get("target", {}).values():
        changed |= replace_dependencies(workspace_manifest, target_table)

    if crate_manifest.get("lints", {}).get("workspace") is True:
        changed = True
        crate_manifest["lints"] = workspace_manifest["lints"]

    # Only write the manifest back when something actually changed.
    if not changed:
        print(f"{sys.argv[1]} is unchanged, skipping", file=sys.stderr)
        return

    with open(sys.argv[1], "wb") as f:
        tomli_w.dump(crate_manifest, f)


if __name__ == "__main__":
    main()

View File

@@ -27,11 +27,15 @@ lint.select = [
"LOG",
"N",
"PIE",
"PT",
"PYI",
"Q",
"RET",
"RSE",
"RUF",
"T10",
"TID",
"U",
"YTT",
]
lint.ignore = ["E501", "E402", "E731", "ANN101", "ANN401", "A003"]
lint.ignore = ["E501", "E402", "E731", "ANN101", "ANN401", "A003", "RET504"]