Merge pull request 'add PT linting' (#2018) from type-checking into main
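Nearly every hunk in this diff applies the same mechanical rewrite: ruff's RET rules flag an `else`/`elif` that follows a `return`, `raise`, or `continue`, so the redundant branch keyword is dropped and its body is dedented. A minimal sketch of the pattern (illustrative names, not code from this repository):

    # Before: superfluous else after return (RET505)
    def sign(x: int) -> str:
        if x >= 0:
            return "non-negative"
        else:
            return "negative"

    # After: same behavior, one nesting level less
    def sign(x: int) -> str:
        if x >= 0:
            return "non-negative"
        return "negative"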
@@ -104,25 +104,24 @@ class GObjApi:
         for m_name, m_signature in fn_signatures.items():
             if m_name not in overwrite_fns:
                 continue
-            else:
-                # check if the signature of the overriden method matches
-                # the implementation signature
-                exp_args = []
-                exp_return = m_signature.return_annotation
-                for param in dict(m_signature.parameters).values():
-                    exp_args.append(param.annotation)
-                exp_signature = (tuple(exp_args), exp_return)
+            # check if the signature of the overriden method matches
+            # the implementation signature
+            exp_args = []
+            exp_return = m_signature.return_annotation
+            for param in dict(m_signature.parameters).values():
+                exp_args.append(param.annotation)
+            exp_signature = (tuple(exp_args), exp_return)

-                # implementation signature
-                obj = dict(overwrite_fns[m_name].__dict__)
-                obj_type = obj["__orig_bases__"][0]
-                got_signature = obj_type.__args__
+            # implementation signature
+            obj = dict(overwrite_fns[m_name].__dict__)
+            obj_type = obj["__orig_bases__"][0]
+            got_signature = obj_type.__args__

-                if exp_signature != got_signature:
-                    log.error(f"Expected signature: {exp_signature}")
-                    log.error(f"Actual signature: {got_signature}")
-                    msg = f"Overwritten method '{m_name}' has different signature than the implementation"
-                    raise ValueError(msg)
+            if exp_signature != got_signature:
+                log.error(f"Expected signature: {exp_signature}")
+                log.error(f"Actual signature: {got_signature}")
+                msg = f"Overwritten method '{m_name}' has different signature than the implementation"
+                raise ValueError(msg)

     def get_obj(self, fn_name: str) -> type[ImplFunc]:
         result = self._obj_registry.get(fn_name, None)
@@ -146,11 +145,10 @@ class GObjApi:
                 self.thread = MethodExecutor(plain_fn, *args, **kwargs)
                 self.thread.start()
                 return GLib.SOURCE_CONTINUE
-            elif self.thread.finished:
+            if self.thread.finished:
                 result = self.thread.result
                 self.returns(method_name=fn_name, result=result)
                 return GLib.SOURCE_REMOVE
-            else:
-                return GLib.SOURCE_CONTINUE
+            return GLib.SOURCE_CONTINUE

         return cast(type[ImplFunc], GenericFnRuntime)
@@ -67,9 +67,7 @@ class WebExecutor(GObject.Object):
         if self.content_uri.startswith("http://") and uri.startswith(self.content_uri):
             log.debug(f"Allow navigation request: {uri}")
             return False
-        elif self.content_uri.startswith("file://") and uri.startswith(
-            self.content_uri
-        ):
+        if self.content_uri.startswith("file://") and uri.startswith(self.content_uri):
             log.debug(f"Allow navigation request: {uri}")
             return False
         log.warning(
@@ -19,7 +19,7 @@ def wayland_compositor() -> Generator[Popen, None, None]:
 GtkProc = NewType("GtkProc", Popen)


-@pytest.fixture(scope="function")
+@pytest.fixture
 def app() -> Generator[GtkProc, None, None]:
     rapp = Popen([sys.executable, "-m", "clan_app"], text=True)
     yield GtkProc(rapp)
@@ -56,7 +56,8 @@ def get_directory(current_path: str) -> Directory:
     directory = Directory(path=str(curr_dir))

     if not curr_dir.is_dir():
-        raise ClanError()
+        msg = f"Path {curr_dir} is not a directory"
+        raise ClanError(msg)

     with os.scandir(curr_dir.resolve()) as it:
         for entry in it:
@@ -71,16 +71,15 @@ def dataclass_to_dict(obj: Any, *, use_alias: bool = True) -> Any:
                 if not field.name.startswith("_")
                 and getattr(obj, field.name) is not None  # type: ignore
             }
-        elif isinstance(obj, list | tuple):
+        if isinstance(obj, list | tuple):
             return [_to_dict(item) for item in obj]
-        elif isinstance(obj, dict):
+        if isinstance(obj, dict):
             return {sanitize_string(k): _to_dict(v) for k, v in obj.items()}
-        elif isinstance(obj, Path):
+        if isinstance(obj, Path):
             return sanitize_string(str(obj))
-        elif isinstance(obj, str):
+        if isinstance(obj, str):
             return sanitize_string(obj)
-        else:
-            return obj
+        return obj

     return _to_dict(obj)
@@ -144,7 +143,7 @@ def construct_value(

     # If the field expects a path
     # Field_value must be a string
-    elif is_type_in_union(t, Path):
+    if is_type_in_union(t, Path):
         if not isinstance(field_value, str):
             msg = (
                 f"Expected string, cannot construct pathlib.Path() from: {field_value} "
@@ -157,22 +156,22 @@ def construct_value(
         return Path(field_value)

     # Trivial values
-    elif t is str:
+    if t is str:
         if not isinstance(field_value, str):
             msg = f"Expected string, got {field_value}"
             raise ClanError(msg, location=f"{loc}")

         return field_value

-    elif t is int and not isinstance(field_value, str):
+    if t is int and not isinstance(field_value, str):
         return int(field_value)  # type: ignore
-    elif t is float and not isinstance(field_value, str):
+    if t is float and not isinstance(field_value, str):
         return float(field_value)  # type: ignore
-    elif t is bool and isinstance(field_value, bool):
+    if t is bool and isinstance(field_value, bool):
         return field_value  # type: ignore

     # Union types construct the first non-None type
-    elif is_union_type(t):
+    if is_union_type(t):
         # Unwrap the union type
         inner = unwrap_none_type(t)
         # Construct the field value
@@ -181,34 +180,33 @@ def construct_value(
     # Nested types
     # list
     # dict
-    elif get_origin(t) is list:
+    if get_origin(t) is list:
         if not isinstance(field_value, list):
             msg = f"Expected list, got {field_value}"
             raise ClanError(msg, location=f"{loc}")

         return [construct_value(get_args(t)[0], item) for item in field_value]
-    elif get_origin(t) is dict and isinstance(field_value, dict):
+    if get_origin(t) is dict and isinstance(field_value, dict):
         return {
             key: construct_value(get_args(t)[1], value)
             for key, value in field_value.items()
         }
-    elif get_origin(t) is Literal:
+    if get_origin(t) is Literal:
         valid_values = get_args(t)
         if field_value not in valid_values:
             msg = f"Expected one of {valid_values}, got {field_value}"
             raise ClanError(msg, location=f"{loc}")
         return field_value

-    elif get_origin(t) is Annotated:
+    if get_origin(t) is Annotated:
         (base_type,) = get_args(t)
         return construct_value(base_type, field_value)

     # elif get_origin(t) is Union:

     # Unhandled
-    else:
-        msg = f"Unhandled field type {t} with value {field_value}"
-        raise ClanError(msg)
+    msg = f"Unhandled field type {t} with value {field_value}"
+    raise ClanError(msg)


 def construct_dataclass(
@@ -274,5 +272,4 @@ def from_dict(
             msg = f"{data} is not a dict. Expected {t}"
             raise ClanError(msg)
         return construct_dataclass(t, data, path)  # type: ignore
-    else:
-        return construct_value(t, data, path)
+    return construct_value(t, data, path)
@@ -112,7 +112,7 @@ def type_to_dict(
             "additionalProperties": False,
         }

-    elif type(t) is UnionType:
+    if type(t) is UnionType:
         return {
             "oneOf": [type_to_dict(arg, scope, type_map) for arg in t.__args__],
         }
@@ -126,7 +126,7 @@ def type_to_dict(
             raise JSchemaTypeError(msg)
         return type_to_dict(type_map.get(t), scope, type_map)

-    elif hasattr(t, "__origin__"):  # Check if it's a generic type
+    if hasattr(t, "__origin__"):  # Check if it's a generic type
         origin = get_origin(t)
         args = get_args(t)

@@ -136,41 +136,40 @@ def type_to_dict(
             msg = f"{scope} Unhandled Type: "
             raise JSchemaTypeError(msg, origin)

-        elif origin is Literal:
+        if origin is Literal:
             # Handle Literal values for enums in JSON Schema
             return {
                 "type": "string",
                 "enum": list(args),  # assumes all args are strings
             }

-        elif origin is Annotated:
+        if origin is Annotated:
             base_type, *metadata = get_args(t)
             schema = type_to_dict(base_type, scope)  # Generate schema for the base type
             return apply_annotations(schema, metadata)

-        elif origin is Union:
+        if origin is Union:
             union_types = [type_to_dict(arg, scope, type_map) for arg in t.__args__]
             return {
                 "oneOf": union_types,
             }

-        elif origin in {list, set, frozenset}:
+        if origin in {list, set, frozenset}:
             return {
                 "type": "array",
                 "items": type_to_dict(t.__args__[0], scope, type_map),
             }

-        elif issubclass(origin, dict):
+        if issubclass(origin, dict):
             value_type = t.__args__[1]
             if value_type is Any:
                 return {"type": "object", "additionalProperties": True}
-            else:
-                return {
-                    "type": "object",
-                    "additionalProperties": type_to_dict(value_type, scope, type_map),
-                }
+            return {
+                "type": "object",
+                "additionalProperties": type_to_dict(value_type, scope, type_map),
+            }
         # Generic dataclass with type parameters
-        elif dataclasses.is_dataclass(origin):
+        if dataclasses.is_dataclass(origin):
             # This behavior should mimic the scoping of typeVars in dataclasses
             # Once type_to_dict() encounters a TypeVar, it will look up the type in the type_map
             # When type_to_dict() returns the map goes out of scope.
@@ -182,7 +181,7 @@ def type_to_dict(
         msg = f"{scope} - Error api type not yet supported {t!s}"
         raise JSchemaTypeError(msg)

-    elif isinstance(t, type):
+    if isinstance(t, type):
         if t is str:
             return {"type": "string"}
         if t is int:
@@ -211,6 +210,5 @@ def type_to_dict(

         msg = f"{scope} - Error primitive type not supported {t!s}"
         raise JSchemaTypeError(msg)
-    else:
-        msg = f"{scope} - Error type not supported {t!s}"
-        raise JSchemaTypeError(msg)
+    msg = f"{scope} - Error type not supported {t!s}"
+    raise JSchemaTypeError(msg)
@@ -24,8 +24,7 @@ def create_backup(machine: Machine, provider: str | None = None) -> None:
         if proc.returncode != 0:
             msg = "failed to start backup"
             raise ClanError(msg)
-        else:
-            print("successfully started backup")
+        print("successfully started backup")
     else:
         if provider not in backup_scripts["providers"]:
             msg = f"provider {provider} not found"
@@ -36,8 +35,7 @@ def create_backup(machine: Machine, provider: str | None = None) -> None:
         if proc.returncode != 0:
             msg = "failed to start backup"
             raise ClanError(msg)
-        else:
-            print("successfully started backup")
+        print("successfully started backup")


 def create_command(args: argparse.Namespace) -> None:
@@ -30,12 +30,9 @@ def list_provider(machine: Machine, provider: str) -> list[Backup]:
         # TODO this should be a warning, only raise exception if no providers succeed
         msg = f"failed to list backups for provider {provider}: {proc.stdout}"
         raise ClanError(msg)
-    else:
-        parsed_json = json.loads(proc.stdout)
-        for archive in parsed_json:
-            results.append(
-                Backup(name=archive["name"], job_name=archive.get("job_name"))
-            )
+    parsed_json = json.loads(proc.stdout)
+    for archive in parsed_json:
+        results.append(Backup(name=archive["name"], job_name=archive.get("job_name")))
     return results
@@ -53,8 +53,7 @@ def show_clan_meta(uri: str | Path) -> Meta:
                 description="Icon path must be a URL or a relative path.",
             )

-        else:
-            icon_path = str((Path(uri) / meta_icon).resolve())
+        icon_path = str((Path(uri) / meta_icon).resolve())
     else:
         msg = "Invalid schema"
         raise ClanError(
@@ -171,6 +171,5 @@ def run_no_stdout(
     cwd = Path.cwd()
     if logging.getLogger(__name__.split(".")[0]).isEnabledFor(logging.DEBUG):
         return run(cmd, env=env, log=log, check=check, error_msg=error_msg)
-    else:
-        log = Log.NONE
-        return run(cmd, env=env, log=log, check=check, error_msg=error_msg)
+    log = Log.NONE
+    return run(cmd, env=env, log=log, check=check, error_msg=error_msg)
@@ -22,26 +22,25 @@ log = logging.getLogger(__name__)
 def map_type(nix_type: str) -> Any:
     if nix_type == "boolean":
         return bool
-    elif nix_type in [
+    if nix_type in [
         "integer",
         "signed integer",
         "16 bit unsigned integer; between 0 and 65535 (both inclusive)",
     ]:
         return int
-    elif nix_type.startswith("string"):
+    if nix_type.startswith("string"):
         return str
-    elif nix_type.startswith("null or "):
+    if nix_type.startswith("null or "):
         subtype = nix_type.removeprefix("null or ")
         return map_type(subtype) | None
-    elif nix_type.startswith("attribute set of"):
+    if nix_type.startswith("attribute set of"):
         subtype = nix_type.removeprefix("attribute set of ")
         return dict[str, map_type(subtype)]  # type: ignore
-    elif nix_type.startswith("list of"):
+    if nix_type.startswith("list of"):
         subtype = nix_type.removeprefix("list of ")
         return list[map_type(subtype)]  # type: ignore
-    else:
-        msg = f"Unknown type {nix_type}"
-        raise ClanError(msg)
+    msg = f"Unknown type {nix_type}"
+    raise ClanError(msg)


 # merge two dicts recursively
@@ -77,31 +76,29 @@ def cast(value: Any, input_type: Any, opt_description: str) -> Any:
         if isinstance(input_type, bool):
             if value[0] in ["true", "True", "yes", "y", "1"]:
                 return True
-            elif value[0] in ["false", "False", "no", "n", "0"]:
+            if value[0] in ["false", "False", "no", "n", "0"]:
                 return False
-            else:
-                msg = f"Invalid value {value} for boolean"
-                raise ClanError(msg)
+            msg = f"Invalid value {value} for boolean"
+            raise ClanError(msg)
         # handle lists
-        elif get_origin(input_type) is list:
+        if get_origin(input_type) is list:
             subtype = input_type.__args__[0]
             return [cast([x], subtype, opt_description) for x in value]
         # handle dicts
-        elif get_origin(input_type) is dict:
+        if get_origin(input_type) is dict:
             if not isinstance(value, dict):
                 msg = f"Cannot set {opt_description} directly. Specify a suboption like {opt_description}.<name>"
                 raise ClanError(msg)
             subtype = input_type.__args__[1]
             return {k: cast(v, subtype, opt_description) for k, v in value.items()}
-        elif str(input_type) == "str | None":
+        if str(input_type) == "str | None":
             if value[0] in ["null", "None"]:
                 return None
             return value[0]
-        else:
-            if len(value) > 1:
-                msg = f"Too many values for {opt_description}"
-                raise ClanError(msg)
-            return input_type(value[0])
+        if len(value) > 1:
+            msg = f"Too many values for {opt_description}"
+            raise ClanError(msg)
+        return input_type(value[0])
     except ValueError as e:
         msg = f"Invalid type for option {opt_description} (expected {input_type.__name__})"
         raise ClanError(msg) from e
@@ -41,20 +41,18 @@ def subtype_from_schema(schema: dict[str, Any]) -> type:
         if "additionalProperties" in schema:
             sub_type = subtype_from_schema(schema["additionalProperties"])
             return dict[str, sub_type]  # type: ignore
-        elif "properties" in schema:
+        if "properties" in schema:
             msg = "Nested dicts are not supported"
             raise ClanError(msg)
-        else:
-            msg = "Unknown object type"
-            raise ClanError(msg)
-    elif schema["type"] == "array":
+        msg = "Unknown object type"
+        raise ClanError(msg)
+    if schema["type"] == "array":
         if "items" not in schema:
             msg = "Untyped arrays are not supported"
             raise ClanError(msg)
         sub_type = subtype_from_schema(schema["items"])
         return list[sub_type]  # type: ignore
-    else:
-        return type_map[schema["type"]]
+    return type_map[schema["type"]]


 def type_from_schema_path(
@@ -66,19 +64,17 @@ def type_from_schema_path(
     full_path = path
     if len(path) == 0:
         return subtype_from_schema(schema)
-    elif schema["type"] == "object":
+    if schema["type"] == "object":
         if "properties" in schema:
             subtype = type_from_schema_path(schema["properties"][path[0]], path[1:])
             return subtype
-        elif "additionalProperties" in schema:
+        if "additionalProperties" in schema:
             subtype = type_from_schema_path(schema["additionalProperties"], path[1:])
             return subtype
-        else:
-            msg = f"Unknown type for path {path}"
-            raise ClanError(msg)
-    else:
         msg = f"Unknown type for path {path}"
         raise ClanError(msg)
+    msg = f"Unknown type for path {path}"
+    raise ClanError(msg)


 def options_types_from_schema(schema: dict[str, Any]) -> dict[str, type]:
@@ -100,7 +96,7 @@ def options_types_from_schema(schema: dict[str, Any]) -> dict[str, type]:
             for sub_name, sub_type in sub_result.items():
                 result[f"{name}.{sub_name}"] = sub_type
             continue
-        elif type_ == "array":
+        if type_ == "array":
             if "items" not in value:
                 msg = f"Untyped arrays are not supported (field: {name})"
                 raise ClanError(msg)
@@ -47,21 +47,19 @@ def clan_templates() -> Path:
     template_path = module_root().parent.parent.parent / "templates"
     if template_path.exists():
         return template_path
-    else:
-        template_path = module_root() / "templates"
-        if not template_path.exists():
-            msg = f"BUG! clan core not found at {template_path}. This is an issue with packaging the cli"
-            raise ClanError(msg)
-        return template_path
+    template_path = module_root() / "templates"
+    if not template_path.exists():
+        msg = f"BUG! clan core not found at {template_path}. This is an issue with packaging the cli"
+        raise ClanError(msg)
+    return template_path


 def user_config_dir() -> Path:
     if sys.platform == "win32":
         return Path(os.getenv("APPDATA", os.path.expanduser("~\\AppData\\Roaming\\")))
-    elif sys.platform == "darwin":
+    if sys.platform == "darwin":
         return Path(os.path.expanduser("~/Library/Application Support/"))
-    else:
-        return Path(os.getenv("XDG_CONFIG_HOME", os.path.expanduser("~/.config")))
+    return Path(os.getenv("XDG_CONFIG_HOME", os.path.expanduser("~/.config")))


 def user_data_dir() -> Path:
@@ -69,10 +67,9 @@ def user_data_dir() -> Path:
         return Path(
             os.getenv("LOCALAPPDATA", os.path.expanduser("~\\AppData\\Local\\"))
         )
-    elif sys.platform == "darwin":
+    if sys.platform == "darwin":
         return Path(os.path.expanduser("~/Library/Application Support/"))
-    else:
-        return Path(os.getenv("XDG_DATA_HOME", os.path.expanduser("~/.local/share")))
+    return Path(os.getenv("XDG_DATA_HOME", os.path.expanduser("~/.local/share")))


 def user_cache_dir() -> Path:
@@ -80,10 +77,9 @@ def user_cache_dir() -> Path:
         return Path(
             os.getenv("LOCALAPPDATA", os.path.expanduser("~\\AppData\\Local\\"))
         )
-    elif sys.platform == "darwin":
+    if sys.platform == "darwin":
         return Path(os.path.expanduser("~/Library/Caches/"))
-    else:
-        return Path(os.getenv("XDG_CACHE_HOME", os.path.expanduser("~/.cache")))
+    return Path(os.getenv("XDG_CACHE_HOME", os.path.expanduser("~/.cache")))


 def user_gcroot_dir() -> Path:
@@ -24,9 +24,8 @@ class FactStore(FactStoreBase):
             fact_path.touch()
             fact_path.write_bytes(value)
             return fact_path
-        else:
-            msg = f"in_flake fact storage is only supported for local flakes: {self.machine.flake}"
-            raise ClanError(msg)
+        msg = f"in_flake fact storage is only supported for local flakes: {self.machine.flake}"
+        raise ClanError(msg)

     def exists(self, service: str, name: str) -> bool:
         fact_path = (
@@ -30,8 +30,7 @@ def help_hyperlink(description: str, url: str) -> str:
     if sys.argv[0].__contains__("docs.py"):
         return docs_hyperlink(description, url)

-    else:
-        return hyperlink_same_text_and_url(url)
+    return hyperlink_same_text_and_url(url)


 def docs_hyperlink(description: str, url: str) -> str:
@@ -9,7 +9,7 @@ class ClanJSONEncoder(json.JSONEncoder):
         if hasattr(o, "to_json") and callable(o.to_json):
             return o.to_json()
         # Check if the object is a dataclass
-        elif dataclasses.is_dataclass(o):
+        if dataclasses.is_dataclass(o):
             return dataclasses.asdict(o)
         # Otherwise, use the default serialization
         return super().default(o)
@@ -145,7 +145,7 @@ def install_command(args: argparse.Namespace) -> None:
     if not args.yes:
         ask = input(f"Install {args.machine} to {target_host}? [y/N] ")
         if ask != "y":
-            return
+            return None

     return install_machine(
         InstallOptions(
@@ -110,11 +110,10 @@ class Machine:
     def flake_dir(self) -> Path:
         if self.flake.is_local():
             return self.flake.path
-        elif self.flake.is_remote():
+        if self.flake.is_remote():
            return Path(nix_metadata(self.flake.url)["path"])
-        else:
-            msg = f"Unsupported flake url: {self.flake}"
-            raise ClanError(msg)
+        msg = f"Unsupported flake url: {self.flake}"
+        raise ClanError(msg)

     @property
     def target_host(self) -> Host:
@@ -218,12 +217,11 @@ class Machine:
         if method == "eval":
             output = run_no_stdout(nix_eval(args)).stdout.strip()
             return output
-        elif method == "build":
+        if method == "build":
             outpath = run_no_stdout(nix_build(args)).stdout.strip()
             return Path(outpath)
-        else:
-            msg = f"Unknown method {method}"
-            raise ValueError(msg)
+        msg = f"Unknown method {method}"
+        raise ValueError(msg)

     def eval_nix(
         self,
@@ -246,9 +244,8 @@ class Machine:
         if isinstance(output, str):
             self._eval_cache[attr] = output
             return output
-        else:
-            msg = "eval_nix returned not a string"
-            raise ClanError(msg)
+        msg = "eval_nix returned not a string"
+        raise ClanError(msg)

     def build_nix(
         self,
@@ -272,6 +269,5 @@ class Machine:
         if isinstance(output, Path):
             self._build_cache[attr] = output
             return output
-        else:
-            msg = "build_nix returned not a Path"
-            raise ClanError(msg)
+        msg = "build_nix returned not a Path"
+        raise ClanError(msg)
@@ -40,19 +40,18 @@ def nix_build(flags: list[str], gcroot: Path | None = None) -> list[str]:
             )
             + flags
         )
-    else:
-        return (
-            nix_command(
-                [
-                    "build",
-                    "--no-link",
-                    "--print-out-paths",
-                    "--no-write-lock-file",
-                    "--show-trace",
-                ]
-            )
-            + flags
-        )
+    return (
+        nix_command(
+            [
+                "build",
+                "--no-link",
+                "--print-out-paths",
+                "--no-write-lock-file",
+                "--show-trace",
+            ]
+        )
+        + flags
+    )


 def nix_add_to_gcroots(nix_path: Path, dest: Path) -> None:
@@ -88,8 +88,7 @@ def trim_path_to_three_levels(path: str) -> str:
     parts = path.split(os.path.sep)
     if len(parts) > 4:
         return os.path.sep.join(parts[-4:])
-    else:
-        return path
+    return path


 PROFS = ProfilerStore()
@@ -116,5 +115,4 @@ def profile(func: Callable) -> Callable:

     if os.getenv("PERF", "0") == "1":
         return wrapper
-    else:
-        return func
+    return func
@@ -96,8 +96,7 @@ def default_sops_key_path() -> Path:
     raw_path = os.environ.get("SOPS_AGE_KEY_FILE")
     if raw_path:
         return Path(raw_path)
-    else:
-        return user_config_dir() / "sops" / "age" / "keys.txt"
+    return user_config_dir() / "sops" / "age" / "keys.txt"


 def ensure_sops_key(flake_dir: Path) -> SopsKey:
@@ -107,9 +106,8 @@ def ensure_sops_key(flake_dir: Path) -> SopsKey:
     path = default_sops_key_path()
     if path.exists():
         return ensure_user_or_machine(flake_dir, get_public_key(path.read_text()))
-    else:
-        msg = "No sops key found. Please generate one with 'clan secrets key generate'."
-        raise ClanError(msg)
+    msg = "No sops key found. Please generate one with 'clan secrets key generate'."
+    raise ClanError(msg)


 @contextmanager
@@ -348,11 +348,10 @@ class Host:
                 raise subprocess.CalledProcessError(
                     ret, cmd=cmd, output=stdout_data, stderr=stderr_data
                 )
-            else:
-                cmdlog.warning(
-                    f"[Command failed: {ret}] {displayed_cmd}",
-                    extra={"command_prefix": self.command_prefix},
-                )
+            cmdlog.warning(
+                f"[Command failed: {ret}] {displayed_cmd}",
+                extra={"command_prefix": self.command_prefix},
+            )
         return subprocess.CompletedProcess(
             cmd, ret, stdout=stdout_data, stderr=stderr_data
         )
@@ -18,8 +18,7 @@ class FactStore(FactStoreBase):
     def _var_path(self, generator_name: str, name: str, shared: bool) -> Path:
         if shared:
             return self.shared_folder / generator_name / name
-        else:
-            return self.per_machine_folder / generator_name / name
+        return self.per_machine_folder / generator_name / name

     def set(
         self, generator_name: str, name: str, value: bytes, shared: bool = False
@@ -30,9 +29,8 @@ class FactStore(FactStoreBase):
             fact_path.touch()
             fact_path.write_bytes(value)
             return fact_path
-        else:
-            msg = f"in_flake fact storage is only supported for local flakes: {self.machine.flake}"
-            raise ClanError(msg)
+        msg = f"in_flake fact storage is only supported for local flakes: {self.machine.flake}"
+        raise ClanError(msg)

     def exists(self, generator_name: str, name: str, shared: bool = False) -> bool:
         return self._var_path(generator_name, name, shared).exists()
@@ -21,8 +21,7 @@ class SecretStore(SecretStoreBase):
     def _var_path(self, generator_name: str, name: str, shared: bool) -> Path:
         if shared:
             return Path(f"shared/{generator_name}/{name}")
-        else:
-            return Path(f"machines/{self.machine.name}/{generator_name}/{name}")
+        return Path(f"machines/{self.machine.name}/{generator_name}/{name}")

     def set(
         self,
@@ -43,39 +43,38 @@ def graphics_options(vm: VmConfig) -> GraphicOptions:
             #"-chardev", "socket,id=vgpu,path=/tmp/vgpu.sock",
         ], cid)
         # fmt: on
-        if not os.path.exists("/run/opengl-driver"):
-            display_options = [
-                "-vga",
-                "none",
-                "-display",
-                "gtk,gl=on",
-                "-device",
-                "virtio-gpu-gl",
-                "-display",
-                "spice-app,gl=on",
-            ]
-        else:
-            display_options = ["-display", "spice-app"]
+    if not os.path.exists("/run/opengl-driver"):
+        display_options = [
+            "-vga",
+            "none",
+            "-display",
+            "gtk,gl=on",
+            "-device",
+            "virtio-gpu-gl",
+            "-display",
+            "spice-app,gl=on",
+        ]
+    else:
+        display_options = ["-display", "spice-app"]

-        # fmt: off
-        return GraphicOptions([
-            *common,
-            *display_options,
-            "-device", "virtio-serial-pci",
-            "-chardev", "spicevmc,id=vdagent0,name=vdagent",
-            "-device", "virtserialport,chardev=vdagent0,name=com.redhat.spice.0",
-            "-device", "qemu-xhci,id=spicepass",
-            "-chardev", "spicevmc,id=usbredirchardev1,name=usbredir",
-            "-device", "usb-redir,chardev=usbredirchardev1,id=usbredirdev1",
-            "-chardev", "spicevmc,id=usbredirchardev2,name=usbredir",
-            "-device", "usb-redir,chardev=usbredirchardev2,id=usbredirdev2",
-            "-chardev", "spicevmc,id=usbredirchardev3,name=usbredir",
-            "-device", "usb-redir,chardev=usbredirchardev3,id=usbredirdev3",
-            "-device", "pci-ohci,id=smartpass",
-            "-device", "usb-ccid",
-            "-chardev", "spicevmc,id=ccid,name=smartcard",
-        ], None)
+    # fmt: off
+    return GraphicOptions([
+        *common,
+        *display_options,
+        "-device", "virtio-serial-pci",
+        "-chardev", "spicevmc,id=vdagent0,name=vdagent",
+        "-device", "virtserialport,chardev=vdagent0,name=com.redhat.spice.0",
+        "-device", "qemu-xhci,id=spicepass",
+        "-chardev", "spicevmc,id=usbredirchardev1,name=usbredir",
+        "-device", "usb-redir,chardev=usbredirchardev1,id=usbredirdev1",
+        "-chardev", "spicevmc,id=usbredirchardev2,name=usbredir",
+        "-device", "usb-redir,chardev=usbredirchardev2,id=usbredirdev2",
+        "-chardev", "spicevmc,id=usbredirchardev3,name=usbredir",
+        "-device", "usb-redir,chardev=usbredirchardev3,id=usbredirdev3",
+        "-device", "pci-ohci,id=smartpass",
+        "-device", "usb-ccid",
+        "-chardev", "spicevmc,id=ccid,name=smartcard",
+    ], None)
     # fmt: on
@@ -246,7 +246,7 @@ def test_flake_with_core(
 @pytest.fixture
 def test_local_democlan(
     monkeypatch: pytest.MonkeyPatch, temporary_home: Path
-) -> Iterator[FlakeForTest]:
+) -> FlakeForTest:
     democlan = os.getenv(key="DEMOCLAN_ROOT")
     if democlan is None:
         msg = (
@@ -258,7 +258,7 @@ def test_local_democlan(
         msg = f"DEMOCLAN_ROOT ({democlan_p}) is not a directory. This test requires the democlan directory to be present"
         raise Exception(msg)

-    yield FlakeForTest(democlan_p)
+    return FlakeForTest(democlan_p)


 @pytest.fixture
@@ -10,9 +10,8 @@ def is_valid_age_key(secret_key: str) -> bool:

     if result.returncode == 0:
         return True
-    else:
-        msg = f"Invalid age key: {secret_key}"
-        raise ValueError(msg)
+    msg = f"Invalid age key: {secret_key}"
+    raise ValueError(msg)


 def is_valid_ssh_key(secret_key: str, ssh_pub: str) -> bool:
@@ -30,6 +29,5 @@ def is_valid_ssh_key(secret_key: str, ssh_pub: str) -> bool:
             msg = f"Expected '{ssh_pub}' got '{result.stdout}' for ssh key: {secret_key}"
             raise ValueError(msg)
         return True
-    else:
-        msg = f"Invalid ssh key: {secret_key}"
-        raise ValueError(msg)
+    msg = f"Invalid ssh key: {secret_key}"
+    raise ValueError(msg)
@@ -1,11 +1,10 @@
 import types

 import pytest
-from pytest import CaptureFixture


 class CaptureOutput:
-    def __init__(self, capsys: CaptureFixture) -> None:
+    def __init__(self, capsys: pytest.CaptureFixture) -> None:
         self.capsys = capsys
         self.capsys_disabled = capsys.disabled()
         self.capsys_disabled.__enter__()
@@ -31,5 +30,5 @@ class CaptureOutput:


 @pytest.fixture
-def capture_output(capsys: CaptureFixture) -> CaptureOutput:
+def capture_output(capsys: pytest.CaptureFixture) -> CaptureOutput:
     return CaptureOutput(capsys)
@@ -143,7 +143,7 @@ def test_cast() -> None:


 @pytest.mark.parametrize(
-    "option,value,options,expected",
+    ("option", "value", "options", "expected"),
     [
         ("foo.bar", ["baz"], {"foo.bar": {"type": "str"}}, ("foo.bar", ["baz"])),
         ("foo.bar", ["baz"], {"foo": {"type": "attrs"}}, ("foo", {"bar": ["baz"]})),
@@ -28,11 +28,9 @@ def test_commit_file(git_repo: Path) -> None:
 def test_commit_file_outside_git_raises_error(git_repo: Path) -> None:
     # create a file outside the git (a temporary file)
     with tempfile.NamedTemporaryFile() as tmp:
-        # commit the file
-        # this should not fail but skip the commit
-        with pytest.raises(ClanError):
-            git.commit_file(Path(tmp.name), git_repo, "test commit")
+        # this should not fail but skip the commit
+        git.commit_file(Path(tmp.name), git_repo, "test commit")


 def test_commit_file_not_existing_raises_error(git_repo: Path) -> None:
@@ -496,7 +496,7 @@ def test_secrets(
     # user2 is not in the group anymore
     with capture_output as output:
         cli.run(["secrets", "get", "--flake", str(test_flake.path), "key"])
-        print(output.out)
+    print(output.out)

     cli.run(
         [
@@ -175,8 +175,7 @@ class VMObject(GObject.Object):
         if self.progress_bar.is_visible():
             self.progress_bar.pulse()
             return GLib.SOURCE_CONTINUE
-        else:
-            return GLib.SOURCE_REMOVE
+        return GLib.SOURCE_REMOVE

     def __start(self) -> None:
         with self._create_machine() as machine:
@@ -19,7 +19,7 @@ def wayland_compositor() -> Generator[Popen, None, None]:
 GtkProc = NewType("GtkProc", Popen)


-@pytest.fixture(scope="function")
+@pytest.fixture
 def app() -> Generator[GtkProc, None, None]:
     rapp = Popen([sys.executable, "-m", "clan_vm_manager"], text=True)
     yield GtkProc(rapp)
@@ -19,23 +19,22 @@ def map_json_type(
         return res
     if isinstance(json_type, dict):
         return map_json_type(json_type.get("type"))
-    elif json_type == "string":
+    if json_type == "string":
         return {"str"}
-    elif json_type == "integer":
+    if json_type == "integer":
         return {"int"}
-    elif json_type == "boolean":
+    if json_type == "boolean":
         return {"bool"}
-    elif json_type == "array":
+    if json_type == "array":
         assert nested_types, f"Array type not found for {parent}"
         return {f"""list[{" | ".join(nested_types)}]"""}
-    elif json_type == "object":
+    if json_type == "object":
         assert nested_types, f"dict type not found for {parent}"
         return {f"""dict[str, {" | ".join(nested_types)}]"""}
-    elif json_type == "null":
+    if json_type == "null":
         return {"None"}
-    else:
-        msg = f"Python type not found for {json_type}"
-        raise ValueError(msg)
+    msg = f"Python type not found for {json_type}"
+    raise ValueError(msg)


 known_classes = set()
@@ -116,12 +115,12 @@ def field_def_from_default_value(
             field_types=field_types | {"None"},
             default="None",
         )
-    elif isinstance(default_value, list):
+    if isinstance(default_value, list):
         return finalize_field(
             field_types=field_types,
             default_factory="list",
         )
-    elif isinstance(default_value, dict):
+    if isinstance(default_value, dict):
         serialised_types = " | ".join(field_types)
         if serialised_types == nested_class_name:
             return finalize_field(
@@ -129,28 +128,26 @@ def field_def_from_default_value(
                 default_factory=nested_class_name,
             )

-        elif "dict[str," in serialised_types:
+        if "dict[str," in serialised_types:
             return finalize_field(
                 field_types=field_types,
                 default_factory="dict",
             )
-        else:
-            return finalize_field(
-                field_types=field_types,
-                default_factory="dict",
-                type_apendix=" | dict[str,Any]",
-            )
-    elif default_value == "‹name›":
+        return finalize_field(
+            field_types=field_types,
+            default_factory="dict",
+            type_apendix=" | dict[str,Any]",
+        )
+    if default_value == "‹name›":
         return None
-    elif isinstance(default_value, str):
+    if isinstance(default_value, str):
         return finalize_field(
             field_types=field_types,
             default=f"'{default_value}'",
         )
-    else:
-        # Other default values unhandled yet.
-        msg = f"Unhandled default value for field '{field_name}' - default value: {default_value}"
-        raise ValueError(msg)
+    # Other default values unhandled yet.
+    msg = f"Unhandled default value for field '{field_name}' - default value: {default_value}"
+    raise ValueError(msg)


 def get_field_def(
@@ -20,7 +20,7 @@
       };

       perSystem =
-        { pkgs, config, ... }:
+        { config, pkgs, ... }:
         {
           packages =
             {
@@ -34,6 +34,8 @@
               classgen = pkgs.callPackage ./classgen { };
               zerotierone = pkgs.callPackage ./zerotierone { };
             }
-            // lib.optionalAttrs (pkgs.stdenv.isLinux) { nixos-facter = pkgs.callPackage ./nixos-facter { }; };
+            // lib.optionalAttrs pkgs.stdenv.isLinux {
+              nixos-facter = pkgs.callPackage ./nixos-facter { };
+            };
         };
     }
@@ -39,6 +39,7 @@ def convert_bytearray_to_string(byte_array: str) -> str:
     if byte_array.startswith('"@ByteArray(') and byte_array.endswith(')"'):
         byte_array = byte_array[12:-2]
+        return byte_array.replace("\\n", "\n")
     return byte_array


 # this must be created before moonlight is first run
pkgs/zerotierone/Cargo.lock (generated, 3562 lines): file diff suppressed because it is too large.
@@ -1,53 +1,7 @@
-{
-  callPackage,
-  zerotierone,
-  fetchFromGitHub,
-  lib,
-  stdenv,
-}:
-let
-  importCargoLock = callPackage ./import-cargo-lock.nix { };
-in
-zerotierone.overrideAttrs (old: {
-  name = "zerotierone-1.14.0-unstable-2024-07-31";
-  src = fetchFromGitHub {
-    owner = "zerotier";
-    repo = "ZeroTierOne";
-    rev = "f176e2539e10e8c0f61eb1d2e1f0e690a267a646";
-    hash = "sha256-pGozwaBy9eMA8izYtGhhmJeHzGjHFLID7WC01977XxQ=";
-  };
-  cargoDeps = importCargoLock {
-    lockFile = ./Cargo.lock;
-    outputHashes = {
-      "jwt-0.16.0" = "sha256-P5aJnNlcLe9sBtXZzfqHdRvxNfm6DPBcfcKOVeLZxcM=";
-      "rustfsm-0.1.0" = "sha256-AYMk31QuwB1R/yr1wNl9MSWL52ERJMtkR4aSPf2waWs=";
-    };
-  };
-  patches = [ ];
-  postPatch = "cp ${./Cargo.lock} Cargo.lock";
-
-  preBuild =
-    if stdenv.isDarwin then
-      ''
-        makeFlagsArray+=("ARCH_FLAGS=") # disable multi-arch build
-        if ! grep -q MACOS_VERSION_MIN=10.13 make-mac.mk; then
-          echo "You may need to update MACOSX_DEPLOYMENT_TARGET to match the value in make-mac.mk"
-          exit 1
-        fi
-        (cd rustybits && MACOSX_DEPLOYMENT_TARGET=10.13 cargo build -p zeroidc --release)
-
-        cp \
-          ./rustybits/target/${stdenv.hostPlatform.rust.rustcTarget}/release/libzeroidc.a \
-          ./rustybits/target
-
-        # zerotier uses the "FORCE" target as a phony target to force rebuilds.
-        # We don't want to rebuild libzeroidc.a as we build want to build this library ourself for a single architecture
-        touch FORCE
-      ''
-    else
-      old.preBuild;
-  meta = old.meta // {
-    # halalify zerotierone
+{ zerotierone, lib }:
+# halalify zerotierone
+zerotierone.overrideAttrs (_old: {
+  meta = _old.meta // {
     license = lib.licenses.apsl20;
   };
 })
@@ -1,309 +0,0 @@
{
  fetchgit,
  fetchurl,
  lib,
  writers,
  python3Packages,
  runCommand,
  cargo,
  jq,
}:

{
  # Cargo lock file
  lockFile ? null,

  # Cargo lock file contents as string
  lockFileContents ? null,

  # Allow `builtins.fetchGit` to be used to not require hashes for git dependencies
  allowBuiltinFetchGit ? false,

  # Additional registries to pull sources from
  #   { "https://<registry index URL>" = "https://<registry download URL>"; }
  #   or if the registry is using the new sparse protocol
  #   { "sparse+https://<registry download URL>" = "https://<registry download URL>"; }
  #   where:
  #   - "index URL" is the "index" value of the configuration entry for that registry
  #     https://doc.rust-lang.org/cargo/reference/registries.html#using-an-alternate-registry
  #   - "download URL" is the "dl" value of its associated index configuration
  #     https://doc.rust-lang.org/cargo/reference/registry-index.html#index-configuration
  extraRegistries ? { },

  # Hashes for git dependencies.
  outputHashes ? { },
}@args:

assert (lockFile == null) != (lockFileContents == null);

let
  # Parse a git source into different components.
  parseGit =
    src:
    let
      parts = builtins.match ''git\+([^?]+)(\?(rev|tag|branch)=(.*))?#(.*)'' src;
      type = builtins.elemAt parts 2; # rev, tag or branch
      value = builtins.elemAt parts 3;
    in
    if parts == null then
      null
    else
      {
        url = builtins.elemAt parts 0;
        sha = builtins.elemAt parts 4;
      }
      // lib.optionalAttrs (type != null) { inherit type value; };

  # shadows args.lockFileContents
  lockFileContents = if lockFile != null then builtins.readFile lockFile else args.lockFileContents;

  parsedLockFile = builtins.fromTOML lockFileContents;

  packages = parsedLockFile.package;

  # There is no source attribute for the source package itself. But
  # since we do not want to vendor the source package anyway, we can
  # safely skip it.
  depPackages = builtins.filter (p: p ? "source") packages;

  # Create dependent crates from packages.
  #
  # Force evaluation of the git SHA -> hash mapping, so that an error is
  # thrown if there are stale hashes. We cannot rely on gitShaOutputHash
  # being evaluated otherwise, since there could be no git dependencies.
  depCrates = builtins.deepSeq gitShaOutputHash (builtins.map mkCrate depPackages);

  # Map package name + version to git commit SHA for packages with a git source.
  namesGitShas = builtins.listToAttrs (
    builtins.map nameGitSha (builtins.filter (pkg: lib.hasPrefix "git+" pkg.source) depPackages)
  );

  nameGitSha =
    pkg:
    let
      gitParts = parseGit pkg.source;
    in
    {
      name = "${pkg.name}-${pkg.version}";
      value = gitParts.sha;
    };

  # Convert the attrset provided through the `outputHashes` argument to a
  # a mapping from git commit SHA -> output hash.
  #
  # There may be multiple different packages with different names
  # originating from the same git repository (typically a Cargo
  # workspace). By using the git commit SHA as a universal identifier,
  # the user does not have to specify the output hash for every package
  # individually.
  gitShaOutputHash = lib.mapAttrs' (
    nameVer: hash:
    let
      unusedHash = throw "A hash was specified for ${nameVer}, but there is no corresponding git dependency.";
      rev = namesGitShas.${nameVer} or unusedHash;
    in
    {
      name = rev;
      value = hash;
    }
  ) outputHashes;

  # We can't use the existing fetchCrate function, since it uses a
  # recursive hash of the unpacked crate.
  fetchCrate =
    pkg: downloadUrl:
    let
      checksum =
        pkg.checksum or parsedLockFile.metadata."checksum ${pkg.name} ${pkg.version} (${pkg.source})";
    in
    assert lib.assertMsg (checksum != null) ''
      Package ${pkg.name} does not have a checksum.
    '';
    fetchurl {
      name = "crate-${pkg.name}-${pkg.version}.tar.gz";
      url = "${downloadUrl}/${pkg.name}/${pkg.version}/download";
      sha256 = checksum;
    };

  registries = {
    "https://github.com/rust-lang/crates.io-index" = "https://crates.io/api/v1/crates";
  } // extraRegistries;

  # Replaces values inherited by workspace members.
  replaceWorkspaceValues = writers.writePython3 "replace-workspace-values" {
    libraries = with python3Packages; [
      tomli
      tomli-w
    ];
    flakeIgnore = [
      "E501"
      "W503"
    ];
  } (builtins.readFile ./replace-workspace-values.py);

  # Fetch and unpack a crate.
  mkCrate =
    pkg:
    let
      gitParts = parseGit pkg.source;
      registryIndexUrl = lib.removePrefix "registry+" pkg.source;
    in
    if
      (lib.hasPrefix "registry+" pkg.source || lib.hasPrefix "sparse+" pkg.source)
      && builtins.hasAttr registryIndexUrl registries
    then
      let
        crateTarball = fetchCrate pkg registries.${registryIndexUrl};
      in
      runCommand "${pkg.name}-${pkg.version}" { } ''
        mkdir $out
        tar xf "${crateTarball}" -C $out --strip-components=1

        # Cargo is happy with largely empty metadata.
        printf '{"files":{},"package":"${crateTarball.outputHash}"}' > "$out/.cargo-checksum.json"
      ''
    else if gitParts != null then
      let
        missingHash = throw ''
          No hash was found while vendoring the git dependency ${pkg.name}-${pkg.version}. You can add
          a hash through the `outputHashes` argument of `importCargoLock`:

          outputHashes = {
            "${pkg.name}-${pkg.version}" = "<hash>";
          };

          If you use `buildRustPackage`, you can add this attribute to the `cargoLock`
          attribute set.
        '';
        tree =
          if gitShaOutputHash ? ${gitParts.sha} then
            fetchgit {
              inherit (gitParts) url;
              rev = gitParts.sha; # The commit SHA is always available.
              sha256 = gitShaOutputHash.${gitParts.sha};
            }
          else if allowBuiltinFetchGit then
            builtins.fetchGit {
              inherit (gitParts) url;
              rev = gitParts.sha;
              allRefs = true;
              submodules = true;
            }
          else
            missingHash;
      in
      runCommand "${pkg.name}-${pkg.version}" { } ''
        tree=${tree}

        # If the target package is in a workspace, or if it's the top-level
        # crate, we should find the crate path using `cargo metadata`.
        # Some packages do not have a Cargo.toml at the top-level,
        # but only in nested directories.
        # Only check the top-level Cargo.toml, if it actually exists
        if [[ -f $tree/Cargo.toml ]]; then
          crateCargoTOML=$(${cargo}/bin/cargo metadata --format-version 1 --no-deps --manifest-path $tree/Cargo.toml | \
          ${jq}/bin/jq -r '.packages[] | select(.name == "${pkg.name}") | .manifest_path')
        fi

        # If the repository is not a workspace the package might be in a subdirectory.
        if [[ -z $crateCargoTOML ]]; then
          for manifest in $(find $tree -name "Cargo.toml"); do
            echo Looking at $manifest
            crateCargoTOML=$(${cargo}/bin/cargo metadata --format-version 1 --no-deps --manifest-path "$manifest" | ${jq}/bin/jq -r '.packages[] | select(.name == "${pkg.name}") | .manifest_path' || :)
            if [[ ! -z $crateCargoTOML ]]; then
              break
            fi
          done

          if [[ -z $crateCargoTOML ]]; then
            >&2 echo "Cannot find path for crate '${pkg.name}-${pkg.version}' in the tree in: $tree"
            exit 1
          fi
        fi

        echo Found crate ${pkg.name} at $crateCargoTOML
        tree=$(dirname $crateCargoTOML)

        cp -prvL "$tree/" $out
        chmod u+w $out

        if grep -q workspace "$out/Cargo.toml"; then
          chmod u+w "$out/Cargo.toml"
          ${replaceWorkspaceValues} "$out/Cargo.toml" "$(${cargo}/bin/cargo metadata --format-version 1 --no-deps --manifest-path $crateCargoTOML | ${jq}/bin/jq -r .workspace_root)/Cargo.toml"
        fi

        # Cargo is happy with empty metadata.
        printf '{"files":{},"package":null}' > "$out/.cargo-checksum.json"

        # Set up configuration for the vendor directory.
        cat > $out/.cargo-config <<EOF
        [source."${gitParts.url}${
          lib.optionalString (gitParts ? type) "?${gitParts.type}=${gitParts.value}"
        }"]
        git = "${gitParts.url}"
        ${lib.optionalString (gitParts ? type) "${gitParts.type} = \"${gitParts.value}\""}
        replace-with = "vendored-sources"
        EOF
      ''
    else
      throw "Cannot handle crate source: ${pkg.source}";

  vendorDir =
    runCommand "cargo-vendor-dir"
      (
        if lockFile == null then
          {
            inherit lockFileContents;
            passAsFile = [ "lockFileContents" ];
          }
        else
          {
            passthru = {
              inherit lockFile;
            };
          }
      )
      ''
        mkdir -p $out/.cargo

        ${
          if lockFile != null then
            "ln -s ${lockFile} $out/Cargo.lock"
          else
            "cp $lockFileContentsPath $out/Cargo.lock"
        }

        cat > $out/.cargo/config <<EOF
        [source.crates-io]
        replace-with = "vendored-sources"

        [source.vendored-sources]
        directory = "cargo-vendor-dir"
        EOF

        declare -A keysSeen

        for registry in ${toString (builtins.attrNames extraRegistries)}; do
          cat >> $out/.cargo/config <<EOF

        [source."$registry"]
        registry = "$registry"
        replace-with = "vendored-sources"
        EOF
        done

        for crate in ${toString depCrates}; do
          # Link the crate directory, removing the output path hash from the destination.
          ln -s "$crate" $out/$(basename "$crate" | cut -c 34-)

          if [ -e "$crate/.cargo-config" ]; then
            key=$(sed 's/\[source\."\(.*\)"\]/\1/; t; d' < "$crate/.cargo-config")
            if [[ -z ''${keysSeen[$key]} ]]; then
              keysSeen[$key]=1
              cat "$crate/.cargo-config" >> $out/.cargo/config
            fi
          fi
        done
      '';
in
vendorDir
@@ -1,130 +0,0 @@
# This script implements the workspace inheritance mechanism described
# here: https://doc.rust-lang.org/cargo/reference/workspaces.html#the-package-table
#
# Please run `mypy --strict`, `black`, and `isort --profile black` on this after editing, thanks!

import sys
from typing import Any, Literal, assert_type

import tomli
import tomli_w


def load_file(path: str) -> dict[str, Any]:
    with open(path, "rb") as f:
        return tomli.load(f)


# This replicates the dependency merging logic from Cargo.
# See `inner_dependency_inherit_with`:
# https://github.com/rust-lang/cargo/blob/4de0094ac78743d2c8ff682489e35c8a7cafe8e4/src/cargo/util/toml/mod.rs#L982
def replace_key(
    workspace_manifest: dict[str, Any],
    table: dict[str, Any],
    section: Literal["package", "dependencies"],
    key: str,
) -> bool:
    if not isinstance(table[key], dict) or table[key].get("workspace") is not True:
        return False
    print("replacing " + key)

    local_dep = table[key]
    del local_dep["workspace"]

    workspace_dep: str | dict[str, Any] = workspace_manifest[section][key]

    if section == "package":
        table[key] = workspace_dep
        return True

    _ = assert_type(section, Literal["dependencies"])

    if isinstance(workspace_dep, str):
        workspace_dep = {"version": workspace_dep}

    final: dict[str, Any] = workspace_dep.copy()

    merged_features = local_dep.pop("features", []) + workspace_dep.get("features", [])
    if merged_features:
        final["features"] = merged_features

    local_default_features = local_dep.pop("default-features", None)
    workspace_default_features = workspace_dep.get("default-features")

    if not workspace_default_features and local_default_features:
        final["default-features"] = True

    optional = local_dep.pop("optional", False)
    if optional:
        final["optional"] = True

    if local_dep:
        msg = f"Unhandled keys in inherited dependency {key}: {local_dep}"
        raise Exception(msg)

    table[key] = final

    # crate_features = local_dep.get("features", [])

    # local_dep.update(workspace_copy)
    # merged_features = crate_features + workspace_copy.get("features", [])
    # if len(merged_features) > len(crate_features):
    #     local_dep["features"] = list(dict.fromkeys(merged_features))

    return True


def replace_dependencies(
    workspace_manifest: dict[str, Any], root: dict[str, Any]
) -> bool:
    changed = False

    for key in ["dependencies", "dev-dependencies", "build-dependencies"]:
        deps = root.get(key, {})
        for k in deps:
            changed |= replace_key(workspace_manifest, deps, "dependencies", k)

    return changed


def main() -> None:
    top_cargo_toml = load_file(sys.argv[2])

    if "workspace" not in top_cargo_toml:
        # If top_cargo_toml is not a workspace manifest, then this script was probably
        # ran on something that does not actually use workspace dependencies
        msg = f"{sys.argv[2]} is not a workspace manifest."
        raise Exception(msg)

    crate_manifest = load_file(sys.argv[1])
    workspace_manifest = top_cargo_toml["workspace"]

    if "workspace" in crate_manifest:
        print(f"{sys.argv[1]} is a workspace manifest, skipping", file=sys.stderr)
        return

    changed = False

    for key in crate_manifest["package"].keys():
        changed |= replace_key(
            workspace_manifest, crate_manifest["package"], "package", key
        )

    changed |= replace_dependencies(workspace_manifest, crate_manifest)

    for value in crate_manifest.get("target", {}).values():
        changed |= replace_dependencies(workspace_manifest, value)

    if crate_manifest.get("lints", {}).get("workspace") is True:
        changed = True
        crate_manifest["lints"] = workspace_manifest["lints"]

    if not changed:
        return print(f"{sys.argv[1]} is unchanged, skipping", file=sys.stderr)

    with open(sys.argv[1], "wb") as f:
        tomli_w.dump(crate_manifest, f)


if __name__ == "__main__":
    main()
@@ -27,11 +27,15 @@ lint.select = [
   "LOG",
   "N",
   "PIE",
+  "PT",
   "PYI",
   "Q",
   "RET",
   "RSE",
   "RUF",
   "T10",
   "TID",
   "U",
   "YTT",
 ]
-lint.ignore = ["E501", "E402", "E731", "ANN101", "ANN401", "A003"]
+lint.ignore = ["E501", "E402", "E731", "ANN101", "ANN401", "A003", "RET504"]
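The configuration hunk above is what drives the pytest-related edits earlier in the diff, and the new RET504 entry in lint.ignore permits the `value = ...; return value` pattern that several functions above retain. A minimal sketch of the two PT (flake8-pytest-style) fixes applied, with illustrative names only:

    import pytest

    # PT003: scope="function" is the default, so the bare decorator is preferred
    @pytest.fixture
    def app() -> object:
        return object()

    # PT006: parametrize argument names belong in a tuple, not one comma-separated string
    @pytest.mark.parametrize(
        ("option", "value"),
        [("foo", 1), ("bar", 2)],
    )
    def test_option(option: str, value: int) -> None:
        assert option in {"foo", "bar"}
        assert isinstance(value, int)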