enable comprehensions linting rules
@@ -158,7 +158,7 @@ API.register(open_file)
     "$comment": "An object containing API methods. ",
     "type": "object",
     "additionalProperties": False,
-    "required": [func_name for func_name in self._registry.keys()],
+    "required": list(self._registry.keys()),
     "properties": {},
 }
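Note (not part of the diff): the removed/added pairs in this commit follow the comprehension lint family (likely ruff's flake8-comprehensions C4 rules). This hunk drops an identity comprehension (likely C416), since list() says the same thing directly. A minimal sketch with a hypothetical stand-in for self._registry:

# Sketch: identity comprehension vs. a plain list() call.
_registry = {"open_file": print, "close_file": print}  # hypothetical registry
required = [func_name for func_name in _registry.keys()]  # flagged
required = list(_registry.keys())                         # preferred rewrite
assert required == ["open_file", "close_file"]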
@@ -30,7 +30,7 @@ def get_command(args: argparse.Namespace) -> None:
     # the raw_facts are bytestrings making them not json serializable
     raw_facts = get_all_facts(machine)
-    facts = dict()
+    facts = {}
     for key in raw_facts["TODO"]:
         facts[key] = raw_facts["TODO"][key].decode("utf8")
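Note (not part of the diff): several hunks replace dict() calls, with or without keyword arguments, by dict literals (likely C408); the literal builds the same object without a name lookup. A minimal sketch with a hypothetical fact value:

# Sketch: dict() and {} build the same empty dict; the literal is the lint-preferred form.
facts = dict()  # flagged
facts = {}      # preferred rewrite
facts["hostname"] = b"example".decode("utf8")  # hypothetical value, mirroring the decode above
assert facts == {"hostname": "example"}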
@@ -123,7 +123,7 @@ def flash_machine(
     if system_config.ssh_keys_path:
         root_keys = []
-        for key_path in map(lambda x: Path(x), system_config.ssh_keys_path):
+        for key_path in (Path(x) for x in system_config.ssh_keys_path):
             try:
                 root_keys.append(key_path.read_text())
             except OSError as e:
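Note (not part of the diff): mapping a lambda over an iterable is flagged (likely C417); the diff uses a generator expression instead (plain map(Path, ...) would also satisfy the rule). A minimal sketch with hypothetical paths:

from pathlib import Path

ssh_keys_path = ["/tmp/a.pub", "/tmp/b.pub"]   # hypothetical key paths
paths = map(lambda x: Path(x), ssh_keys_path)  # flagged
paths = (Path(x) for x in ssh_keys_path)       # rewrite used in the diff
assert [p.name for p in paths] == ["a.pub", "b.pub"]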
@@ -10,7 +10,7 @@ T = TypeVar("T")
 class MachineGroup:
     def __init__(self, machines: list[Machine]) -> None:
-        self.group = HostGroup(list(m.target_host for m in machines))
+        self.group = HostGroup([m.target_host for m in machines])
 
     def run_function(
         self, func: Callable[[Machine], T], check: bool = True
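Note (not part of the diff): a generator expression whose only consumer is list() is expressed directly as a list comprehension (likely C400). A minimal sketch with a hypothetical stand-in for Machine:

from dataclasses import dataclass

@dataclass
class FakeMachine:      # hypothetical stand-in for Machine
    target_host: str

machines = [FakeMachine("host-a"), FakeMachine("host-b")]
hosts = list(m.target_host for m in machines)  # flagged
hosts = [m.target_host for m in machines]      # preferred rewrite
assert hosts == ["host-a", "host-b"]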
@@ -144,7 +144,7 @@ class Machine:
         config = nix_config()
         system = config["system"]
 
-        file_info = dict()
+        file_info = {}
         with NamedTemporaryFile(mode="w") as config_json:
             if extra_config is not None:
                 json.dump(extra_config, config_json, indent=2)
@@ -103,7 +103,7 @@ def update_group_keys(flake_dir: Path, group: str) -> list[Path]:
         if (secret / "groups" / group).is_symlink():
            updated_paths += update_keys(
                secret,
-                list(sorted(secrets.collect_keys_for_path(secret))),
+                sorted(secrets.collect_keys_for_path(secret)),
            )
     return updated_paths
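Note (not part of the diff): this and several later hunks repeat one pattern: sorted() already returns a new list, so wrapping it in list() is redundant (likely C413) and the wrapper can be dropped with no behavioral change. A minimal sketch with hypothetical recipient keys:

keys = {"age1xyz", "age1abc"}   # hypothetical recipient keys
ordered = list(sorted(keys))    # flagged: sorted() already returns a new list
ordered = sorted(keys)          # preferred rewrite, identical result
assert ordered == ["age1abc", "age1xyz"]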
@@ -42,7 +42,7 @@ def update_secrets(
         changed_files.extend(
             update_keys(
                 secret_path,
-                list(sorted(collect_keys_for_path(secret_path))),
+                sorted(collect_keys_for_path(secret_path)),
             )
         )
     return changed_files
@@ -69,7 +69,7 @@ def collect_keys_for_type(folder: Path) -> set[str]:
 
 
 def collect_keys_for_path(path: Path) -> set[str]:
-    keys = set([])
+    keys = set()
     keys.update(collect_keys_for_type(path / "machines"))
     keys.update(collect_keys_for_type(path / "users"))
     groups = path / "groups"
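Note (not part of the diff): set([]) allocates a throwaway empty list just to feed set() (likely C405); since Python has no empty-set literal, the bare set() call is the rewrite. A minimal sketch with hypothetical keys:

keys = set([])  # flagged: allocates a throwaway list
keys = set()    # preferred rewrite
keys.update({"machine-key", "user-key"})  # hypothetical keys
assert keys == {"machine-key", "user-key"}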
@@ -99,7 +99,7 @@ def encrypt_secret(
     if add_users is None:
         add_users = []
     key = ensure_sops_key(flake_dir)
-    recipient_keys = set([])
+    recipient_keys = set()
 
     files_to_commit = []
     for user in add_users:
@@ -146,7 +146,7 @@ def encrypt_secret(
         )
 
     secret_path = secret_path / "secret"
-    encrypt_file(secret_path, value, list(sorted(recipient_keys)), meta)
+    encrypt_file(secret_path, value, sorted(recipient_keys), meta)
     files_to_commit.append(secret_path)
     commit_files(
         files_to_commit,
@@ -226,7 +226,7 @@ def allow_member(
     changed.extend(
         update_keys(
             group_folder.parent,
-            list(sorted(collect_keys_for_path(group_folder.parent))),
+            sorted(collect_keys_for_path(group_folder.parent)),
         )
     )
     return changed
@@ -254,7 +254,7 @@ def disallow_member(group_folder: Path, name: str) -> list[Path]:
         os.rmdir(group_folder.parent)
 
     return update_keys(
-        target.parent.parent, list(sorted(collect_keys_for_path(group_folder.parent)))
+        target.parent.parent, sorted(collect_keys_for_path(group_folder.parent))
     )
@@ -116,7 +116,7 @@ def ensure_sops_key(flake_dir: Path) -> SopsKey:
 def sops_manifest(keys: list[str]) -> Iterator[Path]:
     with NamedTemporaryFile(delete=False, mode="w") as manifest:
         json.dump(
-            dict(creation_rules=[dict(key_groups=[dict(age=keys)])]), manifest, indent=2
+            {"creation_rules": [{"key_groups": [{"age": keys}]}]}, manifest, indent=2
        )
         manifest.flush()
         yield Path(manifest.name)
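Note (not part of the diff): the nested dict(...) calls and the literal form above serialize identically, so the generated sops manifest is unchanged by this rewrite. A minimal sketch with a hypothetical age key:

import json

keys = ["age1examplekey"]   # hypothetical age key
old = dict(creation_rules=[dict(key_groups=[dict(age=keys)])])
new = {"creation_rules": [{"key_groups": [{"age": keys}]}]}
assert json.dumps(old) == json.dumps(new)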
@@ -222,12 +222,12 @@ class Host:
         for line in lines:
             if not is_err:
                 cmdlog.info(
-                    line, extra=dict(command_prefix=self.command_prefix)
+                    line, extra={"command_prefix": self.command_prefix}
                 )
                 pass
             else:
                 cmdlog.error(
-                    line, extra=dict(command_prefix=self.command_prefix)
+                    line, extra={"command_prefix": self.command_prefix}
                 )
         print_buf = ""
         last_output = time.time()
@@ -248,7 +248,7 @@ class Host:
                 elapsed_msg = time.strftime("%H:%M:%S", time.gmtime(elapsed))
                 cmdlog.warn(
                     f"still waiting for '{displayed_cmd}' to finish... ({elapsed_msg} elapsed)",
-                    extra=dict(command_prefix=self.command_prefix),
+                    extra={"command_prefix": self.command_prefix},
                 )
 
     def handle_fd(fd: IO[Any] | None, readlist: list[IO[Any]]) -> str:
@@ -350,7 +350,7 @@ class Host:
             else:
                 cmdlog.warning(
                     f"[Command failed: {ret}] {displayed_cmd}",
-                    extra=dict(command_prefix=self.command_prefix),
+                    extra={"command_prefix": self.command_prefix},
                 )
         return subprocess.CompletedProcess(
             cmd, ret, stdout=stdout_data, stderr=stderr_data
@@ -386,9 +386,7 @@ class Host:
             cmd = [cmd]
             shell = True
         displayed_cmd = " ".join(cmd)
-        cmdlog.info(
-            f"$ {displayed_cmd}", extra=dict(command_prefix=self.command_prefix)
-        )
+        cmdlog.info(f"$ {displayed_cmd}", extra={"command_prefix": self.command_prefix})
         return self._run(
             cmd,
             displayed_cmd,
@@ -446,9 +444,7 @@ class Host:
             displayed_cmd += " ".join(cmd)
         else:
             displayed_cmd += cmd
-        cmdlog.info(
-            f"$ {displayed_cmd}", extra=dict(command_prefix=self.command_prefix)
-        )
+        cmdlog.info(f"$ {displayed_cmd}", extra={"command_prefix": self.command_prefix})
 
         bash_cmd = export_cmd
         bash_args = []
@@ -624,7 +620,7 @@ class HostGroup:
             if e:
                 cmdlog.error(
                     f"failed with: {e}",
-                    extra=dict(command_prefix=result.host.command_prefix),
+                    extra={"command_prefix": result.host.command_prefix},
                 )
                 errors += 1
         if errors > 0:
@@ -653,19 +649,19 @@ class HostGroup:
             fn = self._run_local if local else self._run_remote
             thread = Thread(
                 target=fn,
-                kwargs=dict(
-                    results=results,
-                    cmd=cmd,
-                    host=host,
-                    stdout=stdout,
-                    stderr=stderr,
-                    extra_env=extra_env,
-                    cwd=cwd,
-                    check=check,
-                    timeout=timeout,
-                    verbose_ssh=verbose_ssh,
-                    tty=tty,
-                ),
+                kwargs={
+                    "results": results,
+                    "cmd": cmd,
+                    "host": host,
+                    "stdout": stdout,
+                    "stderr": stderr,
+                    "extra_env": extra_env,
+                    "cwd": cwd,
+                    "check": check,
+                    "timeout": timeout,
+                    "verbose_ssh": verbose_ssh,
+                    "tty": tty,
+                },
             )
             thread.start()
             threads.append(thread)
@@ -67,9 +67,9 @@ class SecretStore(SecretStoreBase):
             value,
             add_machines=[self.machine.name],
             add_groups=groups,
-            meta=dict(
-                deploy=deployed,
-            ),
+            meta={
+                "deploy": deployed,
+            },
         )
         return path
@@ -208,7 +208,7 @@ def run_command(
 
     vm: VmConfig = inspect_vm(machine=machine_obj)
 
-    portmap = [(h, g) for h, g in (p.split(":") for p in args.publish)]
+    portmap = [p.split(":") for p in args.publish]
 
     run_vm(vm, nix_options=args.option, portmap=portmap)
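Note (not part of the diff): dropping the unpack/repack comprehension in the final hunk means each portmap entry is now the two-element list returned by str.split rather than a tuple; the values are identical, and the element type only matters if a consumer checks for tuples. A minimal sketch with hypothetical --publish arguments:

publish = ["8080:80", "2222:22"]   # hypothetical --publish arguments

old_portmap = [(h, g) for h, g in (p.split(":") for p in publish)]  # tuples
new_portmap = [p.split(":") for p in publish]                       # lists

assert old_portmap == [("8080", "80"), ("2222", "22")]
assert new_portmap == [["8080", "80"], ["2222", "22"]]
assert [tuple(x) for x in new_portmap] == old_portmap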