Reworked machines list, and history commands
@@ -3,7 +3,6 @@ import argparse
import dataclasses
import datetime
import json
import os
from pathlib import Path
from typing import Any

@@ -22,11 +21,13 @@ class EnhancedJSONEncoder(json.JSONEncoder):

@dataclasses.dataclass
class HistoryEntry:
    path: str
    last_used: str
    dir_datetime: str
    flake: FlakeConfig

    def __post_init__(self) -> None:
        if isinstance(self.flake, dict):
            self.flake = FlakeConfig(**self.flake)

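The body of EnhancedJSONEncoder is collapsed in the hunk header above; add_history below hands it to json.dumps via cls=. A minimal sketch of a dataclass-aware encoder of that shape, assuming it only needs to turn dataclasses into plain dicts (the Sketch suffix marks it as hypothetical, not the real class):

import dataclasses
import json
from typing import Any

class EnhancedJSONEncoderSketch(json.JSONEncoder):
    def default(self, o: Any) -> Any:
        # Serialize dataclasses (HistoryEntry, FlakeConfig) as plain dicts;
        # anything else falls through to the default behaviour.
        if dataclasses.is_dataclass(o):
            return dataclasses.asdict(o)
        return super().default(o)

On the way back in, HistoryEntry.__post_init__ above rebuilds the nested FlakeConfig from that dict.
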
def list_history() -> list[HistoryEntry]:
    logs: list[HistoryEntry] = []
@@ -45,35 +46,26 @@ def list_history() -> list[HistoryEntry]:
    return logs

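The middle of list_history is collapsed by the hunk above. A plausible sketch of that read path, assuming the file holds the JSON list that add_history writes below and reusing the module's user_history_file and locked_open helpers (error handling elided; the real body may differ):

def list_history_sketch() -> list[HistoryEntry]:
    logs: list[HistoryEntry] = []
    if not user_history_file().exists():
        return logs
    # Assumes locked_open also accepts a read mode.
    with locked_open(user_history_file(), "r") as f:
        content = f.read()
        if content:
            # HistoryEntry.__post_init__ turns each nested "flake" dict
            # back into a FlakeConfig instance.
            logs = [HistoryEntry(**entry) for entry in json.loads(content)]
    return logs
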
def get_dir_time(path: Path) -> str:
    # Get the last modified dir time in seconds
    dir_mtime = os.path.getmtime(path)
    dir_datetime = datetime.datetime.fromtimestamp(dir_mtime).isoformat()
    return dir_datetime

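FlakeConfig itself is not part of this diff; add_history below fills it via inspect_flake, and update_history further down reads flake.flake_url and flake.nar_hash. From those accesses its minimal shape is roughly the following, though the real dataclass very likely carries more fields:

@dataclasses.dataclass
class FlakeConfigSketch:
    # Only the fields this diff actually touches; an assumption, not the
    # real definition.
    flake_url: str
    nar_hash: str
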
def add_history(path: Path) -> list[HistoryEntry]:
    user_history_file().parent.mkdir(parents=True, exist_ok=True)
    logs = list_history()
    found = False

    for entry in logs:
        if entry.path == str(path):
        if entry.flake.flake_url == str(path):
            found = True
            entry.last_used = datetime.datetime.now().isoformat()

        if found:
            break

    flake = inspect_flake(path, "defaultVM")
    flake.flake_url = str(flake.flake_url)
    dir_datetime = get_dir_time(path)

    history = HistoryEntry(
        flake=flake,
        dir_datetime=dir_datetime,
        path=str(path),
        last_used=datetime.datetime.now().isoformat(),
    )
    if not found:
        logs.append(history)
    logs.append(history)

    with locked_open(user_history_file(), "w+") as f:
        f.write(json.dumps(logs, cls=EnhancedJSONEncoder, indent=4))

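locked_open comes from ..locked_open and is not shown in this diff. A minimal sketch of such a helper, assuming a POSIX advisory lock via fcntl (the real implementation may differ):

import fcntl
from collections.abc import Iterator
from contextlib import contextmanager
from pathlib import Path
from typing import IO

@contextmanager
def locked_open_sketch(path: Path, mode: str = "r") -> Iterator[IO[str]]:
    # Hold an exclusive lock for the lifetime of the context so concurrent
    # writers cannot interleave partial JSON in the history file.
    with open(path, mode) as fd:
        fcntl.flock(fd, fcntl.LOCK_EX)
        try:
            yield fd
        finally:
            fcntl.flock(fd, fcntl.LOCK_UN)
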
@@ -6,7 +6,7 @@ from .add import list_history

def list_history_command(args: argparse.Namespace) -> None:
    for history_entry in list_history():
        print(history_entry.path)
        print(history_entry.flake.flake_url)


# takes a (sub)parser and configures it

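The "# takes a (sub)parser and configures it" comment precedes the registration function, whose body is not included in this diff. A hedged sketch of what such a registration usually looks like; the function name is hypothetical:

def register_list_parser_sketch(parser: argparse.ArgumentParser) -> None:
    # Route the subcommand to list_history_command when it is invoked.
    parser.set_defaults(func=list_history_command)
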
@@ -3,11 +3,11 @@ import argparse
import copy
import datetime
import json
from pathlib import Path

from ..dirs import user_history_file
from ..locked_open import locked_open
from .add import EnhancedJSONEncoder, HistoryEntry, get_dir_time, list_history
from ..nix import nix_metadata
from .add import EnhancedJSONEncoder, HistoryEntry, list_history


def update_history() -> list[HistoryEntry]:
@@ -16,11 +16,17 @@ def update_history() -> list[HistoryEntry]:
    new_logs = []
    for entry in logs:
        new_entry = copy.deepcopy(entry)
        new_time = get_dir_time(Path(entry.path))
        if new_time != entry.dir_datetime:
            print(f"Updating {entry.path} from {entry.dir_datetime} to {new_time}")
            new_entry.dir_datetime = new_time

        meta = nix_metadata(entry.flake.flake_url)
        new_hash = meta["locked"]["narHash"]
        if new_hash != entry.flake.nar_hash:
            print(
                f"Updating {entry.flake.flake_url} from {entry.flake.nar_hash} to {new_hash}"
            )
            new_entry.last_used = datetime.datetime.now().isoformat()
            new_entry.flake.nar_hash = new_hash

        # TODO: Delete stale entries
        new_logs.append(new_entry)

    with locked_open(user_history_file(), "w+") as f:
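nix_metadata is imported from ..nix and only its return value is used here (meta["locked"]["narHash"]). A sketch of what such a wrapper could look like, assuming it shells out to `nix flake metadata --json`, whose output includes the locked input and its narHash:

import json
import subprocess
from typing import Any

def nix_metadata_sketch(flake_url: str) -> dict[str, Any]:
    # `nix flake metadata --json <flake>` prints the flake's metadata,
    # including the "locked" attribute with its "narHash", to stdout.
    proc = subprocess.run(
        ["nix", "flake", "metadata", "--json", flake_url],
        capture_output=True,
        text=True,
        check=True,
    )
    return json.loads(proc.stdout)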