From 155a1ee98c95955378956fff9605389fe1a8bd44 Mon Sep 17 00:00:00 2001
From: Qubasa
Date: Mon, 29 Jan 2024 13:40:30 +0700
Subject: [PATCH] Secret mgmt problem

---
 .../clan-vm-manager.code-workspace       |  3 +-
 .../clan_vm_manager/models/executor.py   |  1 -
 .../clan_vm_manager/models/use_vms.py    | 81 +++++++++++--------
 .../clan_vm_manager/views/list.py        | 73 +++++++++--------
 4 files changed, 90 insertions(+), 68 deletions(-)

diff --git a/pkgs/clan-vm-manager/clan-vm-manager.code-workspace b/pkgs/clan-vm-manager/clan-vm-manager.code-workspace
index 08aa93b5..39c8845b 100644
--- a/pkgs/clan-vm-manager/clan-vm-manager.code-workspace
+++ b/pkgs/clan-vm-manager/clan-vm-manager.code-workspace
@@ -19,7 +19,7 @@
     "**/.mypy_cache": true,
     "**/.reports": true,
     "**/.ruff_cache": true,
-    "**/result": true,
+    "**/result/**": true,
     "/nix/store/**": true
   },
   "search.exclude": {
@@ -29,6 +29,7 @@
     "**/.mypy_cache": true,
     "**/.reports": true,
     "**/.ruff_cache": true,
+    "**/result/": true,
     "/nix/store/**": true
   },
   "files.autoSave": "off"
diff --git a/pkgs/clan-vm-manager/clan_vm_manager/models/executor.py b/pkgs/clan-vm-manager/clan_vm_manager/models/executor.py
index 1c33a246..48d0f8da 100644
--- a/pkgs/clan-vm-manager/clan_vm_manager/models/executor.py
+++ b/pkgs/clan-vm-manager/clan_vm_manager/models/executor.py
@@ -18,7 +18,6 @@ from collections.abc import Callable
 # Kill the new process and all its children by sending a SIGTERM signal to the process group
 def _kill_group(proc: mp.Process) -> None:
     pid = proc.pid
-    assert pid is not None
     if proc.is_alive():
         os.killpg(pid, signal.SIGTERM)
     else:
diff --git a/pkgs/clan-vm-manager/clan_vm_manager/models/use_vms.py b/pkgs/clan-vm-manager/clan_vm_manager/models/use_vms.py
index 47bae536..9e8f3046 100644
--- a/pkgs/clan-vm-manager/clan_vm_manager/models/use_vms.py
+++ b/pkgs/clan-vm-manager/clan_vm_manager/models/use_vms.py
@@ -21,13 +21,17 @@ gi.require_version("Gtk", "4.0")
 import threading
 from gi.repository import Gio, GLib, GObject
+import logging
+import multiprocessing as mp
+from clan_cli.machines.machines import Machine
+
+log = logging.getLogger(__name__)
 
 
 class VM(GObject.Object):
     # Define a custom signal with the name "vm_stopped" and a string argument for the message
     __gsignals__: ClassVar = {
-        "vm_started": (GObject.SignalFlags.RUN_FIRST, None, [GObject.Object]),
-        "vm_stopped": (GObject.SignalFlags.RUN_FIRST, None, [GObject.Object]),
+        "vm_status_changed": (GObject.SignalFlags.RUN_FIRST, None, [GObject.Object]),
     }
 
     def __init__(
         self,
@@ -35,23 +39,28 @@ class VM(GObject.Object):
         icon: Path,
         status: VMStatus,
         data: HistoryEntry,
-        process: MPProcess | None = None,
     ) -> None:
         super().__init__()
         self.data = data
-        self.process = process
+        self.process = MPProcess("dummy", mp.Process(), Path("./dummy"))
+        self._watcher_id: int = 0
         self.status = status
+        self._last_liveness: bool = False
         self.log_dir = tempfile.TemporaryDirectory(
             prefix="clan_vm-", suffix=f"-{self.data.flake.flake_attr}"
         )
         self._finalizer = weakref.finalize(self, self.stop)
 
-    def start(self) -> None:
-        if self.process is not None:
-            show_error_dialog(ClanError("VM is already running"))
+    def __start(self) -> None:
+        if self.is_running():
+            log.warning("VM is already running")
             return
+        machine = Machine(
+            name=self.data.flake.flake_attr,
+            flake=self.data.flake.flake_url,
+        )
         vm = vms.run.inspect_vm(
-            flake_url=self.data.flake.flake_url, flake_attr=self.data.flake.flake_attr
+            machine
         )
         self.process = spawn(
             on_except=None,
@@ -59,39 +68,51 @@ class VM(GObject.Object):
             func=vms.run.run_vm,
             vm=vm,
         )
self.emit("vm_started", self) - GLib.timeout_add(50, self.vm_stopped_task) - def start_async(self) -> None: - threading.Thread(target=self.start).start() + def start(self) -> None: + if self.is_running(): + log.warn("VM is already running") + return + + threading.Thread(target=self.__start).start() + + if self._watcher_id == 0: + # Every 50ms check if the VM is still running + self._watcher_id = GLib.timeout_add(50, self._vm_watcher_task) + + if self._watcher_id == 0: + log.error("Failed to add watcher") + raise ClanError("Failed to add watcher") + + def _vm_watcher_task(self) -> bool: + if self.is_running() != self._last_liveness: + self.emit("vm_status_changed", self) + prev_liveness = self._last_liveness + self._last_liveness = self.is_running() + + # If the VM was running and now it is not, remove the watcher + if prev_liveness == True and not self.is_running(): + return GLib.SOURCE_REMOVE - def vm_stopped_task(self) -> bool: - if not self.is_running(): - self.emit("vm_stopped", self) - return GLib.SOURCE_REMOVE return GLib.SOURCE_CONTINUE def is_running(self) -> bool: - if self.process is not None: - return self.process.proc.is_alive() - return False + return self.process.proc.is_alive() def get_id(self) -> str: return f"{self.data.flake.flake_url}#{self.data.flake.flake_attr}" - def stop_async(self) -> None: - threading.Thread(target=self.stop).start() - def stop(self) -> None: - if self.process is None: - print("VM is already stopped", file=sys.stderr) + log.info("Stopping VM") + if not self.is_running(): + log.error("VM already stopped") return self.process.kill_group() - self.process = None def read_log(self) -> str: - if self.process is None: + if not self.process.out_file.exists(): + log.error(f"Log file {self.process.out_file} does not exist") return "" return self.process.out_file.read_text() @@ -137,14 +158,6 @@ class VMS: else: self.refresh() - def handle_vm_stopped(self, func: Callable[[VM, VM], None]) -> None: - for vm in self.list_store: - vm.connect("vm_stopped", func) - - def handle_vm_started(self, func: Callable[[VM, VM], None]) -> None: - for vm in self.list_store: - vm.connect("vm_started", func) - def get_running_vms(self) -> list[VM]: return list(filter(lambda vm: vm.is_running(), self.list_store)) diff --git a/pkgs/clan-vm-manager/clan_vm_manager/views/list.py b/pkgs/clan-vm-manager/clan_vm_manager/views/list.py index ee1acc68..88079f78 100644 --- a/pkgs/clan-vm-manager/clan_vm_manager/views/list.py +++ b/pkgs/clan-vm-manager/clan_vm_manager/views/list.py @@ -1,6 +1,5 @@ from collections.abc import Callable from functools import partial - import gi from clan_cli.history.add import HistoryEntry @@ -45,10 +44,6 @@ class ClanList(Gtk.Box): vms = VMS.use() join = Join.use() - # TODO: Move this up to create_widget and connect every VM signal to its corresponding switch - vms.handle_vm_stopped(self.stopped_vm) - vms.handle_vm_started(self.started_vm) - self.join_boxed_list = create_boxed_list( model=join.list_store, render_row=self.render_join_row ) @@ -77,12 +72,12 @@ class ClanList(Gtk.Box): if not VMS.use().list_store.get_n_items(): self.vm_boxed_list.add_css_class("no-shadow") - def render_vm_row(self, boxed_list: Gtk.ListBox, item: VM) -> Gtk.Widget: + def render_vm_row(self, boxed_list: Gtk.ListBox, vm: VM) -> Gtk.Widget: if boxed_list.has_css_class("no-shadow"): boxed_list.remove_css_class("no-shadow") - flake = item.data.flake + flake = vm.data.flake row = Adw.ActionRow() - + # Title row.set_title(flake.clan_name) @@ -90,16 +85,16 @@ class ClanList(Gtk.Box): 
         row.set_title_selectable(True)
 
         # Subtitle
-        row.set_subtitle(item.get_id())
+        row.set_subtitle(vm.get_id())
         row.set_subtitle_lines(1)
 
-        # Avatar
-        avatar = Adw.Avatar()
-        avatar.set_custom_image(Gdk.Texture.new_from_filename(flake.icon))
-        avatar.set_text(flake.clan_name + " " + flake.flake_attr)
-        avatar.set_show_initials(True)
-        avatar.set_size(50)
-        row.add_prefix(avatar)
+        # # Avatar
+        # avatar = Adw.Avatar()
+        # avatar.set_custom_image(Gdk.Texture.new_from_filename(flake.icon))
+        # avatar.set_text(flake.clan_name + " " + flake.flake_attr)
+        # avatar.set_show_initials(True)
+        # avatar.set_size(50)
+        # row.add_prefix(avatar)
 
         # Switch
         switch = Gtk.Switch()
@@ -107,7 +102,8 @@
         box.set_valign(Gtk.Align.CENTER)
         box.append(switch)
 
-        switch.connect("notify::active", partial(self.on_row_toggle, item))
+        switch.connect("notify::active", partial(self.on_row_toggle, vm))
+        vm.connect("vm_status_changed", partial(self.vm_status_changed, switch))
         row.add_suffix(box)
 
         return row
@@ -148,22 +144,21 @@
 
         return row
 
-    def started_vm(self, vm: VM, _vm: VM) -> None:
-        print("VM started", vm.data.flake.flake_attr)
-
-    def stopped_vm(self, vm: VM, _vm: VM) -> None:
-        print("VM stopped", vm.data.flake.flake_attr)
 
     def show_error_dialog(self, error: str) -> None:
-        dialog = Gtk.MessageDialog(
-            parent=self.get_toplevel(),
-            modal=True,
-            message_type=Gtk.MessageType.ERROR,
-            buttons=Gtk.ButtonsType.OK,
-            text=error,
+        p = Views.use().main_window
+
+        # app = Gio.Application.get_default()
+        # p = Gtk.Application.get_active_window(app)
+
+        dialog = Adw.MessageDialog(
+            heading="Error"
         )
-        dialog.run()
-        dialog.destroy()
+        dialog.add_response("ok", "ok")
+        dialog.set_body(error)
+        dialog.set_transient_for(p)  # set the parent window of the dialog
+        dialog.choose()
+
 
     def on_trust_clicked(self, item: JoinValue, widget: Gtk.Widget) -> None:
         def on_join(_history: list[HistoryEntry]) -> None:
@@ -187,7 +182,21 @@
         print("Toggled", vm.data.flake.flake_attr, "active:", row.get_active())
 
         if row.get_active():
-            vm.start_async()
+            row.set_state(False)
+            vm.start()
 
         if not row.get_active():
-            vm.stop_async()
+            row.set_state(True)
+            vm.stop()
+
+
+
+    def vm_status_changed(self, switch: Gtk.Switch, vm: VM, _vm: VM) -> None:
+        switch.set_active(vm.is_running())
+        switch.set_state(vm.is_running())
+
+        if vm.process and vm.process.proc.exitcode not in (None, 0):
+            print(f"VM exited with code {vm.process.proc.exitcode}")
+            self.show_error_dialog(vm.read_log())
+
+
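
The watcher pattern introduced above (a GObject "vm_status_changed" signal plus a GLib timeout source that polls a child process and removes itself once the process exits) can be exercised in isolation with the sketch below. It is only an illustration of the pattern, not part of clan-vm-manager: FakeVM, the sleep workload, and the printing handler are made-up stand-ins, and plain multiprocessing takes the place of MPProcess/spawn.

import multiprocessing as mp
import time

from gi.repository import GLib, GObject


class FakeVM(GObject.Object):
    # Same signal shape as in use_vms.py: one GObject argument, no return value.
    __gsignals__ = {
        "vm_status_changed": (GObject.SignalFlags.RUN_FIRST, None, (GObject.Object,)),
    }

    def __init__(self) -> None:
        super().__init__()
        # Stand-in for the spawned VM process; it just sleeps briefly and exits.
        self.proc = mp.Process(target=time.sleep, args=(0.3,))
        self._last_liveness = False

    def is_running(self) -> bool:
        return self.proc.is_alive()

    def start(self) -> None:
        self.proc.start()
        # Poll every 50ms, like the watcher registered in VM.start().
        GLib.timeout_add(50, self._watcher)

    def _watcher(self) -> bool:
        if self.is_running() != self._last_liveness:
            self.emit("vm_status_changed", self)
            was_running = self._last_liveness
            self._last_liveness = self.is_running()
            if was_running and not self.is_running():
                # The process exited: detach the timeout source.
                return GLib.SOURCE_REMOVE
        return GLib.SOURCE_CONTINUE


if __name__ == "__main__":
    loop = GLib.MainLoop()
    vm = FakeVM()

    def on_change(_emitter: FakeVM, changed: FakeVM) -> None:
        # A Gtk.Switch handler would call set_active()/set_state() here instead.
        print("running:", changed.is_running())
        if not changed.is_running():
            loop.quit()

    vm.connect("vm_status_changed", on_change)
    vm.start()
    loop.run()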