mirror of
https://github.com/hexedtech/codemp-sublime.git
synced 2024-12-23 13:14:52 +01:00
fix: properly handle the cleanup of workspaces when closing a window containing them, as well as tagging a window similarly to how we tag views that contain a buffer managed by codemp.
fix: TaskManager, properly implement the stopping logic: tasks in asyncio are not immediately stopped when .cancel()'ed; only a cancellation request is made. Subsequently awaiting the task allows it to raise a CancelledError exception and perform any necessary cleanup (finally block). chore: removed unused import in lib. Former-commit-id: 04f294c50b180e3676fd026d9a47732cdf6511a6
This commit is contained in:
parent
8a67c7ce93
commit
a26a51cecf
5 changed files with 76 additions and 29 deletions
23
plugin.py
23
plugin.py
|
@ -38,8 +38,6 @@ async def disconnect_client():
|
|||
for vws in CLIENT.workspaces.values():
|
||||
vws.cleanup()
|
||||
|
||||
CLIENT = None
|
||||
|
||||
|
||||
def plugin_unloaded():
|
||||
global CLIENT
|
||||
|
@ -51,10 +49,26 @@ def plugin_unloaded():
|
|||
# Listeners
|
||||
##############################################################################
|
||||
class EventListener(sublime_plugin.EventListener):
|
||||
def on_exit(self) -> None:
|
||||
def on_exit(self):
|
||||
global CLIENT
|
||||
CLIENT.tm.release(True)
|
||||
|
||||
def on_pre_close_window(self, window):
|
||||
global CLIENT
|
||||
s = window.settings()
|
||||
if s.get(g.CODEMP_WINDOW_TAG, False):
|
||||
for wsid in s[g.CODEMP_WINDOW_WORKSPACES]:
|
||||
ws = CLIENT[wsid]
|
||||
if ws is not None:
|
||||
status_log(
|
||||
f"current active: {CLIENT.active_workspace.id}, ws = {ws.id}"
|
||||
)
|
||||
if ws.id == CLIENT.active_workspace.id:
|
||||
CLIENT.active_workspace = None
|
||||
CLIENT.tm.stop(f"{g.CURCTL_TASK_PREFIX}-{ws.id}")
|
||||
ws.cleanup()
|
||||
del CLIENT.workspaces[wsid]
|
||||
|
||||
|
||||
class CodempClientViewEventListener(sublime_plugin.ViewEventListener):
|
||||
@classmethod
|
||||
|
@ -102,7 +116,7 @@ class CodempClientViewEventListener(sublime_plugin.ViewEventListener):
|
|||
vbuff = CLIENT[wsid].get_by_local(self.view.buffer_id())
|
||||
vbuff.cleanup()
|
||||
|
||||
CLIENT.tm.stop_and_pop(f"{g.BUFFCTL_TASK_PREFIX}-{vbuff.codemp_id}")
|
||||
CLIENT.tm.stop(f"{g.BUFFCTL_TASK_PREFIX}-{vbuff.codemp_id}")
|
||||
|
||||
|
||||
class CodempClientTextChangeListener(sublime_plugin.TextChangeListener):
|
||||
|
@ -283,6 +297,7 @@ class RawBufferId(sublime_plugin.TextInputHandler):
|
|||
def placeholder(self):
|
||||
return "Buffer Id"
|
||||
|
||||
|
||||
# Share Command
|
||||
# #############################################################################
|
||||
# class CodempShareCommand(sublime_plugin.WindowCommand):
|
||||
|
|
|
@ -1,4 +1,5 @@
|
|||
from typing import Optional
|
||||
import asyncio
|
||||
import Codemp.ext.sublime_asyncio as rt
|
||||
|
||||
|
||||
|
@ -16,17 +17,18 @@ class TaskManager:
|
|||
def sync(self, coro):
|
||||
rt.sync(coro)
|
||||
|
||||
def store(self, task):
|
||||
self.tasks.append(task)
|
||||
def remove_stopped(self):
|
||||
self.tasks = list(filter(lambda T: not T.cancelled(), self.tasks))
|
||||
|
||||
def store_named(self, task, name=None):
|
||||
task.set_name(name)
|
||||
self.store(task)
|
||||
def store(self, task, name=None):
|
||||
if name is not None:
|
||||
task.set_name(name)
|
||||
self.tasks.append(task)
|
||||
self.remove_stopped()
|
||||
|
||||
def store_named_lambda(self, name):
|
||||
def _store(task):
|
||||
task.set_name(name)
|
||||
self.store(task)
|
||||
self.store(task, name)
|
||||
|
||||
return _store
|
||||
|
||||
|
@ -44,20 +46,20 @@ class TaskManager:
|
|||
return self.task.pop(idx)
|
||||
return None
|
||||
|
||||
async def _stop(self, task):
|
||||
task.cancel() # cancelling a task, merely requests a cancellation.
|
||||
try:
|
||||
await task
|
||||
except asyncio.CancelledError:
|
||||
return
|
||||
|
||||
def stop(self, name):
|
||||
t = self.get_task(name)
|
||||
if t is not None:
|
||||
t.cancel()
|
||||
|
||||
def stop_and_pop(self, name) -> Optional:
|
||||
idx, task = next(
|
||||
((i, t) for (i, t) in enumerate(self.tasks) if t.get_name() == name),
|
||||
(None, None),
|
||||
)
|
||||
if idx is not None:
|
||||
task.cancel()
|
||||
return self.tasks.pop(idx)
|
||||
rt.sync(
|
||||
self._stop(t)
|
||||
) # awaiting the task blocks until it actually is finished.
|
||||
|
||||
def stop_all(self):
|
||||
for task in self.tasks:
|
||||
task.cancel()
|
||||
rt.sync(self._stop(task))
|
||||
|
|
|
@ -73,7 +73,7 @@ class VirtualWorkspace:
|
|||
|
||||
# mapping remote ids -> local ids
|
||||
self.id_map: dict[str, str] = {}
|
||||
self.active_buffers: dict[str, VirtualBuffer] = {}
|
||||
self.active_buffers: dict[str, VirtualBuffer] = {} # local_id -> VBuff
|
||||
|
||||
# initialise the virtual filesystem
|
||||
tmpdir = tempfile.mkdtemp(prefix="codemp_")
|
||||
|
@ -90,6 +90,13 @@ class VirtualWorkspace:
|
|||
)
|
||||
self.sublime_window.set_project_data(proj_data)
|
||||
|
||||
s: dict = self.sublime_window.settings()
|
||||
if s.get(g.CODEMP_WINDOW_TAG, False):
|
||||
s[g.CODEMP_WINDOW_WORKSPACES].append(self.id)
|
||||
else:
|
||||
s[g.CODEMP_WINDOW_TAG] = True
|
||||
s[g.CODEMP_WINDOW_WORKSPACES] = [self.id]
|
||||
|
||||
def add_buffer(self, remote_id: str, vbuff: VirtualBuffer):
|
||||
self.id_map[remote_id] = vbuff.view.buffer_id()
|
||||
self.active_buffers[vbuff.view.buffer_id()] = vbuff
|
||||
|
@ -100,6 +107,8 @@ class VirtualWorkspace:
|
|||
for vbuff in self.active_buffers.values():
|
||||
vbuff.view.close()
|
||||
|
||||
self.active_buffers = {}  # drop all buffers, let them be garbage collected (hopefully)
|
||||
|
||||
d = self.sublime_window.project_data()
|
||||
newf = list(
|
||||
filter(
|
||||
|
@ -112,6 +121,10 @@ class VirtualWorkspace:
|
|||
status_log(f"cleaning up virtual workspace '{self.id}'")
|
||||
shutil.rmtree(self.rootdir, ignore_errors=True)
|
||||
|
||||
s = self.sublime_window.settings()
|
||||
del s[g.CODEMP_WINDOW_TAG]
|
||||
del s[g.CODEMP_WINDOW_WORKSPACES]
|
||||
|
||||
def get_by_local(self, local_id: str) -> Optional[VirtualBuffer]:
|
||||
return self.active_buffers.get(local_id)
|
||||
|
||||
|
@ -171,7 +184,9 @@ class VirtualClient:
|
|||
try:
|
||||
await self.handle.connect(server_host)
|
||||
except Exception as e:
|
||||
sublime.error_message(f"Could not connect:\n Make sure the server is up.\nerror: {e}")
|
||||
sublime.error_message(
|
||||
f"Could not connect:\n Make sure the server is up.\nerror: {e}"
|
||||
)
|
||||
return
|
||||
|
||||
id = await self.handle.user_id()
|
||||
|
@ -185,7 +200,9 @@ class VirtualClient:
|
|||
await self.handle.login(user, password, workspace_id)
|
||||
except Exception as e:
|
||||
status_log(f"Failed to login to workspace '{workspace_id}'.\nerror: {e}")
|
||||
sublime.error_message(f"Failed to login to workspace '{workspace_id}'.\nerror: {e}")
|
||||
sublime.error_message(
|
||||
f"Failed to login to workspace '{workspace_id}'.\nerror: {e}"
|
||||
)
|
||||
return
|
||||
|
||||
try:
|
||||
|
@ -193,7 +210,9 @@ class VirtualClient:
|
|||
workspace_handle = await self.handle.join_workspace(workspace_id)
|
||||
except Exception as e:
|
||||
status_log(f"Could not join workspace '{workspace_id}'.\nerror: {e}")
|
||||
sublime.error_message(f"Could not join workspace '{workspace_id}'.\nerror: {e}")
|
||||
sublime.error_message(
|
||||
f"Could not join workspace '{workspace_id}'.\nerror: {e}"
|
||||
)
|
||||
return
|
||||
|
||||
vws = VirtualWorkspace(self, workspace_id, workspace_handle)
|
||||
|
@ -233,7 +252,10 @@ class VirtualClient:
|
|||
|
||||
except asyncio.CancelledError:
|
||||
status_log(f"cursor worker for '{vws.id}' stopped...")
|
||||
return
|
||||
raise
|
||||
except Exception as e:
|
||||
status_log(f"cursor worker '{vws.id}' crashed:\n{e}")
|
||||
raise
|
||||
|
||||
self.tm.dispatch(
|
||||
move_cursor_task(virtual_workspace),
|
||||
|
@ -280,7 +302,11 @@ class VirtualClient:
|
|||
)
|
||||
|
||||
except asyncio.CancelledError:
|
||||
status_log("'{}' buffer worker stopped...".format(vb.codemp_id))
|
||||
status_log(f"'{vb.codemp_id}' buffer worker stopped...")
|
||||
raise
|
||||
except Exception as e:
|
||||
status_log(f"buffer worker '{vb.codemp_id}' crashed:\n{e}")
|
||||
raise
|
||||
|
||||
self.tm.dispatch(
|
||||
apply_buffer_change_task(vbuff),
|
||||
|
|
|
@ -1,9 +1,13 @@
|
|||
BUFFCTL_TASK_PREFIX = "buffer-ctl"
|
||||
CURCTL_TASK_PREFIX = "cursor-ctl"
|
||||
|
||||
CODEMP_BUFFER_TAG = "codemp-buffer"
|
||||
CODEMP_REMOTE_ID = "codemp-buffer-id"
|
||||
CODEMP_WORKSPACE_ID = "codemp-workspace-id"
|
||||
|
||||
CODEMP_WINDOW_TAG = "codemp-window"
|
||||
CODEMP_WINDOW_WORKSPACES = "codemp-workspaces"
|
||||
|
||||
WORKSPACE_FOLDER_PREFIX = "CODEMP::"
|
||||
SUBLIME_REGIONS_PREFIX = "codemp-cursors"
|
||||
SUBLIME_STATUS_ID = "z_codemp_buffer"
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
use codemp::proto::common::Identity;
|
||||
use pyo3::types::PyList;
|
||||
use std::{format, ops::Deref, sync::Arc};
|
||||
use std::{format, sync::Arc};
|
||||
use tokio::sync::RwLock;
|
||||
|
||||
use codemp::prelude::*;
|
||||
|
|
Loading…
Reference in a new issue