fix: simplify simplify simplify

Former-commit-id: 3e093fcaba0026c2585aca0ea9b714852f5e9839
cschen 2024-08-31 15:24:22 +02:00
parent eb18401e91
commit 95694dc2ec
6 changed files with 173 additions and 231 deletions


@@ -1 +0,0 @@
e674134a5cc02257b28bd8572b9d9e7534c92e5f


@@ -0,0 +1 @@
d86401dbaaca8ebe4d8998563d9fceff4b6daac0

plugin.py

@@ -5,7 +5,6 @@ import logging
import random
from typing import Tuple
import codemp
from Codemp.src.client import client
from Codemp.src.utils import safe_listener_detach
from Codemp.src.utils import safe_listener_attach
@@ -60,12 +59,11 @@ class EventListener(sublime_plugin.EventListener):
def on_exit(self):
client.disconnect()
if client.driver is not None:
client.driver.stop()
def on_pre_close_window(self, window):
assert client.codemp is not None
if not client.valid_window(window):
return
for vws in client.all_workspaces(window):
client.codemp.leave_workspace(vws.id)
@@ -83,12 +81,11 @@ class EventListener(sublime_plugin.EventListener):
class CodempClientViewEventListener(sublime_plugin.ViewEventListener):
@classmethod
def is_applicable(cls, settings):
logger.debug(settings.get(g.CODEMP_BUFFER_TAG, False))
return settings.get(g.CODEMP_BUFFER_TAG, False)
@classmethod
def applies_to_primary_view_only(cls):
return True
return False
def on_selection_modified_async(self):
region = self.view.sel()[0]
@@ -106,14 +103,12 @@ class CodempClientViewEventListener(sublime_plugin.ViewEventListener):
def on_activated(self):
global TEXT_LISTENER
vbuff = client.buffer_from_view(self.view)
logging.debug(f"'{vbuff.id}' view activated!")
logger.debug(f"'{self.view}' view activated!")
safe_listener_attach(TEXT_LISTENER, self.view.buffer()) # pyright: ignore
def on_deactivated(self):
global TEXT_LISTENER
vbuff = client.buffer_from_view(self.view)
logging.debug(f"'{vbuff.id}' view deactivated!")
logger.debug(f"'{self.view}' view deactivated!")
safe_listener_detach(TEXT_LISTENER) # pyright: ignore
def on_pre_close(self):
@@ -126,6 +121,7 @@ class CodempClientViewEventListener(sublime_plugin.ViewEventListener):
if vws is None or vbuff is None:
raise
client.unregister_buffer(vbuff)
vws.uninstall_buffer(vbuff)
def on_text_command(self, command_name, args):
@@ -144,8 +140,7 @@ class CodempClientTextChangeListener(sublime_plugin.TextChangeListener):
# we'll do it by hand with .attach(buffer).
return False
# we do the boring stuff in the async thread
def on_text_changed_async(self, changes):
def on_text_changed(self, changes):
s = self.buffer.primary_view().settings()
if s.get(g.CODEMP_IGNORE_NEXT_TEXT_CHANGE, False):
logger.debug("Ignoring echoing back the change.")
@@ -154,9 +149,8 @@ class CodempClientTextChangeListener(sublime_plugin.TextChangeListener):
vbuff = client.buffer_from_view(self.buffer.primary_view())
if vbuff is not None:
# but then we block the main one for the actual sending!
logger.debug(f"local buffer change! {vbuff.id}")
sublime.set_timeout(lambda: vbuff.send_buffer_change(changes))
vbuff.send_buffer_change(changes)
# Client Commands:
@@ -191,7 +185,7 @@ class CodempConnectCommand(sublime_plugin.WindowCommand):
def is_enabled(self) -> bool:
return client.codemp is None
def run(self, server_host, user_name, password="lmaodefaultpassword"):
def run(self, server_host, user_name, password):
logger.info(f"Connecting to {server_host} with user {user_name}...")
def try_connect():
@@ -212,7 +206,7 @@ class CodempConnectCommand(sublime_plugin.WindowCommand):
def input(self, args):
if "server_host" not in args:
return SimpleTextInput(
("server_host", "http://codemp.alemi.dev:50053"),
("server_host", "http://codemp.dev:50053"),
("user_name", f"user-{random.random()}"),
)
@@ -257,41 +251,10 @@ class CodempJoinWorkspaceCommand(sublime_plugin.WindowCommand):
def input(self, args):
if "workspace_id" not in args:
return SimpleTextInput(("workspace_id", "workspace?"))
# To allow for having a selection and choosing non-existent workspaces
# we do a little dance: we pass this list input handler to a TextInputHandler
# when we select "Create New...", which adds its result to the list of possible
# workspaces and pops itself off the stack to go back to the list handler.
# class WorkspaceIdList(sublime_plugin.ListInputHandler):
# def __init__(self):
# assert client.codemp is not None # the command should not be available
# # at the moment, the client can't give us a full list of existing workspaces
# # so a textinputhandler would be more appropriate. but we keep this for the future
# self.add_entry_text = "* add entry..."
# self.list = client.codemp.active_workspaces()
# self.list.sort()
# self.list.append(self.add_entry_text)
# self.preselected = None
# def name(self):
# return "workspace_id"
# def placeholder(self):
# return "Workspace"
# def list_items(self):
# if self.preselected is not None:
# return (self.list, self.preselected)
# else:
# return self.list
# def next_input(self, args):
# if args["workspace_id"] == self.add_entry_text:
# return AddListEntry(self)
list = client.codemp.list_workspaces(True, True)
return SimpleListInput(
("workspace_id", list.wait()),
)
# Leave Workspace Command
@@ -305,9 +268,11 @@ class CodempLeaveWorkspaceCommand(sublime_plugin.WindowCommand):
vws = client.workspace_from_id(workspace_id)
if vws is not None:
client.uninstall_workspace(vws)
else:
logger.error(f"could not leave the workspace '{workspace_id}'")
def input(self, args):
if "id" not in args:
if "workspace_id" not in args:
return ActiveWorkspacesIdList()
@@ -331,8 +296,8 @@ class CodempJoinBufferCommand(sublime_plugin.WindowCommand):
assert vws is not None
# is the buffer already installed?
if vws.valid_buffer(buffer_id):
logger.debug("buffer already installed!")
if buffer_id in vws.codemp.buffer_list():
logger.info("buffer already installed!")
return # do nothing.
if buffer_id not in vws.codemp.filetree(filter=buffer_id):
@@ -543,6 +508,24 @@ class SimpleTextInput(sublime_plugin.TextInputHandler):
return SimpleTextInput(*self.next_inputs)
class SimpleListInput(sublime_plugin.ListInputHandler):
def __init__(self, *args: Tuple[str, list]):
self.argname = args[0][0]
self.list = args[0][1]
self.next_inputs = args[1:]
def name(self):
return self.argname
def list_items(self):
return self.list
def next_input(self, args):
if len(self.next_inputs) > 0:
if self.next_inputs[0][0] not in args:
return SimpleListInput(*self.next_inputs)
class ActiveWorkspacesIdList(sublime_plugin.ListInputHandler):
def __init__(self, window=None, buffer_list=False, buffer_text=False):
self.window = window
@@ -562,6 +545,40 @@ class ActiveWorkspacesIdList(sublime_plugin.ListInputHandler):
return SimpleTextInput(("buffer_id", "new buffer"))
# To allow for having a selection and choosing non-existent workspaces
# we do a little dance: we pass this list input handler to a TextInputHandler
# when we select "Create New...", which adds its result to the list of possible
# workspaces and pops itself off the stack to go back to the list handler.
class WorkspaceIdList(sublime_plugin.ListInputHandler):
def __init__(self):
assert client.codemp is not None # the command should not be available
# at the moment, the client can't give us a full list of existing workspaces
# so a textinputhandler would be more appropriate. but we keep this for the future
self.add_entry_text = "* add entry..."
self.list = client.codemp.list_workspaces(True, True).wait()
self.list.sort()
self.list.append(self.add_entry_text)
self.preselected = None
def name(self):
return "workspace_id"
def placeholder(self):
return "Workspace"
def list_items(self):
if self.preselected is not None:
return (self.list, self.preselected)
else:
return self.list
def next_input(self, args):
if args["workspace_id"] == self.add_entry_text:
return AddListEntry(self)
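
AddListEntry is referenced here but is not part of this diff; based on the "little dance" comment above, it presumably looks roughly like the following (a hypothetical reconstruction, not the plugin's actual code):

class AddListEntry(sublime_plugin.TextInputHandler):
    # hypothetical sketch: appends the typed workspace id to the parent list
    # handler and pops itself off the input stack, as described above.
    def __init__(self, list_input_handler):
        self.parent = list_input_handler

    def placeholder(self):
        return "new workspace id"

    def validate(self, text: str) -> bool:
        return len(text) > 0

    def confirm(self, text: str):
        self.parent.list.insert(-1, text)  # keep "* add entry..." as the last item
        self.parent.preselected = self.parent.list.index(text)

    def next_input(self, args):
        return sublime_plugin.BackInputHandler()  # pop back to the list handler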
class BufferIdList(sublime_plugin.ListInputHandler):
def __init__(self, workspace_id):
vws = client.workspace_from_id(workspace_id)


@@ -56,46 +56,13 @@ class VirtualBuffer:
def __init__(
self,
buffctl: codemp.BufferController,
view: sublime.View, # noqa: F821 # type: ignore
view: sublime.View,
rootdir: str,
):
self.buffctl = buffctl
self.view = view
self.id = self.buffctl.name()
def __hash__(self) -> int:
return hash(self.id)
def sync(self):
promise = self.buffctl.content()
def defer_sync(promise):
content = promise.wait()
populate_view(self.view, content)
sublime.set_timeout_async(lambda: defer_sync(promise))
def cleanup(self):
self.uninstall()
self.buffctl.stop()
def uninstall(self):
if not getattr(self, "installed", False):
return
self.__deactivate()
os.remove(self.tmpfile)
s = self.view.settings()
del s[g.CODEMP_BUFFER_TAG]
self.view.erase_status(g.SUBLIME_STATUS_ID)
self.installed = False
def install(self, rootdir):
if getattr(self, "installed", False):
return
self.tmpfile = os.path.join(rootdir, self.id)
open(self.tmpfile, "a").close()
@@ -108,20 +75,39 @@ class VirtualBuffer:
s[g.CODEMP_BUFFER_TAG] = True
self.sync()
self.__activate()
self.installed = True
def __activate(self):
logger.info(f"registering a callback for buffer: {self.id}")
self.buffctl.callback(make_bufferchange_cb(self))
self.isactive = True
def __deactivate(self):
def __del__(self):
logger.info(f"clearing a callback for buffer: {self.id}")
self.buffctl.clear_callback()
self.buffctl.stop()
self.isactive = False
os.remove(self.tmpfile)
def onclose(did_close):
if did_close:
logger.info(f"'{self.id}' closed successfully")
else:
logger.info(f"failed to close the view for '{self.id}'")
self.view.close(onclose)
def __hash__(self) -> int:
return hash(self.id)
def sync(self):
promise = self.buffctl.content()
def defer_sync(promise):
content = promise.wait()
populate_view(self.view, content)
sublime.set_timeout_async(lambda: defer_sync(promise))
def send_buffer_change(self, changes):
# we do not do any index checking, and trust sublime to provide the correct
# sequential indexing, assuming the changes are applied in the order they are received.
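
The body of send_buffer_change is cut off by this hunk; a plausible sketch of what the comment above describes, assuming the buffer controller exposes a send(start, end, text) style method (that signature is an assumption, not taken from this diff), could look like:

# sketch only, a method of VirtualBuffer; buffctl.send(...) is assumed, not shown in this commit
def send_buffer_change(self, changes):
    for change in changes:
        # sublime gives us HistoricPositions for the start/end of each change
        region = sublime.Region(change.a.pt, change.b.pt)
        logger.debug(f"local change {region} -> '{change.str}'")
        self.buffctl.send(region.begin(), region.end(), change.str)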


@@ -13,6 +13,7 @@ from Codemp.src.utils import bidict
logger = logging.getLogger(__name__)
# the client will be responsible for keeping track of everything!
# it will need 3 bidirectional dictionaries and 2 normal ones
# normal: workspace_id -> VirtualWorkspaces
@@ -20,56 +21,53 @@ logger = logging.getLogger(__name__)
# bidir: VirtualBuffer <-> VirtualWorkspace
# bidir: VirtualBuffer <-> Sublime.View
# bidir: VirtualWorkspace <-> Sublime.Window
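
For reference, a minimal sketch of how the bidict imported from Codemp.src.utils appears to behave, judging from how it is used in this file (forward item assignment plus an .inverse view mapping each value to the list of keys pointing at it); the actual helper may differ in detail:

# illustrative sketch only, not part of this commit
class bidict(dict):
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.inverse = {}  # value -> [keys]
        for key, value in self.items():
            self.inverse.setdefault(value, []).append(key)

    def __setitem__(self, key, value):
        if key in self:  # keep the inverse consistent when overwriting
            self.inverse[self[key]].remove(key)
        super().__setitem__(key, value)
        self.inverse.setdefault(value, []).append(key)

    def __delitem__(self, key):
        value = self[key]
        self.inverse[value].remove(key)
        if not self.inverse[value]:  # removing the last forward key drops the empty inverse entry
            del self.inverse[value]
        super().__delitem__(key)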
def log_async(msg):
sublime.set_timeout_async(lambda: logger.log(logger.level, msg))
class VirtualClient:
def __init__(self):
self.codemp: Optional[codemp.Client] = None
self.driver = codemp.init(lambda msg: logger.log(logger.level, msg), False)
self.driver: Optional[codemp.Driver] = None
# bookkeeping corner
self._id2buffer: dict[str, VirtualBuffer] = {}
self._id2workspace: dict[str, VirtualWorkspace] = {}
self._view2buff: dict[sublime.View, VirtualBuffer] = {}
self._view2buff: dict[sublime.View, VirtualBuffer] = {}
self._buff2workspace: bidict[VirtualBuffer, VirtualWorkspace] = bidict()
self._workspace2window: bidict[VirtualWorkspace, sublime.Window] = bidict()
# self._workspace2window: bidict[VirtualWorkspace, sublime.Window] = bidict()
self._workspace2window: dict[VirtualWorkspace, sublime.Window] = bidict()
def dump(self):
logger.debug("CLIENT STATUS:")
logger.debug("WORKSPACES:")
logger.debug(f"{self._id2workspace}")
logger.debug(f"{self._workspace2window}")
logger.debug(f"{self._workspace2window.inverse}")
logger.debug(f"{self._buff2workspace}")
logger.debug(f"{self._buff2workspace.inverse}")
logger.debug("VIEWS")
logger.debug(f"{self._view2buff}")
logger.debug(f"{self._id2buffer}")
def valid_window(self, window: sublime.Window):
return window in self._workspace2window.inverse
def valid_workspace(self, workspace: VirtualWorkspace | str):
if isinstance(workspace, str):
return client._id2workspace.get(workspace) is not None
return workspace in self._workspace2window
def all_workspaces(
self, window: Optional[sublime.Window] = None
) -> list[VirtualWorkspace]:
if window is None:
return list(self._workspace2window.keys())
else:
return self._workspace2window.inverse.get(window, [])
return [
ws
for ws in self._workspace2window
if self._workspace2window[ws] == window
]
def workspace_from_view(self, view: sublime.View) -> Optional[VirtualWorkspace]:
buff = self._view2buff.get(view, None)
return self._buff2workspace.get(buff, None)
return self.workspace_from_buffer(buff) if buff is not None else None
def workspace_from_buffer(self, buff: VirtualBuffer) -> Optional[VirtualWorkspace]:
return self._buff2workspace.get(buff)
def workspace_from_buffer(self, vbuff: VirtualBuffer) -> Optional[VirtualWorkspace]:
return self._buff2workspace.get(vbuff, None)
def workspace_from_id(self, id: str) -> Optional[VirtualWorkspace]:
return self._id2workspace.get(id)
@@ -78,11 +76,12 @@ class VirtualClient:
self, workspace: Optional[VirtualWorkspace | str] = None
) -> list[VirtualBuffer]:
if workspace is None:
return list(self._buff2workspace.keys())
else:
if isinstance(workspace, str):
return list(self._id2buffer.values())
elif isinstance(workspace, str):
workspace = client._id2workspace[workspace]
return self._buff2workspace.inverse.get(workspace, [])
else:
return self._buff2workspace.inverse.get(workspace, [])
def buffer_from_view(self, view: sublime.View) -> Optional[VirtualBuffer]:
return self._view2buff.get(view)
@@ -98,18 +97,12 @@ class VirtualClient:
self._id2buffer[buffer.id] = buffer
self._view2buff[buffer.view] = buffer
def unregister_buffer(self, buffer: VirtualBuffer):
del self._buff2workspace[buffer]
del self._id2buffer[buffer.id]
del self._view2buff[buffer.view]
def disconnect(self):
if self.codemp is None:
return
logger.info("disconnecting from the current client")
# for each workspace tell it to clean up after itself.
for vws in self.all_workspaces():
vws.cleanup()
self.codemp.leave_workspace(vws.id)
self._id2workspace.clear()
@@ -124,36 +117,38 @@ class VirtualClient:
logger.info("Disconnecting from previous client.")
return self.disconnect()
self.codemp = codemp.Client(host, user, password)
if self.driver is None:
self.driver = codemp.init()
codemp.set_logger(log_async, False)
self.codemp = codemp.connect(host, user, password).wait()
id = self.codemp.user_id()
logger.debug(f"Connected to '{host}' as user {user} (id: {id})")
def install_workspace(
self, workspace: codemp.Workspace, window: sublime.Window
) -> VirtualWorkspace:
# we pass the window as well so that if the window changes in the
# meantime we have the correct one!
def install_workspace(self, workspace: codemp.Workspace, window: sublime.Window):
vws = VirtualWorkspace(workspace, window)
self._workspace2window[vws] = window
self._id2workspace[vws.id] = vws
vws.install()
return vws
def uninstall_workspace(self, vws: VirtualWorkspace):
if vws not in self._workspace2window:
raise
# we aim at dropping all references to the workspace
# as well as all the buffers associated with it.
# if we did a good job, the dunder del method will kick in
# and continue with the cleanup.
logger.info(f"Uninstalling workspace '{vws.id}'...")
vws.cleanup()
del self._workspace2window[vws]
del self._id2workspace[vws.id]
buffers = self._buff2workspace.inverse[vws]
for vbuff in buffers:
for vbuff in self.all_buffers(vws):
self.unregister_buffer(vbuff)
del vws
# self._buff2workspace.inverse_del(vws) - if we delete all forward
# keys, the last delete will also remove the empty inverse key.
def unregister_buffer(self, buffer: VirtualBuffer):
del self._buff2workspace[buffer]
del self._id2buffer[buffer.id]
del self._view2buff[buffer.view]
def workspaces_in_server(self):
return self.codemp.active_workspaces() if self.codemp else []


@@ -17,7 +17,7 @@ logger = logging.getLogger(__name__)
def make_cursor_callback(workspace: VirtualWorkspace):
def __callback(ctl: codemp.CursorController):
def _callback(ctl: codemp.CursorController):
def get_event_and_draw():
while event := ctl.try_recv().wait():
logger.debug("received remote cursor movement!")
@@ -35,7 +35,7 @@ def make_cursor_callback(workspace: VirtualWorkspace):
sublime.set_timeout_async(get_event_and_draw)
return __callback
return _callback
# A virtual workspace is a bridge class that aims to translate
@@ -51,87 +51,9 @@ class VirtualWorkspace:
self.codemp.fetch_buffers()
self.codemp.fetch_users()
# mapping remote ids -> local ids
self._buff2view: bidict[VirtualBuffer, sublime.View] = bidict()
self._id2buff: dict[str, VirtualBuffer] = {}
# self.id_map: dict[str, int] = {}
# self.active_buffers: dict[int, VirtualBuffer] = {} # local_id -> VBuff
def __hash__(self) -> int:
# so we can use these as dict keys!
return hash(self.id)
def sync(self):
# check that the state we have here is the same as the one codemp has internally!
# if not get up to speed!
self.codemp.fetch_buffers().wait()
attached_buffers = self.codemp.buffer_list()
all(id in self._id2buff for id in attached_buffers)
# TODO!
def valid_buffer(self, buff: VirtualBuffer | str):
if isinstance(buff, str):
return self.buff_by_id(buff) is not None
return buff in self._buff2view
def all_buffers(self) -> list[VirtualBuffer]:
return list(self._buff2view.keys())
def buff_by_view(self, view: sublime.View) -> Optional[VirtualBuffer]:
buff = self._buff2view.inverse.get(view)
return buff[0] if buff is not None else None
def buff_by_id(self, id: str) -> Optional[VirtualBuffer]:
return self._id2buff.get(id)
def all_views(self) -> list[sublime.View]:
return list(self._buff2view.inverse.keys())
def view_by_buffer(self, buffer: VirtualBuffer) -> sublime.View:
return buffer.view
def cleanup(self):
# the workspace only cares about closing the various open views of its buffers.
# the event listener calls the cleanup code for each buffer independently
# upon closure.
for view in self.all_views():
view.close()
self.uninstall()
self.curctl.stop()
self._buff2view.clear()
self._id2buff.clear()
def uninstall(self):
if not getattr(self, "installed", False):
return
self.__deactivate()
proj: dict = self.window.project_data() # type:ignore
if proj is None:
raise
clean_proj_folders = list(
filter(
lambda f: f.get("name", "") != f"{g.WORKSPACE_FOLDER_PREFIX}{self.id}",
proj["folders"],
)
)
proj["folders"] = clean_proj_folders
self.window.set_project_data(proj)
logger.info(f"cleaning up virtual workspace '{self.id}'")
shutil.rmtree(self.rootdir, ignore_errors=True)
self.installed = False
def install(self):
if getattr(self, "installed", False):
return
# initialise the virtual filesystem
tmpdir = tempfile.mkdtemp(prefix="codemp_")
logging.debug(f"setting up virtual fs for workspace in: {tmpdir}")
self.rootdir = tmpdir
@@ -145,36 +67,58 @@ class VirtualWorkspace:
)
self.window.set_project_data(proj)
self.__activate()
self.installed = True
def __activate(self):
self.curctl.callback(make_cursor_callback(self))
self.isactive = True
def __deactivate(self):
def __del__(self):
self.curctl.clear_callback()
self.isactive = False
self.curctl.stop()
proj: dict = self.window.project_data() # type:ignore
if proj is None:
raise
clean_proj_folders = list(
filter(
lambda f: f.get("name", "") != f"{g.WORKSPACE_FOLDER_PREFIX}{self.id}",
proj["folders"],
)
)
proj["folders"] = clean_proj_folders
self.window.set_project_data(proj)
logger.info(f"cleaning up virtual workspace '{self.id}'")
shutil.rmtree(self.rootdir, ignore_errors=True)
if not all(self.codemp.detach(buff) for buff in self._id2buff.keys()):
logger.warning(
f"could not detach from all buffers for workspace '{self.id}'."
)
self._id2buff.clear()
def __hash__(self) -> int:
# so we can use these as dict keys!
return hash(self.id)
def all_buffers(self) -> list[VirtualBuffer]:
return list(self._id2buff.values())
def buff_by_id(self, id: str) -> Optional[VirtualBuffer]:
return self._id2buff.get(id)
def install_buffer(self, buff: codemp.BufferController) -> VirtualBuffer:
logger.debug(f"installing buffer {buff.name()}")
view = self.window.new_file()
vbuff = VirtualBuffer(buff, view)
logger.debug("created virtual buffer")
self._buff2view[vbuff] = view
vbuff = VirtualBuffer(buff, view, self.rootdir)
self._id2buff[vbuff.id] = vbuff
vbuff.install(self.rootdir)
return vbuff
def uninstall_buffer(self, vbuff: VirtualBuffer):
vbuff.cleanup()
buffview = self.view_by_buffer(vbuff)
del self._buff2view[vbuff]
del self._id2buff[vbuff.id]
buffview.close()
self.codemp.detach(vbuff.id)
def send_cursor(self, id: str, start: Tuple[int, int], end: Tuple[int, int]):
# we can safely ignore the promise, we don't really care if everything