BIG CHANGE: fuck, I had written a whole essay here and I don't feel like rewriting it.

TL;DR: updated the bindings; the "virtual" classes now do less and only deal with
managing the persistence of the codemp objects within the editor.
The actual commands handle the interaction with codemp.
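
For example, the split of responsibilities now roughly looks like this (method names taken
from the diff below; heavily simplified, not the exact plugin code):

    # command side: talk to codemp, then hand the result to the client
    workspace = client.codemp.join_workspace(workspace_id).wait()
    client.install_workspace(workspace, window)

    # "virtual" object side: only persistence inside the editor
    vws.install()      # temp dir, project folder entry, window tags
    vws.uninstall()    # remove them again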

Moved away from asyncio: it is now callbacks, dispatched on Sublime's async thread.
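
A minimal sketch of the callback pattern, mirroring the cursor/buffer controller code in the
diff below (assumes codemp's controller API as it appears there: callback(), try_recv(), wait();
runs inside Sublime's plugin host):

    import sublime

    def register(ctl):
        # the controller calls us back when events are ready; the callback only
        # schedules the real work on Sublime's async thread and drains the
        # controller there with try_recv().
        def on_event(ctl):
            def drain():
                # keep pulling events until the controller has none left
                while (event := ctl.try_recv().wait()) is not None:
                    ...  # translate the event into editor actions
            sublime.set_timeout_async(drain)
        ctl.callback(on_event)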

The client is now much more central and knows about everything.
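
Commands resolve everything through the client's new lookup helpers, e.g. (names as introduced
in this diff; `client` is the plugin singleton, `view`, `start`, `end` come from the listener context):

    def on_selection(view, start, end):
        vws = client.workspace_from_view(view)    # view -> VirtualWorkspace
        vbuff = client.buffer_from_view(view)     # view -> VirtualBuffer
        if vws is not None and vbuff is not None:
            vws.send_cursor(vbuff.id, start, end)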

Split the join command back into a join-workspace and a join-buffer command, as it was before.
Simpler and better UX.
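
Rough usage sketch of the two commands (command names as defined in this commit; the workspace
and buffer names are made up for the example):

    window = sublime.active_window()
    # the ids below are illustrative only
    window.run_command("codemp_join_workspace", {"workspace_id": "my-workspace"})
    window.run_command("codemp_join_buffer", {"workspace_id": "my-workspace", "buffer_id": "notes.md"})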


Former-commit-id: 71c96d321fef2620da4301a8f7af5dff138921cd
cschen 2024-08-23 20:59:06 +02:00
parent 507fca5057
commit 1e5aeda755
8 changed files with 625 additions and 513 deletions


@ -29,8 +29,16 @@
}
},
{
"caption": "Codemp: Join",
"command": "codemp_join",
"caption": "Codemp: Join Workspace",
"command": "codemp_join_workspace",
"arg": {
// 'workspace_id': 'asd'
// 'buffer_id': 'test'
},
},
{
"caption": "Codemp: Join Buffer",
"command": "codemp_join_buffer",
"arg": {
// 'workspace_id': 'asd'
// 'buffer_id': 'test'


@ -1 +1 @@
999fd917360a8fa68970c12c70bf5decf30c84a4
e674134a5cc02257b28bd8572b9d9e7534c92e5f

plugin.py

@ -1,16 +1,14 @@
# pyright: reportIncompatibleMethodOverride=false
import sublime
import sublime_plugin
import logging
import random
# from Codemp.src.task_manager import rt
import codemp
from Codemp.src.client import client
from Codemp.src.utils import safe_listener_detach
from Codemp.src.utils import safe_listener_attach
from Codemp.src import globals as g
from codemp import register_logger
LOG_LEVEL = logging.DEBUG
handler = logging.StreamHandler()
@ -27,9 +25,6 @@ package_logger.propagate = False
logger = logging.getLogger(__name__)
# returns false if logger already exists
register_logger(lambda msg: logger.log(logger.level, msg), False)
TEXT_LISTENER = None
@ -49,37 +44,36 @@ def plugin_unloaded():
safe_listener_detach(TEXT_LISTENER)
package_logger.removeHandler(handler)
client.disconnect()
# client.disconnect()
# rt.stop_loop()
# Listeners
##############################################################################
class EventListener(sublime_plugin.EventListener):
def is_enabled(self):
return client.codemp is not None
def on_exit(self):
client.disconnect()
client.driver.stop()
def on_pre_close_window(self, window):
if client.active_workspace is None:
return # nothing to do
# deactivate all workspaces
client.make_active(None)
s = window.settings()
if not s.get(g.CODEMP_WINDOW_TAG, False):
assert client.codemp is not None
if not client.valid_window(window):
return
for wsid in s[g.CODEMP_WINDOW_WORKSPACES]:
ws = client[wsid]
if ws is None:
logger.warning(
"a tag on the window was found but not a matching workspace."
)
continue
for vws in client.all_workspaces(window):
client.codemp.leave_workspace(vws.id)
client.uninstall_workspace(vws)
ws.cleanup()
del client.workspaces[wsid]
def on_text_command(self, view, command_name, args):
if command_name == "codemp_replace_text":
logger.info("got a codemp_replace_text command!")
def on_post_text_command(self, view, command_name, args):
if command_name == "codemp_replace_text":
logger.info("got a codemp_replace_text command!")
class CodempClientViewEventListener(sublime_plugin.ViewEventListener):
@ -92,40 +86,43 @@ class CodempClientViewEventListener(sublime_plugin.ViewEventListener):
return False
def on_selection_modified_async(self):
ws = client.get_workspace(self.view)
if ws is None:
return
region = self.view.sel()[0]
start = self.view.rowcol(region.begin())
end = self.view.rowcol(region.end())
vbuff = ws.get_by_local(self.view.buffer_id())
if vbuff is not None:
vbuff.send_cursor(ws)
vws = client.workspace_from_view(self.view)
vbuff = client.buffer_from_view(self.view)
if vws is None or vbuff is None:
raise
vws.send_cursor(vbuff.id, start, end)
def on_activated(self):
# sublime has no proper way to check if a view gained or lost input focus outside of this
# callback (i know right?), so we have to manually keep track of which view has the focus
g.ACTIVE_CODEMP_VIEW = self.view.id()
# print("view {} activated".format(self.view.id()))
global TEXT_LISTENER
safe_listener_attach(TEXT_LISTENER, self.view.buffer()) # pyright: ignore
def on_deactivated(self):
g.ACTIVE_CODEMP_VIEW = None
# print("view {} deactivated".format(self.view.id()))
global TEXT_LISTENER
safe_listener_detach(TEXT_LISTENER) # pyright: ignore
def on_pre_close(self):
global TEXT_LISTENER
if self.view.id() == g.ACTIVE_CODEMP_VIEW:
if self.view == sublime.active_window().active_view():
global TEXT_LISTENER
safe_listener_detach(TEXT_LISTENER) # pyright: ignore
ws = client.get_workspace(self.view)
if ws is None:
return
vws = client.workspace_from_view(self.view)
vbuff = client.buffer_from_view(self.view)
if vws is None or vbuff is None:
raise
vbuff = ws.get_by_local(self.view.buffer_id())
if vbuff is not None:
vbuff.cleanup()
vws.uninstall_buffer(vbuff.id)
def on_text_command(self, command_name, args):
if command_name == "codemp_replace_text":
logger.info("got a codemp_replace_text command! but in the view listener")
def on_post_text_command(self, command_name, args):
if command_name == "codemp_replace_text":
logger.info("got a codemp_replace_text command! but in the view listener")
class CodempClientTextChangeListener(sublime_plugin.TextChangeListener):
@ -135,17 +132,18 @@ class CodempClientTextChangeListener(sublime_plugin.TextChangeListener):
# we'll do it by hand with .attach(buffer).
return False
# blocking :D
def on_text_changed(self, changes):
# we do the boring stuff in the async thread
def on_text_changed_async(self, changes):
s = self.buffer.primary_view().settings()
if s.get(g.CODEMP_IGNORE_NEXT_TEXT_CHANGE, None):
if s.get(g.CODEMP_IGNORE_NEXT_TEXT_CHANGE, False):
logger.debug("Ignoring echoing back the change.")
s[g.CODEMP_IGNORE_NEXT_TEXT_CHANGE] = False
return
vbuff = client.get_buffer(self.buffer.primary_view())
vbuff = client.buffer_from_view(self.buffer.primary_view())
if vbuff is not None:
rt.dispatch(vbuff.send_buffer_change(changes))
# but then we block the main one for the actual sending!
sublime.set_timeout(lambda: vbuff.send_buffer_change(changes))
# Commands:
@ -166,11 +164,23 @@ class CodempClientTextChangeListener(sublime_plugin.TextChangeListener):
# Connect Command
#############################################################################
class CodempConnectCommand(sublime_plugin.WindowCommand):
def run(self, server_host, user_name, password="lmaodefaultpassword"):
client.connect(server_host, user_name, password)
def is_enabled(self) -> bool:
return client.handle is None
return client.codemp is None
def run(self, server_host, user_name, password="lmaodefaultpassword"):
logger.info(f"Connecting to {server_host} with user {user_name}...")
def try_connect():
try:
client.connect(server_host, user_name, password)
except Exception as e:
logger.error(f"Could not connect: {e}")
sublime.error_message(
"Could not connect:\n Make sure the server is up\n\
and your credentials are correct."
)
sublime.set_timeout_async(try_connect)
def input(self, args):
if "server_host" not in args:
@ -206,93 +216,148 @@ class ConnectUserName(sublime_plugin.TextInputHandler):
# Separate the join command into two commands, join workspace and join buffer, that get called back to back
# Generic Join Command
# Generic Join Workspace Command
#############################################################################
class CodempJoinCommand(sublime_plugin.WindowCommand):
def run(self, workspace_id, buffer_id):
if workspace_id == "":
return
vws = client.workspaces.get(workspace_id)
if vws is None:
try:
vws = client.join_workspace(workspace_id)
except Exception as e:
raise e
if vws is None:
logger.warning("The client returned a void workspace.")
return
vws.materialize()
if buffer_id == "* Don't Join Any":
buffer_id = ""
if buffer_id != "":
vws.attach(buffer_id)
class CodempJoinWorkspaceCommand(sublime_plugin.WindowCommand):
def is_enabled(self) -> bool:
return client.handle is not None
return client.codemp is not None
def run(self, workspace_id):
assert client.codemp is not None
if not client.valid_workspace(workspace_id):
logger.info(f"Joining workspace: '{workspace_id}'...")
promise = client.codemp.join_workspace(workspace_id)
active_window = sublime.active_window()
def defer_instantiation(promise):
try:
workspace = promise.wait()
except Exception as e:
logger.error(
f"Could not join workspace '{workspace_id}'.\n\nerror: {e}"
)
sublime.error_message(f"Could not join workspace '{workspace_id}'")
return
client.install_workspace(workspace, active_window)
sublime.set_timeout_async(lambda: defer_instantiation(promise))
# the else shouldn't really happen, and if it does, it should already be instantiated.
# ignore.
def input_description(self):
return "Join:"
def input(self, args):
if "workspace_id" not in args:
return JoinWorkspaceIdList()
return WorkspaceIdText()
class JoinWorkspaceIdList(sublime_plugin.ListInputHandler):
# To allow for having a selection and choosing non existing workspaces
# we do a little dance: We pass this list input handler to a TextInputHandler
# when we select "Create New..." which adds his result to the list of possible
# workspaces and pop itself off the stack to go back to the list handler.
def __init__(self):
self.list = client.active_workspaces()
self.list.sort()
self.list.append("* Create New...")
self.preselected = None
class WorkspaceIdText(sublime_plugin.TextInputHandler):
def name(self):
return "workspace_id"
def placeholder(self):
return "Workspace"
def list_items(self):
if self.preselected is not None:
return (self.list, self.preselected)
else:
return self.list
# To allow for having a selection while still being able to choose non-existing workspaces
# we do a little dance: we pass this list input handler to a TextInputHandler
# when we select "Create New..."; it adds its result to the list of possible
# workspaces and pops itself off the stack to go back to the list handler.
# class WorkspaceIdList(sublime_plugin.ListInputHandler):
# def __init__(self):
# assert client.codemp is not None # the command should not be available
def next_input(self, args):
if args["workspace_id"] == "* Create New...":
return AddListEntryName(self)
# # at the moment, the client can't give us a full list of existing workspaces
# # so a textinputhandler would be more appropriate. but we keep this for the future
wid = args["workspace_id"]
if wid != "":
vws = client.join_workspace(wid)
else:
vws = None
try:
return ListBufferId(vws)
except Exception:
return TextBufferId()
# self.add_entry_text = "* add entry..."
# self.list = client.codemp.active_workspaces()
# self.list.sort()
# self.list.append(self.add_entry_text)
# self.preselected = None
# def name(self):
# return "workspace_id"
# def placeholder(self):
# return "Workspace"
# def list_items(self):
# if self.preselected is not None:
# return (self.list, self.preselected)
# else:
# return self.list
# def next_input(self, args):
# if args["workspace_id"] == self.add_entry_text:
# return AddListEntry(self)
class TextBufferId(sublime_plugin.TextInputHandler):
def name(self):
return "buffer_id"
class CodempJoinBufferCommand(sublime_plugin.WindowCommand):
def is_enabled(self):
available_workspaces = client.all_workspaces(self.window)
return len(available_workspaces) > 0
def run(self, workspace_id, buffer_id):
# A workspace has some Buffers inside of it (filetree)
# some of those you are already attached to
# If already attached to it, return the same already existing bufferctl
# if existing but not attached (attach)
# if not existing ask for creation (create + attach)
vws = client.workspace_from_id(workspace_id)
assert vws is not None
# is the buffer already installed?
if vws.valid_buffer(buffer_id):
return # do nothing.
if buffer_id not in vws.codemp.filetree(filter=buffer_id):
create = sublime.ok_cancel_dialog(
"There is no buffer named '{buffer_id}' in the workspace '{workspace_id}'.\n\
Do you want to create it?",
ok_title="yes",
title="Create Buffer?",
)
if create:
try:
create_promise = vws.codemp.create(buffer_id)
except Exception as e:
logging.error(f"could not create buffer:\n\n {e}")
return
create_promise.wait()
# now we can defer the attaching process
promise = vws.codemp.attach(buffer_id)
def deferred_attach(promise):
try:
buff_ctl = promise.wait()
except Exception as e:
logging.error(f"error when attaching to buffer '{id}':\n\n {e}")
sublime.error_message(f"Could not attach to buffer '{buffer_id}'")
return
vbuff = vws.install_buffer(buff_ctl)
# TODO! if the view is already active calling focus_view() will not trigger the on_activate
self.window.focus_view(vbuff.view)
sublime.set_timeout_async(lambda: deferred_attach(promise))
def input_description(self) -> str:
return "Attach: "
def input(self, args):
# if we have only a workspace in the window, then
# skip to the buffer choice
if "workspace_id" not in args:
return ActiveWorkspacesIdList(self.window, get_buffer=True)
if "buffer_id" not in args:
return BufferIdList(args["workspace_id"])
class ListBufferId(sublime_plugin.ListInputHandler):
def __init__(self, vws):
self.ws = vws
self.list = vws.handle.filetree()
class BufferIdList(sublime_plugin.ListInputHandler):
def __init__(self, workspace_id):
self.add_entry_text = "* create new..."
self.list = [vbuff.id for vbuff in client.all_buffers(workspace_id)]
self.list.sort()
self.list.append("* Create New...")
self.list.append("* Don't Join Any")
self.list.append(self.add_entry_text)
self.preselected = None
def name(self):
@ -307,34 +372,9 @@ class ListBufferId(sublime_plugin.ListInputHandler):
else:
return self.list
def cancel(self):
client.leave_workspace(self.ws.id)
def next_input(self, args):
if args["buffer_id"] == "* Create New...":
return AddListEntryName(self)
if args["buffer_id"] == "* Dont' Join Any":
return None
class AddListEntryName(sublime_plugin.TextInputHandler):
def __init__(self, list_handler):
self.parent = list_handler
def name(self):
return None
def validate(self, text: str) -> bool:
return not len(text) == 0
def confirm(self, text: str):
self.parent.list.pop() # removes the "Create New..."
self.parent.list.insert(0, text)
self.parent.preselected = 0
def next_input(self, args):
return sublime_plugin.BackInputHandler()
if args["buffer_id"] == self.add_entry_text:
return AddListEntry(self)
# Text Change Command
@ -363,11 +403,8 @@ class CodempReplaceTextCommand(sublime_plugin.TextCommand):
# Disconnect Command
#############################################################################
class CodempDisconnectCommand(sublime_plugin.WindowCommand):
def is_enabled(self) -> bool:
if client.handle is not None:
return True
else:
return False
def is_enabled(self):
return client.codemp is not None
def run(self):
client.disconnect()
@ -375,23 +412,54 @@ class CodempDisconnectCommand(sublime_plugin.WindowCommand):
# Leave Workspace Command
class CodempLeaveWorkspaceCommand(sublime_plugin.WindowCommand):
def is_enabled(self) -> bool:
return client.handle is not None and len(client.workspaces.keys()) > 0
def is_enabled(self):
return client.codemp is not None and len(client.all_workspaces(self.window)) > 0
def run(self, id: str):
client.leave_workspace(id)
def run(self, workspace_id: str):
# client.leave_workspace(id)
pass
def input(self, args):
if "id" not in args:
return LeaveWorkspaceIdList()
return ActiveWorkspacesIdList()
class LeaveWorkspaceIdList(sublime_plugin.ListInputHandler):
class ActiveWorkspacesIdList(sublime_plugin.ListInputHandler):
def __init__(self, window=None, get_buffer=False):
self.window = window
self.get_buffer = get_buffer
def name(self):
return "id"
return "workspace_id"
def list_items(self):
return client.active_workspaces()
return [vws.id for vws in client.all_workspaces(self.window)]
def next_input(self, args):
if self.get_buffer:
return BufferIdList(args["workspace_id"])
class AddListEntry(sublime_plugin.TextInputHandler):
# this class works when the list input handler
# has appended a new element to its list, which will need to be
# replaced with the entry added from here!
def __init__(self, list_input_handler):
self.parent = list_input_handler
def name(self):
return None
def validate(self, text: str) -> bool:
return not len(text) == 0
def confirm(self, text: str):
self.parent.list.pop() # removes the add_entry_text
self.parent.list.insert(0, text)
self.parent.preselected = 0
def next_input(self, args):
return sublime_plugin.BackInputHandler()
# Proxy Commands ( NOT USED, left just in case we need it again. )


@ -1,11 +1,9 @@
import sublime
import os
import logging
from asyncio import CancelledError
from codemp import BufferController
import codemp
from Codemp.src import globals as g
from Codemp.src.task_manager import rt
logger = logging.getLogger(__name__)
@ -17,58 +15,88 @@ logger = logging.getLogger(__name__)
class VirtualBuffer:
def __init__(
self,
workspace_id: str,
workspace_rootdir: str,
remote_id: str,
buffctl: BufferController,
buffctl: codemp.BufferController,
view: sublime.View, # noqa: F821 # type: ignore
):
self.view = sublime.active_window().new_file()
self.codemp_id = remote_id
self.sublime_id = self.view.buffer_id()
self.workspace_id = workspace_id
self.workspace_rootdir = workspace_rootdir
self.buffctl = buffctl
self.view = view
self.id = self.buffctl.name()
self.tmpfile = os.path.join(workspace_rootdir, self.codemp_id)
def __hash__(self) -> int:
return hash(self.id)
self.view.set_name(self.codemp_id)
def cleanup(self):
self.uninstall()
self.buffctl.stop()
def install(self, rootdir):
if self.installed:
return
self.tmpfile = os.path.join(rootdir, self.id)
open(self.tmpfile, "a").close()
self.view.retarget(self.tmpfile)
self.view.set_scratch(True)
self.view.set_name(self.id)
self.view.retarget(self.tmpfile)
rt.dispatch(
self.apply_bufferchange_task(),
f"{g.BUFFCTL_TASK_PREFIX}-{self.codemp_id}",
)
# mark the view as a codemp view
s = self.view.settings()
self.view.set_status(g.SUBLIME_STATUS_ID, "[Codemp]")
s[g.CODEMP_BUFFER_TAG] = True
s[g.CODEMP_REMOTE_ID] = self.codemp_id
s[g.CODEMP_WORKSPACE_ID] = self.workspace_id
def cleanup(self):
self.__activate()
self.installed = True
def uninstall(self):
if not self.installed:
return
self.__deactivate()
os.remove(self.tmpfile)
# cleanup views
s = self.view.settings()
del s[g.CODEMP_BUFFER_TAG]
del s[g.CODEMP_REMOTE_ID]
del s[g.CODEMP_WORKSPACE_ID]
self.view.erase_status(g.SUBLIME_STATUS_ID)
rt.stop_task(f"{g.BUFFCTL_TASK_PREFIX}-{self.codemp_id}")
self.buffctl.stop()
logger.info(f"cleaning up virtual buffer '{self.codemp_id}'")
self.installed = False
async def apply_bufferchange_task(self):
logger.debug(f"spinning up '{self.codemp_id}' buffer worker...")
try:
while text_change := await self.buffctl.recv():
change_id = self.view.change_id()
if text_change.is_empty():
def __activate(self):
logger.info(f"registering a callback for buffer: {self.id}")
self.buffctl.callback(self.__apply_bufferchange_cb)
self.isactive = True
def __deactivate(self):
logger.info(f"clearing a callback for buffer: {self.id}")
self.buffctl.clear_callback()
self.isactive = False
def send_buffer_change(self, changes):
# we do not do any index checking, and trust sublime to provide the correct
# sequential indexing, assuming the changes are applied in the order they are received.
for change in changes:
region = sublime.Region(change.a.pt, change.b.pt)
logger.debug(
"sending txt change: Reg({} {}) -> '{}'".format(
region.begin(), region.end(), change.str
)
)
# we must block and wait for the send request to make sure the change went through ok
self.buffctl.send(region.begin(), region.end(), change.str).wait()
def __apply_bufferchange_cb(self, bufctl: codemp.BufferController):
def get_change_and_apply():
change_id = self.view.change_id()
while change := bufctl.try_recv().wait():
if change is None:
break
if change.is_empty():
logger.debug("change is empty. skipping.")
continue
# In case a change arrives to a background buffer, just apply it.
# We are not listening on it. Otherwise, interrupt the listening
# to avoid echoing back the change just received.
@ -81,37 +109,11 @@ class VirtualBuffer:
self.view.run_command(
"codemp_replace_text",
{
"start": text_change.start,
"end": text_change.end,
"content": text_change.content,
"start": change.start,
"end": change.end,
"content": change.content,
"change_id": change_id,
}, # pyright: ignore
)
except CancelledError:
logger.debug(f"'{self.codemp_id}' buffer worker stopped...")
raise
except Exception as e:
logger.error(f"buffer worker '{self.codemp_id}' crashed:\n{e}")
raise
def send_buffer_change(self, changes):
# we do not do any index checking, and trust sublime with providing the correct
# sequential indexing, assuming the changes are applied in the order they are received.
for change in changes:
region = sublime.Region(change.a.pt, change.b.pt)
logger.debug(
"sending txt change: Reg({} {}) -> '{}'".format(
region.begin(), region.end(), change.str
)
)
self.buffctl.send(region.begin(), region.end(), change.str)
def send_cursor(self, vws): # pyright: ignore # noqa: F821
# TODO: only the last placed cursor/selection.
# status_log(f"sending cursor position in workspace: {vbuff.workspace.id}")
region = self.view.sel()[0]
start = self.view.rowcol(region.begin()) # only counts UTF8 chars
end = self.view.rowcol(region.end())
vws.curctl.send(self.codemp_id, start, end)
sublime.set_timeout(get_change_and_apply)


@ -1,5 +1,6 @@
from __future__ import annotations
from typing import Optional
from typing import Optional, Dict
import sublime
import logging
@ -7,114 +8,132 @@ import logging
import codemp
from Codemp.src import globals as g
from Codemp.src.workspace import VirtualWorkspace
from Codemp.src.buffers import VirtualBuffer
from Codemp.src.utils import bidict
logger = logging.getLogger(__name__)
# the client will be responsible to keep track of everything!
# it will need 3 bidirectional dictionaries and 2 normal ones
# normal: workspace_id -> VirtualWorkspaces
# normal: buffer_id -> VirtualBuffer
# bidir: VirtualBuffer <-> VirtualWorkspace
# bidir: VirtualBuffer <-> Sublime.View
# bidir: VirtualWorkspace <-> Sublime.Window
class VirtualClient:
handle: Optional[codemp.Client]
def __init__(self):
self.codemp: Optional[codemp.Client] = None
self.driver = codemp.init(lambda msg: logger.log(logger.level, msg), False)
self.workspaces: dict[str, VirtualWorkspace] = {}
self.active_workspace: Optional[None] = None
def __getitem__(self, key: str):
return self.workspaces.get(key)
# bookkeeping corner
self.__id2buffer: dict[str, VirtualBuffer] = {}
self.__id2workspace: dict[str, VirtualWorkspace] = {}
self.__view2buff: dict[sublime.View, VirtualBuffer] = {}
self.__buff2workspace: bidict[VirtualBuffer, VirtualWorkspace] = bidict()
self.__workspace2window: bidict[VirtualWorkspace, sublime.Window] = bidict()
def valid_window(self, window: sublime.Window):
return window in self.__workspace2window.inverse
def valid_workspace(self, workspace: VirtualWorkspace | str):
if isinstance(workspace, str):
return self.__id2workspace.get(workspace) is not None
return workspace in self.__workspace2window
def all_workspaces(
self, window: Optional[sublime.Window] = None
) -> list[VirtualWorkspace]:
if window is None:
return list(self.__workspace2window.keys())
else:
return self.__workspace2window.inverse[window]
def workspace_from_view(self, view: sublime.View) -> Optional[VirtualWorkspace]:
buff = self.__view2buff.get(view, None)
return self.__buff2workspace.get(buff, None)
def workspace_from_buffer(self, buff: VirtualBuffer) -> Optional[VirtualWorkspace]:
return self.__buff2workspace.get(buff)
def workspace_from_id(self, id: str) -> Optional[VirtualWorkspace]:
return self.__id2workspace.get(id)
def all_buffers(
self, workspace: Optional[VirtualWorkspace | str] = None
) -> list[VirtualBuffer]:
if workspace is None:
return list(self.__buff2workspace.keys())
else:
if isinstance(workspace, str):
workspace = self.__id2workspace[workspace]
return self.__buff2workspace.inverse[workspace]
def buffer_from_view(self, view: sublime.View) -> Optional[VirtualBuffer]:
return self.__view2buff.get(view)
def buffer_from_id(self, id: str) -> Optional[VirtualBuffer]:
return self.__id2buffer.get(id)
def view_from_buffer(self, buff: VirtualBuffer) -> sublime.View:
return buff.view
def disconnect(self):
if self.handle is None:
if self.codemp is None:
return
logger.info("disconnecting from the current client")
for vws in self.workspaces.values():
# for each workspace tell it to clean up after itself.
for vws in self.all_workspaces():
vws.cleanup()
self.codemp.leave_workspace(vws.id)
self.handle = None
self.__id2workspace.clear()
self.__id2buffer.clear()
self.__buff2workspace.clear()
self.__view2buff.clear()
self.__workspace2window.clear()
self.codemp = None
def connect(self, host: str, user: str, password: str):
if self.handle is not None:
if self.codemp is not None:
logger.info("Disconnecting from previous client.")
return self.disconnect()
logger.info(f"Connecting to {host} with user {user}")
try:
self.handle = codemp.Client(host, user, password)
self.codemp = codemp.Client(host, user, password)
id = self.codemp.user_id()
logger.debug(f"Connected to '{host}' as user {user} (id: {id})")
if self.handle is not None:
id = self.handle.user_id()
logger.debug(f"Connected to '{host}' with user {user} and id: {id}")
except Exception as e:
logger.error(f"Could not connect: {e}")
sublime.error_message(
"Could not connect:\n Make sure the server is up.\n\
or your credentials are correct."
)
raise
def join_workspace(
self,
workspace_id: str,
def install_workspace(
self, workspace: codemp.Workspace, window: sublime.Window
) -> VirtualWorkspace:
if self.handle is None:
sublime.error_message("Connect to a server first.")
raise
# we pass the window as well so that if the window changes in the
# meantime we still have the correct one!
vws = VirtualWorkspace(workspace, window)
self.__workspace2window[vws] = window
self.__id2workspace[vws.id] = vws
logger.info(f"Joining workspace: '{workspace_id}'")
try:
workspace = self.handle.join_workspace(workspace_id).wait()
except Exception as e:
logger.error(f"Could not join workspace '{workspace_id}'.\n\nerror: {e}")
sublime.error_message(f"Could not join workspace '{workspace_id}'")
raise
vws = VirtualWorkspace(workspace)
self.workspaces[workspace_id] = vws
vws.install()
return vws
def leave_workspace(self, id: str):
if self.handle is None:
def uninstall_workspace(self, vws: VirtualWorkspace):
if vws not in self.__workspace2window:
raise
if self.handle.leave_workspace(id):
logger.info(f"Leaving workspace: '{id}'")
self.workspaces[id].cleanup()
del self.workspaces[id]
logger.info(f"Uninstalling workspace '{vws.id}'...")
vws.cleanup()
del self.__id2workspace[vws.id]
del self.__workspace2window[vws]
self.__buff2workspace.inverse_del(vws)
def get_workspace(self, view):
tag_id = view.settings().get(g.CODEMP_WORKSPACE_ID)
if tag_id is None:
return
ws = self.workspaces.get(tag_id)
if ws is None:
logging.warning("a tag on the view was found but not a matching workspace.")
return
return ws
def active_workspaces(self):
return self.handle.active_workspaces() if self.handle else []
def workspaces_in_server(self):
return self.codemp.active_workspaces() if self.codemp else []
def user_id(self):
return self.handle.user_id() if self.handle else None
def get_buffer(self, view):
ws = self.get_workspace(view)
return None if ws is None else ws.get_by_local(view.buffer_id())
def make_active(self, ws: Optional[VirtualWorkspace]):
if self.active_workspace == ws:
return
if self.active_workspace is not None:
self.active_workspace.deactivate()
if ws is not None:
ws.activate()
self.active_workspace = ws # pyright: ignore
return self.codemp.user_id() if self.codemp else None
client = VirtualClient()


@ -6,8 +6,6 @@ import asyncio
import threading
import concurrent.futures
# from ..ext import sublime_asyncio as rt
logger = logging.getLogger(__name__)


@ -1,7 +1,58 @@
import sublime
import sublime_plugin
from typing import Dict, Generic, TypeVar
from Codemp.src import globals as g
# bidirectional dictionary so that we can have bidirectional
# lookup!
# In particular we can use it for:
# bd[workspace_id] = window
# bd[view] = virtual_buffer
D = TypeVar("D", Dict, dict)
K = TypeVar("K")
V = TypeVar("V")
# using del bd.inverse[key] doesn't work since it can't be intercepted.
# the only way is to iterate:
# for key in bd.inverse[inverse_key]
class bidict(dict, Generic[K, V]):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.inverse: Dict[V, list[K]] = {}
for key, value in self.items():
self.inverse.setdefault(value, []).append(key)
def __setitem__(self, key: K, value: V):
if key in self:
self.inverse[self[key]].remove(key)
super(bidict, self).__setitem__(key, value)
self.inverse.setdefault(value, []).append(key)
def __delitem__(self, key: K):
# if we delete a normal key, remove the key from the inverse element.
inverse_key = self[key]
self.inverse.setdefault(inverse_key, []).remove(key)
# if the resulting inverse key list is empty delete it
if inverse_key in self.inverse and not self.inverse[inverse_key]:
del self.inverse[inverse_key]
# delete the normal key
super(bidict, self).__delitem__(key)
def inverse_del(self, inverse_key: V):
# deletes all the elements matching the inverse key
# the last del will also delete the inverse key.
for key in self.inverse[inverse_key]:
self.pop(key, None)
def clear(self):
self.inverse.clear()
super(bidict, self).clear()
def status_log(msg, popup=False):
sublime.status_message("[codemp] {}".format(msg))
@ -50,20 +101,17 @@ def get_view_from_local_path(path):
return view
def draw_cursor_region(view, cursor):
reg = rowcol_to_region(view, cursor.start, cursor.end)
def draw_cursor_region(view, start, end, user):
reg = rowcol_to_region(view, start, end)
reg_flags = sublime.RegionFlags.DRAW_EMPTY
user_hash = hash(cursor.user)
user_hash = hash(user)
def draw():
view.add_regions(
f"{g.SUBLIME_REGIONS_PREFIX}-{user_hash}",
[reg],
flags=reg_flags,
scope=g.REGIONS_COLORS[user_hash % len(g.REGIONS_COLORS)],
annotations=[cursor.user], # pyright: ignore
annotation_color=g.PALETTE[user_hash % len(g.PALETTE)],
)
sublime.set_timeout_async(draw)
view.add_regions(
f"{g.SUBLIME_REGIONS_PREFIX}-{user_hash}",
[reg],
flags=reg_flags,
scope=g.REGIONS_COLORS[user_hash % len(g.REGIONS_COLORS)],
annotations=[user], # pyright: ignore
annotation_color=g.PALETTE[user_hash % len(g.PALETTE)],
)


@ -1,19 +1,16 @@
from __future__ import annotations
from typing import Optional
from typing import Optional, Tuple
import sublime
import shutil
import tempfile
import logging
from asyncio import CancelledError
from codemp import Workspace, Promise, CursorController
import codemp
from Codemp.src import globals as g
from Codemp.src.buffers import VirtualBuffer
# from Codemp.src.task_manager import rt
from Codemp.src.utils import draw_cursor_region
from Codemp.src.utils import bidict
logger = logging.getLogger(__name__)
@ -22,235 +19,207 @@ logger = logging.getLogger(__name__)
# A virtual workspace is a bridge class that aims to translate
# events that happen to the codemp workspaces into sublime actions
class VirtualWorkspace:
def __init__(self, handle: Workspace):
self.handle: Workspace = handle
self.id: str = self.handle.id()
self.sublime_window: sublime.Window = sublime.active_window()
self.curctl: CursorController = handle.cursor()
self.materialized = False
self.isactive = False
def __init__(self, handle: codemp.Workspace, window: sublime.Window):
self.codemp: codemp.Workspace = handle
self.window: sublime.Window = window
self.curctl: codemp.CursorController = self.codemp.cursor()
self.id: str = self.codemp.id()
self.codemp.fetch_buffers()
self.codemp.fetch_users()
# mapping remote ids -> local ids
self.id_map: dict[str, int] = {}
self.active_buffers: dict[int, VirtualBuffer] = {} # local_id -> VBuff
self.__buff2view: bidict[VirtualBuffer, sublime.View] = bidict()
self.__id2buff: dict[str, VirtualBuffer] = {}
# self.id_map: dict[str, int] = {}
# self.active_buffers: dict[int, VirtualBuffer] = {} # local_id -> VBuff
def _cleanup(self):
self._deactivate()
def __hash__(self) -> int:
# so we can use these as dict keys!
return hash(self.id)
# the worskpace only cares about closing the various open views on its Buffer.
def sync(self):
# check that the state we have here is the same as the one codemp has internally!
# if not get up to speed!
self.codemp.fetch_buffers().wait()
attached_buffers = self.codemp.buffer_list()
all(id in self.__id2buff for id in attached_buffers)
# TODO!
def valid_buffer(self, buff: VirtualBuffer | str):
if isinstance(buff, str):
return self.buff_by_id(buff) is not None
return buff in self.__buff2view
def all_buffers(self) -> list[VirtualBuffer]:
return list(self.__buff2view.keys())
def buff_by_view(self, view: sublime.View) -> Optional[VirtualBuffer]:
buff = self.__buff2view.inverse.get(view)
return buff[0] if buff is not None else None
def buff_by_id(self, id: str) -> Optional[VirtualBuffer]:
return self.__id2buff.get(id)
def all_views(self) -> list[sublime.View]:
return list(self.__buff2view.inverse.keys())
def view_by_buffer(self, buffer: VirtualBuffer) -> sublime.View:
return buffer.view
def cleanup(self):
# the workspace only cares about closing the various open views of its buffers.
# the event listener calls the cleanup code for each buffer independently on its own
# upon closure.
for vbuff in self.active_buffers.values():
vbuff.view.close()
for view in self.all_views():
view.close()
self.active_buffers = {} # drop all Buffer, let them be garbage collected (hopefully)
self.__buff2view.clear()
self.__id2buff.clear()
if not self.materialized:
return # nothing to delete
self.uninstall()
self.curctl.stop()
# remove from the "virtual" project folders
d: dict = self.sublime_window.project_data() # pyright: ignore
if d is None:
def uninstall(self):
if not self.installed:
return
self.__deactivate()
proj: dict = self.window.project_data() # type:ignore
if proj is None:
raise
newf = list(
clean_proj_folders = list(
filter(
lambda f: f.get("name", "") != f"{g.WORKSPACE_FOLDER_PREFIX}{self.id}",
d["folders"],
proj["folders"],
)
)
d["folders"] = newf
self.sublime_window.set_project_data(d)
proj["folders"] = clean_proj_folders
self.window.set_project_data(proj)
logger.info(f"cleaning up virtual workspace '{self.id}'")
shutil.rmtree(self.rootdir, ignore_errors=True)
# stop the controller
self.curctl.stop()
self.installed = False
# clean the window form the tags
s = self.sublime_window.settings()
del s[g.CODEMP_WINDOW_TAG]
del s[g.CODEMP_WINDOW_WORKSPACES]
self.materialized = False
def _materialize(self):
# attach the workspace to the editor, tagging windows and populating
# virtual file systems
if self.materialized:
return # no op, we already have materialized the workspace in the editor
def install(self):
if self.installed:
return
# initialise the virtual filesystem
tmpdir = tempfile.mkdtemp(prefix="codemp_")
logging.debug(f"setting up virtual fs for workspace in: {tmpdir}")
self.rootdir = tmpdir
# and add a new "project folder"
proj_data: dict = self.sublime_window.project_data() # pyright: ignore
if proj_data is None:
proj_data = {"folders": []}
proj: dict = self.window.project_data() # pyright: ignore
if proj is None:
proj = {"folders": []} # pyright: ignore, Value can be None
proj_data["folders"].append(
proj["folders"].append(
{"name": f"{g.WORKSPACE_FOLDER_PREFIX}{self.id}", "path": self.rootdir}
)
self.sublime_window.set_project_data(proj_data)
self.window.set_project_data(proj)
s: dict = self.sublime_window.settings() # pyright: ignore
if s.get(g.CODEMP_WINDOW_TAG, False):
s[g.CODEMP_WINDOW_WORKSPACES].append(self.id)
else:
s[g.CODEMP_WINDOW_TAG] = True
s[g.CODEMP_WINDOW_WORKSPACES] = [self.id]
self.__activate()
self.installed = True
self.materialized = True
# def _activate(self):
# rt.dispatch(
# self.move_cursor_task(),
# f"{g.CURCTL_TASK_PREFIX}-{self.id}",
# )
# self.isactive = True
def _deactivate(self):
if self.isactive:
rt.stop_task(f"{g.CURCTL_TASK_PREFIX}-{self.id}")
def __activate(self):
self.curctl.callback(self.__move_cursor_callback)
self.isactive = True
def __deactivate(self):
self.curctl.clear_callback()
self.isactive = False
def _add_buffer(self, remote_id: str, vbuff: VirtualBuffer):
self.id_map[remote_id] = vbuff.view.buffer_id()
self.active_buffers[vbuff.view.buffer_id()] = vbuff
def install_buffer(self, buff: codemp.BufferController) -> VirtualBuffer:
view = self.window.new_file()
def _get_by_local(self, local_id: int) -> Optional[VirtualBuffer]:
return self.active_buffers.get(local_id)
vbuff = VirtualBuffer(buff, view)
self.__buff2view[vbuff] = view
self.__id2buff[vbuff.id] = vbuff
def _get_by_remote(self, remote_id: str) -> Optional[VirtualBuffer]:
local_id = self.id_map.get(remote_id)
if local_id is None:
return
vbuff = self.active_buffers.get(local_id)
if vbuff is None:
logging.warning(
"a local-remote buffer id pair was found but \
not the matching virtual buffer."
)
return
vbuff.install(self.rootdir)
return vbuff
def create(self, id: str) -> Promise[None]:
return self.handle.create(id)
def uninstall_buffer(self, vbuff: VirtualBuffer):
vbuff.cleanup()
buffview = self.view_by_buffer(vbuff)
del self.__buff2view[vbuff]
del self.__id2buff[vbuff.id]
buffview.close()
# A workspace has some Buffer inside of it (filetree)
# some of those you are already attached to (Buffer_by_name)
# If already attached to it return the same alredy existing bufferctl
# if existing but not attached (attach)
# if not existing ask for creation (create + attach)
def attach(self, id: str):
if id is None:
return
# def detach(self, id: str):
# attached_Buffer = self.codemp.buffer_by_name(id)
# if attached_Buffer is None:
# sublime.error_message(f"You are not attached to the buffer '{id}'")
# logging.warning(f"You are not attached to the buffer '{id}'")
# return
attached_Buffer = self.handle.buffer_by_name(id)
if attached_Buffer is not None:
return self._get_by_remote(id)
# self.codemp.detach(id)
self.handle.fetch_buffers()
existing_Buffer = self.handle.filetree(filter=None)
if id not in existing_Buffer:
create = sublime.ok_cancel_dialog(
"There is no buffer named '{id}' in the workspace.\n\
Do you want to create it?",
ok_title="yes",
title="Create Buffer?",
)
if create:
try:
create_promise = self.create(id)
except Exception as e:
logging.error(f"could not create buffer:\n\n {e}")
return
create_promise.wait()
else:
return
# def delete(self, id: str):
# self.codemp.fetch_buffers()
# existing_Buffer = self.codemp.filetree(filter=None)
# if id not in existing_Buffer:
# sublime.error_message(f"The buffer '{id}' does not exists.")
# logging.info(f"The buffer '{id}' does not exists.")
# return
# # delete a buffer that exists but you are not attached to
# attached_Buffer = self.codemp.buffer_by_name(id)
# if attached_Buffer is None:
# delete = sublime.ok_cancel_dialog(
# "Confirm you want to delete the buffer '{id}'",
# ok_title="delete",
# title="Delete Buffer?",
# )
# if delete:
# try:
# self.codemp.delete(id).wait()
# except Exception as e:
# logging.error(
# f"error when deleting the buffer '{id}':\n\n {e}", True
# )
# return
# else:
# return
# now either we created it or it exists already
try:
buff_ctl = self.handle.attach(id)
except Exception as e:
logging.error(f"error when attaching to buffer '{id}':\n\n {e}")
return
# # delete buffer that you are attached to
# delete = sublime.ok_cancel_dialog(
# "Confirm you want to delete the buffer '{id}'.\n\
# You will be disconnected from it.",
# ok_title="delete",
# title="Delete Buffer?",
# )
# if delete:
# self.codemp.detach(id)
# try:
# self.codemp.delete(id).wait()
# except Exception as e:
# logging.error(f"error when deleting the buffer '{id}':\n\n {e}", True)
# return
vbuff = VirtualBuffer(self.id, self.rootdir, id, buff_ctl.wait())
self._add_buffer(id, vbuff)
def send_cursor(self, id: str, start: Tuple[int, int], end: Tuple[int, int]):
# we can safely ignore the promise, we don't really care if everything
# is ok for now with the cursor.
self.curctl.send(id, start, end)
# TODO! if the view is already active calling focus_view() will not trigger the on_activate
self.sublime_window.focus_view(vbuff.view)
def detach(self, id: str):
attached_Buffer = self.handle.buffer_by_name(id)
if attached_Buffer is None:
sublime.error_message(f"You are not attached to the buffer '{id}'")
logging.warning(f"You are not attached to the buffer '{id}'")
return
self.handle.detach(id)
def delete(self, id: str):
self.handle.fetch_buffers()
existing_Buffer = self.handle.filetree(filter=None)
if id not in existing_Buffer:
sublime.error_message(f"The buffer '{id}' does not exists.")
logging.info(f"The buffer '{id}' does not exists.")
return
# delete a buffer that exists but you are not attached to
attached_Buffer = self.handle.buffer_by_name(id)
if attached_Buffer is None:
delete = sublime.ok_cancel_dialog(
"Confirm you want to delete the buffer '{id}'",
ok_title="delete",
title="Delete Buffer?",
)
if delete:
try:
self.handle.delete(id).wait()
except Exception as e:
logging.error(
f"error when deleting the buffer '{id}':\n\n {e}", True
)
return
else:
return
# delete buffer that you are attached to
delete = sublime.ok_cancel_dialog(
"Confirm you want to delete the buffer '{id}'.\n\
You will be disconnected from it.",
ok_title="delete",
title="Delete Buffer?",
)
if delete:
self.handle.detach(id)
try:
self.handle.delete(id).wait()
except Exception as e:
logging.error(f"error when deleting the buffer '{id}':\n\n {e}", True)
return
async def move_cursor_task(self):
logger.debug(f"spinning up cursor worker for workspace '{self.id}'...")
try:
# blocking for now ...
while cursor_event := self.curctl.recv().wait():
vbuff = self._get_by_remote(cursor_event.buffer)
def __move_cursor_callback(self, ctl: codemp.CursorController):
def get_event_and_draw():
while event := ctl.try_recv().wait():
if event is None:
break
vbuff = self.buff_by_id(event.buffer)
if vbuff is None:
logger.warning(
"received a cursor event for a buffer that wasn't saved internally."
)
continue
draw_cursor_region(vbuff.view, cursor_event)
draw_cursor_region(vbuff.view, event.start, event.end, event.user)
except CancelledError:
logger.debug(f"cursor worker for '{self.id}' stopped...")
raise
except Exception as e:
logger.error(f"cursor worker '{self.id}' crashed:\n{e}")
raise
sublime.set_timeout_async(get_event_and_draw)