wip: moved from the glue model to the promise model

Former-commit-id: be08024cd256b631697b6c8fda6d99f8ccb4ece8
This commit is contained in:
cschen 2024-08-21 21:35:57 +02:00
parent 62ed439b41
commit 507fca5057
8 changed files with 159 additions and 186 deletions

View file

@ -1 +1 @@
def6ad401dc56ab672f6c77a3e0dada48efd8f9e
999fd917360a8fa68970c12c70bf5decf30c84a4

View file

@ -5,12 +5,12 @@ import sublime_plugin
import logging
import random
from Codemp.src.task_manager import rt
from Codemp.src.client import client, VirtualClient
from Codemp.src.logger import inner_logger
# from Codemp.src.task_manager import rt
from Codemp.src.client import client
from Codemp.src.utils import safe_listener_detach
from Codemp.src.utils import safe_listener_attach
from Codemp.src import globals as g
from codemp import register_logger
LOG_LEVEL = logging.DEBUG
handler = logging.StreamHandler()
@ -27,49 +27,37 @@ package_logger.propagate = False
logger = logging.getLogger(__name__)
# returns false if logger already exists
register_logger(lambda msg: logger.log(logger.level, msg), False)
TEXT_LISTENER = None
# rt.dispatch(inner_logger.listen(), "codemp-logger")
# Initialisation and Deinitialisation
##############################################################################
def plugin_loaded():
    """Sublime Text plugin entry point.

    Creates the single module-global text-change listener; attaching and
    detaching it to views happens elsewhere (plugin_unloaded detaches it
    via safe_listener_detach).
    """
    global TEXT_LISTENER
    TEXT_LISTENER = CodempClientTextChangeListener()
    logger.debug("plugin loaded")
def disconnect_client():
def plugin_unloaded():
logger.debug("unloading")
global TEXT_LISTENER
if TEXT_LISTENER is not None:
safe_listener_detach(TEXT_LISTENER)
for vws in client.workspaces.values():
vws.cleanup()
client.handle = None # drop
def plugin_unloaded():
# releasing the runtime, runs the disconnect callback defined when acquiring the event loop.
logger.debug("unloading")
package_logger.removeHandler(handler)
disconnect_client()
rt.stop_loop()
client.disconnect()
# rt.stop_loop()
# Listeners
##############################################################################
class EventListener(sublime_plugin.EventListener):
def on_exit(self):
disconnect_client()
client.disconnect()
def on_pre_close_window(self, window):
if client.active_workspace is None:
@ -220,14 +208,15 @@ class ConnectUserName(sublime_plugin.TextInputHandler):
# Generic Join Command
#############################################################################
async def JoinCommand(client: VirtualClient, workspace_id: str, buffer_id: str):
class CodempJoinCommand(sublime_plugin.WindowCommand):
def run(self, workspace_id, buffer_id):
if workspace_id == "":
return
vws = client.workspaces.get(workspace_id)
if vws is None:
try:
vws = await client.join_workspace(workspace_id)
vws = client.join_workspace(workspace_id)
except Exception as e:
raise e
@ -237,15 +226,11 @@ async def JoinCommand(client: VirtualClient, workspace_id: str, buffer_id: str):
vws.materialize()
if buffer_id != "":
await vws.attach(buffer_id)
class CodempJoinCommand(sublime_plugin.WindowCommand):
def run(self, workspace_id, buffer_id):
if buffer_id == "* Don't Join Any":
buffer_id = ""
rt.dispatch(JoinCommand(client, workspace_id, buffer_id))
if buffer_id != "":
vws.attach(buffer_id)
def is_enabled(self) -> bool:
return client.handle is not None
@ -287,7 +272,7 @@ class JoinWorkspaceIdList(sublime_plugin.ListInputHandler):
wid = args["workspace_id"]
if wid != "":
vws = rt.block_on(client.join_workspace(wid))
vws = client.join_workspace(wid)
else:
vws = None
try:
@ -385,7 +370,7 @@ class CodempDisconnectCommand(sublime_plugin.WindowCommand):
return False
def run(self):
disconnect_client()
client.disconnect()
# Leave Workspace Command

View file

@ -58,6 +58,7 @@ class VirtualBuffer:
self.view.erase_status(g.SUBLIME_STATUS_ID)
rt.stop_task(f"{g.BUFFCTL_TASK_PREFIX}-{self.codemp_id}")
self.buffctl.stop()
logger.info(f"cleaning up virtual buffer '{self.codemp_id}'")
async def apply_bufferchange_task(self):
@ -94,7 +95,7 @@ class VirtualBuffer:
logger.error(f"buffer worker '{self.codemp_id}' crashed:\n{e}")
raise
async def send_buffer_change(self, changes):
def send_buffer_change(self, changes):
# we do not do any index checking, and trust sublime with providing the correct
# sequential indexing, assuming the changes are applied in the order they are received.
for change in changes:
@ -104,7 +105,7 @@ class VirtualBuffer:
region.begin(), region.end(), change.str
)
)
await self.buffctl.send(region.begin(), region.end(), change.str)
self.buffctl.send(region.begin(), region.end(), change.str)
def send_cursor(self, vws): # pyright: ignore # noqa: F821
# TODO: only the last placed cursor/selection.

View file

@ -1,9 +1,10 @@
from __future__ import annotations
from typing import Optional
import sublime
import logging
from codemp import Client
import codemp
from Codemp.src import globals as g
from Codemp.src.workspace import VirtualWorkspace
@ -11,43 +12,61 @@ logger = logging.getLogger(__name__)
class VirtualClient:
handle: Optional[codemp.Client]
def __init__(self):
self.handle = None
self.driver = codemp.init(lambda msg: logger.log(logger.level, msg), False)
self.workspaces: dict[str, VirtualWorkspace] = {}
self.active_workspace: VirtualWorkspace | None = None
self.active_workspace: Optional[None] = None
def __getitem__(self, key: str) -> Optional["VirtualWorkspace"]:
    """Dict-style lookup of a joined workspace by its id; None when absent."""
    return self.workspaces.get(key)
def disconnect(self):
    """Tear down every joined workspace and drop the client handle.

    No-op when not connected (handle is None).
    """
    if self.handle is None:
        return
    logger.info("disconnecting from the current client")
    for vws in self.workspaces.values():
        # NOTE(review): this change set renames the workspace teardown
        # method to `_cleanup` — confirm `cleanup()` still exists on
        # VirtualWorkspace, otherwise this raises AttributeError.
        vws.cleanup()
    # Dropping the handle releases the underlying codemp client.
    # NOTE(review): self.workspaces is not cleared here — verify stale
    # entries are intended to survive a disconnect.
    self.handle = None
def connect(self, host: str, user: str, password: str):
if self.handle is not None:
logger.info("Disconnecting from previous client.")
return self.disconnect()
logger.info(f"Connecting to {host} with user {user}")
try:
self.handle = Client(host, user, password)
self.handle = codemp.Client(host, user, password)
if self.handle is not None:
id = self.handle.user_id()
logger.debug(f"Connected to '{host}' with user {user} and id: {id}")
except Exception as e:
logger.error(f"Could not connect: {e}")
sublime.error_message(
"Could not connect:\n Make sure the server is up.\n\
or your credentials are correct."
)
return
id = self.handle.user_id() # pyright: ignore
logger.debug(f"Connected to '{host}' with user {user} and id: {id}")
raise
def join_workspace(
self,
workspace_id: str,
) -> VirtualWorkspace | None:
) -> VirtualWorkspace:
if self.handle is None:
return
sublime.error_message("Connect to a server first.")
raise
logger.info(f"Joining workspace: '{workspace_id}'")
try:
workspace = self.handle.join_workspace(workspace_id)
workspace = self.handle.join_workspace(workspace_id).wait()
except Exception as e:
logger.error(f"Could not join workspace '{workspace_id}'.\n\nerror: {e}")
sublime.error_message(f"Could not join workspace '{workspace_id}'")
return
raise
vws = VirtualWorkspace(workspace)
self.workspaces[workspace_id] = vws
@ -56,9 +75,10 @@ class VirtualClient:
def leave_workspace(self, id: str):
if self.handle is None:
return False
logger.info(f"Leaving workspace: '{id}'")
raise
if self.handle.leave_workspace(id):
logger.info(f"Leaving workspace: '{id}'")
self.workspaces[id].cleanup()
del self.workspaces[id]
@ -84,7 +104,7 @@ class VirtualClient:
ws = self.get_workspace(view)
return None if ws is None else ws.get_by_local(view.buffer_id())
def make_active(self, ws: VirtualWorkspace | None):
def make_active(self, ws: Optional[VirtualWorkspace]):
if self.active_workspace == ws:
return
@ -94,7 +114,7 @@ class VirtualClient:
if ws is not None:
ws.activate()
self.active_workspace = ws
self.active_workspace = ws # pyright: ignore
client = VirtualClient()

View file

@ -1,50 +0,0 @@
import logging
from asyncio import CancelledError
from codemp import PyLogger
logger = logging.getLogger(__name__)
class CodempLogger:
    """Bridge between codemp's rust-side tracing output and Python logging.

    Wraps a PyLogger (the rust tracing subscriber) and pumps its messages
    into this module's logger via the async `listen` loop.
    """

    def __init__(self, log_level):
        """Create the bridge at `log_level`; debug level enables verbose tracing."""
        self.logger = logger
        self.level = log_level
        self.internal_logger = None
        self.started = False
        try:
            # PyLogger spins a tracing_subscriber rust side with a
            # .try_init() and errors out if already initialized.
            # initialize only once
            self.internal_logger = PyLogger(self.level == logging.DEBUG)
        except Exception:
            # NOTE(review): internal_logger is still None whenever the
            # constructor raised, so this guard always re-raises; if the
            # intent was to tolerate a second initialization, the condition
            # needs revisiting.
            if self.internal_logger is None:
                raise

    async def listen(self):
        """Forward rust-side log messages to the Python logger until the sender drops.

        Idempotent: a second call while already started returns immediately.
        Re-raises CancelledError (marking the listener stopped) and any
        unexpected crash of the inner logger.
        """
        if self.started:
            return
        self.started = True
        self.logger.debug("spinning up internal logger listener...")
        assert self.internal_logger is not None
        try:
            # Only None signals that the sender was dropped; any other
            # message (including an empty string) is forwarded. The previous
            # walrus-while form exited on every falsy value, which both
            # treated "" as a terminator and made the "dropped" branch
            # unreachable.
            while True:
                msg = await self.internal_logger.listen()
                if msg is None:
                    logger.log(logging.DEBUG, "logger sender dropped.")
                    break
                logger.log(logging.DEBUG, msg)
        except CancelledError:
            self.logger.debug("inner logger stopped.")
            self.started = False
            raise
        except Exception as e:
            self.logger.error(f"inner logger crashed unexpectedly: \n {e}")
            # bare raise preserves the original traceback (raise e resets it)
            raise

    def log(self, msg):
        """Log `msg` through the module logger at this bridge's configured level."""
        self.logger.log(self.level, msg)


inner_logger = CodempLogger(logging.INFO)

View file

@ -1,6 +1,5 @@
from typing import Optional, Callable, Any
from asyncio.coroutines import functools
import sublime
import logging
import asyncio
@ -147,4 +146,4 @@ class Runtime:
# store a global in the module so it acts as a singleton
# (modules are loaded only once)
rt = Runtime()
# rt = Runtime()

View file

@ -1,5 +1,6 @@
import sublime
import sublime_plugin
from Codemp.src import globals as g
def status_log(msg, popup=False):
@ -47,3 +48,22 @@ def get_view_from_local_path(path):
for view in window.views():
if view.file_name() == path:
return view
def draw_cursor_region(view, cursor):
    """Render a remote user's cursor/selection inside `view`.

    The actual drawing is deferred to Sublime's async thread via
    set_timeout_async so the caller is never blocked. Region scope and
    annotation colors are picked deterministically from the hash of the
    user name, so each collaborator keeps a stable highlight color.
    """
    uid = hash(cursor.user)
    target = rowcol_to_region(view, cursor.start, cursor.end)
    scope_color = g.REGIONS_COLORS[uid % len(g.REGIONS_COLORS)]
    label_color = g.PALETTE[uid % len(g.PALETTE)]

    def _paint():
        view.add_regions(
            f"{g.SUBLIME_REGIONS_PREFIX}-{uid}",
            [target],
            flags=sublime.RegionFlags.DRAW_EMPTY,  # draw zero-width cursors too
            scope=scope_color,
            annotations=[cursor.user],  # pyright: ignore
            annotation_color=label_color,
        )

    sublime.set_timeout_async(_paint)

View file

@ -1,4 +1,5 @@
from __future__ import annotations
from typing import Optional
import sublime
import shutil
@ -6,12 +7,14 @@ import tempfile
import logging
from asyncio import CancelledError
from codemp import Workspace
from codemp import Workspace, Promise, CursorController
from Codemp.src import globals as g
from Codemp.src.buffers import VirtualBuffer
from Codemp.src.task_manager import rt
from Codemp.src.utils import rowcol_to_region
# from Codemp.src.task_manager import rt
from Codemp.src.utils import draw_cursor_region
logger = logging.getLogger(__name__)
@ -20,31 +23,35 @@ logger = logging.getLogger(__name__)
# events that happen to the codemp workspaces into sublime actions
class VirtualWorkspace:
def __init__(self, handle: Workspace):
self.handle = handle
self.id = self.handle.id()
self.sublime_window = sublime.active_window()
self.curctl = handle.cursor()
self.handle: Workspace = handle
self.id: str = self.handle.id()
self.sublime_window: sublime.Window = sublime.active_window()
self.curctl: CursorController = handle.cursor()
self.materialized = False
self.isactive = False
# mapping remote ids -> local ids
self.id_map: dict[str, int] = {}
self.active_Buffer: dict[int, VirtualBuffer] = {} # local_id -> VBuff
self.active_buffers: dict[int, VirtualBuffer] = {} # local_id -> VBuff
def cleanup(self):
self.deactivate()
def _cleanup(self):
self._deactivate()
# the workspace only cares about closing the various open views on its Buffer.
# the event listener calls the cleanup code for each buffer independently on its own.
for vbuff in self.active_Buffer.values():
# the event listener calls the cleanup code for each buffer independently on its own
# upon closure.
for vbuff in self.active_buffers.values():
vbuff.view.close()
self.active_Buffer = {} # drop all Buffer, let them be garbace collected (hopefully)
self.active_buffers = {} # drop all Buffer, let them be garbage collected (hopefully)
if not self.materialized:
return # nothing to delete
# remove from the "virtual" project folders
d: dict = self.sublime_window.project_data() # pyright: ignore
if d is None:
raise
newf = list(
filter(
lambda f: f.get("name", "") != f"{g.WORKSPACE_FOLDER_PREFIX}{self.id}",
@ -56,23 +63,25 @@ class VirtualWorkspace:
logger.info(f"cleaning up virtual workspace '{self.id}'")
shutil.rmtree(self.rootdir, ignore_errors=True)
# stop the controller
self.curctl.stop()
# clean the window form the tags
s = self.sublime_window.settings()
del s[g.CODEMP_WINDOW_TAG]
del s[g.CODEMP_WINDOW_WORKSPACES]
self.materialized = False
def materialize(self):
def _materialize(self):
# attach the workspace to the editor, tagging windows and populating
# virtual file systems
if self.materialized:
return # no op, we already are in the editor
return # no op, we already have materialized the workspace in the editor
# initialise the virtual filesystem
tmpdir = tempfile.mkdtemp(prefix="codemp_")
logging.debug("setting up virtual fs for workspace in: {} ".format(tmpdir))
logging.debug(f"setting up virtual fs for workspace in: {tmpdir}")
self.rootdir = tmpdir
# and add a new "project folder"
@ -94,32 +103,32 @@ class VirtualWorkspace:
self.materialized = True
def activate(self):
rt.dispatch(
self.move_cursor_task(),
f"{g.CURCTL_TASK_PREFIX}-{self.id}",
)
self.isactive = True
# def _activate(self):
# rt.dispatch(
# self.move_cursor_task(),
# f"{g.CURCTL_TASK_PREFIX}-{self.id}",
# )
# self.isactive = True
def deactivate(self):
def _deactivate(self):
if self.isactive:
rt.stop_task(f"{g.CURCTL_TASK_PREFIX}-{self.id}")
self.isactive = False
def add_buffer(self, remote_id: str, vbuff: VirtualBuffer):
def _add_buffer(self, remote_id: str, vbuff: VirtualBuffer):
self.id_map[remote_id] = vbuff.view.buffer_id()
self.active_Buffer[vbuff.view.buffer_id()] = vbuff
self.active_buffers[vbuff.view.buffer_id()] = vbuff
def get_by_local(self, local_id: int) -> VirtualBuffer | None:
return self.active_Buffer.get(local_id)
def _get_by_local(self, local_id: int) -> Optional[VirtualBuffer]:
return self.active_buffers.get(local_id)
def get_by_remote(self, remote_id: str) -> VirtualBuffer | None:
def _get_by_remote(self, remote_id: str) -> Optional[VirtualBuffer]:
local_id = self.id_map.get(remote_id)
if local_id is None:
return
vbuff = self.active_Buffer.get(local_id)
vbuff = self.active_buffers.get(local_id)
if vbuff is None:
logging.warning(
"a local-remote buffer id pair was found but \
@ -129,21 +138,24 @@ class VirtualWorkspace:
return vbuff
def create(self, id: str) -> Promise[None]:
    """Ask the server to create buffer `id` in this workspace.

    Returns the codemp Promise; callers decide whether to block on it
    with .wait() (as the attach flow does) or discard it.
    """
    return self.handle.create(id)
# A workspace has some Buffer inside of it (filetree)
# some of those you are already attached to (Buffer_by_name)
# If already attached to it return the same alredy existing bufferctl
# if existing but not attached (attach)
# if not existing ask for creation (create + attach)
async def attach(self, id: str):
def attach(self, id: str):
if id is None:
return
attached_Buffer = self.handle.buffer_by_name(id)
if attached_Buffer is not None:
return self.get_by_remote(id)
return self._get_by_remote(id)
await self.handle.fetch_buffers()
existing_Buffer = self.handle.filetree()
self.handle.fetch_buffers()
existing_Buffer = self.handle.filetree(filter=None)
if id not in existing_Buffer:
create = sublime.ok_cancel_dialog(
"There is no buffer named '{id}' in the workspace.\n\
@ -153,46 +165,42 @@ class VirtualWorkspace:
)
if create:
try:
await self.handle.create(id)
create_promise = self.create(id)
except Exception as e:
logging.error(f"could not create buffer:\n\n {e}", True)
logging.error(f"could not create buffer:\n\n {e}")
return
create_promise.wait()
else:
return
# now either we created it or it exists already
try:
buff_ctl = await self.handle.attach(id)
buff_ctl = self.handle.attach(id)
except Exception as e:
logging.error(f"error when attaching to buffer '{id}':\n\n {e}", True)
logging.error(f"error when attaching to buffer '{id}':\n\n {e}")
return
vbuff = VirtualBuffer(self.id, self.rootdir, id, buff_ctl)
self.add_buffer(id, vbuff)
vbuff = VirtualBuffer(self.id, self.rootdir, id, buff_ctl.wait())
self._add_buffer(id, vbuff)
# TODO! if the view is already active calling focus_view() will not trigger the on_activate
self.sublime_window.focus_view(vbuff.view)
def detach(self, id: str):
if id is None:
return
attached_Buffer = self.handle.buffer_by_name(id)
if attached_Buffer is None:
logging.warning(f"You are not attached to the buffer '{id}'", True)
sublime.error_message(f"You are not attached to the buffer '{id}'")
logging.warning(f"You are not attached to the buffer '{id}'")
return
self.handle.detach(id)
async def delete(self, id: str):
if id is None:
return
# delete a non existent buffer
await self.handle.fetch_buffers()
existing_Buffer = self.handle.filetree()
def delete(self, id: str):
self.handle.fetch_buffers()
existing_Buffer = self.handle.filetree(filter=None)
if id not in existing_Buffer:
logging.info(f"The buffer '{id}' does not exists.", True)
sublime.error_message(f"The buffer '{id}' does not exists.")
logging.info(f"The buffer '{id}' does not exists.")
return
# delete a buffer that exists but you are not attached to
attached_Buffer = self.handle.buffer_by_name(id)
@ -204,7 +212,7 @@ class VirtualWorkspace:
)
if delete:
try:
await self.handle.delete(id)
self.handle.delete(id).wait()
except Exception as e:
logging.error(
f"error when deleting the buffer '{id}':\n\n {e}", True
@ -221,9 +229,9 @@ class VirtualWorkspace:
title="Delete Buffer?",
)
if delete:
self.detach(id)
self.handle.detach(id)
try:
await self.handle.delete(id)
self.handle.delete(id).wait()
except Exception as e:
logging.error(f"error when deleting the buffer '{id}':\n\n {e}", True)
return
@ -231,24 +239,14 @@ class VirtualWorkspace:
async def move_cursor_task(self):
logger.debug(f"spinning up cursor worker for workspace '{self.id}'...")
try:
while cursor_event := await self.curctl.recv():
vbuff = self.get_by_remote(cursor_event.buffer)
# blocking for now ...
while cursor_event := self.curctl.recv().wait():
vbuff = self._get_by_remote(cursor_event.buffer)
if vbuff is None:
continue
reg = rowcol_to_region(vbuff.view, cursor_event.start, cursor_event.end)
reg_flags = sublime.RegionFlags.DRAW_EMPTY # show cursors.
user_hash = hash(cursor_event.user)
vbuff.view.add_regions(
f"{g.SUBLIME_REGIONS_PREFIX}-{user_hash}",
[reg],
flags=reg_flags,
scope=g.REGIONS_COLORS[user_hash % len(g.REGIONS_COLORS)],
annotations=[cursor_event.user], # pyright: ignore
annotation_color=g.PALETTE[user_hash % len(g.PALETTE)],
)
draw_cursor_region(vbuff.view, cursor_event)
except CancelledError:
logger.debug(f"cursor worker for '{self.id}' stopped...")