def connect(self, cb=None):
    self.stop(False)
    self.empty_selects = 0
    self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    if self.secure:
        if ssl:
            with open(self.cert_path, 'wb') as cert_fd:
                cert_fd.write(cert.CA_CERT.encode('utf-8'))
            self.sock = ssl.wrap_socket(self.sock, ca_certs=self.cert_path, cert_reqs=ssl.CERT_REQUIRED)
        else:
            msg.debug('No SSL module found. Connection will not be encrypted.')
            if self.port == G.DEFAULT_PORT:
                self.port = 3148  # plaintext port
    msg.debug('Connecting to %s:%s' % (self.host, self.port))
    try:
        self.sock.connect((self.host, self.port))
        if self.secure and ssl:
            self.sock.do_handshake()
    except socket.error as e:
        msg.error('Error connecting:', e)
        self.reconnect()
        return
    self.sock.setblocking(0)
    msg.debug('Connected!')
    self.reconnect_delay = self.INITIAL_RECONNECT_DELAY
    self.send_auth()
    if cb:
        cb()

def _connect(self, attempts=0):
    if attempts > 500:
        msg.error('Connection attempt timed out.')
        return self.reconnect()
    if not self.sock:
        msg.debug('_connect: No socket')
        return
    try:
        self.sock.connect((self.host, self.port))
        select.select([self.sock], [self.sock], [], 0)
    except socket.error as e:
        if e.errno == iscon_errno:
            # Already connected: the non-blocking connect finished.
            pass
        elif e.errno in connect_errno:
            # Still in progress: poll again shortly.
            return utils.set_timeout(self._connect, 20, attempts + 1)
        else:
            msg.error('Error connecting:', e)
            return self.reconnect()
    if self.secure:
        sock_debug('SSL-wrapping socket')
        self.sock = ssl.wrap_socket(self.sock, ca_certs=self.cert_path, cert_reqs=ssl.CERT_REQUIRED, do_handshake_on_connect=False)
    self.on_connect()
    self.call_select = True
    self.select()

def protocol(self, req):
    self.buf += req
    while True:
        before, sep, after = self.buf.partition(NEWLINE)
        if not sep:
            break
        try:
            # Node.js sends invalid utf8 even though we're calling write(string, "utf8")
            # Python 2 can figure it out, but python 3 hates it and will die here with some byte sequences
            # Instead of crashing the plugin, we drop the data. Yes, this is horrible.
            before = before.decode('utf-8', 'ignore')
            data = json.loads(before)
        except Exception as e:
            msg.error('Unable to parse json: %s' % str(e))
            msg.error('Data: %s' % before)
            # XXXX: THIS LOSES DATA
            self.buf = after
            continue
        name = data.get('name')
        try:
            if name == 'error':
                message = 'Floobits: Error! Message: %s' % str(data.get('msg'))
                msg.error(message)
                if data.get('flash'):
                    sublime.error_message('Floobits: %s' % str(data.get('msg')))
            elif name == 'disconnect':
                message = 'Floobits: Disconnected! Reason: %s' % str(data.get('reason'))
                msg.error(message)
                sublime.error_message(message)
                self.stop()
            else:
                self.handler(name, data)
        except Exception as e:
            msg.error('Error handling %s event with data %s: %s' % (name, data, e))
        self.buf = after

def delete_buf(self, path, unlink=False):
    if not utils.is_shared(path):
        msg.error('Skipping deleting ', path, ' because it is not in shared path ', G.PROJECT_PATH, '.')
        return
    if os.path.isdir(path):
        for dirpath, dirnames, filenames in os.walk(path):
            # TODO: re-examine this assumption
            # Don't care about hidden stuff
            dirnames[:] = [d for d in dirnames if d[0] != '.']
            for f in filenames:
                f_path = os.path.join(dirpath, f)
                if f[0] == '.':
                    msg.log('Not deleting buf for hidden file ', f_path)
                else:
                    self.delete_buf(f_path, unlink)
        return
    buf_to_delete = self.get_buf_by_path(path)
    if buf_to_delete is None:
        msg.error(path, ' is not in this workspace')
        return
    msg.log('deleting buffer ', utils.to_rel_path(path))
    event = {
        'name': 'delete_buf',
        'id': buf_to_delete['id'],
        'unlink': unlink,
    }
    self.send(event)

def handler(self, name, data):
    if name == 'create_user':
        del data['name']
        try:
            floorc = BASE_FLOORC + '\n'.join(['%s %s' % (k, v) for k, v in data.items()]) + '\n'
            with open(G.FLOORC_PATH, 'wb') as floorc_fd:
                floorc_fd.write(floorc.encode('utf-8'))
            utils.reload_settings()
            if False in [bool(x) for x in (G.USERNAME, G.API_KEY, G.SECRET)]:
                sublime.message_dialog('Something went wrong. You will need to sign up for an account to use Floobits.')
                api.send_error({'message': 'No username or secret'})
            else:
                p = os.path.join(G.BASE_DIR, 'welcome.md')
                with open(p, 'wb') as fd:
                    text = welcome_text % (G.USERNAME, self.host)
                    fd.write(text.encode('utf-8'))
                d = utils.get_persistent_data()
                d['auto_generated_account'] = True
                utils.update_persistent_data(d)
                G.AUTO_GENERATED_ACCOUNT = True
                sublime.active_window().open_file(p)
        except Exception as e:
            msg.error(e)
        try:
            d = utils.get_persistent_data()
            d['disable_account_creation'] = True
            utils.update_persistent_data(d)
        finally:
            self.stop()

def connect(self, cb=None):
    self.stop(False)
    self.empty_selects = 0
    self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    if self.secure:
        if ssl:
            cert_path = os.path.join(G.COLAB_DIR, "startssl-ca.pem")
            with open(cert_path, "wb") as cert_fd:
                cert_fd.write(cert.CA_CERT.encode("utf-8"))
            self.sock = ssl.wrap_socket(self.sock, ca_certs=cert_path, cert_reqs=ssl.CERT_REQUIRED)
        else:
            msg.log("No SSL module found. Connection will not be encrypted.")
            if self.port == G.DEFAULT_PORT:
                self.port = 3148  # plaintext port
    msg.debug("Connecting to %s:%s" % (self.host, self.port))
    try:
        self.sock.connect((self.host, self.port))
        if self.secure and ssl:
            self.sock.do_handshake()
    except socket.error as e:
        msg.error("Error connecting:", e)
        self.reconnect()
        return
    self.sock.setblocking(0)
    msg.debug("Connected!")
    self.reconnect_delay = self.INITIAL_RECONNECT_DELAY
    self.send_auth()
    if cb:
        cb()

def handler(self, name, data):
    if name == "create_user":
        del data["name"]
        try:
            floorc = BASE_FLOORC + "\n".join(["%s %s" % (k, v) for k, v in data.items()]) + "\n"
            with open(G.FLOORC_PATH, "wb") as floorc_fd:
                floorc_fd.write(floorc.encode("utf-8"))
            utils.reload_settings()
            if False in [bool(x) for x in (G.USERNAME, G.API_KEY, G.SECRET)]:
                sublime.message_dialog("Something went wrong. You will need to sign up for an account to use Floobits.")
                api.send_error({"message": "No username or secret"})
            else:
                p = os.path.join(G.BASE_DIR, "welcome.md")
                with open(p, "wb") as fd:
                    text = welcome_text % (G.USERNAME, self.host)
                    fd.write(text.encode("utf-8"))
                d = utils.get_persistent_data()
                d["auto_generated_account"] = True
                utils.update_persistent_data(d)
                G.AUTO_GENERATED_ACCOUNT = True
                sublime.active_window().open_file(p)
        except Exception as e:
            msg.error(e)
        try:
            d = utils.get_persistent_data()
            d["disable_account_creation"] = True
            utils.update_persistent_data(d)
        finally:
            self.stop()

def join_workspace(self, data, owner, workspace, dir_to_make=None):
    d = data['response']
    if dir_to_make:
        if d:
            d = dir_to_make
            utils.mkdir(d)
        else:
            d = ''
    if d == '':
        return self.get_input('Save workspace files to: ', G.PROJECT_PATH, self.join_workspace, owner, workspace)
    d = os.path.realpath(os.path.expanduser(d))
    if not os.path.isdir(d):
        if dir_to_make:
            return msg.error("Couldn't create directory %s" % dir_to_make)
        prompt = '%s is not a directory. Create it? ' % d
        return self.get_input(prompt, '', self.join_workspace, owner, workspace, dir_to_make=d, y_or_n=True)
    try:
        G.PROJECT_PATH = d
        utils.mkdir(os.path.dirname(G.PROJECT_PATH))
        self.remote_connect(owner, workspace)
    except Exception as e:
        return msg.error("Couldn't create directory %s: %s" % (G.PROJECT_PATH, str(e)))

def delete_buf(self, path):
    if not utils.is_shared(path):
        msg.error('Skipping deleting %s because it is not in shared path %s.' % (path, G.PROJECT_PATH))
        return
    if os.path.isdir(path):
        for dirpath, dirnames, filenames in os.walk(path):
            # TODO: re-examine this assumption
            # Don't care about hidden stuff
            dirnames[:] = [d for d in dirnames if d[0] != '.']
            for f in filenames:
                f_path = os.path.join(dirpath, f)
                if f[0] == '.':
                    msg.log('Not deleting buf for hidden file %s' % f_path)
                else:
                    self.delete_buf(f_path)
        return
    buf_to_delete = self.get_buf_by_path(path)
    if buf_to_delete is None:
        msg.error('%s is not in this workspace' % path)
        return
    msg.log('deleting buffer ', utils.to_rel_path(path))
    event = {
        'name': 'delete_buf',
        'id': buf_to_delete['id'],
    }
    G.AGENT.send(event)

def delete_buf(self, path, unlink=False):
    if not utils.is_shared(path):
        msg.error('Skipping deleting ', path, ' because it is not in shared path ', G.PROJECT_PATH, '.')
        return
    if os.path.isdir(path):
        for dirpath, dirnames, filenames in os.walk(path):
            # TODO: re-examine this assumption
            # Don't care about hidden stuff
            dirnames[:] = [d for d in dirnames if d[0] != '.']
            for f in filenames:
                f_path = os.path.join(dirpath, f)
                if f[0] == '.':
                    msg.log('Not deleting buf for hidden file ', f_path)
                else:
                    self.delete_buf(f_path, unlink)
        return
    buf_to_delete = self.get_buf_by_path(path)
    if buf_to_delete is None:
        msg.error(path, ' is not in this workspace')
        return
    msg.log('deleting buffer ', utils.to_rel_path(path))
    event = {
        'name': 'delete_buf',
        'id': buf_to_delete['id'],
        'unlink': unlink,
    }

    def done(d):
        self._on_delete_buf(event)

    self.send(event, done)

def _on_open_workspace_settings(self, req):
    try:
        webbrowser.open(self.agent.workspace_url + '/settings', new=2, autoraise=True)
    except Exception as e:
        msg.error("Couldn't open a browser: %s" % (str(e)))

def _scan_dir(ig):
    path = ig.path
    if not utils.is_shared(path):
        msg.error('Skipping adding %s because it is not in shared path %s.' % (path, G.PROJECT_PATH))
        return
    ignored = ig.is_ignored(path)
    if ignored:
        msg.log('Not creating buf: %s' % (ignored))
        return
    msg.debug('create_buf: path is %s' % path)
    if not os.path.isdir(path):
        yield path
        return
    try:
        paths = os.listdir(path)
    except Exception as e:
        msg.error('Error listing path %s: %s' % (path, unicode(e)))
        return
    for p in paths:
        p_path = os.path.join(path, p)
        if p[0] == '.':
            if p not in ignore.HIDDEN_WHITELIST:
                msg.log('Not creating buf for hidden path %s' % p_path)
                continue
        ignored = ig.is_ignored(p_path)
        if ignored:
            msg.log('Not creating buf: %s' % (ignored))
            continue
        yield p_path

def delete_buf(self, path):
    """deletes a path"""
    if not path:
        return
    path = utils.get_full_path(path)
    if not self.is_shared(path):
        msg.error('Skipping deleting %s because it is not in shared path %s.' % (path, G.PROJECT_PATH))
        return
    if os.path.isdir(path):
        for dirpath, dirnames, filenames in os.walk(path):
            # Don't care about hidden stuff
            dirnames[:] = [d for d in dirnames if d[0] != '.']
            for f in filenames:
                f_path = os.path.join(dirpath, f)
                if f[0] == '.':
                    msg.log('Not deleting buf for hidden file %s' % f_path)
                else:
                    self.delete_buf(f_path)
        return
    buf_to_delete = None
    rel_path = utils.to_rel_path(path)
    buf_to_delete = self.get_buf_by_path(rel_path)
    if buf_to_delete is None:
        msg.error('%s is not in this workspace' % path)
        return
    msg.log('deleting buffer ', rel_path)
    event = {
        'name': 'delete_buf',
        'id': buf_to_delete['id'],
    }
    self.agent.put(event)

def delete_buf(path):
    if not utils.is_shared(path):
        msg.error('Skipping deleting %s because it is not in shared path %s.' % (path, G.PROJECT_PATH))
        return
    if os.path.isdir(path):
        for dirpath, dirnames, filenames in os.walk(path):
            # TODO: re-examine this assumption
            # Don't care about hidden stuff
            dirnames[:] = [d for d in dirnames if d[0] != '.']
            for f in filenames:
                f_path = os.path.join(dirpath, f)
                if f[0] == '.':
                    msg.log('Not deleting buf for hidden file %s' % f_path)
                else:
                    Listener.delete_buf(f_path)
        return
    buf_to_delete = get_buf_by_path(path)
    if buf_to_delete is None:
        msg.error('%s is not in this workspace' % path)
        return
    msg.log('deleting buffer ', utils.to_rel_path(path))
    event = {
        'name': 'delete_buf',
        'id': buf_to_delete['id'],
    }
    G.AGENT.put(event)

def set_text(self, text):
    msg.debug('\n\nabout to patch %s %s' % (str(self), self.vim_buf.name))
    try:
        self.vim_buf[:] = text.encode('utf-8').split('\n')
    except Exception as e:
        msg.error("couldn't apply patches because: %s!\nThe unencoded text was: %s" % (str(e), text))
        raise

def on_post_window_command(self, window, command, *args, **kwargs):
    agent = args[-1]
    if command == 'delete_file':
        # User probably deleted a file. Stat and delete.
        files = args[0]['files']
        for f in files:
            buf = agent.get_buf_by_path(f)
            if not buf:
                continue
            if os.path.exists(f):
                continue
            agent.send({
                'name': 'delete_buf',
                'id': buf['id'],
            })
        return
    if command == 'delete_folder':
        dirs = args[0]['dirs']
        for d in dirs:
            # Delete folder prompt just closed. Check if folder exists
            if os.path.isdir(d):
                continue
            rel_path = utils.to_rel_path(d)
            if not rel_path:
                msg.error('Can not delete %s from workspace' % d)
                continue
            for buf_id, buf in G.AGENT.bufs.items():
                if buf['path'].startswith(rel_path):
                    agent.send({
                        'name': 'delete_buf',
                        'id': buf_id,
                    })

def _on_user_input(self, data):
    cb_id = int(data['id'])
    cb = self.user_inputs.get(cb_id)
    if cb is None:
        msg.error('cb for input %s is none' % cb_id)
        return
    cb(data)
    del self.user_inputs[cb_id]

def on_disconnect(self, data):
    message = 'Floobits: Disconnected! Reason: %s' % str(data.get('reason'))
    msg.error(message)
    try:
        self.agent.stop()
    except Exception:
        pass
    sys.exit(0)

def set_text(self, text):
    msg.debug('\n\nabout to patch %s %s' % (str(self), self.vim_buf.name))
    try:
        msg.debug("now buf is loadedish? %s" % vim.eval('bufloaded(%s)' % self.native_id))
        self.vim_buf[:] = text.encode('utf-8').split('\n')
    except Exception as e:
        msg.error("couldn't apply patches because: %s!\nThe unencoded text was: %s" % (str(e), text))
        raise

def _on_user_input(self, data):
    print("got user_input", data)
    cb_id = int(data["id"])
    cb = self.user_inputs.get(cb_id)
    if cb is None:
        msg.error("cb for input %s is none" % cb_id)
        return
    cb(data)
    del self.user_inputs[cb_id]

def open_in_browser(self):
    url = G.AGENT.workspace_url
    # webbrowser can print to stdout, which is hooked up to neovim's msgpack
    # neovim will close the channel on bad msgpack, so squelch all output
    args = [sys.executable, '-m', 'webbrowser', url]
    proc = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    (stdout, stderr) = proc.communicate()
    if stderr:
        msg.error('Error opening browser: %s' % stderr)

def on_window_command(self, window, command, *args, **kwargs):
    if command == 'rename_path':
        # User is about to rename something
        msg.debug('rename')
    if window == G.WORKSPACE_WINDOW and command == 'close_window':
        msg.log('Workspace window closed, disconnecting.')
        try:
            window.run_command('floobits_leave_workspace')
        except Exception as e:
            msg.error(e)

def upload(path):
    try:
        with open(path, 'rb') as buf_fd:
            buf = buf_fd.read()
        encoding = 'utf8'
        rel_path = utils.to_rel_path(path)
        existing_buf = get_buf_by_path(path)
        if existing_buf:
            buf_md5 = hashlib.md5(buf).hexdigest()
            if existing_buf['md5'] == buf_md5:
                msg.debug('%s already exists and has the same md5. Skipping.' % path)
                return
            msg.log('setting buffer ', rel_path)
            existing_buf['buf'] = buf
            existing_buf['md5'] = buf_md5
            try:
                buf = buf.decode('utf-8')
            except Exception:
                buf = base64.b64encode(buf).decode('utf-8')
                encoding = 'base64'
            existing_buf['encoding'] = encoding
            G.AGENT.put({
                'name': 'set_buf',
                'id': existing_buf['id'],
                'buf': buf,
                'md5': buf_md5,
                'encoding': encoding,
            })
            return
        try:
            buf = buf.decode('utf-8')
        except Exception:
            buf = base64.b64encode(buf).decode('utf-8')
            encoding = 'base64'
        msg.log('creating buffer ', rel_path)
        event = {
            'name': 'create_buf',
            'buf': buf,
            'path': rel_path,
            'encoding': encoding,
        }
        G.AGENT.put(event)
    except (IOError, OSError):
        msg.error('Failed to open %s.' % path)
    except Exception as e:
        msg.error('Failed to create buffer %s: %s' % (path, unicode(e)))

def emacs_handle(self, data):
    name = data.get('name')
    if not name:
        return msg.error('no name in data?!?')
    # Use a default of None so unknown events are logged instead of raising AttributeError.
    func = getattr(self, "on_emacs_%s" % (name), None)
    if not func:
        return msg.debug('unknown name: ', name, 'data: ', data)
    try:
        func(data)
    except Exception as e:
        msg.error(str(e))

def upload(path):
    try:
        with open(path, 'rb') as buf_fd:
            buf = buf_fd.read()
        encoding = 'utf8'
        rel_path = utils.to_rel_path(path)
        existing_buf = get_buf_by_path(path)
        if existing_buf:
            buf_md5 = hashlib.md5(buf).hexdigest()
            if existing_buf['md5'] == buf_md5:
                msg.log('%s already exists and has the same md5. Skipping.' % path)
                return
            msg.log('setting buffer ', rel_path)
            existing_buf['buf'] = buf
            existing_buf['md5'] = buf_md5
            try:
                buf = buf.decode('utf-8')
            except Exception:
                buf = base64.b64encode(buf).decode('utf-8')
                encoding = 'base64'
            existing_buf['encoding'] = encoding
            G.AGENT.put({
                'name': 'set_buf',
                'id': existing_buf['id'],
                'buf': buf,
                'md5': buf_md5,
                'encoding': encoding,
            })
            return
        try:
            buf = buf.decode('utf-8')
        except Exception:
            buf = base64.b64encode(buf).decode('utf-8')
            encoding = 'base64'
        msg.log('creating buffer ', rel_path)
        event = {
            'name': 'create_buf',
            'buf': buf,
            'path': rel_path,
            'encoding': encoding,
        }
        G.AGENT.put(event)
    except (IOError, OSError):
        msg.error('Failed to open %s.' % path)
    except Exception as e:
        msg.error('Failed to create buffer %s: %s' % (path, unicode(e)))

def _on_create_workspace(self, data, workspace_name, dir_to_share, owner=None, perms=None):
    owner = owner or G.USERNAME
    workspace_name = data.get('response', workspace_name)
    prompt = 'workspace %s already exists. Choose another name: ' % workspace_name
    try:
        api_args = {
            'name': workspace_name,
            'owner': owner,
        }
        if perms:
            api_args['perms'] = perms
        api.create_workspace(api_args)
        workspace_url = utils.to_workspace_url({
            'secure': True,
            'owner': owner,
            'workspace': workspace_name
        })
        msg.debug('Created workspace %s' % workspace_url)
    except HTTPError as e:
        err_body = e.read()
        msg.error('Unable to create workspace: %s %s' % (unicode(e), err_body))
        if e.code not in [400, 402, 409]:
            return msg.error('Unable to create workspace: %s' % str(e))
        if e.code == 400:
            workspace_name = re.sub('[^A-Za-z0-9_\-]', '-', workspace_name)
            prompt = 'Invalid name. Workspace names must match the regex [A-Za-z0-9_\-]. Choose another name:'
        elif e.code == 402:
            try:
                err_body = json.loads(err_body)
                err_body = err_body['detail']
            except Exception:
                pass
            return editor.error_message('%s' % err_body)
        else:
            prompt = 'Workspace %s/%s already exists. Choose another name:' % (owner, workspace_name)
        return self.get_input(prompt, workspace_name, self._on_create_workspace, workspace_name, dir_to_share, owner, perms)
    except Exception as e:
        return msg.error('Unable to create workspace: %s' % str(e))
    G.PROJECT_PATH = dir_to_share
    agent = self.remote_connect(owner, workspace_name, False)
    agent.once("room_info", lambda: agent.upload(dir_to_share))

def update_view(buf, view):
    msg.log('Floobits synced data for consistency: %s' % buf['path'])
    G.VIEW_TO_HASH[view.buffer_id()] = buf['md5']
    view.set_read_only(False)
    try:
        view.run_command('floo_view_replace_region', {'r': [0, view.size()], 'data': buf['buf']})
        view.set_status('Floobits', 'Floobits synced data for consistency.')
        utils.set_timeout(lambda: view.set_status('Floobits', ''), 5000)
    except Exception as e:
        msg.error('Exception updating view: %s' % e)
    if 'patch' not in G.PERMS:
        view.set_status('Floobits', 'You don\'t have write permission. Buffer is read-only.')
        view.set_read_only(True)

def handle(self, req):
    self.net_buf += req
    while True:
        before, sep, after = self.net_buf.partition("\n")
        if not sep:
            break
        try:
            data = json.loads(before)
        except Exception as e:
            msg.error("Unable to parse json:", e)
            msg.error("Data:", before)
            raise e
        self.protocol.handle(data)
        self.net_buf = after

def update(self, data):
    buf = self.buf = data
    msg.log('Floobits synced data for consistency: %s' % buf['path'])
    G.VIEW_TO_HASH[self.view.buffer_id()] = buf['md5']
    self.view.set_read_only(False)
    try:
        self.view.run_command('floo_view_replace_region', {'r': [0, self.view.size()], 'data': buf['buf']})
        self.set_status('Floobits synced data for consistency.')
        utils.set_timeout(self.set_status, 5000, '')
    except Exception as e:
        msg.error('Exception updating view: %s' % e)
    if 'patch' not in G.PERMS:
        self.set_status('You don\'t have write permission. Buffer is read-only.')
        self.view.set_read_only(True)

def _on_create_workspace(self, data, workspace_name, dir_to_share, owner=None, perms=None):
    owner = owner or G.USERNAME
    workspace_name = data.get('response', workspace_name)
    try:
        api_args = {
            'name': workspace_name,
            'owner': owner,
        }
        if perms:
            api_args['perms'] = perms
        msg.debug(str(api_args))
        r = api.create_workspace(api_args)
    except Exception as e:
        msg.error('Unable to create workspace: %s' % unicode(e))
        return editor.error_message('Unable to create workspace: %s' % unicode(e))
    workspace_url = 'https://%s/%s/%s' % (G.DEFAULT_HOST, owner, workspace_name)
    if r.code < 400:
        msg.log('Created workspace %s' % workspace_url)
        utils.add_workspace_to_persistent_json(owner, workspace_name, workspace_url, dir_to_share)
        G.PROJECT_PATH = dir_to_share
        agent = self.remote_connect(owner, workspace_name, False)
        return agent.once("room_info", lambda: agent.upload(dir_to_share))
    msg.error('Unable to create workspace: %s' % r.body)
    if r.code not in [400, 402, 409]:
        try:
            r.body = r.body['detail']
        except Exception:
            pass
        return editor.error_message('Unable to create workspace: %s' % r.body)
    if r.code == 400:
        workspace_name = re.sub('[^A-Za-z0-9_\-\.]', '-', workspace_name)
        prompt = 'Invalid name. Workspace names must match the regex [A-Za-z0-9_\-\.]. Choose another name:'
    elif r.code == 402:
        try:
            r.body = r.body['detail']
        except Exception:
            pass
        cb = lambda data: data['response'] and webbrowser.open('https://%s/%s/settings#billing' % (G.DEFAULT_HOST, owner))
        self.get_input('%s Open billing settings?' % r.body, '', cb, y_or_n=True)
        return
    else:
        prompt = 'Workspace %s/%s already exists. Choose another name:' % (owner, workspace_name)
    return self.get_input(prompt, workspace_name, self._on_create_workspace, workspace_name, dir_to_share, owner, perms)

def _uploader(self, paths_iter):
    if not self.agent or not self.agent.sock:
        msg.error('Can\'t upload! Not connected. :(')
        return
    self.agent.tick()
    if self.agent.qsize() > 0:
        # Outbound queue hasn't drained yet; check again soon before uploading more.
        return utils.set_timeout(self._uploader, 10, paths_iter)
    try:
        p = next(paths_iter)
        self.upload(p)
    except StopIteration:
        msg.log('All done uploading')
        return
    return utils.set_timeout(self._uploader, 50, paths_iter)

def update(self, buf, message=True):
    self.buf = buf
    if message:
        msg.log('Floobits synced data for consistency: ', buf['path'])
    G.VIEW_TO_HASH[self.view.buffer_id()] = buf['md5']
    self.view.set_read_only(False)
    try:
        self.view.run_command('floo_view_replace_region', {'r': [0, self.view.size()], 'data': buf['buf']})
        if message:
            self.set_status('Floobits synced data for consistency.')
            utils.set_timeout(self.erase_status, 5000)
    except Exception as e:
        msg.error('Exception updating view: ', str_e(e))
    if 'patch' not in G.PERMS:
        self.set_status('You don\'t have write permission. Buffer is read-only.')
        self.view.set_read_only(True)

def update(self, buf, message=True):
    self.buf = buf
    if message:
        msg.log("Floobits synced data for consistency: ", buf["path"])
    G.VIEW_TO_HASH[self.view.buffer_id()] = buf["md5"]
    self.view.set_read_only(False)
    try:
        self.view.run_command("floo_view_replace_region", {"r": [0, self.view.size()], "data": buf["buf"]})
        if message:
            self.set_status("Floobits synced data for consistency.")
            utils.set_timeout(self.erase_status, 5000)
    except Exception as e:
        msg.error("Exception updating view: ", str_e(e))
    if "patch" not in G.PERMS:
        self.set_status("You don't have write permission. Buffer is read-only.")
        self.view.set_read_only(True)

def set_text(self, text):
    msg.debug('About to patch %s %s' % (str(self), self.vim_buf.name))
    lines = text.encode('utf-8').split('\n')
    new_len = len(lines)
    end = start = -1
    i = 0

    def stomp_buffer():
        msg.debug('Stomping buffer.')
        G.AGENT.patching += 1
        self.vim_buf[:] = lines

    try:
        if new_len != len(self.vim_buf):
            stomp_buffer()
            return
        while i < new_len:
            if lines[i] != self.vim_buf[i]:
                msg.debug('Lines are not the same. "%s" "%s"' % (self.vim_buf[i], lines[i]))
                if start > -1:
                    if end > -1:
                        stomp_buffer()  # More than one contiguous change in patch.
                        return
                else:
                    start = i
            else:
                msg.debug('Lines are the same. "%s"' % lines[i])
                if start > -1 and end == -1:
                    end = i
            i += 1
        if start == -1 and end == -1:
            msg.debug("Nothing to do here, buffers are the same.")
            return
        if start > -1 and end == -1:
            end = i
        msg.debug('Stomping lines %d to %d: "%s" -> "%s"' % (start, end, self.vim_buf[start:end], lines[start:end]))
        G.AGENT.patching += 1
        self.vim_buf[start:end] = lines[start:end]
    except Exception as e:
        msg.error('Couldn\'t apply patches because: %s!\nThe unencoded text was: "%s"' % (str(e), text))
        raise
    msg.debug('All done patching.')

def select(self):
    if not self.sock:
        msg.error('select(): No socket.')
        return self.reconnect()
    try:
        _in, _out, _except = select.select([self.sock], [self.sock], [self.sock], 0)
    except (select.error, socket.error, Exception) as e:
        msg.error('Error in select(): %s' % str(e))
        return self.reconnect()
    if _except:
        msg.error('Socket error')
        return self.reconnect()
    if _in:
        # Drain everything the socket has for us right now.
        buf = ''
        while True:
            try:
                d = self.sock.recv(4096)
                if not d:
                    break
                buf += d
            except (socket.error, TypeError):
                break
        if buf:
            self.empty_selects = 0
            self.handle(buf)
        else:
            self.empty_selects += 1
            if self.empty_selects > 10:
                msg.error('No data from sock.recv() {0} times.'.format(self.empty_selects))
                return self.reconnect()
    if _out:
        # Flush queued outbound payloads.
        for p in self._get_from_queue():
            if p is None:
                self.sock_q.task_done()
                continue
            try:
                self.sock.sendall(p)
                self.sock_q.task_done()
            except Exception as e:
                msg.error('Couldn\'t write to socket: %s' % str(e))
                return self.reconnect()

def handle(self, data):
    name = data.get('name')
    if not name:
        return msg.error('no name in data?!?')
    func = getattr(self, "on_%s" % (name), None)
    if not func:
        return msg.debug('unknown name', name, 'data:', data)
    func(data)

def handle(self, req):
    self.net_buf += req
    new_data = False
    while True:
        before, sep, after = self.net_buf.partition('\n')
        if not sep:
            break
        try:
            data = json.loads(before)
        except Exception as e:
            msg.error('Unable to parse json:', e)
            msg.error('Data:', before)
            raise e
        self.protocol.handle(data)
        new_data = True
        self.net_buf = after
    # XXX move to protocol :(
    if new_data:
        vim.command('redraw')

def _create_buf_worker(ignores, files, too_big):
    quota = 10  # scan until we find a minimum of 10 files
    while quota > 0 and ignores:
        ig = ignores.popleft()
        for new_path in Listener._scan_dir(ig):
            if not new_path:
                continue
            try:
                s = os.lstat(new_path)
            except Exception as e:
                msg.error('Error lstat()ing path %s: %s' % (new_path, unicode(e)))
                continue
            if stat.S_ISDIR(s.st_mode):
                ignores.append(ignore.Ignore(ig, new_path))
            elif stat.S_ISREG(s.st_mode):
                if s.st_size > (MAX_FILE_SIZE):
                    too_big.append(new_path)
                else:
                    files.append(new_path)
                    quota -= 1
    can_upload = False
    for f in utils.iter_n_deque(files, 10):
        Listener.upload(f)
        can_upload = True
    if can_upload and G.AGENT and G.AGENT.sock:
        G.AGENT.select()
    if ignores or files:
        return utils.set_timeout(Listener._create_buf_worker, 25, ignores, files, too_big)
    if too_big:
        sublime.error_message("%s file(s) were not added because they were larger than 10 megabytes: \n%s" % (len(too_big), "\t".join(too_big)))
    msg.log('All done syncing')

def reconnect(self):
    try:
        self.sock.close()
    except Exception:
        pass
    self.workspace_info = {}
    self.net_buf = ""
    self.sock = None
    self.authed = False
    G.JOINED_WORKSPACE = False
    self.reconnect_delay *= 1.5
    if self.reconnect_delay > 10000:
        self.reconnect_delay = 10000
    if self.retries > 0:
        msg.log("Floobits: Reconnecting in %sms" % self.reconnect_delay)
        self.reconnect_timeout = sublime.set_timeout(self.connect, int(self.reconnect_delay))
    elif self.retries == 0:
        msg.error("Floobits Error! Too many reconnect failures. Giving up.")
        sys.exit(0)
    self.retries -= 1

def delete_buf(self, path):
    """deletes a path"""
    if not path:
        return
    path = utils.get_full_path(path)
    if not utils.is_shared(path):
        msg.error('Skipping deleting %s because it is not in shared path %s.' % (path, G.PROJECT_PATH))
        return
    if os.path.isdir(path):
        for dirpath, dirnames, filenames in os.walk(path):
            # Don't care about hidden stuff
            dirnames[:] = [d for d in dirnames if d[0] != '.']
            for f in filenames:
                f_path = os.path.join(dirpath, f)
                if f[0] == '.':
                    msg.log('Not deleting buf for hidden file %s' % f_path)
                else:
                    self.delete_buf(f_path)
        return
    buf_to_delete = None
    rel_path = utils.to_rel_path(path)
    for buf_id, buf in self.FLOO_BUFS.items():
        if rel_path == buf['path']:
            buf_to_delete = buf
            break
    if buf_to_delete is None:
        msg.error('%s is not in this workspace' % path)
        return
    msg.log('deleting buffer ', rel_path)
    event = {
        'name': 'delete_buf',
        'id': buf_to_delete['id'],
    }
    self.agent.put(event)

def reconnect(self):
    if self.reconnect_timeout:
        return
    try:
        self.sock.close()
    except Exception:
        pass
    self.workspace_info = {}
    self.net_buf = ''
    self.sock = None
    self.authed = False
    G.JOINED_WORKSPACE = False
    # Exponential backoff, capped at 10 seconds.
    self.reconnect_delay *= 1.5
    if self.reconnect_delay > 10000:
        self.reconnect_delay = 10000
    if self.retries > 0:
        msg.log('Floobits: Reconnecting in %sms' % self.reconnect_delay)
        self.reconnect_timeout = utils.set_timeout(self.connect, int(self.reconnect_delay))
    elif self.retries == 0:
        msg.error('Floobits Error! Too many reconnect failures. Giving up.')
    self.retries -= 1

def _scan_dir(ig):
    path = ig.path
    if not utils.is_shared(path):
        msg.error('Skipping adding %s because it is not in shared path %s.' % (path, G.PROJECT_PATH))
        return
    if os.path.islink(path):
        msg.error('Skipping adding %s because it is a symlink.' % path)
        return
    ignored = ig.is_ignored(path)
    if ignored:
        msg.log('Not creating buf: %s' % (ignored))
        return
    msg.debug('create_buf: path is %s' % path)
    if not os.path.isdir(path):
        yield path
        return
    try:
        paths = os.listdir(path)
    except Exception as e:
        msg.error('Error listing path %s: %s' % (path, unicode(e)))
        return
    for p in paths:
        p_path = os.path.join(path, p)
        if p[0] == '.':
            if p not in ignore.HIDDEN_WHITELIST:
                msg.log('Not creating buf for hidden path %s' % p_path)
                continue
        ignored = ig.is_ignored(p_path)
        if ignored:
            msg.log('Not creating buf: %s' % (ignored))
            continue
        yield p_path

def _on_share_dir(self, data):
    file_to_share = None
    utils.reload_settings()
    G.USERNAME = data['username']
    G.SECRET = data['secret']
    dir_to_share = data['dir_to_share']
    perms = data['perms']
    editor.line_endings = data['line_endings'].find("unix") >= 0 and "\n" or "\r\n"
    dir_to_share = os.path.expanduser(dir_to_share)
    dir_to_share = utils.unfuck_path(dir_to_share)
    workspace_name = os.path.basename(dir_to_share)
    G.PROJECT_PATH = os.path.realpath(dir_to_share)
    msg.debug('%s %s %s' % (G.USERNAME, workspace_name, G.PROJECT_PATH))

    if os.path.isfile(dir_to_share):
        file_to_share = dir_to_share
        dir_to_share = os.path.dirname(dir_to_share)

    try:
        utils.mkdir(dir_to_share)
    except Exception:
        return msg.error("The directory %s doesn't exist and I can't create it." % dir_to_share)

    floo_file = os.path.join(dir_to_share, '.floo')
    info = {}
    try:
        floo_info = open(floo_file, 'rb').read().decode('utf-8')
        info = json.loads(floo_info)
    except (IOError, OSError):
        pass
    except Exception as e:
        msg.warn("Couldn't read .floo file: %s: %s" % (floo_file, str(e)))

    workspace_url = info.get('url')
    if workspace_url:
        parsed_url = api.prejoin_workspace(workspace_url, dir_to_share, {'perms': perms})
        if parsed_url:
            # TODO: make sure we create_flooignore
            # utils.add_workspace_to_persistent_json(parsed_url['owner'], parsed_url['workspace'], workspace_url, dir_to_share)
            agent = self.remote_connect(parsed_url['owner'], parsed_url['workspace'], False)
            return agent.once("room_info", lambda: agent.upload(file_to_share or dir_to_share))

    parsed_url = utils.get_workspace_by_path(dir_to_share, lambda workspace_url: api.prejoin_workspace(workspace_url, dir_to_share, {'perms': perms}))
    if parsed_url:
        agent = self.remote_connect(parsed_url['owner'], parsed_url['workspace'], False)
        return agent.once("room_info", lambda: agent.upload(file_to_share or dir_to_share))

    def on_done(data, choices=None):
        self.get_input('Workspace name:', workspace_name, self._on_create_workspace, workspace_name, dir_to_share, owner=data.get('response'), perms=perms)

    try:
        r = api.get_orgs_can_admin()
    except IOError as e:
        return editor.error_message('Error getting org list: %s' % str(e))
    if r.code >= 400 or len(r.body) == 0:
        return on_done({'response': G.USERNAME})

    i = 0
    choices = []
    choices.append([G.USERNAME, i])
    for org in r.body:
        i += 1
        choices.append([org['name'], i])

    self.get_input('Create workspace owned by (%s) ' % " ".join([x[0] for x in choices]), '', on_done, choices=choices)

def reconnect(self):
    msg.error('Remote connection died')
    sys.exit(1)

def reconnect(self):
    msg.error('Client connection died')
    sys.exit(1)