def _on_get_buf(self, data):
    buf_id = data['id']
    buf = self.bufs.get(buf_id)
    if not buf:
        return msg.warn('no buf found: %s. Hopefully you didn\'t need that' % data)
    timeout_id = buf.get('timeout_id')
    if timeout_id:
        utils.cancel_timeout(timeout_id)
    if data['encoding'] == 'base64':
        data['buf'] = base64.b64decode(data['buf'])
    self.bufs[buf_id] = data
    save = False
    if buf_id in self.save_on_get_bufs:
        self.save_on_get_bufs.remove(buf_id)
        save = True
    view = self.get_view(buf_id)
    if not view:
        msg.debug('No view for buf %s. Saving to disk.' % buf_id)
        return utils.save_buf(data)
    view.update(data)
    if save:
        view.save()
def _on_request_perms(self, data):
    user_id = str(data.get('user_id'))
    username = self.get_username_by_id(user_id)
    if not username:
        msg.debug('Unknown user for id %s. Not handling request_perms event.' % user_id)
        return
    perm_mapping = {
        'edit_room': 'edit',
        'admin_room': 'admin',
    }
    perms = data.get('perms')
    perms_str = ''.join([perm_mapping.get(p) for p in perms])
    prompt = 'User %s is requesting %s permission for this room.' % (username, perms_str)
    message = data.get('message')
    if message:
        prompt += '\n\n%s says: %s' % (username, message)
    prompt += '\n\nDo you want to grant them permission?'
    confirm = yield self.ok_cancel_dialog, prompt
    self.send({
        'name': 'perms',
        'action': confirm and 'add' or 'reject',
        'user_id': user_id,
        'perms': perms,
    })
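Side note: _on_request_perms above is written as a generator; it yields a (callable, argument) pair for the dialog instead of blocking on it. A minimal sketch of a driver for that pattern might look like the following; the name run_coroutine and the synchronous dialog call are illustrative assumptions, not the plugin's actual event loop.

def run_coroutine(gen):
    # Sketch only: drive a handler that yields a single (callable, args...) tuple.
    try:
        yielded = gen.send(None)              # run the handler up to its first yield
        func, args = yielded[0], yielded[1:]  # e.g. (self.ok_cancel_dialog, prompt)
        result = func(*args)                  # show the dialog synchronously (assumed)
        gen.send(result)                      # resume the handler with the user's answer
    except StopIteration:
        pass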
def _connect(self, host, port, attempts=0):
    if attempts > (self.proxy and 500 or 500):
        msg.error('Connection attempt timed out.')
        return self.reconnect()
    if not self._sock:
        msg.debug('_connect: No socket')
        return
    try:
        self._sock.connect((host, port))
        select.select([self._sock], [self._sock], [], 0)
    except socket.error as e:
        if e.errno == iscon_errno:
            pass
        elif e.errno in connect_errno:
            msg.debug('connect_errno: ', str_e(e))
            return utils.set_timeout(self._connect, 20, host, port, attempts + 1)
        else:
            msg.error('Error connecting: ', str_e(e))
            return self.reconnect()
    if self._secure:
        sock_debug('SSL-wrapping socket')
        self._sock = ssl.wrap_socket(self._sock, ca_certs=self._cert_path, cert_reqs=ssl.CERT_REQUIRED, do_handshake_on_connect=False)
    self._q.clear()
    self._buf_out = bytes()
    self.emit('connect')
    self.connected = True
def start_event_loop():
    global ticker
    if G.TIMERS:
        msg.debug('Your Vim was compiled with +timer support. Awesome!')
        return
    if not bool(int(vim.eval('has("clientserver")'))):
        return fallback_to_feedkeys('This VIM was not compiled with clientserver support. You should consider using a different vim!')
    exe = getattr(G, 'VIM_EXECUTABLE', None)
    if not exe:
        return fallback_to_feedkeys('Your vim was compiled with clientserver, but I don\'t know the name of the vim executable. '
                                    'Please define it in your ~/.floorc using the vim_executable directive. e.g. \'vim_executable mvim\'.')
    servername = vim.eval('v:servername')
    if not servername:
        return fallback_to_feedkeys('I can not identify the servername of this vim. You may need to pass --servername to vim at startup.')
    evaler = ticker_python.format(binary=exe, servername=servername, sleep='1.0')
    ticker = subprocess.Popen(['python', '-c', evaler], stderr=subprocess.PIPE, stdout=subprocess.PIPE)
    ticker.poll()
    utils.set_timeout(ticker_watcher, 500, ticker)
def _handle(self, data):
    self._buf_in += data
    if self._handling:
        return
    self._handling = True
    while True:
        before, sep, after = self._buf_in.partition(b'\n')
        if not sep:
            break
        try:
            # Node.js sends invalid utf8 even though we're calling write(string, "utf8")
            # Python 2 can figure it out, but python 3 hates it and will die here with some byte sequences
            # Instead of crashing the plugin, we drop the data. Yes, this is horrible.
            before = before.decode('utf-8', 'ignore')
            data = json.loads(before)
        except Exception as e:
            msg.error('Unable to parse json: ', str_e(e))
            msg.error('Data: ', before)
            # XXXX: THIS LOSES DATA
            self._buf_in = after
            continue
        name = data.get('name')
        self._buf_in = after
        try:
            msg.debug('got data ' + (name or 'no name'))
            self.emit('data', name, data)
        except Exception as e:
            api.send_error('Error handling %s event.' % name, str_e(e))
            if name == 'room_info':
                editor.error_message('Error joining workspace: %s' % str_e(e))
                self.stop()
    self._handling = False
def wrapped(*args, **kwargs):
    if reactor.is_ready():
        return func(*args, **kwargs)
    if warn:
        msg.error('ignoring request (%s) because you aren\'t in a workspace.' % func.__name__)
    else:
        msg.debug('ignoring request (%s) because you aren\'t in a workspace.' % func.__name__)
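For context, wrapped above is the inner function of a decorator that guards editor commands until the reactor is ready. A minimal sketch of such a decorator follows; the name guard_connected and the placement of the warn keyword are assumptions for illustration, not necessarily how the plugin defines it.

import functools

def guard_connected(warn=False):
    # Sketch only: wrap an arbitrary function in a guard like `wrapped`.
    def decorator(func):
        @functools.wraps(func)
        def wrapped(*args, **kwargs):
            if reactor.is_ready():
                return func(*args, **kwargs)
            if warn:
                msg.error('ignoring request (%s) because you aren\'t in a workspace.' % func.__name__)
            else:
                msg.debug('ignoring request (%s) because you aren\'t in a workspace.' % func.__name__)
        return wrapped
    return decorator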
def read(self):
    if self.fd is None:
        msg.debug('self.fd is None. Read called after cleanup.')
        return
    data = b''
    while True:
        try:
            d = os.read(self.fd, 65535)
            if not d:
                break
            data += d
        except (IOError, OSError):
            break
    self.buf[0] += data
    if not data:
        return
    while True:
        before, sep, after = self.buf[0].partition(b'\n')
        if not sep:
            break
        self.buf[0] = after
        try:
            msg.debug('Floobits SSL proxy output: ', before.decode('utf-8', 'ignore'))
        except Exception:
            pass
def floobits_check_credentials():
    msg.debug('Checking credentials.')
    if not (G.USERNAME and G.SECRET):
        if not utils.has_browser():
            msg.log('You need a Floobits account to use the Floobits plugin. Go to https://floobits.com to sign up.')
            return
        floobits_setup_credentials()
def create_workspace(workspace_name, share_path, owner, perms=None, upload_path=None):
    workspace_url = 'https://%s/%s/%s' % (G.DEFAULT_HOST, G.USERNAME, workspace_name)
    try:
        api_args = {
            'name': workspace_name,
            'owner': owner,
        }
        if perms:
            api_args['perms'] = perms
        r = api.create_workspace(api_args)
    except Exception as e:
        return editor.error_message('Unable to create workspace %s: %s' % (workspace_url, unicode(e)))

    if r.code < 400:
        msg.debug('Created workspace %s' % workspace_url)
        return floobits_join_workspace(workspace_url, share_path, upload_path=upload_path)

    if r.code == 402:
        # TODO: Better behavior. Ask to create a public workspace instead?
        detail = r.body.get('detail')
        err_msg = 'Unable to create workspace because you have reached your maximum number of workspaces'
        if detail:
            err_msg += detail
        return editor.error_message(err_msg)

    if r.code == 400:
        workspace_name = re.sub('[^A-Za-z0-9_\-]', '-', workspace_name)
        workspace_name = vim_input(
            '%s is an invalid name. Workspace names must match the regex [A-Za-z0-9_\-]. Choose another name: ' % workspace_name,
            workspace_name)
    elif r.code == 409:
        workspace_name = vim_input('Workspace %s already exists. Choose another name: ' % workspace_name, workspace_name + '1', 'file')
    else:
        return editor.error_message('Unable to create workspace: %s %s' % (workspace_url, r.body))
    return create_workspace(workspace_name, share_path, owner, perms, upload_path=upload_path)
def open_workspace_window3(cb):
    G.WORKSPACE_WINDOW = get_workspace_window()
    if not G.WORKSPACE_WINDOW:
        G.WORKSPACE_WINDOW = sublime.active_window()
    msg.debug('Setting project data. Path: %s' % G.PROJECT_PATH)
    G.WORKSPACE_WINDOW.set_project_data({'folders': [{'path': G.PROJECT_PATH}]})
    create_chat_view(cb)
def _on_rename_buf(self, data):
    # This can screw up if someone else renames the buffer around the same time as us. Oh well.
    msg.debug('rename_buf: %s' % data)
    buf = self.get_buf_by_path(utils.get_full_path(data['old_path']))
    if buf:
        return super(AgentConnection, self)._on_rename_buf(data)
    msg.debug('We already renamed %s. Skipping' % data['old_path'])
def _handle(self, data):
    self._buf += data
    while True:
        before, sep, after = self._buf.partition(b"\n")
        if not sep:
            return
        try:
            # Node.js sends invalid utf8 even though we're calling write(string, "utf8")
            # Python 2 can figure it out, but python 3 hates it and will die here with some byte sequences
            # Instead of crashing the plugin, we drop the data. Yes, this is horrible.
            before = before.decode("utf-8", "ignore")
            data = json.loads(before)
        except Exception as e:
            msg.error("Unable to parse json: %s" % str(e))
            msg.error("Data: %s" % before)
            # XXXX: THIS LOSES DATA
            self._buf = after
            continue
        name = data.get("name")
        try:
            msg.debug("got data " + (name or "no name"))
            self.emit("data", name, data)
        except Exception as e:
            print(traceback.format_exc())
            msg.error("Error handling %s event (%s)." % (name, str(e)))
            if name == "room_info":
                editor.error_message("Error joining workspace: %s" % str(e))
                self.stop()
        self._buf = after
def _connect(self, attempts=0):
    if attempts > (self.proxy and 500 or 500):
        msg.error("Connection attempt timed out.")
        return self.reconnect()
    if not self._sock:
        msg.debug("_connect: No socket")
        return
    try:
        self._sock.connect((self._host, self._port))
        select.select([self._sock], [self._sock], [], 0)
    except socket.error as e:
        if e.errno == iscon_errno:
            pass
        elif e.errno in connect_errno:
            return utils.set_timeout(self._connect, 20, attempts + 1)
        else:
            msg.error("Error connecting:", e)
            return self.reconnect()
    if self._secure:
        sock_debug("SSL-wrapping socket")
        self._sock = ssl.wrap_socket(
            self._sock,
            ca_certs=self._cert_path,
            cert_reqs=ssl.CERT_REQUIRED,
            do_handshake_on_connect=False
        )
    self._q.clear()
    self.reconnect_delay = self.INITIAL_RECONNECT_DELAY
    self.retries = self.MAX_RETRIES
    self.emit("connect")
    self.connected = True
def open_workspace_window2(cb):
    if sublime.platform() == 'linux':
        subl = open('/proc/self/cmdline').read().split(chr(0))[0]
    elif sublime.platform() == 'osx':
        # TODO: totally explodes if you install ST2 somewhere else
        settings = sublime.load_settings('Floobits.sublime-settings')
        subl = settings.get('sublime_executable', '/Applications/Sublime Text 2.app/Contents/SharedSupport/bin/subl')
        if not os.path.exists(subl):
            return sublime.error_message('''Can't find your Sublime Text executable at %s.
Please add "sublime_executable /path/to/subl" to your ~/.floorc and restart Sublime Text''' % subl)
    elif sublime.platform() == 'windows':
        subl = sys.executable
    else:
        raise Exception('WHAT PLATFORM ARE WE ON?!?!?')

    command = [subl]
    if get_workspace_window() is None:
        command.append('--new-window')
    command.append('--add')
    command.append(G.PROJECT_PATH)

    msg.debug('command:', command)
    p = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    poll_result = p.poll()
    msg.debug('poll:', poll_result)

    set_workspace_window(cb)
def put(self, item):
    if not item:
        return
    msg.debug('writing %s' % item.get('name', 'NO NAME'))
    self._q.append(json.dumps(item) + '\n')
    qsize = len(self._q)
    msg.debug('%s items in q' % qsize)
    return qsize
def put(self, item): if not item: return msg.debug("writing %s: %s" % (item.get("name", "NO NAME"), item)) self._q.append(json.dumps(item) + "\n") qsize = len(self._q) msg.debug("%s items in q" % qsize) return qsize
def floobits_check_credentials():
    msg.debug('Checking credentials.')
    if utils.can_auth():
        return
    if not utils.has_browser():
        msg.log('You need a Floobits account to use the Floobits plugin. Go to https://floobits.com to sign up.')
        return
    yield VUI.create_or_link_account, None, G.DEFAULT_HOST, False
def select(self, timeout=0):
    if not self._protos:
        return

    readable = []
    writeable = []
    errorable = []
    fd_map = {}

    for fd in self._protos:
        fileno = fd.fileno()
        if not fileno:
            continue
        fd.fd_set(readable, writeable, errorable)
        fd_map[fileno] = fd

    if not readable and not writeable:
        return

    try:
        _in, _out, _except = select.select(readable, writeable, errorable, timeout)
    except (select.error, socket.error, Exception) as e:
        # TODO: with multiple FDs, must call select with just one until we find the error :(
        for fileno in readable:
            try:
                select.select([fileno], [], [], 0)
            except (select.error, socket.error, Exception) as e:
                fd_map[fileno].reconnect()
                msg.error('Error in select(): ', fileno, str_e(e))
        return

    for fileno in _except:
        fd = fd_map[fileno]
        self._reconnect(fd, _in, _out)

    for fileno in _out:
        fd = fd_map[fileno]
        try:
            fd.write()
        except ssl.SSLError as e:
            if e.args[0] != ssl.SSL_ERROR_WANT_WRITE:
                raise
        except Exception as e:
            msg.error('Couldn\'t write to socket: ', str_e(e))
            msg.debug('Couldn\'t write to socket: ', pp_e(e))
            return self._reconnect(fd, _in)

    for fileno in _in:
        fd = fd_map[fileno]
        try:
            fd.read()
        except ssl.SSLError as e:
            if e.args[0] != ssl.SSL_ERROR_WANT_READ:
                raise
        except Exception as e:
            msg.error('Couldn\'t read from socket: ', str_e(e))
            msg.debug('Couldn\'t read from socket: ', pp_e(e))
            fd.reconnect()
def disconnect_dialog():
    if G.AGENT and G.JOINED_WORKSPACE:
        disconnect = sublime.ok_cancel_dialog('You can only be in one workspace at a time.', 'Leave workspace %s.' % G.AGENT.workspace)
        if disconnect:
            msg.debug('Stopping agent.')
            G.AGENT.stop()
            G.AGENT = None
        return disconnect
    return True
def disconnect_dialog():
    if G.AGENT and G.AGENT.joined_workspace:
        disconnect = sublime.ok_cancel_dialog('You can only be in one workspace at a time.', 'Leave %s/%s' % (G.AGENT.owner, G.AGENT.workspace))
        if disconnect:
            msg.debug('Stopping agent.')
            reactor.stop()
            G.AGENT = None
        return disconnect
    return True
def floobits_check_and_join_workspace(workspace_url):
    try:
        r = api.get_workspace_by_url(workspace_url)
    except Exception as e:
        return editor.error_message('Error joining %s: %s' % (workspace_url, str(e)))
    if r.code >= 400:
        return editor.error_message('Error joining %s: %s' % (workspace_url, r.body))
    msg.debug('Workspace %s exists' % workspace_url)
    return floobits_join_workspace(workspace_url)
def put(self, item):
    if not item:
        return
    self.req_id += 1
    item['req_id'] = self.req_id
    msg.debug('writing ', item.get('name', 'NO NAME'), ' req_id ', self.req_id, ' qsize ', len(self))
    self._q.append(json.dumps(item) + '\n')
    return self.req_id
def on_input(self, workspace_name, dir_to_share=None):
    if dir_to_share:
        self.dir_to_share = dir_to_share
    if workspace_name == '':
        return self.run(dir_to_share=self.dir_to_share)
    try:
        self.api_args['name'] = workspace_name
        self.api_args['owner'] = self.owner
        msg.debug(str(self.api_args))
        r = api.create_workspace(self.host, self.api_args)
    except Exception as e:
        msg.error('Unable to create workspace: %s' % str_e(e))
        return sublime.error_message('Unable to create workspace: %s' % str_e(e))

    workspace_url = 'https://%s/%s/%s' % (self.host, self.owner, workspace_name)
    msg.log('Created workspace %s' % workspace_url)

    if r.code < 400:
        utils.add_workspace_to_persistent_json(self.owner, workspace_name, workspace_url, self.dir_to_share)
        return self.window.run_command('floobits_join_workspace', {
            'workspace_url': workspace_url,
            'upload': dir_to_share
        })

    msg.error('Unable to create workspace: %s' % r.body)
    if r.code not in [400, 402, 409]:
        try:
            r.body = r.body['detail']
        except Exception:
            pass
        return sublime.error_message('Unable to create workspace: %s' % r.body)

    kwargs = {
        'dir_to_share': self.dir_to_share,
        'workspace_name': workspace_name,
        'api_args': self.api_args,
        'owner': self.owner,
        'upload': self.upload,
        'host': self.host,
    }
    if r.code == 400:
        kwargs['workspace_name'] = re.sub('[^A-Za-z0-9_\-\.]', '-', workspace_name)
        kwargs['prompt'] = 'Invalid name. Workspace names must match the regex [A-Za-z0-9_\-\.]. Choose another name:'
    elif r.code == 402:
        try:
            r.body = r.body['detail']
        except Exception:
            pass
        if sublime.ok_cancel_dialog('%s' % r.body, 'Open billing settings'):
            webbrowser.open('https://%s/%s/settings#billing' % (self.host, self.owner))
        return
    else:
        kwargs['prompt'] = 'Workspace %s/%s already exists. Choose another name:' % (self.owner, workspace_name)

    return self.window.run_command('floobits_create_workspace', kwargs)
def find_workspace(workspace_url):
    r = api.get_workspace_by_url(workspace_url)
    if r.code < 400:
        return r
    try:
        result = utils.parse_url(workspace_url)
        d = utils.get_persistent_data()
        del d['workspaces'][result['owner']][result['name']]
        utils.update_persistent_data(d)
    except Exception as e:
        msg.debug(str_e(e))
def floobits_complete_signup():
    msg.debug('Completing signup.')
    if not utils.has_browser():
        msg.log('You need a modern browser to complete the sign up. Go to https://floobits.com to sign up.')
        return
    floorc = utils.load_floorc()
    username = floorc.get('USERNAME')
    secret = floorc.get('SECRET')
    msg.debug('Completing sign up with %s %s' % (username, secret))
    if not (username and secret):
        return msg.error('You don\'t seem to have a Floobits account of any sort.')
    webbrowser.open('https://%s/%s/pinocchio/%s' % (G.DEFAULT_HOST, username, secret))
def on_input(self, workspace_name, dir_to_share=None):
    if dir_to_share:
        self.dir_to_share = dir_to_share
    if workspace_name == '':
        return self.run(dir_to_share=self.dir_to_share)
    try:
        self.api_args['name'] = workspace_name
        self.api_args['owner'] = self.owner
        msg.debug(str(self.api_args))
        r = api.create_workspace(self.api_args)
    except Exception as e:
        msg.error('Unable to create workspace: %s' % unicode(e))
        return sublime.error_message('Unable to create workspace: %s' % unicode(e))

    workspace_url = 'https://%s/%s/%s/' % (G.DEFAULT_HOST, self.owner, workspace_name)
    msg.log('Created workspace %s' % workspace_url)

    if r.code < 400:
        utils.add_workspace_to_persistent_json(self.owner, workspace_name, workspace_url, self.dir_to_share)
        return self.window.run_command('floobits_join_workspace', {
            'workspace_url': workspace_url,
            'agent_conn_kwargs': {'get_bufs': False}
        })

    msg.error('Unable to create workspace: %s' % r.body)
    if r.code not in [400, 402, 409]:
        try:
            r.body = r.body['detail']
        except Exception:
            pass
        return sublime.error_message('Unable to create workspace: %s' % r.body)

    kwargs = {
        'dir_to_share': self.dir_to_share,
        'workspace_name': workspace_name,
        'api_args': self.api_args,
        'owner': self.owner,
    }
    if r.code == 400:
        kwargs['workspace_name'] = re.sub('[^A-Za-z0-9_\-\.]', '-', workspace_name)
        kwargs['prompt'] = 'Invalid name. Workspace names must match the regex [A-Za-z0-9_\-\.]. Choose another name:'
    elif r.code == 402:
        try:
            r.body = r.body['detail']
        except Exception:
            pass
        return sublime.error_message('%s' % r.body)
    else:
        kwargs['prompt'] = 'Workspace %s/%s already exists. Choose another name:' % (self.owner, workspace_name)

    return self.window.run_command('floobits_create_workspace', kwargs)
def on_input(self, workspace_name, dir_to_share=None):
    if dir_to_share:
        self.dir_to_share = dir_to_share
    if workspace_name == '':
        return self.run(dir_to_share=self.dir_to_share)
    try:
        self.api_args['name'] = workspace_name
        self.api_args['owner'] = self.owner
        msg.debug(str(self.api_args))
        api.create_workspace(self.api_args)
        workspace_url = 'https://%s/r/%s/%s' % (G.DEFAULT_HOST, self.owner, workspace_name)
        print('Created workspace %s' % workspace_url)
    except HTTPError as e:
        err_body = e.read()
        msg.error('Unable to create workspace: %s %s' % (unicode(e), err_body))
        if e.code not in [400, 402, 409]:
            return sublime.error_message('Unable to create workspace: %s %s' % (unicode(e), err_body))

        kwargs = {
            'dir_to_share': self.dir_to_share,
            'workspace_name': workspace_name,
            'api_args': self.api_args,
            'owner': self.owner,
        }
        if e.code == 400:
            kwargs['workspace_name'] = re.sub('[^A-Za-z0-9_\-]', '-', workspace_name)
            kwargs['prompt'] = 'Invalid name. Workspace names must match the regex [A-Za-z0-9_\-]. Choose another name:'
        elif e.code == 402:
            try:
                err_body = json.loads(err_body)
                err_body = err_body['detail']
            except Exception:
                pass
            return sublime.error_message('%s' % err_body)
        else:
            kwargs['prompt'] = 'Workspace %s/%s already exists. Choose another name:' % (self.owner, workspace_name)

        return self.window.run_command('floobits_create_workspace', kwargs)
    except Exception as e:
        return sublime.error_message('Unable to create workspace: %s' % unicode(e))

    add_workspace_to_persistent_json(self.owner, workspace_name, workspace_url, self.dir_to_share)
    on_room_info_waterfall.add(on_room_info_msg)
    self.window.run_command('floobits_join_workspace', {
        'workspace_url': workspace_url,
        'agent_conn_kwargs': {'get_bufs': False}
    })
def select(self):
    if not self.conn:
        msg.error('select(): No socket.')
        return self.reconnect()

    while True:
        if self.agent:
            self.agent.tick()

        out_conns = []
        if len(self.to_emacs_q) > 0:
            out_conns.append(self.conn)

        try:
            _in, _out, _except = select.select([self.conn], out_conns, [self.conn], 0.05)
        except (select.error, socket.error, Exception) as e:
            msg.error('Error in select(): %s' % str(e))
            return self.reconnect()

        if _except:
            msg.error('Socket error')
            return self.reconnect()

        if _in:
            buf = ''
            while True:
                try:
                    d = self.conn.recv(4096)
                    if not d:
                        break
                    buf += d
                except (socket.error, TypeError):
                    break
            if buf:
                self.empty_selects = 0
                self.handle(buf)
            else:
                self.empty_selects += 1
                if self.empty_selects > 10:
                    msg.error('No data from sock.recv() {0} times.'.format(self.empty_selects))
                    return self.reconnect()

        if _out:
            while len(self.to_emacs_q) > 0:
                p = self.to_emacs_q.pop(0)
                try:
                    msg.debug('to emacs: %s' % p)
                    self.conn.sendall(p)
                except Exception as e:
                    msg.error('Couldn\'t write to socket: %s' % str(e))
                    return self.reconnect()
def _on_delete_buf(self, data):
    buf_id = int(data['id'])
    buf = self.bufs[buf_id]
    path = buf['path']
    try:
        super(AgentConnection, self)._on_delete_buf(data)
    except Exception as e:
        msg.debug('Unable to delete buf %s: %s' % (path, str(e)))
    else:
        self.to_emacs('delete_buf', {
            'full_path': utils.get_full_path(path),
            'path': path,
            'username': data.get('username', ''),
        })
def find_workspace(workspace_url):
    r = api.get_workspace_by_url(workspace_url)
    if r.code < 400:
        on_room_info_waterfall.add(ignore.create_flooignore, dir_to_share)
        on_room_info_waterfall.add(lambda: G.AGENT.upload(dir_to_share, on_room_info_msg))
        return r
    try:
        result = utils.parse_url(workspace_url)
        d = utils.get_persistent_data()
        del d['workspaces'][result['owner']][result['name']]
        utils.update_persistent_data(d)
    except Exception as e:
        msg.debug(unicode(e))
    return
def _on_patch(self, data):
    buf_id = data['id']
    buf = self.bufs[buf_id]
    if 'buf' not in buf:
        msg.debug('buf ', buf['path'], ' not populated yet. not patching')
        return
    if buf['encoding'] == 'base64':
        # TODO apply binary patches
        return self.get_buf(buf_id, None)
    if len(data['patch']) == 0:
        msg.debug('wtf? no patches to apply. server is being stupid')
        return
    msg.debug('patch is', data['patch'])
    dmp_patches = DMP.patch_fromText(data['patch'])
    # TODO: run this in a separate thread
    old_text = buf['buf']
    view = self.get_view(buf_id)
    if view and not view.is_loading():
        view_text = view.get_text()
        if old_text == view_text:
            buf['forced_patch'] = False
        elif not buf.get('forced_patch'):
            patch = utils.FlooPatch(view_text, buf)
            # Update the current copy of the buffer
            buf['buf'] = patch.current
            buf['md5'] = hashlib.md5(patch.current.encode('utf-8')).hexdigest()
            buf['forced_patch'] = True
            msg.debug('forcing patch for ', buf['path'])
            self.send(patch.to_json())
            old_text = view_text
        else:
            msg.debug('forced patch is true. not sending another force patch for buf ', buf['path'])
    md5_before = hashlib.md5(old_text.encode('utf-8')).hexdigest()
    if md5_before != data['md5_before']:
        msg.warn('starting md5s don\'t match for ', buf['path'], '. this is dangerous!')
    t = DMP.patch_apply(dmp_patches, old_text)
    clean_patch = True
    for applied_patch in t[1]:
        if not applied_patch:
            clean_patch = False
            break
    if G.DEBUG:
        if len(t[0]) == 0:
            try:
                msg.debug('OMG EMPTY!')
                msg.debug('Starting data:', buf['buf'])
                msg.debug('Patch:', data['patch'])
            except Exception as e:
                msg.error(e)
        if '\x01' in t[0]:
            msg.debug('FOUND CRAZY BYTE IN BUFFER')
            msg.debug('Starting data:', buf['buf'])
            msg.debug('Patch:', data['patch'])
    timeout_id = buf.get('timeout_id')
    if timeout_id:
        utils.cancel_timeout(timeout_id)
        del buf['timeout_id']
    if not clean_patch:
        msg.log('Couldn\'t patch ', buf['path'], ' cleanly.')
        return self.get_buf(buf_id, view)
    cur_hash = hashlib.md5(t[0].encode('utf-8')).hexdigest()
    if cur_hash != data['md5_after']:
        msg.debug('Ending md5s don\'t match for ', buf['path'], ' Setting get_buf timeout.')
        buf['timeout_id'] = utils.set_timeout(self.get_buf, 2000, buf_id, view)
    buf['buf'] = t[0]
    buf['md5'] = cur_hash
    if not view:
        msg.debug('No view. Not saving buffer ', buf_id)

        def _on_load():
            v = self.get_view(buf_id)
            if v and 'buf' in buf:
                v.update(buf, message=False)

        self.on_load[buf_id]['patch'] = _on_load
        return
    view.apply_patches(buf, t, data['username'])
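For reference, _on_patch leans on the standalone diff-match-patch library for the patch_fromText/patch_apply calls. A minimal, self-contained round trip looks like the sketch below; it assumes the PyPI diff-match-patch package is installed and that DMP in the handler is a diff_match_patch instance.

from diff_match_patch import diff_match_patch

DMP = diff_match_patch()

old_text = 'hello world\n'
new_text = 'hello floobits world\n'

# The sending side serializes patches as text...
patch_text = DMP.patch_toText(DMP.patch_make(old_text, new_text))

# ...and the receiving side parses and applies them, as _on_patch does.
patches = DMP.patch_fromText(patch_text)
result_text, applied = DMP.patch_apply(patches, old_text)

assert all(applied)             # corresponds to the clean_patch check above
assert result_text == new_text  # corresponds to the md5_after comparison above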
def _upload(self, path, text=None):
    size = 0
    try:
        if text is None:
            with open(path, 'rb') as buf_fd:
                buf = buf_fd.read()
        else:
            try:
                # work around python 3 encoding issue
                buf = text.encode('utf8')
            except Exception as e:
                msg.debug('Error encoding buf ', path, ': ', str_e(e))
                # We're probably in python 2 so it's ok to do this
                buf = text
        size = len(buf)
        encoding = 'utf8'
        rel_path = utils.to_rel_path(path)
        existing_buf = self.get_buf_by_path(path)
        if existing_buf:
            if text is None:
                buf_md5 = hashlib.md5(buf).hexdigest()
                if existing_buf['md5'] == buf_md5:
                    msg.log(path, ' already exists and has the same md5. Skipping.')
                    return size
                existing_buf['md5'] = buf_md5
            msg.log('Setting buffer ', rel_path)

            try:
                buf = buf.decode('utf-8')
            except Exception:
                buf = base64.b64encode(buf).decode('utf-8')
                encoding = 'base64'

            existing_buf['buf'] = buf
            existing_buf['encoding'] = encoding

            self.send({
                'name': 'set_buf',
                'id': existing_buf['id'],
                'buf': buf,
                'md5': existing_buf['md5'],
                'encoding': encoding,
            })
            self.send({'name': 'saved', 'id': existing_buf['id']})
            return size

        try:
            buf = buf.decode('utf-8')
        except Exception:
            buf = base64.b64encode(buf).decode('utf-8')
            encoding = 'base64'

        msg.log('Creating buffer ', rel_path, ' (', len(buf), ' bytes)')
        event = {
            'name': 'create_buf',
            'buf': buf,
            'path': rel_path,
            'encoding': encoding,
        }
        self.send(event)
    except (IOError, OSError):
        msg.error('Failed to open ', path)
    except Exception as e:
        msg.error('Failed to create buffer ', path, ': ', str_e(e))
    return size
def floobits_join_workspace(workspace_url, d='', upload_path=None):
    editor.line_endings = _get_line_endings()
    msg.debug('workspace url is %s' % workspace_url)
    try:
        result = utils.parse_url(workspace_url)
    except Exception as e:
        return msg.error(str(e))

    if d:
        utils.mkdir(d)
    else:
        try:
            d = utils.get_persistent_data()['workspaces'][result['owner']][result['workspace']]['path']
        except Exception:
            d = os.path.realpath(os.path.join(G.COLAB_DIR, result['owner'], result['workspace']))

    prompt = 'Save workspace files to: '
    if not os.path.isdir(d):
        while True:
            d = vim_input(prompt, d, 'dir')
            if d == '':
                continue
            d = os.path.realpath(os.path.expanduser(d))
            if os.path.isfile(d):
                prompt = '%s is not a directory. Enter an existing path or a path I can create: ' % d
                continue
            if not os.path.isdir(d):
                try:
                    utils.mkdir(d)
                except Exception as e:
                    prompt = 'Couldn\'t make dir %s: %s ' % (d, str(e))
                    continue
            break
    d = os.path.realpath(os.path.abspath(d) + os.sep)
    try:
        utils.add_workspace_to_persistent_json(result['owner'], result['workspace'], workspace_url, d)
    except Exception as e:
        return msg.error('Error adding workspace to persistent.json: %s' % str(e))

    G.PROJECT_PATH = d
    vim.command('cd %s' % G.PROJECT_PATH)
    msg.debug('Joining workspace %s' % workspace_url)

    floobits_stop_everything()
    try:
        conn = VimHandler(result['owner'], result['workspace'])
        if upload_path:
            conn.once('room_info', lambda: G.AGENT.upload(upload_path))
        reactor.connect(conn, result['host'], result['port'], result['secure'])
    except Exception as e:
        msg.error(str(e))
        tb = traceback.format_exc()
        msg.debug(tb)
    if not G.TIMERS:
        start_event_loop()
def floobits_share_dir(dir_to_share, perms):
    utils.reload_settings()
    workspace_name = os.path.basename(dir_to_share)
    G.PROJECT_PATH = os.path.realpath(dir_to_share)
    msg.debug('%s %s %s' % (G.USERNAME, workspace_name, G.PROJECT_PATH))

    file_to_share = None
    dir_to_share = os.path.expanduser(dir_to_share)
    dir_to_share = utils.unfuck_path(dir_to_share)
    dir_to_share = os.path.abspath(dir_to_share)
    dir_to_share = os.path.realpath(dir_to_share)
    workspace_name = os.path.basename(dir_to_share)

    if os.path.isfile(dir_to_share):
        file_to_share = dir_to_share
        dir_to_share = os.path.dirname(dir_to_share)

    try:
        utils.mkdir(dir_to_share)
    except Exception:
        return msg.error("The directory %s doesn't exist and I can't create it." % dir_to_share)

    if not os.path.isdir(dir_to_share):
        return msg.error('The directory %s doesn\'t appear to exist' % dir_to_share)

    floo_file = os.path.join(dir_to_share, '.floo')
    # look for the .floo file for hints about previous behavior
    info = {}
    try:
        floo_info = open(floo_file, 'rb').read().decode('utf-8')
        info = json.loads(floo_info)
    except (IOError, OSError):
        pass
    except Exception:
        msg.warn('couldn\'t read the floo_info file: %s' % floo_file)

    workspace_url = info.get('url')
    if workspace_url:
        parsed_url = api.prejoin_workspace(workspace_url, dir_to_share, {'perms': perms})
        if parsed_url:
            return floobits_join_workspace(workspace_url, dir_to_share, upload_path=file_to_share or dir_to_share)

    filter_func = lambda workspace_url: api.prejoin_workspace(workspace_url, dir_to_share, {'perms': perms})
    parsed_url = utils.get_workspace_by_path(dir_to_share, filter_func)
    if parsed_url:
        return floobits_join_workspace(workspace_url, dir_to_share, upload_path=file_to_share or dir_to_share)

    try:
        r = api.get_orgs_can_admin()
    except IOError as e:
        return editor.error_message('Error getting org list: %s' % str(e))
    if r.code >= 400 or len(r.body) == 0:
        workspace_name = vim_input('Workspace name:', workspace_name, 'file')
        return create_workspace(workspace_name, dir_to_share, G.USERNAME, perms, upload_path=file_to_share or dir_to_share)

    orgs = r.body
    if len(orgs) == 0:
        return create_workspace(workspace_name, dir_to_share, G.USERNAME, perms, upload_path=file_to_share or dir_to_share)

    choices = [G.USERNAME]
    for o in orgs:
        choices.append(o['name'])

    owner = vim_choice('Create workspace for:', G.USERNAME, choices)
    if owner:
        return create_workspace(workspace_name, dir_to_share, owner, perms, upload_path=file_to_share or dir_to_share)
def _on_room_info(self, data):
    self.reset()
    self.joined_workspace = True
    self.workspace_info = data
    G.PERMS = data['perms']

    self.proto.reset_retries()

    if G.OUTBOUND_FILTERING:
        msg.error('Detected outbound port blocking! See https://floobits.com/help/network for more info.')

    read_only = False
    if 'patch' not in data['perms']:
        read_only = True
        no_perms_msg = '''You don't have permission to edit this workspace. All files will be read-only.'''
        msg.log('No patch permission. Setting buffers to read-only')
        if 'request_perm' in data['perms']:
            should_send = yield self.ok_cancel_dialog, no_perms_msg + '\nDo you want to request edit permission?'
            # TODO: wait for perms to be OK'd/denied before uploading or bailing
            if should_send:
                self.send({'name': 'request_perms', 'perms': ['edit_room']})
        else:
            if G.EXPERT_MODE:
                editor.status_message(no_perms_msg)
            else:
                editor.error_message(no_perms_msg)

    floo_json = {
        'url': utils.to_workspace_url({
            'owner': self.owner,
            'workspace': self.workspace,
            'host': self.proto.host,
            'port': self.proto.port,
            'secure': self.proto.secure,
        })
    }
    utils.update_floo_file(os.path.join(G.PROJECT_PATH, '.floo'), floo_json)
    utils.update_recent_workspaces(self.workspace_url)

    changed_bufs = []
    missing_bufs = []
    new_files = set()
    ig = ignore.create_ignore_tree(G.PROJECT_PATH)
    G.IGNORE = ig
    if not read_only:
        new_files = set([utils.to_rel_path(x) for x in ig.list_paths()])

    for buf_id, buf in data['bufs'].items():
        buf_id = int(buf_id)  # json keys must be strings
        buf_path = utils.get_full_path(buf['path'])
        new_dir = os.path.dirname(buf_path)
        utils.mkdir(new_dir)
        self.bufs[buf_id] = buf
        self.paths_to_ids[buf['path']] = buf_id

        view = self.get_view(buf_id)
        if view and not view.is_loading() and buf['encoding'] == 'utf8':
            view_text = view.get_text()
            view_md5 = hashlib.md5(view_text.encode('utf-8')).hexdigest()
            buf['buf'] = view_text
            buf['view'] = view
            G.VIEW_TO_HASH[view.native_id] = view_md5
            if view_md5 == buf['md5']:
                msg.debug('md5 sum matches view. not getting buffer ', buf['path'])
            else:
                changed_bufs.append(buf)
                buf['md5'] = view_md5
            continue

        try:
            if buf['encoding'] == 'utf8':
                if io:
                    buf_fd = io.open(buf_path, 'Urt', encoding='utf8')
                    buf_buf = buf_fd.read()
                else:
                    buf_fd = open(buf_path, 'rb')
                    buf_buf = buf_fd.read().decode('utf-8').replace('\r\n', '\n')
                md5 = hashlib.md5(buf_buf.encode('utf-8')).hexdigest()
            else:
                buf_fd = open(buf_path, 'rb')
                buf_buf = buf_fd.read()
                md5 = hashlib.md5(buf_buf).hexdigest()
            buf_fd.close()
            buf['buf'] = buf_buf
            if md5 == buf['md5']:
                msg.debug('md5 sum matches. not getting buffer ', buf['path'])
            else:
                msg.debug('md5 differs. possibly getting buffer later ', buf['path'])
                changed_bufs.append(buf)
                buf['md5'] = md5
        except Exception as e:
            msg.debug('Error calculating md5 for ', buf['path'], ', ', str_e(e))
            missing_bufs.append(buf)

    ignored = []
    for p, buf_id in self.paths_to_ids.items():
        if p not in new_files:
            ignored.append(p)
        new_files.discard(p)

    if self.action == utils.JOIN_ACTION.UPLOAD:
        yield self._initial_upload, ig, missing_bufs, changed_bufs
        # TODO: maybe use org name here
        who = 'Your friends'
        anon_perms = G.AGENT.workspace_info.get('anon_perms')
        if 'get_buf' in anon_perms:
            who = 'Anyone'
        _msg = 'You are sharing:\n\n%s\n\n%s can join your workspace at:\n\n%s' % (G.PROJECT_PATH, who, G.AGENT.workspace_url)
        # Workaround for horrible Sublime Text bug
        utils.set_timeout(editor.message_dialog, 0, _msg)
    elif changed_bufs or missing_bufs or new_files:
        # TODO: handle readonly here
        if self.action == utils.JOIN_ACTION.PROMPT:
            stomp_local = yield self.stomp_prompt, changed_bufs, missing_bufs, list(new_files), ignored
            if stomp_local not in [0, 1]:
                self.stop()
                return
        elif self.action == utils.JOIN_ACTION.DOWNLOAD:
            stomp_local = True
        else:
            # This should never happen
            assert False
            return
        if stomp_local:
            for buf in changed_bufs:
                self.get_buf(buf['id'], buf.get('view'))
                self.save_on_get_bufs.add(buf['id'])
            for buf in missing_bufs:
                self.get_buf(buf['id'], buf.get('view'))
                self.save_on_get_bufs.add(buf['id'])
        else:
            yield self._initial_upload, ig, missing_bufs, changed_bufs

    success_msg = 'Successfully joined workspace %s/%s' % (self.owner, self.workspace)
    msg.log(success_msg)
    editor.status_message(success_msg)

    data = utils.get_persistent_data()
    data['recent_workspaces'].insert(0, {'url': self.workspace_url})
    utils.update_persistent_data(data)
    utils.add_workspace_to_persistent_json(self.owner, self.workspace, self.workspace_url, G.PROJECT_PATH)

    temp_data = data.get('temp_data', {})
    hangout = temp_data.get('hangout', {})
    hangout_url = hangout.get('url')
    if hangout_url:
        self.prompt_join_hangout(hangout_url)

    self.emit('room_info')