def prompt_ignore(self, ig, path, cb):
    ignore.create_flooignore(ig.path)
    dirs = ig.get_children()
    dirs.append(ig)
    dirs = sorted(dirs, key=attrgetter('size'))
    size = starting_size = reduce(lambda x, c: x + c.size, dirs, 0)
    too_big = []
    while size > MAX_WORKSPACE_SIZE and dirs:
        cd = dirs.pop()
        size -= cd.size
        too_big.append(cd)
    if size > MAX_WORKSPACE_SIZE:
        editor.error_message(
            'Maximum workspace size is %.2fMB.\n\n%s is too big (%.2fMB) to upload. Consider adding stuff to the .flooignore file.' %
            (MAX_WORKSPACE_SIZE / 1000000.0, path, ig.size / 1000000.0))
        cb([set(), 0])
        return
    if too_big:
        txt = TOO_BIG_TEXT % (MAX_WORKSPACE_SIZE / 1000000.0, path, starting_size / 1000000.0,
                              "\n".join(set([x.path for x in too_big])))
        upload = yield self.ok_cancel_dialog, txt
        if not upload:
            cb([set(), 0])
            return
    files = set()
    for ig in dirs:
        files = files.union(set([utils.to_rel_path(x) for x in ig.files]))
    cb([files, size])
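
# Added note (not in the original source): prompt_ignore trims the workspace from the
# largest directory down. Because dirs is sorted ascending by size, dirs.pop() always
# removes the biggest remaining entry. With a hypothetical 200 MB limit and child
# directories of 150 MB, 120 MB and 50 MB (320 MB total), only the 150 MB directory is
# moved to too_big before the remaining 170 MB fits, and the user is then asked once,
# via ok_cancel_dialog, whether to upload the trimmed set.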
def upload(self, path):
    if not utils.is_shared(path):
        editor.error_message('Cannot share %s because it is not in the shared path %s.\n\nPlease move it there and try again.' % (path, G.PROJECT_PATH))
        return
    ig = ignore.create_ignore_tree(G.PROJECT_PATH)
    G.IGNORE = ig
    is_dir = os.path.isdir(path)
    if ig.is_ignored(path, is_dir, True):
        editor.error_message('Cannot share %s because it is ignored.\n\nAdd an exclude rule (!%s) to your .flooignore file.' % (path, path))
        return
    rel_path = utils.to_rel_path(path)
    if not is_dir:
        self._upload_file_by_path(rel_path)
        return
    for p in rel_path.split('/'):
        child = ig.children.get(p)
        if not child:
            break
        ig = child
    if ig.path != path:
        msg.warn(ig.path, ' is not the same as ', path)
    self._rate_limited_upload(ig.list_paths(), ig.total_size, upload_func=self._upload_file_by_path)
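
# Added note (not in the original source): for a directory, upload() walks the ignore
# tree one path segment at a time (ig.children is assumed to be keyed by directory
# name), so list_paths() and total_size cover only the requested subtree. The msg.warn
# covers the case where the walk stops early because a segment is missing from the tree.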
def run(self):
    try:
        agent = G.AGENT
        d = {
            'port': agent.proto.port,
            'secure': agent.proto.secure,
            'owner': agent.owner,
            'workspace': agent.workspace,
            'host': agent.proto.host,
        }
        view = self.window.active_view()
        if view:
            path = view.file_name()
            if path and utils.is_shared(path):
                d['path'] = utils.to_rel_path(path)
                try:
                    d['line'] = view.rowcol(view.sel()[0].a)[0]
                except Exception:
                    pass
        url = utils.to_workspace_url(d)
        webbrowser.open(url)
    except Exception as e:
        sublime.error_message('Unable to open workspace in web editor: %s' % str_e(e))
def _scan_dir(self, bufs, ig, read_only):
    changed_bufs = []
    missing_bufs = []
    new_files = set()
    if not read_only:
        new_files = set([utils.to_rel_path(x) for x in ig.list_paths()])
    for buf_id, buf in bufs.items():
        buf_id = int(buf_id)  # json keys must be strings
        buf_path = utils.get_full_path(buf['path'])
        new_dir = os.path.dirname(buf_path)
        utils.mkdir(new_dir)
        self.bufs[buf_id] = buf
        self.paths_to_ids[buf['path']] = buf_id
        view = self.get_view(buf_id)
        if view and not view.is_loading() and buf['encoding'] == 'utf8':
            view_text = view.get_text()
            view_md5 = hashlib.md5(view_text.encode('utf-8')).hexdigest()
            buf['buf'] = view_text
            buf['view'] = view
            G.VIEW_TO_HASH[view.native_id] = view_md5
            if view_md5 == buf['md5']:
                msg.debug('md5 sum matches view. not getting buffer ', buf['path'])
            else:
                changed_bufs.append(buf)
                buf['md5'] = view_md5
            continue
        try:
            if buf['encoding'] == 'utf8':
                if io:
                    buf_fd = io.open(buf_path, 'Urt', encoding='utf8')
                    buf_buf = buf_fd.read()
                else:
                    buf_fd = open(buf_path, 'rb')
                    buf_buf = buf_fd.read().decode('utf-8').replace('\r\n', '\n')
                md5 = hashlib.md5(buf_buf.encode('utf-8')).hexdigest()
            else:
                buf_fd = open(buf_path, 'rb')
                buf_buf = buf_fd.read()
                md5 = hashlib.md5(buf_buf).hexdigest()
            buf_fd.close()
            buf['buf'] = buf_buf
            if md5 == buf['md5']:
                msg.debug('md5 sum matches. not getting buffer ', buf['path'])
            else:
                msg.debug('md5 differs. possibly getting buffer later ', buf['path'])
                changed_bufs.append(buf)
                buf['md5'] = md5
        except Exception as e:
            msg.debug('Error calculating md5 for ', buf['path'], ', ', str_e(e))
            missing_bufs.append(buf)
    return changed_bufs, missing_bufs, new_files
def get_buf_by_path(self, path):
    try:
        p = utils.to_rel_path(path)
    except ValueError:
        return
    buf_id = self.paths_to_ids.get(p)
    if buf_id:
        return self.bufs.get(buf_id)
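
# Illustrative sketch (assumed shapes, not part of the original source): get_buf_by_path
# relies on two handler-level maps that the surrounding code maintains, roughly:
#
#     self.paths_to_ids = {'src/app.py': 3}
#     self.bufs = {3: {'id': 3, 'path': 'src/app.py', 'md5': '...', 'encoding': 'utf8'}}
#
# utils.to_rel_path raises ValueError for paths outside the shared project, which is why
# lookups for unshared files fall through and return None.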
def _upload(self, path, text=None):
    size = 0
    try:
        if text:
            # TODO: possible encoding issues with python 3
            buf = text
        else:
            with open(path, 'rb') as buf_fd:
                buf = buf_fd.read()
        size = len(buf)
        encoding = 'utf8'
        rel_path = utils.to_rel_path(path)
        existing_buf = self.get_buf_by_path(path)
        if existing_buf:
            buf_md5 = hashlib.md5(buf).hexdigest()
            if existing_buf['md5'] == buf_md5:
                msg.log('%s already exists and has the same md5. Skipping.' % path)
                return size
            msg.log('Setting buffer ', rel_path)
            try:
                buf = buf.decode('utf-8')
            except Exception:
                buf = base64.b64encode(buf).decode('utf-8')
                encoding = 'base64'
            existing_buf['buf'] = buf
            existing_buf['encoding'] = encoding
            existing_buf['md5'] = buf_md5
            self.send({
                'name': 'set_buf',
                'id': existing_buf['id'],
                'buf': buf,
                'md5': buf_md5,
                'encoding': encoding,
            })
            return size
        try:
            buf = buf.decode('utf-8')
        except Exception:
            buf = base64.b64encode(buf).decode('utf-8')
            encoding = 'base64'
        msg.log('Creating buffer ', rel_path)
        event = {
            'name': 'create_buf',
            'buf': buf,
            'path': rel_path,
            'encoding': encoding,
        }
        self.send(event)
    except (IOError, OSError):
        msg.error('Failed to open %s.' % path)
    except Exception as e:
        # use str_e for python 2/3 compatibility (unicode() does not exist in python 3)
        msg.error('Failed to create buffer %s: %s' % (path, str_e(e)))
    return size
def _upload(self, path, text=None):
    size = 0
    try:
        if text is None:
            with open(path, 'rb') as buf_fd:
                buf = buf_fd.read()
        else:
            try:
                # work around python 3 encoding issue
                buf = text.encode('utf8')
            except Exception as e:
                msg.debug('Error encoding buf ', path, ': ', str_e(e))
                # We're probably in python 2 so it's ok to do this
                buf = text
        size = len(buf)
        encoding = 'utf8'
        rel_path = utils.to_rel_path(path)
        existing_buf = self.get_buf_by_path(path)
        if existing_buf:
            if text is None:
                buf_md5 = hashlib.md5(buf).hexdigest()
                if existing_buf['md5'] == buf_md5:
                    msg.log(path, ' already exists and has the same md5. Skipping.')
                    return size
                existing_buf['md5'] = buf_md5
            msg.log('Setting buffer ', rel_path)
            try:
                buf = buf.decode('utf-8')
            except Exception:
                buf = base64.b64encode(buf).decode('utf-8')
                encoding = 'base64'
            existing_buf['buf'] = buf
            existing_buf['encoding'] = encoding
            self.send({
                'name': 'set_buf',
                'id': existing_buf['id'],
                'buf': buf,
                'md5': existing_buf['md5'],
                'encoding': encoding,
            })
            self.send({'name': 'saved', 'id': existing_buf['id']})
            return size
        try:
            buf = buf.decode('utf-8')
        except Exception:
            buf = base64.b64encode(buf).decode('utf-8')
            encoding = 'base64'
        msg.log('Creating buffer ', rel_path, ' (', len(buf), ' bytes)')
        event = {
            'name': 'create_buf',
            'buf': buf,
            'path': rel_path,
            'encoding': encoding,
        }

        def done(d):
            if d.get('id'):
                self.bufs[d['id']] = buf
                self.paths_to_ids[rel_path] = d['id']

        self.send(event, done)
    except (IOError, OSError):
        msg.error('Failed to open ', path)
    except Exception as e:
        msg.error('Failed to create buffer ', path, ': ', str_e(e))
    return size
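
# Added note (illustrative, based only on the payloads built above): _upload emits two
# kinds of workspace events. For a file the server already knows about it sends
#
#     {'name': 'set_buf', 'id': <buf id>, 'buf': <text or base64>, 'md5': ..., 'encoding': 'utf8' | 'base64'}
#
# followed by {'name': 'saved', 'id': <buf id>}. For a new file it sends
# {'name': 'create_buf', 'buf': ..., 'path': <rel path>, 'encoding': ...} and records the
# server-assigned id in the done() callback.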
def _on_room_info(self, data):
    self.joined_workspace = True
    self.workspace_info = data
    G.PERMS = data['perms']
    self.proto.reset_retries()

    if G.OUTBOUND_FILTERING:
        msg.error('Detected outbound port blocking! See https://floobits.com/help/network for more info.')

    read_only = False
    if 'patch' not in data['perms']:
        read_only = True
        no_perms_msg = '''You don't have permission to edit this workspace. All files will be read-only.'''
        msg.log('No patch permission. Setting buffers to read-only')
        if 'request_perm' in data['perms']:
            should_send = yield self.ok_cancel_dialog, no_perms_msg + '\nDo you want to request edit permission?'
            # TODO: wait for perms to be OK'd/denied before uploading or bailing
            if should_send:
                self.send({'name': 'request_perms', 'perms': ['edit_room']})
        else:
            if G.EXPERT_MODE:
                editor.status_message(no_perms_msg)
            else:
                editor.error_message(no_perms_msg)

    floo_json = {
        'url': utils.to_workspace_url({
            'owner': self.owner,
            'workspace': self.workspace,
            'host': self.proto.host,
            'port': self.proto.port,
            'secure': self.proto.secure,
        })
    }
    utils.update_floo_file(os.path.join(G.PROJECT_PATH, '.floo'), floo_json)
    utils.update_recent_workspaces(self.workspace_url)

    changed_bufs = []
    missing_bufs = []
    new_files = set()
    ig = ignore.create_ignore_tree(G.PROJECT_PATH)
    G.IGNORE = ig
    if not read_only:
        new_files = set([utils.to_rel_path(x) for x in ig.list_paths()])

    for buf_id, buf in data['bufs'].items():
        buf_id = int(buf_id)  # json keys must be strings
        buf_path = utils.get_full_path(buf['path'])
        new_dir = os.path.dirname(buf_path)
        utils.mkdir(new_dir)
        self.bufs[buf_id] = buf
        self.paths_to_ids[buf['path']] = buf_id
        view = self.get_view(buf_id)
        if view and not view.is_loading() and buf['encoding'] == 'utf8':
            view_text = view.get_text()
            view_md5 = hashlib.md5(view_text.encode('utf-8')).hexdigest()
            buf['buf'] = view_text
            buf['view'] = view
            G.VIEW_TO_HASH[view.native_id] = view_md5
            if view_md5 == buf['md5']:
                msg.debug('md5 sum matches view. not getting buffer ', buf['path'])
            else:
                changed_bufs.append(buf)
                buf['md5'] = view_md5
            continue
        try:
            if buf['encoding'] == 'utf8':
                if io:
                    buf_fd = io.open(buf_path, 'Urt', encoding='utf8')
                    buf_buf = buf_fd.read()
                else:
                    buf_fd = open(buf_path, 'rb')
                    buf_buf = buf_fd.read().decode('utf-8').replace('\r\n', '\n')
                md5 = hashlib.md5(buf_buf.encode('utf-8')).hexdigest()
            else:
                buf_fd = open(buf_path, 'rb')
                buf_buf = buf_fd.read()
                md5 = hashlib.md5(buf_buf).hexdigest()
            buf_fd.close()
            buf['buf'] = buf_buf
            if md5 == buf['md5']:
                msg.debug('md5 sum matches. not getting buffer ', buf['path'])
            else:
                msg.debug('md5 differs. possibly getting buffer later ', buf['path'])
                changed_bufs.append(buf)
                buf['md5'] = md5
        except Exception as e:
            msg.debug('Error calculating md5 for ', buf['path'], ', ', str_e(e))
            missing_bufs.append(buf)

    ignored = []
    for p, buf_id in self.paths_to_ids.items():
        if p not in new_files:
            ignored.append(p)
        new_files.discard(p)

    if self.action == utils.JOIN_ACTION.UPLOAD:
        yield self._initial_upload, ig, missing_bufs, changed_bufs
        # TODO: maybe use org name here
        who = 'Your friends'
        anon_perms = G.AGENT.workspace_info.get('anon_perms')
        if 'get_buf' in anon_perms:
            who = 'Anyone'
        _msg = 'You are sharing:\n\n%s\n\n%s can join your workspace at:\n\n%s' % (G.PROJECT_PATH, who, G.AGENT.workspace_url)
        # Workaround for horrible Sublime Text bug
        utils.set_timeout(editor.message_dialog, 0, _msg)
    elif changed_bufs or missing_bufs or new_files:
        # TODO: handle readonly here
        if self.action == utils.JOIN_ACTION.PROMPT:
            stomp_local = yield self.stomp_prompt, changed_bufs, missing_bufs, list(new_files), ignored
            if stomp_local not in [0, 1]:
                self.stop()
                return
        elif self.action == utils.JOIN_ACTION.DOWNLOAD:
            stomp_local = True
        else:
            # This should never happen
            assert False
            return
        if stomp_local:
            for buf in changed_bufs:
                self.get_buf(buf['id'], buf.get('view'))
                self.save_on_get_bufs.add(buf['id'])
            for buf in missing_bufs:
                self.get_buf(buf['id'], buf.get('view'))
                self.save_on_get_bufs.add(buf['id'])
        else:
            yield self._initial_upload, ig, missing_bufs, changed_bufs

    success_msg = '%s@%s/%s: Joined!' % (self.username, self.owner, self.workspace)
    msg.log(success_msg)
    editor.status_message(success_msg)

    data = utils.get_persistent_data()
    data['recent_workspaces'].insert(0, {"url": self.workspace_url})
    utils.update_persistent_data(data)
    utils.add_workspace_to_persistent_json(self.owner, self.workspace, self.workspace_url, G.PROJECT_PATH)

    temp_data = data.get('temp_data', {})
    hangout = temp_data.get('hangout', {})
    hangout_url = hangout.get('url')
    if hangout_url:
        self.prompt_join_hangout(hangout_url)

    self.emit("room_info")
def _scan_dir(self, bufs, ig, read_only):
    status_msg = 'Comparing local files against workspace...'
    editor.status_message(status_msg)
    update_status_msg = getattr(self, 'update_status_msg', None)
    if update_status_msg:
        try:
            update_status_msg(self, status_msg)
        except Exception:
            pass
    changed_bufs = []
    missing_bufs = []
    new_files = set()
    if not read_only:
        new_files = set([utils.to_rel_path(x) for x in ig.list_paths()])
    for buf_id, buf in bufs.items():
        buf_id = int(buf_id)  # json keys must be strings
        buf_path = utils.get_full_path(buf['path'])
        view = self.get_view(buf_id)
        if view and not view.is_loading() and buf['encoding'] == 'utf8':
            view_text = view.get_text()
            view_md5 = hashlib.md5(view_text.encode('utf-8')).hexdigest()
            buf['buf'] = view_text
            buf['view'] = view
            G.VIEW_TO_HASH[view.native_id] = view_md5
            if view_md5 == buf['md5']:
                msg.debug('md5 sum matches view. not getting buffer ', buf['path'])
            else:
                changed_bufs.append(buf)
                buf['md5'] = view_md5
            continue
        try:
            if buf['encoding'] == 'utf8':
                if io:
                    buf_fd = io.open(buf_path, 'Urt', encoding='utf8')
                    buf_buf = buf_fd.read()
                else:
                    buf_fd = open(buf_path, 'rb')
                    buf_buf = buf_fd.read().decode('utf-8').replace('\r\n', '\n')
                md5 = hashlib.md5(buf_buf.encode('utf-8')).hexdigest()
            else:
                buf_fd = open(buf_path, 'rb')
                buf_buf = buf_fd.read()
                md5 = hashlib.md5(buf_buf).hexdigest()
            buf_fd.close()
            buf['buf'] = buf_buf
            if md5 == buf['md5']:
                msg.debug('md5 sum matches. not getting buffer ', buf['path'])
            else:
                msg.debug('md5 differs. possibly getting buffer later ', buf['path'])
                changed_bufs.append(buf)
                buf['md5'] = md5
        except Exception as e:
            msg.debug('Error calculating md5 for ', buf['path'], ', ', str_e(e))
            missing_bufs.append(buf)
    editor.status_message('Comparing local files against workspace... done.')
    return changed_bufs, missing_bufs, new_files
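
# Added note (not in the original source): both _scan_dir variants return the triple
# (changed_bufs, missing_bufs, new_files): buffers whose local md5 differs from the
# workspace copy, buffers that could not be read locally, and workspace-relative paths
# that exist on disk but have no buffer yet. The _on_room_info handlers in this file run
# the same comparison inline and use the first two lists to choose between fetching the
# server copy (get_buf) and re-uploading local state (_initial_upload).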
def _on_room_info(self, data):
    self.reset()
    self.joined_workspace = True
    self.workspace_info = data
    G.PERMS = data['perms']
    self.proto.reset_retries()

    if G.OUTBOUND_FILTERING:
        msg.error('Detected outbound port blocking! See https://floobits.com/help/network for more info.')

    read_only = False
    if 'patch' not in data['perms']:
        read_only = True
        no_perms_msg = '''You don't have permission to edit this workspace. All files will be read-only.'''
        msg.log('No patch permission. Setting buffers to read-only')
        if 'request_perm' in data['perms']:
            should_send = yield self.ok_cancel_dialog, no_perms_msg + '\nDo you want to request edit permission?'
            # TODO: wait for perms to be OK'd/denied before uploading or bailing
            if should_send:
                self.send({'name': 'request_perms', 'perms': ['edit_room']})
        else:
            if G.EXPERT_MODE:
                editor.status_message(no_perms_msg)
            else:
                editor.error_message(no_perms_msg)

    floo_json = {
        'url': utils.to_workspace_url({
            'owner': self.owner,
            'workspace': self.workspace,
            'host': self.proto.host,
            'port': self.proto.port,
            'secure': self.proto.secure,
        })
    }
    utils.update_floo_file(os.path.join(G.PROJECT_PATH, '.floo'), floo_json)
    utils.update_recent_workspaces(self.workspace_url)

    changed_bufs = []
    missing_bufs = []
    new_files = set()
    ig = ignore.create_ignore_tree(G.PROJECT_PATH)
    G.IGNORE = ig
    if not read_only:
        new_files = set([utils.to_rel_path(x) for x in ig.list_paths()])

    for buf_id, buf in data['bufs'].items():
        buf_id = int(buf_id)  # json keys must be strings
        buf_path = utils.get_full_path(buf['path'])
        new_dir = os.path.dirname(buf_path)
        utils.mkdir(new_dir)
        self.bufs[buf_id] = buf
        self.paths_to_ids[buf['path']] = buf_id
        view = self.get_view(buf_id)
        if view and not view.is_loading() and buf['encoding'] == 'utf8':
            view_text = view.get_text()
            view_md5 = hashlib.md5(view_text.encode('utf-8')).hexdigest()
            buf['buf'] = view_text
            buf['view'] = view
            G.VIEW_TO_HASH[view.native_id] = view_md5
            if view_md5 == buf['md5']:
                msg.debug('md5 sum matches view. not getting buffer ', buf['path'])
            else:
                changed_bufs.append(buf)
                buf['md5'] = view_md5
            continue
        try:
            if buf['encoding'] == 'utf8':
                if io:
                    buf_fd = io.open(buf_path, 'Urt', encoding='utf8')
                    buf_buf = buf_fd.read()
                else:
                    buf_fd = open(buf_path, 'rb')
                    buf_buf = buf_fd.read().decode('utf-8').replace('\r\n', '\n')
                md5 = hashlib.md5(buf_buf.encode('utf-8')).hexdigest()
            else:
                buf_fd = open(buf_path, 'rb')
                buf_buf = buf_fd.read()
                md5 = hashlib.md5(buf_buf).hexdigest()
            buf_fd.close()
            buf['buf'] = buf_buf
            if md5 == buf['md5']:
                msg.debug('md5 sum matches. not getting buffer ', buf['path'])
            else:
                msg.debug('md5 differs. possibly getting buffer later ', buf['path'])
                changed_bufs.append(buf)
                buf['md5'] = md5
        except Exception as e:
            msg.debug('Error calculating md5 for ', buf['path'], ', ', str_e(e))
            missing_bufs.append(buf)

    ignored = []
    for p, buf_id in self.paths_to_ids.items():
        if p not in new_files:
            ignored.append(p)
        new_files.discard(p)

    if self.action == utils.JOIN_ACTION.UPLOAD:
        yield self._initial_upload, ig, missing_bufs, changed_bufs
        # TODO: maybe use org name here
        who = 'Your friends'
        anon_perms = G.AGENT.workspace_info.get('anon_perms')
        if 'get_buf' in anon_perms:
            who = 'Anyone'
        _msg = 'You are sharing:\n\n%s\n\n%s can join your workspace at:\n\n%s' % (G.PROJECT_PATH, who, G.AGENT.workspace_url)
        # Workaround for horrible Sublime Text bug
        utils.set_timeout(editor.message_dialog, 0, _msg)
    elif changed_bufs or missing_bufs or new_files:
        # TODO: handle readonly here
        if self.action == utils.JOIN_ACTION.PROMPT:
            stomp_local = yield self.stomp_prompt, changed_bufs, missing_bufs, list(new_files), ignored
            if stomp_local not in [0, 1]:
                self.stop()
                return
        elif self.action == utils.JOIN_ACTION.DOWNLOAD:
            stomp_local = True
        else:
            # This should never happen
            assert False
            return
        if stomp_local:
            for buf in changed_bufs:
                self.get_buf(buf['id'], buf.get('view'))
                self.save_on_get_bufs.add(buf['id'])
            for buf in missing_bufs:
                self.get_buf(buf['id'], buf.get('view'))
                self.save_on_get_bufs.add(buf['id'])
        else:
            yield self._initial_upload, ig, missing_bufs, changed_bufs

    success_msg = 'Successfully joined workspace %s/%s' % (self.owner, self.workspace)
    msg.log(success_msg)
    editor.status_message(success_msg)

    data = utils.get_persistent_data()
    data['recent_workspaces'].insert(0, {"url": self.workspace_url})
    utils.update_persistent_data(data)
    utils.add_workspace_to_persistent_json(self.owner, self.workspace, self.workspace_url, G.PROJECT_PATH)

    temp_data = data.get('temp_data', {})
    hangout = temp_data.get('hangout', {})
    hangout_url = hangout.get('url')
    if hangout_url:
        self.prompt_join_hangout(hangout_url)

    self.emit("room_info")
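
# Added note (not in the original source): stomp_prompt is expected to yield 0 or 1.
# A truthy result ("stomp local") fetches the server copy of every changed or missing
# buffer via get_buf and adds it to save_on_get_bufs so it is written to disk on arrival;
# a falsy result keeps local files authoritative and re-uploads them with
# _initial_upload. Any other value aborts the join via self.stop().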