def send_error(description=None, exception=None):
    """Report a plugin error to the Floobits log API.

    Rate-limited: once G.ERRORS_SENT reaches G.MAX_ERROR_REPORTS the report
    is only logged locally and nothing is sent.

    :param description: optional human-readable description of the error.
    :param exception: optional exception object; its traceback is attached.
    :returns: the API response on success, otherwise None.
    """
    G.ERROR_COUNT += 1
    if G.ERRORS_SENT >= G.MAX_ERROR_REPORTS:
        msg.warn('Already sent ', G.ERRORS_SENT, ' errors this session. Not sending any more.\n', description, exception)
        return
    data = {
        'jsondump': {
            'error_count': G.ERROR_COUNT
        },
        'message': {},
        'dir': G.COLAB_DIR,
    }
    if G.AGENT:
        # getattr with a default: the agent may not have all of these set.
        data['owner'] = getattr(G.AGENT, "owner", None)
        data['username'] = getattr(G.AGENT, "username", None)
        data['workspace'] = getattr(G.AGENT, "workspace", None)
    if exception:
        data['message'] = {
            'description': str(exception),
            # BUG FIX: traceback.format_exc() takes an optional frame *limit*,
            # not an exception object — passing the exception raised at runtime.
            # format_exc() formats the exception currently being handled.
            'stack': traceback.format_exc()
        }
        msg.log('Floobits plugin error! Sending exception report: ', data['message'])
    if description:
        data['message']['description'] = description
    try:
        # TODO: use G.AGENT.proto.host?
        api_url = 'https://%s/api/log' % (G.DEFAULT_HOST)
        r = api_request(G.DEFAULT_HOST, api_url, data)
        G.ERRORS_SENT += 1
        return r
    except Exception as e:
        print(e)
def send_error(description=None, exception=None):
    """Report a plugin error to the Floobits log API.

    Rate-limited: once G.ERRORS_SENT reaches G.MAX_ERROR_REPORTS the report
    is dropped with a local warning.

    :param description: optional human-readable description of the error.
    :param exception: optional exception object; its traceback is attached.
    :returns: the API response on success, otherwise None.
    """
    G.ERROR_COUNT += 1
    if G.ERRORS_SENT >= G.MAX_ERROR_REPORTS:
        msg.warn('Already sent %s errors this session. Not sending any more.' % G.ERRORS_SENT)
        return
    data = {
        'jsondump': {
            'error_count': G.ERROR_COUNT
        },
        'message': {},
        'dir': G.COLAB_DIR,
    }
    if G.AGENT:
        # NOTE(review): direct attribute access assumes the agent always has
        # owner/username/workspace — other variants use getattr with a default.
        data['owner'] = G.AGENT.owner
        data['username'] = G.AGENT.username
        data['workspace'] = G.AGENT.workspace
    if exception:
        data['message'] = {
            'description': str(exception),
            # BUG FIX: traceback.format_exc() takes an optional frame *limit*,
            # not an exception object — passing the exception raised at runtime.
            'stack': traceback.format_exc()
        }
        msg.log('Floobits plugin error! Sending exception report: %s' % data['message'])
    if description:
        data['message']['description'] = description
    try:
        # TODO: use G.AGENT.proto.host?
        api_url = 'https://%s/api/log' % (G.DEFAULT_HOST)
        r = api_request(G.DEFAULT_HOST, api_url, data)
        G.ERRORS_SENT += 1
        return r
    except Exception as e:
        print(e)
def on_patch(self, data):
    """Apply a server-sent diff-match-patch payload to the local buffer.

    Verifies md5 checksums before and after patching; on any mismatch or
    dirty patch, re-fetches the whole buffer from the server instead of
    risking divergence.

    :param data: dict with keys 'id', 'patch', 'md5_before', 'md5_after'
                 (presumably the server's patch message — schema not visible
                 here; inferred from the keys read below).
    """
    buf_id = data['id']
    buf = self.FLOO_BUFS[buf_id]
    view = self.get_view(buf_id)
    DMP = dmp.diff_match_patch()
    if len(data['patch']) == 0:
        msg.error('wtf? no patches to apply. server is being stupid')
        return
    dmp_patches = DMP.patch_fromText(data['patch'])
    # TODO: run this in a separate thread
    # Prefer the live view's text; fall back to our cached buffer copy.
    if view:
        old_text = view.get_text()
    else:
        old_text = buf.get('buf', '')
    md5_before = hashlib.md5(old_text.encode('utf-8')).hexdigest()
    if md5_before != data['md5_before']:
        # Vim may strip a trailing newline on save; try compensating once.
        msg.debug('maybe vim is lame and discarded a trailing newline')
        old_text += '\n'
        md5_before = hashlib.md5(old_text.encode('utf-8')).hexdigest()
    if md5_before != data['md5_before']:
        msg.warn('starting md5s don\'t match for %s. ours: %s patch: %s this is dangerous!' % (buf['path'], md5_before, data['md5_before']))
    t = DMP.patch_apply(dmp_patches, old_text)
    # t[0] is the patched text; t[1] is a list of per-patch success booleans.
    clean_patch = True
    for applied_patch in t[1]:
        if not applied_patch:
            clean_patch = False
            break
    if G.DEBUG:
        if len(t[0]) == 0:
            msg.debug('OMG EMPTY!')
            msg.debug('Starting data:', buf['buf'])
            msg.debug('Patch:', data['patch'])
        if '\x01' in t[0]:
            msg.debug('FOUND CRAZY BYTE IN BUFFER')
            msg.debug('Starting data:', buf['buf'])
            msg.debug('Patch:', data['patch'])
    if not clean_patch:
        # At least one hunk failed to apply: resync from the server.
        msg.error('failed to patch %s cleanly. re-fetching buffer' % buf['path'])
        return self.agent.send_get_buf(buf_id)
    cur_hash = hashlib.md5(t[0].encode('utf-8')).hexdigest()
    if cur_hash != data['md5_after']:
        msg.warn(
            '%s new hash %s != expected %s. re-fetching buffer...' % (buf['path'], cur_hash, data['md5_after'])
        )
        return self.agent.send_get_buf(buf_id)
    # Commit the patched text and checksum to our buffer cache.
    buf['buf'] = t[0]
    buf['md5'] = cur_hash
    if not view:
        self.save_buf(buf)
        return
    view.apply_patches(buf, t)
def send_error(description=None, exception=None):
    """Report a plugin error to the Floobits log API (rate-limited per session).

    :param description: optional human-readable description of the error.
    :param exception: optional exception object; its traceback is attached.
    :returns: the API response on success, otherwise None.
    """
    G.ERROR_COUNT += 1
    # BUG FIX: was `>`, which allowed MAX_ERROR_REPORTS + 1 reports per
    # session; `>=` matches the other send_error variants in this project.
    if G.ERRORS_SENT >= G.MAX_ERROR_REPORTS:
        msg.warn('Already sent %s errors this session. Not sending any more.' % G.ERRORS_SENT)
        return
    data = {
        'jsondump': {
            'error_count': G.ERROR_COUNT
        },
        'username': G.USERNAME,
        'dir': G.COLAB_DIR,
    }
    if G.AGENT:
        data['owner'] = G.AGENT.owner
        data['workspace'] = G.AGENT.workspace
    if description:
        data['description'] = description
    if exception:
        data['message'] = {
            'msg': str(exception),
            # BUG FIX: traceback.format_exc() takes an optional frame *limit*,
            # not an exception object — passing the exception raised at runtime.
            'stack': traceback.format_exc()
        }
        msg.log('Floobits plugin error! Sending exception report: %s' % data['message'])
    try:
        api_url = 'https://%s/api/log' % (G.DEFAULT_HOST)
        r = api_request(api_url, data)
        G.ERRORS_SENT += 1
        return r
    except Exception as e:
        print(e)
def on_delete_buf(self, data):
    """Drop the buffer from our state and, if enabled, delete it on disk."""
    # TODO: somehow tell the user about this. maybe delete on disk too?
    buf_id = data["id"]
    del self.FLOO_BUFS[buf_id]
    full_path = utils.get_full_path(data["path"])
    if G.DELETE_LOCAL_FILES:
        utils.rm(full_path)
        requester = data.get("username", "the internet")
        msg.warn("deleted %s because %s told me to." % (full_path, requester))
    else:
        msg.log("Not deleting %s because delete_local_files is disabled" % full_path)
def on_delete_buf(self, data):
    """Forget a buffer; optionally remove the backing file as well."""
    # TODO: somehow tell the user about this. maybe delete on disk too?
    victim = data['id']
    target = utils.get_full_path(data['path'])
    del self.FLOO_BUFS[victim]
    if not G.DELETE_LOCAL_FILES:
        msg.log('Not deleting %s because delete_local_files is disabled' % target)
        return
    utils.rm(target)
    who = data.get('username', 'the internet')
    msg.warn('deleted %s because %s told me to.' % (target, who))
def read_floo_file(path):
    """Read and parse the .floo metadata file inside directory *path*.

    :param path: directory expected to contain a ``.floo`` JSON file.
    :returns: the parsed dict, or {} if the file is missing or unparseable.
    """
    floo_file = os.path.join(path, '.floo')
    info = {}
    try:
        # BUG FIX: the old open().read() leaked the file handle; use a
        # context manager so it is always closed.
        with open(floo_file, 'rb') as fd:
            floo_info = fd.read().decode('utf-8')
        info = json.loads(floo_info)
    except (IOError, OSError):
        # Missing/unreadable file is normal — just return the empty dict.
        pass
    except Exception as e:
        msg.warn('Couldn\'t read .floo file: ', floo_file, ': ', str_e(e))
    return info
def get_persistent_data():
    """Load the persistent plugin data (recent room list) from per_path.

    :returns: the parsed dict, or {} if the file can't be opened or parsed.
    """
    try:
        per = open(per_path, 'rb')
    except (IOError, OSError):
        msg.warn('Failed to open %s. Recent room list will be empty.' % per_path)
        return {}
    try:
        # BUG FIX: close the file handle (old code leaked it) and narrow the
        # bare `except:` — it also swallowed KeyboardInterrupt/SystemExit.
        with per:
            persistent_data = json.loads(per.read())
    except Exception:
        msg.warn('Failed to parse %s. Recent room list will be empty.' % per_path)
        return {}
    return persistent_data
def read_floo_file(path):
    """Read and parse the .floo metadata file inside directory *path*.

    :param path: directory expected to contain a ``.floo`` JSON file.
    :returns: the parsed dict, or {} if the file is missing or unparseable.
    """
    floo_file = os.path.join(path, ".floo")
    info = {}
    try:
        # BUG FIX: the old open().read() leaked the file handle; use a
        # context manager so it is always closed.
        with open(floo_file, "rb") as fd:
            floo_info = fd.read().decode("utf-8")
        info = json.loads(floo_info)
    except (IOError, OSError):
        # A missing/unreadable file is the common case — return {} silently.
        pass
    except Exception as e:
        msg.warn("Couldn't read .floo file: ", floo_file, ": ", str_e(e))
    return info
def update_floo_file(path, data):
    """Merge *data* into the JSON stored in the .floo file at *path*.

    If the file is missing/unparseable (or its content isn't a dict),
    *data* replaces the content entirely. Write failures are logged.

    :param path: full path to the ``.floo`` file.
    :param data: dict of keys to merge in.
    """
    floo_json = None
    try:
        # BUG FIX: close the read handle (old open().read() leaked it).
        with open(path, 'r') as fd:
            floo_json = json.loads(fd.read())
    except Exception:
        pass
    try:
        floo_json.update(data)
    except Exception:
        # Unreadable or non-dict content: start fresh from *data*.
        floo_json = data
    try:
        with open(path, 'w') as floo_fd:
            floo_fd.write(json.dumps(floo_json, indent=4, sort_keys=True, separators=(',', ': ')))
    except Exception as e:
        msg.warn('Couldn\'t update .floo file: ', floo_json, ': ', str_e(e))
def update_floo_file(path, data):
    """Merge *data* into the JSON stored in the .floo file at *path*.

    Falls back to writing *data* alone when the existing file is missing,
    unparseable, or not a dict. Write failures are logged, not raised.

    :param path: full path to the ``.floo`` file.
    :param data: dict of keys to merge in.
    """
    floo_json = None
    try:
        # BUG FIX: close the read handle (old open().read() leaked it).
        with open(path, "r") as fd:
            floo_json = json.loads(fd.read())
    except Exception:
        pass
    try:
        floo_json.update(data)
    except Exception:
        # No usable existing content: start fresh from *data*.
        floo_json = data
    try:
        with open(path, "w") as floo_fd:
            floo_fd.write(json.dumps(floo_json, indent=4, sort_keys=True, separators=(",", ": ")))
    except Exception as e:
        msg.warn("Couldn't update .floo file: ", floo_json, ": ", str_e(e))
def send_error(description=None, exception=None):
    """Send an error report to the Floobits API, capped per session."""
    G.ERROR_COUNT += 1
    data = {
        'jsondump': {
            'error_count': G.ERROR_COUNT
        },
        'message': {},
        'dir': G.COLAB_DIR,
    }
    stack = ''
    if G.AGENT:
        for field in ('owner', 'username', 'workspace'):
            data[field] = getattr(G.AGENT, field, None)
    if exception:
        exc_info = sys.exc_info()
        try:
            stack = traceback.format_exception(*exc_info)
        except Exception:
            stack = 'No sys.exc_info()' if exc_info[0] is None else "Python is rtardd"
        try:
            description = str(exception)
        except Exception:
            description = "Python is rtadd"
        data['message'] = {
            'description': description,
            'stack': stack,
        }
        msg.log('Floobits plugin error! Sending exception report: ', data['message'])
    if description:
        data['message']['description'] = description
    # Rate limit: only log locally once we've hit the per-session cap.
    if G.ERRORS_SENT >= G.MAX_ERROR_REPORTS:
        msg.warn('Already sent ', G.ERRORS_SENT, ' errors this session. Not sending any more.\n', description, exception, stack)
        return
    try:
        # TODO: use G.AGENT.proto.host?
        api_url = 'https://%s/api/log' % (G.DEFAULT_HOST)
        r = api_request(G.DEFAULT_HOST, api_url, data)
        G.ERRORS_SENT += 1
        return r
    except Exception as e:
        print(e)
def api_request(host, url, data=None, method=None):
    """Hit a Floobits API endpoint; fall back to the system-python proxy.

    Defaults the HTTP method to POST when a body is supplied, GET otherwise.
    """
    method = method or ('POST' if data else 'GET')
    # No usable ssl module in this interpreter: delegate immediately.
    if ssl is False:
        return proxy_api_request(host, url, data, method)
    try:
        result = hit_url(host, url, data, method)
    except HTTPError as e:
        result = e
    except URLError as e:
        msg.warn('Error hitting url ', url, ': ', e)
        result = e
        if not PY2:
            msg.warn('Retrying using system python...')
            return proxy_api_request(host, url, data, method)
    return APIResponse(result)
def api_request(host, url, data=None, method=None):
    """Issue an HTTP request to a Floobits API endpoint.

    Uses the in-process ssl stack when available, otherwise (or after a
    URLError on Python 3) shells out to the system python proxy.
    """
    if data:
        method = method or "POST"
    else:
        method = method or "GET"
    if ssl is False:
        # ssl failed to import; the external process has a working stack.
        return proxy_api_request(host, url, data, method)
    try:
        resp = hit_url(host, url, data, method)
    except HTTPError as http_err:
        resp = http_err
    except URLError as url_err:
        msg.warn("Error hitting url ", url, ": ", url_err)
        resp = url_err
        if not PY2:
            msg.warn("Retrying using system python...")
            return proxy_api_request(host, url, data, method)
    return APIResponse(resp)
def send_error(description=None, exception=None):
    """Report a plugin error to the Floobits log endpoint.

    Honors the per-session cap G.MAX_ERROR_REPORTS; beyond it, errors are
    only logged locally.
    """
    G.ERROR_COUNT += 1
    report = {"jsondump": {"error_count": G.ERROR_COUNT}, "message": {}, "dir": G.COLAB_DIR}
    stack = ""
    agent = G.AGENT
    if agent:
        report["owner"] = getattr(agent, "owner", None)
        report["username"] = getattr(agent, "username", None)
        report["workspace"] = getattr(agent, "workspace", None)
    if exception:
        exc_info = sys.exc_info()
        try:
            stack = traceback.format_exception(*exc_info)
        except Exception:
            if exc_info[0] is None:
                stack = "No sys.exc_info()"
            else:
                stack = "Python is rtardd"
        try:
            description = str(exception)
        except Exception:
            description = "Python is rtadd"
        report["message"] = {"description": description, "stack": stack}
        msg.log("Floobits plugin error! Sending exception report: ", report["message"])
    if description:
        report["message"]["description"] = description
    if G.ERRORS_SENT >= G.MAX_ERROR_REPORTS:
        msg.warn(
            "Already sent ",
            G.ERRORS_SENT,
            " errors this session. Not sending any more.\n",
            description,
            exception,
            stack,
        )
        return
    try:
        # TODO: use G.AGENT.proto.host?
        api_url = "https://%s/api/log" % (G.DEFAULT_HOST)
        response = api_request(G.DEFAULT_HOST, api_url, report)
        G.ERRORS_SENT += 1
        return response
    except Exception as e:
        print(e)
def update_floo_file(path, data):
    """Merge *data* into the JSON stored in the .floo file at *path*.

    If the file is missing, unparseable, or not a dict, *data* becomes the
    whole content. Failures while writing are logged rather than raised.

    :param path: full path to the ``.floo`` file.
    :param data: dict of keys to merge in.
    """
    floo_json = None
    try:
        # BUG FIX: close the read handle (old open().read() leaked it).
        with open(path, 'r') as fd:
            floo_json = json.loads(fd.read())
    except Exception:
        pass
    try:
        floo_json.update(data)
    except Exception:
        # No usable existing content: start fresh from *data*.
        floo_json = data
    try:
        with open(path, 'w') as floo_fd:
            floo_fd.write(
                json.dumps(floo_json, indent=4, sort_keys=True, separators=(',', ': ')))
    except Exception as e:
        msg.warn('Couldn\'t update .floo file: ', floo_json, ': ', str_e(e))
def on_delete_buf(self, data):
    """Forget the buffer and remove the corresponding file from disk."""
    # TODO: somehow tell the user about this. maybe delete on disk too?
    deleted_id = data['id']
    del self.FLOO_BUFS[deleted_id]
    target = utils.get_full_path(data['path'])
    utils.rm(target)
    requester = data.get('username', 'the internet')
    msg.warn('deleted %s because %s told me to.' % (target, requester))
def apply_patch(patch_data):
    """Apply a server-sent diff-match-patch payload to a Sublime Text buffer.

    Verifies md5 checksums around the patch; on a dirty patch or an
    after-hash mismatch, re-fetches the buffer from the server. When a live
    view exists, replaces the patched regions in the editor, shifts the
    user's selections past each edit, and briefly highlights the changes.

    :param patch_data: dict with keys 'id', 'patch', 'md5_before',
                       'md5_after', 'username' (inferred from the keys read
                       below; schema not visible here).
    """
    buf_id = patch_data['id']
    buf = BUFS[buf_id]
    view = get_view(buf_id)
    DMP = dmp.diff_match_patch()
    if len(patch_data['patch']) == 0:
        msg.error('wtf? no patches to apply. server is being stupid')
        return
    msg.debug('patch is', patch_data['patch'])
    dmp_patches = DMP.patch_fromText(patch_data['patch'])
    # TODO: run this in a separate thread
    # Prefer the live view's text; fall back to our cached buffer copy.
    if view:
        old_text = get_text(view)
    else:
        old_text = buf.get('buf', '')
    md5_before = hashlib.md5(old_text.encode('utf-8')).hexdigest()
    if md5_before != patch_data['md5_before']:
        msg.warn('starting md5s don\'t match for %s. this is dangerous!' % buf['path'])
    t = DMP.patch_apply(dmp_patches, old_text)
    # t[0]: patched text; t[1]: per-patch success booleans; t[2] appears to
    # hold (offset, length, text) triples — see the unpacking loop below.
    clean_patch = True
    for applied_patch in t[1]:
        if not applied_patch:
            clean_patch = False
            break
    if G.DEBUG:
        if len(t[0]) == 0:
            msg.debug('OMG EMPTY!')
            msg.debug('Starting data:', buf['buf'])
            msg.debug('Patch:', patch_data['patch'])
        if '\x01' in t[0]:
            msg.debug('FOUND CRAZY BYTE IN BUFFER')
            msg.debug('Starting data:', buf['buf'])
            msg.debug('Patch:', patch_data['patch'])
    if not clean_patch:
        # At least one hunk failed to apply: resync from the server.
        msg.error('failed to patch %s cleanly. re-fetching buffer' % buf['path'])
        return Listener.get_buf(buf_id)
    cur_hash = hashlib.md5(t[0].encode('utf-8')).hexdigest()
    if cur_hash != patch_data['md5_after']:
        msg.warn(
            '%s new hash %s != expected %s. re-fetching buffer...'
            % (buf['path'], cur_hash, patch_data['md5_after'])
        )
        return Listener.get_buf(buf_id)
    # Commit the patched text and checksum to our buffer cache.
    buf['buf'] = t[0]
    buf['md5'] = cur_hash
    if not view:
        save_buf(buf)
        return
    selections = [x for x in view.sel()]  # deep copy
    regions = []
    for patch in t[2]:
        offset = patch[0]
        length = patch[1]
        patch_text = patch[2]
        region = sublime.Region(offset, offset + length)
        regions.append(region)
        # Mark this modification as ours so the change listener ignores it.
        MODIFIED_EVENTS.put(1)
        view.run_command('floo_view_replace_region', {'r': [offset, offset + length], 'data': patch_text})
        # Shift every selection endpoint that sits after the edited region
        # by the net size change of this patch.
        new_sels = []
        for sel in selections:
            a = sel.a
            b = sel.b
            new_offset = len(patch_text) - length
            if sel.a > offset:
                a += new_offset
            if sel.b > offset:
                b += new_offset
            new_sels.append(sublime.Region(a, b))
        selections = [x for x in new_sels]
    SELECTED_EVENTS.put(1)
    view.sel().clear()
    # Briefly outline the patched regions, tagged by who made the change.
    region_key = 'floobits-patch-' + patch_data['username']
    view.add_regions(region_key, regions, 'floobits.patch', 'circle', sublime.DRAW_OUTLINED)
    utils.set_timeout(view.erase_regions, 1000, region_key)
    # Restore the (shifted) selections, flagging each as self-generated.
    for sel in selections:
        SELECTED_EVENTS.put(1)
        view.sel().add(sel)
    now = datetime.now()
    view.set_status('Floobits', 'Changed by %s at %s' % (patch_data['username'], now.strftime('%H:%M')))
def on_patch(self, data):
    """Apply a server-sent diff-match-patch payload to the local buffer.

    Verifies md5 checksums before and after patching, compensating once for
    a trailing newline Vim may have stripped; on a dirty patch or an
    after-hash mismatch, re-fetches the buffer from the server.

    :param data: dict with keys 'id', 'patch', 'md5_before', 'md5_after'
                 (inferred from the keys read below; schema not visible here).
    """
    added_newline = False
    buf_id = data["id"]
    buf = self.FLOO_BUFS[buf_id]
    view = self.get_view(buf_id)
    DMP = dmp.diff_match_patch()
    if len(data["patch"]) == 0:
        msg.error("wtf? no patches to apply. server is being stupid")
        return
    dmp_patches = DMP.patch_fromText(data["patch"])
    # TODO: run this in a separate thread
    # Prefer the live view's text; fall back to our cached buffer copy.
    if view:
        old_text = view.get_text()
    else:
        old_text = buf.get("buf", "")
    md5_before = hashlib.md5(old_text.encode("utf-8")).hexdigest()
    if md5_before != data["md5_before"]:
        # Vim may strip a trailing newline on save; try compensating once.
        msg.debug("maybe vim is lame and discarded a trailing newline")
        old_text += "\n"
        added_newline = True
        md5_before = hashlib.md5(old_text.encode("utf-8")).hexdigest()
    if md5_before != data["md5_before"]:
        msg.warn(
            "starting md5s don't match for %s. ours: %s patch: %s this is dangerous!"
            % (buf["path"], md5_before, data["md5_before"])
        )
        if added_newline:
            # The newline theory didn't pan out — undo the compensation.
            old_text = old_text[:-1]
            md5_before = hashlib.md5(old_text.encode("utf-8")).hexdigest()
    t = DMP.patch_apply(dmp_patches, old_text)
    # t[0] is the patched text; t[1] is a list of per-patch success booleans.
    clean_patch = True
    for applied_patch in t[1]:
        if not applied_patch:
            clean_patch = False
            break
    if G.DEBUG:
        if len(t[0]) == 0:
            msg.debug("OMG EMPTY!")
            msg.debug("Starting data:", buf["buf"])
            msg.debug("Patch:", data["patch"])
        if "\x01" in t[0]:
            msg.debug("FOUND CRAZY BYTE IN BUFFER")
            msg.debug("Starting data:", buf["buf"])
            msg.debug("Patch:", data["patch"])
    if not clean_patch:
        # At least one hunk failed to apply: resync from the server.
        msg.error("failed to patch %s cleanly. re-fetching buffer" % buf["path"])
        return self.agent.send_get_buf(buf_id)
    cur_hash = hashlib.md5(t[0].encode("utf-8")).hexdigest()
    if cur_hash != data["md5_after"]:
        msg.warn(
            "%s new hash %s != expected %s. re-fetching buffer..."
            % (buf["path"], cur_hash, data["md5_after"])
        )
        return self.agent.send_get_buf(buf_id)
    # Commit the patched text and checksum to our buffer cache.
    buf["buf"] = t[0]
    buf["md5"] = cur_hash
    if not view:
        self.save_buf(buf)
        return
    view.apply_patches(buf, t)