def recurse(self, root):
    """Walk self.path recursively, honoring ignore rules rooted at *root*.

    Loads any ignore files found in this directory, then classifies each
    entry: blacklisted names are skipped, whitelisted hidden regular files
    are tracked without counting toward size, ignored paths are dropped,
    directories recurse into child Ignore objects, and regular files are
    either size-capped into the '/TOO_BIG/' bucket or recorded with their
    sizes accumulated into self.size / self.total_size.
    """
    try:
        paths = os.listdir(self.path)
    except OSError as e:
        # ENOTDIR just means self.path is a file; only report other errors.
        if e.errno != errno.ENOTDIR:
            msg.error('Error listing path ', self.path, ': ', str_e(e))
        return
    except Exception as e:
        msg.error('Error listing path ', self.path, ': ', str_e(e))
        return
    msg.debug('Initializing ignores for ', self.path)
    for ignore_file in IGNORE_FILES:
        # Missing/unreadable ignore files are simply skipped.
        try:
            self.load(ignore_file)
        except Exception:
            pass
    for p in paths:
        if p == '.' or p == '..':
            continue
        if p in BLACKLIST:
            msg.log('Ignoring blacklisted file ', p)
            continue
        p_path = os.path.join(self.path, p)
        try:
            s = os.stat(p_path)
        except Exception as e:
            msg.error('Error stat()ing path ', p_path, ': ', str_e(e))
            continue
        if stat.S_ISREG(s.st_mode) and p in HIDDEN_WHITELIST:
            # Don't count these whitelisted files in size
            self.files.append(p_path)
            continue
        is_dir = stat.S_ISDIR(s.st_mode)
        if root.is_ignored(p_path, is_dir, True):
            continue
        if is_dir:
            # Child directory gets its own Ignore node; sizes roll up.
            ig = Ignore(p_path, self)
            self.children[p] = ig
            ig.recurse(root)
            self.total_size += ig.total_size
            continue
        if stat.S_ISREG(s.st_mode):
            if s.st_size > (MAX_FILE_SIZE):
                # Oversized files are recorded as ignored, not shared.
                self.ignores['/TOO_BIG/'].append(p)
                msg.log(self.is_ignored_message(p_path, p, '/TOO_BIG/', False))
            else:
                self.size += s.st_size
                self.total_size += s.st_size
                self.files.append(p_path)
def recurse(self, root):
    """Scan this directory tree, applying ignore rules and tallying sizes."""
    try:
        entries = os.listdir(self.path)
    except OSError as e:
        # A plain file (ENOTDIR) is expected and not worth reporting.
        if e.errno != errno.ENOTDIR:
            msg.error('Error listing path ', self.path, ': ', str_e(e))
        return
    except Exception as e:
        msg.error('Error listing path ', self.path, ': ', str_e(e))
        return

    msg.debug('Initializing ignores for ', self.path)
    for ignore_file in IGNORE_FILES:
        # Best-effort: a missing or broken ignore file is silently skipped.
        try:
            self.load(ignore_file)
        except Exception:
            pass

    for name in entries:
        if name == '.' or name == '..':
            continue
        if name in BLACKLIST:
            msg.log('Ignoring blacklisted file ', name)
            continue
        full_path = os.path.join(self.path, name)
        try:
            st = os.stat(full_path)
        except Exception as e:
            msg.error('Error stat()ing path ', full_path, ': ', str_e(e))
            continue
        is_regular = stat.S_ISREG(st.st_mode)
        if is_regular and name in HIDDEN_WHITELIST:
            # Whitelisted hidden files are tracked but excluded from sizes.
            self.files.append(full_path)
            continue
        is_dir = stat.S_ISDIR(st.st_mode)
        if root.is_ignored(full_path, is_dir, True):
            continue
        if is_dir:
            # Recurse into a child node and roll its total into ours.
            child = Ignore(full_path, self)
            self.children[name] = child
            child.recurse(root)
            self.total_size += child.total_size
        elif is_regular:
            if st.st_size > MAX_FILE_SIZE:
                # Too large to share: bucket under /TOO_BIG/ and log why.
                self.ignores['/TOO_BIG/'].append(name)
                msg.log(self.is_ignored_message(full_path, name, '/TOO_BIG/', False))
            else:
                self.size += st.st_size
                self.total_size += st.st_size
                self.files.append(full_path)
def mkdir(path):
    """Create *path* (including parents), tolerating an existing directory.

    Raises OSError (after showing an editor error message) when creation
    fails for any reason other than the directory already existing. Unlike
    the previous version, an EEXIST caused by a regular file squatting on
    the path is treated as a real failure instead of being swallowed.
    """
    try:
        os.makedirs(path)
    except OSError as e:
        # EEXIST for an actual directory is fine; anything else is an error.
        if e.errno == errno.EEXIST and os.path.isdir(path):
            return
        editor.error_message('Cannot create directory {0}.\n{1}'.format(path, str_e(e)))
        raise
def mkdir(path):
    """Create *path* and any missing parent directories.

    EEXIST is silently swallowed; any other OSError is surfaced to the
    editor and re-raised.
    NOTE(review): EEXIST is also swallowed when the path exists as a regular
    file rather than a directory, which masks a real failure -- confirm
    callers tolerate that.
    """
    try:
        os.makedirs(path)
    except OSError as e:
        if e.errno != errno.EEXIST:
            editor.error_message("Cannot create directory {0}.\n{1}".format(path, str_e(e)))
            raise
def get_info(workspace_url, project_dir):
    """Detect the VCS repo in *project_dir* and return {'type': ..., 'url': ...}.

    Returns None when no repo type is detected or the repo URL cannot be
    determined. workspace_url is accepted but not used in this body.
    """
    repo_type = detect_type(project_dir)
    if not repo_type:
        return
    msg.debug('Detected ', repo_type, ' repo in ', project_dir)
    data = {
        'type': repo_type,
    }
    cmd = REPO_MAPPING[repo_type]['cmd']
    try:
        p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=project_dir)
        result = p.communicate()
        repo_url = result[0].decode('utf-8').strip()
        if repo_type == 'svn':
            # svn emits XML; extract the URL from it.
            repo_url = parse_svn_xml(repo_url)
        msg.log(repo_type, ' url is ', repo_url)
        if not repo_url:
            # result[1] is the raw stderr bytes from the VCS command.
            msg.error('Error getting ', repo_type, ' url:', result[1])
            return
    except Exception as e:
        msg.error('Error getting ', repo_type, ' url:', str_e(e))
        return
    data['url'] = repo_url
    return data
def create_flooignore(path):
    """Write a default .flooignore into *path* unless one already exists."""
    flooignore = os.path.join(path, '.flooignore')
    # A very short race condition, but whatever.
    if os.path.exists(flooignore):
        return
    try:
        with open(flooignore, 'w') as fd:
            fd.write('\n'.join(DEFAULT_IGNORES))
    except Exception as e:
        # Varargs form keeps this consistent with the module's other
        # msg.error calls (the old '%s' % formatting was the odd one out).
        msg.error('Error creating default .flooignore: ', str_e(e))
def is_ignored(self, path, is_dir=None, log=False):
    """Return True when *path* matches the ignore rules rooted here.

    When is_dir is None it is determined via os.stat(); if stat fails the
    path is conservatively treated as ignored (True).
    """
    if is_dir is None:
        try:
            s = os.stat(path)
        except Exception as e:
            # Fixed message: the call here is os.stat(), not lstat().
            msg.error('Error stat()ing path ', path, ': ', str_e(e))
            return True
        is_dir = stat.S_ISDIR(s.st_mode)
    rel_path = os.path.relpath(path, self.path).replace(os.sep, '/')
    return self._is_ignored(rel_path, is_dir, log)
def is_ignored(self, path, is_dir=None, log=False):
    """Return True when *path* matches the ignore rules rooted here.

    When is_dir is None it is determined via os.stat(); stat failures are
    treated as ignored (True).
    """
    if is_dir is None:
        try:
            s = os.stat(path)
        except Exception as e:
            # Fixed message: the call here is os.stat(), not lstat().
            msg.error('Error stat()ing path %s: %s' % (path, str_e(e)))
            return True
        is_dir = stat.S_ISDIR(s.st_mode)
    rel_path = os.path.relpath(path, self.path).replace(os.sep, '/')
    return self._is_ignored(rel_path, is_dir, log)
def create_flooignore(path):
    """Drop a default .flooignore file into *path* when none exists yet."""
    target = os.path.join(path, '.flooignore')
    # A very short race condition, but whatever.
    if os.path.exists(target):
        return
    try:
        with open(target, 'w') as fd:
            fd.write('\n'.join(DEFAULT_IGNORES))
    except Exception as e:
        msg.error('Error creating default .flooignore: ', str_e(e))
def __init__(self, current, buf):
    """Capture before/after text for *buf* and precompute both md5 digests.

    current is the new buffer contents; buf["buf"] holds the previous
    contents. base64 (binary) buffers are hashed as-is; otherwise values
    are hashed as utf-8, falling back to hashing the raw object when the
    declared encoding disagrees with the actual value.
    """
    self.buf = buf
    self.current = current
    self.previous = buf["buf"]
    if buf["encoding"] == "base64":
        self.md5_before = hashlib.md5(self.previous).hexdigest()
        self.md5_after = hashlib.md5(self.current).hexdigest()
    else:
        try:
            self.md5_before = hashlib.md5(self.previous.encode("utf-8")).hexdigest()
        except Exception as e:
            # Horrible fallback if for some reason encoding doesn't agree with actual object
            self.md5_before = hashlib.md5(self.previous).hexdigest()
            msg.log("Error calculating md5_before for ", str(self), ": ", str_e(e))
        try:
            self.md5_after = hashlib.md5(self.current.encode("utf-8")).hexdigest()
        except Exception as e:
            # Horrible fallback if for some reason encoding doesn't agree with actual object
            self.md5_after = hashlib.md5(self.current).hexdigest()
            msg.log("Error calculating md5_after for ", str(self), ": ", str_e(e))
def __init__(self, current, buf):
    """Record the new and previous buffer text and md5 both versions."""
    self.buf = buf
    self.current = current
    self.previous = buf['buf']

    def digest(text, which):
        # Hash as utf-8 text; if the declared encoding disagrees with the
        # actual object, fall back to hashing the raw value and log it.
        try:
            return hashlib.md5(text.encode('utf-8')).hexdigest()
        except Exception as e:
            fallback = hashlib.md5(text).hexdigest()
            msg.log('Error calculating ' + which + ' for ', str(self), ': ', str_e(e))
            return fallback

    if buf['encoding'] == 'base64':
        # Binary buffers: hash the raw bytes directly.
        self.md5_before = hashlib.md5(self.previous).hexdigest()
        self.md5_after = hashlib.md5(self.current).hexdigest()
    else:
        self.md5_before = digest(self.previous, 'md5_before')
        self.md5_after = digest(self.current, 'md5_after')
def read_floo_file(path):
    """Read and parse the .floo JSON file in *path*; return {} on any failure.

    Missing or unopenable files are treated as normal and ignored quietly;
    other failures (bad JSON, undecodable bytes) are logged as warnings.
    """
    floo_file = os.path.join(path, '.floo')
    info = {}
    try:
        # Context manager closes the handle deterministically (the old bare
        # open().read() leaked the file object).
        with open(floo_file, 'rb') as fd:
            floo_info = fd.read().decode('utf-8')
        info = json.loads(floo_info)
    except (IOError, OSError):
        pass
    except Exception as e:
        msg.warn('Couldn\'t read .floo file: ', floo_file, ': ', str_e(e))
    return info
def read_floo_file(path):
    """Return the parsed contents of path/.floo, or {} when unreadable.

    Missing/unopenable files are silently ignored; other failures (bad
    JSON, undecodable bytes) are logged as warnings.
    NOTE(review): the file handle from open() is never closed explicitly --
    relies on GC; a with-block would be safer.
    """
    floo_file = os.path.join(path, ".floo")
    info = {}
    try:
        floo_info = open(floo_file, "rb").read().decode("utf-8")
        info = json.loads(floo_info)
    except (IOError, OSError):
        pass
    except Exception as e:
        msg.warn("Couldn't read .floo file: ", floo_file, ": ", str_e(e))
    return info
def prejoin_workspace(workspace_url, dir_to_share, api_args):
    """Validate *workspace_url*, sync anonymous perms, and remember the mapping.

    Returns the parsed-URL dict on success, or False when the URL is
    invalid, the workspace lookup fails, or the workspace is gone (HTTP
    status >= 400, in which case it is also purged from persistent data).
    """
    try:
        result = utils.parse_url(workspace_url)
    except Exception as e:
        msg.error(str_e(e))
        return False
    try:
        w = get_workspace_by_url(workspace_url)
    except Exception as e:
        editor.error_message('Error opening url %s: %s' % (workspace_url, str_e(e)))
        return False
    if w.code >= 400:
        # Dead/forbidden workspace: scrub it from the persistent
        # recent/workspace lists so it stops reappearing.
        try:
            d = utils.get_persistent_data()
            try:
                del d['workspaces'][result['owner']][result['name']]
            except Exception:
                pass
            try:
                del d['recent_workspaces'][workspace_url]
            except Exception:
                pass
            utils.update_persistent_data(d)
        except Exception as e:
            msg.debug(str_e(e))
        return False
    # NOTE(review): the '%s' below is passed as a literal argument, not
    # interpolated -- looks unintended; confirm against msg.debug semantics.
    msg.debug('workspace: %s', json.dumps(w.body))
    anon_perms = w.body.get('perms', {}).get('AnonymousUser', [])
    msg.debug('api args: %s' % api_args)
    new_anon_perms = api_args.get('perms', {}).get('AnonymousUser', [])
    # TODO: prompt/alert user if going from private to public
    if set(anon_perms) != set(new_anon_perms):
        msg.debug(str(anon_perms), str(new_anon_perms))
        w.body['perms']['AnonymousUser'] = new_anon_perms
        response = update_workspace(workspace_url, w.body)
        msg.debug(str(response.body))
    utils.add_workspace_to_persistent_json(w.body['owner'], w.body['name'], workspace_url, dir_to_share)
    return result
def get_git_excludesfile():
    """Return the real path of git's core.excludesfile setting, or None."""
    global_ignore = None
    try:
        # Each argv element must be its own token: the old single string
        # 'config -z --get core.excludesfile' reached git as one bogus
        # subcommand and the lookup always failed.
        p = subprocess.Popen(['git', 'config', '-z', '--get', 'core.excludesfile'],
                             stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        result = p.communicate()
        global_ignore = result[0]
        if not global_ignore:
            return
        # With -z the value is NUL-terminated; strip it before expansion.
        global_ignore = global_ignore.decode('utf-8').rstrip('\0\n')
        global_ignore = os.path.realpath(os.path.expanduser(global_ignore))
        msg.log('git core.excludesfile is ', global_ignore)
    except Exception as e:
        msg.error('Error getting git core.excludesfile:', str_e(e))
    return global_ignore
def get_git_excludesfile():
    """Look up git's global core.excludesfile and return its real path (or None)."""
    global_ignore = None
    try:
        # Two fixes: argv must be tokenized (the combined string was passed
        # to git as one argument), and the bytes output must be decoded --
        # str(b'x') yields "b'x'" on Python 3, corrupting the path.
        p = subprocess.Popen(['git', 'config', '-z', '--get', 'core.excludesfile'],
                             stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        result = p.communicate()
        global_ignore = result[0]
        if not global_ignore:
            return
        # -z NUL-terminates the value; strip before path expansion.
        decoded = global_ignore.decode('utf-8').rstrip('\0\n')
        global_ignore = os.path.realpath(os.path.expanduser(decoded))
        msg.log('git core.excludesfile is ', global_ignore)
    except Exception as e:
        msg.error('Error getting git core.excludesfile:', str_e(e))
    return global_ignore
def floobits_log(msg):
    """Append *msg* (utf-8 encoded when possible) plus a newline to LOG_FILE.

    All failures are swallowed and reported via safe_print so logging can
    never crash the caller.
    """
    # TODO: ridiculously inefficient
    try:
        fmsg = msg
        # Best-effort encode; already-bytes (or unencodable) values pass through.
        try:
            fmsg = fmsg.encode('utf-8')
        except Exception:
            pass
        # with-block closes the handle even if a write raises (the manual
        # open/close pair leaked the fd on error).
        with open(LOG_FILE, 'ab') as fd:
            fd.write(fmsg)
            fd.write(b'\n')
    except Exception as e:
        safe_print(str_e(e))
def update_persistent_data(data):
    """De-duplicate recent workspaces by url and write *data* to persistent.json."""
    deduped = []
    urls_seen = set()
    for workspace in data['recent_workspaces']:
        try:
            url = workspace['url']
            if url not in urls_seen:
                urls_seen.add(url)
                deduped.append(workspace)
        except Exception as e:
            # Entries without a usable 'url' are dropped, but logged.
            msg.debug(str_e(e))
    data['recent_workspaces'] = deduped
    target = os.path.join(G.BASE_DIR, 'persistent.json')
    with open(target, 'wb') as fd:
        fd.write(json.dumps(data, indent=2).encode('utf-8'))
def update_floo_file(path, data):
    """Merge *data* into the JSON .floo file at *path*, creating it if needed.

    An unreadable, missing, or non-dict file is replaced with just *data*.
    Write failures are logged as warnings rather than raised.
    """
    floo_json = None
    try:
        # Close the read handle deterministically (the old bare
        # open().read() leaked it).
        with open(path, 'r') as fd:
            floo_json = json.loads(fd.read())
    except Exception:
        pass
    try:
        floo_json.update(data)
    except Exception:
        # floo_json is None (or not a dict): start over with just *data*.
        # The old code reached this branch via NameError on an unbound name.
        floo_json = data
    try:
        with open(path, 'w') as floo_fd:
            floo_fd.write(json.dumps(floo_json, indent=4, sort_keys=True, separators=(',', ': ')))
    except Exception as e:
        msg.warn('Couldn\'t update .floo file: ', floo_json, ': ', str_e(e))
def update_floo_file(path, data):
    """Merge *data* into the JSON .floo file at *path* and rewrite it.

    A missing or unparseable file is replaced with just *data*. Write
    failures are logged as warnings rather than raised.
    NOTE(review): the read handle from open() is never closed, and when the
    read fails floo_json is left unbound -- the NameError from .update() is
    what routes control into the floo_json = data fallback.
    """
    try:
        floo_json = json.loads(open(path, "r").read())
    except Exception:
        pass
    try:
        floo_json.update(data)
    except Exception:
        floo_json = data
    try:
        with open(path, "w") as floo_fd:
            floo_fd.write(json.dumps(floo_json, indent=4, sort_keys=True, separators=(",", ": ")))
    except Exception as e:
        msg.warn("Couldn't update .floo file: ", floo_json, ": ", str_e(e))
def update_persistent_data(data):
    """Write *data* to G.BASE_DIR/persistent.json, de-duping recent workspaces.

    Duplicate entries (same "url") keep only their first occurrence;
    entries without a "url" key are dropped and logged at debug level.
    """
    seen = set()
    recent_workspaces = []
    for x in data["recent_workspaces"]:
        try:
            if x["url"] in seen:
                continue
            seen.add(x["url"])
            recent_workspaces.append(x)
        except Exception as e:
            msg.debug(str_e(e))
    data["recent_workspaces"] = recent_workspaces
    per_path = os.path.join(G.BASE_DIR, "persistent.json")
    with open(per_path, "wb") as per:
        per.write(json.dumps(data, indent=2).encode("utf-8"))
def save_buf(buf):
    """Write buffer *buf* to disk at its workspace-relative path.

    utf8 buffers are re-joined with the file's existing line endings (or
    the editor's preference) before encoding; other encodings are written
    as raw bytes. Errors are logged, not raised.
    """
    path = get_full_path(buf["path"])
    mkdir(os.path.split(path)[0])
    if buf["encoding"] == "utf8":
        # Preserve whatever newline style the on-disk file already uses.
        newline = get_line_endings(path) or editor.get_line_endings(path)
    try:
        with open(path, "wb") as fd:
            if buf["encoding"] == "utf8":
                out = buf["buf"]
                if newline != "\n":
                    out = out.split("\n")
                    out = newline.join(out)
                fd.write(out.encode("utf-8"))
            else:
                fd.write(buf["buf"])
    except Exception as e:
        msg.error("Error saving buf: ", str_e(e))
def save_buf(buf):
    """Persist *buf* to its full path, honoring existing line endings for text."""
    path = get_full_path(buf['path'])
    mkdir(os.path.split(path)[0])
    is_text = buf['encoding'] == 'utf8'
    if is_text:
        # Keep the newline style the on-disk file (or editor) already uses.
        newline = get_line_endings(path) or editor.get_line_endings(path)
    try:
        with open(path, 'wb') as fd:
            if is_text:
                contents = buf['buf']
                if newline != '\n':
                    contents = newline.join(contents.split('\n'))
                fd.write(contents.encode('utf-8'))
            else:
                # Binary buffers are written verbatim.
                fd.write(buf['buf'])
    except Exception as e:
        msg.error('Error saving buf: ', str_e(e))
def save_buf(buf):
    """Write *buf* to disk, converting newlines for utf8 text buffers.

    utf8 contents are re-joined with the file's current (or editor-default)
    line endings before encoding; other encodings are written as raw bytes.
    Failures are logged via msg.error rather than raised.
    """
    path = get_full_path(buf['path'])
    mkdir(os.path.split(path)[0])
    is_utf8 = buf['encoding'] == 'utf8'  # evaluate once instead of twice
    if is_utf8:
        newline = get_line_endings(path) or editor.get_line_endings(path)
    try:
        with open(path, 'wb') as fd:
            if is_utf8:
                out = buf['buf']
                if newline != '\n':
                    out = newline.join(out.split('\n'))
                fd.write(out.encode('utf-8'))
            else:
                fd.write(buf['buf'])
    except Exception as e:
        # Varargs form matches the module's other msg.error calls
        # (the '%s' % formatting here was inconsistent with its siblings).
        msg.error('Error saving buf: ', str_e(e))
def update_floo_file(path, data):
    """Overlay *data* onto the JSON stored in the .floo file at *path*."""
    floo_json = None
    try:
        floo_json = json.loads(open(path, 'r').read())
    except Exception:
        pass
    try:
        floo_json.update(data)
    except Exception:
        # Missing, unreadable, or non-dict contents: fall back to just *data*.
        floo_json = data
    try:
        with open(path, 'w') as out_fd:
            out_fd.write(json.dumps(floo_json, indent=4, sort_keys=True, separators=(',', ': ')))
    except Exception as e:
        msg.warn('Couldn\'t update .floo file: ', floo_json, ': ', str_e(e))
def get_persistent_data(per_path=None):
    """Load persistent.json, guaranteeing recent_workspaces/workspaces keys.

    Returns a default empty structure when the file is missing or cannot be
    parsed. per_path defaults to G.BASE_DIR/persistent.json.
    """
    per_data = {'recent_workspaces': [], 'workspaces': {}}
    per_path = per_path or os.path.join(G.BASE_DIR, 'persistent.json')
    # Pre-bind so the debug dump below is safe even when read() itself
    # raised (the old code hit NameError on an unbound `data` there).
    data = None
    try:
        # with-block closes the handle (the old code leaked it).
        with open(per_path, 'rb') as per:
            data = per.read().decode('utf-8')
        persistent_data = json.loads(data)
    except (IOError, OSError):
        msg.debug('Failed to open ', per_path, '. Recent workspace list will be empty.')
        return per_data
    except Exception as e:
        msg.debug('Failed to parse ', per_path, '. Recent workspace list will be empty.')
        msg.debug(str_e(e))
        msg.debug(data)
        return per_data
    if 'recent_workspaces' not in persistent_data:
        persistent_data['recent_workspaces'] = []
    if 'workspaces' not in persistent_data:
        persistent_data['workspaces'] = {}
    return persistent_data
def get_persistent_data(per_path=None):
    """Return parsed persistent.json contents with default keys filled in.

    Falls back to {"recent_workspaces": [], "workspaces": {}} when the file
    cannot be opened or parsed. per_path defaults to
    G.BASE_DIR/persistent.json.
    NOTE(review): the file handle is never closed, and if per.read() itself
    raises, "data" is unbound when msg.debug(data) runs in the parse-error
    branch -- that would raise NameError.
    """
    per_data = {"recent_workspaces": [], "workspaces": {}}
    per_path = per_path or os.path.join(G.BASE_DIR, "persistent.json")
    try:
        per = open(per_path, "rb")
    except (IOError, OSError):
        msg.debug("Failed to open ", per_path, ". Recent workspace list will be empty.")
        return per_data
    try:
        data = per.read().decode("utf-8")
        persistent_data = json.loads(data)
    except Exception as e:
        msg.debug("Failed to parse ", per_path, ". Recent workspace list will be empty.")
        msg.debug(str_e(e))
        msg.debug(data)
        return per_data
    if "recent_workspaces" not in persistent_data:
        persistent_data["recent_workspaces"] = []
    if "workspaces" not in persistent_data:
        persistent_data["workspaces"] = {}
    return persistent_data
def display(self):
    """Emit this message to the log file/console or the editor chat view.

    Messages below the global LOG_LEVEL are dropped. When logging to the
    console (or before a chat view exists) the text is appended to LOG_FILE
    and echoed via safe_print; otherwise it is routed to editor_log.
    NOTE(review): calls the Python 2 builtin unicode() -- presumably a
    py2/py3 compat shim is in scope; confirm.
    """
    if self.level < LOG_LEVEL:
        return
    msg = unicode(self)
    if G.LOG_TO_CONSOLE or G.CHAT_VIEW is None:
        # TODO: ridiculously inefficient
        try:
            fd = open(LOG_FILE, 'ab')
            fmsg = msg
            # Best-effort encode; failures fall through with the original value.
            try:
                fmsg = fmsg.encode('utf-8')
            except Exception:
                pass
            fd.write(fmsg)
            fd.write(b'\n')
            fd.close()
        except Exception as e:
            safe_print(str_e(e))
        safe_print(msg)
    else:
        editor_log(msg)