def index_files_visit(arg, dir_name, names):
    """
    Directory-walk callback that indexes the regular files in one directory.

    @type arg: dict
    @type dir_name: str or unicode
    @type names: list
    """
    # Hidden directories are skipped entirely.
    if os.path.basename(dir_name).startswith("."):
        return

    # Keep only entries that are not directories.
    # Fix: the original called os.path.os.path.isdir — an accidental
    # attribute chain that only works because posixpath re-exports os.
    filenames = [os.path.basename(p)
                 for p in (os.path.join(dir_name, n.lstrip(os.path.sep)) for n in names)
                 if not os.path.isdir(p)]

    # Hidden files are excluded from the index.
    filenames = [x for x in filenames if not x.startswith(".")]

    # Relative, unix-style directory path is the hash input.
    dirname_hash_input = dir_name.replace(arg["DIR"], "").replace(os.path.sep, "/")

    # Symbolic links are reported and removed from the file list.
    symlinks = [x for x in filenames if os.path.islink(os.path.join(dir_name, x))]

    for sl in symlinks:
        arg["messages"].append("symbolic link " + os.path.join(dir_name, sl) + " ignored")
        filenames.remove(sl)

    # The root of the scanned tree hashes as "/".
    if len(dirname_hash_input) == 0:
        dirname_hash_input = "/"

    dirname_hash = make_sha1_hash_file(data=dirname_hash_input)
    nameshash = make_sha1_hash_file(data="".join(names))
    filenames = [{'name': x} for x in filenames]
    folder = {"dirname": dir_name,
              "dirnamehash": dirname_hash,
              "filenames": filenames,
              "nameshash": nameshash}
    arg["folders"]["dirnames"][dirname_hash] = folder
    arg["numfiles"] += len(filenames)
def del_serverhash(memory, relative_path_name):
    """
    del_serverhash

    Remove the given path's entry from the 'serverpath_history' set, if present.

    @type memory: Memory
    @type relative_path_name: str, unicode
    """
    relative_path_unix_style = path_to_relative_path_unix_style(memory, relative_path_name)

    # Compute the (path, hash) entry once — the original hashed the same
    # string twice, once per set_* call.
    entry = (relative_path_unix_style, make_sha1_hash_file(data=relative_path_unix_style))

    if memory.set_have_value("serverpath_history", entry):
        memory.set_delete_value("serverpath_history", entry)

    return memory
def make_hash_path(path):
    """
    Hash a single file, or the concatenated contents of all files under a directory.

    @type path: str or unicode
    """
    if not os.path.exists(path):
        message_json("input file does not exist")
        return None

    if os.path.isdir(path):
        # Spool every file below the directory into one temporary buffer
        # and hash the buffer as a whole (spills to disk past ~500 MB).
        buf = tempfile.SpooledTemporaryFile(max_size=524288000)

        for p in [os.path.join(dp, f) for dp, dn, fn in os.walk(path) for f in fn]:
            # Binary mode + context manager: the original leaked the file
            # handle, and text-mode str cannot be written into the
            # binary SpooledTemporaryFile on Python 3.
            with open(p, "rb") as fin:
                buf.write(fin.read())

        return make_sha1_hash_file(fpi=buf)

    return make_sha1_hash_file(fpath=path)
def parse_serverindex(serverindex):
    """
    Split a server index document into per-use lookup structures.

    @type serverindex: dict
    @rtype: dict, tuple, dict, tuple
    """
    unique_content = {}
    unique_dirs = set()
    fnodes = []
    # Set instead of list: membership test per node was O(n) scans.
    checked_dirnames = set()
    dirname_hashes_server = {}

    for node in serverindex["doclist"]:
        # Folders are their own directory; files use their parent directory.
        if node["doc"]["m_nodetype"] == "folder":
            dirname_of_path = node["doc"]["m_path_p64s"]
        else:
            dirname_of_path = os.path.dirname(node["doc"]["m_path_p64s"])

        node["dirname_of_path"] = dirname_of_path
        unique_dirs.add(dirname_of_path)

        if node["content_hash_latest_timestamp"]:
            unique_content[node["content_hash_latest_timestamp"][0]] = node
            fnodes.append(node)

        # Hash each distinct directory name once (unix-style separators).
        if dirname_of_path not in checked_dirnames:
            dirname_hash = make_sha1_hash_file(data=dirname_of_path.replace(os.path.sep, "/"))
            dirname_hashes_server[dirname_hash] = node
            checked_dirnames.add(dirname_of_path)

    return dirname_hashes_server, tuple(fnodes), unique_content, tuple(unique_dirs)
def make_hash_path(path):
    """
    Hash a single file, or the concatenated contents of all files under a directory.

    @type path: str or unicode
    """
    if not os.path.exists(path):
        message_json("input file does not exist")
        return None

    if os.path.isdir(path):
        # Spool every file below the directory into one temporary buffer
        # and hash the buffer as a whole (spills to disk past ~500 MB).
        buf = tempfile.SpooledTemporaryFile(max_size=524288000)

        for p in [os.path.join(dp, f) for dp, dn, fn in os.walk(path) for f in fn]:
            # Binary mode + context manager: the original leaked the file
            # handle, and text-mode str cannot be written into the
            # binary SpooledTemporaryFile on Python 3.
            with open(p, "rb") as fin:
                buf.write(fin.read())

        return make_sha1_hash_file(fpi=buf)

    return make_sha1_hash_file(fpath=path)
def have_serverhash(memory, node_path):
    """
    have_serverhash

    Check whether the path's entry exists in the 'serverpath_history' set.

    @type memory: Memory
    @type node_path: str, unicode
    """
    rel_path = path_to_relative_path_unix_style(memory, node_path)
    entry = (rel_path, make_sha1_hash_file(data=rel_path))
    found = memory.set_have_value("serverpath_history", entry)
    return found, memory
def del_serverhash(memory, relative_path_name):
    """
    del_serverhash

    Remove the given path's entry from the 'serverpath_history' set, if present.

    @type memory: Memory
    @type relative_path_name: str, unicode
    """
    relative_path_unix_style = path_to_relative_path_unix_style(memory, relative_path_name)

    # Compute the (path, hash) entry once — the original hashed the same
    # string twice, once per set_* call.
    entry = (relative_path_unix_style, make_sha1_hash_file(data=relative_path_unix_style))

    if memory.set_have_value("serverpath_history", entry):
        memory.set_delete_value("serverpath_history", entry)

    return memory
def add_server_path_history(memory, relative_path_name):
    """
    add_server_path_history

    Record the path (with its hash) in the 'serverpath_history' set.

    @type memory: Memory
    @type relative_path_name: str, unicode
    """
    unix_rel = path_to_relative_path_unix_style(memory, relative_path_name)
    path_hash = make_sha1_hash_file(data=unix_rel)
    memory.set_add_value("serverpath_history", (unix_rel, path_hash))
    return memory
def have_serverhash(memory, node_path):
    """
    have_serverhash

    Check whether the path's entry exists in the 'serverpath_history' set.

    @type memory: Memory
    @type node_path: str, unicode
    """
    rel = path_to_relative_path_unix_style(memory, node_path)
    history_entry = (rel, make_sha1_hash_file(data=rel))
    return memory.set_have_value("serverpath_history", history_entry), memory
def add_server_path_history(memory, relative_path_name):
    """
    add_server_path_history

    Record the path (with its hash) in the 'serverpath_history' set.

    @type memory: Memory
    @type relative_path_name: str, unicode
    """
    rel = path_to_relative_path_unix_style(memory, relative_path_name)
    memory.set_add_value("serverpath_history", (rel, make_sha1_hash_file(data=rel)))
    return memory
def index_files_visit(arg, dir_name, names):
    """
    Directory-walk callback that indexes the regular files in one directory.

    @type arg: dict
    @type dir_name: str or unicode
    @type names: list
    """
    # Hidden directories are skipped entirely.
    if os.path.basename(dir_name).startswith("."):
        return

    # Keep only entries that are not directories.
    # Fix: the original called os.path.os.path.isdir — an accidental
    # attribute chain that only works because posixpath re-exports os.
    filenames = [os.path.basename(p)
                 for p in (os.path.join(dir_name, n.lstrip(os.path.sep)) for n in names)
                 if not os.path.isdir(p)]

    # Hidden files are excluded from the index.
    filenames = [x for x in filenames if not x.startswith(".")]

    # Relative, unix-style directory path is the hash input.
    dirname_hash_input = dir_name.replace(arg["DIR"], "").replace(os.path.sep, "/")

    # Symbolic links are reported and removed from the file list.
    symlinks = [x for x in filenames if os.path.islink(os.path.join(dir_name, x))]

    for sl in symlinks:
        arg["messages"].append("symbolic link " + os.path.join(dir_name, sl) + " ignored")
        filenames.remove(sl)

    # The root of the scanned tree hashes as "/".
    if len(dirname_hash_input) == 0:
        dirname_hash_input = "/"

    dirname_hash = make_sha1_hash_file(data=dirname_hash_input)
    nameshash = make_sha1_hash_file(data="".join(names))
    filenames = [{'name': x} for x in filenames]
    folder = {"dirname": dir_name,
              "dirnamehash": dirname_hash,
              "filenames": filenames,
              "nameshash": nameshash}
    arg["folders"]["dirnames"][dirname_hash] = folder
    arg["numfiles"] += len(filenames)
def get_mtime_and_content_hash(fpath):
    """
    Return (mtime, git-style blob hash) for a regular file path.

    @type fpath: str or unicode
    """
    # Missing paths and directories carry no content hash; short-circuit
    # preserves the original check order (exists first, then isdir).
    if not os.path.exists(fpath) or os.path.isdir(fpath):
        return None, None

    file_dict = read_file_to_fdict(fpath)
    blob_prefix = "blob " + str(file_dict["st_size"]) + "\0"
    content_hash = make_sha1_hash_file(prefix=blob_prefix, fpath=fpath)
    return file_dict["st_mtime"], content_hash
def make_cryptogit_filehash(fpath):
    """
    Return the file's fdict with a git-style blob hash stored under 'filehash'.
    """
    fdict = read_file_to_fdict(fpath)
    blob_prefix = "blob " + str(fdict["st_size"]) + "\0"
    fdict["filehash"] = make_sha1_hash_file(prefix=blob_prefix, fpath=fpath)
    return fdict
def make_cryptogit_filehash(fpath):
    """
    Return the file's fdict with a git-style blob hash stored under 'filehash'.
    """
    result = read_file_to_fdict(fpath)
    result["filehash"] = make_sha1_hash_file(
        prefix="blob " + str(result["st_size"]) + "\0", fpath=fpath)
    return result