def path_to_server_guid(memory, options, serverindex, parent_path):
    """
    Resolve a server-side path to its short id (guid).

    @type memory: Memory
    @param options: program options, forwarded to server_path_to_shortid
    @type options: optparse.Values, instance
    @param parent_path: server path to resolve
    @type parent_path: str, unicode
    @type serverindex: dict
    @return: (shortid, memory)
    @rtype: tuple
    @raise MultipleGuidsForPath: when more than one server node matches a path
    @raise NoParentFound: when no short id can be resolved at all
    """
    result = [x["doc"]["m_short_id"] for x in serverindex["doclist"]
              if strcmp(x["doc"]["m_path_p64s"], parent_path)]

    if len(result) > 1:
        raise MultipleGuidsForPath(parent_path)

    if len(result) == 0:
        # not in the index: fall back to asking the server directly
        shortid = server_path_to_shortid(memory, options, parent_path)
    else:
        # fixed: removed a duplicated, unreachable len(result) > 1 check here;
        # the guard above already raised in that case
        shortid = result[0]

    if not shortid:
        # last resort: resolve the root node "/"
        result = [x["doc"]["m_short_id"] for x in serverindex["doclist"]
                  if strcmp(x["doc"]["m_path_p64s"], "/")]

        if len(result) > 1:
            raise MultipleGuidsForPath(parent_path)

        # fixed: an empty root lookup previously raised a bare IndexError on
        # result[0]; now it falls through to the intended NoParentFound below
        shortid = result[0] if result else None

    if not shortid:
        raise NoParentFound(parent_path)

    return shortid, memory
def decrypt_write_file(localindex, fdir, fhash, secret):
    """
    Decrypt a downloaded, chunked blob and write the plaintext to every
    local path whose index entry carries the same content hash.

    NOTE(review): this function is defined twice in this file with an
    identical body; one copy should be removed.

    @param localindex: local file/dir index
    @type localindex: dict
    @param fdir: directory holding the encrypted chunk files
    @type fdir: str or unicode
    @param fhash: content hash; fhash[2:] names the chunk-list file in fdir
    @type fhash: str or unicode
    @param secret: decryption key
    @type secret: str or unicode
    @return: list of file paths that were written
    @rtype: list
    """
    cpath = os.path.join(fdir, fhash[2:])

    # fixed: read the chunk list via a context manager instead of leaking the
    # file handle from open(cpath).read(); the file holds one chunk path per line
    with open(cpath) as chunk_list:
        enc_file_parts = tuple([x for x in chunk_list.read().split("\n") if x])

    data = decrypt_file_smp(secret, enc_files=enc_file_parts, progress_callback=update_item_progress)

    # remove the chunk list and the chunks themselves
    os.remove(cpath)
    for fp in enc_file_parts:
        os.remove(fp)

    files_left = get_files_dir(fdir)
    if len(files_left) == 0:
        os.rmdir(fdir)
    if len(files_left) == 1:
        # a lone .DS_Store (Finder metadata) should not keep the dir alive
        dstorp = os.path.join(fdir, ".DS_Store")
        if os.path.exists(dstorp):
            os.remove(dstorp)
            files_left = get_files_dir(fdir)
            if len(files_left) == 0:
                os.rmdir(fdir)

    file_blob = {"data": data}
    paths = []
    # write the decrypted data (plus recorded stat metadata) to every local
    # path in the index that references this hash and does not exist yet
    for dirhash in localindex["dirnames"]:
        for cfile in localindex["dirnames"][dirhash]["filenames"]:
            if strcmp(fhash, cfile["hash"]):
                fpath = os.path.join(localindex["dirnames"][dirhash]["dirname"], cfile["name"])
                if not os.path.exists(fpath):
                    ft = localindex["filestats"][fpath]
                    file_blob["st_atime"] = int(ft["st_atime"])
                    file_blob["st_mtime"] = int(ft["st_mtime"])
                    file_blob["st_mode"] = int(ft["st_mode"])
                    file_blob["st_uid"] = int(ft["st_uid"])
                    file_blob["st_gid"] = int(ft["st_gid"])
                    write_fdict_to_file(file_blob, fpath)
                    paths.append(fpath)
    return paths
def restore_hidden_config(options):
    """
    Restore the encrypted config whose cryptobox name matches
    options.cryptobox.

    @type options: optparse.Values, instance
    @return: the secret of the restored config, or None when nothing matched
    """
    candidates = get_encrypted_configs(options, name_stop=options.cryptobox)

    for candidate in candidates:
        if not strcmp(candidate["cryptoboxname"], options.cryptobox):
            continue

        restore_config(candidate["config_file_path"], candidate["cryptoboxname"], options, candidate["secret"])
        return candidate["secret"]

    return None
def decrypt_write_file(localindex, fdir, fhash, secret):
    """
    Decrypt a downloaded, chunked blob and write the plaintext to every
    local path whose index entry carries the same content hash.

    NOTE(review): this function is defined twice in this file with an
    identical body; one copy should be removed.

    @param localindex: local file/dir index
    @type localindex: dict
    @param fdir: directory holding the encrypted chunk files
    @type fdir: str or unicode
    @param fhash: content hash; fhash[2:] names the chunk-list file in fdir
    @type fhash: str or unicode
    @param secret: decryption key
    @type secret: str or unicode
    @return: list of file paths that were written
    @rtype: list
    """
    cpath = os.path.join(fdir, fhash[2:])

    # fixed: read the chunk list via a context manager instead of leaking the
    # file handle from open(cpath).read(); the file holds one chunk path per line
    with open(cpath) as chunk_list:
        enc_file_parts = tuple([x for x in chunk_list.read().split("\n") if x])

    data = decrypt_file_smp(secret, enc_files=enc_file_parts, progress_callback=update_item_progress)

    # remove the chunk list and the chunks themselves
    os.remove(cpath)
    for fp in enc_file_parts:
        os.remove(fp)

    files_left = get_files_dir(fdir)
    if len(files_left) == 0:
        os.rmdir(fdir)
    if len(files_left) == 1:
        # a lone .DS_Store (Finder metadata) should not keep the dir alive
        dstorp = os.path.join(fdir, ".DS_Store")
        if os.path.exists(dstorp):
            os.remove(dstorp)
            files_left = get_files_dir(fdir)
            if len(files_left) == 0:
                os.rmdir(fdir)

    file_blob = {"data": data}
    paths = []
    # write the decrypted data (plus recorded stat metadata) to every local
    # path in the index that references this hash and does not exist yet
    for dirhash in localindex["dirnames"]:
        for cfile in localindex["dirnames"][dirhash]["filenames"]:
            if strcmp(fhash, cfile["hash"]):
                fpath = os.path.join(localindex["dirnames"][dirhash]["dirname"], cfile["name"])
                if not os.path.exists(fpath):
                    ft = localindex["filestats"][fpath]
                    file_blob["st_atime"] = int(ft["st_atime"])
                    file_blob["st_mtime"] = int(ft["st_mtime"])
                    file_blob["st_mode"] = int(ft["st_mode"])
                    file_blob["st_uid"] = int(ft["st_uid"])
                    file_blob["st_gid"] = int(ft["st_gid"])
                    write_fdict_to_file(file_blob, fpath)
                    paths.append(fpath)
    return paths
def short_id_to_server_path(memory, serverindex, short_id):
    """
    Look up the server path that belongs to a short id.

    @type memory: Memory
    @param short_id: short id (guid) to resolve
    @type short_id: str, unicode
    @type serverindex: dict
    @return: (server path, memory)
    @rtype: tuple
    @raise NoPathFound: when no node carries the short id
    @raise MultiplePathsForSID: when more than one node carries the short id
    """
    matches = [node["doc"]["m_path_p64s"] for node in serverindex["doclist"]
               if strcmp(node["doc"]["m_short_id"], short_id)]

    if not matches:
        raise NoPathFound(short_id)
    if len(matches) > 1:
        raise MultiplePathsForSID(short_id)

    return matches[0], memory
def path_to_server_shortid(options, serverindex, path):
    """
    Strip the local sync dir from a path and resolve the remainder to the
    matching server short id.

    @param options: program options (options.dir is the local sync root)
    @type options: optparse.Values, instance
    @param path: absolute local path
    @type path: str, unicode
    @type serverindex: dict
    @return: the short id of the matching server node
    @rtype: str, unicode
    @raise NoParentFound: when no node matches the relative path
    @raise MultipleGuidsForPath: when more than one node matches
    """
    relative = path.replace(options.dir, "")
    matches = [node["doc"]["m_short_id"] for node in serverindex["doclist"]
               if strcmp(node["doc"]["m_path_p64s"], relative)]

    if not matches:
        raise NoParentFound(relative)
    if len(matches) > 1:
        raise MultipleGuidsForPath(relative)

    return matches[0]
def write_blobs_to_filepaths(memory, options, file_nodes, data, downloaded_fhash, content_path):
    """
    Write a downloaded blob to every file node whose latest content hash
    matches the downloaded hash, and drop those nodes from the worklist.

    @type memory: Memory
    @type options: optparse.Values
    @type file_nodes: tuple
    @type data: str or unicode or None
    @type downloaded_fhash: unicode
    @type content_path: str or unicode
    @return: (memory, nodes that still need content)
    @rtype: tuple
    """
    remaining = list(file_nodes)
    matching = [node for node in file_nodes
                if strcmp(node["content_hash_latest_timestamp"][0], downloaded_fhash)]

    for node in matching:
        memory = add_server_path_history(memory, node["doc"]["m_path_p64s"])
        write_blob_to_filepath(memory, node, options, data, content_path)
        remaining.remove(node)

    return memory, remaining
def diff_files_locally(memory, options, localindex, serverindex):
    """
    Compare the local index with the server index and decide which local
    files must be uploaded and which must be deleted locally.

    @type memory: Memory
    @type options: optparse.Values, instance
    @type localindex: dict
    @type serverindex: dict
    @return: (file_uploads, file_del_local, memory, localindex)
    @rtype: tuple
    """
    local_pathnames = [(localindex["dirnames"][d]["dirname"], localindex["dirnames"][d]["filenames"])
                       for d in localindex["dirnames"]
                       if len(localindex["dirnames"][d]["filenames"]) > 0]

    # flatten the index to a set of absolute local file paths, skipping hidden files
    local_pathnames_set = set()
    file_uploads = []
    for ft in local_pathnames:
        for fname in ft[1]:
            if not str(fname["name"]).startswith("."):
                local_path = os.path.join(ft[0], fname["name"])
                local_pathnames_set.add(str(local_path))

    for local_path in local_pathnames_set:
        if not os.path.exists(local_path):
            continue

        seen_local_path_before, memory = in_local_path_history(memory, local_path)
        rel_local_path = local_path.replace(options.dir, "")
        upload_file_object = {"local_path": local_path,
                              "parent_short_id": None,
                              "rel_path": rel_local_path}
        corresponding_server_nodes = [x for x in serverindex["doclist"]
                                      if x["doc"]["m_path_p64s"] == upload_file_object["rel_path"]]

        if not seen_local_path_before:
            # fixed: dropped the redundant "or not corresponding_server_nodes"
            # clause, which could never be true when len(...) != 0
            if len(corresponding_server_nodes) == 0:
                # new local file, unknown on the server: upload
                file_uploads.append(upload_file_object)
            else:
                filedata, localindex = make_cryptogit_hash(upload_file_object["local_path"], options.dir, localindex)
                if not corresponding_server_nodes[0]:
                    file_uploads.append(upload_file_object)
                elif not strcmp(corresponding_server_nodes[0]["content_hash_latest_timestamp"][0], filedata["filehash"]):
                    # same path, different content: upload
                    file_uploads.append(upload_file_object)
                else:
                    # identical content already on the server: just remember it
                    memory = add_local_path_history(memory, upload_file_object["local_path"])
        else:
            # seen before: upload only when changed after the server timestamp
            if len(corresponding_server_nodes) != 0:
                filestats = read_file_to_fdict(local_path)
                if filestats and corresponding_server_nodes[0]:
                    # fixed: removed a no-op "try: ... except: raise" wrapper here
                    if filestats["st_ctime"] > corresponding_server_nodes[0]["content_hash_latest_timestamp"][1]:
                        filedata, localindex = make_cryptogit_hash(local_path, options.dir, localindex)
                        if filedata["filehash"] != corresponding_server_nodes[0]["content_hash_latest_timestamp"][0]:
                            file_uploads.append(upload_file_object)

    # local files missing on the server that we have seen before: delete locally
    file_del_local = []
    server_paths = [str(os.path.join(options.dir, x["doc"]["m_path_p64s"].lstrip(os.path.sep)))
                    for x in serverindex["doclist"]]
    for local_path in local_pathnames_set:
        if os.path.exists(local_path) and local_path not in server_paths:
            seen_local_path_before, memory = in_local_path_history(memory, local_path)
            if seen_local_path_before:
                file_del_local.append(local_path)

    return file_uploads, file_del_local, memory, localindex
def get_unique_content(memory, options, all_unique_nodes, local_paths):
    """
    Download every blob not yet present locally and materialise its content
    at all local paths that reference its hash.

    @type memory: Memory
    @type options: optparse.Values, instance
    @param all_unique_nodes: server nodes keyed by content hash
    @type all_unique_nodes: dict
    @param local_paths: server file nodes that need local content
    @type local_paths: tuple
    @return: updated memory
    @rtype: Memory
    """
    if len(local_paths) == 0:
        return memory

    # hashes for which no local blob exists yet
    unique_nodes_hashes = [fhash for fhash in all_unique_nodes if not have_blob(options, fhash)]
    unique_nodes = [all_unique_nodes[fhash] for fhash in all_unique_nodes if fhash in unique_nodes_hashes]
    downloaded_files_cnt = 0
    # skip nodes whose target file already exists; download smallest first
    unique_nodes = [node for node in unique_nodes if not os.path.exists(os.path.join(options.dir, node["doc"]["m_path_p64s"].lstrip(os.path.sep)))]
    unique_nodes = sorted(unique_nodes, key=lambda k: k["doc"]["m_size_p64s"])

    for node in unique_nodes:
        update_progress(downloaded_files_cnt, len(unique_nodes), "downloading " + str(node["doc"]["m_name"]))
        content_path, content_hash = download_blob(memory, options, node)
        update_item_progress(100)
        output_json({"item_progress": 0})
        # write the downloaded blob to every local path with a matching hash
        memory, file_nodes_left = write_blobs_to_filepaths(memory, options, local_paths, None, content_hash, content_path)
        downloaded_files_cnt += 1
        update_progress(downloaded_files_cnt, len(unique_nodes), "downloading done")

    for lp in local_paths:
        memory = add_local_path_history(memory, os.path.join(options.dir, lp["doc"]["m_path_p64s"].lstrip(os.sep)))
        source_path = None
        file_path = os.path.join(options.dir, lp["doc"]["m_path_p64s"].lstrip(os.path.sep))
        if not os.path.exists(file_path):
            # try to find another local file that already carries this content
            for lph in all_unique_nodes:
                if lph == lp["content_hash_latest_timestamp"][0]:
                    source_path = os.path.join(options.dir, all_unique_nodes[lph]["doc"]["m_path_p64s"].lstrip(os.path.sep))
                    break

            datapath = data = None
            if source_path:
                if not os.path.exists(source_path):
                    # fall back to the encrypted blob store; blobs are laid
                    # out as <blobdir>/<hash[:2]>/<hash[2:]>
                    fhash = lp["content_hash_latest_timestamp"][0]
                    source_path = os.path.join(get_blob_dir(options), fhash[:2])
                    source_path = os.path.join(source_path, fhash[2:])
                    memory = add_path_history(file_path, memory)
                    # NOTE(review): base64.decodestring is deprecated in
                    # modern Python (use decodebytes) — this file is Python 2
                    secret = password_derivation(options.password, base64.decodestring(memory.get("salt_b64")))
                    dec_file = decrypt_file(source_path, secret)
                    datapath = dec_file.name

                if not datapath:
                    st_ctime, st_atime, st_mtime, st_mode, st_uid, st_gid, st_size, datapath = read_file(source_path, True)
                else:
                    st_ctime, st_atime, st_mtime, st_mode, st_uid, st_gid, st_size, datapath_dummy = read_file(source_path)

                # NOTE(review): st_mtime is deliberately(?) passed for both the
                # atime and mtime slots of write_file — confirm with write_file
                st_mtime = int(lp["content_hash_latest_timestamp"][1])
                write_file(file_path, data, datapath, st_mtime, st_mtime, st_mode, st_uid, st_gid)

    # any nodes still missing on disk: copy the data from an already-written
    # local file that has the same content hash
    local_paths_not_written = [fp for fp in local_paths if not os.path.exists(os.path.join(options.dir, fp["doc"]["m_path_p64s"].lstrip(os.path.sep)))]
    if len(local_paths_not_written) > 0:
        local_index = get_localindex(memory)
        local_path_hashes = {}
        for ldir in local_index["dirnames"]:
            for f in local_index["dirnames"][ldir]["filenames"]:
                if "hash" in f:
                    local_path_hashes[f["hash"]] = os.path.join(local_index["dirnames"][ldir]["dirname"], f["name"])
        for lfnw in local_paths_not_written:
            w = False
            for lfh in local_path_hashes:
                # w acts as a "done" flag: only the first matching hash is copied
                if not w:
                    if strcmp(lfnw["content_hash_latest_timestamp"][0], lfh):
                        w = True
                        open(os.path.join(options.dir, lfnw["doc"]["m_path_p64s"].lstrip(os.path.sep)), "w").write(open(local_path_hashes[lfh]).read())
    return memory
def dirs_on_local(memory, options, localindex, dirname_hashes_server, serverindex):
    """
    Decide which local directories must be created on the server and which
    must be removed locally.

    @type memory: Memory
    @param dirname_hashes_server: folders on server, keyed by dirname hash
    @type dirname_hashes_server: dict
    @type serverindex: dict
    @param options: program options
    @type options: optparse.Values, instance
    @type localindex: dict
    @return: (unique dirs to make on server, dirs to delete locally)
    @rtype: tuple
    """
    if "tree_timestamp" not in serverindex:
        raise Exception("dirs_on_local needs a tree timestamp")

    tree_timestamp = int(serverindex["tree_timestamp"])

    # local directories whose hash does not appear on the server
    local_dirs_not_on_server = []
    for local_hash in localindex["dirnames"]:
        entry = localindex["dirnames"][local_hash]
        on_server = any(strcmp(server_hash, entry["dirnamehash"]) for server_hash in dirname_hashes_server)
        if not on_server and entry["dirname"] != options.dir:
            local_dirs_not_on_server.append(entry)

    dirs_make_server = []
    dirs_del_local = []
    server_dir_history = []
    if memory.has("serverpath_history"):
        server_dir_history = [path_to_relative_path_unix_style(memory, hist[0])
                              for hist in memory.get("serverpath_history")]

    for node in local_dirs_not_on_server:
        if not os.path.exists(node["dirname"]):
            dirs_del_local.append(node)
            continue

        rel_dirname = path_to_relative_path_unix_style(memory, node["dirname"])
        node["relname"] = rel_dirname
        if rel_dirname in serverindex["dirlist"]:
            dirs_del_local.append(node)
            continue

        folder_timestamp = os.stat(node["dirname"]).st_mtime
        if int(folder_timestamp) >= int(tree_timestamp):
            # modified after the server tree snapshot: create it on the server
            dirs_make_server.append(node)
        elif node["dirname"].replace(options.dir, "") not in server_dir_history:
            # never seen on the server before: create it there
            dirs_make_server.append(node)
        else:
            # known to the server in the past but gone now: remove locally
            dirs_del_local.append(node)

    # keep exactly one entry per dirnamehash
    dirs_make_server_unique = []
    for unique_hash in set([n["dirnamehash"] for n in dirs_make_server]):
        for candidate in dirs_make_server:
            if strcmp(candidate["dirnamehash"], unique_hash):
                dirs_make_server_unique.append(candidate)
                break

    return dirs_make_server_unique, dirs_del_local
message_json("hello") message_json("world") message_json(str(range(0, 10000))) message_json(str(range(0, 1000000))) return def main(): # noinspection PyUnusedLocal (options, args) = add_options() try: cryptobox_command(options) except Exception: exs = handle_ex(False, True) output_json({"error_message": exs}) raise if strcmp(__name__, "__main__"): try: # On Windows calling this function is necessary. if sys.platform.startswith("win"): multiprocessing.freeze_support() main() except KeyboardInterrupt: print "cba_main.py:494", "\nbye main"
from cba_utils import message_json message_json("hello") message_json("world") message_json(str(range(0, 10000))) message_json(str(range(0, 1000000))) return def main(): #noinspection PyUnusedLocal (options, args) = add_options() try: cryptobox_command(options) except Exception: exs = handle_ex(False, True) output_json({"error_message": exs}) raise if strcmp(__name__, '__main__'): try: # On Windows calling this function is necessary. if sys.platform.startswith('win'): multiprocessing.freeze_support() main() except KeyboardInterrupt: print "cba_main.py:494", "\nbye main"