def test_hash(self):
    """Hash a known 1MB fixture and verify the expected content hash."""
    sample_path = "testdata/1MB.zip"
    index = {"filestats": {}}
    filedata, index = make_cryptogit_hash(sample_path, self.cboptions.dir, index)
    self.assertEqual('0c1d7e2e3283b3ee3f319533ea6aa6372982922f', filedata["filehash"])
def check_renames_server(memory, options, localindex, serverindex, file_uploads, file_del_server_param, dir_del_server):
    """ check_renames

    Detect server-side renames: a pending upload whose content hash equals a
    pending server-side delete is reinterpreted as a rename of that server file.

    @type memory: Memory
    @type options: optparse.Values, instance
    @type localindex: dict
    @type serverindex: dict
    @type file_uploads: list
    @type file_del_server_param: list
    @type dir_del_server: list
    @rtype: tuple, tuple, tuple, dict
    """
    renames_server = []
    file_uploads_remove = []
    file_del_server_remove = []
    # work on a copy so the caller's list is not mutated
    file_del_server = list(file_del_server_param)

    # expand deleted server directories into the individual files they contained,
    # using the previously stored local index
    if memory.has("localindex"):
        memory_dirnames = memory.get("localindex")["dirnames"]
        cryptobox_folder = memory.get("cryptobox_folder")
        memory_dirnames = [add_relname(cryptobox_folder, memory_dirnames[x]) for x in memory_dirnames]
        deleted_dirs = [x for x in memory_dirnames if x["relname"] in dir_del_server]
        for d in deleted_dirs:
            for f in d["filenames"]:
                file_del_server.append(os.path.join(d["dirname"], f["name"]))

    for fu in file_uploads:
        # bugfix/perf: the upload hash depends only on fu, not on fd — compute it
        # once per upload instead of once per (upload, delete) pair
        fu_data, localindex = make_cryptogit_hash(fu["local_path"], options.dir, localindex)
        fu_hash = fu_data["filehash"]
        for fd in file_del_server:
            fd_hash = get_content_hash_server(options, serverindex, fd)
            if fu_hash == fd_hash:
                # same content: record (old server path, new path) as a rename
                fd_rel_path = fd.replace(options.dir, "")
                fu_rel_path = fu["local_path"].replace(options.dir, "")
                ren_item = (fd_rel_path, fu_rel_path)
                renames_server.append(ren_item)
                file_uploads_remove.append(fu)
                file_del_server_remove.append(fd)

    # drop reinterpreted entries from the upload and delete work lists
    for fur in file_uploads_remove:
        if fur in file_uploads:
            file_uploads.remove(fur)
    for fdr in file_del_server_remove:
        if fdr in file_del_server:
            file_del_server.remove(fdr)

    file_del_server = [x.replace(options.dir, "") for x in file_del_server]
    return tuple(renames_server), tuple(file_uploads), tuple(file_del_server), localindex
def diff_files_locally(memory, options, localindex, serverindex):
    """ diff_files_locally

    Compare the local file tree with the server index and decide, per file,
    whether it must be uploaded or deleted locally.

    @type memory: Memory
    @type options: optparse.Values, instance
    @type localindex: dict
    @type serverindex: dict
    @rtype: list, list, Memory, dict
    """
    local_pathnames = [(localindex["dirnames"][d]["dirname"], localindex["dirnames"][d]["filenames"])
                       for d in localindex["dirnames"] if len(localindex["dirnames"][d]["filenames"]) > 0]
    local_pathnames_set = set()
    file_uploads = []

    # collect all non-hidden local file paths
    for ft in local_pathnames:
        for fname in ft[1]:
            if not str(fname["name"]).startswith("."):
                local_path = os.path.join(ft[0], fname["name"])
                local_pathnames_set.add(str(local_path))

    for local_path in local_pathnames_set:
        if os.path.exists(local_path):
            seen_local_path_before, memory = in_local_path_history(memory, local_path)
            rel_local_path = local_path.replace(options.dir, "")
            upload_file_object = {"local_path": local_path,
                                  "parent_short_id": None,
                                  "rel_path": rel_local_path}
            corresponding_server_nodes = [x for x in serverindex["doclist"]
                                          if x["doc"]["m_path_p64s"] == upload_file_object["rel_path"]]
            if not seen_local_path_before:
                # new local file: upload unless the server already has identical content
                # (bugfix: original tested emptiness twice — `len(x) == 0 or not x`)
                if not corresponding_server_nodes:
                    file_uploads.append(upload_file_object)
                else:
                    filedata, localindex = make_cryptogit_hash(upload_file_object["local_path"], options.dir, localindex)
                    if not corresponding_server_nodes[0]:
                        file_uploads.append(upload_file_object)
                    else:
                        if not strcmp(corresponding_server_nodes[0]["content_hash_latest_timestamp"][0], filedata["filehash"]):
                            file_uploads.append(upload_file_object)
                        else:
                            # identical content already on server: just remember we saw it
                            memory = add_local_path_history(memory, upload_file_object["local_path"])
            else:
                # seen before: upload only if it changed since the server version
                if len(corresponding_server_nodes) != 0:
                    filestats = read_file_to_fdict(local_path)
                    if filestats and corresponding_server_nodes[0]:
                        # bugfix: removed a useless `try: ... except: raise` wrapper
                        # that re-raised everything unchanged
                        if filestats["st_ctime"] > corresponding_server_nodes[0]["content_hash_latest_timestamp"][1]:
                            filedata, localindex = make_cryptogit_hash(local_path, options.dir, localindex)
                            if filedata["filehash"] != corresponding_server_nodes[0]["content_hash_latest_timestamp"][0]:
                                file_uploads.append(upload_file_object)

    file_del_local = []
    # perf: set for O(1) membership tests in the loop below
    server_paths = set(str(os.path.join(options.dir, x["doc"]["m_path_p64s"].lstrip(os.path.sep)))
                       for x in serverindex["doclist"])
    for local_path in local_pathnames_set:
        if os.path.exists(local_path):
            if local_path not in server_paths:
                # known locally but gone from the server: schedule local delete
                seen_local_path_before, memory = in_local_path_history(memory, local_path)
                if seen_local_path_before:
                    file_del_local.append(local_path)
    return file_uploads, file_del_local, memory, localindex
def index_and_encrypt(memory, options):
    """ index_and_encrypt

    Build the local index, hash every file into the cryptogit blob store,
    encrypt new blobs, optionally remove the plaintext tree, and garbage-collect
    blob-store entries that no longer correspond to any file on disk.

    @type memory: Memory
    @type options: optparse.Values, instance
    @rtype salt, secret, memory, localindex: str, str, Memory, dict
    """
    localindex = make_local_index(options)
    datadir = get_data_dir(options)
    if quick_lock_check(options):
        output_json({"message": "cryptobox is locked, nothing can be added now first decrypt (-d)"})
        return None, None, memory, localindex

    # reuse the stored salt when available, otherwise create and persist a new one
    salt = None
    if memory.has("salt_b64"):
        salt = base64.decodestring(memory.get("salt_b64"))
    if not salt:
        salt = Random.new().read(32)
        memory.set("salt_b64", base64.encodestring(salt))

    output_json({"msg": "preparing encrypt"})
    secret = password_derivation(options.password, salt)
    ensure_directory(datadir)
    new_blobs = {}
    file_cnt = 0
    new_objects = 0
    hash_set_on_disk = set()
    processed_files = 0
    numfiles = 0
    for dirhash in localindex["dirnames"]:
        numfiles += len(localindex["dirnames"][dirhash]["filenames"])

    for dirhash in localindex["dirnames"]:
        for fname in localindex["dirnames"][dirhash]["filenames"]:
            file_cnt += 1
            file_dir = localindex["dirnames"][dirhash]["dirname"]
            file_path = os.path.join(file_dir, fname["name"])
            if os.path.exists(file_path):
                # bugfix: processed_files was never incremented, leaving the
                # progress report stuck at 0/numfiles
                processed_files += 1
                update_progress(processed_files, numfiles, "indexing " + os.path.basename(file_path))
                filedata, localindex = make_cryptogit_hash(file_path, datadir, localindex)
                fname["hash"] = filedata["filehash"]
                hash_set_on_disk.add(filedata["filehash"])
                if not filedata["blob_exists"]:
                    new_blobs[filedata["filehash"]] = filedata
                    new_objects += 1
                # flush in batches to bound memory use
                if len(new_blobs) > 1500:
                    encrypt_new_blobs(secret, new_blobs)
                    new_blobs = {}

    # bugfix: original nested the same `len(new_blobs) > 0` test twice
    if len(new_blobs) > 0:
        encrypt_new_blobs(secret, new_blobs)
    cleanup_tempfiles()
    memory = store_localindex(memory, localindex)

    if options.remove:
        ld = os.listdir(options.dir)
        # bugfix: guard the remove — list.remove raises ValueError if absent
        if ".cryptobox" in ld:
            ld.remove(".cryptobox")
        processed_files = 0
        numfiles = len(ld)
        for fname in ld:
            fpath = os.path.join(options.dir, fname)
            processed_files += 1
            update_progress(processed_files, numfiles, "delete " + os.path.basename(fpath))
            if os.path.isdir(fpath):
                if os.path.exists(fpath):
                    shutil.rmtree(fpath, True)
            else:
                if os.path.exists(fpath):
                    os.remove(fpath)

    # garbage-collect blob-store entries whose hash no longer exists on disk
    # NOTE(review): original indentation was lost; assumed this GC runs
    # unconditionally (not only under options.remove) — confirm against history
    obsolete_blob_store_entries = set()
    blob_dirs = os.path.join(datadir, "blobs")
    ensure_directory(blob_dirs)
    for blob_dir in os.listdir(blob_dirs):
        blob_store = os.path.join(blob_dirs, blob_dir.lstrip(os.path.sep))
        if os.path.isdir(blob_store):
            for blob_file in os.listdir(blob_store):
                found = False
                for fhash in hash_set_on_disk:
                    if fhash in (blob_dir + blob_file):
                        found = True
                if not found:
                    obsolete_blob_store_entries.add(blob_dir + blob_file)

    for f_hash in obsolete_blob_store_entries:
        # blobs are sharded: first two hash chars name the directory
        blob_dir = os.path.join(blob_dirs, f_hash[:2])
        blob_path = os.path.join(blob_dir, f_hash[2:])
        os.remove(blob_path)
        if os.path.isdir(blob_dir):
            blob_entries = [f for f in os.listdir(blob_dir) if not f.startswith('.')]
            if len(blob_entries) == 0:
                shutil.rmtree(blob_dir, True)

    cleanup_tempfiles()
    return salt, secret, memory, localindex
def index_and_encrypt(memory, options):
    """ index_and_encrypt

    NOTE(review): this is a duplicate definition of index_and_encrypt — the same
    function appears twice in this file and this later definition shadows the
    earlier one at import time; the duplicate should be removed.

    Build the local index, hash every file into the cryptogit blob store,
    encrypt new blobs, optionally remove the plaintext tree, and garbage-collect
    blob-store entries that no longer correspond to any file on disk.

    @type memory: Memory
    @type options: optparse.Values, instance
    @rtype salt, secret, memory, localindex: str, str, Memory, dict
    """
    localindex = make_local_index(options)
    datadir = get_data_dir(options)
    if quick_lock_check(options):
        output_json({"message": "cryptobox is locked, nothing can be added now first decrypt (-d)"})
        return None, None, memory, localindex

    # reuse the stored salt when available, otherwise create and persist a new one
    salt = None
    if memory.has("salt_b64"):
        salt = base64.decodestring(memory.get("salt_b64"))
    if not salt:
        salt = Random.new().read(32)
        memory.set("salt_b64", base64.encodestring(salt))

    output_json({"msg": "preparing encrypt"})
    secret = password_derivation(options.password, salt)
    ensure_directory(datadir)
    new_blobs = {}
    file_cnt = 0
    new_objects = 0
    hash_set_on_disk = set()
    processed_files = 0
    numfiles = 0
    for dirhash in localindex["dirnames"]:
        numfiles += len(localindex["dirnames"][dirhash]["filenames"])

    for dirhash in localindex["dirnames"]:
        for fname in localindex["dirnames"][dirhash]["filenames"]:
            file_cnt += 1
            file_dir = localindex["dirnames"][dirhash]["dirname"]
            file_path = os.path.join(file_dir, fname["name"])
            if os.path.exists(file_path):
                # bugfix: processed_files was never incremented, leaving the
                # progress report stuck at 0/numfiles
                processed_files += 1
                update_progress(processed_files, numfiles, "indexing " + os.path.basename(file_path))
                filedata, localindex = make_cryptogit_hash(file_path, datadir, localindex)
                fname["hash"] = filedata["filehash"]
                hash_set_on_disk.add(filedata["filehash"])
                if not filedata["blob_exists"]:
                    new_blobs[filedata["filehash"]] = filedata
                    new_objects += 1
                # flush in batches to bound memory use
                if len(new_blobs) > 1500:
                    encrypt_new_blobs(secret, new_blobs)
                    new_blobs = {}

    # bugfix: original nested the same `len(new_blobs) > 0` test twice
    if len(new_blobs) > 0:
        encrypt_new_blobs(secret, new_blobs)
    cleanup_tempfiles()
    memory = store_localindex(memory, localindex)

    if options.remove:
        ld = os.listdir(options.dir)
        # bugfix: guard the remove — list.remove raises ValueError if absent
        if ".cryptobox" in ld:
            ld.remove(".cryptobox")
        processed_files = 0
        numfiles = len(ld)
        for fname in ld:
            fpath = os.path.join(options.dir, fname)
            processed_files += 1
            update_progress(processed_files, numfiles, "delete " + os.path.basename(fpath))
            if os.path.isdir(fpath):
                if os.path.exists(fpath):
                    shutil.rmtree(fpath, True)
            else:
                if os.path.exists(fpath):
                    os.remove(fpath)

    # garbage-collect blob-store entries whose hash no longer exists on disk
    # NOTE(review): original indentation was lost; assumed this GC runs
    # unconditionally (not only under options.remove) — confirm against history
    obsolete_blob_store_entries = set()
    blob_dirs = os.path.join(datadir, "blobs")
    ensure_directory(blob_dirs)
    for blob_dir in os.listdir(blob_dirs):
        blob_store = os.path.join(blob_dirs, blob_dir.lstrip(os.path.sep))
        if os.path.isdir(blob_store):
            for blob_file in os.listdir(blob_store):
                found = False
                for fhash in hash_set_on_disk:
                    if fhash in (blob_dir + blob_file):
                        found = True
                if not found:
                    obsolete_blob_store_entries.add(blob_dir + blob_file)

    for f_hash in obsolete_blob_store_entries:
        # blobs are sharded: first two hash chars name the directory
        blob_dir = os.path.join(blob_dirs, f_hash[:2])
        blob_path = os.path.join(blob_dir, f_hash[2:])
        os.remove(blob_path)
        if os.path.isdir(blob_dir):
            blob_entries = [f for f in os.listdir(blob_dir) if not f.startswith('.')]
            if len(blob_entries) == 0:
                shutil.rmtree(blob_dir, True)

    cleanup_tempfiles()
    return salt, secret, memory, localindex