Example #1
    def test_index_and_encrypt(self):
        """
        test_index_and_encrypt
        """
        self.reset_cb_dir()
        self.unzip_testfiles_synced()
        self.do_wait_for_tasks = False
        salt, secret, self.cbmemory, localindex = index_and_encrypt(self.cbmemory, self.cboptions)
        self.assertIsNotNone(salt)
        self.assertIsNotNone(secret)
        self.assertEqual(count_files_dir(get_blob_dir(self.cboptions)), 38)

        # add a new file
        with open(os.path.join(self.cboptions.dir, "hello world.txt"), "w") as fout:
            fout.write("hello world 123 Dit is random data")
        salt, secret, self.cbmemory, localindex = index_and_encrypt(self.cbmemory, self.cboptions)
        self.assertIsNotNone(salt)
        self.assertIsNotNone(secret)
        self.assertEqual(count_files_dir(get_blob_dir(self.cboptions)), 46)

        # same content, blob count should not rise
        with open(os.path.join(self.cboptions.dir, "hello world2.txt"), "w") as fout:
            fout.write("hello world 123 Dit is random data")
        salt, secret, self.cbmemory, localindex = index_and_encrypt(self.cbmemory, self.cboptions)
        self.assertIsNotNone(salt)
        self.assertIsNotNone(secret)
        self.assertEqual(count_files_dir(get_blob_dir(self.cboptions)), 46)
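
The last assertion holds because the blob store is content-addressed: writing a second file with identical content produces no new blob. A minimal illustrative sketch of that addressing, assuming the blobs are keyed by a SHA-1 content hash (the tests compare files with make_sha1_hash_file, and Example #4 derives the on-disk location from fhash[:2] and fhash[2:]); blob_path_for is a hypothetical helper, not part of the library:

import hashlib
import os


def blob_path_for(blob_dir, fpath, chunk_size=2 ** 20):
    """Sketch: where a file's blob would live in a content-addressed store."""
    sha = hashlib.sha1()

    with open(fpath, "rb") as fin:
        for chunk in iter(lambda: fin.read(chunk_size), b""):
            sha.update(chunk)

    fhash = sha.hexdigest()

    # identical content -> identical hash -> identical path, so no extra blob is stored
    return os.path.join(blob_dir, fhash[:2], fhash[2:])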
Example #2
    def test_encrypt_hide_decrypt(self):
        """
        encrypt_hide_decrypt
        """
        self.do_wait_for_tasks = False
        encrypt = 1
        decrypt = 1
        self.reset_cb_dir()
        self.unzip_testfiles_synced()
        p = os.path.join(os.getcwd(), "testdata", "test")
        org_files = get_files_dir(p, ignore_hidden=True)
        org_files = [x for x in org_files if "memory.pickle.json" not in x]

        # encrypt and hide the config, then restore it and rebuild the file tree
        # (hide_config / restore_hidden_config / decrypt_and_build_filetree)
        if encrypt:
            self.delete_hidden_configs()
            self.do_wait_for_tasks = False
            self.cboptions.remove = True
            salt, secret, self.cbmemory, localindex = index_and_encrypt(self.cbmemory, self.cboptions)
            datadir = get_data_dir(self.cboptions)
            self.cbmemory.save(datadir)
            hide_config(self.cboptions, salt, secret)
            self.assertEqual(count_files_dir(get_blob_dir(self.cboptions)), 0)

        if decrypt:
            os.system("rm -Rf testdata/test")
            if not encrypt:
                os.system("cd testdata; unzip -o hidden_config.zip > /dev/null")

            options = self.cboptions
            options.encrypt = False
            options.decrypt = True
            options.remove = False
            secret = restore_hidden_config(options)
            datadir = get_data_dir(self.cboptions)
            memory = Memory()
            memory.load(datadir)
            decrypt_and_build_filetree(memory, options, secret)

        org_files2 = get_files_dir(p, ignore_hidden=True)
        self.assertEqual(set(org_files), set(org_files2))
Example #3
    def test_super_large_file(self):
        """
        test_super_large_file
        """
        self.reset_cb_db_clean()
        self.unzip_testfiles_clean()
        self.make_testfile("3000MB.txt", 3000)
        os.system("rm testdata/test/all_types/*")
        os.system("rm -Rf testdata/test/smalltest")
        os.system("cp testdata/3000MB.txt testdata/test/all_types/")
        localindex, self.cbmemory = sync_server(self.cbmemory, self.cboptions)
        self.assertTrue(self.files_synced())
        datadir = get_data_dir(self.cboptions)
        self.cbmemory.save(datadir)
        p = os.path.join(os.getcwd(), "testdata", "test")
        org_files = get_files_dir(p, ignore_hidden=True)
        org_files = [x for x in org_files if "memory.pickle.json" not in x]
        org_files1 = [make_sha1_hash_file(fpath=x) for x in org_files]
        self.delete_hidden_configs()
        self.do_wait_for_tasks = False
        self.cboptions.remove = True
        salt, secret, self.cbmemory, localindex = index_and_encrypt(self.cbmemory, self.cboptions)
        datadir = get_data_dir(self.cboptions)
        self.cbmemory.save(datadir)
        hide_config(self.cboptions, salt, secret)
        self.assertEqual(count_files_dir(get_blob_dir(self.cboptions)), 0)
        options = self.cboptions
        options.encrypt = False
        options.decrypt = True
        options.remove = False
        secret = restore_hidden_config(options)
        datadir = get_data_dir(self.cboptions)
        memory = Memory()
        memory.load(datadir)
        decrypt_and_build_filetree(memory, options, secret)
        org_files2 = get_files_dir(p, ignore_hidden=True)
        org_files3 = [make_sha1_hash_file(fpath=x) for x in org_files2]
        self.assertEqual(set(org_files1), set(org_files3))
        os.system("rm -Rf testdata/test")
        os.system("rm testdata/3000MB.txt")
Example #4
def get_unique_content(memory, options, all_unique_nodes, local_paths):
    """
    @type memory: Memory
    @type options: instance
    @type all_unique_nodes: dict
    @type local_paths: tuple
    """
    if len(local_paths) == 0:
        return memory

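    # select the nodes whose blob is not present locally (have_blob) and whose
    # target path does not exist yet, sorted smallest first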
    unique_nodes_hashes = [fhash for fhash in all_unique_nodes if not have_blob(options, fhash)]
    unique_nodes = [all_unique_nodes[fhash] for fhash in all_unique_nodes if fhash in unique_nodes_hashes]
    downloaded_files_cnt = 0
    unique_nodes = [node for node in unique_nodes if not os.path.exists(os.path.join(options.dir, node["doc"]["m_path_p64s"].lstrip(os.path.sep)))]
    unique_nodes = sorted(unique_nodes, key=lambda k: k["doc"]["m_size_p64s"])

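    # download each missing blob and hand it to write_blobs_to_filepaths for the
    # matching local paths, reporting per-item and overall progress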
    for node in unique_nodes:
        update_progress(downloaded_files_cnt, len(unique_nodes), "downloading " + str(node["doc"]["m_name"]))
        content_path, content_hash = download_blob(memory, options, node)
        update_item_progress(100)
        output_json({"item_progress": 0})
        memory, file_nodes_left = write_blobs_to_filepaths(memory, options, local_paths, None, content_hash, content_path)
        downloaded_files_cnt += 1
    update_progress(downloaded_files_cnt, len(unique_nodes), "downloading done")

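    # for every requested local path, record it in the local path history and,
    # if the file is missing on disk, look up a source copy with the same content hash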
    for lp in local_paths:
        memory = add_local_path_history(memory, os.path.join(options.dir, lp["doc"]["m_path_p64s"].lstrip(os.sep)))
        source_path = None
        file_path = os.path.join(options.dir, lp["doc"]["m_path_p64s"].lstrip(os.path.sep))

        if not os.path.exists(file_path):
            for lph in all_unique_nodes:
                if lph == lp["content_hash_latest_timestamp"][0]:
                    source_path = os.path.join(options.dir, all_unique_nodes[lph]["doc"]["m_path_p64s"].lstrip(os.path.sep))
                    break

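        # copy the content over; if the duplicate local copy is gone as well,
        # decrypt the blob directly from the blob store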
        datapath = data = None
        if source_path:
            if not os.path.exists(source_path):
                fhash = lp["content_hash_latest_timestamp"][0]
                source_path = os.path.join(get_blob_dir(options), fhash[:2])
                source_path = os.path.join(source_path, fhash[2:])
                memory = add_path_history(file_path, memory)
                secret = password_derivation(options.password, base64.b64decode(memory.get("salt_b64")))
                dec_file = decrypt_file(source_path, secret)
                datapath = dec_file.name

            if not datapath:
                st_ctime, st_atime, st_mtime, st_mode, st_uid, st_gid, st_size, datapath = read_file(source_path, True)
            else:
                st_ctime, st_atime, st_mtime, st_mode, st_uid, st_gid, st_size, datapath_dummy = read_file(source_path)

            st_mtime = int(lp["content_hash_latest_timestamp"][1])
            write_file(file_path, data, datapath, st_mtime, st_mtime, st_mode, st_uid, st_gid)

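    # any paths that still could not be written are resolved through the local
    # index: copy from an already-present local file with a matching content hash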
    local_paths_not_written = [fp for fp in local_paths if not os.path.exists(os.path.join(options.dir, fp["doc"]["m_path_p64s"].lstrip(os.path.sep)))]

    if len(local_paths_not_written) > 0:
        local_index = get_localindex(memory)
        local_path_hashes = {}

        for ldir in local_index["dirnames"]:
            for f in local_index["dirnames"][ldir]["filenames"]:
                if "hash" in f:
                    local_path_hashes[f["hash"]] = os.path.join(local_index["dirnames"][ldir]["dirname"], f["name"])

        for lfnw in local_paths_not_written:
            w = False

            for lfh in local_path_hashes:
                if not w:
                    if strcmp(lfnw["content_hash_latest_timestamp"][0], lfh):
                        w = True
                        with open(local_path_hashes[lfh]) as fin:
                            with open(os.path.join(options.dir, lfnw["doc"]["m_path_p64s"].lstrip(os.path.sep)), "w") as fout:
                                fout.write(fin.read())

    return memory