Example No. 1
 def test_mutation_history(self):
     """
     test_mutation_history
     """
     self.reset_cb_dir()
     self.reset_cb_db_clean()
     ensure_directory(self.cboptions.dir)
     localindex, self.cbmemory = sync_server(self.cbmemory, self.cboptions)
     os.mkdir("testdata/test/foo")
     localindex, self.cbmemory = sync_server(self.cbmemory, self.cboptions)
     os.system("rm -Rf testdata/test/foo")
     localindex, self.cbmemory = sync_server(self.cbmemory, self.cboptions)
     self.assertEqual(self.directories_synced(), True)
     self.all_changes_asserted_zero()
     os.mkdir("testdata/test/foo")
     self.assertEqual(os.path.exists("testdata/test/foo"), True)
     dir_del_local, dir_del_server, dir_make_local, dir_make_server, file_del_local, file_del_server, file_downloads, file_uploads, rename_server, folder_rename_server, rename_local_folders = self.get_sync_changes()
     self.assertEqual(len(dir_make_server), 1)
     localindex, self.cbmemory = sync_server(self.cbmemory, self.cboptions)
     os.mkdir("testdata/test/foo2")
     os.system("ls > testdata/test/foo2/test.txt")
     localindex, self.cbmemory = sync_server(self.cbmemory, self.cboptions)
     os.system("rm -Rf testdata/test/foo2/test.txt")
     dir_del_local, dir_del_server, dir_make_local, dir_make_server, file_del_local, file_del_server, file_downloads, file_uploads, rename_server, folder_rename_server, rename_local_folders = self.get_sync_changes()
     self.assertEqual(len(file_del_server), 1)
     localindex, self.cbmemory = sync_server(self.cbmemory, self.cboptions)
     os.system("ls > testdata/test/foo2/test.txt")
     dir_del_local, dir_del_server, dir_make_local, dir_make_server, file_del_local, file_del_server, file_downloads, file_uploads, rename_server, folder_rename_server, rename_local_folders = self.get_sync_changes()
     self.assertEqual(len(file_uploads), 1)
Example No. 2
 def reset_cb_dir(self):
     """
     complete_reset
     """
     os.system("rm -Rf testdata/test")
     ensure_directory(self.cboptions.dir)
     ensure_directory(get_data_dir(self.cboptions))
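
Every example on this page leans on `ensure_directory`, whose definition is not shown here. A minimal sketch of what it plausibly does, assuming it simply creates the directory tree when missing (the real helper may log or handle races differently):

import os

def ensure_directory(path):
    """
    Hypothetical reconstruction: create `path` (including parents)
    if it does not exist yet.
    """
    if not os.path.exists(path):
        os.makedirs(path)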
Example No. 3
 def unzip_testfiles_clean(self):
     """
     unzip_testfiles_clean
     """
     ensure_directory(self.cboptions.dir)
     ensure_directory(get_data_dir(self.cboptions))
     os.system("cd testdata; cp testmap_clean.zip test.zip")
     os.system("cd testdata; unzip -o test.zip > /dev/null")
     os.system("rm testdata/test.zip")
Example No. 4
 def unzip_testfiles_configonly(self):
     """
     unzip_testfiles_configonly
     """
     ensure_directory(self.cboptions.dir)
     ensure_directory(get_data_dir(self.cboptions))
     os.system("cd testdata; cp testmap_config.zip test.zip")
     os.system("cd testdata; unzip -o test.zip > /dev/null")
     os.system("rm testdata/test.zip")
     self.cbmemory.load(get_data_dir(self.cboptions))
Example No. 5
 def test_sync_changed_file(self):
     """
     test_sync_changed_file
     """
     self.reset_cb_db_clean()
     ensure_directory(self.cboptions.dir)
     os.system("echo 'hello' > testdata/test/hello.txt")
     localindex, self.cbmemory = sync_server(self.cbmemory, self.cboptions)
     os.system("echo 'hello world' > testdata/test/hello.txt")
     dir_del_local, dir_del_server, dir_make_local, dir_make_server, file_del_local, file_del_server, file_downloads, file_uploads, rename_server, folder_rename_server, rename_local_folders = self.get_sync_changes()
     self.assertEqual(len(file_uploads), 1)
Example No. 6
def decrypt_and_build_filetree(memory, options, secret):
    """
    decrypt_and_build_filetree
    @type memory: Memory
    @type options: optparse.Values, instance
    @type secret: str
    """
    if not secret:
        raise Exception("decrypt_and_build_filetree: no secret given")

    datadir = get_data_dir(options)

    if not os.path.exists(datadir):
        print "cba_index.py:365", "nothing to decrypt", datadir, "does not exists"
        return memory

    output_json({"msg": "preparing decrypt"})
    blobdir = os.path.join(datadir, "blobs")
    localindex = get_localindex(memory)
    hashes = set()
    restored_hashes = []

    if localindex:
        for dirhash in localindex["dirnames"]:
            if "dirname" in localindex["dirnames"][dirhash]:
                if not os.path.exists(localindex["dirnames"][dirhash]["dirname"]):
                    ensure_directory(localindex["dirnames"][dirhash]["dirname"])

            for cfile in localindex["dirnames"][dirhash]["filenames"]:
                fpath = os.path.join(localindex["dirnames"][dirhash]["dirname"], cfile["name"])

                if not os.path.exists(fpath):
                    hashes.add((cfile["hash"], cfile["name"]))
                else:
                    restored_hashes.append(cfile["hash"])

    processed_files = 0
    numfiles = len(hashes)

    for cfile in hashes:
        processed_files += 1
        update_progress(processed_files, numfiles, cfile[1])

        #noinspection PyUnusedLocal
        paths = decrypt_blob_to_filepaths(blobdir, localindex, cfile[0], secret)

    memory = store_localindex(memory, localindex)
    cleanup_tempfiles()
    return memory
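
The `localindex` dict walked above is keyed by a per-directory hash under "dirnames"; each entry carries the directory path and its file records. A minimal hypothetical instance, inferred only from the key accesses in this example (hash values are made up):

localindex = {
    "dirnames": {
        "1f2e3d": {                                      # directory hash (illustrative)
            "dirname": "testdata/test/docs",
            "filenames": [
                {"name": "hello.txt", "hash": "9a8b7c"}, # file hash (illustrative)
            ],
        },
    },
}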
Example No. 7
def encrypt_new_blobs(secret, new_blobs):
    """
    @type secret: str or unicode
    @type new_blobs: dict
    """
    processed_files = 0
    numfiles = len(new_blobs)

    for fhash in new_blobs:
        ensure_directory(new_blobs[fhash]["blobdir"])
        update_progress(processed_files, numfiles, "encrypting: " + os.path.basename(new_blobs[fhash]["fpath"]))
        read_and_encrypt_file(new_blobs[fhash]["fpath"], new_blobs[fhash]["blobpath"], secret)
        processed_files += 1
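
`encrypt_new_blobs` only touches three keys per entry. A hypothetical `new_blobs` record, inferred from those accesses and from the two-character blob fan-out used in `index_and_encrypt` below; paths and hash are illustrative, and the `.cryptobox` data-dir location is an assumption taken from the `ld.remove(".cryptobox")` call further down:

new_blobs = {
    "9a8b7c": {                                        # keyed by file hash
        "fpath": "testdata/test/hello.txt",            # plaintext source file
        "blobdir": "testdata/test/.cryptobox/blobs/9a",        # assumed layout
        "blobpath": "testdata/test/.cryptobox/blobs/9a/8b7c",  # encrypted blob
    },
}
# secret would come from password_derivation(options.password, salt)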
Example No. 8
def make_directories_local(memory, options, localindex, folders):
    """
    @type memory: Memory
    @type options: optparse.Values, instance
    @type localindex: dict
    @type folders: tuple
    """
    for f in folders:
        ensure_directory(f["name"])
        memory = add_local_path_history(memory, f["name"])
        memory = add_server_path_history(memory, f["relname"])
        arg = {"DIR": options.dir,
               "folders": {"dirnames": {}},
               "numfiles": 0}
        index_files_visit(arg, f["name"], [])

        for k in arg["folders"]["dirnames"]:
            localindex["dirnames"][k] = arg["folders"]["dirnames"][k]

    return memory
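
The `folders` argument pairs a local path (`name`) with its server-relative counterpart (`relname`). A hypothetical call matching the key accesses above; the paths are illustrative, and `memory`, `options`, and `localindex` are assumed to come from the surrounding sync machinery:

folders = ({"name": "testdata/test/photos", "relname": "/photos"},)
memory = make_directories_local(memory, options, localindex, folders)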
Example No. 9
def index_and_encrypt(memory, options):
    """
    index_and_encrypt
    @type memory: Memory
    @type options: optparse.Values, instance
    @rtype salt, secret, memory, localindex: str, str, Memory, dict
    """
    localindex = make_local_index(options)
    datadir = get_data_dir(options)

    if quick_lock_check(options):
        output_json({"message": "cryptobox is locked, nothing can be added now; first decrypt (-d)"})
        return None, None, memory, localindex

    salt = None

    if memory.has("salt_b64"):
        salt = base64.decodestring(memory.get("salt_b64"))

    if not salt:
        salt = Random.new().read(32)
        memory.set("salt_b64", base64.encodestring(salt))

    output_json({"msg": "preparing encrypt"})
    secret = password_derivation(options.password, salt)
    ensure_directory(datadir)
    new_blobs = {}
    file_cnt = 0
    new_objects = 0
    hash_set_on_disk = set()
    processed_files = 0
    numfiles = 0

    for dirhash in localindex["dirnames"]:
        numfiles += len(localindex["dirnames"][dirhash]["filenames"])

    for dirhash in localindex["dirnames"]:
        for fname in localindex["dirnames"][dirhash]["filenames"]:
            file_cnt += 1
            file_dir = localindex["dirnames"][dirhash]["dirname"]
            file_path = os.path.join(file_dir, fname["name"])

            if os.path.exists(file_path):
                processed_files += 1
                update_progress(processed_files, numfiles, "indexing " + os.path.basename(file_path))
                filedata, localindex = make_cryptogit_hash(file_path, datadir, localindex)
                fname["hash"] = filedata["filehash"]
                hash_set_on_disk.add(filedata["filehash"])
                if not filedata["blob_exists"]:
                    new_blobs[filedata["filehash"]] = filedata
                    new_objects += 1

                if len(new_blobs) > 1500:
                    encrypt_new_blobs(secret, new_blobs)
                    new_blobs = {}

    if len(new_blobs) > 0:
        encrypt_new_blobs(secret, new_blobs)
    cleanup_tempfiles()
    memory = store_localindex(memory, localindex)

    if options.remove:
        ld = os.listdir(options.dir)
        ld.remove(".cryptobox")
        processed_files = 0
        numfiles = len(ld)

        for fname in ld:
            fpath = os.path.join(options.dir, fname)
            processed_files += 1
            update_progress(processed_files, numfiles, "delete " + os.path.basename(fpath))
            if os.path.isdir(fpath):
                if os.path.exists(fpath):
                    shutil.rmtree(fpath, True)
            else:
                if os.path.exists(fpath):
                    os.remove(fpath)

    obsolete_blob_store_entries = set()
    blob_dirs = os.path.join(datadir, "blobs")
    ensure_directory(blob_dirs)

    for blob_dir in os.listdir(blob_dirs):
        blob_store = os.path.join(blob_dirs, blob_dir.lstrip(os.path.sep))

        if os.path.isdir(blob_store):
            for blob_file in os.listdir(blob_store):
                found = False

                for fhash in hash_set_on_disk:
                    if fhash in (blob_dir + blob_file):
                        found = True

                if not found:
                    obsolete_blob_store_entries.add(blob_dir + blob_file)

    for f_hash in obsolete_blob_store_entries:
        blob_dir = os.path.join(blob_dirs, f_hash[:2])
        blob_path = os.path.join(blob_dir, f_hash[2:])
        os.remove(blob_path)
        if os.path.isdir(blob_dir):
            blob_entries = [f for f in os.listdir(blob_dir) if not f.startswith('.')]

            if len(blob_entries) == 0:
                shutil.rmtree(blob_dir, True)

    cleanup_tempfiles()
    return salt, secret, memory, localindex
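
The cleanup pass above reconstitutes each blob path by splitting the hash into a two-character fan-out directory plus remainder. A small sketch of that mapping, assuming the same layout as the `f_hash[:2]` / `f_hash[2:]` split in the loop; the helper name is hypothetical:

import os

def blob_path_for_hash(blob_dirs, f_hash):
    """
    Hypothetical helper: the first two hash characters pick the
    fan-out subdirectory, the remainder names the blob file.
    """
    return os.path.join(blob_dirs, f_hash[:2], f_hash[2:])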