Example no. 1
def decrypt_and_build_filetree(memory, options, secret):
    """
    decrypt_and_build_filetree
    @type memory: Memory
    @type options: optparse.Values, instance
    @type secret: str
    """
    if not secret:
        raise Exception("decrypt_and_build_filetree: no secret given")

    datadir = get_data_dir(options)

    if not os.path.exists(datadir):
        print "cba_index.py:365", "nothing to decrypt", datadir, "does not exists"
        return memory

    output_json({"msg": "preparing decrypt"})
    blobdir = os.path.join(datadir, "blobs")
    localindex = get_localindex(memory)
    hashes = set()
    restored_hashes = []

    if localindex:
        for dirhash in localindex["dirnames"]:
            if "dirname" in localindex["dirnames"][dirhash]:
                if not os.path.exists(
                        localindex["dirnames"][dirhash]["dirname"]):
                    ensure_directory(
                        localindex["dirnames"][dirhash]["dirname"])

            for cfile in localindex["dirnames"][dirhash]["filenames"]:
                fpath = os.path.join(
                    localindex["dirnames"][dirhash]["dirname"], cfile["name"])

                if not os.path.exists(fpath):
                    hashes.add((cfile["hash"], cfile["name"]))
                else:
                    restored_hashes.append(cfile["hash"])

    processed_files = 0
    numfiles = len(hashes)

    for cfile in hashes:
        processed_files += 1
        update_progress(processed_files, numfiles, cfile[1])

        #noinspection PyUnusedLocal
        paths = decrypt_blob_to_filepaths(blobdir, localindex, cfile[0],
                                          secret)

    memory = store_localindex(memory, localindex)
    cleanup_tempfiles()
    return memory
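
All of these examples report status through two small helpers that the snippets never define. The sketches below are assumptions inferred purely from the call sites (a step counter, a total, and a message for update_progress; a dict for output_json); the project's real implementations may differ:

import json
import sys


def output_json(obj):
    # Assumed protocol: one JSON object per line on stdout.
    sys.stdout.write(json.dumps(obj) + "\n")
    sys.stdout.flush()


def update_progress(step, total, message):
    # Assumed behavior: report overall progress as a percentage.
    percentage = int((step / float(total)) * 100) if total else 100
    output_json({"progress": percentage, "msg": message})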
Example no. 2
def encrypt_new_blobs(secret, new_blobs):
    """
    @type secret: str or unicode
    @type new_blobs: dict
    """
    processed_files = 0
    numfiles = len(new_blobs)

    for fhash in new_blobs:
        ensure_directory(new_blobs[fhash]["blobdir"])
        processed_files += 1
        update_progress(processed_files, numfiles, "encrypting: " + os.path.basename(new_blobs[fhash]["fpath"]))
        read_and_encrypt_file(new_blobs[fhash]["fpath"], new_blobs[fhash]["blobpath"], secret)
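
encrypt_new_blobs assumes each blob directory exists before writing to it. A minimal sketch of ensure_directory, assuming it simply creates any missing directories (the real helper may do more):

import os


def ensure_directory(path):
    # Create the directory (and any missing parents) if it does not exist yet.
    if not os.path.exists(path):
        os.makedirs(path)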
Example no. 3
def wait_for_tasks(memory, options, result_message_param=None):
    """
    wait_for_tasks
    @type memory: Memory
    @type options: optparse.Values, instance
    """
    initial_num_tasks = -1

    while True:
        session = None

        if memory.has("session"):
            session = memory.get("session")

        result, memory = on_server(memory, options, "crypto_tasks", payload={}, session=session)

        if result:
            if len(result) > 1:
                if result[1]:
                    num_tasks = len([x for x in result[1] if x["m_command_object"] != "StorePassword"])

                    if initial_num_tasks == -1:
                        initial_num_tasks = num_tasks

                    if not result_message_param:
                        result_message = "waiting for tasks to finish on server"
                    else:
                        result_message = result_message_param
                    update_progress(initial_num_tasks - num_tasks, initial_num_tasks, result_message)
                    if num_tasks == 0:
                        return memory

                    # throttle polling and log only when the default message is used
                    if not result_message_param:
                        if num_tasks > 3:
                            time.sleep(1)
                            if num_tasks > 6:
                                log_json("waiting for tasks " + str(num_tasks))

                else:
                    return memory

        if not result_message_param:
            time.sleep(1)
        else:
            time.sleep(0.5)
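
A hypothetical call site for wait_for_tasks; the memory and options objects are assumed to be constructed elsewhere, as in the other examples:

# Block until the server reports no outstanding crypto tasks
# (StorePassword tasks are excluded from the count above).
memory = wait_for_tasks(memory, options, result_message_param="waiting for encryption to finish")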
Example no. 4
def upload_files(memory, options, serverindex, file_uploads):
    """
    upload_files
    @type memory: Memory
    @type options: optparse.Values, instance
    @type serverindex: dict
    @type file_uploads: tuple
    """
    path_guid_cache = {}
    cnt = 0

    for uf in file_uploads:
        try:
            parent_path = uf["local_path"].replace(options.dir, "")
            parent_path = os.path.dirname(parent_path)
            uf["parent_path"] = parent_path

            if parent_path in path_guid_cache:
                uf["parent_short_id"] = path_guid_cache[parent_path]
            else:
                uf["parent_short_id"], memory = path_to_server_guid(memory, options, serverindex, parent_path)
                path_guid_cache[parent_path] = uf["parent_short_id"]
                update_progress(cnt, len(file_uploads), "checking path " + parent_path)

            cnt += 1
        except NoParentFound:
            uf["parent_short_id"] = uf["parent_path"] = ""

    files_uploaded = []
    file_uploads = sorted(file_uploads, key=lambda k: k["size"])

    for uf in file_uploads:
        update_progress(len(files_uploaded) + 1, len(file_uploads), "uploading " + os.path.basename(uf["local_path"]))
        if os.path.exists(uf["local_path"]):
            file_path = upload_file(memory.get("session"), options.server, options.cryptobox, uf["local_path"], path_to_relative_path_unix_style(memory, uf["local_path"]), uf["parent_short_id"])
            files_uploaded.append(file_path)
            output_json({"item_progress": 0})

    return memory, tuple(files_uploaded)
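
upload_files catches NoParentFound when a parent folder cannot be resolved to a server guid. The exception class itself is not shown in these examples; a minimal sketch consistent with how it is used:

class NoParentFound(Exception):
    # Assumed meaning: path_to_server_guid found no server-side
    # folder matching the requested parent path.
    pass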
Example no. 5
def get_unique_content(memory, options, all_unique_nodes, local_paths):
    """
    @type memory: Memory
    @type options: optparse.Values, instance
    @type all_unique_nodes: dict
    @type local_paths: tuple
    """
    if len(local_paths) == 0:
        return memory

    unique_nodes_hashes = [fhash for fhash in all_unique_nodes if not have_blob(options, fhash)]
    unique_nodes = [all_unique_nodes[fhash] for fhash in all_unique_nodes if fhash in unique_nodes_hashes]
    downloaded_files_cnt = 0
    unique_nodes = [node for node in unique_nodes if not os.path.exists(os.path.join(options.dir, node["doc"]["m_path_p64s"].lstrip(os.path.sep)))]
    unique_nodes = sorted(unique_nodes, key=lambda k: k["doc"]["m_size_p64s"])

    for node in unique_nodes:
        update_progress(downloaded_files_cnt, len(unique_nodes), "downloading " + str(node["doc"]["m_name"]))
        content_path, content_hash = download_blob(memory, options, node)
        update_item_progress(100)
        output_json({"item_progress": 0})
        memory, file_nodes_left = write_blobs_to_filepaths(memory, options, local_paths, None, content_hash, content_path)
        downloaded_files_cnt += 1
    update_progress(downloaded_files_cnt, len(unique_nodes), "downloading done")

    for lp in local_paths:
        memory = add_local_path_history(memory, os.path.join(options.dir, lp["doc"]["m_path_p64s"].lstrip(os.sep)))
        source_path = None
        file_path = os.path.join(options.dir, lp["doc"]["m_path_p64s"].lstrip(os.path.sep))

        if not os.path.exists(file_path):
            for lph in all_unique_nodes:
                if lph == lp["content_hash_latest_timestamp"][0]:
                    source_path = os.path.join(options.dir, all_unique_nodes[lph]["doc"]["m_path_p64s"].lstrip(os.path.sep))
                    break

        datapath = data = None
        if source_path:
            if not os.path.exists(source_path):
                fhash = lp["content_hash_latest_timestamp"][0]
                source_path = os.path.join(get_blob_dir(options), fhash[:2])
                source_path = os.path.join(source_path, fhash[2:])
                memory = add_path_history(file_path, memory)
                secret = password_derivation(options.password, base64.decodestring(memory.get("salt_b64")))
                dec_file = decrypt_file(source_path, secret)
                datapath = dec_file.name

            if not datapath:
                st_ctime, st_atime, st_mtime, st_mode, st_uid, st_gid, st_size, datapath = read_file(source_path, True)
            else:
                st_ctime, st_atime, st_mtime, st_mode, st_uid, st_gid, st_size, datapath_dummy = read_file(source_path)

            st_mtime = int(lp["content_hash_latest_timestamp"][1])
            write_file(file_path, data, datapath, st_mtime, st_mtime, st_mode, st_uid, st_gid)

    local_paths_not_written = [fp for fp in local_paths if not os.path.exists(os.path.join(options.dir, fp["doc"]["m_path_p64s"].lstrip(os.path.sep)))]

    if len(local_paths_not_written) > 0:
        local_index = get_localindex(memory)
        local_path_hashes = {}

        for ldir in local_index["dirnames"]:
            for f in local_index["dirnames"][ldir]["filenames"]:
                if "hash" in f:
                    local_path_hashes[f["hash"]] = os.path.join(local_index["dirnames"][ldir]["dirname"], f["name"])

        for lfnw in local_paths_not_written:
            w = False

            for lfh in local_path_hashes:
                if not w:
                    if strcmp(lfnw["content_hash_latest_timestamp"][0], lfh):
                        w = True
                        open(os.path.join(options.dir, lfnw["doc"]["m_path_p64s"].lstrip(os.path.sep)), "w").write(open(local_path_hashes[lfh]).read())

    return memory
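
get_unique_content filters on have_blob, which these examples do not define. A sketch under the assumption that it checks the two-level blob store layout (fhash[:2]/fhash[2:]) that the surrounding code uses; get_blob_dir is the same helper called inside get_unique_content:

import os


def have_blob(options, fhash):
    # Assumed layout: <blob dir>/<first two hash chars>/<remaining chars>,
    # matching how get_unique_content and index_and_encrypt build blob paths.
    blob_path = os.path.join(get_blob_dir(options), fhash[:2], fhash[2:])
    return os.path.exists(blob_path)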
Example no. 6
def index_and_encrypt(memory, options):
    """
    index_and_encrypt
    @type memory: Memory
    @type options: optparse.Values, instance
    @rtype salt, secret, memory, localindex: str, str, Memory, dict
    """
    localindex = make_local_index(options)
    datadir = get_data_dir(options)

    if quick_lock_check(options):
        output_json({
            "message":
            "cryptobox is locked, nothing can be added now; first decrypt (-d)"
        })
        return None, None, memory, localindex

    salt = None

    if memory.has("salt_b64"):
        salt = base64.decodestring(memory.get("salt_b64"))

    if not salt:
        salt = Random.new().read(32)
        memory.set("salt_b64", base64.encodestring(salt))

    output_json({"msg": "preparing encrypt"})
    secret = password_derivation(options.password, salt)
    ensure_directory(datadir)
    new_blobs = {}
    file_cnt = 0
    new_objects = 0
    hash_set_on_disk = set()
    processed_files = 0
    numfiles = 0

    for dirhash in localindex["dirnames"]:
        numfiles += len(localindex["dirnames"][dirhash]["filenames"])

    for dirhash in localindex["dirnames"]:
        for fname in localindex["dirnames"][dirhash]["filenames"]:
            file_cnt += 1
            file_dir = localindex["dirnames"][dirhash]["dirname"]
            file_path = os.path.join(file_dir, fname["name"])

            if os.path.exists(file_path):
                update_progress(processed_files, numfiles,
                                "indexing " + os.path.basename(file_path))
                filedata, localindex = make_cryptogit_hash(
                    file_path, datadir, localindex)
                fname["hash"] = filedata["filehash"]
                hash_set_on_disk.add(filedata["filehash"])
                if not filedata["blob_exists"]:
                    new_blobs[filedata["filehash"]] = filedata
                    new_objects += 1

                if len(new_blobs) > 1500:
                    encrypt_new_blobs(secret, new_blobs)
                    new_blobs = {}

    if len(new_blobs) > 0:
        encrypt_new_blobs(secret, new_blobs)
    cleanup_tempfiles()
    memory = store_localindex(memory, localindex)

    if options.remove:
        ld = os.listdir(options.dir)
        ld.remove(".cryptobox")
        processed_files = 0
        numfiles = len(ld)

        for fname in ld:
            fpath = os.path.join(options.dir, fname)
            processed_files += 1
            update_progress(processed_files, numfiles,
                            "delete " + os.path.basename(fpath))
            if os.path.isdir(fpath):
                if os.path.exists(fpath):
                    shutil.rmtree(fpath, True)
            else:
                if os.path.exists(fpath):
                    os.remove(fpath)

    obsolete_blob_store_entries = set()
    blob_dirs = os.path.join(datadir, "blobs")
    ensure_directory(blob_dirs)

    for blob_dir in os.listdir(blob_dirs):
        blob_store = os.path.join(blob_dirs, blob_dir.lstrip(os.path.sep))

        if os.path.isdir(blob_store):
            for blob_file in os.listdir(blob_store):
                found = False

                for fhash in hash_set_on_disk:
                    if fhash in (blob_dir + blob_file):
                        found = True
                        break
                if not found:
                    obsolete_blob_store_entries.add(blob_dir + blob_file)

    for f_hash in obsolete_blob_store_entries:
        blob_dir = os.path.join(blob_dirs, f_hash[:2])
        blob_path = os.path.join(blob_dir, f_hash[2:])
        os.remove(blob_path)
        if os.path.isdir(blob_dir):
            blob_entries = [
                f for f in os.listdir(blob_dir) if not f.startswith('.')
            ]

            if len(blob_entries) == 0:
                shutil.rmtree(blob_dir, True)

    cleanup_tempfiles()
    return salt, secret, memory, localindex
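
A hypothetical round trip tying the examples together; constructing memory and options is not shown in the source and is assumed here:

salt, secret, memory, localindex = index_and_encrypt(memory, options)

if secret is not None:
    # index_and_encrypt returns (None, None, ...) when the cryptobox is locked.
    memory = decrypt_and_build_filetree(memory, options, secret)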