Example #1
def add_path_history(fpath, memory):
    """
    @type fpath: str, unicode
    @type memory: Memory
    """
    memory = add_server_path_history(memory, fpath)
    memory = add_local_path_history(memory, fpath)
    return memory
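A note on the helpers used throughout these examples: Memory, add_server_path_history and add_local_path_history come from the surrounding project and are not shown here. As a rough sketch of the contract they imply (the class body and helper internals below are hypothetical stand-ins, not the project's actual implementation):

# Hypothetical sketch of a Memory-backed path-history store.
class Memory(object):
    def __init__(self):
        self._data = {}

    def get(self, key, default=None):
        return self._data.get(key, default)

    def set(self, key, value):
        self._data[key] = value

def _add_history(memory, key, path):
    # record each path once, preserving insertion order
    history = memory.get(key, [])
    if path not in history:
        history.append(path)
    memory.set(key, history)
    return memory

def add_server_path_history(memory, path):
    return _add_history(memory, "server_path_history", path)

def add_local_path_history(memory, path):
    return _add_history(memory, "local_path_history", path)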
Example #2
def write_blob_to_filepath(memory, node, options, data, content_path):
    """
    @type memory: Memory
    @type node: dict
    @type options: optparse.Values, instance
    @type data: str or unicode
    @type content_path: str or unicode
    """
    if not node["content_hash_latest_timestamp"][1]:
        raise NoTimeStamp(str(node))

    st_mtime = int(node["content_hash_latest_timestamp"][1])
    dirname_of_path = os.path.dirname(node["doc"]["m_path_p64s"])
    new_path = os.path.join(options.dir, dirname_of_path.lstrip(os.path.sep), node["doc"]["m_name"])
    memory = add_local_path_history(memory, new_path)
    output_json({"msg": new_path})
    write_file(path=new_path, data=data, content_path=content_path,
               a_time=st_mtime, m_time=st_mtime, st_mode=None, st_uid=None, st_gid=None)
    return memory
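write_blob_to_filepath rebuilds the local destination from the sync root (options.dir), the server-relative directory and the node name. A standalone sketch of just that step (build_local_path is an illustrative name, not a project function):

import os

def build_local_path(sync_root, rel_dir_path, name):
    # strip the leading separator so os.path.join does not discard sync_root
    return os.path.join(sync_root, rel_dir_path.lstrip(os.path.sep), name)

# build_local_path("/home/user/sync", "/docs/reports", "q1.txt")
# -> "/home/user/sync/docs/reports/q1.txt"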
Example #3
def make_directories_local(memory, options, localindex, folders):
    """
    @type memory: Memory
    @type options: optparse.Values, instance
    @type localindex: dict
    @type folders: tuple
    """
    for f in folders:
        # create the folder locally and record it in both path histories
        ensure_directory(f["name"])
        memory = add_local_path_history(memory, f["name"])
        memory = add_server_path_history(memory, f["relname"])

        # re-index the new folder so localindex stays current
        arg = {"DIR": options.dir,
               "folders": {"dirnames": {}},
               "numfiles": 0}
        index_files_visit(arg, f["name"], [])

        for k in arg["folders"]["dirnames"]:
            localindex["dirnames"][k] = arg["folders"]["dirnames"][k]

    return memory
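ensure_directory and index_files_visit also come from the surrounding project. Assuming ensure_directory simply creates the folder when it does not yet exist, a minimal stand-in could be:

import os

def ensure_directory(path):
    # create the directory (and any intermediate parents) only when missing
    if not os.path.exists(path):
        os.makedirs(path)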
Example #4
def diff_files_locally(memory, options, localindex, serverindex):
    """
    diff_files_locally
    @type memory: Memory
    @type options: optparse.Values, instance
    @type localindex: dict
    @type serverindex: dict
    """
    local_pathnames = [(localindex["dirnames"][d]["dirname"], localindex["dirnames"][d]["filenames"])
                       for d in localindex["dirnames"]
                       if len(localindex["dirnames"][d]["filenames"]) > 0]
    local_pathnames_set = set()
    file_uploads = []

    for ft in local_pathnames:
        for fname in ft[1]:
            if not str(fname["name"]).startswith("."):
                local_path = os.path.join(ft[0], fname["name"])
                local_pathnames_set.add(str(local_path))

    for local_path in local_pathnames_set:
        if os.path.exists(local_path):
            seen_local_path_before, memory = in_local_path_history(memory, local_path)
            rel_local_path = local_path.replace(options.dir, "")
            upload_file_object = {"local_path": local_path,
                                  "parent_short_id": None,
                                  "rel_path": rel_local_path}

            corresponding_server_nodes = [x for x in serverindex["doclist"]
                                          if x["doc"]["m_path_p64s"] == upload_file_object["rel_path"]]

            if not seen_local_path_before:
                # no matching server node: brand-new file, queue it for upload
                if not corresponding_server_nodes:
                    file_uploads.append(upload_file_object)
                else:
                    filedata, localindex = make_cryptogit_hash(upload_file_object["local_path"], options.dir, localindex)

                    if not corresponding_server_nodes[0]:
                        file_uploads.append(upload_file_object)
                    else:
                        if not strcmp(corresponding_server_nodes[0]["content_hash_latest_timestamp"][0], filedata["filehash"]):
                            file_uploads.append(upload_file_object)
                        else:
                            memory = add_local_path_history(memory, upload_file_object["local_path"])

            else:
                # is it changed?
                if len(corresponding_server_nodes) != 0:
                    filestats = read_file_to_fdict(local_path)

                    if filestats and corresponding_server_nodes[0]:
                        # the local file changed after the last known server timestamp
                        if filestats["st_ctime"] > corresponding_server_nodes[0]["content_hash_latest_timestamp"][1]:
                            filedata, localindex = make_cryptogit_hash(local_path, options.dir, localindex)

                            # only queue an upload when the content hash actually differs
                            if filedata["filehash"] != corresponding_server_nodes[0]["content_hash_latest_timestamp"][0]:
                                file_uploads.append(upload_file_object)

    file_del_local = []
    server_paths = [str(os.path.join(options.dir, x["doc"]["m_path_p64s"].lstrip(os.path.sep)))
                    for x in serverindex["doclist"]]
    for local_path in local_pathnames_set:
        if os.path.exists(local_path):
            if local_path not in server_paths:
                seen_local_path_before, memory = in_local_path_history(memory, local_path)

                if seen_local_path_before:
                    file_del_local.append(local_path)

    return file_uploads, file_del_local, memory, localindex
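The branching above reduces to one rule: a file is queued for upload when the server has no node for its relative path, or when the server's latest content hash differs from the locally computed one. A condensed, self-contained sketch of that rule (needs_upload is an illustrative name, not a project function):

def needs_upload(server_node, local_hash):
    # no server node at this path: the file is new
    if not server_node:
        return True

    # otherwise upload only when the content hashes disagree
    server_hash = server_node["content_hash_latest_timestamp"][0]
    return server_hash != local_hash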
Example #5
def get_unique_content(memory, options, all_unique_nodes, local_paths):
    """
    @type memory: Memory
    @type options: optparse.Values, instance
    @type all_unique_nodes: dict
    @type local_paths: tuple
    """
    if len(local_paths) == 0:
        return memory

    unique_nodes_hashes = [fhash for fhash in all_unique_nodes if not have_blob(options, fhash)]
    unique_nodes = [all_unique_nodes[fhash] for fhash in all_unique_nodes if fhash in unique_nodes_hashes]
    downloaded_files_cnt = 0
    unique_nodes = [node for node in unique_nodes
                    if not os.path.exists(os.path.join(options.dir, node["doc"]["m_path_p64s"].lstrip(os.path.sep)))]
    unique_nodes = sorted(unique_nodes, key=lambda k: k["doc"]["m_size_p64s"])

    for node in unique_nodes:
        update_progress(downloaded_files_cnt, len(unique_nodes), "downloading " + str(node["doc"]["m_name"]))
        content_path, content_hash = download_blob(memory, options, node)
        update_item_progress(100)
        output_json({"item_progress": 0})
        memory, file_nodes_left = write_blobs_to_filepaths(memory, options, local_paths, None, content_hash, content_path)
        downloaded_files_cnt += 1
    update_progress(downloaded_files_cnt, len(unique_nodes), "downloading done")

    for lp in local_paths:
        file_path = os.path.join(options.dir, lp["doc"]["m_path_p64s"].lstrip(os.path.sep))
        memory = add_local_path_history(memory, file_path)
        source_path = None

        if not os.path.exists(file_path):
            for lph in all_unique_nodes:
                if lph == lp["content_hash_latest_timestamp"][0]:
                    source_path = os.path.join(options.dir, all_unique_nodes[lph]["doc"]["m_path_p64s"].lstrip(os.path.sep))
                    break

        datapath = data = None
        if source_path:
            if not os.path.exists(source_path):
                fhash = lp["content_hash_latest_timestamp"][0]
                source_path = os.path.join(get_blob_dir(options), fhash[:2])
                source_path = os.path.join(source_path, fhash[2:])
                memory = add_path_history(file_path, memory)
                secret = password_derivation(options.password, base64.b64decode(memory.get("salt_b64")))
                dec_file = decrypt_file(source_path, secret)
                datapath = dec_file.name

            if not datapath:
                # plain blob on disk: read the content itself
                st_ctime, st_atime, st_mtime, st_mode, st_uid, st_gid, st_size, datapath = read_file(source_path, True)
            else:
                # blob was decrypted above: only collect the file metadata here
                st_ctime, st_atime, st_mtime, st_mode, st_uid, st_gid, st_size, datapath_dummy = read_file(source_path)

            st_mtime = int(lp["content_hash_latest_timestamp"][1])
            write_file(file_path, data, datapath, st_mtime, st_mtime, st_mode, st_uid, st_gid)

    local_paths_not_written = [fp for fp in local_paths
                               if not os.path.exists(os.path.join(options.dir, fp["doc"]["m_path_p64s"].lstrip(os.path.sep)))]

    if len(local_paths_not_written) > 0:
        local_index = get_localindex(memory)
        local_path_hashes = {}

        for ldir in local_index["dirnames"]:
            for f in local_index["dirnames"][ldir]["filenames"]:
                if "hash" in f:
                    local_path_hashes[f["hash"]] = os.path.join(local_index["dirnames"][ldir]["dirname"], f["name"])

        for lfnw in local_paths_not_written:
            w = False

            for lfh in local_path_hashes:
                if not w:
                    if strcmp(lfnw["content_hash_latest_timestamp"][0], lfh):
                        w = True
                        target = os.path.join(options.dir, lfnw["doc"]["m_path_p64s"].lstrip(os.path.sep))

                        # copy the duplicate content; binary mode and context managers
                        # avoid text-mode corruption and leaked file handles
                        with open(local_path_hashes[lfh], "rb") as fin, open(target, "wb") as fout:
                            fout.write(fin.read())

    return memory
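The fallback at the end reuses content already on disk: it indexes local files by hash and copies from a duplicate whenever a node's blob hash matches. A condensed sketch of that idea using shutil for the copy (copy_from_local_duplicate is an illustrative name; the real code compares hashes with the project's strcmp helper):

import shutil

def copy_from_local_duplicate(target_path, node_hash, local_path_hashes):
    # local_path_hashes maps content hash -> existing local file path
    source_path = local_path_hashes.get(node_hash)

    if source_path:
        shutil.copyfile(source_path, target_path)
        return True

    return False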