def dirs_on_server(memory, options, unique_dirs_server):
    """
    Decide, for each directory reported by the server, whether it was removed
    locally (so it must be deleted on the server) or is new (so it must be
    created locally).

    @type memory: Memory
    @type options: optparse.Values, instance
    @type unique_dirs_server: tuple
    @rtype: tuple, tuple, Memory
    """
    absolute_unique_dirs_server = [os.path.join(options.dir, np.lstrip(os.path.sep)) for np in unique_dirs_server]

    # server dirs missing on disk: either removed locally or brand new
    local_folders_removed = [np for np in absolute_unique_dirs_server if not os.path.exists(np)]
    dirs_del_server = []
    dirs_make_local = []

    # check if they are really removed or just new
    if memory.has("localpath_history"):
        local_path_history = memory.get("localpath_history")

        # absolute paths, extended with every parent directory of a remembered path
        local_path_history_disk = [os.path.join(options.dir, x[0].lstrip(os.sep)) for x in local_path_history]
        local_dirs_history_disk = [os.path.dirname(x) for x in local_path_history_disk]
        local_path_history_disk.extend(local_dirs_history_disk)
        local_path_history_disk = tuple(set(local_path_history_disk))

        # filter out folders previously seen
        local_folders_removed = [x for x in local_folders_removed if x in local_path_history_disk]

        if len(local_folders_removed) == 0:
            # first run: create locally everything never seen before and absent on disk.
            # BUG FIX: the history check used to read
            # `os.path.exists(...) not in local_path_history_disk`, membership-testing
            # a bool against a tuple of paths (always True); test the path itself.
            dirs_make_local = [{"name": os.path.join(options.dir, x.lstrip(os.sep)), "relname": x}
                               for x in unique_dirs_server
                               if (os.path.join(options.dir, x.lstrip(os.sep)) not in local_path_history_disk
                                   and not os.path.exists(os.path.join(options.dir, x.lstrip(os.sep))))]
    else:
        local_folders_removed = []

    for dir_name in local_folders_removed:
        # strip only the leading options.dir prefix (count=1); a plain replace()
        # would also clobber an equal substring deeper in the path
        dirname_rel = dir_name.replace(options.dir, "", 1)
        had_on_server, memory = in_server_path_history(memory, dirname_rel)
        have_on_server = False

        if not had_on_server:
            # not in our history: consult the server index directly
            if memory.has("serverindex"):
                serverindex = memory.get("serverindex")
                if "dirlist" in serverindex:
                    have_on_server = dirname_rel in memory.get("serverindex")["dirlist"]

            if have_on_server:
                memory = add_server_path_history(memory, dirname_rel)

        # initial run, download everything
        if not memory.has("localpath_history"):
            had_on_server = have_on_server = False

        if had_on_server or have_on_server:
            # we knew about it and it is gone locally -> delete on server
            dirs_del_server.append(dirname_rel)
        else:
            # unknown to us -> it is new, create it locally
            folder = {"name": dir_name, "relname": dirname_rel}
            dirs_make_local.append(folder)

    return tuple(dirs_del_server), tuple(dirs_make_local), memory
def add_path_history(fpath, memory): """ @type fpath:str, unicode @type memory: Memory """ memory = add_server_path_history(memory, fpath) memory = add_local_path_history(memory, fpath) return memory
def make_directories_local(memory, options, localindex, folders): """ @type memory: Memory @type options: optparse.Values, instance @type localindex: dict @type folders: tuple """ for f in folders: ensure_directory(f["name"]) memory = add_local_path_history(memory, f["name"]) memory = add_server_path_history(memory, f["relname"]) arg = {"DIR": options.dir, "folders": {"dirnames": {}}, "numfiles": 0} index_files_visit(arg, f["name"], []) for k in arg["folders"]["dirnames"]: localindex["dirnames"][k] = arg["folders"]["dirnames"][k] return memory
def write_blobs_to_filepaths(memory, options, file_nodes, data, downloaded_fhash, content_path): """ @type memory: Memory @type options: optparse.Values @type file_nodes: tuple @type data: str or unicode or None @type downloaded_fhash: unicode @type content_path: str or unicode """ files_same_hash = [] file_nodes_copy = list(file_nodes) for sfile in file_nodes: fhash = sfile["content_hash_latest_timestamp"][0] if strcmp(fhash, downloaded_fhash): files_same_hash.append(sfile) for fnode in files_same_hash: memory = add_server_path_history(memory, fnode["doc"]["m_path_p64s"]) write_blob_to_filepath(memory, fnode, options, data, content_path) file_nodes_copy.remove(fnode) return memory, file_nodes_copy