def test_pull_dir_client_to_server(self):
    """Copy a whole directory tree from the client to the server."""
    h_register_dummy_user_device_client()
    client_folder = gen_paths.normalize_path(client_paths.LOCAL_CLIENT_DATA,
                                             "folder1")
    h_fill_dummy_dir(client_folder)
    pull_action = c_sync.create_action(gen_paths.normalize_path("folder1"),
                                       gen_paths.normalize_path(""),
                                       gen_json.ACTION_PULL,
                                       remote_abs_path=client_folder,
                                       is_directory=True)
    net_interface.server.execute_actions([pull_action])
    # The whole dummy tree must have arrived on the server side.
    expected_rel_paths = [
        "folder1",
        "folder1/inner1",
        "folder1/inner2",
        "folder1/inner1/test_inner.txt",
        "folder1/test.txt",
    ]
    for rel_path in expected_rel_paths:
        self.assertTrue(
            os.path.exists(path_utils.rel_user_path_to_abs(rel_path, 1)))
def _merge_folders_latest(folders1: List[dict], folders2: List[dict],
                          f1_path: NormalizedPath, f2_path: NormalizedPath,
                          rel_path: NormalizedPath):
    """Create actions based on the folders inside the two folders.

    Folders present on both sides are merged recursively; a folder that
    exists on only one side is pulled completely to the other side.
    Both input lists are mutated: matched entries are removed from
    ``folders2`` and ``folders1`` is emptied before the swapped recursion.

    :param folders1: folder entries of side 1 (emptied by this call)
    :param folders2: folder entries of side 2 (matched entries are removed)
    :param f1_path: absolute path of side 1's top folder
    :param f2_path: absolute path of side 2's top folder
    :param rel_path: path of the current folder relative to the top folders
    :return: tuple (actions for side 1, actions for side 2)
    """
    f1_actions = []
    f2_actions = []
    f2_folder_names = [folder["folder_name"] for folder in folders2]
    for f1 in folders1:
        # Keep the try minimal: only .index() may raise the ValueError we
        # want to handle; a ValueError escaping the recursive merge must
        # not be swallowed here.
        try:
            idx = f2_folder_names.index(f1["folder_name"])
        except ValueError:
            # Folder only exists on side 1: pull it completely to side 2.
            rel_file_path = normalize_path(rel_path, f1["folder_name"])
            remote_abs_path = normalize_path(f1_path, rel_path,
                                             f1["folder_name"])
            f2_actions.append(
                c_syc.create_action(normalize_path(f2_path), rel_file_path,
                                    gen_json.ACTION_PULL, True,
                                    remote_abs_path=remote_abs_path))
        else:
            # Folder exists on both sides: merge its content recursively.
            f2 = folders2[idx]
            new_rel_path = normalize_path(rel_path, f1["folder_name"])
            new_f1_actions, new_f2_actions = _prioritize_latest_recursive(
                f1["files"], f2["files"], f1_path, f2_path, new_rel_path)
            f1_actions += new_f1_actions
            f2_actions += new_f2_actions
            f2_folder_names.pop(idx)
            folders2.pop(idx)
    # All entries of folders1 are handled. Clearing the list (mirroring
    # _merge_files_latest) is essential: otherwise the swapped recursive
    # call below re-processes every side-1 folder, producing duplicate
    # actions and recursing without end whenever both lists are non-empty.
    folders1.clear()
    if len(folders2) > 0:
        # Handle folders that only exist on side 2 with swapped roles.
        new_f2_actions, new_f1_actions = _merge_folders_latest(
            folders2, folders1, f2_path, f1_path, rel_path)
        f1_actions += new_f1_actions
        f2_actions += new_f2_actions
    return f1_actions, f2_actions
def get_dir(abs_src_path: str, abs_dest_path: str, pull_file_func: Callable,
            make_dirs_func: Callable) -> None:
    """Every file inside the abs_src_path is copied to the proper abs_dest_path.

    Directories are created via ``make_dirs_func(dest_path)``; files are
    transferred via ``pull_file_func(abs_src_file_path, abs_dest_file_path)``.

    :param abs_src_path: root of the tree to copy from
    :param abs_dest_path: root of the tree to copy to
    :param pull_file_func: callable that transfers one file
    :param make_dirs_func: callable that creates one directory
    """
    for dirpath, dirnames, filenames in os.walk(abs_src_path):
        rel_folder_name = os.path.relpath(dirpath, abs_src_path)
        # Renamed loop variables: `dir` and `file` shadowed the builtins.
        for dir_name in dirnames:
            dest_path = gen_paths.normalize_path(abs_dest_path,
                                                 rel_folder_name, dir_name)
            make_dirs_func(dest_path)
        for file_name in filenames:
            abs_src_file_path = gen_paths.normalize_path(dirpath, file_name)
            abs_dest_file_path = gen_paths.normalize_path(
                abs_dest_path, rel_folder_name, file_name)
            pull_file_func(abs_src_file_path, abs_dest_file_path)
def test_execute_server_actions_delete(self):
    """A delete action removes the file from the server folder."""
    self.server_folder = h_setup_execution_env()
    src_file_path = os.path.join(self.server_folder, "test.txt")
    with open(src_file_path, "w") as file:
        file.write("Hello")
    delete_action = c_sync.create_action(gen_paths.normalize_path("folder1"),
                                         gen_paths.normalize_path("test.txt"),
                                         gen_json.ACTION_DELETE)
    c_sync._execute_server_actions([delete_action])
    self.assertFalse(os.path.isfile(src_file_path))
def test_merge_changes_conflicts(self):
    """The same file changed on both sides yields a conflict entry."""
    l_file_change = h_create_change(gen_paths.normalize_path("test1.txt"),
                                    gen_json.ACTION_PULL)
    change_on_server = h_create_change(gen_paths.normalize_path("test1.txt"),
                                       gen_json.ACTION_PULL)
    change_on_client = h_create_change(gen_paths.normalize_path("test1.txt"),
                                       gen_json.ACTION_PULL)
    server_changes = {
        **h_create_folder_entry(gen_paths.normalize_path("folder1"),
                                {**change_on_server})
    }
    client_changes = {
        **h_create_folder_entry(
            gen_paths.normalize_path(client_paths.LOCAL_CLIENT_DATA,
                                     "folder1"), {**change_on_client},
            gen_paths.normalize_path("folder1"))
    }
    expected_conflicts = [{
        "folders": [
            gen_paths.normalize_path(client_paths.LOCAL_CLIENT_DATA,
                                     "folder1"),
            gen_paths.normalize_path("folder1")
        ],
        "rel_file_path": "test1.txt",
        "local_file": l_file_change["test1.txt"],
        "remote_file": l_file_change["test1.txt"]
    }]
    # Neither side gets an action; the change surfaces only as a conflict.
    self.h_check_merge(server_changes, client_changes, [], [],
                       expected_conflicts)
def test_execute_client_actions_delete(self):
    """A delete action removes the file from the client data folder."""
    h_create_empty(os.path.join(client_paths.LOCAL_CLIENT_DATA, "folder1"))
    client_src_path = os.path.join(client_paths.LOCAL_CLIENT_DATA,
                                   "folder1/test.txt")
    with open(client_src_path, "w") as file:
        file.write("Lorem ipsum " * 10)
    delete_action = c_sync.create_action(
        gen_paths.normalize_path(client_paths.LOCAL_CLIENT_DATA, "folder1"),
        gen_paths.normalize_path("test.txt"), gen_json.ACTION_DELETE)
    c_sync.execute_client_actions([delete_action])
    self.assertFalse(os.path.isfile(client_src_path))
def test_execute_server_actions_pull(self):
    """A pull action copies a client file into the server folder."""
    self.server_folder = h_setup_execution_env()
    client_src_path = os.path.join(client_paths.LOCAL_CLIENT_DATA,
                                   "folder1/test.txt")
    with open(client_src_path, "w") as file:
        file.write("Lorem ipsum " * 10)
    pull_action = c_sync.create_action(gen_paths.normalize_path("folder1"),
                                       gen_paths.normalize_path("test.txt"),
                                       gen_json.ACTION_PULL,
                                       remote_abs_path=client_src_path)
    c_sync._execute_server_actions([pull_action])
    server_dest_path = os.path.join(self.server_folder, "test.txt")
    self.assertTrue(os.path.isfile(server_dest_path))
def test_execute_server_actions_move(self):
    """A move action renames a file inside the server folder."""
    self.server_folder = h_setup_execution_env()
    src_file_path = os.path.join(self.server_folder, "test.txt")
    with open(src_file_path, "w") as file:
        file.write("Hello")
    move_action = c_sync.create_action(
        gen_paths.normalize_path("folder1"),
        gen_paths.normalize_path("new_test.txt"), gen_json.ACTION_MOVE,
        rel_old_file_path=gen_paths.normalize_path("test.txt"))
    c_sync._execute_server_actions([move_action])
    expected_path = os.path.join(self.server_folder, "new_test.txt")
    self.assertTrue(os.path.isfile(expected_path))
def test_merge_changes_move_server(self):
    """A server-side move is translated into a client move action."""
    server_changes = {
        **h_create_folder_entry(
            gen_paths.normalize_path("folder1"),
            {
                # Consistency fix: use normalize_path like every other test
                # instead of constructing a raw NormalizedPath.
                **h_create_change(
                    gen_paths.normalize_path("test.txt"),
                    gen_json.ACTION_MOVE,
                    new_file_path=gen_paths.normalize_path("new_test.txt"))
            })
    }
    client_changes = h_create_folder_entry(
        gen_paths.normalize_path(client_paths.LOCAL_CLIENT_DATA, "folder1"),
        {}, gen_paths.normalize_path("folder1"))
    expected_server = []
    expected_conflicts = []
    expected_client = [
        c_sync.create_action(
            gen_paths.normalize_path(client_paths.LOCAL_CLIENT_DATA,
                                     "folder1"),
            gen_paths.normalize_path("new_test.txt"), gen_json.ACTION_MOVE,
            rel_old_file_path=gen_paths.normalize_path("test.txt"))
    ]
    self.h_check_merge(server_changes, client_changes, expected_server,
                       expected_client, expected_conflicts)
def test_merge_changes_create_server(self):
    """A new server file is translated into a client pull action."""
    server_changes = {
        **h_create_folder_entry(
            gen_paths.normalize_path("folder1"), {
                **h_create_change(gen_paths.normalize_path("test.txt"),
                                  gen_json.ACTION_PULL)
            })
    }
    client_changes = h_create_folder_entry(
        gen_paths.normalize_path(client_paths.LOCAL_CLIENT_DATA, "folder1"),
        {}, gen_paths.normalize_path("folder1"))
    src_path = gen_paths.normalize_path("folder1", "test.txt")
    expected_client = [
        c_sync.create_action(
            gen_paths.normalize_path(client_paths.LOCAL_CLIENT_DATA,
                                     "folder1"),
            gen_paths.normalize_path("test.txt"), gen_json.ACTION_PULL,
            remote_abs_path=src_path)
    ]
    # No server actions and no conflicts are expected.
    self.h_check_merge(server_changes, client_changes, [], expected_client,
                       [])
def test_execute_client_actions_pull(self):
    """A pull action downloads a server file into the client folder."""
    self.server_folder = h_setup_execution_env()
    server_file_path = os.path.join(self.server_folder, 'test.txt')
    with open(server_file_path, "w") as f:
        f.write("Hello" * 10)
    client_actions = [
        c_sync.create_action(
            gen_paths.normalize_path(client_paths.LOCAL_CLIENT_DATA,
                                     "folder1"),
            gen_paths.normalize_path("test.txt"),
            gen_json.ACTION_PULL,
            # Consistency fix: build the remote path with normalize_path as
            # the sibling tests do, instead of a raw NormalizedPath.
            remote_abs_path=gen_paths.normalize_path("folder1/test.txt"))
    ]
    c_sync.execute_client_actions(client_actions)
    client_dest_path = os.path.join(client_paths.LOCAL_CLIENT_DATA,
                                    "folder1/test.txt")
    self.assertTrue(os.path.isfile(client_dest_path))
def test_pull_dir_server_to_client(self):
    """Copy a whole directory tree from the server to the client."""
    h_register_dummy_user_device_client()
    server_folder = gen_paths.normalize_path(
        path_utils.rel_user_path_to_abs("folder1", 1))
    client_folder = gen_paths.normalize_path(client_paths.LOCAL_CLIENT_DATA,
                                             "folder1")
    h_fill_dummy_dir(server_folder)
    net_interface.server.get_dir("folder1", client_folder)
    # The whole dummy tree must have arrived on the client side.
    for rel_path in ("", "inner1", "inner2", "inner1/test_inner.txt",
                     "test.txt"):
        self.assertTrue(
            os.path.exists(os.path.join(client_folder, rel_path)))
def init_logging():
    """Create the server loggers, optionally writing to a shared log file."""
    global logger_network, logger_general, logger_web
    log_file = None
    if log_to_file:
        os.makedirs(gen_paths.SERVER_LOGS, exist_ok=True)
        log_file = gen_paths.normalize_path(gen_paths.SERVER_LOGS, "all.log")
    logger_general = setup_logger("General", log_file)
    logger_network = setup_logger("Network", log_file)
    logger_web = setup_logger("Web", log_file)
    log_system(log_file)
def test_merge_changes_delete_client(self):
    """A client-side delete is translated into a server delete action."""
    client_changes = {
        **h_create_folder_entry(
            gen_paths.normalize_path(client_paths.LOCAL_CLIENT_DATA,
                                     "folder1"),
            {
                # Consistency fix: use normalize_path like every other test
                # instead of constructing a raw NormalizedPath.
                **h_create_change(gen_paths.normalize_path("test.txt"),
                                  gen_json.ACTION_DELETE)
            }, gen_paths.normalize_path("folder1"))
    }
    server_changes = h_create_folder_entry(
        gen_paths.normalize_path("folder1"), {})
    expected_server = [
        c_sync.create_action(gen_paths.normalize_path("folder1"),
                             gen_paths.normalize_path("test.txt"),
                             gen_json.ACTION_DELETE)
    ]
    expected_client = []
    expected_conflicts = []
    self.h_check_merge(server_changes, client_changes, expected_server,
                       expected_client, expected_conflicts)
def test_distribute_action(self):
    """Distributing an action stores it in the device's changes file."""
    server_json.create_changes_file_for_new_device(1, 1)
    server_json.add_folder("folder1")
    src_path = "Dummy_client_path"
    pull_action = c_sync.create_action(gen_paths.normalize_path("folder1"),
                                       gen_paths.normalize_path("test.txt"),
                                       gen_json.ACTION_PULL,
                                       remote_abs_path=src_path)
    server_json.distribute_action(pull_action, [1])
    data = server_json._get_json_data()
    changes: dict = data["folder1"]["changes"]
    changes_data = changes["test.txt"]
    # The timestamp is generated at distribution time, so it is copied
    # from the stored entry instead of being hard-coded.
    expected = {
        "test.txt": {
            "action": "pull",
            "timestamp": changes_data["timestamp"],
            "is_directory": False,
            "rel_old_file_path": None
        }
    }
    self.assertEqual(expected, changes)
def _recursive_generate_content_of_folder(abs_folder_path: str,
                                          folder_name: str, only_files_list):
    """Build a nested dict describing the content of a folder.

    :param abs_folder_path: absolute path of the folder to scan
    :param folder_name: name stored in the resulting entry
    :param only_files_list: if True, "files" is a plain list of file names;
        otherwise each file entry carries its modification timestamp
    :return: {"folder_name": ..., "files": [...], "folders": [...]}
    """
    content = {"folder_name": folder_name, "files": [], "folders": []}
    _, dir_list, file_list = next(os.walk(abs_folder_path))
    for file in sorted(file_list):
        file_path = normalize_path(abs_folder_path, file)
        if not only_files_list:
            content["files"].append({
                "file_name": file,
                "modified_timestamp": os.path.getmtime(file_path)
            })
        else:
            content["files"].append(file)
    # Sort directories as well (files already are), so the generated
    # structure is deterministic and comparable across platforms —
    # os.walk yields entries in filesystem-defined order.
    for dir_name in sorted(dir_list):
        abs_path = normalize_path(abs_folder_path, dir_name)
        content["folders"].append(
            _recursive_generate_content_of_folder(abs_path, dir_name,
                                                  only_files_list))
    return content
def _merge_files_latest(files1: List[dict], files2: List[dict],
                        f1_path: NormalizedPath, f2_path: NormalizedPath,
                        rel_path: NormalizedPath):
    """Create actions based on the files inside the two folders.

    For a file present on both sides the newer timestamp wins (on a tie,
    side 2 wins); a file present on one side only is pulled to the other.
    Both lists are mutated: matched entries are removed from ``files2``
    and ``files1`` is emptied before the swapped recursive call.

    :param files1: file entries of side 1 (emptied by this call)
    :param files2: file entries of side 2 (matched entries are removed)
    :param f1_path: absolute path of side 1's top folder
    :param f2_path: absolute path of side 2's top folder
    :param rel_path: path of the current folder relative to the top folders
    :return: tuple (actions for side 1, actions for side 2)
    """
    f1_actions = []
    f2_actions = []
    f2_file_names = [file["file_name"] for file in files2]
    for f1 in files1:
        try:
            idx = f2_file_names.index(f1["file_name"])
            f2 = files2[idx]
            # File exists on both sides: the newer copy wins.
            if f1["modified_timestamp"] > f2["modified_timestamp"]:
                take_1 = True
            else:
                take_1 = False
            # Remove the matched entry so the trailing recursion only sees
            # files that exist exclusively on side 2.
            f2_file_names.pop(idx)
            files2.pop(idx)
        except ValueError:
            # File only exists on side 1: its copy is pulled to side 2.
            take_1 = True
        if take_1:
            # Side 2 pulls side 1's version.
            f2_actions.append(c_syc.create_action(normalize_path(f2_path),
                                                  normalize_path(rel_path, f1["file_name"]),
                                                  gen_json.ACTION_PULL, False,
                                                  remote_abs_path=normalize_path(f1_path, rel_path,
                                                                                 f1["file_name"])))
        else:
            # Side 1 pulls side 2's version.
            f1_actions.append(c_syc.create_action(normalize_path(f1_path),
                                                  normalize_path(rel_path, f1["file_name"]),
                                                  gen_json.ACTION_PULL, False,
                                                  remote_abs_path=normalize_path(f2_path, rel_path,
                                                                                 f1["file_name"])))
    # Every file of side 1 is handled; clearing prevents the swapped call
    # below from processing them a second time.
    files1.clear()
    if len(files2) > 0:
        # Handle files that only exist on side 2 with swapped roles.
        new_f2_actions, new_f1_actions = _merge_files_latest(files2, files1,
                                                             f2_path, f1_path,
                                                             rel_path)
        f1_actions += new_f1_actions
        f2_actions += new_f2_actions
    return f1_actions, f2_actions
def __init__(self, abs_folder_path: NormalizedPath,
             include_regexes: List[str] = (".*", ),
             exclude_regexes: List[str] = ()):
    """Initialize the event handler for one watched folder.

    :param abs_folder_path: absolute path of the folder to watch
    :param include_regexes: only paths matching one of these are handled
    :param exclude_regexes: paths matching one of these are ignored
    """
    # The superclass (presumably a watchdog RegexMatchingEventHandler —
    # TODO confirm) performs the regex filtering.
    super().__init__(regexes=include_regexes,
                     ignore_regexes=exclude_regexes,
                     case_sensitive=False)
    self.folder_path = abs_folder_path
    # Paths to ignore once, mapped to (entry time, already-seen flag);
    # currently only referenced by commented-out code in on_any_event.
    self._single_ignore_paths: Dict[NormalizedPath,
                                    Tuple[datetime.datetime, bool]] = {}
    # Per-event metadata, refreshed by on_any_event().
    self._is_dir: bool = False
    self._rel_path: NormalizedPath = normalize_path("")
    self._ignore = False
def on_any_event(self, event): """Known issue with watchdog: When a directory is deleted, it is dispatched as FileDeleteEvent. Because after it is deleted it is not possible to check whether it was a directory or a file. So when handling a remove change, it can be both a file or a directory. """ # Metadata self._is_dir = event.is_directory src_path = event.src_path self._rel_path = normalize_path( os.path.relpath(src_path, self.folder_path)) # ignore # TODO: Better handling of ignore. Remove magic 0.1 number self._ignore = False if event.is_directory and event.event_type == "modified": # Only meta data of the directory is modified. This data is not tracked self._ignore = True if normalize_path( event.src_path) in ignore_on_synchronize.ignore_paths: self._ignore = True ignore_on_synchronize.ignore_paths.remove( normalize_path(event.src_path)) # if self._rel_path in self._single_ignore_paths.keys(): # ignore = self._single_ignore_paths[self._rel_path] # if not ignore[1]: # not changed # self._single_ignore_paths[self._rel_path] = (datetime.datetime.now(), True) # self._ignore = True # else: # enter_time = self._single_ignore_paths[self._rel_path][0] # if datetime.datetime.now() - enter_time < datetime.timedelta(seconds=0.1): # self._ignore = True # else: # self._single_ignore_paths.pop(self._rel_path) if not self._ignore: sync_waiter.sync() logger_sync.debug( f"{event.event_type}: {os.path.relpath(event.src_path, self.folder_path)}" )
def init_logging():
    """Create the client loggers, optionally writing to a shared log file."""
    global logger_general, logger_network, logger_security, logger_sync, logger_gui
    log_file = None
    if log_to_file:
        os.makedirs(gen_paths.CLIENT_LOGS, exist_ok=True)
        log_file = gen_paths.normalize_path(gen_paths.CLIENT_LOGS, "all.log")
    logger_gui = setup_logger("GUI", log_file)
    logger_general = setup_logger("General", log_file)
    logger_network = setup_logger("Network", log_file)
    logger_sync = setup_logger("Sync", log_file)
    logger_security = setup_logger("Security", log_file)
    log_system(log_file)
def execute_client_actions(client_actions: List[SyncAction]) -> None:
    """Apply each sync action to the local client file system.

    Supported action types: delete, move, pull (file or directory) and
    mkdir. An unknown type raises a KeyError.
    """
    for action in client_actions:
        action_type = action["action_type"]
        dest_path = normalize_path(action["local_folder_path"],
                                   action["rel_file_path"])
        if action_type == gen_json.ACTION_DELETE[0]:
            # Directory deletions are also dispatched as file deletions,
            # so check what the path currently is and remove accordingly.
            if os.path.isdir(dest_path):
                gen_file_exchanges.remove_dir(dest_path)
            else:
                gen_file_exchanges.remove_file(dest_path)
        elif action_type == gen_json.ACTION_MOVE[0]:
            old_path = normalize_path(action["local_folder_path"],
                                      action["rel_old_file_path"])
            gen_file_exchanges.move(old_path, dest_path)
        elif action_type == gen_json.ACTION_PULL[0]:
            remote_path = action["remote_abs_path"]
            if action["is_directory"]:
                net_interface.server.get_dir(remote_path, dest_path)
            else:
                net_interface.server.get_file(remote_path, dest_path)
        elif action_type == gen_json.ACTION_MKDIR[0]:
            gen_file_exchanges.make_dirs(dest_path)
        else:
            raise KeyError(f"Unknown action type: {action['action_type']} in {action}")
def get_directory_path(
        window_title="OpenDrive: Select a directory") -> paths.NormalizedPath:
    """Opens a platform specific dialog, where the user can select a directory.

    Returns
    -------
    path_to_directory: NormalizedPath
    """
    # A hidden root window is needed so that only the dialog itself shows.
    hidden_root = tkinter.Tk()
    hidden_root.withdraw()
    chosen_directory = tkinter.filedialog.askdirectory(
        parent=hidden_root, title=window_title, initialdir=Path.home())
    hidden_root.destroy()
    return paths.normalize_path(chosen_directory)
def walk_directories(dir_content: dict, parent_path: NormalizedPath):
    """Directory tree generator.

    For each directory in the directory tree, yields a 3-tuple
    (parent_path, dir_path, files) where files is a list of
    (filename, timestamp) tuples.

    path of file: parent_path + dir_path + file_name
    """
    folder_name = dir_content["folder_name"]
    file_entries = [(entry["file_name"], entry["modified_timestamp"])
                    for entry in dir_content["files"]]
    yield parent_path, folder_name, file_entries
    for sub_folder in dir_content["folders"]:
        # Recurse with this folder appended to the parent path.
        yield from walk_directories(
            sub_folder, normalize_path(parent_path, folder_name))
def test_merge_folders(self):
    """After adding a sync folder the server mirrors the client structure."""
    c_json.init_file(empty=True)
    h_register_dummy_user_device_client()
    abs_local_path = normalize_path(c_paths.LOCAL_CLIENT_DATA, "folder1")
    h_create_empty(abs_local_path)
    inner1 = {"folder_name": "inner1",
              "files": ["inner1_test.txt", "inner1_test2.txt"],
              "folders": []}
    inner2 = {"folder_name": "inner2",
              "files": ["inner2_test.txt", "inner2_test2.txt"],
              "folders": []}
    dummy_content = {"folder_name": "folder1",
                     "files": ["test.txt", "test2.txt"],
                     "folders": [inner1, inner2]}
    h_create_files_folders(abs_local_path, dummy_content)
    interface.add_sync_folder(abs_local_path, "folder1")
    server_path = path_utils.rel_user_path_to_abs("folder1", 1)
    current_structure = gen_merge_folders.generate_content_of_folder(
        server_path, only_files_list=True, top_folder_name="folder1")
    self.assertEqual(dummy_content, current_structure)
def add_folder(abs_folder_path: str,
               include_regexes: List[str] = (".*", ),
               exclude_regexes: List[str] = (),
               remote_name: Optional[str] = None) -> bool:
    """If possible add folder to file and start watching.

    :param abs_folder_path: path of the folder to synchronize
    :param include_regexes: only paths matching one of these are synced
    :param exclude_regexes: paths matching one of these are never synced
    :param remote_name: optional name of the matching server-side folder
    :return: True, if the folder was added
    """
    # Idiomatic isinstance with a tuple of types instead of an or-chain.
    assert isinstance(include_regexes, (list, tuple))
    assert isinstance(exclude_regexes, (list, tuple))
    abs_folder_path = normalize_path(abs_folder_path)
    added = file_changes_json.add_folder(abs_folder_path, include_regexes,
                                         exclude_regexes, remote_name)
    if not added:
        return False
    _add_watcher(abs_folder_path, include_regexes, exclude_regexes)
    logger_sync.info(
        f"Start watching at new folder: {abs_folder_path}, include_regexes={include_regexes}, "
        f"exclude_regexes={exclude_regexes}")
    return True
def h_test_merge_method(self, merge_method, f1_init_content, f2_init_content,
                        expected_content):
    """Generator helper that checks one merge method end to end.

    Sets up the client folder (f1) and the server folder (f2) with the
    given initial contents, yields so the caller can interleave work,
    then adds the sync folder with the merge method under test and
    asserts that BOTH sides match ``expected_content``. Drive it with
    two ``next()`` calls or iterate it to exhaustion.

    :param merge_method: the merge strategy to pass to add_sync_folder
    :param f1_init_content: initial nested content dict for the client side
    :param f2_init_content: initial nested content dict for the server side
    :param expected_content: content dict both sides must equal afterwards
    """
    c_json.init_file(empty=True)
    h_register_dummy_user_device_client()
    abs_local_path = normalize_path(c_paths.LOCAL_CLIENT_DATA, "folder1")
    h_create_empty(abs_local_path)
    f1_path = os.path.join(c_paths.LOCAL_CLIENT_DATA, "folder1")
    f2_path = path_utils.rel_user_path_to_abs("folder1", 1)
    net_interface.server.add_folder(f2_init_content["folder_name"])
    h_create_files_folders(f1_path, f1_init_content)
    h_create_files_folders(f2_path, f2_init_content)
    # First yield: environment is prepared, merge has not run yet.
    yield
    interface.add_sync_folder(abs_local_path, "folder1",
                              merge_method=merge_method)
    f1_structure = gen_merge_folders.generate_content_of_folder(
        f1_path, only_files_list=True,
        top_folder_name=f1_init_content["folder_name"])
    f2_structure = gen_merge_folders.generate_content_of_folder(
        f2_path, only_files_list=True,
        top_folder_name=f2_init_content["folder_name"])
    # After the merge, both sides must have converged to the expected tree.
    self.assertEqual(expected_content, f2_structure)
    self.assertEqual(expected_content, f1_structure)
    # Second yield: assertions done, caller may finish cleanup.
    yield
def _get_server_changes() -> dict:
    """Download the server-side changes file and return its parsed content."""
    dest_path = normalize_path(client_paths.LOCAL_CLIENT_DATA,
                               "server_changes.json")
    changes_file = net_interface.server.get_changes(dest_path)
    with open(changes_file.dst_path, "r") as changes_fp:
        return json.load(changes_fp)
def prioritize_latest(folder_1_content: dict, folder_2_content: dict
                      ) -> Tuple[List[SyncAction], List[SyncAction]]:
    """Merge two folder trees, letting the newest file version win.

    :return: tuple (actions for side 1, actions for side 2)
    """
    folder_1_name = folder_1_content["folder_name"]
    folder_2_name = folder_2_content["folder_name"]
    return _prioritize_latest_recursive(folder_1_content, folder_2_content,
                                        folder_1_name, folder_2_name,
                                        normalize_path(""))
def test_walk(self):
    """Smoke test: walk a nested structure and print every yielded tuple."""
    inner2 = {"folder_name": "inner2",
              "files": [{"file_name": "inner2.txt", "modified_timestamp": 23}],
              "folders": []}
    inner = {"folder_name": "inner",
             "files": [{"file_name": "inner.txt", "modified_timestamp": 23}],
             "folders": [inner2]}
    example_dict = {"folder_name": "top",
                    "files": [{"file_name": "test.txt",
                               "modified_timestamp": 1234}],
                    "folders": [inner]}
    walker = gen_merge_folders.walk_directories(example_dict,
                                                normalize_path(""))
    for dir_name, dirs, file_names in walker:
        print(f"parent_path: {dir_name}, dir_name: {dirs}, file_names: {file_names}")
def get_client_file_path(client_name):
    """Return the log file path for the client, or None when file logging is off."""
    if not log_to_file:
        return None
    return gen_paths.normalize_path(gen_paths.SERVER_LOGS,
                                    f"client_{client_name}.log")