def create_folder_by_path(self, folder_path):
    """
    Creates a folder as specified by folder_path. Folders in the path
    are checked for existence and created if they aren't already.

    :param folder_path: path to new folder from the server root.
    :return: the id of the created folder.
    """
    # Fetch the full remote folder tree up front so every existence check
    # below is a local lookup instead of a server round trip.
    root_folder_tree = self._get_root_folder_tree()
    current_parent_id = root_folder_tree.find_item_by_path(
        '', is_path_to_file=False)['id']

    path_folders = StoreTree.get_path_levels(folder_path)

    # An empty path refers to the store root itself - nothing to create.
    if path_folders[0] == '':
        return current_parent_id

    current_path = ''
    for folder_name in path_folders:
        new_parent = root_folder_tree.find_item_by_path(
            StoreTree.concat_paths([current_path, folder_name]))

        if new_parent is None:
            # Need to make on the server, then mirror the new folder in
            # the cached tree so deeper levels can still be found by path.
            new_parent_id = self.create_folder(current_parent_id,
                                               folder_name)
            root_folder_tree.add_folder(new_parent_id,
                                        name=folder_name,
                                        parent_id=current_parent_id)
            current_parent_id = new_parent_id
        else:
            # Level already exists - just descend into it.
            current_parent_id = new_parent['id']

        current_path = StoreTree.concat_paths([current_path, folder_name])

    return current_parent_id
def get_root_file_tree(self, root_folder_path=''):
    """
    This is a generator function. Each iteration returned will be an
    instance of StoreTree - this instance will just show the progress.
    Just use the last one returned for a complete tree.

    :param root_folder_path: the path to the root folder of the desired store.
    :return: StoreTree instance.
    :raises ValueError: if root_folder_path doesn't resolve to a folder id.
    """
    root_folder_id = self._get_folder_id_from_path(root_folder_path)

    if root_folder_id is None:
        raise ValueError(
            'Root {} doesn\'t appear to exist.'.format(root_folder_path))

    result_tree = StoreTree(root_folder_id)

    # Another provider that forces us to traverse every folder...
    # Depth-first traversal driven by an explicit stack of integer
    # folder ids (the API uses integer ids; the tree uses string ids).
    stack = [_integer_id_from_str_id(root_folder_id)]

    while len(stack) > 0:
        parent_folder_id = stack.pop()

        # Get folders in parent folder, look for current folder
        entries = \
            self._do_paginated_get(
                http_server_utils.join_url_components(
                    [self._api_drive_endpoint_prefix,
                     'folders/{}/items'.format(parent_folder_id)]),
                'entries',
                params={'fields': _metadata_fields})

        for item in entries:
            if item['type'] == 'folder':
                result_tree.add_folder(
                    _str_id_from_folder_integer_id(item['id']),
                    name=item['name'],
                    parent_id=_str_id_from_folder_integer_id(
                        parent_folder_id))
                # Queue the subfolder for its own listing pass.
                stack.append(item['id'])
            else:
                result_tree.add_file(
                    _str_id_from_file_integer_id(item['id']),
                    name=item['name'],
                    parent_id=_str_id_from_folder_integer_id(
                        parent_folder_id),
                    modified_datetime=date_parser.isoparse(
                        item['content_modified_at']),
                    file_hash=item['sha1'])

        # Yield the partial tree after each folder so callers can show
        # progress; the final yield carries the complete tree.
        yield result_tree
def create_folder_by_path(self, folder_path):
    """
    Creates a folder as specified by folder_path. Folders in the path
    are checked for existence and created if they aren't already.

    :param folder_path: path to new folder from the server root.
    :return: the id of the created folder.
    """
    result = None

    yandex_path = pathlib.Path(
        _convert_standard_to_yandex_path(
            StoreTree.standardise_path(folder_path)))

    current_parent = pathlib.Path('/')

    # Walk from the drive root down to the target; reversed(parents)
    # yields the ancestors root-first, then we append the target itself.
    for folder_path in itertools.chain(reversed(yandex_path.parents),
                                       [yandex_path]):
        if folder_path != pathlib.Path('/'):
            # yandex screams if you try and create existing folder
            parent_data = self._get_item_metadata(current_parent)
            child_exists = False
            for child in parent_data['_embedded']['items']:
                if child['type'] == 'dir' and child[
                        'name'] == folder_path.name:
                    child_exists = True
                    break

            if child_exists is False:
                result = self.create_folder(str(current_parent),
                                            folder_path.name)

        current_parent = folder_path

    # Now get folder id if needed - result is still None when every level
    # already existed, so look the id up from the target's metadata.
    if result is None:
        result = _yandex_id_from_yandex_path(
            self._get_item_metadata(yandex_path)['path'])

    return result
def create_folder_by_path(self, folder_path):
    """
    Creates a folder as specified by folder_path. Folders in the path
    are checked for existence and created if they aren't already.

    :param folder_path: path to new folder from the server root.
    :return: the id of the created folder.
    """
    parent_id = self._get_root_metadata()['id']
    levels = StoreTree.get_path_levels(folder_path)

    # An empty path refers to the store root itself.
    if levels[0] == '':
        return parent_id

    # Descend one level at a time; each create_folder call returns the id
    # that becomes the parent for the next level down.
    for level_name in levels:
        parent_id = self.create_folder(parent_id, level_name)

    return parent_id
def get_root_file_tree(self, root_folder_path=''):
    """
    This is a generator function. Each iteration returned will be an
    instance of StoreTree - this instance will just show the progress.
    Just use the last one returned for a complete tree.

    :param root_folder_path: the path to the root folder of the desired store.
    :return: StoreTree instance.
    """
    root_folder_id = self._get_folder_path_metadata(root_folder_path)['id']

    result_tree = StoreTree(root_folder_id)

    # Recursive traverse of the root - the provider returns the whole
    # subtree in a single response, nested under 'metadata'.
    r, rx_dict = self._do_request(
        'get',
        http_server_utils.join_url_components(
            [self._api_drive_endpoint_prefix, 'listfolder']),
        params={
            'folderid': _integer_id_from_str_id(root_folder_id),
            'recursive': 1
        })

    # DFS the resultant contents lists to build tree
    stack = [rx_dict['metadata']]

    while len(stack) > 0:
        parent_item = stack.pop()

        for item in parent_item['contents']:
            if item['isfolder']:
                result_tree.add_folder(item['id'],
                                       name=item['name'],
                                       parent_id=parent_item['id'])
                stack.append(item)
            else:
                # NOTE(review): no file_hash is recorded for this
                # provider - sync falls back to modified-time comparison.
                result_tree.add_file(
                    item['id'],
                    name=item['name'],
                    parent_id=parent_item['id'],
                    modified_datetime=_convert_pcloud_string_to_dt(
                        item['modified']))

        # Yield after each folder is folded in so callers can report
        # progress, mirroring the other providers' generators.
        yield result_tree
def testFindItemByPath(self):
    """find_item_by_path: folder lookups, file lookups, and misses.

    Fixture tree:
        root - folder1 - folder4 - file2
             - folder2
             - folder3 - folder5
             - file1.txt
             - file3.png
    """
    tree = StoreTree(0)
    tree.add_folder(1, 'folder1')
    tree.add_folder(2, 'folder2')
    tree.add_folder(3, 'folder3')
    tree.add_folder(4, 'folder4', parent_id=1)
    tree.add_folder(5, 'folder5', parent_id=3)
    tree.add_file(6, 'file1.txt')
    tree.add_file(7, 'file2', parent_id=4)
    tree.add_file(8, 'file3.png')

    # Root lookup via the empty path.
    root_dict = tree.find_item_by_path('')
    self.assertEqual(len(root_dict['files']), 2)
    self.assertEqual(len(root_dict['folders']), 3)
    self.assertEqual(root_dict['id'], 0)
    self.assertEqual(root_dict['name'], '')

    # A trailing separator must be tolerated.
    folder1_dict = tree.find_item_by_path('folder1/', is_path_to_file=False)
    self.assertEqual(len(folder1_dict['files']), 0)
    self.assertEqual(len(folder1_dict['folders']), 1)
    self.assertEqual(folder1_dict['id'], 1)
    self.assertEqual(folder1_dict['name'], 'folder1')

    folder3_dict = tree.find_item_by_path('folder3', is_path_to_file=False)
    self.assertEqual(len(folder3_dict['files']), 0)
    self.assertEqual(len(folder3_dict['folders']), 1)
    self.assertEqual(folder3_dict['id'], 3)
    self.assertEqual(folder3_dict['name'], 'folder3')

    folder4_dict = tree.find_item_by_path('folder1/folder4',
                                          is_path_to_file=False)
    self.assertEqual(len(folder4_dict['files']), 1)
    self.assertEqual(len(folder4_dict['folders']), 0)
    self.assertEqual(folder4_dict['id'], 4)
    self.assertEqual(folder4_dict['name'], 'folder4')

    # File lookups. (Fix: a verbatim duplicate of the file1.txt lookup
    # block was removed here - it repeated the exact same assertions.)
    file1_dict = tree.find_item_by_path('file1.txt', is_path_to_file=True)
    self.assertEqual(file1_dict['id'], 6)
    self.assertEqual(file1_dict['name'], 'file1.txt')

    file2_dict = tree.find_item_by_path('folder1/folder4/file2',
                                        is_path_to_file=True)
    self.assertEqual(file2_dict['id'], 7)
    self.assertEqual(file2_dict['name'], 'file2')

    file3_dict = tree.find_item_by_path('file3.png', is_path_to_file=True)
    self.assertEqual(file3_dict['id'], 8)
    self.assertEqual(file3_dict['name'], 'file3.png')

    # Lookups that must miss: wrong level, nonexistent file, and a folder
    # requested as a file.
    self.assertIs(tree.find_item_by_path('folder4'), None)
    self.assertIs(
        tree.find_item_by_path('folder1/folder4/file3',
                               is_path_to_file=True), None)
    self.assertIs(
        tree.find_item_by_path('folder1/folder4/file3',
                               is_path_to_file=False), None)
    self.assertIs(
        tree.find_item_by_path('folder1/folder4', is_path_to_file=True),
        None)
def testGetfilePathsList(self):
    """get_file_paths_list returns one root-relative path per file."""
    store = StoreTree(0)
    store.add_folder(1, 'folder1')
    store.add_folder(2, 'folder2')
    store.add_folder(3, 'folder3')
    store.add_folder(4, 'folder4', parent_id=1)
    store.add_folder(5, 'folder5', parent_id=3)
    store.add_file(6, 'file1.txt')
    store.add_file(7, 'file2', parent_id=4)
    store.add_file(8, 'file3.png')

    paths = store.get_file_paths_list()

    # Exactly the three files, each addressed from the root.
    self.assertEqual(len(paths), 3)
    for segments in (
            ['', 'file1.txt'],
            ['', 'folder1', 'folder4', 'file2'],
            ['', 'file3.png'],
    ):
        self.assertIn(StoreTree.concat_paths(segments), paths)
def testFindItemById(self):
    """find_item_by_id and find_item_parent_by_id over a small fixture."""
    store = StoreTree(0)
    store.add_folder(1, 'folder1')
    store.add_folder(2, 'folder2')
    store.add_folder(3, 'folder3')
    store.add_folder(4, 'folder4', parent_id=1)
    store.add_folder(5, 'folder5', parent_id=3)
    store.add_file(6, 'file1.txt')
    store.add_file(7, 'file2', parent_id=4)
    store.add_file(8, 'file3.png')

    def check_folder(folder, item_id, name, n_files, n_folders):
        # Shared assertions for a folder dict returned by a lookup.
        self.assertEqual(len(folder['files']), n_files)
        self.assertEqual(len(folder['folders']), n_folders)
        self.assertEqual(folder['id'], item_id)
        self.assertEqual(folder['name'], name)

    def check_file(file_item, item_id, name):
        # Shared assertions for a file dict returned by a lookup.
        self.assertEqual(file_item['id'], item_id)
        self.assertEqual(file_item['name'], name)

    check_folder(store.find_item_by_id(0), 0, '', 2, 3)
    check_folder(store.find_item_by_id(1), 1, 'folder1', 0, 1)
    check_folder(store.find_item_by_id(4), 4, 'folder4', 1, 0)

    check_file(store.find_item_by_id(6), 6, 'file1.txt')
    check_file(store.find_item_by_id(7), 7, 'file2')
    check_file(store.find_item_by_id(8), 8, 'file3.png')

    # Unknown id returns None.
    self.assertIs(store.find_item_by_id(39), None)

    # ================================================
    # Test find parent
    parent_dict, _ = store.find_item_parent_by_id(5)
    check_folder(parent_dict, 3, 'folder3', 0, 1)

    parent_dict, _ = store.find_item_parent_by_id(2)
    check_folder(parent_dict, 0, '', 2, 3)

    # Parent lookup of an unknown id returns (None, ...).
    self.assertIs(store.find_item_parent_by_id(29)[0], None)
def testAddTree(self):
    """add_tree grafts one StoreTree into another at a given parent id."""
    # Tree 1
    # - folder1 - folder4 - file2
    #           - file4
    # - folder2
    # - folder3 - folder5
    # - file1.txt
    # - file3.png
    tree1 = StoreTree(0)
    tree1.add_folder(1, 'folder1')
    tree1.add_folder(2, 'folder2')
    tree1.add_folder(3, 'folder3')
    tree1.add_folder(4, 'folder4', parent_id=1)
    tree1.add_folder(5, 'folder5', parent_id=3)
    tree1.add_file(6, 'file1.txt')
    tree1.add_file(7, 'file2', parent_id=4)
    tree1.add_file(8, 'file3.png')
    tree1.add_file(9, 'file4', parent_id=1)

    # Tree 2
    # - folder6 - folder7 - file5
    # - file6.png
    tree2 = StoreTree(100)
    tree2.add_folder(10, 'folder6')
    tree2.add_folder(11, 'folder7', parent_id=10)
    tree2.add_file(12, 'file5.txt', parent_id=11)
    tree2.add_file(13, 'file6.png')

    # New tree
    # - folder1 - folder4 - file2
    #           - file4
    #
    # - folder2 - tree2 - folder6 - folder7 - file5.txt
    #                   - file6.png
    #
    # - folder3 - folder5
    # - file1.txt
    # - file3.png
    tree1.add_tree(tree2, 2)
    tree1.update_folder_name(100, 'tree2')

    # Root of tree1 is unchanged by the graft.
    root_dict = tree1.find_item_by_id(0)
    self.assertEqual(len(root_dict['files']), 2)
    self.assertEqual(len(root_dict['folders']), 3)
    self.assertEqual(root_dict['id'], 0)
    self.assertEqual(root_dict['name'], '')

    # tree2's root (id 100) now appears as a renamed child of folder2.
    tree2_dict = tree1.find_item_by_id(100)
    self.assertEqual(len(tree2_dict['files']), 1)
    self.assertEqual(len(tree2_dict['folders']), 1)
    self.assertEqual(tree2_dict['id'], 100)
    self.assertEqual(tree2_dict['name'], 'tree2')

    # tree2's interior items are reachable from tree1 by id.
    folder6_dict = tree1.find_item_by_id(10)
    self.assertEqual(len(folder6_dict['files']), 0)
    self.assertEqual(len(folder6_dict['folders']), 1)
    self.assertEqual(folder6_dict['id'], 10)
    self.assertEqual(folder6_dict['name'], 'folder6')

    folder7_dict = tree1.find_item_by_id(11)
    self.assertEqual(len(folder7_dict['files']), 1)
    self.assertEqual(len(folder7_dict['folders']), 0)
    self.assertEqual(folder7_dict['id'], 11)
    self.assertEqual(folder7_dict['name'], 'folder7')

    file5_dict = folder7_dict['files'][0]
    self.assertEqual(file5_dict['id'], 12)
    self.assertEqual(file5_dict['name'], 'file5.txt')

    file6_dict = tree1.find_item_by_id(13)
    self.assertEqual(file6_dict['id'], 13)
    self.assertEqual(file6_dict['name'], 'file6.png')
def testAddFolderPath(self): tree = StoreTree(0) # - folder1 - folder2 - file1.txt # - folder3 - folder4 tree.add_folder_path([{ 'name': 'folder1', 'id': 1 }, { 'name': 'folder2', 'id': 2 }]) tree.add_file(3, 'file1.txt', 2) tree.add_folder(4, 'folder3', 0) tree.add_folder_path([ { 'name': 'folder3', 'id': 4 }, { 'name': 'folder4', 'id': 5 }, ]) #Check exception raised when adding existing folder with different id with self.assertRaises(ValueError): tree.add_folder_path([{ 'name': 'folder1', 'id': 1 }, { 'name': 'folder2', 'id': 86 }]) root_dict = tree.find_item_by_id(tree.root_id) self.assertEqual(len(root_dict['files']), 0) self.assertEqual(len(root_dict['folders']), 2) self.assertEqual(root_dict['id'], 0) folder1_dict = tree.find_item_by_id(1) self.assertEqual(len(folder1_dict['files']), 0) self.assertEqual(len(folder1_dict['folders']), 1) self.assertEqual(folder1_dict['id'], 1) self.assertEqual(folder1_dict['name'], 'folder1') folder2_dict = tree.find_item_by_id(2) self.assertEqual(len(folder2_dict['files']), 1) self.assertEqual(len(folder2_dict['folders']), 0) self.assertEqual(folder2_dict['id'], 2) self.assertEqual(folder2_dict['name'], 'folder2') folder3_dict = tree.find_item_by_id(4) self.assertEqual(len(folder3_dict['files']), 0) self.assertEqual(len(folder3_dict['folders']), 1) self.assertEqual(folder3_dict['id'], 4) self.assertEqual(folder3_dict['name'], 'folder3') folder4_dict = tree.find_item_by_id(5) self.assertEqual(len(folder4_dict['files']), 0) self.assertEqual(len(folder4_dict['folders']), 0) self.assertEqual(folder4_dict['id'], 5) self.assertEqual(folder4_dict['name'], 'folder4')
def testCreateFromId(self):
    """create_new_from_id returns a new tree rooted at the given folder."""
    tree = StoreTree(0)

    # - folder1 - folder4 - file2
    #           - file4
    # - folder2
    # - folder3 - folder5
    # - file1.txt
    # - file3.png
    tree.add_folder(1, 'folder1')
    tree.add_folder(2, 'folder2')
    tree.add_folder(3, 'folder3')
    tree.add_folder(4, 'folder4', parent_id=1)
    tree.add_folder(5, 'folder5', parent_id=3)
    tree.add_file(6, 'file1.txt')
    tree.add_file(7, 'file2', parent_id=4)
    tree.add_file(8, 'file3.png')
    tree.add_file(9, 'file4', parent_id=1)

    new_tree = tree.create_new_from_id(1)

    # folder 1 is now the new root
    # - folder4 - file2
    # - file4
    # Fix: these lookups previously queried the ORIGINAL tree (which
    # trivially passed because folder1 exists there too), so new_tree was
    # assigned but never actually exercised.
    root_dict = new_tree.find_item_by_id(1)
    self.assertEqual(len(root_dict['files']), 1)
    self.assertEqual(len(root_dict['folders']), 1)
    self.assertEqual(root_dict['id'], 1)
    self.assertEqual(root_dict['name'], 'folder1')

    file4_dict = root_dict['files'][0]
    self.assertEqual(file4_dict['id'], 9)
    self.assertEqual(file4_dict['name'], 'file4')

    folder4_dict = new_tree.find_item_by_id(4)
    self.assertEqual(len(folder4_dict['files']), 1)
    self.assertEqual(len(folder4_dict['folders']), 0)
    self.assertEqual(folder4_dict['id'], 4)
    self.assertEqual(folder4_dict['name'], 'folder4')
def _get_drive_folder_tree(self):
    """
    Builds the complete folder tree by paging through the drive's folder
    listing and stitching out-of-order results together.

    :return: an instance of StoreTree representing the entire folder tree
        of the drive.
    """
    # Get the root id and create store tree
    root_id = self._get_root_folder()['id']
    result = StoreTree(id=root_id)

    # Google returns items randomly, only specifying the parent id.
    # We might not have received the parent item yet, so we maintain
    # a list of trees, the first being our result, the others are
    # "dangling" trees where the root item hasn't been received yet
    # but has been mentioned as a parent of an item that HAS been
    # received.
    tree_list = [result]

    response_dict = None
    while response_dict is None or 'nextPageToken' in response_dict:
        # Folders only, excluding trashed items; 1000 is the page cap.
        params = {
            'q': 'mimeType = \'application/vnd.google-apps.folder\' and trashed = false',
            'fields': 'files/id, files/name, files/parents',
            'pageSize': 1000
        }
        if isinstance(response_dict,
                      dict) and 'nextPageToken' in response_dict:
            params['pageToken'] = response_dict['nextPageToken']

        r = self._do_request('get',
                             http_server_utils.join_url_components([
                                 self._api_drive_endpoint_prefix, 'files'
                             ]),
                             params=params,
                             error_500_retries=5)
        response_dict = r.json()

        for new_folder in response_dict['files']:
            # First check if the parent exists in one of the trees. If not, we'll
            # need to create it as the root of a new dangling tree and update later if/when
            # it arrives. NOTE(review): only parents[0] is considered -
            # presumably multi-parent items don't occur here; verify.
            parent_tree = None
            for tree in tree_list:
                if tree.find_item_by_id(
                        new_folder['parents'][0]) is not None:
                    parent_tree = tree
                    break

            if parent_tree is None:
                parent_tree = StoreTree(id=new_folder['parents'][0])
                tree_list.append(parent_tree)

            # Now check if this item has already been added as a parent
            # (that will mean it is a tree root). If so, move it to
            # its parent and update its name.
            added = False
            for tree_index in range(0, len(tree_list)):
                tree = tree_list[tree_index]
                if tree.root_id == new_folder['id']:
                    tree.update_folder_name(new_folder['id'],
                                            new_folder['name'])
                    parent_tree.add_tree(tree, new_folder['parents'][0])
                    # Safe to delete while "iterating" because we break
                    # out of the index loop immediately afterwards.
                    del tree_list[tree_index]
                    added = True
                    break

            # New folder doesn't exist, create a new one.
            if added is False:
                parent_tree.add_folder(new_folder['id'],
                                       new_folder['name'],
                                       new_folder['parents'][0])

    return result
def get_root_file_tree(self, root_folder_path=''):
    """
    This is a generator function. Each iteration returned will be an
    instance of StoreTree - this instance will just show the progress.
    Just use the last one returned for a complete tree.

    :param root_folder_path: the path to the root folder of the desired store.
    :return: StoreTree instance.
    :raises ValueError: if root_folder_path can't be found on the drive.
    """
    # Get root id
    root_folder_path = StoreTree.standardise_path(root_folder_path)
    if root_folder_path == '':
        url = http_server_utils.join_url_components(
            [self._api_drive_endpoint_prefix, 'root'])
    else:
        url = http_server_utils.join_url_components([
            self._api_drive_endpoint_prefix,
            'root:/{}'.format(StoreTree.standardise_path(root_folder_path))
        ])
    r = self._do_request('get',
                         url,
                         params={'select': 'id'},
                         raise_for_status=False)
    if r.status_code == 404:
        # Distinguish "path doesn't exist" from other 404 causes.
        error_dict = r.json()
        if error_dict['error']['code'] == 'itemNotFound':
            raise ValueError('Couldn\'t find folder with path {}'.format(
                root_folder_path))
    r.raise_for_status()

    root_id = r.json()['id']
    result_tree = StoreTree(id=root_id)

    # Have to traverse the whole thing per directory, but can use
    # batching to help a little with latency...
    # We use a stack for returned folder ids and one for nextLink
    # urls and service the next link urls first.
    stack = [root_id]

    while len(stack) > 0:
        batch = {}

        # For each folder id on the stack, build a request and put in the batch
        while len(stack) > 0 and self._batch_is_full(batch) == False:
            self._add_request_to_batch(
                batch, 'GET',
                '/me/drive/items/{}/children'.format(stack.pop()),
                params={
                    'top': 1000,
                    'select':
                    'id,name,folder,file,parentReference,fileSystemInfo'
                })

        # Do batch request
        r = self._do_request('post', self._api_drive_batch_url, json=batch)
        rx_dict = r.json()

        # Maintain a list for response bodies because each response body could have
        # a nextLink that needs to be accessed.
        body_list = [
            response['body'] for response in rx_dict['responses']
            if 'body' in response
        ]

        for body in body_list:
            for item in body['value']:
                if 'folder' in item:
                    result_tree.add_folder(
                        item['id'],
                        name=item['name'],
                        parent_id=item['parentReference']['id'])
                    # Queue the subfolder for a later batch.
                    stack.append(item['id'])
                else:
                    # Not all file types carry a sha1 (e.g. some remote
                    # items) - fall back to no hash.
                    if 'sha1Hash' in item['file']['hashes']:
                        h = item['file']['hashes']['sha1Hash'].lower()
                    else:
                        h = None
                    result_tree.add_file(
                        item['id'],
                        name=item['name'],
                        parent_id=item['parentReference']['id'],
                        modified_datetime=date_parser.isoparse(
                            item['fileSystemInfo']
                            ['lastModifiedDateTime']),
                        file_hash=h)

            # Appending to body_list while iterating it is deliberate:
            # follow-up pages are processed in the same pass.
            if '@odata.nextLink' in body:
                r = self._do_request('get', body['@odata.nextLink'])
                body_list.append(r.json())

        yield result_tree
def _convert_yandex_to_standard_path(yandex_path):
    """Strip yandex's 'disk:' prefix and leading slashes, then return the
    standardised form of the remaining path."""
    trimmed = yandex_path.replace('disk:', '').lstrip('/')
    return StoreTree.standardise_path(trimmed)
def sync_drives(path_to_local_root,
                path_to_config_dir,
                provider_dict,
                config_pw,
                analyse_only=False):
    """
    Generator: yields None after each scanned local item and after each
    applied operation so callers can report progress.

    Will check every folder and file in path_to_local_root and, for every
    provider in providers_list, upload files that have been modified since
    the last upload and delete any files or folders that are no longer on
    the local root.

    :param path_to_local_root: path to the local store root directory.
    :param path_to_config_dir: Directory that stores the config files for
        the providers.
    :param provider_dict: A {'provider_name': , 'user_id' ,
        'server_root_path': ,} dict.
        provider_name can be 'google', ...
        user_id is used to find the appropriate config file in
        path_to_config_dir - each provider can have its own config file
        format and info.
        server_root_path is the path on the cloud drive to the store root
        folder (relative to the drive root).
    :param config_pw: Password used to encrypt the config files.
    :param analyse_only: if True, log the operations that would be done
        but don't modify the server.
    :return: Nothing.
    :raises FileNotFoundError: if path_to_local_root doesn't exist.
    """
    if os.path.exists(path_to_local_root) is False:
        raise FileNotFoundError(
            'Local store root {} does not exist.'.format(path_to_local_root))

    # NOTE(review): uses the root logging module here but the module-level
    # `logger` below - presumably these should be unified; confirm.
    logging.info(
        'Starting sync to {} drive for account {} and store {}'.format(
            provider_dict['provider_name'], provider_dict['user_id'],
            provider_dict['server_root_path']))

    provider_class = provider_list.get_drive_class(
        provider_dict['provider_name'])
    cloud_drive = provider_class(provider_dict['user_id'],
                                 path_to_config_dir, config_pw)

    # Build remote tree - the provider generator yields progressively
    # complete trees; only the last (complete) one is kept.
    for res in cloud_drive.get_root_file_tree(
            root_folder_path=provider_dict['server_root_path']):
        server_tree = res

    # We'll build a list of dicts that specify required changes so we can report progress:
    # {'name': , 'type': <dir or file>,
    #  'op': <'c', 'u', 'd'>,
    #  'id': ,
    #  'parent_path': ,
    #  'local_path': ,
    #  'mtime': }
    operations = []

    # Now cycle through the local store root and do the following:
    # 1. for each folder, check the local contents are present on the server and
    # if not, or if the file modified date is older on the server, upload to the server.
    # 2. for each folder, delete any files or folders that are on the server but not
    # on the local.
    #
    # NOTE: This assumes pathlib.Path.glob('**') returns parent directories before their children.
    local_root = Path(path_to_local_root)

    # This chaining will produce all items in the local root (recursive) AND the local root itself.
    # It is important we have the local root too for checking deleted items on
    # the local.
    for item in itertools.chain([local_root], local_root.glob('**/*')):
        relative_path = item.relative_to(local_root)

        # '.' is the local root itself - never created/updated, but still
        # scanned below for server-side deletions.
        if str(relative_path) != '.':
            parent_relative_path = item.parent.relative_to(local_root)
            server_item = \
                server_tree.find_item_by_path(str(relative_path),
                                              is_path_to_file=item.is_file())
            local_modified_time = datetime.datetime.fromtimestamp(
                item.stat().st_mtime, tz=datetime.timezone.utc)

            if server_item is None:
                # Not on server, add it
                operation = {
                    'name': item.name,
                    'op': 'c',
                    'parent_path': str(parent_relative_path),
                    'mtime': local_modified_time
                }
                if item.is_dir() is True:
                    operation['type'] = 'dir'
                elif item.is_file() is True:
                    operation['type'] = 'file'
                    # Only file creates need the local content path.
                    operation['local_path'] = str(item)
                operations.append(operation)
            elif item.is_file():
                # Is on the server. If a file, check date for update
                server_item = server_tree.find_item_by_path(
                    str(relative_path), is_path_to_file=True)

                # files_differ_on_hash returns None when no hash is
                # available, in which case fall back to mtime comparison.
                hash_different = provider_class.files_differ_on_hash(
                    str(item), server_item['file_hash'])
                if (hash_different is True
                        or (hash_different is None and _files_dt_out_of_sync(
                            local_modified_time, server_item['modified']))):
                    operations.append({
                        'id': server_item['id'],
                        'type': 'file',
                        'name': item.name,
                        'op': 'u',
                        'local_path': str(item),
                        'mtime': local_modified_time
                    })

        # For each folder on the local store (starting from the root itself),
        # check if there are any files or folders on the server tree that don't
        # exist on the local (this works because both locations are guaranteed
        # to have the root directory).
        if item.is_dir():
            server_folder = server_tree.find_item_by_path(
                str(relative_path), is_path_to_file=False)

            if server_folder is not None:
                for server_child in (server_folder['folders'] +
                                     server_folder['files']):
                    exists_on_local = False
                    for local_child in item.iterdir():
                        # Name AND kind (dir/file) must match to count as
                        # present locally.
                        if (local_child.name == server_child['name'] and
                                ((local_child.is_dir() and
                                  StoreTree.item_is_folder(server_child)) or
                                 (local_child.is_file() and
                                  not StoreTree.item_is_folder(server_child)))):
                            exists_on_local = True
                            break

                    if exists_on_local is False:
                        # Can it on the server
                        operations.append({
                            'id': server_child['id'],
                            'op': 'd',
                            'name': server_child['name']
                        })

        # Progress tick for the scan phase.
        yield None

    # Now apply the changes
    logger.info('Will carry out {} operations for sync...'.format(
        len(operations)))
    for i in range(0, len(operations)):
        operation = operations[i]

        if operation['op'] == 'c':
            logger.info('{} {} {} in {} (operation {}/{})'.format(
                'Would create' if analyse_only is True else 'Creating',
                operation['type'], operation['name'],
                operation['parent_path'], i + 1, len(operations)))
            if analyse_only is False:
                # Parent is resolved at apply time so folders created by
                # earlier operations (added to server_tree below) resolve.
                parent_id = server_tree.find_item_by_path(
                    operation['parent_path'], is_path_to_file=False)['id']
                if operation['type'] == 'dir':
                    new_id = cloud_drive.create_folder(
                        parent_id, operation['name'])
                    server_tree.add_folder(new_id,
                                           name=operation['name'],
                                           parent_id=parent_id)
                else:
                    cloud_drive.create_file(parent_id, operation['name'],
                                            operation['mtime'],
                                            operation['local_path'])
        elif operation['op'] == 'u':
            logger.info('{} file {} with id {} (operation {}/{})'.format(
                'Would upload' if analyse_only is True else 'Uploading',
                operation['name'], operation['id'], i + 1,
                len(operations)))
            if analyse_only is False:
                cloud_drive.update_file(operation['id'], operation['mtime'],
                                        operation['local_path'])
        elif operation['op'] == 'd':
            logger.info('{} file {} with id {} (operation {}/{})'.format(
                'Would delete' if analyse_only is True else 'Deleting',
                operation['name'], operation['id'], i + 1,
                len(operations)))
            if analyse_only is False:
                cloud_drive.delete_item_by_id(operation['id'])
                server_tree.remove_item(operation['id'])

        # Progress tick for the apply phase.
        yield None