def download_to_path(options, paths):
    """Download each remote path in *paths* to the identical local path.

    For every path, the required local directory is created first (the
    path itself when it denotes a directory, otherwise its parent), then
    the resource is fetched with ``Client.download_sync``.

    Args:
        options (dict): WebDAV connection options passed to ``Client``.
        paths (iterable of str): Remote paths to download; each one is
            also used as the local destination path.
    """
    client = Client(options)
    for path in paths:
        urn = Urn(path)
        # A directory is created as-is; for a file we create its parent.
        create_path = path if urn.is_dir() else os.path.dirname(path)
        # exist_ok replaces the former bare `except: pass`, which
        # silently swallowed every error (not just "already exists").
        os.makedirs(create_path, exist_ok=True)
        client.download_sync(path, path)
class DavSync(Sync):
    """Synchronize a local directory with a WebDAV server.

    Attributes:
        path (str): Local directory to keep in sync.
        dav_path (str): Remote directory corresponding to the local one.
        dav_root (str): Root path used to reach the DAV endpoint
            (Nextcloud, for example, uses ``/remote.php/webdav/``).
        username (str): WebDAV user name.
        password (str): WebDAV password.
        hostname (str): WebDAV host, e.g. ``https://cloud.example.com/``.
    """

    def __init__(
        self,
        path: str,
        dav_path: str,
        dav_root: str,
        username: str,
        password: str,
        hostname: str,
    ):
        super().__init__(path)
        self.dav_path = dav_path
        self.username = username
        self.password = password
        self.hostname = hostname
        self.client = Client({
            "webdav_hostname": hostname,
            "webdav_login": username,
            "webdav_password": password,
            "root": dav_root,
        })
        # Start from the server's current state.
        self.pull()

    def pull(self):
        """Download the updated remote directory into the local one."""
        self.client.download_sync(self.dav_path, self.os_path)

    def push(self, fname, msg=""):
        """Upload a single file to the server.

        Missing remote directories/files are created by the client.

        Args:
            fname (str): Name of the file to upload.
            msg (str, optional): Ignored; kept for interface parity with
                other Sync backends.
        """
        remote_target = path.join(self.dav_path, fname)
        local_source = path.join(self.os_path, fname)
        self.client.upload_file(remote_target, local_source)
class Webdav_sync(Sync):
    """Back up and restore the DiscoBASE database via a WebDAV server."""

    def __init__(self, user, password, url, db_file):
        """Validate the WebDAV configuration and create the client.

        Args:
            user (str): WebDAV login.
            password (str): WebDAV password.
            url (str): WebDAV host URL.
            db_file (str): Local database file to back up / restore.

        Raises:
            SystemExit: If user, password or url is empty.
        """
        super().__init__()
        log.info("We are in Webdav_sync.__init__")
        # Guard clause: refuse to run with an incomplete configuration.
        if user == '' or password == '' or url == '':
            log.error("Webdav config incomplete. Check config.yaml")
            raise SystemExit
        self.user = user
        self.password = password
        self.url = url
        self.discobase = db_file
        self.client = Client({
            'webdav_hostname': self.url,
            'webdav_login': self.user,
            'webdav_password': self.password,
        })

    def _webdav_mtime(self, filename):
        """Return the server-side mtime formatted as YYYY-MM-DD_HHMMSS.

        Currently unused, kept as a helper for future diff checks.
        """
        mod_server_dt = parse(self.client.info(filename)['modified'])
        mod_server_str = mod_server_dt.strftime('%Y-%m-%d_%H%M%S')
        return mod_server_str

    def backup(self):
        """Upload the database unless a backup with this mtime exists.

        Returns:
            bool: True after the backup listing has been shown.

        Raises:
            SystemExit: If the server cannot be reached.
        """
        # The backup name is derived from the local file's mtime.
        bak_file_name = self._get_fileobj_mtime(self.discobase)
        print("Uploading as {} to {}".format(bak_file_name, self.url))
        try:
            existing = self.client.check(bak_file_name)
        except WebDavException as exception:
            log.error('Webserver returned: {}'.format(exception))
            raise SystemExit
        if existing:
            log.warning('Backup existing. Won\'t overwrite "{}" '.format(
                bak_file_name))
        else:
            print('Backup not existing yet, uploading ...')
            self.client.upload_sync(remote_path='{}'.format(bak_file_name),
                                    local_path='{}'.format(self.discobase))
        # In any case, show the list of existing backups.
        self.show_backups()
        return True

    def show_backups(self, restore=False):
        """List backups on the server; optionally pick one to restore.

        Args:
            restore (bool): When True, prompt for a backup ID and return
                the chosen filename.

        Returns:
            str or None: Chosen backup filename when restore is True.

        Raises:
            SystemExit: If the entered ID is not a number or out of range.
        """
        if not restore:
            print('\nExisting backups:')
        all_files = self.client.list()
        all_files.sort()  # sorts by name
        relevant_files = []
        for i, resource in enumerate(all_files):
            # Only entries ending in a _YYYY-MM-DD_HHMMSS timestamp count.
            # Raw string avoids invalid-escape-sequence warnings.
            if re.search(r'_(\d+)-(\d+)-(\d+)_(\d+)$', resource):
                relevant_files.append(resource)
            else:
                log.debug('Sync: Skipping resource: {}'.format(all_files[i]))
        for j, file in enumerate(relevant_files):
            file = '({}) - {}'.format(j, file)
            print(file)
        if restore:
            restore_id = ask_user('Restore backup #: ')
            try:
                restore_file = relevant_files[int(restore_id)]
            except ValueError:
                log.warning('Nothing to restore!')
                raise SystemExit
            except IndexError:
                log.warning('Non-existent ID. Nothing to restore!')
                raise SystemExit
            print('Restoring backup {}...'.format(restore_file))
            return restore_file
        print()

    def restore(self):
        """Interactively choose a backup and download it over the local DB."""
        print('\nWhich backup would you like to restore?')
        restore_filename = self.show_backups(restore=True)
        overwrite = ask_user(
            "Download backup and overwrite local file {} (n)? ".format(
                self.discobase))
        if overwrite.lower() == 'y':
            self.client.download_sync(
                remote_path='{}'.format(restore_filename),
                local_path='{}'.format(self.discobase))
            # Align the local file's mtime with the restored backup's name.
            self._touch_to_backupdate(restore_filename)
# download files
log('downloading files:')
conn_options = None
basepath = None
client = None
contents_zip = None
if config.has_section('WEBDAV'):
    conn_options = {
        'webdav_hostname': config['WEBDAV']['hostname'],
        'webdav_login': config['WEBDAV']['username'],
        'webdav_password': config['WEBDAV']['sync_pass']
    }
    client = Client(conn_options)
    client.verify = True
    log('- downloading keystore')
    client.download_sync(
        remote_path=add_basepath('OmniFocus.ofocus/encrypted'),
        local_path='encrypted')
    log('- finding database')
    files = client.list(add_basepath('OmniFocus.ofocus/'))
    # The full database snapshot is the zip whose name starts with '00'.
    contents_zip = [file for file in files
                    if file.endswith('.zip') and file.startswith('00')][0]
    log('- downloading database')
    client.download_sync(
        remote_path=add_basepath('OmniFocus.ofocus/{}'.format(contents_zip)),
        local_path=contents_zip)

# decrypt them
log('decrypting database')
dec_contents_zip = None
if config.has_section('ENC') and 'is_encrypted' in config['ENC'] \
        and config['ENC'].getboolean('is_encrypted') \
        and contents_zip is not None:
    dec_contents_zip = 'dec_{}'.format(contents_zip)
    enc_pass = config['ENC']['enc_pass']
    # `with` closes the keystore file; it was previously leaked.
    with open('encrypted', 'rb') as keystore:
        encryptionMetadata = DocumentKey.parse_metadata(keystore)
    metadataKey = DocumentKey.use_passphrase(encryptionMetadata, enc_pass)
class ClientTestCase(TestCase): remote_path_file = 'test_dir/test.txt' remote_path_file2 = 'test_dir2/test.txt' remote_path_dir = 'test_dir' remote_path_dir2 = 'test_dir2' local_base_dir = 'tests/' local_file = 'test.txt' local_file_path = local_base_dir + 'test.txt' local_path_dir = local_base_dir + 'res/test_dir' def setUp(self): options = { 'webdav_hostname': 'https://webdav.yandex.ru', 'webdav_login': '******', 'webdav_password': '******' } self.client = Client(options) if path.exists(path=self.local_path_dir): shutil.rmtree(path=self.local_path_dir) def tearDown(self): if path.exists(path=self.local_path_dir): shutil.rmtree(path=self.local_path_dir) if self.client.check(remote_path=self.remote_path_dir): self.client.clean(remote_path=self.remote_path_dir) if self.client.check(remote_path=self.remote_path_dir2): self.client.clean(remote_path=self.remote_path_dir2) def test_list(self): self._prepare_for_downloading() file_list = self.client.list() self.assertIsNotNone(file_list, 'List of files should not be None') self.assertGreater(file_list.__len__(), 0, 'Expected that amount of files more then 0') def test_free(self): self.assertGreater( self.client.free(), 0, 'Expected that free space on WebDAV server is more then 0 bytes') def test_check(self): self.assertTrue(self.client.check(), 'Expected that root directory is exist') def test_mkdir(self): if self.client.check(remote_path=self.remote_path_dir): self.client.clean(remote_path=self.remote_path_dir) self.client.mkdir(remote_path=self.remote_path_dir) self.assertTrue(self.client.check(remote_path=self.remote_path_dir), 'Expected the directory is created.') @unittest.skip( "Yandex brakes response for file it contains property resourcetype as collection but it should " "be empty for file") def test_download_to(self): self._prepare_for_downloading() buff = BytesIO() self.client.download_from(buff=buff, remote_path=self.remote_path_file) self.assertEquals(buff.getvalue(), 'test content for testing of webdav 
client') @unittest.skip( "Yandex brakes response for file it contains property resourcetype as collection but it should " "be empty for file") def test_download(self): self._prepare_for_downloading() self.client.download(local_path=self.local_path_dir, remote_path=self.remote_path_dir) self.assertTrue(path.exists(self.local_path_dir), 'Expected the directory is downloaded.') self.assertTrue(path.isdir(self.local_path_dir), 'Expected this is a directory.') self.assertTrue( path.exists(self.local_path_dir + os.path.sep + self.local_file), 'Expected the file is downloaded') self.assertTrue( path.isfile(self.local_path_dir + os.path.sep + self.local_path_file), 'Expected this is a file') @unittest.skip( "Yandex brakes response for file it contains property resourcetype as collection but it should " "be empty for file") def test_download_sync(self): self._prepare_for_downloading() os.mkdir(self.local_path_dir) def callback(): self.assertTrue( path.exists(self.local_path_dir + os.path.sep + self.local_file), 'Expected the file is downloaded') self.assertTrue( path.isfile(self.local_path_dir + os.path.sep + self.local_file), 'Expected this is a file') self.client.download_sync(local_path=self.local_path_dir + os.path.sep + self.local_file, remote_path=self.remote_path_file, callback=callback) self.assertTrue( path.exists(self.local_path_dir + os.path.sep + self.local_file), 'Expected the file has already been downloaded') @unittest.skip( "Yandex brakes response for file it contains property resourcetype as collection but it should " "be empty for file") def test_download_async(self): self._prepare_for_downloading() os.mkdir(self.local_path_dir) def callback(): self.assertTrue( path.exists(self.local_path_dir + os.path.sep + self.local_file), 'Expected the file is downloaded') self.assertTrue( path.isfile(self.local_path_dir + os.path.sep + self.local_file), 'Expected this is a file') self.client.download_async(local_path=self.local_path_dir + os.path.sep + 
self.local_file, remote_path=self.remote_path_file, callback=callback) self.assertFalse( path.exists(self.local_path_dir + os.path.sep + self.local_file), 'Expected the file has not been downloaded yet') def test_upload_from(self): self._prepare_for_uploading() buff = StringIO(u'test content for testing of webdav client') self.client.upload_to(buff=buff, remote_path=self.remote_path_file) self.assertTrue(self.client.check(self.remote_path_file), 'Expected the file is uploaded.') def test_upload(self): self._prepare_for_uploading() self.client.upload(remote_path=self.remote_path_file, local_path=self.local_path_dir) self.assertTrue(self.client.check(self.remote_path_dir), 'Expected the directory is created.') self.assertTrue(self.client.check(self.remote_path_file), 'Expected the file is uploaded.') def test_upload_file(self): self._prepare_for_uploading() self.client.upload_file(remote_path=self.remote_path_file, local_path=self.local_file_path) self.assertTrue(self.client.check(remote_path=self.remote_path_file), 'Expected the file is uploaded.') def test_upload_sync(self): self._prepare_for_uploading() def callback(): self.assertTrue(self.client.check(self.remote_path_dir), 'Expected the directory is created.') self.assertTrue(self.client.check(self.remote_path_file), 'Expected the file is uploaded.') self.client.upload(remote_path=self.remote_path_file, local_path=self.local_path_dir) def test_upload_async(self): self._prepare_for_uploading() def callback(): self.assertTrue(self.client.check(self.remote_path_dir), 'Expected the directory is created.') self.assertTrue(self.client.check(self.remote_path_file), 'Expected the file is uploaded.') self.client.upload(remote_path=self.remote_path_file, local_path=self.local_path_dir) def test_copy(self): self._prepare_for_downloading() self.client.mkdir(remote_path=self.remote_path_dir2) self.client.copy(remote_path_from=self.remote_path_file, remote_path_to=self.remote_path_file2) 
self.assertTrue(self.client.check(remote_path=self.remote_path_file2)) def test_move(self): self._prepare_for_downloading() self.client.mkdir(remote_path=self.remote_path_dir2) self.client.move(remote_path_from=self.remote_path_file, remote_path_to=self.remote_path_file2) self.assertFalse(self.client.check(remote_path=self.remote_path_file)) self.assertTrue(self.client.check(remote_path=self.remote_path_file2)) def test_clean(self): self._prepare_for_downloading() self.client.clean(remote_path=self.remote_path_dir) self.assertFalse(self.client.check(remote_path=self.remote_path_file)) self.assertFalse(self.client.check(remote_path=self.remote_path_dir)) def test_info(self): self._prepare_for_downloading() result = self.client.info(remote_path=self.remote_path_file) self.assertEquals(result['name'], 'test.txt') self.assertEquals(result['size'], '41') self.assertTrue('created' in result) self.assertTrue('modified' in result) def test_directory_is_dir(self): self._prepare_for_downloading() self.assertTrue(self.client.is_dir(self.remote_path_dir), 'Should return True for directory') def test_file_is_not_dir(self): self._prepare_for_downloading() self.assertFalse(self.client.is_dir(self.remote_path_file), 'Should return False for file') def test_get_property_of_non_exist(self): self._prepare_for_downloading() result = self.client.get_property(remote_path=self.remote_path_file, option={'name': 'aProperty'}) self.assertEquals( result, None, 'For not found property should return value as None') def test_set_property(self): self._prepare_for_downloading() self.client.set_property(remote_path=self.remote_path_file, option={ 'namespace': 'test', 'name': 'aProperty', 'value': 'aValue' }) result = self.client.get_property(remote_path=self.remote_path_file, option={ 'namespace': 'test', 'name': 'aProperty' }) self.assertEquals(result, 'aValue', 'Property value should be set') def test_set_property_batch(self): self._prepare_for_downloading() 
self.client.set_property_batch(remote_path=self.remote_path_file, option=[{ 'namespace': 'test', 'name': 'aProperty', 'value': 'aValue' }, { 'namespace': 'test', 'name': 'aProperty2', 'value': 'aValue2' }]) result = self.client.get_property(remote_path=self.remote_path_file, option={ 'namespace': 'test', 'name': 'aProperty' }) self.assertEquals(result, 'aValue', 'First property value should be set') result = self.client.get_property(remote_path=self.remote_path_file, option={ 'namespace': 'test', 'name': 'aProperty2' }) self.assertEquals(result, 'aValue2', 'Second property value should be set') def _prepare_for_downloading(self): if not self.client.check(remote_path=self.remote_path_dir): self.client.mkdir(remote_path=self.remote_path_dir) if not self.client.check(remote_path=self.remote_path_file): self.client.upload_file(remote_path=self.remote_path_file, local_path=self.local_file_path) if not path.exists(self.local_path_dir): os.makedirs(self.local_path_dir) def _prepare_for_uploading(self): if not self.client.check(remote_path=self.remote_path_dir): self.client.mkdir(remote_path=self.remote_path_dir) if not path.exists(path=self.local_path_dir): os.makedirs(self.local_path_dir) if not path.exists(path=self.local_path_dir + os.sep + self.local_file): shutil.copy(src=self.local_file_path, dst=self.local_path_dir + os.sep + self.local_file)
class WebWav:
    """WebDAV helper for shuttling AmiVoice audio/text files.

    Wraps a WebDAV ``Client`` pointed at TeraCLOUD and moves files
    between fixed local staging folders and fixed remote folders.
    """

    #-------------------------------------------------------
    # Constants
    TEMPWAV = 'TempWav/'        # local staging folder for audio to upload
    TEMPTXT = 'TempTxt/'        # local folder for downloaded text results
    UPPATH = 'uploads/'         # remote audio upload folder
    DOWNPATH = 'auto_uploads/'  # remote text download folder
    BACKUPPATH = 'BackUp/'      # local backup folder

    #-------------------------------------------------------
    # Constructor
    def __init__(self):
        #-------------------------------------------------------
        # WebDAV connection information
        options = {
            'webdav_hostname': "https://seto.teracloud.jp/dav/",
            'webdav_login': "******",
            'webdav_password': "******"
        }
        #-------------------------------------------------------
        # WebDAV client; verify=True keeps SSL certificate checking on
        # (True is also the library default)
        self.client = Client(options)
        self.client.verify = True

    #-------------------------------------------------------
    # 1. Delete audio files previously uploaded to the AmiVoice cloud.
    #-------------------------------------------------------
    def delWavFile(self):
        print('---START:delWavFile---')
        # Print the listing before deleting
        print('AMIVOICE OLD FILE DEL BEFORE')
        delFilelist = self.client.list(self.UPPATH)
        print(delFilelist)
        # Delete the files inside the uploads folder
        for file in delFilelist:
            if file == self.UPPATH:
                # list() also returns the directory itself; skip it
                continue
            delFile = self.UPPATH + file
            print('DELFILE:' + delFile)
            self.client.clean(delFile)
        print('AMIVOICE OLD FILE DEL CHECK List:')
        print(self.client.list(self.UPPATH))
        print('---END:delWavFile---')

    #-------------------------------------------------------
    # 2. Upload audio files from the local staging folder (TempWav).
    #-------------------------------------------------------
    def uploadWavFile(self):
        print('---START:uploadWavFile---')
        # Collect the files to upload
        upFileList = os.listdir(self.TEMPWAV)
        print('LOCAL TempWav List:')
        print(upFileList)
        for file in upFileList:
            print('AMIVOICE UPLOAD FILE:%s' % file)
            upFile = self.UPPATH + file
            localUpFile = self.TEMPWAV + file
            self.client.upload_sync(remote_path=upFile,
                                    local_path=localUpFile)
        print('---END:uploadWavFile---')

    #-------------------------------------------------------
    # 3. Delete uploaded audio files from the staging folder (TempWav).
    #-------------------------------------------------------
    def delLocalWavFile(self):
        print('---START:delLocalWavFile---')
        # Run after the upload has finished
        localFileList = os.listdir(self.TEMPWAV)
        for file in localFileList:
            print('LOCAL DEL FILE:%s' % file)
            delfile = self.TEMPWAV + file
            os.remove(delfile)
        print('LOCAL TempWav CHECK List:')
        print(os.listdir(self.TEMPWAV))
        print('---END:delLocalWavFile---')

    #-------------------------------------------------------
    # 4. Download converted text data into the local folder (TempTxt).
    #-------------------------------------------------------
    def downloadTxtFile(self):
        print('---START:downloadTxtFile---')
        downFileList = self.client.list(self.DOWNPATH)
        print('AMIVOICE DOWNLOAD ')
        print(downFileList)
        for file in downFileList:
            if file == self.DOWNPATH:
                # list() also returns the directory itself; skip it
                continue
            downFile = self.DOWNPATH + file
            localDownFile = self.TEMPTXT + file
            self.client.download_sync(remote_path=downFile,
                                      local_path=localDownFile)
        print('LOCAL TempTxt CHECK List:')
        print(os.listdir(self.TEMPTXT))
        print('---END:downloadTxtFile---')

    #-------------------------------------------------------
    # 5. Back up the converted text data into a per-date folder.
    #-------------------------------------------------------
    def BackUpTxtFile(self):
        print('---START:BackUpTxtFile---')
        # Build the backup folder name from today's system date (YYYY-MM-DD)
        BackUpFolder = self.BACKUPPATH + datetime.date.today().strftime(
            "%Y-%m-%d")
        print('BACKUP FOLDEPATH:' + BackUpFolder)
        # Create the folder if today's directory does not exist yet
        if not os.path.exists(BackUpFolder):
            # Create the folder
            os.mkdir(BackUpFolder)
        # Copy the downloaded text files into the backup folder
        for file in os.listdir(self.TEMPTXT):
            shutil.copy2(self.TEMPTXT + file, BackUpFolder + '/' + file)
        print('LOCAL BACKUP FOLDER List:')
        print(os.listdir(BackUpFolder))
        print('---END:BackUpTxtFile---')
class CloudWebDAV(CloudLibrary):
    """As CloudLibrary but for a WebDAV server.

    Works within the framework set by CloudLibrary but adds the ability
    to connect with a username and password. Unlike previous
    implementations (e.g. Piwigo), files cannot be accessed purely via a
    URL: a connection must be made first, or all connection details
    passed along.
    """

    def __init__(self, name, folder_url_example):
        self._client = None
        self.default_url = ""
        self.basepath = ""
        super().__init__(name, folder_url_example)

    def get_form_fields(self, user_id):
        """Return the configuration fields presented to the user."""
        return [
            self.get_server_url_field(),
            self.get_server_user_field(),
            self.get_server_token_field(user_id)
        ]

    def serialize(self, **kwargs):
        """Serialize this platform; include credentials when a user is given.

        Args:
            **kwargs: Must contain 'user'; when not None, stored
                per-user connection values are included in the payload.

        Returns:
            dict: Payload tagged 'library' (with credentials) or
                'platform' (without).
        """
        base_payload = {
            'name': self.name,
            'folder_url_example': self.folder_url_example
        }
        # `is not None` instead of `!= None` (identity check for None).
        if kwargs['user'] is not None:
            ds = get_current_plugin().get_user_data_store(kwargs['user'])
            server_url_field = self.get_server_url_field()
            server_url = server_url_field.get_stored_value(ds)
            server_user_field = self.get_server_user_field()
            server_user = server_user_field.get_stored_value(ds)
            server_token_field = self.get_server_token_field(kwargs['user'].id)
            server_token = server_token_field.get_stored_value(ds)
            base_payload['type'] = 'library'
            base_payload[server_url_field.key] = server_url
            base_payload[server_user_field.key] = server_user
            base_payload[server_token_field.key] = server_token
            return base_payload
        base_payload['type'] = 'platform'
        return base_payload

    def get_server_url_field(self):
        return ServerURLField(self.name, self.default_url)

    def get_server_user_field(self):
        return ServerUserField(self.name)

    def get_server_token_field(self, user_id):
        return ServerTokenField(self.name, user_id)

    # Cloud Platform

    def parse_url(self, url):
        # No parsing necessary
        return url

    def build_folder_api_url(self, information):
        return 'folder_api:' + information

    def parse_payload_into_folder(self, original_url, payload):
        # Just return the first entry
        return payload[0]

    def build_list_files_in_folder_api_url(self, information):
        return 'files_in_folder_api:' + information

    def library_folder_processing(self, files):
        """This method does nothing, but each platform might want to do
        some processing of the folders and they can, by overriding this
        method."""
        return files

    def get_credentials(self, ds, user_id):
        """Return dict with all data required for WebDAV connection.

        The token stays encrypted here and is only decrypted at the last
        minute before use (see connect_dict).
        """
        server_url_field = self.get_server_url_field()
        server_url = server_url_field.get_stored_value(ds)
        server_user_field = self.get_server_user_field()
        server_user = server_user_field.get_stored_value(ds)
        server_token_field = self.get_server_token_field(user_id)
        server_token = server_token_field.get_encrypted_value(ds)
        connection_data = {
            'type': 'webdav',
            'webdav_hostname': server_url,
            'webdav_login': server_user,
            'webdav_password': server_token
        }
        return connection_data

    def connect(self, ds, user_id):
        """Connect using credentials read from the user data store.

        The connection can be re-used by other methods (not required if
        each request is self-contained).
        """
        options = self.get_credentials(ds, user_id)
        self.connect_dict(options, user_id)

    def connect_dict(self, options, user_id):
        """Connect using a dict with "webdav_hostname", "webdav_login"
        and "webdav_password" (encrypted).

        An existing client is probed first and dropped if it errors out.
        """
        if self._client:
            try:
                self._client.info("/")
            except (WebDavException) as e:
                logger.info("WebDAV client exception, re-connecting:" + str(e))
                self._client = None
        if self._client is None and options:
            # Dummy field used purely for decrypting the stored token.
            es = ServerTokenField(self.name, user_id)
            options['webdav_password'] = es.decrypt_value(
                options['webdav_password'])
            self._client = Client(options)

    def download(self, url, filepath):
        """Use the WebDAV connection to download a single file."""
        self._client.download_sync(remote_path=url, local_path=filepath)

    def _get_files(self, path):
        """List `path`, dropping the server-echoed current-path entry."""
        files = self._client.list(path)
        if len(files) == 0:
            return []
        # Skip the first entry if it is the current path
        # (for some reason not all webdav servers do this).
        name = pathlib.Path(path).name
        first = files[0]
        if first[-1] == '/' and (name == first or name == first[0:-1]
                                 or first == 'webdav/'):
            return files[1:]
        return files

    def _read_folder(self, path, recursive=0, extensions=None):
        """Recurse and return Folder entries with image/subfolder counts.

        Args:
            path (str): Folder path; a trailing slash is added if missing.
            recursive (int): >0 descends that many levels, <0 adds
                subfolders without counting their images, 0 lists only.
            extensions (list, optional): Lowercase suffixes to count.

        Returns:
            list: Folder objects found.
        """
        if len(path) == 0 or path[-1] != '/':
            path = path + '/'
        logger.info(" read folder:" + path)
        name = pathlib.Path(path).name
        files = self._get_files(path)
        alldirs = []
        if recursive != 0 and path != '/':
            # Offer a way back up the tree.
            parent = str(pathlib.Path(path).parent)
            alldirs += [Folder('[/..] ' + parent, parent, 0)]
        if len(files) == 0:
            return alldirs
        contents = []
        folders = []
        for f in files:
            if f[0] == '.':
                continue
            if f[-1] == '/':
                if recursive > 0:
                    alldirs += self._read_folder(path + f, recursive - 1,
                                                 extensions=extensions)
                elif recursive < 0:
                    # Add folders without counting their images:
                    # -1 marks an unknown image count.
                    alldirs += [Folder(f[0:-1], path + f, -1)]
                else:
                    # Just add folders to the list if not descending.
                    folders += [f]
            else:
                ext = pathlib.Path(f).suffix.lower()
                if extensions is None or ext in extensions:
                    contents += [f]
        # Skip the current folder if it has no images or subfolders.
        if len(contents) or len(folders):
            alldirs += [Folder(name, path, len(contents))]
        logger.info(" read folder entries: " + str(len(alldirs)))
        return alldirs

    def _read_files(self, path, extensions=None):
        """Return File entries for matching files directly in `path`."""
        logger.info(" read files in folder:" + path)
        files = self._get_files(path)
        contents = []
        for f in files:
            if f[0] == '.' or f[-1] == '/':
                continue
            ext = pathlib.Path(f).suffix.lower()
            if extensions is None or ext in extensions:
                contents += [File(f, path + f)]
        logger.info(" read file entries" + str(len(contents)))
        return contents

    def call_api(self, api_url):
        """Dispatch an internal 'api url' of the form '<type>:<path>'."""
        if self._client is None:
            logger.info("WebDAV: No client, please connect first")
            return []
        # File filter for images and ground control points (.txt)
        ext_list = VALID_IMAGE_EXTENSIONS + ['.txt']
        # Get the type of request from the prefix
        req_type, url = api_url.split(':', maxsplit=1)
        # Decode url (spaces etc)
        url = urllib.parse.unquote(url)
        logger.info("CALLING API:" + req_type + "," + url)
        if req_type.startswith('folder_list_api'):
            # All folders and sub-folders, without counting images inside.
            return self._read_folder(url, recursive=-1, extensions=ext_list)
        if req_type == 'folder_api':
            # Info about a folder, including number of images.
            return self._read_folder(url, recursive=0, extensions=ext_list)
        if req_type == 'files_in_folder_api':
            # List of images in a folder.
            return self._read_files(url, extensions=ext_list)
        # Unknown request type: explicit empty result instead of an
        # implicit None (callers iterate over the return value).
        return []

    def parse_payload_into_files(self, payload):
        return payload

    # Cloud Library

    def build_folder_list_api_url(self, server_url, root):
        return 'folder_list_api:' + root

    def parse_payload_into_folders(self, payload):
        # Already in Folders()
        return payload
class WebDAV: """Commerce Cloud WebDAV session. Args: client (CommerceCloudClientSession): Active client session with Commerce Cloud for a bearer token. instance (str, optional): Optional commerce cloud instance, useful for opening clients to multiple instances using the same bearer token. Defaults to None. cert (str, optional): Path to TLS client certificate. Defaults to None. key ([type], optional): Export key for the TLS certificate. Defaults to None. verify (bool, optional): Verify TLS certificates, set to false for self signed. Defaults to True. """ def __init__(self, client, instance=None, cert=None, key=None, verify=True): self.client = client self._instance = instance or self.client.instance self.options = {"webdav_hostname": self._instance.rstrip("/")} self.verify = verify self.token = self.client.Token self.options.update({"webdav_token": self.token["access_token"]}) self.webdav_client = Client(self.options) self.webdav_client.verify = self.verify if cert and key: self.cert = str(Path(cert).resolve()) self.key = str(Path(key).resolve()) self.options.update({"cert_path": self.cert, "key_path": self.key}) def reauth(self): """Checks token expiry and re-initialises the Client if a new token is needed. """ if self.token["expires_at"] < int(time.time()): self.client.getToken() self.options.update({"webdav_token": self.token["access_token"]}) self.webdav_client = Client(self.options) def reconnect(self): """Re-initalise the Client session. """ self.webdav_client = Client(self.options) @property def hostname(self): """Return the hostname the WebDAV client connection is connected to. Returns: str: Hostname including prefix eg https:// """ return self.options["webdav_hostname"] @property def netloc(self): """Return a urlparse netloc string of the connected hostname. Returns: str: netloc of hostname. 
""" url = urlparse(self.options["webdav_hostname"]) return url.netloc @retry( retry_on_exceptions=(RetryException), max_calls_total=3, retry_window_after_first_call_in_seconds=10, ) def GetInfo(self, remote_filepath: str, headers: dict = None) -> list: """Get properties for entity [extended_summary] Args: remote_filepath (str): Path to remote resource. headers (dict, optional): Additional headers to apply to request. Defaults to None. Raises: RetryException: Adds to retries counter on failure. Returns: list: WebDAV attribute information. """ try: return self.webdav_client.info(remote_filepath) except (NoConnection, ConnectionException, WebDavException): self.reauth() raise RetryException @retry( retry_on_exceptions=(RetryException), max_calls_total=3, retry_window_after_first_call_in_seconds=10, ) def GetDirectoryList(self, filepath: str, get_info: bool = False, headers: dict = None) -> list: """Get list of files and folders in a path from WebDAV endpoint. [extended_summary] Args: filepath (str): Path to get directory listing for. get_info (bool): returns dictionary of attributes instead of file list. headers (dict, optional): Additional headers to apply to request. Defaults to None. Returns: list: Directory listing. """ try: return self.webdav_client.list(filepath, get_info=get_info) except (NoConnection, ConnectionException, WebDavException): self.reauth() raise RetryException @retry( retry_on_exceptions=(RetryException), max_calls_total=3, retry_window_after_first_call_in_seconds=10, ) def Upload(self, local_filepath: str, remote_filepath: str): """Upload file or directory recursively to WebDAV endpoint. [extended_summary] Args: local_filepath (str): Local path to file or directory to upload. remote_filepath (str): Remote path to upload to. 
""" local_filepath = str(Path(local_filepath).resolve()) try: self.webdav_client.upload_sync(remote_filepath, local_filepath) except (NoConnection, ConnectionException, WebDavException): self.reauth() raise RetryException @retry( retry_on_exceptions=(RetryException), max_calls_total=3, retry_window_after_first_call_in_seconds=10, ) def StreamUpload(self, payload, remote_path: str, file_name: str): """Upload FileIO, StringIO, BytesIO or string to WebDAV [extended_summary] Args: payload: Stream payload remote_path (str): Remote path relative to host. file_name (str): Name for the file uploaded. """ try: self.webdav_client.upload_to(payload, f"{remote_path}/{file_name}") except (NoConnection, ConnectionException, WebDavException): self.reauth() raise RetryException @retry( retry_on_exceptions=(RetryException), max_calls_total=3, retry_window_after_first_call_in_seconds=10, ) def MakeDir(self, remote_path: str): """Make new directory at path specified. Args: remote_path (str): Path of proposed new directory. """ try: self.webdav_client.mkdir(remote_path) except (NoConnection, ConnectionException, WebDavException): self.reauth() raise RetryException @retry( retry_on_exceptions=(RetryException), max_calls_total=3, retry_window_after_first_call_in_seconds=10, ) def Move(self, remote_path_source: str, remote_path_dest: str, overwrite: bool = False): """Make new directory at path specified. Args: remote_path_source (str): Path of source resource. remote_path_dest (str): Path of destination resource. overwrite (bool): Overwrite destination resource. Defaults to False. """ try: self.webdav_client.move(remote_path_source, remote_path_dest, overwrite) except (NoConnection, ConnectionException, WebDavException): self.reauth() raise RetryException @retry( retry_on_exceptions=(RetryException), max_calls_total=3, retry_window_after_first_call_in_seconds=10, ) def Delete(self, remote_filepath: str): """Delete file on remote WebDAV endpoint. 
Args: remote_filepath (str): Location of resource to delete. """ try: self.webdav_client.clean(remote_filepath) except (NoConnection, ConnectionException, WebDavException): self.reauth() raise RetryException @retry( retry_on_exceptions=(RetryException), max_calls_total=3, retry_window_after_first_call_in_seconds=10, ) def Download(self, local_filepath: str, remote_filepath: str): """Download file/folder from WebDAV endpoint. This is a synchronous operation, and the file is downloaded in full to the local_filepath. Args: local_filepath (str): Local path to download to, including filename of file saved. remote_filepath (str): Remote path to file to download. """ local_filepath = str(Path(local_filepath).resolve()) try: self.webdav_client.download_sync(remote_filepath, local_filepath) except (NoConnection, ConnectionException, WebDavException): self.reauth() raise RetryException @retry( retry_on_exceptions=(RetryException), max_calls_total=3, retry_window_after_first_call_in_seconds=10, ) def Pull(self, local_filepath: str, remote_filepath: str): """Sync file/folder from WebDAV endpoint to local storage. This downloads missing or nwer modified files from the remote to local storage. You can use it to do "resumeable" transfers, but the checks are slow for deeply nested files. Args: local_filepath (str): Local path to download to, including filename of file saved. remote_filepath (str): Remote path to file to download. """ local_filepath = str(Path(local_filepath).resolve()) try: self.webdav_client.pull(remote_filepath, local_filepath) return True except (NoConnection, ConnectionException, WebDavException): self.reauth() raise RetryException return False @retry( retry_on_exceptions=(RetryException), max_calls_total=3, retry_window_after_first_call_in_seconds=10, ) def Push(self, local_filepath: str, remote_filepath: str): """Sync file/folder from local storage to WebDAV endpoint. This uploads missing or nwer modified files from the local to remote storage. 
You can use it to do "resumeable" transfers, but the checks are slow for deeply nested files. Args: local_filepath (str): Local path to download to, including filename of file saved. remote_filepath (str): Remote path to file to download. """ local_filepath = str(Path(local_filepath).resolve()) try: self.webdav_client.push(local_filepath, remote_filepath) return True except (NoConnection, ConnectionException, WebDavException): self.reauth() raise RetryException return False @retry( retry_on_exceptions=(RetryException), max_calls_total=10, retry_window_after_first_call_in_seconds=15, ) def StreamDownload(self, remote_filepath: str, buffer=None, decode: bool = False): """Download a file in chunks to a local file buffer. You must provide a BytesIO object or one will be created for you. Args: remote_filepath (str): Path to remote resource to download. buffer ([type], optional): Buffer write streamed content to. decode (bool, optional): Optionally try to decode downloaded file into a string. Defaults to False. Raises: RetryException: Adds to retries counter on failure. Returns: Bytes: Returns a BytesIO object for further use. """ self.reauth() if buffer is None: buffer = BytesIO() try: self.webdav_client.download_from(buff=buffer, remote_path=remote_filepath) if decode is True: return buffer.getvalue().decode("utf-8") else: buffer.seek(0) return buffer except (NoConnection, ConnectionException, WebDavException): raise RetryException @retry( retry_on_exceptions=(RetryException), max_calls_total=10, retry_window_after_first_call_in_seconds=60, ) def HashObject(self, remote_filepath: str) -> str: """Generate a MD5 hashsum for a remote resource. This is streamed into memory, hashed and discarded. Optimised for low memory but high bandwidth environments. Args: remote_filepath (str): Path to remote resource. Raises: RetryException: Adds to retries counter on failure. Returns: str: MDSSUM of the file requested. 
""" self.reauth() try: sum = md5(self.StreamDownload(remote_filepath).getbuffer()) return { "filepath": remote_filepath, "hashtype": "MD5", "hashsum": sum.hexdigest(), } except (NoConnection, ConnectionException, WebDavException): self.reconnect() raise RetryException def RecursiveFileListing(self, remote_filepath: str) -> str: """Recursive filetree walker, returns paths found. Args: remote_filepath (str): [description] Raises: RetryException: Adds to retries counter on failure. Yields: Iterator[str]: Yields resource paths for any files found. """ @retry( retry_on_exceptions=(RetryException), max_calls_total=10, retry_window_after_first_call_in_seconds=60, ) def get_list(self, path): self.reauth() try: return self.webdav_client.list(path, get_info=True) except (NoConnection, ConnectionException, WebDavException): self.reconnect() raise RetryException def get_files(self, path): return [x for x in get_list(self, path) if x["isdir"] is False] def get_dirs(self, path): return [ x["path"] for x in get_list(self, path) if x["isdir"] is True ] yield from get_files(self, remote_filepath) for subdir in get_dirs(self, remote_filepath): yield from self.RecursiveFileListing(subdir) def RecursiveFolderListing(self, remote_filepath: str) -> str: """Recursive filetree walker, returns paths found. Args: remote_filepath (str): [description] Raises: RetryException: Adds to retries counter on failure. Yields: Iterator[str]: Yields resource paths for any files found. 
""" @retry( retry_on_exceptions=(RetryException), max_calls_total=10, retry_window_after_first_call_in_seconds=60, ) def get_list(self, path): self.reauth() try: return self.webdav_client.list(path, get_info=True) except (NoConnection, ConnectionException, WebDavException): self.reconnect() raise RetryException def get_dirs(self, path): return [ x["path"] for x in get_list(self, path) if x["isdir"] is True ] dirlist = get_dirs(self, remote_filepath) yield from dirlist for subdir in get_dirs(self, remote_filepath): yield from self.RecursiveFolderListing(subdir)
def upload_dav():
    """Interactive WebDAV console client.

    Connects to a hard-coded Nextcloud-style WebDAV endpoint, then
    prompts the user to list, create, delete, download or upload
    resources. Download/upload operate on a triple of files:
    ``<name>.encrypted``, ``<name>.sign`` and ``public_key.pem``.

    All errors are reported to stdout; nothing is returned or raised.
    """
    try:
        options = {
            'webdav_hostname': "https://192.168.1.6/remote.php/dav/files/clouduser/",
            'webdav_login': "******",
            'webdav_password': "******",
            'verbose': True
        }
        conn = False
        client = Client(options)
        # Self-signed certificate on the host; skip TLS verification.
        client.verify = False
        try:
            # A successful listing proves the credentials work.
            if client.list():
                print('\n' * 4)
                print("Connection Successful")
                conn = True
                print('\n' * 3)
        except Exception:
            # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt
            # are no longer swallowed.
            print('\n' * 4)
            print("Connection error. Check credentials")
        if conn:
            print("##############################################################")
            opt = int(input(
                "Choose action to be performed:\n"
                "1:Show all files\n"
                "2:Make Directory\n"
                "3:Delete\n"
                "4:Download file\n"
                "5:Upload File\n"
            ))
            if opt == 1:
                files = client.list()
                print(files)
            elif opt == 2:
                d = input("Enter directory name\n")
                if client.mkdir(d):
                    print("Created Successfully")
            elif opt == 3:
                d = input("Enter directory or file name\n")
                client.clean(d)
                print("Deleted")
            elif opt == 4:
                file = input("Enter file to download with public key and signature\n")
                path_1 = file + '.encrypted'
                path_2 = file + '.sign'
                path_3 = 'public_key.pem'
                s_time = time.time()
                # Remote and local paths are identical by convention.
                client.download_sync(path_1, path_1)
                client.download_sync(path_2, path_2)
                client.download_sync(path_3, path_3)
                e_time = time.time()
                print("Downloaded, Time taken is", e_time - s_time)
            elif opt == 5:
                file = input("Enter file to upload with public key and signature\n")
                path_1 = file + '.encrypted'
                path_2 = file + '.sign'
                path_3 = 'public_key.pem'
                s_time = time.time()
                client.upload_sync(path_1, path_1)
                client.upload_sync(path_2, path_2)
                client.upload_sync(path_3, path_3)
                e_time = time.time()
                print("Uploaded, Time taken is", e_time - s_time)
    except WebDavException as exception:
        print("\n\n", exception, "\n\n")