def main():
    """Scan each first-level directory under ``webdav_path`` on a WebDAV
    server and report every file whose name does not match ``pattern``.

    Connection and filter settings come from the command line (see
    ``create_parser``).  Results are printed as a grid table.
    """
    args = create_parser().parse_args()
    webdav_url = args.webdav_url
    webdav_path = args.webdav_path
    pattern = args.pattern
    login = args.login
    password = args.password

    options = {
        'webdav_hostname': webdav_url,
        'webdav_login': login,
        'webdav_password': password
    }
    client = Client(options)
    # Self-signed certificates are common on internal WebDAV hosts.
    client.verify = False

    not_matched_files = {}
    for entry in client.list(webdav_path):
        new_path = '/'.join([webdav_path, entry])
        for filename in client.list(new_path):
            if not re.match(pattern, filename):
                # BUG FIX: the original used ''.join(...) here, gluing the
                # directory and file name together with no '/' separator,
                # inconsistent with how new_path is built above.
                not_matched_files['/'.join([new_path, filename])] = 'NOT MATCHED'

    print('Running on the options {}'.format({
        'webdav_url': webdav_url,
        'webdav_path': webdav_path,
        'pattern': pattern
    }))
    print(tabulate(not_matched_files.items(),
                   headers=['FILENAME', 'WARNING'],
                   tablefmt="grid"))
class Dav:
    """Handles WebDAV requests for uploading mail attachments."""

    def __init__(self, host, user, password):
        """Create a WebDAV client for the given account.

        Args:
            host (string): host url
            user (string): dav user name
            password (string): dav password
        """
        self.options = {
            'webdav_hostname': host,
            'webdav_login': user,
            'webdav_password': password
        }
        self.client = Client(self.options)

    def uploadAll(self, dir):
        """Upload every file from the local ``attachments`` folder into a
        ``<dir>/<year>/<month-day>`` hierarchy on the server, removing each
        local file after a successful upload.

        Args:
            dir (string): existing remote directory to save the files to

        Returns:
            False: if a remote directory could not be created
        """
        attachments = [
            name for name in listdir("attachments")
            if isfile(join("attachments", name))
        ]
        year = datetime.datetime.now().strftime("%Y")
        date = datetime.datetime.now().strftime("%m-%d")

        # Create the year-level folder if it is not listed yet.
        if year not in self.client.list(dir):
            try:
                self.client.mkdir(dir + "/" + year)
            except Exception as exc:
                print(exc)
                return False

        # Create the day-level folder only when there are files to upload.
        if date not in self.client.list(dir + "/" + year) and len(attachments) > 0:
            try:
                self.client.mkdir(dir + "/" + year + "/" + date)
            except Exception as exc:
                print(exc)
                return False

        for name in attachments:
            try:
                # A random numeric prefix avoids collisions between files
                # that share the same name.
                remote = (dir + "/" + year + "/" + date + "/" +
                          str(random.randint(10000, 100000)) + "_" + name)
                self.client.upload_sync(remote_path=remote,
                                        local_path="attachments/" + name)
                os.remove("attachments/" + name)
            except Exception as exc:
                print(exc)
class connector:
    """Thin wrapper around a WebDAV client rooted at a configured base path."""

    def __init__(self, config):
        """Build the client from the ``[Connector]`` section of ``config``."""
        self.config = config
        options = {
            'webdav_hostname': config['Connector']['Host'],
            'webdav_login': config['Connector']['Username'],
            'webdav_password': config['Connector']['Password']
        }
        self.client = Client(options)
        # BUG FIX: was misspelled as ``verfiy``, which set a meaningless
        # attribute and silently left SSL certificate verification enabled.
        self.client.verify = False
        self.base = config['Connector']['Base']

    def listdir(self):
        """Return the entries inside the base folder, excluding the folder itself."""
        entries = self.client.list(self.base)
        entries.pop(entries.index(self.base + '/'))
        print(entries)
        return entries

    def remove(self, name):
        """Delete ``name`` from the base folder."""
        self.client.clean(self.base + '/' + name)

    def check(self, name):
        """Return True if ``name`` exists in the base folder."""
        return self.client.check(self.base + '/' + name)

    def get(self, name):
        """Return a resource handle for ``name``."""
        return self.client.resource(self.base + '/' + name)

    def create(self, name):
        """Create an empty remote entry named ``name``."""
        self.client.upload_to(None, self.base + '/' + name)

    def upload(self, name):
        """Upload the local file ``name`` into the base folder."""
        self.client.upload_file(self.base + '/' + name, name)
def fetch(options=options):
    """Interactively restore a backup file from the remote ``/server`` folder."""
    client = Client(options)
    if not client.check("/server"):
        client.mkdir("/server")
    # Skip the first listing entry: it is the containing directory itself.
    lst = client.list("/server")[1::]
    for cnt, name in enumerate(lst, start=1):
        print("Index:", cnt, " Time:", parse_time(name))
    index = int(input("Please select the file you want to restore:")) % len(lst)
    client.download("/server/" + lst[index - 1], "./dat/")
    print(parse_time(lst[index - 1]), "has been restored")
def show_WebDAV(self, session):
    """Populate ``session.rw_fits`` with the LOFAR results listing via WebDAV."""
    hpc_cfg = session.config["hpc"]
    lobcder_url = hpc_cfg["url"]
    # Swap whatever port the LOBCDER URL carries for the WebDAV port 32223.
    port_part = lobcder_url[lobcder_url.find(":"):]
    dav_host = lobcder_url.replace(port_part, "32223")
    dav_client = Client({
        'webdav_hostname': dav_host,
        'webdav_login': "******",
        'webdav_password': hpc_cfg["webdav_pwd"]
    })
    session.rw_fits = dav_client.list("krk/LOFAR_results")
def fetch(options=options):
    """Interactively download one block from the remote ``/blocks`` folder
    into the local ``./blocks`` directory.
    """
    client = Client(options)
    if not client.check("/blocks"):
        client.mkdir("/blocks")
    # Drop the first listing entry: it is the containing directory itself.
    lst = client.list("/blocks")[1::]
    for cnt, name in enumerate(lst, start=1):
        print("Index:", cnt, name)
    index = int(
        input("Please select the block you want to install:")) % len(lst)
    client.download("/blocks/" + lst[index - 1], "./blocks/" + lst[index - 1])
    # print(parse_time(lst[index-1]),"has been restored")
    print(lst[index - 1], "has been installed successfully")
class CardDavImporter:
    """Downloads ``.vcf`` contact files from a CardDAV share via WebDAV."""

    def __init__(self, web_dav_host, web_dav_login, web_dav_password,
                 card_dav_path):
        self.web_dav_host = web_dav_host
        self.web_dav_login = web_dav_login
        self.web_dav_password = web_dav_password
        self.card_dav_path = card_dav_path
        self.client: Client = None

    def connect(self):
        """Create the WebDAV client; must be called before downloading."""
        self.client = Client({
            'webdav_hostname': self.web_dav_host,
            'webdav_login': self.web_dav_login,
            'webdav_password': self.web_dav_password,
            # The server answers existence checks via GET rather than HEAD.
            'webdav_override_methods': {'check': 'GET'}
        })

    def download_contacts(self):
        """Download every ``.vcf`` resource into a fresh local ``tmp`` folder.

        Raises:
            Exception: if :meth:`connect` has not been called yet.
        """
        if not self.client:
            raise Exception("You have to connect first")

        local_path = os.getcwd() + "/tmp"
        # Start from an empty staging directory every time.
        if os.path.exists(local_path):
            shutil.rmtree(local_path)
        os.makedirs(local_path)

        for resource_name in self.client.list(self.card_dav_path):
            remote = "{parent}/{name}".format(parent=self.card_dav_path,
                                              name=resource_name)
            if not remote.endswith(".vcf"):
                continue
            target = os.path.join(local_path, resource_name)
            try:
                print("Downloading " + remote)
                self.client.download(local_path=target, remote_path=remote)
            except WebDavException as ex:
                print("Could not download " + resource_name)
def get_content_in_path(conn_settings, path, root, list_dirs_only=True):
    """Build a nested option tree describing the remote content under ``root``.

    Directories lying on the way to ``path`` are expanded recursively; other
    directories get placeholder options.  Files are included only when
    ``list_dirs_only`` is False.  Folders are always listed before files.
    """
    client = Client(conn_settings)
    folders = []
    files = []

    # The first listing entry is always the parent folder itself.
    for entry in client.list(root)[1:]:
        child_path = root + entry
        if Urn(entry).is_dir() and path.startswith(child_path):
            # This directory lies on the requested path: expand it.
            try:
                nested = get_content_in_path(conn_settings, path, child_path,
                                             list_dirs_only=list_dirs_only)
            except Exception as e:
                nested = []
            folders.append({
                'name': entry,
                'value': child_path,
                'options': nested,
                'selected': False,
                'expanded': True
            })
        elif Urn(entry).is_dir():
            # Off-path directory: use the lazy placeholder options.
            folders.append({
                'name': entry,
                'value': child_path,
                'options': RETRIEVING_OPTIONS,
                'selected': False,
                'expanded': False
            })
        elif not list_dirs_only:
            files.append({
                'name': entry,
                'value': child_path,
                'options': [],
                'selected': False,
                'expanded': False
            })

    # Folders and files are collected separately so folders are listed first.
    return folders + files
def salvar(caminho_arquivo, nome_arquivo='UFs.xlsx'):
    """Upload a local file to the configured WebDAV folder and list the
    remote directory afterwards.

    Args:
        caminho_arquivo: local path of the file to upload.
        nome_arquivo: name to store the file under on the server.
    """
    cfg = configparser.ConfigParser(interpolation=None)
    with io.open(str(config.arquivo_config_webdav), mode='r',
                 encoding='utf-8') as fp:
        cfg.read_file(fp)

    dav = cfg['webdav']
    cliente = Client({
        'webdav_hostname': dav['hostname'],
        'webdav_login': dav['login'],
        'webdav_password': dav['password']
    })
    # Server certificate cannot be validated here, so skip verification.
    cliente.verify = False

    pasta_virtual = dav['pasta_virtual']
    cliente.upload_sync(remote_path=pasta_virtual + '/' + nome_arquivo,
                        local_path=caminho_arquivo)
    print('Upload concluído. Listando conteúdo do diretório remoto...')
    print(cliente.list(pasta_virtual))
class WebWav:
    """WebDAV helper that shuttles audio files to, and transcribed text files
    from, the AmiVoice cloud folders on a teracloud WebDAV share.
    """

    # -------------------------------------------------------
    # Constants
    TEMPWAV = 'TempWav/'  # local folder holding audio files queued for upload
    TEMPTXT = 'TempTxt/'  # local folder receiving downloaded text files
    UPPATH = 'uploads/'  # remote WebDAV audio upload folder
    DOWNPATH = 'auto_uploads/'  # remote WebDAV text download folder
    BACKUPPATH = 'BackUp/'  # local backup folder

    # -------------------------------------------------------
    # Constructor
    def __init__(self):
        # -------------------------------------------------------
        # WebDAV connection settings
        options = {
            'webdav_hostname': "https://seto.teracloud.jp/dav/",
            'webdav_login': "******",
            'webdav_password': "******"
        }
        # -------------------------------------------------------
        # WebDAV client
        self.client = Client(options)
        self.client.verify = True  # To not check SSL certificates (Default = True)

    # -------------------------------------------------------
    # 1. Delete audio files previously uploaded to the AmiVoice cloud.
    # -------------------------------------------------------
    def delWavFile(self):
        print('---START:delWavFile---')
        # Print the listing before deleting
        print('AMIVOICE OLD FILE DEL BEFORE')
        delFilelist = self.client.list(self.UPPATH)
        print(delFilelist)
        # Delete every file inside the uploads folder
        for file in delFilelist:
            if file == self.UPPATH:
                # list() also returns the directory name itself; skip it
                continue
            delFile = self.UPPATH + file
            print('DELFILE:' + delFile)
            self.client.clean(delFile)
        print('AMIVOICE OLD FILE DEL CHECK List:')
        print(self.client.list(self.UPPATH))
        print('---END:delWavFile---')

    # -------------------------------------------------------
    # 2. Upload the audio files from the local transfer folder (TempWav).
    # -------------------------------------------------------
    def uploadWavFile(self):
        print('---START:uploadWavFile---')
        # Collect the files to upload
        upFileList = os.listdir(self.TEMPWAV)
        print('LOCAL TempWav List:')
        print(upFileList)
        for file in upFileList:
            print('AMIVOICE UPLOAD FILE:%s' % file)
            upFile = self.UPPATH + file
            localUpFile = self.TEMPWAV + file
            self.client.upload_sync(remote_path=upFile, local_path=localUpFile)
        print('---END:uploadWavFile---')
#------------------------------------------------------- # 3.アップロード後の音声ファイルを転送用フォルダ(TempWav)から削除する。 #------------------------------------------------------- def delLocalWavFile(self): print('---START:delLocalWavFile---') # アップロード終了後、実施 localFileList = os.listdir(self.TEMPWAV) for file in localFileList: print('LOCAL DEL FILE:%s' % file) delfile = self.TEMPWAV + file os.remove(delfile) print('LOCAL TempWav CHECK List:') print(os.listdir(self.TEMPWAV)) print('---END:delLocalWavFile---') #------------------------------------------------------- # 4.音声ファイルから変換されたテキストデータをWebWavで転送用フォルダ(TempTxt)にダウンロードする。 #------------------------------------------------------- def downloadTxtFile(self): print('---START:downloadTxtFile---') downFileList = self.client.list(self.DOWNPATH) print('AMIVOICE DOWNLOAD ') print(downFileList) for file in downFileList: if file == self.DOWNPATH: # ディレクトリ名と一致する場合、次へ (listでディレクトリ名も取得されるので回避) continue downFile = self.DOWNPATH + file localDownFile = self.TEMPTXT + file self.client.download_sync(remote_path=downFile, local_path=localDownFile) print('LOCAL TempTxt CHECK List:') print(os.listdir(self.TEMPTXT)) print('---END:downloadTxtFile---') #------------------------------------------------------- # 5.音声ファイルから変換されたテキストデータを日付単位フォルダにバックアップする。 #------------------------------------------------------- def BackUpTxtFile(self): print('---START:BackUpTxtFile---') # システム日付より、今日日付を取得today = datetime.date.today() BackUpFolder = self.BACKUPPATH + datetime.date.today().strftime( "%Y-%m-%d") print('BACKUP FOLDEPATH:' + BackUpFolder) # 今日日付のディレクトリない場合 if not os.path.exists(BackUpFolder): # フォルダを作成 os.mkdir(BackUpFolder) # 取得したデータのコピーし、バックアップを作成する。 # バックアップ for file in os.listdir(self.TEMPTXT): shutil.copy2(self.TEMPTXT + file, BackUpFolder + '/' + file) print('LOCAL BACKUP FOLDER List:') print(os.listdir(BackUpFolder)) print('---END:BackUpTxtFile---')
class OwncloudUser:
    """Represents an owncloud user.

    It initializes only one connection to owncloud for one user and holds the
    current access token.
    """

    _access_token = None
    _user_id = None

    def __init__(self, userId, apiKey=None):
        self._user_id = userId
        # Prefer an explicitly supplied API key; otherwise load the stored token.
        self._access_token = (apiKey if apiKey is not None else Util.loadToken(
            userId, "port-owncloud"))
        options = {
            "webdav_hostname": "{}/remote.php/webdav".format(
                os.getenv("OWNCLOUD_INSTALLATION_URL", "http://localhost:3000")),
            "webdav_token": self._access_token,
        }
        self.client = Client(options)
        self.client.verify = os.environ.get("VERIFY_SSL", "True") == "True"

    def getFolder(self, foldername):
        """Returns the files within the foldername. If a folder is in there,
        it returns all files within recursive.

        Args:
            foldername (str): Represents the searched foldername

        Returns:
            list: Represents all files as strings in a list.
        """
        logger.debug("foldername {}".format(foldername))
        from urllib.parse import quote, unquote

        # BUG FIX: the original used "is not", which compares object identity,
        # not string equality (getFile below already uses "!=").
        if unquote(foldername) != foldername:
            foldername = unquote(foldername)

        entries = self.client.list(foldername)
        logger.debug("found files: {}".format(entries))
        # remove the first element, because this is the searched folder.
        del entries[0]

        # BUG FIX: the original appended recursive results to the same list it
        # was enumerating and later deleted stored indices, which shift as soon
        # as an earlier element is removed.  Build the result list separately.
        files = []
        for entry in entries:
            if entry.endswith("/"):
                logger.debug(
                    f"recursive getFolder for inner folders: {entry}")
                for inner in self.getFolder(foldername + "/" + entry):
                    # add full filepath in context of folder
                    files.append(entry + inner)
            else:
                files.append(entry)
        return files

    def getFile(self, filename):
        """Returns bytesIO content from specified owncloud filepath.

        The path does not start with /.
        """
        logger.debug("filename {}".format(filename))
        from urllib.parse import quote, unquote
        # check if string is already urlencoded
        # if unquote is equal to string, then it is not urlencoded (unquote respects plus sign)
        if unquote(filename) != filename:
            filename = unquote(filename)
        from io import BytesIO
        buffer = BytesIO(b"")
        res1 = self.client.resource(filename)
        res1.write_to(buffer)
        buffer.seek(0)
        logger.debug("file content: {}".format(buffer.getvalue()))
        return buffer
#!!! if you set this to True the script WILL remove files!!! removeFiles = False #this is the connection information for your Learn deployment #you will need to use a System Administrator account #or an account with explicit read and remove permissions to /internal/courses options = { 'webdav_hostname': "https://host.blackboard.com", 'webdav_root': "/bbcswebdav/", 'webdav_login': "******", 'webdav_password': "******", } #open the client client = Client(options) x = client.list() #the file should have at least two columns "deleteMe" (Y/N) and "fullPath" with open("theList.csv") as csvfile: #this pushes the file data into a list of dictionaries reader = csv.DictReader(csvfile) #itterate on each dictionary in the list for row in reader: #if the backup doesn't exist, say so and move on if not client.check(row.get("fullPath")): print("Cannot find: " + row.get("fullPath")) continue
    def test_webdav(self):
        """End-to-end test of the webdavalpine module: deploy a server, check
        authenticated and anonymous access, upload a file, then restart the
        server and confirm the uploaded data survived.
        """
        server_2_ip = '172.16.3.155'
        error, server_id = self.mm['webdavalpine'].run("add_server", ip_addr=server_2_ip, fqdn='webdav.test')
        self.assertTrue(error == None, msg=error)
        # Give the server time to come up.
        time.sleep(10)
        webdav_url = "https://{}/files/".format(server_2_ip)
        # The deployment writes the admin password to a well-known work path.
        full_path = parentdir + "/work/webdavalpine/1/webdav/admin.pass"
        self.assertTrue(os.path.exists(full_path), msg="{} does not exist".format(full_path))
        admin_pass = open(full_path, "r").read().strip()
        resp = requests.get("https://{}/".format(server_2_ip), verify=False)
        self.assertTrue(resp.status_code == 200)
        options = {
            'webdav_hostname': webdav_url,
            'webdav_login': "******",
            'webdav_password': admin_pass
        }
        client = Client(options)
        client.verify = False  # self-signed certificate
        result = client.check("public")
        self.assertTrue(result == True)
        result = client.list()
        self.assertTrue(len(result) > 0)
        subprocess.check_output(["touch /tmp/testdata"], shell=True)
        client.upload_sync("public/testdata", "/tmp/testdata")
        result = client.check("public/testdata")
        self.assertTrue(result == True)
        # Anonymous users may read "public" but must not be able to write.
        unauth_client = Client({
            'webdav_hostname': webdav_url
        })
        unauth_client.verify = False
        public_list = unauth_client.list("public/")
        self.assertTrue(len(public_list) > 0)
        try:
            unauth_client.upload_sync("public/testdata2", "/tmp/testdata")
            self.fail()
        except:
            pass
        # Restart the server and verify the uploaded file persisted.
        error, _ = self.mm['webdavalpine'].run("stop_server", id=server_id)
        self.assertTrue(error == None, msg=error)
        time.sleep(2)
        error, _ = self.mm['webdavalpine'].run("start_server", id=server_id)
        self.assertTrue(error == None, msg=error)
        time.sleep(10)
        resp = requests.get("https://{}/".format(server_2_ip), verify=False)
        self.assertTrue(resp.status_code == 200)
        options = {
            'webdav_hostname': webdav_url,
            'webdav_login': "******",
            'webdav_password': admin_pass
        }
        client2 = Client(options)
        client2.verify = False
        public_list = client2.list("public/")
        self.assertTrue(len(public_list) > 0)
        self.assertTrue(public_list[1] == "testdata")
        error, _ = self.mm['webdavalpine'].run("remove_server", id=server_id)
        self.assertTrue(error == None, msg=error)
class Webdav_sync(Sync):
    """Sync backend that backs up / restores the discobase file via WebDAV."""

    def __init__(self, user, password, url, db_file):
        """Validate credentials and create the WebDAV client.

        Raises SystemExit when any of user/password/url is empty.
        """
        super().__init__()
        log.info("We are in Webdav_sync.__init__")
        if user == '' or password == '' or url == '':
            log.error("Webdav config incomplete. Check config.yaml")
            raise SystemExit
        else:
            self.user = user
            self.password = password
            self.url = url
            self.discobase = db_file
            #self.backuppath = '/discodos/{}'.format(db_file)
            options = {
                'webdav_hostname': self.url,
                'webdav_login': self.user,
                'webdav_password': self.password
            }
            self.client = Client(options)
            #print(dir(self.client))
            #print('')
            #print(self.client.is_dir('discodos'))
            #print(self.client.check(self.discobase))

    def _webdav_mtime(self, filename):
        # we currently don't need this, put to func anyway
        """Return the remote file's modification time as 'YYYY-MM-DD_HHMMSS'."""
        mod_server_dt = parse(self.client.info(filename)['modified'])
        mod_server_str = mod_server_dt.strftime('%Y-%m-%d_%H%M%S')
        #if mod_local_str != mod_server_str:
        #    print('Local and server discobase.db modification time diverge.')
        #    print(mod_local_str)
        #    print(mod_server_str)
        return mod_server_str

    def backup(self):
        """Upload the discobase under a timestamped name unless a backup with
        that name already exists; always list existing backups afterwards.
        """
        # check file stats on local machine
        bak_file_name = self._get_fileobj_mtime(self.discobase)
        print("Uploading as {} to {}".format(bak_file_name, self.url))
        existing = False
        try:
            if self.client.check(bak_file_name):
                existing = True
            else:
                existing = False
        except WebDavException as exception:
            log.error('Webserver returned: {}'.format(exception))
            raise SystemExit
        if existing:
            log.warning('Backup existing. Won\'t overwrite "{}" '.format(
                bak_file_name))
        else:
            print('Backup not existing yet, uploading ...')
            self.client.upload_sync(remote_path='{}'.format(bak_file_name),
                                    local_path='{}'.format(self.discobase))
        # in any case, show list of existing backups
        self.show_backups()
        return True

    def show_backups(self, restore=False):
        """List remote backups whose names match the timestamp pattern.

        When ``restore`` is True, additionally prompt the user for a backup
        number and return the chosen file name.
        """
        if not restore:
            print('\nExisting backups:')
        #relevant_files = self.client.list()[1:]  # leave out first item, it's the containing folder
        all_files = self.client.list()
        all_files.sort()  # sorts by name
        relevant_files = []
        for i, resource in enumerate(all_files):
            # NOTE(review): non-raw string with \d escapes -- works today but
            # emits a SyntaxWarning on newer Pythons; should become r'...'.
            if re.search('_(\d+)-(\d+)-(\d+)_(\d+)$', resource):
                relevant_files.append(resource)
            else:
                log.debug('Sync: Skipping resource: {}'.format(all_files[i]))
        for j, file in enumerate(relevant_files):
            file = '({}) - {}'.format(j, file)
            print(file)
        if restore:
            restore_id = ask_user('Restore backup #: ')
            try:
                restore_file = relevant_files[int(restore_id)]
            except ValueError:
                log.warning('Nothing to restore!')
                raise SystemExit
            except IndexError:
                log.warning('Non-existent ID. Nothing to restore!')
                raise SystemExit
            print('Restoring backup {}...'.format(restore_file))
            return restore_file
        print()

    def restore(self):
        """Interactively pick a backup and download it over the local discobase."""
        print('\nWhich backup would you like to restore?')
        restore_filename = self.show_backups(restore=True)
        overwrite = ask_user(
            "Download backup and overwrite local file {} (n)? ".format(
                self.discobase))
        if overwrite.lower() == 'y':
            self.client.download_sync(
                remote_path='{}'.format(restore_filename),
                local_path='{}'.format(self.discobase))
            self._touch_to_backupdate(restore_filename)
# Download the OmniFocus keystore and database via WebDAV (when configured).
conn_options = None
basepath = None
client = None
contents_zip = None
if config.has_section('WEBDAV'):
    conn_options = {
        'webdav_hostname': config['WEBDAV']['hostname'],
        'webdav_login': config['WEBDAV']['username'],
        'webdav_password': config['WEBDAV']['sync_pass']
    }
    client = Client(conn_options)
    client.verify = True
    log('- downloading keystore')
    client.download_sync(remote_path=add_basepath('OmniFocus.ofocus/encrypted'),
                         local_path='encrypted')
    log('- finding database')
    files = client.list(add_basepath('OmniFocus.ofocus/'))
    # The database zip is the one whose name starts with '00'.
    contents_zip = [file for file in files if file.endswith('.zip') and file.startswith('00')][0]
    # contents_zip = [file for file in files if file.endswith('.zip') and not file.startswith('00')][0]
    log('- downloading database')
    client.download_sync(remote_path=add_basepath('OmniFocus.ofocus/{}'.format(contents_zip)),
                         local_path=contents_zip)

# decrypt them
log('decrypting database')
dec_contents_zip = None
if config.has_section('ENC') and 'is_encrypted' in config['ENC'] and config['ENC'].getboolean('is_encrypted') and contents_zip is not None:
    dec_contents_zip = 'dec_{}'.format(contents_zip)
    enc_pass = config['ENC']['enc_pass']
    # Derive the document key from the passphrase and the keystore metadata.
    encryptionMetadata = DocumentKey.parse_metadata(open('encrypted', 'rb'))
    metadataKey = DocumentKey.use_passphrase(encryptionMetadata, enc_pass)
    docKey = DocumentKey(encryptionMetadata.get('key').data, unwrapping_key=metadataKey)
    with open(contents_zip, "rb") as infp, open(dec_contents_zip, "wb") as outfp:
    def test(self, start=True, gedis=True, webdav=False, bottle=True, websockets=False):
        """
        kosmos 'j.servers.rack.test()'
        kosmos 'j.servers.rack.test(gedis_ssl=True)'
        kosmos 'j.servers.rack.test(ssl=False)'
        kosmos 'j.servers.rack.test(start=True)'

        :param manual means the server is run manually using e.g. js_shell 'j.servers.rack.start()'
        """
        if start:
            self._server_test_start(background=True, gedis=gedis, webdav=webdav, bottle=bottle, websockets=websockets)

        # Smoke-test the gedis endpoint first.
        namespace = "system"
        secret = "1234"
        cl = j.clients.gedis.get(namespace, port=8901, host="localhost")
        assert cl.ping()

        if webdav:
            # how to use see https://github.com/ezhov-evgeny/webdav-client-python-3/blob/da46592c6f1cc9fb810ca54019763b1e7dce4583/webdav3/client.py#L197
            options = {
                "webdav_hostname": "http://127.0.0.1:4443",
                "webdav_login": "******",
                "webdav_password": "******"
            }
            from webdav3.client import Client
            cl = Client(options)
            cl.check()
            assert len(cl.list("")) > 0

        if websockets:
            # TODO: does not work yet
            from websocket import create_connection
            ws = create_connection("ws://localhost:4444")
            ws.send("Hello, World")
            result = ws.recv()
            print(result)
            # ws.close()

        if bottle:
            import requests
            # https://realpython.com/python-requests/#the-get-request
            r1 = requests.get("http://localhost:4442/ping")
            self._log(r1.status_code)
            self._log(r1.content)
            assert r1.content == b"pong"
            assert r1.status_code == 200

        print("tests are ok")
prefixes = ('ExportFile_','ArchiveFile_','CommonCartridge_') #this is the connection information for your Learn deployment #you will need to use a System Administrator account #or an account with explicit read and remove permissions to /internal/courses options = { 'webdav_hostname': "https://host.blackboard.com", 'webdav_root': "/bbcswebdav/", 'webdav_login': "******", 'webdav_password': "******", } #open and instantiate the client client = Client(options) #I found that this is requried, but not sure why x = client.list() #build a list of ALL courses in /internal courseList = client.list("internal/courses") #this is the list of dictionaries one for each backup theList = [] #for testing only, using islice to limit itterations for course in islice(courseList, 0, coursesToCheck): #construct full path to archive directory for this course arcpath = "internal/courses/" + course + "archive" #execute if archive directory exists
class ClientTestCase(TestCase):
    """Integration tests for the WebDAV client against webdav.yandex.ru.

    NOTE(review): these tests hit a live server and depend on real
    credentials; several download tests are skipped because of a server
    quirk (resourcetype reported as collection for plain files).
    """

    remote_path_file = 'test_dir/test.txt'
    remote_path_file2 = 'test_dir2/test.txt'
    remote_path_dir = 'test_dir'
    remote_path_dir2 = 'test_dir2'
    local_base_dir = 'tests/'
    local_file = 'test.txt'
    local_file_path = local_base_dir + 'test.txt'
    local_path_dir = local_base_dir + 'res/test_dir'

    def setUp(self):
        options = {
            'webdav_hostname': 'https://webdav.yandex.ru',
            'webdav_login': '******',
            'webdav_password': '******'
        }
        self.client = Client(options)
        # Start every test with a clean local download target.
        if path.exists(path=self.local_path_dir):
            shutil.rmtree(path=self.local_path_dir)

    def tearDown(self):
        # Remove local and remote artifacts produced by the test.
        if path.exists(path=self.local_path_dir):
            shutil.rmtree(path=self.local_path_dir)
        if self.client.check(remote_path=self.remote_path_dir):
            self.client.clean(remote_path=self.remote_path_dir)
        if self.client.check(remote_path=self.remote_path_dir2):
            self.client.clean(remote_path=self.remote_path_dir2)

    def test_list(self):
        self._prepare_for_downloading()
        file_list = self.client.list()
        self.assertIsNotNone(file_list, 'List of files should not be None')
        self.assertGreater(file_list.__len__(), 0,
                           'Expected that amount of files more then 0')

    def test_free(self):
        self.assertGreater(
            self.client.free(), 0,
            'Expected that free space on WebDAV server is more then 0 bytes')

    def test_check(self):
        self.assertTrue(self.client.check(),
                        'Expected that root directory is exist')

    def test_mkdir(self):
        if self.client.check(remote_path=self.remote_path_dir):
            self.client.clean(remote_path=self.remote_path_dir)
        self.client.mkdir(remote_path=self.remote_path_dir)
        self.assertTrue(self.client.check(remote_path=self.remote_path_dir),
                        'Expected the directory is created.')

    @unittest.skip(
        "Yandex brakes response for file it contains property resourcetype as collection but it should "
        "be empty for file")
    def test_download_to(self):
        self._prepare_for_downloading()
        buff = BytesIO()
        self.client.download_from(buff=buff, remote_path=self.remote_path_file)
        self.assertEquals(buff.getvalue(),
                          'test content for testing of webdav client')

    @unittest.skip(
        "Yandex brakes response for file it contains property resourcetype as collection but it should "
        "be empty for file")
    def test_download(self):
        self._prepare_for_downloading()
        self.client.download(local_path=self.local_path_dir,
                             remote_path=self.remote_path_dir)
        self.assertTrue(path.exists(self.local_path_dir),
                        'Expected the directory is downloaded.')
        self.assertTrue(path.isdir(self.local_path_dir),
                        'Expected this is a directory.')
        self.assertTrue(
            path.exists(self.local_path_dir + os.path.sep + self.local_file),
            'Expected the file is downloaded')
        # NOTE(review): 'local_path_file' is not defined on this class; this
        # line would raise AttributeError if the test ever ran (probably
        # 'local_file' was intended).
        self.assertTrue(
            path.isfile(self.local_path_dir + os.path.sep +
                        self.local_path_file), 'Expected this is a file')

    @unittest.skip(
        "Yandex brakes response for file it contains property resourcetype as collection but it should "
        "be empty for file")
    def test_download_sync(self):
        self._prepare_for_downloading()
        os.mkdir(self.local_path_dir)

        def callback():
            self.assertTrue(
                path.exists(self.local_path_dir + os.path.sep +
                            self.local_file), 'Expected the file is downloaded')
            self.assertTrue(
                path.isfile(self.local_path_dir + os.path.sep +
                            self.local_file), 'Expected this is a file')

        self.client.download_sync(local_path=self.local_path_dir +
                                  os.path.sep + self.local_file,
                                  remote_path=self.remote_path_file,
                                  callback=callback)
        self.assertTrue(
            path.exists(self.local_path_dir + os.path.sep + self.local_file),
            'Expected the file has already been downloaded')

    @unittest.skip(
        "Yandex brakes response for file it contains property resourcetype as collection but it should "
        "be empty for file")
    def test_download_async(self):
        self._prepare_for_downloading()
        os.mkdir(self.local_path_dir)

        def callback():
            self.assertTrue(
                path.exists(self.local_path_dir + os.path.sep +
                            self.local_file), 'Expected the file is downloaded')
            self.assertTrue(
                path.isfile(self.local_path_dir + os.path.sep +
                            self.local_file), 'Expected this is a file')

        self.client.download_async(local_path=self.local_path_dir +
                                   os.path.sep + self.local_file,
                                   remote_path=self.remote_path_file,
                                   callback=callback)
        # Async call returns immediately, so the file must not exist yet.
        self.assertFalse(
            path.exists(self.local_path_dir + os.path.sep + self.local_file),
            'Expected the file has not been downloaded yet')

    def test_upload_from(self):
        self._prepare_for_uploading()
        buff = StringIO(u'test content for testing of webdav client')
        self.client.upload_to(buff=buff, remote_path=self.remote_path_file)
        self.assertTrue(self.client.check(self.remote_path_file),
                        'Expected the file is uploaded.')

    def test_upload(self):
        self._prepare_for_uploading()
        self.client.upload(remote_path=self.remote_path_file,
                           local_path=self.local_path_dir)
        self.assertTrue(self.client.check(self.remote_path_dir),
                        'Expected the directory is created.')
        self.assertTrue(self.client.check(self.remote_path_file),
                        'Expected the file is uploaded.')

    def test_upload_file(self):
        self._prepare_for_uploading()
        self.client.upload_file(remote_path=self.remote_path_file,
                                local_path=self.local_file_path)
        self.assertTrue(self.client.check(remote_path=self.remote_path_file),
                        'Expected the file is uploaded.')

    def test_upload_sync(self):
        self._prepare_for_uploading()

        # NOTE(review): this callback is never passed to the client call
        # below; the test exercises plain upload() instead of upload_sync().
        def callback():
            self.assertTrue(self.client.check(self.remote_path_dir),
                            'Expected the directory is created.')
            self.assertTrue(self.client.check(self.remote_path_file),
                            'Expected the file is uploaded.')

        self.client.upload(remote_path=self.remote_path_file,
                           local_path=self.local_path_dir)

    def test_upload_async(self):
        self._prepare_for_uploading()

        # NOTE(review): same as test_upload_sync -- the callback is unused and
        # upload() is called instead of upload_async().
        def callback():
            self.assertTrue(self.client.check(self.remote_path_dir),
                            'Expected the directory is created.')
            self.assertTrue(self.client.check(self.remote_path_file),
                            'Expected the file is uploaded.')

        self.client.upload(remote_path=self.remote_path_file,
                           local_path=self.local_path_dir)

    def test_copy(self):
        self._prepare_for_downloading()
        self.client.mkdir(remote_path=self.remote_path_dir2)
        self.client.copy(remote_path_from=self.remote_path_file,
                         remote_path_to=self.remote_path_file2)
        self.assertTrue(self.client.check(remote_path=self.remote_path_file2))

    def test_move(self):
        self._prepare_for_downloading()
        self.client.mkdir(remote_path=self.remote_path_dir2)
        self.client.move(remote_path_from=self.remote_path_file,
                         remote_path_to=self.remote_path_file2)
        self.assertFalse(self.client.check(remote_path=self.remote_path_file))
        self.assertTrue(self.client.check(remote_path=self.remote_path_file2))

    def test_clean(self):
        self._prepare_for_downloading()
        self.client.clean(remote_path=self.remote_path_dir)
        self.assertFalse(self.client.check(remote_path=self.remote_path_file))
        self.assertFalse(self.client.check(remote_path=self.remote_path_dir))

    def test_info(self):
        self._prepare_for_downloading()
        result = self.client.info(remote_path=self.remote_path_file)
        self.assertEquals(result['name'], 'test.txt')
        self.assertEquals(result['size'], '41')
        self.assertTrue('created' in result)
        self.assertTrue('modified' in result)

    def test_directory_is_dir(self):
        self._prepare_for_downloading()
        self.assertTrue(self.client.is_dir(self.remote_path_dir),
                        'Should return True for directory')

    def test_file_is_not_dir(self):
        self._prepare_for_downloading()
        self.assertFalse(self.client.is_dir(self.remote_path_file),
                         'Should return False for file')

    def test_get_property_of_non_exist(self):
        self._prepare_for_downloading()
        result = self.client.get_property(remote_path=self.remote_path_file,
                                          option={'name': 'aProperty'})
        self.assertEquals(
            result, None, 'For not found property should return value as None')

    def test_set_property(self):
        self._prepare_for_downloading()
        self.client.set_property(remote_path=self.remote_path_file,
                                 option={
                                     'namespace': 'test',
                                     'name': 'aProperty',
                                     'value': 'aValue'
                                 })
        result = self.client.get_property(remote_path=self.remote_path_file,
                                          option={
                                              'namespace': 'test',
                                              'name': 'aProperty'
                                          })
        self.assertEquals(result, 'aValue', 'Property value should be set')

    def test_set_property_batch(self):
        self._prepare_for_downloading()
        self.client.set_property_batch(remote_path=self.remote_path_file,
                                       option=[{
                                           'namespace': 'test',
                                           'name': 'aProperty',
                                           'value': 'aValue'
                                       }, {
                                           'namespace': 'test',
                                           'name': 'aProperty2',
                                           'value': 'aValue2'
                                       }])
        result = self.client.get_property(remote_path=self.remote_path_file,
                                          option={
                                              'namespace': 'test',
                                              'name': 'aProperty'
                                          })
        self.assertEquals(result, 'aValue',
                          'First property value should be set')
        result = self.client.get_property(remote_path=self.remote_path_file,
                                          option={
                                              'namespace': 'test',
                                              'name': 'aProperty2'
                                          })
        self.assertEquals(result, 'aValue2',
                          'Second property value should be set')

    def _prepare_for_downloading(self):
        # Ensure the remote fixture exists and the local target dir is present.
        if not self.client.check(remote_path=self.remote_path_dir):
            self.client.mkdir(remote_path=self.remote_path_dir)
        if not self.client.check(remote_path=self.remote_path_file):
            self.client.upload_file(remote_path=self.remote_path_file,
                                    local_path=self.local_file_path)
        if not path.exists(self.local_path_dir):
            os.makedirs(self.local_path_dir)

    def _prepare_for_uploading(self):
        # Ensure the remote dir and a local copy of the fixture file exist.
        if not self.client.check(remote_path=self.remote_path_dir):
            self.client.mkdir(remote_path=self.remote_path_dir)
        if not path.exists(path=self.local_path_dir):
            os.makedirs(self.local_path_dir)
        if not path.exists(path=self.local_path_dir + os.sep +
                           self.local_file):
            shutil.copy(src=self.local_file_path,
                        dst=self.local_path_dir + os.sep + self.local_file)
'webdav_password': os.environ['SDPWD']
}
client = Client(options)

#client.verify = False  # To not check SSL certificates (Default = True)
#client.session.proxies(...)  # To set proxy directly into the session (Optional)
#client.session.auth(...)  # To set proxy auth directly into the session (Optional)
#client.execute_request("mkdir", 'directory_name')
#client.execute_request("list", '')
#print(client.list())
#print(client.info('FILE'))

remote_files = client.list(TARGETDIR)
#print(remote_files.remove(TARGETDIR+'/'))
for fname in remote_files:
    # list() also returns the directory entry itself; skip it.
    if fname != TARGETDIR + '/':
        #info = json.load(client.info(fname))
        info = client.info(TARGETDIR + '/' + fname)
        #print(info)
        remotesize = info['size']
        if remotesize is not None:
            locsize = 0
            # NOTE(review): reverses URL-style escaping used for the local
            # copy's name ('/'->'-', '('/')' percent-encoded) -- confirm.
            locpath = './' + fname.replace('-', '%2F', 1).replace(
                '(', '%28').replace(')', '%29')
            if os.path.exists(locpath):
                locsize = os.path.getsize(locpath)
            delta = int(remotesize) - locsize
def upload_dav():
    """Interactive console helper for a WebDAV (Nextcloud-style) share.

    Connects with hard-coded credentials, then prompts the user for one
    action — list, mkdir, delete, download or upload — and performs it.
    Download/upload operate on a triple of files: ``<name>.encrypted``,
    ``<name>.sign`` and ``public_key.pem``.  All results are reported via
    ``print``; there is no return value.
    """
    try:
        options = {
            'webdav_hostname': "https://192.168.1.6/remote.php/dav/files/clouduser/",
            'webdav_login': "******",
            'webdav_password': "******",
            'verbose': True
        }
        conn = False
        client = Client(options)
        client.verify = False  # LAN host uses a self-signed certificate
        try:
            if client.list():
                print('\n' * 4)
                print("Connection Successful")  # typo fix: was "Successfull"
                conn = True
                print('\n' * 3)
        except Exception:
            # Narrowed from a bare ``except:`` so KeyboardInterrupt and
            # SystemExit are no longer swallowed.
            print('\n' * 4)
            print("Connection error. Check credentials")
        if conn:
            print("##############################################################")
            opt = int(input("Choose action to be performed:\n1:Show all files\n2:Make Directory\n3:Delete\n4:Download file\n5:Upload File\n"))
            if opt == 1:
                files = client.list()
                print(files)
            elif opt == 2:
                d = input("Enter directory name\n")
                if client.mkdir(d):
                    print("Created Successfully")
            elif opt == 3:
                d = input("Enter directory or file name\n")
                client.clean(d)
                print("Deleted")
            elif opt == 4:
                file = input("Enter file to download with public key and signature\n")
                path_1 = file + '.encrypted'
                path_2 = file + '.sign'
                path_3 = 'public_key.pem'
                s_time = time.time()
                client.download_sync(path_1, path_1)
                client.download_sync(path_2, path_2)
                client.download_sync(path_3, path_3)
                e_time = time.time()
                print("Downloaded, Time taken is", e_time - s_time)
            elif opt == 5:
                file = input("Enter file to upload with public key and signature\n")
                path_1 = file + '.encrypted'
                path_2 = file + '.sign'
                path_3 = 'public_key.pem'
                s_time = time.time()
                client.upload_sync(path_1, path_1)
                client.upload_sync(path_2, path_2)
                client.upload_sync(path_3, path_3)
                e_time = time.time()
                print("Uploaded, Time taken is", e_time - s_time)  # typo fix: was "Uplaoded"
            else:
                pass
    except WebDavException as exception:
        print("\n\n", exception, "\n\n")
class FileBrowserWebdavStorage(StorageMixin, Storage):
    """Django file-storage backend that proxies every operation to a WebDAV
    server through a webdav3 ``Client``.

    Connection settings come from ``settings.CONTRAX_FILE_STORAGE_WEBDAV_*``.

    Args:
        base_url (str): URL prefix used by :meth:`url`.
        url_as_download (bool): append ``?action=download`` to file URLs.
        simple_listdir (bool): make :meth:`listdir` return a flat list
            instead of a (directories, files) tuple — faster for the API.
        webdav_root (str): root path on the WebDAV server.
    """

    def __init__(self, base_url='/', url_as_download=True,
                 simple_listdir=False, webdav_root='/'):
        self.base_url = base_url
        self.url_as_download = url_as_download
        self.simple_listdir = simple_listdir
        webdav_client_options = {
            'webdav_hostname': settings.CONTRAX_FILE_STORAGE_WEBDAV_ROOT_URL.rstrip('/'),
            'webdav_login': settings.CONTRAX_FILE_STORAGE_WEBDAV_USERNAME,
            'webdav_password': settings.CONTRAX_FILE_STORAGE_WEBDAV_PASSWORD,
        }
        self.client = Client(webdav_client_options)
        try:
            # Best-effort bootstrap of the media folders; they may already
            # exist.  Narrowed from a bare ``except`` so KeyboardInterrupt /
            # SystemExit are no longer swallowed.
            self.client.mkdir('/media')
            self.client.mkdir('/media/photo')
        except Exception:
            pass
        # NOTE(review): both attributes are set — presumably for compatibility
        # between the old webdav2 client (``client.webdav.root``) and webdav3
        # (``client.root``); confirm which one the installed client exposes.
        self.client.webdav.root = webdav_root
        self.client.root = webdav_root

    def path(self, name):
        """
        Return a local filesystem path where the file can be retrieved using
        Python's built-in open() function. Storage systems that can't be
        accessed using open() should *not* implement this method.
        """
        # FIXME: this would be useful with self.location != ''
        # in this case use this notation:
        # 1. define self.location in __init__
        # 2. rewrite path() method to be like
        #    return os.path.join(self.location, name)
        # 3. everywhere in other self.methods use self.path(name) instead of
        #    the bare name attr
        return name

    def isdir(self, path):
        """
        Returns true if name exists and is a directory.
        """
        return self.client.check(path) and self.client.is_dir(path)

    def isfile(self, path):
        """
        Returns true if name exists and is a regular file.
        """
        return self.client.check(path) and not self.client.is_dir(path)

    def move(self, old_file_name, new_file_name, allow_overwrite=False):
        """
        Moves safely a file from one location to another.
        If allow_overwrite==False and new_file_name exists, raises an exception.
        """
        return self.client.move(remote_path_from=old_file_name,
                                remote_path_to=new_file_name,
                                overwrite=allow_overwrite)

    def makedirs(self, path):
        """
        Creates all missing directories specified by name.
        Analogue to os.mkdirs().
        """
        return self.client.mkdir(path)

    def rmtree(self, path):
        """
        Deletes a directory and everything it contains.
        Analogue to shutil.rmtree().
        """
        return self.client.clean(path)

    def setpermission(self, path):
        """
        Sets file permission.  Not supported over WebDAV — intentionally a
        no-op.
        """
        pass

    def _open(self, path, mode='rb'):
        # Download the whole remote resource into memory and hand it back as
        # a Django File wrapping a BytesIO buffer (``mode`` is ignored).
        tmp = io.BytesIO()
        self.client.download_from(tmp, path)
        tmp.seek(0)
        return File(tmp)

    def _save(self, path, content):
        # Stream ``content`` up to the server via a webdav resource object.
        res = self.client.resource(path)
        res.read_from(content)
        return path

    def get_valid_name(self, name):
        """
        Return a filename, based on the provided filename, that's suitable
        for use in the target storage system.
        """
        return get_valid_filename(name)

    def delete(self, path):
        """
        Delete the specified file from the storage system.
        """
        if self.exists(path):
            self.client.clean(path)

    def exists(self, path):
        """
        Return True if a file referenced by the given name already exists in
        the storage system, or False if the name is available for a new file.
        """
        return self.client.check(path)

    def listdir(self, path):
        """
        List the contents of the specified path.
        Return a 2-tuple of lists: the first item being directories, the
        second item being files (or a flat list when simple_listdir is set).
        """
        _list = self.client.list(path)
        # for API: iterating over a big directory takes too much time
        if self.simple_listdir:
            return _list
        # for filebrowser
        directories, files = [], []
        for entry in _list:
            entry_path = os.path.join(path, entry)
            if self.isdir(entry_path):
                directories.append(entry.rstrip('/'))
            else:
                files.append(entry)
        return directories, files

    def size(self, path):
        """
        Return the total size, in bytes, of the file specified by name.
        """
        # NOTE(review): webdav info() typically returns 'size' as a string —
        # callers appear to tolerate that; confirm before adding int().
        return self.client.info(path)['size']

    def url(self, path):
        """
        Return an absolute URL where the file's contents can be accessed
        directly by a Web browser.
        """
        url = filepath_to_uri(path)
        if url is not None:
            url = url.lstrip('/')
        url = urljoin(self.base_url, url)
        if self.url_as_download and self.isfile(path):
            url += '?action=download'
        return url

    @staticmethod
    def _datetime_from_timestamp(ts, fmt):
        """
        If timezone support is enabled, make an aware datetime object in UTC;
        otherwise make a naive one in the local timezone.
        """
        dt = datetime.strptime(ts, fmt)
        if settings.USE_TZ:
            # Safe to use .replace() because UTC doesn't have DST
            return dt.replace(tzinfo=timezone.utc)
        else:
            return dt

    def get_accessed_time(self, path):
        """
        Return the last accessed time (as a datetime) of the file specified
        by name.  Not available over WebDAV — intentionally returns None.
        """
        pass

    def get_created_time(self, path):
        """
        Return the creation time (as a datetime) of the file specified by
        name. The datetime will be timezone-aware if USE_TZ=True.
        """
        return self._datetime_from_timestamp(self.client.info(path)['created'],
                                             fmt='%Y-%m-%dT%H:%M:%SZ')

    def get_modified_time(self, path):
        """
        Return the last modified time (as a datetime) of the file specified
        by name. The datetime will be timezone-aware if USE_TZ=True.
        """
        return self._datetime_from_timestamp(
            self.client.info(path)['modified'],
            fmt='%a, %d %b %Y %H:%M:%S %Z')
class CloudWebDAV(CloudLibrary):
    """As CloudLibrary but for WebDAV server

    This attempts to work within the framework set by CloudLibrary but adds
    the functionality to connect with a username and password to the server.
    Files can not be accessed purely with a URL as for previous
    implementations (Piwigo); the connection needs to be made first or all
    the connection details passed.
    """

    def __init__(self, name, folder_url_example):
        self._client = None       # lazily-created webdav3 Client
        self.default_url = ""
        self.basepath = ""
        super().__init__(name, folder_url_example)

    def get_form_fields(self, user_id):
        """Return the form fields needed to configure this platform."""
        return [
            self.get_server_url_field(),
            self.get_server_user_field(),
            self.get_server_token_field(user_id)
        ]

    def serialize(self, **kwargs):
        """Serialize this platform; when a user is given, include that
        user's stored server URL / login / (encrypted) token."""
        base_payload = {
            'name': self.name,
            'folder_url_example': self.folder_url_example
        }
        # fixed: was `kwargs['user'] != None` — identity test for None
        if kwargs['user'] is not None:
            ds = get_current_plugin().get_user_data_store(kwargs['user'])
            server_url_field = self.get_server_url_field()
            server_url = server_url_field.get_stored_value(ds)
            server_user_field = self.get_server_user_field()
            server_user = server_user_field.get_stored_value(ds)
            server_token_field = self.get_server_token_field(kwargs['user'].id)
            server_token = server_token_field.get_stored_value(ds)
            base_payload['type'] = 'library'
            base_payload[server_url_field.key] = server_url
            base_payload[server_user_field.key] = server_user
            base_payload[server_token_field.key] = server_token
            return base_payload
        base_payload['type'] = 'platform'
        return base_payload

    def get_server_url_field(self):
        return ServerURLField(self.name, self.default_url)

    def get_server_user_field(self):
        return ServerUserField(self.name)

    def get_server_token_field(self, user_id):
        return ServerTokenField(self.name, user_id)

    # Cloud Platform
    def parse_url(self, url):
        #No parsing necessary
        return url

    def build_folder_api_url(self, information):
        return 'folder_api:' + information

    def parse_payload_into_folder(self, original_url, payload):
        #Just return the first entry
        return payload[0]

    def build_list_files_in_folder_api_url(self, information):
        return 'files_in_folder_api:' + information

    def library_folder_processing(self, files):
        """This method does nothing, but each platform might want to do some
        processing of the folders and they can, by overriding this method"""
        return files

    def get_credentials(self, ds, user_id):
        """Return dict with all data required for WebDAV connection
        """
        server_url_field = self.get_server_url_field()
        server_url = server_url_field.get_stored_value(ds)
        server_user_field = self.get_server_user_field()
        server_user = server_user_field.get_stored_value(ds)
        server_token_field = self.get_server_token_field(user_id)
        #Save the encrypted data, decrypt at last minute before use
        server_token = server_token_field.get_encrypted_value(ds)
        connection_data = {
            'type': 'webdav',
            'webdav_hostname': server_url,
            'webdav_login': server_user,
            'webdav_password': server_token
        }
        return connection_data

    def connect(self, ds, user_id):
        """Connect to the server if necessary, the connection can be re-used
        by other methods (not required if each request is self-contained)

        This method takes the user_data_store and gets connection details
        from there
        """
        options = self.get_credentials(ds, user_id)
        self.connect_dict(options, user_id)

    def connect_dict(self, options, user_id):
        """Connect to the server if necessary, the connection can be re-used
        by other methods (not required if each request is self-contained)

        This method takes a dict containing connection details:
        "webdav_hostname", "webdav_login", "webdav_password"
        """
        # Probe an existing connection; drop it if the server rejects us.
        if self._client:
            try:
                self._client.info("/")
            except WebDavException as e:
                logger.info("WebDAV client exception, re-connecting:" + str(e))
                self._client = None
        if self._client is None and options:
            #Dummy field for decryption
            es = ServerTokenField(self.name, user_id)
            options['webdav_password'] = es.decrypt_value(
                options['webdav_password'])
            self._client = Client(options)

    def download(self, url, filepath):
        #Use webdav connection to download file
        self._client.download_sync(remote_path=url, local_path=filepath)

    def _get_files(self, path):
        """List ``path``, dropping the leading self-entry some servers echo."""
        files = self._client.list(path)
        if len(files) == 0:
            return []
        #Skip the first entry if it is current path
        #(for some %*&#$ reason not all webdav servers do this)
        name = pathlib.Path(path).name
        first = files[0]
        if first[-1] == '/' and (name == first or name == first[0:-1]
                                 or first == 'webdav/'):
            return files[1:]
        return files

    #Recurse and return folders with number of image files/subfolders
    def _read_folder(self, path, recursive=0, extensions=None):
        if len(path) == 0 or path[-1] != '/':
            path = path + '/'
        logger.info(" read folder:" + path)
        name = pathlib.Path(path).name
        files = self._get_files(path)
        alldirs = []
        if recursive != 0 and path != '/':
            parent = str(pathlib.Path(path).parent)
            alldirs += [Folder('[/..] ' + parent, parent, 0)]
        if len(files) == 0:
            return alldirs
        contents = []
        folders = []
        for f in files:
            if f[0] == '.':      # hidden entries
                continue
            if f[-1] == '/':     # sub-folder
                #Include subfolders?
                if recursive > 0:
                    alldirs += self._read_folder(path + f, recursive - 1,
                                                 extensions=extensions)
                elif recursive < 0:
                    #Add the folders without counting their images,
                    #unknown image count
                    alldirs += [Folder(f[0:-1], path + f, -1)]
                else:
                    #Just add folders to list if not going into them
                    folders += [f]
            else:
                ext = pathlib.Path(f).suffix.lower()
                if extensions is None or ext in extensions:
                    contents += [f]
        #Skip current if no images or subfolders
        if len(contents) or len(folders):
            alldirs += [Folder(name, path, len(contents))]
        logger.info(" read folder entries: " + str(len(alldirs)))
        return alldirs

    def _read_files(self, path, extensions=None):
        """Return File entries for non-hidden files in ``path`` matching
        ``extensions`` (all files when extensions is None)."""
        logger.info(" read files in folder:" + path)
        files = self._get_files(path)
        contents = []
        for f in files:
            if f[0] == '.' or f[-1] == '/':
                continue
            ext = pathlib.Path(f).suffix.lower()
            if extensions is None or ext in extensions:
                contents += [File(f, path + f)]
        logger.info(" read file entries" + str(len(contents)))
        return contents

    def call_api(self, api_url):
        """Dispatch a pseudo-URL of the form '<req_type>:<path>' to the
        matching folder/file listing helper."""
        if self._client is None:
            logger.info("WebDAV: No client, please connect first")
            return []
        #File filter for images and ground control points (.txt)
        ext_list = VALID_IMAGE_EXTENSIONS + ['.txt']
        #Get the type of request from the prefix
        req_type, url = api_url.split(':', maxsplit=1)
        #Decode url (spaces etc)
        url = urllib.parse.unquote(url)
        logger.info("CALLING API:" + req_type + "," + url)
        if req_type.startswith('folder_list_api'):
            #Returns all folders and sub-folders with number of images within
            return self._read_folder(url, recursive=-1, extensions=ext_list)
        if req_type == 'folder_api':
            #Returns info about a folder, including number of images
            return self._read_folder(url, recursive=0, extensions=ext_list)
        if req_type == 'files_in_folder_api':
            #Returns list of images in a folder
            return self._read_files(url, extensions=ext_list)

    def parse_payload_into_files(self, payload):
        return payload

    # Cloud Library
    def build_folder_list_api_url(self, server_url, root):
        return 'folder_list_api:' + root

    def parse_payload_into_folders(self, payload):
        #Already in Folders()
        return payload
class WebDAV:
    """Commerce Cloud WebDAV session.

    Args:
        client (CommerceCloudClientSession): Active client session with Commerce Cloud for a bearer token.
        instance (str, optional): Optional commerce cloud instance, useful for opening clients
            to multiple instances using the same bearer token. Defaults to None.
        cert (str, optional): Path to TLS client certificate. Defaults to None.
        key ([type], optional): Export key for the TLS certificate. Defaults to None.
        verify (bool, optional): Verify TLS certificates, set to false for self signed. Defaults to True.
    """

    def __init__(self, client, instance=None, cert=None, key=None, verify=True):
        self.client = client
        self._instance = instance or self.client.instance
        self.options = {"webdav_hostname": self._instance.rstrip("/")}
        self.verify = verify
        # NOTE(review): the token mapping is captured once here; reauth()
        # re-reads self.token afterwards, which only works if
        # client.getToken() mutates this same mapping in place — confirm.
        self.token = self.client.Token
        self.options.update({"webdav_token": self.token["access_token"]})
        self.webdav_client = Client(self.options)
        self.webdav_client.verify = self.verify
        if cert and key:
            # NOTE(review): cert/key are added to options AFTER the Client was
            # constructed, so they only take effect on the next
            # reauth()/reconnect() — confirm this is intended.
            self.cert = str(Path(cert).resolve())
            self.key = str(Path(key).resolve())
            self.options.update({"cert_path": self.cert, "key_path": self.key})

    def reauth(self):
        """Checks token expiry and re-initialises the Client if a new token is needed.
        """
        if self.token["expires_at"] < int(time.time()):
            self.client.getToken()
            self.options.update({"webdav_token": self.token["access_token"]})
            self.webdav_client = Client(self.options)

    def reconnect(self):
        """Re-initialise the Client session.
        """
        self.webdav_client = Client(self.options)

    @property
    def hostname(self):
        """Return the hostname the WebDAV client connection is connected to.

        Returns:
            str: Hostname including prefix eg https://
        """
        return self.options["webdav_hostname"]

    @property
    def netloc(self):
        """Return a urlparse netloc string of the connected hostname.

        Returns:
            str: netloc of hostname.
        """
        url = urlparse(self.options["webdav_hostname"])
        return url.netloc

    # NOTE(review): in every @retry below, (RetryException) is NOT a tuple —
    # it is just the bare class in parentheses; this works only if the retry
    # decorator accepts a single exception class. Use (RetryException,) if a
    # tuple is required.
    @retry(
        retry_on_exceptions=(RetryException),
        max_calls_total=3,
        retry_window_after_first_call_in_seconds=10,
    )
    def GetInfo(self, remote_filepath: str, headers: dict = None) -> dict:
        """Get properties for entity.

        On connection failure, refreshes auth and raises RetryException so the
        @retry decorator can re-invoke the call.

        Args:
            remote_filepath (str): Path to remote resource.
            headers (dict, optional): Additional headers to apply to request. Defaults to None.

        Raises:
            RetryException: Adds to retries counter on failure.

        Returns:
            dict: WebDAV attribute information for the resource.
        """
        try:
            return self.webdav_client.info(remote_filepath)
        except (NoConnection, ConnectionException, WebDavException):
            self.reauth()
            raise RetryException

    @retry(
        retry_on_exceptions=(RetryException),
        max_calls_total=3,
        retry_window_after_first_call_in_seconds=10,
    )
    def GetDirectoryList(self, filepath: str, get_info: bool = False,
                         headers: dict = None) -> list:
        """Get list of files and folders in a path from WebDAV endpoint.

        Args:
            filepath (str): Path to get directory listing for.
            get_info (bool): returns dictionary of attributes instead of file list.
            headers (dict, optional): Additional headers to apply to request. Defaults to None.

        Returns:
            list: Directory listing.
        """
        try:
            return self.webdav_client.list(filepath, get_info=get_info)
        except (NoConnection, ConnectionException, WebDavException):
            self.reauth()
            raise RetryException

    @retry(
        retry_on_exceptions=(RetryException),
        max_calls_total=3,
        retry_window_after_first_call_in_seconds=10,
    )
    def Upload(self, local_filepath: str, remote_filepath: str):
        """Upload file or directory recursively to WebDAV endpoint.

        Args:
            local_filepath (str): Local path to file or directory to upload.
            remote_filepath (str): Remote path to upload to.
        """
        local_filepath = str(Path(local_filepath).resolve())
        try:
            # upload_sync takes (remote_path, local_path) positionally.
            self.webdav_client.upload_sync(remote_filepath, local_filepath)
        except (NoConnection, ConnectionException, WebDavException):
            self.reauth()
            raise RetryException

    @retry(
        retry_on_exceptions=(RetryException),
        max_calls_total=3,
        retry_window_after_first_call_in_seconds=10,
    )
    def StreamUpload(self, payload, remote_path: str, file_name: str):
        """Upload FileIO, StringIO, BytesIO or string to WebDAV.

        Args:
            payload: Stream payload
            remote_path (str): Remote path relative to host.
            file_name (str): Name for the file uploaded.
        """
        try:
            self.webdav_client.upload_to(payload, f"{remote_path}/{file_name}")
        except (NoConnection, ConnectionException, WebDavException):
            self.reauth()
            raise RetryException

    @retry(
        retry_on_exceptions=(RetryException),
        max_calls_total=3,
        retry_window_after_first_call_in_seconds=10,
    )
    def MakeDir(self, remote_path: str):
        """Make new directory at path specified.

        Args:
            remote_path (str): Path of proposed new directory.
        """
        try:
            self.webdav_client.mkdir(remote_path)
        except (NoConnection, ConnectionException, WebDavException):
            self.reauth()
            raise RetryException

    @retry(
        retry_on_exceptions=(RetryException),
        max_calls_total=3,
        retry_window_after_first_call_in_seconds=10,
    )
    def Move(self, remote_path_source: str, remote_path_dest: str,
             overwrite: bool = False):
        """Move a resource from source to destination path.

        Args:
            remote_path_source (str): Path of source resource.
            remote_path_dest (str): Path of destination resource.
            overwrite (bool): Overwrite destination resource. Defaults to False.
        """
        try:
            self.webdav_client.move(remote_path_source, remote_path_dest,
                                    overwrite)
        except (NoConnection, ConnectionException, WebDavException):
            self.reauth()
            raise RetryException

    @retry(
        retry_on_exceptions=(RetryException),
        max_calls_total=3,
        retry_window_after_first_call_in_seconds=10,
    )
    def Delete(self, remote_filepath: str):
        """Delete file on remote WebDAV endpoint.

        Args:
            remote_filepath (str): Location of resource to delete.
        """
        try:
            self.webdav_client.clean(remote_filepath)
        except (NoConnection, ConnectionException, WebDavException):
            self.reauth()
            raise RetryException

    @retry(
        retry_on_exceptions=(RetryException),
        max_calls_total=3,
        retry_window_after_first_call_in_seconds=10,
    )
    def Download(self, local_filepath: str, remote_filepath: str):
        """Download file/folder from WebDAV endpoint. This is a synchronous
        operation, and the file is downloaded in full to the local_filepath.

        Args:
            local_filepath (str): Local path to download to, including filename of file saved.
            remote_filepath (str): Remote path to file to download.
        """
        local_filepath = str(Path(local_filepath).resolve())
        try:
            self.webdav_client.download_sync(remote_filepath, local_filepath)
        except (NoConnection, ConnectionException, WebDavException):
            self.reauth()
            raise RetryException

    @retry(
        retry_on_exceptions=(RetryException),
        max_calls_total=3,
        retry_window_after_first_call_in_seconds=10,
    )
    def Pull(self, local_filepath: str, remote_filepath: str):
        """Sync file/folder from WebDAV endpoint to local storage.

        This downloads missing or newer modified files from the remote to
        local storage. You can use it to do "resumeable" transfers, but the
        checks are slow for deeply nested files.

        Args:
            local_filepath (str): Local path to download to, including filename of file saved.
            remote_filepath (str): Remote path to file to download.
        """
        local_filepath = str(Path(local_filepath).resolve())
        try:
            self.webdav_client.pull(remote_filepath, local_filepath)
            return True
        except (NoConnection, ConnectionException, WebDavException):
            self.reauth()
            raise RetryException
        # NOTE(review): unreachable — both branches above return or raise.
        return False

    @retry(
        retry_on_exceptions=(RetryException),
        max_calls_total=3,
        retry_window_after_first_call_in_seconds=10,
    )
    def Push(self, local_filepath: str, remote_filepath: str):
        """Sync file/folder from local storage to WebDAV endpoint.

        This uploads missing or newer modified files from the local to
        remote storage. You can use it to do "resumeable" transfers, but the
        checks are slow for deeply nested files.

        Args:
            local_filepath (str): Local path to download to, including filename of file saved.
            remote_filepath (str): Remote path to file to download.
        """
        local_filepath = str(Path(local_filepath).resolve())
        try:
            self.webdav_client.push(local_filepath, remote_filepath)
            return True
        except (NoConnection, ConnectionException, WebDavException):
            self.reauth()
            raise RetryException
        # NOTE(review): unreachable — both branches above return or raise.
        return False

    @retry(
        retry_on_exceptions=(RetryException),
        max_calls_total=10,
        retry_window_after_first_call_in_seconds=15,
    )
    def StreamDownload(self, remote_filepath: str, buffer=None,
                       decode: bool = False):
        """Download a file in chunks to a local file buffer. You must provide
        a BytesIO object or one will be created for you.

        Args:
            remote_filepath (str): Path to remote resource to download.
            buffer ([type], optional): Buffer write streamed content to.
            decode (bool, optional): Optionally try to decode downloaded file into a string.
                Defaults to False.

        Raises:
            RetryException: Adds to retries counter on failure.

        Returns:
            Bytes: Returns a BytesIO object for further use (or a str when decode=True).
        """
        self.reauth()
        if buffer is None:
            buffer = BytesIO()
        try:
            self.webdav_client.download_from(buff=buffer,
                                             remote_path=remote_filepath)
            if decode is True:
                return buffer.getvalue().decode("utf-8")
            else:
                buffer.seek(0)
                return buffer
        except (NoConnection, ConnectionException, WebDavException):
            raise RetryException

    @retry(
        retry_on_exceptions=(RetryException),
        max_calls_total=10,
        retry_window_after_first_call_in_seconds=60,
    )
    def HashObject(self, remote_filepath: str) -> dict:
        """Generate a MD5 hashsum for a remote resource.

        This is streamed into memory, hashed and discarded. Optimised for low
        memory but high bandwidth environments.

        Args:
            remote_filepath (str): Path to remote resource.

        Raises:
            RetryException: Adds to retries counter on failure.

        Returns:
            dict: filepath, hashtype and the MD5 sum of the file requested.
        """
        self.reauth()
        try:
            # NOTE(review): local name shadows the builtin sum().
            sum = md5(self.StreamDownload(remote_filepath).getbuffer())
            return {
                "filepath": remote_filepath,
                "hashtype": "MD5",
                "hashsum": sum.hexdigest(),
            }
        except (NoConnection, ConnectionException, WebDavException):
            self.reconnect()
            raise RetryException

    def RecursiveFileListing(self, remote_filepath: str) -> str:
        """Recursive filetree walker, returns paths found.

        (Generator — despite the annotation it yields str paths rather than
        returning one.)

        Args:
            remote_filepath (str): [description]

        Raises:
            RetryException: Adds to retries counter on failure.

        Yields:
            Iterator[str]: Yields resource paths for any files found.
        """

        # Inner helpers deliberately take `self` explicitly — they are plain
        # local functions, not methods.
        @retry(
            retry_on_exceptions=(RetryException),
            max_calls_total=10,
            retry_window_after_first_call_in_seconds=60,
        )
        def get_list(self, path):
            self.reauth()
            try:
                return self.webdav_client.list(path, get_info=True)
            except (NoConnection, ConnectionException, WebDavException):
                self.reconnect()
                raise RetryException

        def get_files(self, path):
            return [x for x in get_list(self, path) if x["isdir"] is False]

        def get_dirs(self, path):
            return [
                x["path"] for x in get_list(self, path) if x["isdir"] is True
            ]

        yield from get_files(self, remote_filepath)
        for subdir in get_dirs(self, remote_filepath):
            yield from self.RecursiveFileListing(subdir)

    def RecursiveFolderListing(self, remote_filepath: str) -> str:
        """Recursive filetree walker, returns paths found.

        (Generator — yields folder paths.)

        Args:
            remote_filepath (str): [description]

        Raises:
            RetryException: Adds to retries counter on failure.

        Yields:
            Iterator[str]: Yields resource paths for any folders found.
        """

        @retry(
            retry_on_exceptions=(RetryException),
            max_calls_total=10,
            retry_window_after_first_call_in_seconds=60,
        )
        def get_list(self, path):
            self.reauth()
            try:
                return self.webdav_client.list(path, get_info=True)
            except (NoConnection, ConnectionException, WebDavException):
                self.reconnect()
                raise RetryException

        def get_dirs(self, path):
            return [
                x["path"] for x in get_list(self, path) if x["isdir"] is True
            ]

        # NOTE(review): get_dirs is called twice per level — the second call
        # repeats the network listing done for `dirlist`.
        dirlist = get_dirs(self, remote_filepath)
        yield from dirlist
        for subdir in get_dirs(self, remote_filepath):
            yield from self.RecursiveFolderListing(subdir)
class WebDev(object):
    """Thin convenience wrapper around a webdav3 ``Client``.

    All remote content is moved through in-memory ``BytesIO`` buffers; no
    temporary files are used for read/write/mkfile.
    """

    def __init__(self, options):
        # ``options`` is the usual webdav3 dict (hostname/login/password).
        self.client = Client(options)

    def read(self, name):
        """Download remote file ``name`` and return its raw bytes."""
        with BytesIO() as buf:
            self.client.download_from(buf, name)
            return buf.getvalue()

    def write(self, content, target):
        """Append text ``content`` to the existing remote file ``target``
        (downloads the current content, appends, re-uploads)."""
        data = self.read(target)
        with BytesIO() as buf:
            buf.write(data)
            buf.write(content.encode())
            self.client.upload_to(buf.getvalue(), target)

    def mkdir(self, dir_name):
        """Create remote directory ``dir_name``."""
        self.client.mkdir(dir_name)

    def list(self):
        """Print the root directory listing."""
        data = self.client.list()
        print(data)

    def clear(self, target):
        """Delete remote file or directory ``target``."""
        self.client.clean(target)

    def check(self, remote_path):
        """Print whether ``remote_path`` exists on the server."""
        data = self.client.check(remote_path)
        if data:
            print('{} is exists'.format(remote_path))
        else:
            print('{} is not exists'.format(remote_path))

    def upload(self, remote_path, local_path):
        """Upload ``local_path`` to ``remote_path``."""
        self.client.upload(remote_path, local_path)

    def help_dev(self):
        """Print every attribute of the underlying client (debug aid)."""
        print(dir(self.client))

    def mkfile(self, remote_path):
        """Create an empty file at ``remote_path``."""
        with BytesIO() as buf:
            buf.write(b'')
            self.client.upload_to(buf.getvalue(), remote_path)

    def download_file(self, remote_path):
        """Download ``remote_path`` to a local file of the same name."""
        # NOTE(review): confirm the installed client exposes download_file;
        # webdav3 commonly uses download_sync for this.
        self.client.download_file(remote_path, remote_path)

    def rename(self, old_name, new_name):
        """Rename a remote file, emulated as copy-then-delete.

        BUG FIX: previously this called ``copy(old_name, old_name)`` —
        copying the file onto itself — and then deleted ``old_name``, so the
        renamed file never appeared and the original data was destroyed.
        """
        self.client.copy(old_name, new_name)
        self.client.clean(old_name)

    def set_property(self, remote_path, option):
        """Set a single WebDAV property ``option`` on ``remote_path``."""
        self.client.set_property(remote_path, option=[option])