def delete_result(filename):
    """Delete a result file from Azure File storage, its DB record, and any local copy.

    Args:
        filename: Name of the result file to remove.

    Returns:
        A redirect response to the 'results' view.
    """
    # SECURITY: storage credentials are hard-coded in source; move them to config/env.
    AzureStorageAccount = 'effiles'
    key = 'axLykwdLsUwKTDY5flU6ivGrt9obV38k2UMVDCSpLYE3K6jAkwsjWOThQydhuMSWHfx6lTq102gdkas/GyKhEA=='
    down_path = 'results'
    path1 = 'efficientfrontier'

    # Remove the file from the Azure file share (share, directory, file name).
    file_service = FileService(account_name=AzureStorageAccount, account_key=key)
    file_service.delete_file(path1, down_path, filename)

    # Remove the matching DB row (parameterized query avoids SQL injection).
    cur = mysql.connection.cursor()
    cur.execute('DELETE FROM result_files WHERE filename = %s', [filename])
    mysql.connection.commit()
    cur.close()

    # Remove the local cached copy, if present.
    # Fix: use os.path.join instead of '/'.join — the old target already ended
    # with '/', so the joined path contained a doubled separator.
    destination = os.path.join(APP_ROOT, 'results', filename)
    if os.path.exists(destination):
        os.remove(destination)

    flash('File Deleted', 'success')
    return redirect(url_for('results'))
def onRecordingComplete(self):
    """Upload the finished recording, emit a short-lived download link, then clean up."""
    print("recording completed {}".format(self.filename))

    remote_name = '{}.flv'.format(self.filename)
    local_path = 'temp/{}.flv'.format(self.filename)

    # Push the local .flv into the share's root directory (directory_name=None).
    file_service = FileService(account_name=cfg.storageAc, account_key=cfg.accountkey)
    file_service.create_file_from_path(cfg.fileShare, None, remote_name, local_path)

    # Read-only SAS token valid for five minutes from now.
    signer = SharedAccessSignature(cfg.storageAc, cfg.accountkey)
    sas_token = signer.generate_file(
        cfg.fileShare,
        file_name=remote_name,
        permission=FilePermissions.READ,
        start=datetime.utcnow(),
        expiry=datetime.utcnow() + timedelta(minutes=5))

    download_url = cfg.downloadLinkFormat.format(
        cfg.storageAcUrl, cfg.fileShare, self.filename, sas_token)
    self.recorderSock.emit('recordingcomplete', {
        'sid': self.sid,
        'download': download_url
    })

    # The local temp copy is no longer needed; the remote copy lingers briefly
    # so the client can fetch it, then it is deleted as well.
    os.remove(local_path)
    time.sleep(cfg.timeBeforeFileDelete)
    file_service.delete_file(cfg.fileShare, None, remote_name)
    return
def file():
    """Sync local static files to an Azure file share, mirror them back, and respond.

    Uploads every file under the local static dir to the share, downloads the
    share's listing into the local 'files' dir, and builds an <img> tag per
    entry.

    Returns:
        A Flask response whose body is the JSON summary followed by the HTML.
    """
    static_dir_path = "D:\home\site\wwwroot\static"
    static_file_dir_path = static_dir_path + '\\' + 'files'
    # SECURITY: account credentials should come from config/env, not source.
    account_name = 'hanastragetest'
    account_key = 'acount_key'
    root_share_name = 'root'
    share_name = 'images'

    # Create the local save directory if missing.
    # Fix: os.path.exist does not exist; the correct name is os.path.exists.
    if os.path.exists(static_file_dir_path) is False:
        os.mkdir(static_file_dir_path)

    file_service = FileService(account_name=account_name, account_key=account_key)
    # create share
    file_service.create_share(root_share_name)
    # create directory
    file_service.create_directory(root_share_name, share_name)

    for name in os.listdir(static_dir_path):
        # Replace any existing remote copy before uploading.
        if file_service.exists(root_share_name, share_name, name):
            file_service.delete_file(root_share_name, share_name, name)
        file_service.create_file_from_path(
            root_share_name,
            share_name,
            name,
            static_dir_path + '\\' + name,
            content_settings=ContentSettings(content_type='image/png'))

    # Fix: materialize the listing once — the original iterated the generator
    # in the download loop and then again in the result comprehension, where
    # it would already be exhausted.
    entries = list(file_service.list_directories_and_files(
        root_share_name, share_name))

    html = ""
    for entry in entries:
        # Fix: listing entries are File/Directory objects, not strings;
        # use entry.name when building paths and naming the remote file.
        file_save_path = static_file_dir_path + '\\' + entry.name
        file_service.get_file_to_path(
            root_share_name, share_name, entry.name, file_save_path)
        html = "{}<img src='{}'>".format(html, file_save_path)

    result = {
        "result": True,
        "data": {
            "file_or_dir_name": [entry.name for entry in entries]
        }
    }
    return make_response(json.dumps(result, ensure_ascii=False) + html)
def store_trained_model_in_azure(model):
    """Replace the stored model on the Azure 'model' file share with the latest export.

    Args:
        model: Unused here — the uploaded artifact is read from a fixed
            on-disk export path. Kept for interface compatibility with callers.
    """
    # SECURITY: account key is hard-coded in source; move it to config/env.
    file_service = FileService(
        account_name='soilhumiditydata293s',
        account_key=
        '4PSsEO1xBAIdq3/MppWm+t6eYHi+CWhVn6xNZ6i4mLVgm50K8+NK6lA94v8MxG0bvVEfYCvsv1suxCyCnUYd0A=='
    )
    # Remove the previous model first; guard so a missing remote file
    # (e.g. first upload) does not raise.
    if file_service.exists('model', None, 'model'):
        file_service.delete_file('model', None, 'model')
    file_service.create_file_from_path(
        'model', None, 'model',
        '/fs/student/aditya_wadaskar/iot/ML_training/model')
def clean(shared_directory, config_path, remove_directory):
    """Delete every file in *shared_directory* on the configured Azure share.

    Args:
        shared_directory: Directory on the share to empty.
        config_path: Path (may contain '~') to a JSON file holding
            account_name, account_key and share_name.
        remove_directory: When true, also delete the now-empty directory.
    """
    with open(os.path.expanduser(config_path)) as fh:
        cfg = json.load(fh)

    from azure.storage.file import FileService
    svc = FileService(account_name=cfg['account_name'],
                      account_key=cfg['account_key'])

    share = cfg['share_name']
    # Nothing to do if the directory is absent on the share.
    if not svc.exists(share, shared_directory):
        return

    for entry in svc.list_directories_and_files(share, shared_directory):
        svc.delete_file(share, shared_directory, entry.name)
    if remove_directory:
        svc.delete_directory(share, shared_directory)
def delete(self, remote_file):
    """Delete file from the cloud.

    The azure url format is
    https://myaccount.blob.core.windows.net/mycontainer/myblob.

    Args:
        remote_file(str): The path of the file to be deleted.

    Raises:
        :exc:`~..DriverError`: if the file is not uploaded correctly.
    """
    # Reject anything that is not an Azure storage URL up front.
    if 'core.windows.net' not in remote_file:
        self.logger.error(
            "Source or destination must be a azure storage url (format "
            "https://myaccount.blob.core.windows.net/mycontainer/myblob")
        raise DriverError

    parsed = _parse_url(remote_file)
    # First account key is sufficient for delete operations.
    account_key = self.storage_client.storage_accounts.list_keys(
        self.resource_group_name, parsed.account).keys[0].value

    if parsed.file_type == 'blob':
        blob_svc = BlockBlobService(account_name=parsed.account,
                                    account_key=account_key)
        return blob_svc.delete_blob(parsed.container_or_share_name, parsed.file)

    if parsed.file_type == 'file':
        file_svc = FileService(account_name=parsed.account,
                               account_key=account_key)
        return file_svc.delete_file(parsed.container_or_share_name,
                                    parsed.path, parsed.file)

    raise ValueError(
        "This azure storage type is not valid. It should be blob or file."
    )
def run(job, **kwargs):
    """Delete the Azure file-share file described by the job's resource attributes.

    Returns:
        A (status, short message, detail) tuple on success.
    """
    resource = kwargs.pop('resources').first()

    # Pull connection details and target file from the resource's attributes.
    attrs = resource.attributes
    file_name = attrs.get(field__name='azure_storage_file_name').value
    share_name = attrs.get(field__name='azure_storage_file_share_name').value
    account_name = attrs.get(field__name='azure_storage_account_name').value
    account_key = attrs.get(field__name='azure_account_key').value

    set_progress("Connecting To Azure...")
    file_service = FileService(account_name=account_name,
                               account_key=account_key)
    set_progress("Connection to Azure established")

    set_progress("Deleting file %s..." % file_name)
    file_service.delete_file(file_name=file_name,
                             share_name=share_name,
                             directory_name='')

    return "Success", "The file has been deleted", ""
def cleanupBackups(self):
    """After user confirmation, delete every '*.bk' backup from the Azure file share directory.

    Shows a warning dialog first; does nothing unless the user answers 'yes'.
    """
    # Fix: removed a large slab of dead commented-out code (local file walk /
    # glob cleanup) that was never executed and obscured the method's intent.
    result = messagebox.askquestion('You are about to save the changes',
                                    'Are you sure?', icon='warning')
    if result == 'yes':
        # Remote cleanup: list the share directory and drop old .bk backups.
        azureFileService = FileService(account_name=self.azureAccountName,
                                       account_key=self.azureAccountKey)
        generator = azureFileService.list_directories_and_files(
            self.azureFileShareName, self.azureFileShareFileDir)
        for fileOrDir in generator:
            if fileOrDir.name.endswith('.bk'):
                azureFileService.delete_file(self.azureFileShareName,
                                             self.azureFileShareFileDir,
                                             fileOrDir.name)
def delete_upload(filename):
    """Remove an uploaded file from Azure file storage and its DB row, then redirect.

    Args:
        filename: Name of the uploaded file to delete.

    Returns:
        A redirect response to the 'files' view.
    """
    # NOTE(review): storage credentials are hard-coded; move to config/env.
    account = 'effiles'
    account_key = 'axLykwdLsUwKTDY5flU6ivGrt9obV38k2UMVDCSpLYE3K6jAkwsjWOThQydhuMSWHfx6lTq102gdkas/GyKhEA=='
    directory = 'uploads'
    share = 'efficientfrontier'

    # Delete the remote copy (share, directory, file name).
    service = FileService(account_name=account, account_key=account_key)
    service.delete_file(share, directory, filename)

    # Drop the matching DB record via a parameterized query.
    cursor = mysql.connection.cursor()
    cursor.execute('DELETE FROM files_uploaded WHERE filename = %s', [filename])
    mysql.connection.commit()
    cursor.close()

    flash('File Deleted', 'success')
    return redirect(url_for('files'))
# Upload print("uploading to: '%s/%s/%s'" % (AZURE_SHARE_NAME, AZURE_BACKUP_FOLDER, FILENAME)) file_service.create_file_from_path(AZURE_SHARE_NAME, AZURE_BACKUP_FOLDER, FILENAME, FILENAME, progress_callback=upload_callback) # Cleaning Backup Files backup_files = file_service.list_directories_and_files( AZURE_SHARE_NAME, AZURE_BACKUP_FOLDER) filenames = [] for file in backup_files: filenames.append(file.name) files_to_delete = [] if len(filenames) >= AZURE_KEEP_BACKUPS: files_to_delete = filenames[:(len(filenames) - AZURE_KEEP_BACKUPS)] for file in files_to_delete: file_service.delete_file(AZURE_SHARE_NAME, AZURE_BACKUP_FOLDER, file) end = time.time() duration = int(end - start) slack_message(success=True, duration=duration, deleted=files_to_delete) except Exception as e: print(e) end = time.time() duration = int(end - start) slack_message(success=False, duration=duration, deleted=[])
# syslog.syslog(syslog.LOG_DEBUG,"Copy des fichiers de sauvegarde sur le répertoire Microsoft AZURE OK !") # warning # suppression des fichiers de sauvegarde os.remove(repertoire_de_sauvegarde+"/save_"+str(BACKUP_DATE)+"db.sql") print("Suppression du fichier "+BACKUP_DATE+"db.sql") os.remove(repertoire_de_sauvegarde+"/save_"+str(BACKUP_DATE)+".tar.bz2") print("Suppression du fichier "+BACKUP_DATE+".tar.bz2") # Liste des fichiers ou répertoires de Microsoft AZURE et suppression des anciennes sauvegardes en fonction du nombre de jour print("Liste des sauvegardes: ") list_file = file_service.list_directories_and_files(AZURE_REP_BKP) for file_or_dir in list_file: if ('save_'+str(BACKUP_DATE_OLD)) in file_or_dir.name: file_service.delete_file(AZURE_REP_BKP,'save_'+str(BACKUP_DATE_OLD),'save_'+str(BACKUP_DATE_OLD)+'db.sql') file_service.delete_file(AZURE_REP_BKP,'save_'+str(BACKUP_DATE_OLD),'save_'+str(BACKUP_DATE_OLD)+'.tar.bz2') file_service.delete_directory(AZURE_REP_BKP,'save_'+str(BACKUP_DATE_OLD)) else: print("") print(file_or_dir.name) logging.warning(file_or_dir.name) # warning # syslog.syslog(syslog.LOG_WARNING, file_or_dir.name) # warning print("") print("La sauvegarde c'est terminé correctement !") logging.warning("La sauvegarde c'est terminé correctement !") # warning # syslog.syslog(syslog.LOG_WARNING,"La sauvegarde c'est terminé correctement !") # warning ###################################################### # Lancement de la fonction attachée restoreDB / -rDB #
# Build a timestamped file name and freeze the TF graph to disk.
# NOTE(review): `frozen_graph`, `tf` and `file_service` are defined earlier in
# the notebook this was exported from — confirm they are in scope.
timestamp = str(datetime.now().strftime("%d_%m_%Y__%H_%M_%S"))
model_name = 'tf_model_' + timestamp + '.pb'
model_name  # notebook cell echo; has no effect as a script
path = tf.train.write_graph(frozen_graph, ".", model_name, as_text=False)


# In[68]:


print(f'Model saved to {path}')


# In[69]:


# Remove every previously published model from the share's 'model' directory.
azure_models = file_service.list_directories_and_files('covid-share/model')
for file in azure_models:
    print(f'Deleting {file.name}')
    file_service.delete_file('covid-share', 'model', file.name)


# In[70]:


def upload_to_azure(filename):
    # Upload the local file under the same name into covid-share/model.
    print(f'Uploading (unknown)')
    file_service.create_file_from_path('covid-share', 'model', filename, filename)


# In[71]:


# write_graph returns './<name>'; strip the leading './' for the remote name.
upload_to_azure(path[2:])