def output_stats_to_json_blob(feed_stats_dict):
    """Append one feed's stats as a JSON object member to the stats blob.

    Each call writes '"<FeedName>": {...},' so successive calls accumulate
    the members of a larger JSON object in the append blob.

    :param feed_stats_dict: stats dict; must contain a 'FeedName' key.
    """
    service = AppendBlobService(STORAGE_ACCT_NAME, BLOB_KEY)
    fragment = '"' + feed_stats_dict['FeedName'] + '":' \
        + json.dumps(feed_stats_dict) + ','
    if LOGGING:
        print("Writing to blob: {}".format(fragment))
    service.append_blob_from_bytes(BLOB_CONTAINER, STATS_FILE_NAME,
                                   fragment.encode())
def stream_logs(client, resource_group, service, app, deployment,
                no_format=False, raise_error_on_failure=True,
                logger_level_func=logger.warning):
    """Resolve the deployment's log-file SAS URL and stream the blob.

    Raises CLIError when the SAS URL cannot be obtained or is empty.
    """
    error_msg = "Could not get logs for Service: {}".format(service)
    log_file_sas = None
    try:
        log_file_sas = client.get_log_file_url(
            resource_group_name=resource_group,
            service_name=service,
            app_name=app,
            deployment_name=deployment).url
    except (AttributeError, CloudError) as e:
        logger.warning("%s Exception: %s", error_msg, e)
        raise CLIError(error_msg)

    if not log_file_sas:
        logger.warning("%s Empty SAS URL.", error_msg)
        raise CLIError(error_msg)

    account_name, endpoint_suffix, container_name, blob_name, sas_token = \
        get_blob_info(log_file_sas)
    blob_service = AppendBlobService(account_name=account_name,
                                     sas_token=sas_token,
                                     endpoint_suffix=endpoint_suffix)
    _stream_logs(no_format, DEFAULT_CHUNK_SIZE, DEFAULT_LOG_TIMEOUT_IN_SEC,
                 blob_service, container_name, blob_name,
                 raise_error_on_failure, logger_level_func)
def main():
    """Entry point: parse args, load config, optionally report usage stats,
    then persist stdin lines to Azure blob storage and emit final state."""
    parser = argparse.ArgumentParser()
    parser.add_argument('-c', '--config', help='Config file')
    args = parser.parse_args()
    if args.config:
        # BUG FIX: the original bound this file handle to the name `input`,
        # shadowing the builtin (and did the same for the stdin wrapper below).
        with open(args.config) as config_file:
            config = json.load(config_file)
    else:
        config = {}

    if not config.get('disable_collection', False):
        logger.info('Sending version information to singer.io. '
                    'To disable sending anonymous usage data, set '
                    'the config parameter "disable_collection" to true')
        threading.Thread(target=send_usage_stats).start()

    block_blob_service = BlockBlobService(config.get('account_name', None),
                                          config.get('account_key', None))
    append_blob_service = AppendBlobService(config.get('account_name', None),
                                            config.get('account_key', None))
    # TODO: Create container/ prefix if missing
    blob_container_name = config.get('container_name', None)

    stdin_lines = io.TextIOWrapper(sys.stdin.buffer, encoding='utf-8')
    state = persist_lines(block_blob_service, append_blob_service,
                          blob_container_name, stdin_lines)
    emit_state(state)
    logger.debug("Exiting normally")
def test_add_block_to_append_blob_with_container_sas(self):
    """Create, append to and read back an append blob through a
    container-level SAS token (live mode only)."""
    # SAS URL is calculated from storage key, so this test runs live only
    if TestMode.need_recording_file(self.test_mode):
        return

    # Arrange: a SAS with just enough rights to create, append and read.
    permissions = (ContainerPermissions.READ +
                   ContainerPermissions.CREATE +
                   ContainerPermissions.ADD)
    token = self.bs.generate_container_shared_access_signature(
        self.container_name,
        permission=permissions,
        expiry=datetime.utcnow() + timedelta(hours=1),
    )
    sas_service = AppendBlobService(
        account_name=self.settings.STORAGE_ACCOUNT_NAME,
        sas_token=token,
        protocol=self.settings.PROTOCOL)

    # Act
    test_blob_name = "testblobname"
    text_blob_content = "test-blob-content"
    sas_service.create_blob(self.container_name, blob_name=test_blob_name)
    sas_service.append_blob_from_text(self.container_name,
                                      blob_name=test_blob_name,
                                      text=text_blob_content)
    blob_content = sas_service.get_blob_to_text(self.container_name,
                                                blob_name=test_blob_name)

    # Assert
    self.assertEqual(blob_content.content, text_blob_content)
def __init__(self, **storage_config):
    """Build the three blob-service clients and validate required settings.

    :param storage_config: mapping holding account key/name, container name
        and the public (external) URL under the CONFIG_STORAGE_* keys.
    :raises ValidationError: when the container name or public URL is unset.
    """
    account_key = storage_config[CONFIG_STORAGE_KEY]
    account_name = storage_config[CONFIG_STORAGE_NAME]
    container_name = storage_config[CONFIG_STORAGE_CONTAINER_NAME]
    public_url = storage_config[CONFIG_STORAGE_EXTERNAL_URL]

    credentials = dict(account_name=account_name, account_key=account_key)
    self.services = {
        'base_blob': BaseBlobService(**credentials),
        'block_blob': BlockBlobService(**credentials),
        'append_blob': AppendBlobService(**credentials),
    }

    if not container_name:
        raise ValidationError(
            "You must set which container you want to use")
    self.container_name = container_name

    if not public_url:
        raise ValidationError("You must set public container url")
    self.public_url = public_url
def update_status(self, status=0, errorMsg=None):
    """Write '<status>&,&<timestamp>[&,&payload]' to '<folder>/status.cvs'.

    The payload is the error message when one is given; otherwise the
    tracked file list (when present).  Fields use the '&,&' separator
    expected by the status reader.

    :param status: numeric status code to record.
    :param errorMsg: optional error text; takes precedence over the files.
    """
    # FIX: the original built `text` with the file list and then silently
    # rebuilt it (recomputing the timestamp) when errorMsg was set.  The
    # effective precedence — errorMsg, then files, then bare status — is
    # now explicit and the timestamp is computed once.
    timestamp = datetime.datetime.utcnow().strftime(
        '%a, %d %b %Y %H:%M:%S GMT')
    text = str(status) + '&,&' + timestamp
    if errorMsg is not None:
        text += '&,&' + errorMsg
    elif self.files is not None:
        text += '&,&' + '&,&'.join(self.files)

    # NOTE(review): '.cvs' looks like a '.csv' typo, but the extension is a
    # runtime contract with the reader — confirm before renaming.
    name = self.folderName + '/status.cvs'
    # SECURITY: hard-coded storage account key — should be moved to
    # configuration / environment and the exposed key rotated.
    append_blob_service = AppendBlobService(
        account_name='navview',
        account_key=
        '+roYuNmQbtLvq2Tn227ELmb6s1hzavh0qVQwhLORkUpM0DN7gxFc4j+DF/rEla1EsTN2goHEA1J92moOM/lfxg==',
        protocol='http')
    append_blob_service.create_blob(
        container_name='data',
        blob_name=name,
        content_settings=ContentSettings(content_type='text/plain'))
    append_blob_service.append_blob_from_text(container_name='data',
                                              blob_name=name, text=text)
def __init__(self):
    """Create an AppendBlobService client from a connection string.

    The AZURE_STORAGE_CONNECTION_STRING environment variable wins;
    otherwise the first storage entry in `secretconf` is used.
    """
    env_value = os.getenv("AZURE_STORAGE_CONNECTION_STRING", False)
    connstr = env_value or secretconf["azure"]["storage"][0]["connection_string"]
    self.abservice = AppendBlobService(connection_string=connstr)
def acr_build_show_logs(client, build_id, registry_name, resource_group_name,
                        raise_error_on_failure=False):
    """Resolve the ACR build-log SAS link and stream the log blob.

    Raises CLIError when the log link cannot be obtained or is empty.
    """
    error_message = "Could not get build logs for build ID: {}.".format(build_id)
    log_file_sas = None
    try:
        build_log_result = client.get_log_link(
            resource_group_name=resource_group_name,
            registry_name=registry_name,
            build_id=build_id)
        log_file_sas = build_log_result.log_link
    except (AttributeError, CloudError) as e:
        logger.debug("%s Exception: %s", error_message, e)
        raise CLIError(error_message)

    if not log_file_sas:
        logger.debug("%s Empty SAS URL.", error_message)
        raise CLIError(error_message)

    account_name, endpoint_suffix, container_name, blob_name, sas_token = \
        _get_blob_info(log_file_sas)
    blob_service = AppendBlobService(account_name=account_name,
                                     sas_token=sas_token,
                                     endpoint_suffix=endpoint_suffix)
    _stream_logs(byte_size=1024,           # 1 KiB
                 timeout_in_seconds=1800,  # 30 minutes
                 blob_service=blob_service,
                 container_name=container_name,
                 blob_name=blob_name,
                 raise_error_on_failure=raise_error_on_failure)
def update_files(self, fileName):
    """Upload <dirPath>/<fileName> to the blob 'data/<folder>/<fileName>'.

    Reads the local file, creates the target append blob and appends the
    contents, reporting progress through self.processCall.  Failures are
    printed and swallowed (best-effort upload).

    :param fileName: file name relative to self.dirPath.
    """
    try:
        file_path = os.path.abspath(
            os.path.join(os.path.dirname(__file__),
                         self.dirPath)) + '/' + fileName
        # FIX: use a context manager so the handle is closed even if
        # read() raises (the original leaked it in that case).
        with open(file_path, 'r') as f:
            text = f.read()

        name = self.folderName + '/' + fileName
        # SECURITY: hard-coded storage account key — move to configuration
        # and rotate the exposed key.
        append_blob_service = AppendBlobService(
            account_name='navview',
            account_key=
            '+roYuNmQbtLvq2Tn227ELmb6s1hzavh0qVQwhLORkUpM0DN7gxFc4j+DF/rEla1EsTN2goHEA1J92moOM/lfxg==',
            protocol='http')
        append_blob_service.create_blob(
            container_name='data',
            blob_name=name,
            content_settings=ContentSettings(content_type='text/plain'))
        # NOTE(review): `text` is a str but append_blob_from_bytes expects
        # bytes — confirm whether .encode() is needed with this SDK version.
        append_blob_service.append_blob_from_bytes(
            container_name='data',
            blob_name=name,
            blob=text,
            progress_callback=self.processCall)
    except Exception as e:
        # Best-effort: report and continue without raising.
        print(e)
def stream_logs(client, run_id, registry_name, resource_group_name,
                no_format=False, raise_error_on_failure=False):
    """Resolve the run's log SAS URL and stream the log blob.

    Raises CLIError when the SAS URL cannot be obtained or is empty.
    """
    error_msg = "Could not get logs for ID: {}".format(run_id)
    log_file_sas = None
    try:
        log_file_sas = client.get_log_sas_url(
            resource_group_name=resource_group_name,
            registry_name=registry_name,
            run_id=run_id).log_link
    except (AttributeError, CloudError) as e:
        logger.debug("%s Exception: %s", error_msg, e)
        raise CLIError(error_msg)

    if not log_file_sas:
        logger.debug("%s Empty SAS URL.", error_msg)
        raise CLIError(error_msg)

    account_name, endpoint_suffix, container_name, blob_name, sas_token = \
        get_blob_info(log_file_sas)
    blob_service = AppendBlobService(account_name=account_name,
                                     sas_token=sas_token,
                                     endpoint_suffix=endpoint_suffix)
    _stream_logs(no_format, DEFAULT_CHUNK_SIZE, DEFAULT_LOG_TIMEOUT_IN_SEC,
                 blob_service, container_name, blob_name,
                 raise_error_on_failure)
def WriteLog(txt):
    """Append one timestamped line of text to the log blob.

    In DEBUG_MODE the line is printed instead of uploaded.  Returns a short
    status string: "OK" on success, otherwise the name of the step that was
    in progress when the upload failed (logging is best-effort).

    :param txt: text to log; a trailing CRLF is appended.
    """
    szRet = ""
    if DEBUG_MODE:
        print(r"{0:%Y-%m-%d %H:%M:%S}".format(datetime.datetime.now())
              + r" : " + txt + "\r\n")
        return "Debug モードのためスキップしました。"
    try:
        szRet = "AppendBlobService"
        blob_service = AppendBlobService(account_name, account_key)
        szRet = "append_blob_from_text"
        blob_service.append_blob_from_text(
            log_container_name, log_file_name,
            r"{0:%Y-%m-%d %H:%M:%S}".format(datetime.datetime.now())
            + r" : " + txt + "\r\n")
        szRet = "OK"
    except Exception:
        # FIX: was a bare `except:`, which also swallowed SystemExit and
        # KeyboardInterrupt.  Logging stays best-effort for ordinary errors.
        pass
    return szRet
def __init__(self, name, key, subFolderName=None):
    # Store account credentials and create the append-blob client used by
    # the other LogWriter methods.
    super(LogWriter, self).__init__()
    self._name = name                      # storage account name
    self._key = key                        # storage account key
    self.m_szLogFileName = ""              # filled in when a log file is opened
    self.m_szSubFolderName = subFolderName  # optional sub-folder for blobs
    self.m_pBlobService = AppendBlobService(name, key)
def main():
    """Create the test container, then upload the sample files with each
    blob-service type at the configured connection counts."""
    block_service = BlockBlobService(settings.STORAGE_ACCOUNT_NAME,
                                     settings.STORAGE_ACCOUNT_KEY)
    page_service = PageBlobService(settings.STORAGE_ACCOUNT_NAME,
                                   settings.STORAGE_ACCOUNT_KEY)
    # Renamed from `abs`, which shadowed the builtin.
    append_service = AppendBlobService(settings.STORAGE_ACCOUNT_NAME,
                                       settings.STORAGE_ACCOUNT_KEY)
    # BUG FIX: the original called create_container on the undefined name
    # `service` (NameError at runtime); any of the clients can create it.
    block_service.create_container(CONTAINER_NAME)
    process(block_service, LOCAL_BLOCK_BLOB_FILES, CONNECTION_COUNTS)
    process(page_service, LOCAL_PAGE_BLOB_FILES, CONNECTION_COUNTS)
    process(append_service, LOCAL_APPEND_BLOB_FILES, CONNECTION_COUNTS)
def connect_append_blob_service(self, reconnect=False):
    """Ensure self.append_blob_service holds a usable AppendBlobService.

    :param reconnect: force creation of a fresh client even if one exists.
    :return: True when a client exists (or was created); False on failure.
    """
    # FIX: the original compared type(...).__name__ against the string
    # 'AppendBlobService'; isinstance is robust to subclassing and renames.
    if reconnect or not isinstance(self.append_blob_service, AppendBlobService):
        try:
            self.append_blob_service = AppendBlobService(
                account_name=self.account_name,
                account_key=self.account_key)
        except Exception as ex:
            print('Exception in connection to append blob service: {0}'.format(ex))
            return False
    return True
def azureStorage(self, accountName, accountkey, countainerName, fileName, text):
    """Create a text/plain append blob and append the given text to it.

    The created client is kept on self.append_blob_service for reuse.
    """
    client = AppendBlobService(account_name=accountName,
                               account_key=accountkey,
                               protocol='http')
    self.append_blob_service = client
    client.create_blob(
        container_name=countainerName,
        blob_name=fileName,
        content_settings=ContentSettings(content_type='text/plain'))
    client.append_blob_from_text(countainerName, fileName, text)
def append_log(today):
    """Append a test line to the log blob.

    The storage account and blob container must already exist; the
    module-level account_name / account_key / container_name / blob_name
    identify the target.

    :param today: currently unused by this implementation.
    :return: None
    """
    print('append log')
    service = AppendBlobService(account_name=account_name,
                                account_key=account_key)
    service.append_blob_from_text(container_name, blob_name, 'test' + '\n')
def azure_blob_storage():
    """Demonstrate Azure blob operations: create/list/download/delete block
    blobs and append to an append blob.  Errors are printed, not raised."""
    try:
        # To create a blob service or connection to azure blob storage
        block_blob_service = BlockBlobService(account_name='account_name',
                                              account_key='account_key')
        # To create a container
        container_name = 'container_name'
        block_blob_service.create_container(container_name)
        # Set access to a container such as public, readonly, private
        block_blob_service.set_container_acl(
            container_name, public_access=PublicAccess.Container)

        # Create a file to upload to azure blob storage.
        local_file_name = "Test.txt"
        # BUG FIX: the original assigned `path_to_file` but then used the
        # undefined name `full_path_to_file` (NameError).
        full_path_to_file = os.path.join('local_path', local_file_name)
        # FIX: context manager closes the handle even if write() raises.
        with open(full_path_to_file, 'w') as local_file:
            local_file.write("hi peoplee")

        # Upload the created file, use local_file_name for the blob name
        block_blob_service.create_blob_from_path(container_name,
                                                 local_file_name,
                                                 full_path_to_file)

        # To list all the blobs in the container
        container_list = block_blob_service.list_blobs(container_name)
        for file in container_list:
            # BUG FIX: the original concatenated a str with the set literal
            # {file.name}, which raises TypeError.
            print("Blob name: " + file.name)

        # Download the blob(s).
        download_file_path = os.path.join('local_path', 'local_file_name')
        block_blob_service.get_blob_to_path(container_name, local_file_name,
                                            download_file_path)

        # Delete a blob
        block_blob_service.delete_blob('container_name', 'blob_name')

        # Append-blob service; the same containers can hold all blob types.
        append_blob_service = AppendBlobService(account_name='myaccount',
                                                account_key='mykey')
        append_blob_service.create_container('container_name')
        # To append, the blob must exist first.
        append_blob_service.create_blob('container_name', 'append_blob')
        append_blob_service.append_blob_from_text('container_name',
                                                  'append_blob',
                                                  'Hello, world!')
        append_blob = append_blob_service.get_blob_to_text('container_name',
                                                           'append_blob')

        # Clean up resources. This includes the container and the temp files
        block_blob_service.delete_container(container_name)
    except Exception as e:
        # BUG FIX: the original except clause had no body (SyntaxError).
        print(e)
def claim_and_run_device(driver_id):
    # Claim a device twin from IoT Hub for this driver and run a simulator
    # against it, wiring state-reporting, telemetry and CSV blob logging.
    iot_hub = IoTHub(IOT_HUB_NAME, IOT_HUB_OWNER_KEY)
    device, device_twin = iot_hub.claim_device(driver_id)
    device_twin_json = json.loads(device_twin)
    device_id = device_twin_json['deviceId']
    iothub_device = IoTHubDevice(IOT_HUB_NAME, device_id, device.primaryKey)

    # Per-device append blob in the 'logs' container used as the log sink.
    append_blob_service = AppendBlobService(account_name=STORAGE_ACCOUNT_NAME,
                                            account_key=STORAGE_ACCOUNT_KEY)
    logs_container_name = 'logs'
    append_blob_service.create_container(logs_container_name,
                                         fail_on_exist=False)
    log_blob_name = '{0}.log'.format(device_id)

    def report_state(state):
        # Push reported-properties state back to the device twin.
        iothub_device.send_reported_state(state)

    def send_telemetry(data):
        # Forward telemetry payloads to IoT Hub.
        iothub_device.send_message(data)

    def log(message, code, level):
        # Append one CSV row (UTC timestamp, level, device, code, message)
        # to the device's log blob, creating it lazily; if_none_match='*'
        # makes creation succeed only when the blob does not already exist.
        if not append_blob_service.exists(logs_container_name, log_blob_name):
            append_blob_service.create_blob(
                logs_container_name,
                log_blob_name,
                if_none_match='*',
            )
        level_name = logging.getLevelName(level)
        output = io.StringIO()
        entry_data = [
            str(datetime.datetime.utcnow()) + 'Z',
            level_name,
            device_id,
            code,
            message
        ]
        writer = csv.writer(output, quoting=csv.QUOTE_MINIMAL)
        writer.writerow(entry_data)
        entry_text = output.getvalue()
        append_blob_service.append_blob_from_text(logs_container_name,
                                                  log_blob_name, entry_text)

    device_simulator = SimulatorFactory.create('devices.engines.Engine',
                                               report_state, send_telemetry,
                                               log)
    # Abort silently when the simulator rejects this twin's configuration.
    if not device_simulator.initialize(device_twin_json):
        return

    def device_twin_callback(update_state, payload, user_context):
        # Relay desired-property updates from IoT Hub into the simulator.
        device_simulator.on_update(str(update_state), json.loads(payload))

    iothub_device.client.set_device_twin_callback(device_twin_callback, 0)

    device_simulator.run()
def __init__(self, storage_creds, max_retries=10):
    """Create block/append blob clients and configure the retry policy.

    :param storage_creds: mapping with "name" and "key" for the account.
    :param max_retries: retry count passed to set_retries.
    """
    self.storage_id = storage_creds["name"]
    self.storage_key = storage_creds["key"]
    account = dict(account_name=self.storage_id,
                   account_key=self.storage_key)
    self.bs = BlockBlobService(**account)
    self.append_bs = AppendBlobService(**account)
    self.max_retries = max_retries
    self.set_retries(max_retries)
def open_stats_blob_json():
    """Start the stats JSON document in the append blob.

    Creates the blob if it does not yet exist (if_none_match='*') and then
    appends the opening '{' plus the ProcessDate member.
    """
    append_blob_service = AppendBlobService(STORAGE_ACCT_NAME, BLOB_KEY)
    file_contents = "{" + "\"ProcessDate\": \"{}\",".format(current_date)
    try:
        append_blob_service.create_blob(BLOB_CONTAINER, STATS_FILE_NAME,
                                        if_none_match='*')
    except Exception:
        # FIX: was a bare `except:`.  Creation is best-effort (the blob may
        # already exist), but SystemExit/KeyboardInterrupt must not be eaten.
        print("Tried to Create Existing Blob")
    append_blob_service.append_blob_from_bytes(BLOB_CONTAINER,
                                               STATS_FILE_NAME,
                                               file_contents.encode())
def write_to_azure(self):
    """Upload the recorded data file to the 'data' container, retrying once
    on failure, then record the session to the ANS platform.

    Behavior matches the original: the first failure is printed and retried;
    a second failure propagates.
    """
    # FIX: context manager closes the handle (the original never closed it).
    with open("data/" + self.user['fileName'], "r") as f:
        text = f.read()

    # FIX: the create/append sequence was copy-pasted for the retry; a
    # two-attempt loop removes the duplication with identical behavior.
    for attempt in (0, 1):
        try:
            # SECURITY: hard-coded storage account key — move to
            # configuration and rotate the exposed key.
            self.append_blob_service = AppendBlobService(
                account_name='navview',
                account_key='+roYuNmQbtLvq2Tn227ELmb6s1hzavh0qVQwhLORkUpM0DN7gxFc4j+DF/rEla1EsTN2goHEA1J92moOM/lfxg==',
                protocol='http')
            self.append_blob_service.create_blob(
                container_name='data',
                blob_name=self.name,
                content_settings=ContentSettings(content_type='text/plain'))
            self.append_blob_service.append_blob_from_text('data', self.name,
                                                           text)
            break
        except Exception:
            if attempt:
                raise  # second failure propagates, as before
            print('trying to write again due to exception')

    # record record to ansplatform
    self.record_to_ansplatform()
def prepare_storage(settings):
    """Create the 'bosh' and 'stemcell' containers plus the 'stemcells'
    metadata table in the default storage account.

    :param settings: mapping with DEFAULT_STORAGE_ACCOUNT_NAME and
        STORAGE_ACCESS_KEY entries.
    """
    account = settings["DEFAULT_STORAGE_ACCOUNT_NAME"]
    access_key = settings["STORAGE_ACCESS_KEY"]

    blob_service = AppendBlobService(account, access_key)
    blob_service.create_container('bosh')
    # Stemcell images are fetched anonymously, hence blob-level public read.
    blob_service.create_container(container_name='stemcell',
                                  public_access='blob')

    # Prepare the table for storing meta datas of storage account and stemcells
    table_service = TableService(account, access_key)
    table_service.create_table('stemcells')
def WriteLog(txt):
    """Append one timestamped line of text to the log blob.

    Returns a short status string: "OK" on success, otherwise the name of
    the step that was in progress when the upload failed (best-effort).

    :param txt: text to log; a trailing CRLF is appended.
    """
    szRet = ""
    try:
        szRet = "AppendBlobService"
        blob_service = AppendBlobService(account_name, account_key)
        szRet = "append_blob_from_text"
        blob_service.append_blob_from_text(
            log_container_name, log_file_name,
            r"{0:%Y-%m-%d %H:%M:%S}".format(datetime.datetime.now())
            + r" : " + txt + "\r\n")
        szRet = "OK"
    except Exception:
        # FIX: was a bare `except:`, which also swallowed SystemExit and
        # KeyboardInterrupt.  Logging stays best-effort for ordinary errors.
        pass
    return szRet
def __init__(self):
    '''Initialize and create a blob with CSV extension
    '''
    stamp = datetime.datetime.now().strftime('%Y_%m_%d_%H_%M_%S')
    self.name = 'data-' + stamp + '.csv'
    # NOTE(review): the account key is hard-coded here — it should live in
    # configuration/environment and the exposed key should be rotated.
    self.append_blob_service = AppendBlobService(
        account_name='navview',
        account_key=
        '+roYuNmQbtLvq2Tn227ELmb6s1hzavh0qVQwhLORkUpM0DN7gxFc4j+DF/rEla1EsTN2goHEA1J92moOM/lfxg==',
        protocol='http')
    self.append_blob_service.create_blob(
        container_name='data',
        blob_name=self.name,
        content_settings=ContentSettings(content_type='text/plain'))
    # Write-buffer state used by later append calls.
    self.first_row = 0
    self.write_str = ''
def azure_storage(self, accountName, sasToken, countainerName, fileName, text):
    """Upload `text` as a block blob using a SAS token.

    FIX: the original guarded an append-blob code path with `if 0:`, making
    it unreachable dead code; only the live block-blob path is kept.  The
    append-blob variant survives in version control if it is ever needed.
    """
    self.block_blob_service = BlockBlobService(account_name=accountName,
                                               sas_token=sasToken,
                                               protocol='http')
    self.block_blob_service.create_blob_from_text(
        container_name=countainerName,
        blob_name=fileName,
        text=text,
        content_settings=ContentSettings(content_type='text/plain'))
def prepare_storage_account(storage_account_name, storage_access_key,
                            endpoint_suffix, protocol="https"):
    """Create the 'bosh'/'stemcell' containers and the 'stemcells' metadata
    table in the given storage account."""
    common = dict(account_name=storage_account_name,
                  account_key=storage_access_key,
                  endpoint_suffix=endpoint_suffix,
                  protocol=protocol)

    blob_service = AppendBlobService(**common)
    blob_service.create_container('bosh')
    # Stemcell images are fetched anonymously, hence blob-level public read.
    blob_service.create_container(container_name='stemcell',
                                  public_access='blob')

    # Prepare the table for storing metadata of storage account and stemcells
    table_service = TableService(**common)
    table_service.create_table('stemcells')
def _get_client(self):
    """
    Azure blob service

    Returns:
        dict of azure.storage.blob.baseblobservice.BaseBlobService subclass:
        Service
    """
    parameters = self._secured_storage_parameters().copy()
    # "blob_type" is our own routing key, not a service constructor argument.
    parameters.pop("blob_type", None)
    return {
        _BlobTypes.PageBlob: PageBlobService(**parameters),
        _BlobTypes.BlockBlob: BlockBlobService(**parameters),
        _BlobTypes.AppendBlob: AppendBlobService(**parameters),
    }
def _get_client(self):
    """
    Azure blob service

    Returns:
        dict of azure.storage.blob.baseblobservice.BaseBlobService subclass:
        Service
    """
    parameters = self._secured_storage_parameters().copy()
    # 'blob_type' was added by pycosio and is unsupported by blob services.
    parameters.pop('blob_type', None)
    return {
        _BlobTypes.PageBlob: PageBlobService(**parameters),
        _BlobTypes.BlockBlob: BlockBlobService(**parameters),
        _BlobTypes.AppendBlob: AppendBlobService(**parameters)
    }
def CreateLogFile():
    """Create the log container and log blob if missing.

    Call this before WriteLog.  Returns a short status string: "OK" when the
    blob was created, "already blob." when it already existed, otherwise the
    name of the step that failed (errors are swallowed, best-effort).
    """
    szRet = ""
    if DEBUG_MODE:
        return "Debug モードのためスキップします。"
    try:
        szRet = "AppendBlobService"
        blob_service = AppendBlobService(account_name, account_key)
        szRet = "create_container"
        if not blob_service.exists(log_container_name):
            blob_service.create_container(log_container_name,
                                          public_access=PublicAccess.Blob)
        # Two-argument exists() checks the blob itself.
        if blob_service.exists(log_container_name, log_file_name):
            szRet = "already blob."
        else:
            szRet = "create_blob"
            blob_service.create_blob(log_container_name, log_file_name)
            # NOTE(review): the original's mangled formatting leaves it
            # ambiguous whether "OK" overwrote "already blob."; keeping the
            # distinct statuses — confirm against the caller's expectations.
            szRet = "OK"
    except Exception:
        # FIX: was a bare `except:`, which also swallowed SystemExit and
        # KeyboardInterrupt.
        pass
    return szRet
def init_storage_services(self):
    """
    Initializes the storage service clients using values from config.py.

    :return: True on success. False on failure.
    :rtype: boolean
    """
    try:
        # creates instance of BlockBlobService and AppendBlobService to use for completed results storage
        self.storage_service = BlockBlobService(
            account_name=self.config.storage_account_name,
            sas_token=self.config.results_container_sas_token)
        self.append_storage_service = AppendBlobService(
            account_name=self.config.storage_account_name,
            sas_token=self.config.results_container_sas_token)
        self.storage_service.create_container(
            self.config.results_container_name)

        # creates instances of Azure QueueService
        self.job_status_queue_service = QueueService(
            account_name=self.config.storage_account_name,
            sas_token=self.config.job_status_queue_sas_token)
        # messages are passed through unencoded; producers/consumers agree
        # on the payload format themselves
        self.job_status_queue_service.encode_function = models.QueueMessageFormat.noencode
        self.results_queue_service = QueueService(
            account_name=self.config.storage_account_name,
            sas_token=self.config.results_queue_sas_token)
        # NOTE(review): the results queue reuses results_container_name as
        # its queue name — presumably intentional; confirm.
        self.results_queue_service.create_queue(
            self.config.results_container_name)
        self.results_queue_service.encode_function = models.QueueMessageFormat.noencode

        # creates instance of Redis client to use for job status storage
        pool = redis.ConnectionPool(host=self.redis_host,
                                    port=self.redis_port)
        self.storage_service_cache = redis.Redis(connection_pool=pool)

        return True
    except Exception as ex:
        # broad catch is the error boundary here: log and signal failure
        self.log_exception(ex, self.init_storage_services.__name__)
        return False