def create_blob(blob, txt):
    """Write *txt* as a block blob at the location described by *blob*.

    *blob* carries the target ``uri`` plus the account ``name`` and access
    ``key``; the ``cs`` helpers pick the URI apart into host base,
    container and blob name.
    """
    target_uri = blob.uri
    service = BlobService(
        blob.name,
        blob.key,
        host_base=cs.get_host_base_from_uri(target_uri),
    )
    container = cs.get_container_name_from_uri(target_uri)
    name = cs.get_blob_name_from_uri(target_uri)
    service.put_block_blob_from_text(container, name, txt)
class AzureBlobStorage(StorageBase):
    """StorageBase backend that persists pickled values as Azure block blobs.

    account_name / account_key: Azure storage credentials.
    container_name: container holding every key; created on construction
    (create_container is a no-op when it already exists).
    """

    def __init__(self, account_name, account_key, container_name):
        self.__container_name = container_name
        self.__blob_service = BlobService(account_name=account_name,
                                          account_key=account_key)
        self.__blob_service.create_container(container_name)

    def get(self, key, default=None):
        """Return the unpickled value stored under *key*.

        NOTE(review): *default* is accepted for interface compatibility but
        is never used -- get_blob_to_text raises when the blob is missing,
        so callers must be prepared for that exception.
        """
        logger.info('get: key = %s' % key)
        # SECURITY: pickle.loads executes arbitrary code from the payload --
        # anyone who can write to this container controls this process.
        # Only use against a trusted container.
        return pickle.loads(
            self.__blob_service.get_blob_to_text(self.__container_name, key))

    def set(self, key, value):
        """Pickle *value* and store it under *key*."""
        # BUG FIX: the log line previously said 'get:' (copy/paste error),
        # making set() calls indistinguishable from get() calls in the log.
        logger.info('set: key = %s, value = %s' % (key, value))
        self.__blob_service.put_block_blob_from_text(self.__container_name,
                                                     key,
                                                     pickle.dumps(value))
class BlobCache:
    '''Minimal key/value cache stored in an Azure Blob Service container.

    name: the storage account name.
    key: the storage account access key.
    container: the container that holds the cached entries.
    '''

    def __init__(self, name, key, container):
        self.container = container
        self.blobstore = BlobService(name, key)
        self.blobstore.create_container(self.container)

    def getresponse(self, cachekey):
        '''Return the cached value for *cachekey*.

        Kilroy notes that this throws an exception rather than returning
        a value on failure (e.g. when the key is absent).
        '''
        blob_name = str2blobname(cachekey)
        return self.blobstore.get_blob_to_text(self.container, blob_name)

    def putresponse(self, cachekey, value):
        '''Store *value* in the cache under *cachekey*.'''
        blob_name = str2blobname(cachekey)
        return self.blobstore.put_block_blob_from_text(self.container,
                                                       blob_name, value)

    def invalidate(self, cachekey):
        '''Remove *cachekey* from the cache.  Immediate.  Permanent.'''
        blob_name = str2blobname(cachekey)
        self.blobstore.delete_blob(self.container, blob_name)
class BlobCache:
    '''Simplistic cache toolkit backed by an Azure Blob Service.

    name: storage account name.
    key: storage account access key.
    container: name of the container used for cache entries.
    '''

    def __init__(self, name, key, container):
        self.container = container
        self.blobstore = BlobService(name, key)
        self.blobstore.create_container(self.container)

    def _blob_name(self, cachekey):
        # Single place where a cache key is mapped onto its blob name.
        return str2blobname(cachekey)

    def getresponse(self, cachekey):
        '''Fetch the value cached under *cachekey*.

        Kilroy notes that this throws an exception rather than returning
        a value on failure.
        '''
        return self.blobstore.get_blob_to_text(self.container,
                                               self._blob_name(cachekey))

    def putresponse(self, cachekey, value):
        '''Associate *value* with *cachekey* in the cache.'''
        return self.blobstore.put_block_blob_from_text(self.container,
                                                       self._blob_name(cachekey),
                                                       value)

    def invalidate(self, cachekey):
        '''Drop *cachekey* from the cache.  Immediate.  Permanent.'''
        self.blobstore.delete_blob(self.container, self._blob_name(cachekey))
def generate_website_and_upload_azure(azure_csv_container, azure_web_container):
    """Build the download index page and publish it to blob storage.

    Lists the blobs in *azure_csv_container*, keeps those whose first eight
    characters parse as a date and whose names end in .csv/.zip, renders
    them through ``template.html`` and uploads the result as ``index.html``
    into *azure_web_container*.

    Credentials come from the ACC_NAME / ACCESS_KEY environment variables
    and the template directory from TEMPLATE_DIR.
    """
    import math  # hoisted from mid-function; stdlib import is idempotent

    blob_service = BlobService(account_name=os.getenv('ACC_NAME'),
                               account_key=os.getenv('ACCESS_KEY'))
    blob_list = blob_service.list_blobs(azure_csv_container)
    blob_name_list = blob_list.blobs
    keys = []
    # Only keep files whose dates can be parsed.
    for k in blob_name_list:
        try:
            parser.parse(k.name[:8])
            keys.append(k)
        # BUG FIX: was a bare `except:`, which also swallowed SystemExit and
        # KeyboardInterrupt.  dateutil signals unparseable input with
        # ValueError (ParserError subclasses it) or OverflowError.
        except (ValueError, OverflowError):
            pass
    keys = [k for k in keys if (".zip" in k.name or ".csv" in k.name)]
    my_array = []
    for k in keys:
        my_dict = {}
        # NOTE(review): the account host is hard-coded here; presumably it
        # should be derived from ACC_NAME -- confirm before reusing.
        url = r"http://fhrscsvs.blob.core.windows.net/{}/{}".format(
            azure_csv_container, k.name)
        name = k.name
        date = parser.parse(name[:8])
        dateformat = date.strftime("%a %d %b %Y")
        my_dict["Date of data download"] = dateformat
        my_dict["Size"] = sizeof_fmt(k.properties.content_length)
        name = get_link_text(name, dateformat, my_dict)
        my_dict["File"] = "<a href='{0}'>{1}</a>".format(url, name)
        my_array.append(my_dict)
    # File names embed the date, so sorting the link markup descending
    # effectively orders newest-first.
    my_array = sorted(my_array, key=lambda k: k['File'], reverse=True)
    table_array_fullsnapshot = [
        a for a in my_array if "__all_current" in a["File"]
    ]
    table_array_differences = [a for a in my_array if "__diff" in a["File"]]
    template_dir = os.getenv('TEMPLATE_DIR')
    loader = jinja2.FileSystemLoader(template_dir)
    environment = jinja2.Environment(loader=loader)
    j_template = environment.get_template("template.html")
    order = ["File", "Size"]
    timestamp = datetime.datetime.now().strftime("%a %d %b %Y at %H:%M")
    # Decorative wave heights for the template: a shifted cosine sampled at
    # 5-degree steps, values in [0, 28] (the name `sinarray` is historical).
    sinarray = [(math.cos(math.radians(i * 5 - 180)) + 1) * 14
                for i in range(0, 73)]
    html = j_template.render(table_array_fullsnapshot=table_array_fullsnapshot,
                             order=order,
                             timestamp=timestamp,
                             sinarray=sinarray,
                             table_array_differences=table_array_differences)
    blob_service.put_block_blob_from_text(
        azure_web_container,
        "index.html",
        html,
        x_ms_blob_content_type='text/html',
        text_encoding="utf-8",
    )
class SAzure(SyncStorage):
    # Azure Blob Storage implementation of SyncStorage.
    #
    # self.key  -- credential dict loaded from an alxkey module (or None
    #              when no key material was found).
    # self.blob -- live BlobService handle, created by connect().

    def __init__(self):
        """Load the Azure credential dict from alxlib's key store, if any."""
        super().__init__()
        # Message for the "no credentials" case.
        self.msg_key_na = _('Key not available')
        try:
            import alxlib.key
            key = alxlib.key.Key()
            if os.path.isfile(key.get_path()):
                # Make the directory holding alxkey.py importable, then pick
                # up the Azure credential dict it defines.
                sys.path.insert(0, key.get_dir())
                import alxkey
                self.key = alxkey.alxkey_azure
                """self.blob = BlobService(account_name=self.key['AZURE_STORAGE_ACCOUNT_NAME'], account_key=self.key['AZURE_ACCESS_KEY'])"""
            else:
                # raise (self.msg_key_na)
                self.key = None
        except:
            # NOTE(review): bare except silently leaves self.key unset when
            # anything above fails; connect() then returns None later.
            pass
            # raise (self.msg_key_na)

    def connect(self):
        """Create the BlobService handle from self.key.

        Returns a one-item container listing as a cheap liveness probe,
        or None on any failure (bad/missing credentials included).
        """
        try:
            self.blob = BlobService(account_name=self.key['AZURE_STORAGE_ACCOUNT_NAME'],
                                    account_key=self.key['AZURE_ACCESS_KEY'])
            return self.blob.list_containers(maxresults=1)
        except:
            return None

    def connect_blob(self, az_account_name=None, az_account_key=None):
        """Like connect(), optionally overriding the stored credentials first."""
        try:
            if az_account_name != None:
                self.key['AZURE_STORAGE_ACCOUNT_NAME'] = az_account_name
                self.key['AZURE_ACCESS_KEY'] = az_account_key
            return self.connect()
        except:
            return None

    def path_clean(self, path: str):
        """Remember the leading 'scheme//' part of *path* in self.container
        and return the remainder, guaranteed to end in '/'.

        Exits the process with an error message on a malformed path.
        """
        try:
            i = path.index("//") + 2
            self.container = path[0:i]
            if path[len(path) - 1] != "/":
                path += "/"
            return path[i:]
        except:
            print(_("Bad Path"))
            exit(1)

    def spath(self, container, root, b):
        """Build a SyncPath describing listing entry *b* relative to *root*
        inside *container*."""
        spath = SyncPath()
        spath.BasePath = container
        # A trailing "/" in the blob name marks a directory placeholder.
        if b.name[len(b.name) - 1] == "/":
            spath.IsDir = True
        else:
            spath.IsFile = True
        spath.AbsPath = b.name
        # NOTE(review): len(root) - 1 keeps one character of the prefix --
        # presumably the separator before the relative part; confirm.
        if len(root) > 0:
            spath.SPath = b.name[len(root) - 1:]
        else:
            spath.SPath = b.name
        spath.Size = b.properties.content_length
        import alxlib.time_help
        spath.ModifiedTS = alxlib.time_help.to_timestamp(b.properties.last_modified)
        spath.MD5 = b.properties.content_md5
        spath.sys = "azure"
        return spath

    def path_split(self, path: str):
        """Split 'container/rest/of/path' into (container, rest-of-path).

        Exits the process with an error message on failure.
        """
        try:
            list = path.split("/")  # NOTE(review): shadows the builtin `list`
            container = list[0]
            uri = ""
            if len(list) > 1:
                uri = "/".join(map(str, list[1:]))
            return container, uri
        except:
            print(_("Bad path"))
            exit(1)

    def path_list_blobs(self, container, uri):
        """List blobs in *container*, limited to prefix *uri* when non-empty.

        Prints an error, logs the location and exits the process on failure.
        """
        try:
            if len(uri) > 0:
                blobs = self.blob.list_blobs(container, prefix=uri)
            else:
                blobs = self.blob.list_blobs(container)
            """for blob in blobs: 
            print(blob.properties.__dict__) print(blob.name) print(blob.url)"""
            return blobs
        except Exception as e:
            print(_("Bad connection"))
            logging.warning("container {0}, path {1}".format(container, uri))
            exit(1)

    def path_list(self, path):
        """Return {relative-path: SyncPath} for every blob under *path*.

        Connects, creates the container if needed, then maps each listing
        entry through spath().  Prints the exception and returns None on
        failure.
        """
        try:
            logging.debug("path_list {0}".format(path))
            container, uri = self.path_split(path)
            logging.debug("Container: {0}, Uri: {1}".format(container, uri))
            self.connect()
            self.blob.create_container(container)
            blobs = self.path_list_blobs(container, uri)
            d = {}
            for b in blobs:
                spath = self.spath(container, uri, b)
                # print(b.__dict__)
                # print(str(b.properties.last_modified.__dict__))
                # print(str(spath.ModifiedTS))
                d[spath.SPath] = spath
            # print(d)
            return d
        except Exception as e:
            print(e)

    def remove(self, src: SyncPath):
        """Best-effort delete of *src*; every error is silently swallowed."""
        try:
            logging.debug("Removing {0}".format(src.AbsPath))
            self.connect()
            self.blob.create_container(src.BasePath)
            self.blob.delete_blob(src.BasePath, src.AbsPath)
        except:
            pass

    def copy_local2azure(self, src, base_dir):
        """Upload local entry *src* beneath *base_dir* ('container/prefix').

        Files are uploaded from src.AbsPath; directories become empty
        placeholder blobs whose names end in '/'.
        """
        try:
            container, uri = self.path_split(base_dir)
            if len(src.SPath) > 0 and src.SPath[0] == "/":
                path = uri + src.SPath[1:]
            else:
                path = uri + src.SPath
            logging.debug("copy_local2azure Spath {0}. \npath:{1}".format(src.SPath, path))
            self.connect()
            if not src.IsDir:
                self.blob.put_block_blob_from_path(container, path, src.AbsPath)
            else:
                self.blob.put_block_blob_from_text(container, path + "/", "")
        except Exception as e:
            print("Error Copying")
            print(e)

    def copy_azure2local(self, src, base_dir):
        """Download blob *src* into *base_dir*, creating parent directories.

        Blob names ending in '/' are directory markers and are not fetched.
        """
        try:
            if len(src.SPath) > 0 and (src.SPath[0] == "/" or src.SPath[0] == "\\"):
                path = src.SPath[1:]
            else:
                path = src.SPath
            path = os.path.normpath(os.path.join(base_dir, path))
            logging.debug("copy_azure2local basedir:{0} Spath {1}, path {2}, abs: {3}".format(
                base_dir, src.SPath, path, src.AbsPath))
            if not os.path.isdir(path):
                os.makedirs(os.path.dirname(path), exist_ok=True)
                # print( os.path.dirname(path)+"***************")
            if not (len(src.AbsPath) > 0 and src.AbsPath[len(src.AbsPath) - 1] == "/"):
                self.blob.get_blob_to_path(src.BasePath, src.AbsPath, path)
            """container, uri = self.path_split(base_dir)
            if len(src.SPath)>0 and src.SPath[0]=="/":
                path= uri+ src.SPath[1:]
            else:
                path= uri+src.SPath
            self.connect()
            if not src.IsDir:
                self.blob.get_blob_to_path(src.BasePath, path, src.AbsPath)
            else:
                self.blob.put_block_blob_from_text(container, path, "")"""
        except Exception as e:
            print("Error copying")
            print(e)
class BlobServiceAdapter(Component):
    """The :class:`BlobServiceAdapter` class is a thin wrapper over
    azure.storage.BlobService.

    All the attributes of the wrapper stream are proxied by the adapter so
    it's possible to do ``adapter.create_container()`` instead of the long
    form ``adapter.blob_service.create_container()``.

    Credentials and the host base are read from the ``storage.azure.*``
    configuration keys via ``self.util`` (provided by Component).
    """

    def __init__(self):
        self.blob_service = BlobService(
            account_name=self.util.get_config("storage.azure.account_name"),
            account_key=self.util.get_config("storage.azure.account_key"),
            host_base=self.util.get_config("storage.azure.blob_service_host_base"),
        )

    def __getattr__(self, name):
        # Proxy any attribute the adapter itself lacks to the wrapped service.
        return getattr(self.blob_service, name)

    def create_container_in_storage(self, container_name, access="container"):
        """create a container if doesn't exist

        :type container_name: str|unicode
        :param container_name: Name of container to create.

        :type access: str|unicode
        :param access: Optional. Possible values include: container, blob

        :return: create_container()'s result for a new container, True when
            it already exists, False when the lookup/creation raised.
        """
        try:
            names = [x.name for x in self.blob_service.list_containers()]
            if container_name not in names:
                return self.blob_service.create_container(container_name, x_ms_blob_public_access=access)
            else:
                self.log.debug("container already exists in storage")
                return True
        except Exception as e:
            self.log.error(e)
            return False

    def upload_file_to_azure(self, container_name, blob_name, stream):
        """Create or update a block blob from a file/stream, with automatic
        chunking and progress notifications; return the blob URL, or None
        when the container could not be ensured or the upload raised.

        :type container_name: str|unicode
        :param container_name: Name of existing container.

        :type blob_name: str | unicode
        :param blob_name: Name of blob to create or update.

        :type stream: file
        :param stream: Opened file/stream to upload as the blob content.
        """
        try:
            if self.create_container_in_storage(container_name, "container"):
                self.blob_service.put_block_blob_from_file(container_name, blob_name, stream)
                return self.blob_service.make_blob_url(container_name, blob_name)
            else:
                return None
        except Exception as e:
            self.log.error(e)
            return None

    def upload_file_to_azure_from_bytes(self, container_name, blob_name, blob):
        """Create or update a block blob from an array of bytes, with
        automatic chunking and progress notifications; return the blob URL,
        or None on failure.

        :type container_name: str|unicode
        :param container_name: Name of existing container.

        :type blob_name: str|unicode
        :param blob_name: Name of blob to create or update.

        :type blob: bytes
        :param blob: Content of blob as an array of bytes.
        """
        try:
            if self.create_container_in_storage(container_name, "container"):
                self.blob_service.put_block_blob_from_bytes(container_name, blob_name, blob)
                return self.blob_service.make_blob_url(container_name, blob_name)
            else:
                return None
        except Exception as e:
            self.log.error(e)
            return None

    def upload_file_to_azure_from_text(self, container_name, blob_name, text):
        """Create or update a block blob from str/unicode, with automatic
        chunking and progress notifications; return the blob URL, or None
        on failure.

        :type container_name: str|unicode
        :param container_name: Name of existing container.

        :type blob_name: str|unicode
        :param blob_name: Name of blob to create or update.

        :type text: str|unicode
        :param text: Text to upload to the blob.
        """
        try:
            if self.create_container_in_storage(container_name, "container"):
                self.blob_service.put_block_blob_from_text(container_name, blob_name, text)
                return self.blob_service.make_blob_url(container_name, blob_name)
            else:
                return None
        except Exception as e:
            self.log.error(e)
            return None

    def upload_file_to_azure_from_path(self, container_name, blob_name, path):
        """Create or update a *block* blob from a file path, with automatic
        chunking and progress notifications; return the blob URL, or None
        on failure.

        (Doc fix: this previously claimed to create a *page* blob, but the
        call below is put_block_blob_from_path.)

        :type container_name: str|unicode
        :param container_name: Name of existing container.

        :type blob_name: str|unicode
        :param blob_name: Name of blob to create or update.

        :type path: str|unicode
        :param path: Path of the file to upload as the blob content.
        """
        try:
            if self.create_container_in_storage(container_name, "container"):
                self.blob_service.put_block_blob_from_path(container_name, blob_name, path)
                return self.blob_service.make_blob_url(container_name, blob_name)
            else:
                return None
        except Exception as e:
            self.log.error(e)
            return None

    def delete_file_from_azure(self, container_name, blob_name):
        """Delete *blob_name* from *container_name*; errors are logged.

        NOTE(review): ensuring the container first means deleting from a
        non-existent container *creates* it as a side effect -- confirm
        this is intended.
        """
        try:
            if self.create_container_in_storage(container_name, "container"):
                self.blob_service.delete_blob(container_name, blob_name)
        except Exception as e:
            self.log.error(e)
            return None
class BlobServiceAdapter(Component):
    """Thin wrapper over azure.storage.BlobService.

    All attributes of the wrapped service are proxied by the adapter, so
    ``adapter.create_container()`` works as shorthand for
    ``adapter.blob_service.create_container()``.  Credentials and host base
    are read from the ``storage.azure.*`` configuration keys.
    """

    def __init__(self):
        self.blob_service = BlobService(account_name=self.util.get_config("storage.azure.account_name"),
                                        account_key=self.util.get_config("storage.azure.account_key"),
                                        host_base=self.util.get_config("storage.azure.blob_service_host_base"))

    def __getattr__(self, name):
        # Proxy any attribute the adapter itself lacks to the wrapped service.
        return getattr(self.blob_service, name)

    def create_container_in_storage(self, container_name, access="container"):
        """Create *container_name* if it does not already exist.

        :type container_name: str|unicode
        :param container_name: Name of container to create.
        :type access: str|unicode
        :param access: Optional. Possible values include: container, blob
        :return: create_container()'s result for a new container, True when
            it already exists, False when the lookup/creation raised.
        """
        try:
            names = [x.name for x in self.blob_service.list_containers()]
            if container_name not in names:
                return self.blob_service.create_container(container_name,
                                                          x_ms_blob_public_access=access)
            self.log.debug("container already exists in storage")
            return True
        except Exception as e:
            self.log.error(e)
            return False

    def upload_file_to_azure(self, container_name, blob_name, stream):
        """Create or update a block blob from an opened file/stream and
        return its URL, or None on failure.

        :param container_name: Name of existing container.
        :param blob_name: Name of blob to create or update.
        :param stream: Opened file/stream to upload as the blob content.
        """
        try:
            if self.create_container_in_storage(container_name, 'container'):
                self.blob_service.put_block_blob_from_file(container_name, blob_name, stream)
                return self.blob_service.make_blob_url(container_name, blob_name)
            return None
        except Exception as e:
            self.log.error(e)
            return None

    def upload_file_to_azure_from_bytes(self, container_name, blob_name, blob):
        """Create or update a block blob from *blob* (bytes) and return its
        URL, or None on failure.

        :param container_name: Name of existing container.
        :param blob_name: Name of blob to create or update.
        :param blob: Content of blob as an array of bytes.
        """
        try:
            if self.create_container_in_storage(container_name, 'container'):
                self.blob_service.put_block_blob_from_bytes(container_name, blob_name, blob)
                return self.blob_service.make_blob_url(container_name, blob_name)
            return None
        except Exception as e:
            self.log.error(e)
            return None

    def upload_file_to_azure_from_text(self, container_name, blob_name, text):
        """Create or update a block blob from *text* (str/unicode) and
        return its URL, or None on failure.

        :param container_name: Name of existing container.
        :param blob_name: Name of blob to create or update.
        :param text: Text to upload to the blob.
        """
        try:
            if self.create_container_in_storage(container_name, 'container'):
                self.blob_service.put_block_blob_from_text(container_name, blob_name, text)
                return self.blob_service.make_blob_url(container_name, blob_name)
            return None
        except Exception as e:
            self.log.error(e)
            return None

    def upload_file_to_azure_from_path(self, container_name, blob_name, path):
        """Create or update a *block* blob from the file at *path* and
        return its URL, or None on failure.

        DOC FIX: the previous docstring claimed a *page* blob was created,
        but put_block_blob_from_path uploads a block blob.

        :param container_name: Name of existing container.
        :param blob_name: Name of blob to create or update.
        :param path: Path of the file to upload as the blob content.
        """
        try:
            if self.create_container_in_storage(container_name, 'container'):
                self.blob_service.put_block_blob_from_path(container_name, blob_name, path)
                return self.blob_service.make_blob_url(container_name, blob_name)
            return None
        except Exception as e:
            self.log.error(e)
            return None

    def delete_file_from_azure(self, container_name, blob_name):
        """Delete *blob_name* from *container_name*; errors are logged.

        BUG FIX: this used to call create_container_in_storage() first, so
        deleting a blob from a non-existent container silently *created* a
        publicly-accessible container as a side effect.  The blob is now
        only deleted when the container already exists.
        """
        try:
            names = [x.name for x in self.blob_service.list_containers()]
            if container_name in names:
                self.blob_service.delete_blob(container_name, blob_name)
        except Exception as e:
            self.log.error(e)
            return None
def generate_website_and_upload_azure(azure_csv_container, azure_web_container):
    """Render the download index from the blobs in *azure_csv_container*
    and publish it as index.html into *azure_web_container*.

    Storage credentials come from ACC_NAME / ACCESS_KEY and the Jinja
    template directory from TEMPLATE_DIR.
    """
    blob_service = BlobService(account_name=os.getenv('ACC_NAME'),
                               account_key=os.getenv('ACCESS_KEY'))
    listing = blob_service.list_blobs(azure_csv_container).blobs

    # Only keep files whose leading eight characters parse as a date.
    dated = []
    for item in listing:
        try:
            parser.parse(item.name[:8])
            dated.append(item)
        except:
            pass
    dated = [item for item in dated
             if ".zip" in item.name or ".csv" in item.name]

    entries = []
    for item in dated:
        entry = {}
        url = r"http://fhrscsvs.blob.core.windows.net/{}/{}".format(
            azure_csv_container, item.name)
        label = item.name
        pretty_date = parser.parse(label[:8]).strftime("%a %d %b %Y")
        entry["Date of data download"] = pretty_date
        entry["Size"] = sizeof_fmt(item.properties.content_length)
        label = get_link_text(label, pretty_date, entry)
        entry["File"] = "<a href='{0}'>{1}</a>".format(url, label)
        entries.append(entry)
    entries.sort(key=lambda row: row['File'], reverse=True)

    table_array_fullsnapshot = [e for e in entries
                                if "__all_current" in e["File"]]
    table_array_differences = [e for e in entries if "__diff" in e["File"]]

    environment = jinja2.Environment(
        loader=jinja2.FileSystemLoader(os.getenv('TEMPLATE_DIR')))
    j_template = environment.get_template("template.html")

    import math
    # Shifted cosine sampled every 5 degrees; purely decorative values.
    sinarray = [(math.cos(math.radians(step * 5 - 180)) + 1) * 14
                for step in range(0, 73)]

    html = j_template.render(
        table_array_fullsnapshot=table_array_fullsnapshot,
        order=["File", "Size"],
        timestamp=datetime.datetime.now().strftime("%a %d %b %Y at %H:%M"),
        sinarray=sinarray,
        table_array_differences=table_array_differences)

    blob_service.put_block_blob_from_text(
        azure_web_container,
        "index.html",
        html,
        x_ms_blob_content_type='text/html',
        text_encoding="utf-8",
    )
# Page through every blob named 'input_*' in the container, then filter one
# CSV and write the result back.
# NOTE(review): `marker` and `blobs` are referenced before assignment here --
# they must be initialised earlier in the file (marker = None; blobs = []).
while True:
    batch = blob_service.list_blobs('YourContainer', marker=marker,
                                    prefix='input_')
    blobs.extend(batch)
    if not batch.next_marker:
        break
    marker = batch.next_marker

for blob in blobs:
    print(blob.name)

# Read the first blob from the previous listing as text, one entry per line.
data = blob_service.get_blob_to_text('rockt', blobs[0].name).split("\n")
print("Number of lines in CSV " + str(len(data)))

# Keep only the lines containing 'abc' or 'def'.
matchers = ['abc', 'def']
matching = [s for s in data if any(xs in s for xs in matchers)]
print("Number of lines in CSV " + str(len(matching)))

# Write the filtered text back to blob storage.
# BUG FIX: split("\n") discards the separators, so the old ''.join(matching)
# mashed every surviving line into one; rejoin with '\n' to emit valid CSV.
blob_service.put_block_blob_from_text(
    'YourContainer',
    'YourOutputFile.csv',
    '\n'.join(matching),
    x_ms_blob_content_type='text'
)
def puttextobjectinazure(strkey, url, data):
    """Store *data* as a block blob named *strkey* in config['container'],
    recording the source *url* in the blob's metadata.
    """
    import os  # local import: credentials may now come from the environment

    # SECURITY FIX: the storage account key was hard-coded in source (and
    # must be treated as leaked -- rotate it).  Environment variables now
    # take precedence; the old literals remain only as a fallback so
    # existing deployments keep working until they are migrated.
    account_name = os.getenv('AZURE_ACCOUNT_NAME', 'wanderight')
    account_key = os.getenv(
        'AZURE_ACCOUNT_KEY',
        'gdmZeJOCx3HYlFPZZukUhHAfeGAu4cfHWGQZc3+HIpkBHjlznUDjhXMl5HWh5MgbjpJF09ZxRaET1JVF9S2MWQ==')
    blob_service = BlobService(account_name=account_name,
                               account_key=account_key)
    blob_service.put_block_blob_from_text(
        config['container'],
        strkey,
        data,
        x_ms_meta_name_values={'url': url}
    )