def get_container(blob_service_client: BlobServiceClient, container_name: str) -> "ContainerClient":
    """Return a container client for ``container_name``, creating the container on failure.

    Fixes: the return annotation said ``bool`` but a container client is always
    returned; ``get_container_client`` was called twice; the bare ``except:``
    swallowed ``KeyboardInterrupt``/``SystemExit`` as well.
    """
    logging.info('blob_service_client.list_containers()')
    logging.info(list(blob_service_client.list_containers()))
    try:
        # Single call (the original called this twice and discarded the first result).
        container_client = blob_service_client.get_container_client(container_name)
    except Exception:
        # Best-effort fallback kept from the original: try to create the container.
        container_client = blob_service_client.create_container(container_name)
    return container_client
def test_blob_tier_set_tier_api(self):
    """A new premium page blob starts at inferred tier P10; after an explicit
    set_premium_page_blob_tier(P50), both properties and listings must report
    P50 with blob_tier_inferred False."""
    url = self._get_premium_account_url()
    credential = self._get_premium_shared_key_credential()
    pbs = BlobServiceClient(url, credential=credential)
    try:
        container_name = self.get_resource_name('utpremiumcontainer')
        container = pbs.get_container_client(container_name)
        if not self.is_playback():
            # Live runs create the container; recorded playback reuses it.
            try:
                container.create_container()
            except ResourceExistsError:
                pass
        blob = self._get_blob_reference()
        pblob = pbs.get_blob_client(container_name, blob.blob_name)
        pblob.create_page_blob(1024)
        blob_ref = pblob.get_blob_properties()
        # Newly created premium page blobs default to P10 with the tier inferred.
        self.assertEqual(PremiumPageBlobTier.P10, blob_ref.blob_tier)
        self.assertIsNotNone(blob_ref.blob_tier)
        self.assertTrue(blob_ref.blob_tier_inferred)
        pcontainer = pbs.get_container_client(container_name)
        blobs = list(pcontainer.list_blobs())
        # Assert
        self.assertIsNotNone(blobs)
        self.assertGreaterEqual(len(blobs), 1)
        self.assertIsNotNone(blobs[0])
        self.assertNamedItemInContainer(blobs, blob.blob_name)
        pblob.set_premium_page_blob_tier(PremiumPageBlobTier.P50)
        blob_ref2 = pblob.get_blob_properties()
        # After an explicit set the tier is no longer inferred.
        self.assertEqual(PremiumPageBlobTier.P50, blob_ref2.blob_tier)
        self.assertFalse(blob_ref2.blob_tier_inferred)
        blobs = list(pcontainer.list_blobs())
        # Assert
        self.assertIsNotNone(blobs)
        self.assertGreaterEqual(len(blobs), 1)
        self.assertIsNotNone(blobs[0])
        self.assertNamedItemInContainer(blobs, blob.blob_name)
        # The listing must also reflect the explicitly set tier.
        self.assertEqual(blobs[0].blob_tier, PremiumPageBlobTier.P50)
        self.assertFalse(blobs[0].blob_tier_inferred)
    finally:
        container.delete_container()
class AzurePersistor(Persistor):
    """Store models on Azure Blob Storage."""

    def __init__(
        self, azure_container: Text, azure_account_name: Text, azure_account_key: Text
    ) -> None:
        # Imported lazily so azure-storage-blob is only required when this
        # persistor is actually used.
        from azure.storage.blob import BlobServiceClient

        super().__init__()
        self.blob_service = BlobServiceClient(
            account_url=f"https://{azure_account_name}.blob.core.windows.net/",
            credential=azure_account_key,
        )
        self._ensure_container_exists(azure_container)
        # container holding all persisted models
        self.container_name = azure_container

    def _ensure_container_exists(self, container_name: Text) -> None:
        """Create the container if it does not already exist."""
        from azure.core.exceptions import ResourceExistsError

        try:
            self.blob_service.create_container(container_name)
        except ResourceExistsError:
            # no need to create the container, it already exists
            pass

    def _container_client(self) -> "ContainerClient":
        """Return a client bound to the configured container."""
        return self.blob_service.get_container_client(self.container_name)

    def list_models(self) -> List[Text]:
        """Lists models on remote storage.

        Returns:
            Paths to found models.
        """
        try:
            blob_iterator = self._container_client().list_blobs()
            # _model_dir_and_model_from_filename (from the Persistor base)
            # returns a (dir, model) pair; only the model part is kept.
            return [
                self._model_dir_and_model_from_filename(b.name)[1]
                for b in blob_iterator
            ]
        except Exception as e:
            # Best-effort: listing failures degrade to an empty result.
            logger.warning(f"Failed to list models azure blob storage. {e}")
            return []

    def _persist_tar(self, file_key: Text, tar_path: Text) -> None:
        """Uploads a model persisted in the `target_dir` to Azure."""
        with open(tar_path, "rb") as data:
            self._container_client().upload_blob(name=file_key, data=data)

    def _retrieve_tar(self, target_filename: Text) -> None:
        """Downloads a model that has previously been persisted to Azure."""
        blob_client = self._container_client().get_blob_client(target_filename)

        # Written to the current working directory under the blob's name.
        with open(target_filename, "wb") as blob:
            download_stream = blob_client.download_blob()
            blob.write(download_stream.readall())
def _get_container_service(container):
    """
    Return a container client for the container in question.
    Credentials are tried in order: account_key, sas_token, anonymous.
    """
    # "account_url_suffix" may be overridden via config; defaults to the
    # public Azure endpoint.
    suffix = container.get("account_url_suffix", "blob.core.windows.net")
    account_url = f'https://{container["account_name"]}.{suffix}'

    # Proxy from the container block wins; otherwise fall back to the
    # global https_proxy option.
    proxies = None
    if "proxy" in container:
        proxies = {"http": container["proxy"]}
    elif "https_proxy" in __opts__:
        proxies = {"https": __opts__["https_proxy"]}

    # Instantiate based upon which credential is configured.
    if "account_key" in container:
        service = BlobServiceClient(account_url=account_url,
                                    credential=container["account_key"],
                                    proxies=proxies)
    elif "sas_token" in container:
        service = BlobServiceClient(account_url=account_url,
                                    credential=container["sas_token"],
                                    proxies=proxies)
    else:
        service = BlobServiceClient(account_url=account_url, proxies=proxies)

    return service.get_container_client(container["container_name"])
def _download_blob(uri, out_dir: str):  # pylint: disable=too-many-locals
    """Download every blob under the prefix encoded in *uri* into *out_dir*.

    Raises:
        RuntimeError: if no blob matches the prefix.

    Fix: the original error message used a backslash line continuation inside
    the string literal, embedding raw indentation whitespace into the message.
    """
    match = re.search(_BLOB_RE, uri)
    account_url = re.search(_ACCOUNT_RE, uri).group(0)
    account_name = match.group(1)
    storage_url = match.group(2)
    container_name, prefix = storage_url.split("/", 1)
    logging.info(
        "Connecting to BLOB account: [%s], container: [%s], prefix: [%s]",
        account_name, container_name, prefix)
    token = Storage._get_azure_storage_token()
    if token is None:
        # A None credential means the SDK attempts anonymous access.
        logging.warning("Azure credentials not found, retrying anonymous access")

    blob_service_client = BlobServiceClient(account_url, credential=token)
    container_client = blob_service_client.get_container_client(container_name)
    count = 0
    for blob in container_client.list_blobs(prefix=prefix):
        # Mirror the blob hierarchy under out_dir.
        dest_path = os.path.join(out_dir, blob.name)
        Path(os.path.dirname(dest_path)).mkdir(parents=True, exist_ok=True)
        logging.info("Downloading: %s to %s", blob.name, dest_path)
        downloader = container_client.download_blob(blob.name)
        with open(dest_path, "wb+") as f:
            f.write(downloader.readall())
        count += 1
    if count == 0:
        raise RuntimeError(
            "Failed to fetch model. The path or model %s does not exist." % uri)
def main(args, config):
    """Bridge to config.ini: dispatch to the upload / download / list helpers."""
    service = BlobServiceClient(
        f"https://{config['storage']['account']}.blob.core.windows.net",
        config["storage"]["key"],
        logging_enable=False)
    # Bind to the container named in the configuration.
    container = service.get_container_client(config["storage"]["container"])

    if args.action == "list":
        logging.warning("Connexion et liaison avec le container")
        return listb(args, container)
    if args.action == "upload":
        # Connect a blob client named after the local file and send it.
        blob = container.get_blob_client(os.path.basename(args.cible))
        return upload(args.cible, blob)
    if args.action == "download":
        blob = container.get_blob_client(os.path.basename(args.remote))
        logging.warning("Télécharge le fichier")
        return download(args.remote, config["general"]["restoredir"], blob)
def main(args, config):
    """Read the settings from config.ini and dispatch the requested action
    (list / upload / download) to the matching helper."""
    logging.info("Lancement de la fonction main")
    service = BlobServiceClient(
        f"https://{config['storage']['account']}.blob.core.windows.net",
        config["storage"]["key"],
        logging_enable=False)
    logging.debug("Connexion au compte de stockage effectué")
    container = service.get_container_client(config["storage"]["container"])
    logging.debug("Connexion au container de stockage effectué")

    if args.action == "list":
        logging.debug("Lancement de la fonction liste")
        return listb(args, container)
    if args.action == "upload":
        blob = container.get_blob_client(os.path.basename(args.cible))
        logging.debug("Lancement de la fonction upload")
        logging.warning("Uploading du fichier")
        return upload(args.cible, blob)
    if args.action == "download":
        logging.debug("Lancement de la fonction download")
        blob = container.get_blob_client(os.path.basename(args.remote))
        logging.warning("Téléchargement du fichier")
        return download(args.remote, config["general"]["restoredir"], blob)
def main(args, config):
    """Connect to the Azure storage account described in config.ini and run
    the requested action: list, upload or download."""
    logging.debug("Tentative de connexion au compte Azure")
    service = BlobServiceClient(
        f"https://{config['storage']['account']}.blob.core.windows.net",
        config["storage"]["key"],
        logging_enable=False)
    logging.debug("Connexion réussie, recherche du contener")
    container = service.get_container_client(config["storage"]["container"])
    logging.debug("Contener trouvé !")

    if args.action == "list":
        return listb(args, container)
    if args.action == "upload":
        # Point a blob client at the file's basename and send the local file.
        blob = container.get_blob_client(os.path.basename(args.cible))
        return upload(args.cible, blob)
    if args.action == "download":
        blob = container.get_blob_client(os.path.basename(args.remote))
        return download(args.remote, config["general"]["restoredir"], blob)
def blob_service(self):
    """Build a BlobServiceClient (connection string or account URL) and make
    sure the target container exists before returning the client."""
    # pylint: disable=no-name-in-module
    from azure.storage.blob import BlobServiceClient
    from azure.core.exceptions import ResourceNotFoundError

    logger.debug(f"URL {self.path_info}")
    if self._conn_str:
        logger.debug(f"Using connection string '{self._conn_str}'")
        service = BlobServiceClient.from_connection_string(
            self._conn_str, credential=self._credential
        )
    else:
        logger.debug(f"Using account url '{self._account_url}'")
        service = BlobServiceClient(
            self._account_url, credential=self._credential
        )

    logger.debug(f"Container name {self.path_info.bucket}")
    container = service.get_container_client(self.path_info.bucket)
    try:
        # Cheap existence probe; raises if the container is missing.
        container.get_container_properties()
    except ResourceNotFoundError:
        container.create_container()
    return service
def uploadStorageBlobs(storage_name, storage_key, container_name, local_folder, file_list):
    '''
    Upload files to an azure blob container.

    PARAMS:
        storage_name   : string       : Name of the Azure Storage Account
        storage_key    : string       : Access Key to the Azure Storage Account
        container_name : string       : Container name to recieve blobs. Must exist
        local_folder   : string       : Local folder containing files to upload.
        file_list      : list[string] : List of files from local folder to upload
    RETURNS:
        Nothing
    '''
    account_url = "https://" + storage_name + ".blob.core.windows.net/"
    service = BlobServiceClient(account_url=account_url, credential=storage_key)
    container = service.get_container_client(container_name)

    for name in file_list:
        with open(os.path.join(local_folder, name), "rb") as data:
            try:
                container.upload_blob(name, data)
                print("File Uploaded : ", container_name, '-', name)
            except ResourceExistsError:
                # Existing blobs are reported, not overwritten.
                print("File Exists : ", container_name, '-', name)
def main(args, config):
    """Entry point of the script: execute the action selected on the
    command line (list / upload / download)."""
    logging.info('main fonction')
    service = BlobServiceClient(
        f"https://{config['storage']['account']}.blob.core.windows.net",
        config["storage"]["key"],
        logging_enable=False)
    logging.debug('connecting to the stockage account')
    container = service.get_container_client(config["storage"]["container"])
    logging.debug('connecting to the container')

    if args.action == "list":
        logging.debug("List argument choosen, executing List fonction")
        return listb(args, container)
    if args.action == "upload":
        blob = container.get_blob_client(os.path.basename(args.cible))
        logging.debug("arg upload, executing upload fonction")
        return upload(args.cible, blob)
    if args.action == "download":
        logging.debug("arg download . executing download fonction")
        blob = container.get_blob_client(os.path.basename(args.remote))
        logging.warning("Downloading file")
        return download(args.remote, config["general"]["restoredir"], blob)
def test_list_blobs(self, resource_group, location, storage_account, storage_account_key):
    """Listing blobs with metadata must report the encryption scope each
    blob was created with."""
    # Arrange: tiny transfer sizes force multi-chunk code paths.
    bsc = BlobServiceClient(
        self.account_url(storage_account, "blob"),
        credential=storage_account_key,
        connection_data_block_size=1024,
        max_single_put_size=1024,
        min_large_block_upload_threshold=1024,
        max_block_size=1024,
        max_page_size=1024)
    self._setup(bsc)
    blob_client, _ = self._create_block_blob(
        bsc, blob_name="blockblob", data=b'AAABBBCCC',
        encryption_scope=TEST_ENCRYPTION_KEY_SCOPE)
    self._create_append_blob(bsc, encryption_scope=TEST_ENCRYPTION_KEY_SCOPE)

    container_client = bsc.get_container_client(self.container_name)
    # Assert: every listed blob carries the encryption scope.
    for blob in container_client.list_blobs(include="metadata"):
        self.assertIsNotNone(blob)
        self.assertEqual(blob.encryption_scope, TEST_ENCRYPTION_KEY_SCOPE)
    self._teardown(bsc)
def test_sas_signature_is_scrubbed_off(self, resource_group, location, storage_account, storage_account_key):
    """Captured request logs must contain the 'sig' query parameter name but
    never its value — the SAS signature is a secret."""
    # SAS URL is calculated from storage key, so this test runs live only
    bsc = BlobServiceClient(self.account_url(storage_account, "blob"), storage_account_key)
    self._setup(bsc)
    # Arrange
    container = bsc.get_container_client(self.container_name)
    token = generate_container_sas(
        container.account_name,
        container.container_name,
        account_key=container.credential.account_key,
        permission=ContainerSasPermissions(read=True),
        expiry=datetime.utcnow() + timedelta(hours=1),
    )
    # parse out the signed signature (URL-quoted, as it would appear in a request)
    token_components = parse_qs(token)
    signed_signature = quote(
        token_components[QueryStringConstants.SIGNED_SIGNATURE][0])

    sas_service = ContainerClient.from_container_url(container.url, credential=token)

    # Act: perform a request with logging enabled while capturing the log output.
    with LogCaptured(self) as log_captured:
        sas_service.get_account_information(logging_enable=True)
        log_as_str = log_captured.getvalue()

        # Assert
        # make sure the query parameter 'sig' is logged, but its value is not
        self.assertTrue(
            QueryStringConstants.SIGNED_SIGNATURE in log_as_str)
        self.assertFalse(signed_signature in log_as_str)
def main(args, config):
    """Dispatch the requested action (list / upload / download) using the
    storage account settings from config.ini.

    Fixes: `loggi.warning(...)` referenced an undefined name (NameError on
    the upload/download paths); the download branch logged a copy-pasted
    'upload' message.
    """
    logging.info("lancement la function main")
    blobclient = BlobServiceClient(
        f"https://{config['storage']['account']}.blob.core.windows.net",
        config["storage"]["key"],
        logging_enable=False)
    logging.debug("connection u compte storage")
    containerclient = blobclient.get_container_client(
        config["storage"]["container"])
    logging.debug("connection au container")

    if args.action == "list":
        logging.debug('lancement la function list')
        return listb(args, containerclient)
    elif args.action == "upload":
        blobclient = containerclient.get_blob_client(
            os.path.basename(args.cible))
        logging.debug('lancement la function upload')
        # BUG FIX: was `loggi.warning` (undefined name).
        logging.warning('uploading')
        return upload(args.cible, blobclient)
    elif args.action == "download":
        # BUG FIX: message previously said 'upload' in the download branch.
        logging.debug('lancement la function download')
        blobclient = containerclient.get_blob_client(
            os.path.basename(args.remote))
        # BUG FIX: was `loggi.warning` (undefined name).
        logging.warning('dowloading')
        return download(args.remote, config["general"]["restoredir"],
                        blobclient)
class BlobText():
    """Read and write UTF-16 text blobs inside one Azure container.

    Fixes: `blobClient.exists` and `blobClient.delete_blob` were referenced
    without being called, so the existence checks were always truthy and the
    delete was a no-op; the `container_client is None` branch was dead code
    because get_container_client() never returns None.
    """

    def __init__(self, account_name, account_key, sas_token, container_name):
        self.account_name = account_name
        self.account_key = account_key
        self.sas_token = sas_token
        self.container_name = container_name
        # Create the BlobServiceClient object which will be used to create a container client
        self.blob_service_client = BlobServiceClient(
            "https://" + account_name + ".blob.core.windows.net/", account_key)
        # get_container_client() always returns a client object, so the former
        # `if ... is None: create_container(...)` fallback could never run.
        self.container_client = self.blob_service_client.get_container_client(
            container_name)

    def write_text(self, blob_name, text):
        """Store *text* as UTF-16 in *blob_name*, replacing any existing blob."""
        data = text.encode('utf-16')
        blob_client = self.container_client.get_blob_client(blob_name)
        # BUG FIX: call exists()/delete_blob() instead of referencing the
        # bound methods (which made the check always truthy and the delete a no-op).
        if blob_client.exists():
            blob_client.delete_blob()
        blob_client.upload_blob(data, overwrite=True)

    def read_text(self, blob_name):
        """Return the blob's content decoded as UTF-16, or None if it does not exist."""
        blob_client = self.container_client.get_blob_client(blob_name)
        # BUG FIX: exists() must be called; the bare attribute was always truthy.
        if blob_client.exists():
            data = blob_client.download_blob().readall()
            return data.decode('utf-16')
        return None
def _get_container_service(container):
    """
    Return the azure container client for the container in question.
    Credentials are tried in order: account_key, sas_token, no auth.
    """
    account_url = f'https://{container["account_name"]}.blob.core.windows.net'

    # Container-level proxy wins over the global https_proxy option.
    proxies = None
    if 'proxy' in container:
        proxies = {'http': container['proxy']}
    elif 'https_proxy' in __opts__:
        proxies = {'https': __opts__['https_proxy']}

    # Pick the first configured credential, anonymous if none.
    for cred_key in ("account_key", "sas_token"):
        if cred_key in container:
            service = BlobServiceClient(account_url=account_url,
                                        credential=container[cred_key],
                                        proxies=proxies)
            break
    else:
        service = BlobServiceClient(account_url=account_url, proxies=proxies)

    return service.get_container_client(container['container_name'])
def blob_service(self):
    """Return a BlobServiceClient after ensuring the container exists.

    A 403 on the existence probe is tolerated: the credential may lack
    account-level privileges while still allowing blob operations.
    """
    # pylint: disable=no-name-in-module
    from azure.core.exceptions import (
        HttpResponseError,
        ResourceNotFoundError,
    )
    from azure.storage.blob import BlobServiceClient

    logger.debug(f"URL {self.path_info}")
    if self._conn_str:
        logger.debug(f"Using connection string '{self._conn_str}'")
        service = BlobServiceClient.from_connection_string(
            self._conn_str, credential=self._credential)
    else:
        logger.debug(f"Using account url '{self._account_url}'")
        service = BlobServiceClient(self._account_url,
                                    credential=self._credential)

    logger.debug(f"Container name {self.path_info.bucket}")
    container = service.get_container_client(self.path_info.bucket)
    try:
        # Existence probe.
        container.get_container_properties()
    except ResourceNotFoundError:
        container.create_container()
    except HttpResponseError as exc:
        # Client may not have account-level privileges; anything but 403 is fatal.
        if exc.status_code != 403:
            raise
    return service
def _reinit_session(self):
    """
    Create a new session
    """
    # Credential resolution order: connection string, SAS token, shared key,
    # then DefaultAzureCredential.
    if "AZURE_STORAGE_CONNECTION_STRING" in os.environ:
        logging.info("Authenticating to Azure with connection string")
        # NOTE(review): `container_name` is not a documented parameter of
        # from_connection_string() — confirm it is intentionally passed through.
        client = BlobServiceClient.from_connection_string(
            conn_str=os.getenv("AZURE_STORAGE_CONNECTION_STRING"),
            container_name=self.bucket_name,
        )
    else:
        if "AZURE_STORAGE_SAS_TOKEN" in os.environ:
            logging.info("Authenticating to Azure with SAS token")
            credential = os.getenv("AZURE_STORAGE_SAS_TOKEN")
        elif "AZURE_STORAGE_KEY" in os.environ:
            logging.info("Authenticating to Azure with shared key")
            credential = os.getenv("AZURE_STORAGE_KEY")
        else:
            logging.info(
                "Authenticating to Azure with default credentials")
            # azure-identity is not part of azure-storage-blob so only import
            # it if needed
            try:
                from azure.identity import DefaultAzureCredential
            except ImportError:
                raise SystemExit(
                    "Missing required python module: azure-identity")
            credential = DefaultAzureCredential()
        # NOTE(review): same remark as above for the `container_name` kwarg.
        client = BlobServiceClient(
            account_url=self.account_url,
            credential=credential,
            container_name=self.bucket_name,
        )
    # Cache a client bound to the configured bucket/container.
    self.container_client = client.get_container_client(self.bucket_name)
def generate_token():
    """Return an account-level SAS token (read/write/add/create, 1 hour
    expiry), or an error string if the shared key cannot list containers.

    Fixes: narrowed the bare ``except:`` (which also caught SystemExit /
    KeyboardInterrupt); removed an unused container client and a block of
    commented-out code.
    """
    blob_service_client = BlobServiceClient(account_url=config.URL,
                                            credential=config.SHARED_KEY)
    try:
        # Probe the credential by consuming the container listing.
        for _ in blob_service_client.list_containers():
            continue
    except Exception:
        # Keep the original best-effort contract: report, don't raise.
        return 'cannot generate the sas token'

    sas_token = generate_account_sas(
        blob_service_client.account_name,
        account_key=blob_service_client.credential.account_key,
        resource_types=ResourceTypes(object=True),
        permission=AccountSasPermissions(read=True, write=True, add=True,
                                         create=True),
        expiry=datetime.utcnow() + timedelta(hours=1))
    return sas_token
def test_service_client_api_version_property(self):
    """api_version is read-only and is inherited by container and blob
    clients created from the service client."""
    # Default version when none is requested.
    service_client = BlobServiceClient(
        "https://foo.blob.core.windows.net/account", credential="fake_key")
    self.assertEqual(service_client.api_version, self.api_version_2)
    self.assertEqual(service_client._client._config.version,
                     self.api_version_2)

    # The property must not be assignable.
    with pytest.raises(AttributeError):
        service_client.api_version = "foo"

    # Explicitly requested version sticks.
    service_client = BlobServiceClient(
        "https://foo.blob.core.windows.net/account",
        credential="fake_key",
        api_version=self.api_version_1)
    self.assertEqual(service_client.api_version, self.api_version_1)
    self.assertEqual(service_client._client._config.version,
                     self.api_version_1)

    # Child clients inherit the requested version.
    container_client = service_client.get_container_client("foo")
    blob_client = service_client.get_blob_client("foo", "bar")
    for child in (container_client, blob_client):
        self.assertEqual(child.api_version, self.api_version_1)
        self.assertEqual(child._client._config.version, self.api_version_1)
def main(args, config):
    """Use the ContainerClient to act on a specific blob: 'list' returns the
    blobs in the container, 'upload' sends a local file to the container,
    and 'download' fetches a blob to the local machine."""
    logging.info("lancement de la fonction main")
    service = BlobServiceClient(
        f"https://{config['storage']['account']}.blob.core.windows.net",
        config["storage"]["key"],
        logging_enable=False)
    logging.debug("connection au compte de stockage effectuer")
    container = service.get_container_client(config["storage"]["container"])
    logging.debug("connection au container de stockage")

    if args.action == "list":
        logging.debug("l'arg list a été passé. Lancement de la fonction liste")
        return listb(args, container)
    if args.action == "upload":
        blob = container.get_blob_client(os.path.basename(args.cible))
        logging.debug(
            "arg upload a été passé. Lancement de la fonction upload")
        return upload(args.cible, blob)
    if args.action == "download":
        logging.debug(
            "arg download a été passé. Lancement de la fonction download")
        blob = container.get_blob_client(os.path.basename(args.remote))
        return download(args.remote, config["general"]["restoredir"], blob)
def __init__(self, blob_service: BlobServiceClient, db: DatabaseProxy):
    """Bind to the 'images' blob container and Cosmos container,
    creating each when it does not already exist.

    Fix: dropped the unused `as exc` binding on the handler.
    """
    try:
        self.blob_container = blob_service.create_container("images")
    except ResourceExistsError:
        # Container already exists — just attach a client to it.
        self.blob_container = blob_service.get_container_client(
            container="images")
    self.cosmos_container = db.create_container_if_not_exists(
        id="images", partition_key=PartitionKey(path="/image/path"))
def blob_clients(blob_access):
    """Return a (service client, container client) pair for *blob_access*.

    The credentials are carried as a SAS query string appended to the URL.
    """
    account_url = f"https://{blob_access.account}/?{blob_access.credentials}"
    blob_service_client = BlobServiceClient(account_url)
    container_client = blob_service_client.get_container_client(
        blob_access.container)
    return blob_service_client, container_client
def main(args, config):
    """
    Bridge to config.ini: dispatch to upload/download/list, and record
    uploaded books in the MySQL `liste_livres` table.
    """
    logging.debug("entrée dans la fonction main")
    nom_fichier(args.cible)
    blobclient = BlobServiceClient(
        f"https://{config['storage']['account']}.blob.core.windows.net",
        config["storage"]["key"],
        logging_enable=False)
    containerclient = blobclient.get_container_client(
        config["storage"]["container"])
    try:
        # `configuration` is a module-level MySQL connection dict (defined elsewhere).
        conn = mysql.connector.connect(**configuration)
        print("Connection established")
    except mysql.connector.Error as err:
        if err.errno == errorcode.ER_ACCESS_DENIED_ERROR:
            print("Something is wrong with the user name or password")
        elif err.errno == errorcode.ER_BAD_DB_ERROR:
            print("Database does not exist")
        else:
            print(err)
    else:
        # Only reached when the MySQL connection succeeded.
        cursor = conn.cursor()
        cursor.execute("SELECT * FROM liste_livres;")
        rows = cursor.fetchall()
        for row in rows:
            print("Data row = (%s, %s, %s, %s)" %
                  (str(row[0]), str(row[1]), str(row[2]), str(row[3])))
        if args.action == "upload":
            logging.debug("args action est égal à upload ")
            # upload branch: connect to the blob client and send it the file
            blobclient = containerclient.get_blob_client(
                os.path.basename(args.cible))
            titre = livre_existant(cursor)
            # interactive prompt: author name for the book record
            infos = input("rentrez le nom de l'auteur: ")
            nom_du_fichier = nom_fichier(args.cible)
            # NOTE(review): blob URL is hard-coded to one storage account /
            # container — confirm this matches the configured storage account.
            url_blob = "https://librarystokage2.blob.core.windows.net/conteneur-livres-blob/" + nom_du_fichier
            logging.debug("essaie d'enregistrer les données dans la base")
            cursor.execute(
                "INSERT INTO liste_livres (titre, infos, url_blob) VALUES (%s, %s, %s);",
                (titre, infos, url_blob))
            logging.debug(
                "le titre et les infos du nouveau livre a bien été enregistré dans la base"
            )
            print("Inserted", cursor.rowcount, "row(s) of data.")
            conn.commit()
            cursor.close()
            conn.close()
            print(" nouveau livre enregistré dans la base!! Youhou!!")
            return upload(args.cible, blobclient)
def upload_image(service: BlobServiceClient, data, container_name='post'):
    """Upload *data* as a uniquely named blob and return its public URL.

    Fix: the returned URL hard-coded the 'post' container, so any other
    *container_name* produced a wrong URL. Behavior is unchanged for the
    default container.
    """
    blob_name = str(uuid4())
    logging.debug(len(data))
    # The target container must already exist before uploading.
    container_client = service.get_container_client(container_name)
    container_client.upload_blob(name=blob_name, data=data)
    url = f"https://respacimages.blob.core.windows.net/{container_name}/{blob_name}"
    return url
def create_if_not_exists_container(storage_name: str, key: str,
                                   container_name: str, logger: Logger):
    """Create *container_name* in the given storage account; an already
    existing container is only logged as a warning."""
    service = BlobServiceClient(account_url=get_account_url(storage_name),
                                credential=key)
    container = service.get_container_client(container_name)
    try:
        container.create_container()
        logger.info(f"Creating container: {container_name}")
    except ResourceExistsError:
        logger.warning("Output container already exists")
def upload_blob(file, filename, input_f, year):
    """Upload *file* into the input or output area of the container,
    naming the blob from *filename* and *year* (.sav for models, .csv
    otherwise). Overwrites any existing blob of the same name."""
    blob_service = BlobServiceClient(account_url=url, credential=key)

    # input_f selects the "input" sub-path, otherwise "output".
    subdir = "input" if (input_f) else "output"
    target = container + "/" + subdir

    # Model files are serialized with .sav, everything else is CSV.
    extension = ".sav" if (filename[0:5] == "model") else ".csv"
    insert_filenm = filename + "-" + year + extension

    blob_container = blob_service.get_container_client(target)
    blob_container.upload_blob(insert_filenm, file, overwrite=True)
    return None
def main(args, config):
    """Dispatch list/upload/download against the Azure storage account
    described in *config* (a ConfigParser over config.ini).

    Fixes: two calls used `log.debug` although the rest of this function —
    and the module — uses `logging` (likely a NameError); the download
    branch logged a message copy-pasted from the upload branch.
    """
    # args   => all command-line arguments
    # config => ConfigParser instance with the config.ini contents
    logging.debug(
        f"démarrage du programme, recherche de connection au compte de stockage {config['storage']['account']}"
    )
    # Identify the storage account by its URL.
    blobclient = BlobServiceClient(
        f"https://{config['storage']['account']}.blob.core.windows.net",
        config["storage"]["key"],
        logging_enable=False)
    logging.info(
        f"Connection au compte de stockage {config['storage']['account']} réussie"
    )
    logging.debug(
        f"recherche de connection au container Azure {config['storage']['container']}"
    )
    # Bind to the container of interest.
    containerclient = blobclient.get_container_client(
        config["storage"]["container"])
    logging.info(
        f"Connection au container Azure {config['storage']['container']} réussie"
    )
    if args.action == "list":
        logging.debug(
            "action demandée : afficher list des blobs, lancement de la fonction listb"
        )
        return listb(containerclient)
    else:
        if args.action == "upload":
            # Re-point blobclient at the blob named after the local file.
            blobclient = containerclient.get_blob_client(
                os.path.basename(args.cible))
            # BUG FIX: was `log.debug` (undefined name in this block).
            logging.debug(
                "action demandée : charger des blobs dans un container, lancement de la fonction upload"
            )
            return upload(args.cible, blobclient)
        elif args.action == "download":
            # Re-point blobclient at the remote blob to fetch.
            blobclient = containerclient.get_blob_client(
                os.path.basename(args.remote))
            # BUG FIX: was `log.debug` with a message copy-pasted from the
            # upload branch.
            logging.debug(
                "action demandée : télécharger des blobs depuis un container, lancement de la fonction download"
            )
            return download(args.remote, config["general"]["restoredir"],
                            blobclient)
def copy_object(source_blob_url, container, blob_name, metadata, tags,
                dest_blob_service_client: BlobServiceClient):
    """Synchronously copy the blob at *source_blob_url* into
    *container*/*blob_name*, attaching *metadata* and *tags*; the
    destination container is created on demand."""
    container_client = dest_blob_service_client.get_container_client(container)
    if not container_client.exists():
        container_client.create_container()

    blob_client = container_client.get_blob_client(blob_name)
    # requires_sync=True blocks until the server-side copy completes.
    copy_status = blob_client.start_copy_from_url(
        source_url=source_blob_url,
        metadata=metadata,
        tags=tags,
        requires_sync=True)
    logging.info(f'copy status: {copy_status}')
def copy_file_to_blob(account_name, sas_token, container_name, local_path,
                      remote_path):
    """ Copies a local file to blob storage """
    service = BlobServiceClient(
        account_url=account_name_to_url(account_name), credential=sas_token)
    container = service.get_container_client(container_name)
    with open(local_path, 'rb') as data:
        container.upload_blob(remote_path, data)