Code example #1
    def __init__(self, connection_string, share_name, file_path):
        self.share_cli = ShareClient.from_connection_string(
            conn_str=connection_string, share_name=share_name)
        self.file_cli = ShareFileClient.from_connection_string(
            conn_str=connection_string,
            share_name=share_name,
            file_path=file_path)
Code example #2
    def upload_a_file_to_share(self):
        # Instantiate the ShareClient from a connection string
        from azure.storage.fileshare import ShareClient
        share = ShareClient.from_connection_string(self.connection_string,
                                                   share_name="helloworld2")

        # Create the share
        share.create_share()

        try:
            # Instantiate the ShareFileClient from a connection string
            # [START create_file_client]
            from azure.storage.fileshare import ShareFileClient
            file = ShareFileClient.from_connection_string(
                self.connection_string,
                share_name="helloworld2",
                file_path="myfile")
            # [END create_file_client]

            # Upload a file
            with open(SOURCE_FILE, "rb") as source_file:
                file.upload_file(source_file)

        finally:
            # Delete the share
            share.delete_share()
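SOURCE_FILE in the snippet above is a module-level constant of the sample; a minimal, hypothetical definition for running it locally would be:

# Hypothetical path; any small local file works as the upload source.
SOURCE_FILE = "./SampleSource.txt"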
Code example #3
def delete_dir_tree(c_str, s_name, d_name, space=""):
    # Recursively delete every file and subdirectory under d_name,
    # then remove d_name itself.
    dir_client = ShareDirectoryClient.from_connection_string(
        conn_str=c_str, share_name=s_name, directory_path=d_name)

    # Materialize the listing first so we are not deleting entries
    # while still iterating over the paged results.
    my_list = []
    for item in dir_client.list_directories_and_files():
        my_list.append(item)

    for ele in my_list:
        print(space, ele)

        if ele['is_directory']:
            delete_dir_tree(c_str,
                            s_name,
                            d_name + "/" + ele['name'],
                            space=space + "   ")
        else:
            file_client = ShareFileClient.from_connection_string(
                conn_str=c_str,
                share_name=s_name,
                file_path=d_name + "/" + ele['name'])
            file_client.delete_file()

    dir_client.delete_directory()
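A minimal usage sketch for the recursive delete above; the connection string, share name, and directory below are assumptions, not part of the original code:

import os
from azure.storage.fileshare import ShareDirectoryClient, ShareFileClient

conn_str = os.environ["AZURE_STORAGE_CONNECTION_STRING"]  # hypothetical env variable
delete_dir_tree(conn_str, "myshare", "mydir")  # removes mydir and everything under it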
Code example #4
def helper_download_dir(source_dir, desti_dir, c_str, s_name, space=""):

    dir_client = ShareDirectoryClient.from_connection_string(
        conn_str=c_str, share_name=s_name, directory_path=source_dir)

    my_list = []
    for item in dir_client.list_directories_and_files():
        my_list.append(item)

    for ele in my_list:
        print(space, ele)

        if ele['is_directory']:
            os.mkdir(desti_dir + "/" + ele['name'])
            helper_download_dir(source_dir + "/" + ele['name'],
                                desti_dir + "/" + ele['name'], c_str, s_name,
                                space + "   ")
        else:

            file_client = ShareFileClient.from_connection_string(
                conn_str=c_str,
                share_name=s_name,
                file_path=source_dir + "/" + ele['name'])

            with open(desti_dir + "/" + ele['name'], "wb") as data:
                stream = file_client.download_file()
                data.write(stream.readall())
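As with the delete helper, a hypothetical invocation sketch; note that the local destination root must already exist, since the function only creates subdirectories beneath it:

import os

conn_str = os.environ["AZURE_STORAGE_CONNECTION_STRING"]  # hypothetical env variable
os.makedirs("local_copy", exist_ok=True)                  # destination root created up front
helper_download_dir("remote_dir", "local_copy", conn_str, "myshare")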
Code example #5
    def download_snapshot_file(self, connection_string, share_name, snapshot_time, dir_name, file_name):
        try:
            # Build the remote path
            source_file_path = dir_name + "/" + file_name

            # Add a prefix to the local filename to 
            # indicate it's a file from a snapshot
            dest_file_name = "SNAPSHOT-" + file_name

            # Create a ShareFileClient from a connection string
            snapshot_file_client = ShareFileClient.from_connection_string(
                conn_str=connection_string, share_name=share_name, 
                file_path=source_file_path, snapshot=snapshot_time)

            print("Downloading to:", dest_file_name)

            # Open a file for writing bytes on the local system
            with open(dest_file_name, "wb") as data:
                # Download the file from Azure into a stream
                stream = snapshot_file_client.download_file()
                # Write the stream to the local file
                data.write(stream.readall())

        except ResourceNotFoundError as ex:
            print("ResourceNotFoundError:", ex.message)
Code example #6
File: az_files_service.py Project: equinor/seis-ee
    def download_file(self, azure_storage_path_to_file: str):
        if settings.ENVIRONMENT == "prod":
            output_filename = "temp_file.sgy"
        elif settings.ENVIRONMENT == "dev":
            output_filename = "data/temp_file.sgy"
        else:
            # Fail fast instead of hitting an UnboundLocalError further down
            raise ValueError(f"Unknown environment: {settings.ENVIRONMENT}")

        # Create a ShareFileClient from a connection string
        file_client = ShareFileClient.from_connection_string(
            self.conn_str, self.share, azure_storage_path_to_file)
        file_properties = file_client.get_file_properties()
        file_size = file_properties.content_length  # size of file in bytes
        if file_size > settings.FILE_SIZE_LIMIT_IN_BYTES:
            raise DownloadFileException(
                "Tried to decimate a file larger than 400 MB. Abort.")
        logger.info(
            f"Downloading file from azure storage to local file {output_filename}"
        )

        # Open a file for writing bytes on the local system - will write over existing file
        with open(output_filename, "wb") as data:
            # Download the file from Azure into a stream
            stream = file_client.download_file()
            # Write the stream to the local file
            data.write(stream.readall())

        return output_filename
Code example #7
    def __init__(self, conn_str: str, share_name: str, file_path: str) -> None:
        """Initialize the sender class.

        Retrieves the DB from the file share. All the parameters of __init__
        are there to retrieve the DB.

        Parameters
        ----------
        conn_str
            Connection string to the storage account containing the DB. Every
            Function App has a storage account associated with it. Its
            connection string is stored in the default env variable
            AzureWebJobsStorage.
        share_name
            Name of the share where the DB is kept.
        file_path
            Path within the File Share to the DB.
        """

        file_client = ShareFileClient.from_connection_string(
            conn_str=conn_str,
            share_name=share_name,
            file_path=file_path,
        )

        data = file_client.download_file()
        self.email_db = json.loads(data.readall())
Code example #8
File: web.py Project: ilabservice/evsuits
def downloadFile(ipcSn, dirName, fileName, destDir):
  file_path=ipcSn + '/'+dirName+'/'+fileName
  destDir = destDir + '/' + fileName
  print("downloading {}: {} {} {}".format(destDir, ipcSn, dirName, file_path))
  with ShareFileClient.from_connection_string(conn_str=CONNSTR, share_name=SHARENAME, file_path=file_path) as fc:
      with open(destDir, "wb") as f:
          data = fc.download_file()
          data.readinto(f)
Code example #9
File: az_files_service.py Project: equinor/seis-ee
    def file_exists(self, path: str) -> bool:
        try:
            file_client = ShareFileClient.from_connection_string(
                self.conn_str, self.share, path)
            # If getting props does not raise an error, we assume the file exists
            file_client.get_file_properties()
            return True
        except ResourceNotFoundError:
            return False
Code example #10
    def __init__(self,
                 connection_string,
                 share_name='funcstatemarkershare',
                 file_path='funcstatemarkerfile'):
        self.share_cli = ShareClient.from_connection_string(
            conn_str=connection_string, share_name=share_name)
        self.file_cli = ShareFileClient.from_connection_string(
            conn_str=connection_string,
            share_name=share_name,
            file_path=file_path)
Code example #11
def test_azure_files_ingress(conn_str, share_name, source_disk, file_to_upload):
    time_start = time.time()

    file_client = ShareFileClient.from_connection_string(conn_str, share_name, file_to_upload)

    source_path = f"{source_disk}\\{file_to_upload}"
    fsize_b = os.path.getsize(source_path)  # uploaded file size in bytes
    mb = 1024 * 1024

    with open(source_path, "rb") as source_file:
        file_client.upload_file(source_file)

    dt = time.time() - time_start
    speed = (1.0 * fsize_b / dt) / mb  # in MBytes/sec
    print(f"Ingress speed: {speed:.2f} MBytes/sec")
Code example #12
def download_a_file(this_file_path, this_share_name, local_file_path):
    refresh_var_env()
    file_client = ShareFileClient.from_connection_string(
        conn_str=AZ_CONN_STR, 
        share_name=this_share_name, 
        file_path=this_file_path
        )

    with open(local_file_path, "wb") as file_handle:
        data = file_client.download_file()
        data.readinto(file_handle)
        return 0
Code example #13
    def delete_azure_file(self, connection_string, share_name, file_path):
        try:
            # Create a ShareFileClient from a connection string
            file_client = ShareFileClient.from_connection_string(
                connection_string, share_name, file_path)

            print("Deleting file:", share_name + "/" + file_path)

            # Delete the file
            file_client.delete_file()

        except ResourceNotFoundError as ex:
            print("ResourceNotFoundError:", ex.message)
Code example #14
File: __main__.py Project: mguelfi/nsu
def file_upload(fp, scan_name):
    """
    Uploads the contents of a file-like object as a file, using the
    credentials in the configuration, into the folder in the configuration.

    :param fp:
        file-like object to upload
    :param scan_name:
        str the name assigned to the object on the upload server
    """
    file_client = ShareFileClient.from_connection_string(
            args.conn_str, args.folder, scan_name)
    file_client.upload_file(fp)
Code example #15
def upload_source(source_name, source_dir, desti_dir, c_str, s_name,
                  useless_ele={"__pycache__"}, space=""):

    if os.path.isdir(source_dir + "/" + source_name):
        dir_client = ShareDirectoryClient.from_connection_string(
            conn_str=c_str, share_name=s_name,
            directory_path=desti_dir + "/" + source_name)
        dir_client.create_directory()

        print(source_dir + "/" + source_name)
        helper_copy_dir(source_dir + "/" + source_name,
                        desti_dir + "/" + source_name,
                        c_str, s_name, useless_ele, space=space)

    else:
        file_client = ShareFileClient.from_connection_string(
            conn_str=c_str, share_name=s_name,
            file_path=desti_dir + "/" + source_name)

        with open(source_dir + "/" + source_name, "rb") as source_file:
            file_client.upload_file(source_file)

    print("Upload Complete")
Code example #16
    def upload_local_file(self, connection_string, local_file_path, share_name, dest_file_path):
        try:
            with open(local_file_path, "rb") as source_file:
                data = source_file.read()

            # Create a ShareFileClient from a connection string
            file_client = ShareFileClient.from_connection_string(
                connection_string, share_name, dest_file_path)

            print("Uploading to:", share_name + "/" + dest_file_path)
            file_client.upload_file(data)

        except ResourceExistsError as ex:
            print("ResourceExistsError:", ex.message)

        except ResourceNotFoundError as ex:
            print("ResourceNotFoundError:", ex.message)
Code example #17
def helper_copy_dir(source_dir, desti_dir, c_str, s_name, useless_ele, space=""):
    for ele in os.listdir(source_dir):
        if ele in useless_ele:
            continue

        print(space, int(os.path.isdir(source_dir + "/" + ele)), ele)

        if os.path.isdir(source_dir + "/" + ele):
            dir_client = ShareDirectoryClient.from_connection_string(
                conn_str=c_str, share_name=s_name,
                directory_path=desti_dir + "/" + ele)
            dir_client.create_directory()

            helper_copy_dir(source_dir + "/" + ele, desti_dir + "/" + ele,
                            c_str, s_name, useless_ele, space=space + "   ")
        else:
            file_client = ShareFileClient.from_connection_string(
                conn_str=c_str, share_name=s_name,
                file_path=desti_dir + "/" + ele)

            with open(source_dir + "/" + ele, "rb") as source_file:
                file_client.upload_file(source_file)
Code example #18
def download_source(source_name,
                    source_dir,
                    desti_dir,
                    c_str,
                    s_name,
                    space=""):

    dir_client = ShareDirectoryClient.from_connection_string(
        conn_str=c_str, share_name=s_name, directory_path=source_dir)

    source_info = None

    for ele in dir_client.list_directories_and_files():
        if ele['name'] == source_name:
            source_info = ele
            break

    if source_info is None:
        print("Source does not exist")
        return

    print(source_info)

    if source_info['is_directory']:
        os.mkdir(desti_dir + "/" + source_info['name'])
        helper_download_dir(source_dir + "/" + source_info['name'],
                            desti_dir + "/" + source_info['name'], c_str,
                            s_name, space + "   ")

    else:
        file_client = ShareFileClient.from_connection_string(
            conn_str=c_str,
            share_name=s_name,
            file_path=source_dir + "/" + source_info['name'])

        with open(desti_dir + "/" + source_info['name'], "wb") as data:
            stream = file_client.download_file()
            data.write(stream.readall())

    print("Download Complete")
Code example #19
File: az_files_service.py Project: equinor/seis-ee
    def upload_file(self, path: str):
        name_to_upload_as = path.replace("data/", "")
        self.create_tree(name_to_upload_as)

        try:
            with open(path, "rb") as source_file:
                data = source_file.read()
                file_client = ShareFileClient.from_connection_string(
                    self.conn_str, self.share, name_to_upload_as)
                logger.info(
                    f"Uploading decimated file to Azure Files: {name_to_upload_as}"
                )
                file_client.upload_file(data)

        except ResourceExistsError as ex:
            logger.error("ResourceExistsError: %s", ex.message)
        except ResourceNotFoundError as ex:
            logger.error("ResourceNotFoundError: %s", ex.message)

        return name_to_upload_as
Code example #20
def download_from_azure_file_share(connection_string, share_name, source_file_path, local_dir_path):
    try:
        # Build the local destination path from the remote file name
        file_name = source_file_path.split("/")[-1]
        dest_file_name = os.path.join(local_dir_path, file_name)

        # Create a ShareFileClient from a connection string
        file_client = ShareFileClient.from_connection_string(
            connection_string, share_name, source_file_path)

        print("Downloading to:", dest_file_name)

        # Open a file for writing bytes on the local system
        with open(dest_file_name, "wb") as data:
            # Download the file from Azure into a stream
            stream = file_client.download_file()
            # Write the stream to the local file
            data.write(stream.readall())

    except ResourceNotFoundError as ex:
        print("ResourceNotFoundError:", ex.message)
Code example #21
File: __init__.py Project: jrgarner/azure-dc-extract
def upload_file(filename, logger):
    # JRG - NEED TO FIGURE OUT HOW TO HANDLE GROWTH OF FILES IN SHARE
    global FS_CONNECTION_STRING, SHARE_NAME

    # strip path from passed in filename...just filename for file_path below
    filepath, just_filename = os.path.split(filename)

    # add timestamp so we don't overwrite existing version.
    now = datetime.datetime.now()
    dt = now.strftime("%Y%m%d%H%M%S")
    base_name, extension = os.path.splitext(just_filename)
    just_filename = base_name + "-" + dt + extension
    logger.info("Uploading file as (%s)", just_filename)

    logger.info("Connecting to file share...")
    # connect to fileshare
    try:
        file = ShareFileClient.from_connection_string(FS_CONNECTION_STRING,
                                                      share_name=SHARE_NAME,
                                                      file_path=just_filename)
    except (NameError) as err:
        logger.error("Failed to connect to file share (%s), exiting...",
                     FS_CONNECTION_STRING)
        logger.error(str(err))
        return False

    logger.info("Uploading file (%s)...", filename)
    # Upload a file
    try:
        with open(filename, "rb") as source:
            file.upload_file(source)
    except Exception:
        logger.error("Failed to upload file (%s), exiting...", filename)
        logger.error("ERROR: " + str(sys.exc_info()[0]))
        return False

    logger.info("File successfully uploaded (%s)", filename)
    return True
Code example #22
def get_template(conn_str: str, share_name: str,
                 template_path: str) -> Template:
    """Retrieve Jinja2 template from Azure File Share.

    Parameters
    ----------
    conn_str
        Connection string to the storage account. Typically stored in an env
        variable.
    share_name
        Name of the file share in the storage account where the template file
        is kept.
    template_path
        Full path to the template file relative to the root of the file share.
    """
    data = ShareFileClient.from_connection_string(
        conn_str=conn_str,
        share_name=share_name,
        file_path=template_path,
    ).download_file()

    template = Template(data.readall().decode("utf-8"))

    return template
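A hypothetical call site for the helper above, assuming the connection string lives in AzureWebJobsStorage and using the share and template path names shown here as placeholders:

import os

template = get_template(
    conn_str=os.environ["AzureWebJobsStorage"],    # see the docstring above
    share_name="templates",                        # hypothetical share name
    template_path="emails/confirmation.html.j2",   # hypothetical template path
)
html = template.render(pipeline_name="nightly-load")  # standard Jinja2 rendering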
Code example #23
    def download_azure_file(self, connection_string, share_name, dir_name, file_name):
        try:
            # Build the remote path
            source_file_path = dir_name + "/" + file_name

            # Add a prefix to the filename to 
            # distinguish it from the uploaded file
            dest_file_name = "DOWNLOADED-" + file_name

            # Create a ShareFileClient from a connection string
            file_client = ShareFileClient.from_connection_string(
                connection_string, share_name, source_file_path)

            print("Downloading to:", dest_file_name)

            # Open a file for writing bytes on the local system
            with open(dest_file_name, "wb") as data:
                # Download the file from Azure into a stream
                stream = file_client.download_file()
                # Write the stream to the local file
                data.write(stream.readall())

        except ResourceNotFoundError as ex:
            print("ResourceNotFoundError:", ex.message)
Code example #24
File: web.py Project: ilabservice/evsuits
def uploadFile(ipcSn, dirName, fileName, srcPath):
  file_path=ipcSn + '/'+dirName+'/' + fileName
  fc = ShareFileClient.from_connection_string(conn_str=CONNSTR, share_name=SHARENAME, file_path=file_path)
  with open(srcPath + '/' + fileName, "rb") as source_file:
    fc.upload_file(source_file)
Code example #25
def get_fileshare_client(file_path) -> ShareFileClient:
    conn_str = AZURE_STORAGE  # Connection string stored as local env. var.
    return ShareFileClient.from_connection_string(conn_str=conn_str, share_name='history', file_path=file_path)
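A brief, hypothetical usage of the helper above; the file path is an assumption, and AZURE_STORAGE is the module-level connection string the helper already relies on:

client = get_fileshare_client("runs/2021-06-01.json")   # hypothetical path within the 'history' share
payload = client.download_file().readall()              # raw bytes of the remote file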
Code example #26
def main(req: func.HttpRequest) -> func.HttpResponse:
    target_table = "PipelinePauseData"
    token = utilities.get_param(req, "token")

    table_service = utilities.setup_table_service(
        os.environ["AzureWebJobsStorage"], target_table,
    )

    # Since we can't use authentication for the API, we check as early as
    # possible whether the token for the pipeline restart is valid.
    # If it is not, we halt execution and return a 500 code.
    try:
        paused_pipeline = table_service.get_entity(
            table_name=target_table, partition_key="PauseData", row_key=token
        )
    except AzureMissingResourceHttpError as e:
        raise exceptions.HttpError(
            str(e),
            func.HttpResponse(str(e), status_code=500)
        )

    # acted_upon monitors if a token has already been used. We use it here to
    # block the second and further attempts at restarting.
    acted_upon = paused_pipeline["acted_upon"]

    has_expired = check_if_expired(
        paused_pipeline["Timestamp"], paused_pipeline["expiration_time"],
    )

    if not acted_upon and not has_expired:
        logging.info(token)

        # DefaultAzureCredential does not work when manipulating ADF. It will
        # complain about a missing session method.
        # Remember to give the contributor role to the application.
        # Azure Portal -> Subscriptions -> IAM roles
        credentials = ServicePrincipalCredentials(
            client_id=os.environ["AZURE_CLIENT_ID"],
            secret=os.environ["AZURE_CLIENT_SECRET"],
            tenant=os.environ["AZURE_TENANT_ID"],
        )

        subscription_id = os.environ["subscription_id"]
        adf_client = DataFactoryManagementClient(credentials, subscription_id)
        logging.info(adf_client)

        # The restart data is accessed via a lookup activity from within ADF
        run_response = restart_pipeline(
            adf_client=adf_client,
            resource_group=paused_pipeline["resource_group"],
            factory_name=paused_pipeline["factory_name"],
            pipeline_name=paused_pipeline["pipeline_name"],
            token=token,
        )
        logging.info(run_response)

        # After running acted_upon is set to 1
        paused_pipeline["acted_upon"] = 1
        table_service.update_entity(target_table, paused_pipeline)

        # Retrieve and display success webpage.
        confirmation_site = (
            ShareFileClient.from_connection_string(
                conn_str=os.environ["AzureWebJobsStorage"],
                share_name=paused_pipeline["share_name"],
                file_path=paused_pipeline["web_path"],
            )
            .download_file()
            .readall()
            .decode("utf-8")
        )

        return func.HttpResponse(confirmation_site, mimetype="text/html")

    else:  # already acted_upon or expired
        return func.HttpResponse("Invalid token.", status_code=500,)