Code example #1
def download_azure_blob(account_name, account_key, file_uri, download_dir):
    (blob_name, container_name) = parse_blob_uri(file_uri)
    host_base = get_host_base_from_uri(file_uri)
    download_path = os.path.join(download_dir, blob_name)
    blob_service = BlobService(account_name, account_key, host_base=host_base)
    max_retry = 3
    for retry in range(1, max_retry + 1):
        try:
            blob_service.get_blob_to_path(container_name, blob_name, download_path)
            break  # success; stop retrying
        except Exception:
            hutil.error('Failed to download Azure blob, retry = ' + str(retry) + ', max_retry = ' + str(max_retry))
            if retry != max_retry:
                hutil.log('Sleep 10 seconds')
                time.sleep(10)
            else:
                waagent.AddExtensionEvent(name=ExtensionShortName,
                                          op=Operation.Download,
                                          isSuccess=False,
                                          message="(03303)Failed to download file from Azure Storage")
                raise Exception('Failed to download azure blob: ' + blob_name)
    waagent.AddExtensionEvent(name=ExtensionShortName,
                              op=Operation.Download,
                              isSuccess=True,
                              message="(03301)Succeeded to download file from Azure Storage")
    return download_path
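These examples rely on two helpers, parse_blob_uri and get_host_base_from_uri, that are defined elsewhere in the extension source. A minimal sketch of what they might look like, assuming URIs of the form https://<account>.blob.core.windows.net/<container>/<blob> (a hypothetical reimplementation, not the project's actual code):

try:
    from urllib.parse import urlparse  # Python 3
except ImportError:
    from urlparse import urlparse  # Python 2

def parse_blob_uri(file_uri):
    # 'https://myaccount.blob.core.windows.net/mycontainer/dir/blob.txt'
    # -> ('dir/blob.txt', 'mycontainer')
    path = urlparse(file_uri).path.lstrip('/')
    container_name, _, blob_name = path.partition('/')
    return blob_name, container_name

def get_host_base_from_uri(file_uri):
    # Everything from the first dot onward, e.g. '.blob.core.windows.net',
    # which is the form the legacy BlobService host_base parameter expects.
    netloc = urlparse(file_uri).netloc
    return netloc[netloc.find('.'):]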
Code example #2
def download_azure_blob(account_name, account_key, file_uri, download_dir):
    waagent.AddExtensionEvent(name=ExtensionShortName, op="EnableInProgress", isSuccess=True, message="Downloading from azure blob")
    try:
        (blob_name, container_name) = parse_blob_uri(file_uri)
        host_base = get_host_base_from_uri(file_uri)
        
        download_path = os.path.join(download_dir, blob_name)
        blob_service = BlobService(account_name, account_key, host_base=host_base)
    except Exception as e:
        waagent.AddExtensionEvent(name=ExtensionShortName, op='DownloadInProgress', isSuccess=False, message='Enable failed with the azure storage error : {0}, stack trace: {1}'.format(str(e), traceback.format_exc()))
        hutil.error('Failed to enable the extension with error: %s, stack trace: %s' %(str(e), traceback.format_exc()))
        hutil.do_exit(1, 'Enable', 'error', '1', 'Enable failed: {0}'.format(e))
    
    max_retry = 3
    for retry in range(1, max_retry + 1):
        try:
            blob_service.get_blob_to_path(container_name, blob_name, download_path)
            break  # success; stop retrying
        except Exception:
            hutil.error('Failed to download Azure blob, retry = ' + str(retry) + ', max_retry = ' + str(max_retry))
            if retry != max_retry:
                hutil.log('Sleep 10 seconds')
                time.sleep(10)
            else:
                waagent.AddExtensionEvent(name=ExtensionShortName,
                                          op=Operation.Download,
                                          isSuccess=False,
                                          message="(03303)Failed to download file from Azure Storage")
                raise Exception('Failed to download azure blob: ' + blob_name)
    waagent.AddExtensionEvent(name=ExtensionShortName,
                              op=Operation.Download,
                              isSuccess=True,
                              message="(03301)Succeeded to download file from Azure Storage")
    return download_path
Code example #3
File: dsc.py Project: vityagi/azure-linux-extensions
def download_azure_blob(account_name, account_key, file_uri, download_dir):
    (blob_name, container_name) = parse_blob_uri(file_uri)
    host_base = get_host_base_from_uri(file_uri)
    download_path = os.path.join(download_dir, blob_name)
    blob_service = BlobService(account_name, account_key, host_base=host_base)
    max_retry = 3
    for retry in range(1, max_retry + 1):
        try:
            blob_service.get_blob_to_path(container_name, blob_name, download_path)
            break  # success; stop retrying
        except Exception:
            hutil.error('Failed to download Azure blob, retry = ' + str(retry) + ', max_retry = ' + str(max_retry))
            if retry != max_retry:
                hutil.log('Sleep 10 seconds')
                time.sleep(10)
            else:
                waagent.AddExtensionEvent(name=ExtensionShortName,
                                          op=Operation.Download,
                                          isSuccess=False,
                                          message="(03303)Failed to download file from Azure Storage")
                raise Exception('Failed to download azure blob: ' + blob_name)
    waagent.AddExtensionEvent(name=ExtensionShortName,
                              op=Operation.Download,
                              isSuccess=True,
                              message="(03301)Succeeded to download file from Azure Storage")
    return download_path
Code example #4
File: dsc.py Project: axelgMS/azure-linux-extensions
def download_azure_blob(account_name, account_key, file_uri, download_dir):
    waagent.AddExtensionEvent(name=ExtensionShortName,
                              op="EnableInProgress",
                              isSuccess=True,
                              message="Downloading from azure blob")
    try:
        (blob_name, container_name) = parse_blob_uri(file_uri)
        host_base = get_host_base_from_uri(file_uri)

        blob_parent_path = os.path.join(download_dir,
                                        os.path.dirname(blob_name))
        if not os.path.exists(blob_parent_path):
            os.makedirs(blob_parent_path)

        download_path = os.path.join(download_dir, blob_name)
        blob_service = BlobService(account_name,
                                   account_key,
                                   host_base=host_base)
    except Exception as e:
        waagent.AddExtensionEvent(
            name=ExtensionShortName,
            op='DownloadInProgress',
            isSuccess=False,
            message=
            'Enable failed with the azure storage error : {0}, stack trace: {1}'
            .format(str(e), traceback.format_exc()))
        hutil.error(
            'Failed to enable the extension with error: %s, stack trace: %s' %
            (str(e), traceback.format_exc()))
        hutil.do_exit(1, 'Enable', 'error', '1',
                      'Enable failed: {0}'.format(e))

    max_retry = 3
    for retry in range(1, max_retry + 1):
        try:
            blob_service.get_blob_to_path(container_name, blob_name,
                                          download_path)
            break  # success; stop retrying
        except Exception:
            hutil.error('Failed to download Azure blob, retry = ' +
                        str(retry) + ', max_retry = ' + str(max_retry))
            if retry != max_retry:
                hutil.log('Sleep 10 seconds')
                time.sleep(10)
            else:
                waagent.AddExtensionEvent(
                    name=ExtensionShortName,
                    op=Operation.Download,
                    isSuccess=False,
                    message="(03303)Failed to download file from Azure Storage"
                )
                raise Exception('Failed to download azure blob: ' + blob_name)
    waagent.AddExtensionEvent(
        name=ExtensionShortName,
        op=Operation.Download,
        isSuccess=True,
        message="(03301)Succeeded to download file from Azure Storage")
    return download_path
Code example #5
def download_file(container, path, dest):
    blob_service = BlobService(account_name=storage_name, account_key=storage_key)
    loop = 0
    while True:
        try:
            blob_service.get_blob_to_path(container, path, dest)
            break
        except azure.http.HTTPError:
            loop = loop + 1
            if loop >= 3:
                return  # give up silently after three failed attempts
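The silent return above drops the error after three attempts. A variant that pauses between retries and surfaces the final failure might look like this (a sketch; storage_name, storage_key, and the azure import are assumed to be module-level, as above):

import time

def download_file_or_raise(container, path, dest, max_retry=3):
    blob_service = BlobService(account_name=storage_name, account_key=storage_key)
    for attempt in range(1, max_retry + 1):
        try:
            blob_service.get_blob_to_path(container, path, dest)
            return dest
        except azure.http.HTTPError:
            if attempt == max_retry:
                raise  # let the caller see the final failure
            time.sleep(10)  # brief pause before retrying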
Code example #6
def download_blob(storage_account_name, storage_account_key, blob_uri, seqNo, command):
    container_name = get_container_name_from_uri(blob_uri)
    blob_name = get_blob_name_from_uri(blob_uri)
    download_dir = get_download_directory(seqNo)
    download_path = os.path.join(download_dir, blob_name)
    # The guest agent already ensures extensions are enabled one after another, so blob downloads will not conflict.
    blob_service = BlobService(storage_account_name, storage_account_key)
    try:
        blob_service.get_blob_to_path(container_name, blob_name, download_path)
    except Exception as e:
        hutil.error("Failed to download blob with uri:" + blob_uri + " with error:" + str(e))
        raise
Code example #7
def download_blob(storage_account_name, storage_account_key, blob_uri, seqNo,
                  command, hutil):
    container_name = get_container_name_from_uri(blob_uri)
    blob_name = get_blob_name_from_uri(blob_uri)
    download_dir = get_download_directory(seqNo)
    download_path = os.path.join(download_dir, blob_name)
    # The guest agent already ensures extensions are enabled one after another, so blob downloads will not conflict.
    blob_service = BlobService(storage_account_name, storage_account_key)
    try:
        blob_service.get_blob_to_path(container_name, blob_name, download_path)
    except Exception as e:
        hutil.error("Failed to download blob with uri:" + blob_uri +
                    " with error:" + str(e))
        raise
Code example #8
def download_blob(storage_account_name, storage_account_key,
                  blob_uri, dst, hutil):
    seqNo = hutil.get_seq_no()
    container_name = get_container_name_from_uri(blob_uri)
    blob_name = get_blob_name_from_uri(blob_uri)
    download_dir = prepare_download_dir(seqNo)
    download_path = os.path.join(download_dir, dst)
    # The guest agent already ensures extensions are enabled one after another,
    # so blob downloads will not conflict.
    blob_service = BlobService(storage_account_name, storage_account_key)
    try:
        blob_service.get_blob_to_path(container_name, blob_name, download_path)
    except Exception as e:
        hutil.log_and_syslog(logging.ERROR, ("Failed to download blob with uri:{0} "
                             "with error {1}").format(blob_uri, e))
        raise
Code example #9
File: app.py Project: engineershreyas/SquadShots
def download():
    blob_service = BlobService(account_name='squadshots', account_key='UgxaWKAKv2ZvhHrPt0IHi4EQedPpZw35r+RXkAYB2eICPrG3TjSwk2G8gUzG/PNDDTV+4CVCYWCvZSiad5xMQQ==')
    try:
        blob_service.get_blob_to_path('album', 'image', 'static/output.png')
    except Exception as e:
        print(e)
    blobs = blob_service.list_blobs('album', include='metadata')
    for blob in blobs:
        if blob.metadata is not None:
            for key in blob.metadata:
                if blob.metadata[key] == session['username']:
                    blob_service.get_blob_to_path('album', blob.name, 'static/output.png')
    for i in blob_service.list_containers():
        print("This container is " + i.name)

    return render_template('album.html', filename="static/output.png")
Code example #10
def download_azure_blob(account_name, account_key, container_name, blob_name, download_path):
    blob_service = BlobService(account_name, account_key)
    max_retry = 3
    for retry in range(1, max_retry + 1):
        try:
            blob_service.get_blob_to_path(container_name, blob_name, download_path)
            break  # success; stop retrying
        except Exception:
            hutil.error('Failed to download Azure blob, retry = ' + str(retry) + ', max_retry = ' + str(max_retry))
            if retry != max_retry:
                hutil.log('Sleep 10 seconds')
                time.sleep(10)
            else:
                waagent.AddExtensionEvent(name=ExtensionShortName,
                                          op=DownloadOp,
                                          isSuccess=False,
                                          message="(03303)Failed to download file from Azure Storage")
                raise Exception('Failed to download azure blob: ' + blob_name)
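Unlike the URI-based variants above, this version takes the container and blob names directly, so a caller might invoke it as follows (hypothetical names and paths):

download_azure_blob('myaccount', 'mykey', 'mycontainer',
                    'scripts/install.sh', '/var/lib/waagent/install.sh')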
Code example #11
File: azure.py Project: Ormod/pghoard
class AzureTransfer(BaseTransfer):
    def __init__(self, account_name, account_key, container_name):
        BaseTransfer.__init__(self)
        self.account_name = account_name
        self.account_key = account_key
        self.container_name = container_name
        self.conn = BlobService(account_name=self.account_name, account_key=self.account_key)
        self.container = self.get_or_create_container(self.container_name)
        self.log.debug("AzureTransfer initialized")

    def get_metadata_for_key(self, key):
        key = fix_path(key)
        return self.list_path(key)[0]['metadata']

    def list_path(self, path):
        return_list = []
        path = fix_path(path)
        self.log.info("Asking for listing of: %r", path)
        for r in self.conn.list_blobs(self.container_name, prefix=path, delimiter="/",
                                      include="metadata"):
            entry = {"name": r.name, "size": r.properties.content_length,
                     "last_modified": dateutil.parser.parse(r.properties.last_modified),
                     "metadata": r.metadata}
            return_list.append(entry)
        return return_list

    def delete_key(self, key_name):
        key_name = fix_path(key_name)
        self.log.debug("Deleting key: %r", key_name)
        return self.conn.delete_blob(self.container_name, key_name)

    def get_contents_to_file(self, obj_key, filepath_to_store_to):
        obj_key = fix_path(obj_key)
        self.log.debug("Starting to fetch the contents of: %r to: %r", obj_key, filepath_to_store_to)
        return self.conn.get_blob_to_path(self.container_name, obj_key, filepath_to_store_to)

    def get_contents_to_string(self, obj_key):
        obj_key = fix_path(obj_key)
        self.log.debug("Starting to fetch the contents of: %r", obj_key)
        return self.conn.get_blob_to_bytes(self.container_name, obj_key), self.get_metadata_for_key(obj_key)

    def store_file_from_memory(self, key, memstring, metadata=None):
        # For whatever reason Azure requires all values to be strings at the point of sending
        metadata_to_send = dict((str(k), str(v)) for k, v in (metadata or {}).items())
        self.conn.put_block_blob_from_bytes(self.container_name, key, memstring,
                                            x_ms_meta_name_values=metadata_to_send)

    def store_file_from_disk(self, key, filepath, metadata=None):
        # For whatever reason Azure requires all values to be strings at the point of sending
        metadata_to_send = dict((str(k), str(v)) for k, v in (metadata or {}).items())
        self.conn.put_block_blob_from_path(self.container_name, key, filepath,
                                           x_ms_meta_name_values=metadata_to_send)

    def get_or_create_container(self, container_name):
        start_time = time.time()
        self.conn.create_container(container_name)
        self.log.debug("Got/Created container: %r successfully, took: %.3fs", container_name, time.time() - start_time)
        return container_name
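A short usage sketch for this class (hypothetical account, key, and container names; it needs a reachable storage account, plus pghoard's fix_path helper and the imports used above):

transfer = AzureTransfer("myaccount", "mykey", "pghoard-backup")
transfer.store_file_from_disk("basebackup/base.tar", "/tmp/base.tar",
                              metadata={"compression": "snappy"})
print(transfer.get_metadata_for_key("basebackup/base.tar"))
transfer.get_contents_to_file("basebackup/base.tar", "/tmp/restored.tar")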
Code example #12
def download_and_save_blob(storage_account_name, storage_account_key, blob_uri,
                           download_dir, hutil):
    container_name = get_container_name_from_uri(blob_uri, hutil)
    blob_name = get_blob_name_from_uri(blob_uri, hutil)
    host_base = get_host_base_from_uri(blob_uri)
    # If blob_name is a path, extract the file_name
    last_sep = blob_name.rfind('/')
    if last_sep != -1:
        file_name = blob_name[last_sep + 1:]
    else:
        file_name = blob_name
    download_path = os.path.join(download_dir, file_name)
    # The guest agent already ensures extensions are enabled one after another,
    # so blob downloads will not conflict.
    blob_service = BlobService(storage_account_name,
                               storage_account_key,
                               host_base=host_base)
    blob_service.get_blob_to_path(container_name, blob_name, download_path)
    return blob_name, container_name, host_base, download_path
Code example #13
def download_and_save_blob(storage_account_name,
                           storage_account_key,
                           blob_uri,
                           download_dir):
    container_name = get_container_name_from_uri(blob_uri)
    blob_name = get_blob_name_from_uri(blob_uri)
    host_base = get_host_base_from_uri(blob_uri)
    # If blob_name is a path, extract the file_name
    last_sep = blob_name.rfind('/')
    if last_sep != -1:
        file_name = blob_name[last_sep+1:]
    else:
        file_name = blob_name
    download_path = os.path.join(download_dir, file_name)
    # The guest agent already ensures extensions are enabled one after another,
    # so blob downloads will not conflict.
    blob_service = BlobService(storage_account_name,
                               storage_account_key,
                               host_base=host_base)
    blob_service.get_blob_to_path(container_name, blob_name, download_path)
    return (blob_name, container_name, host_base, download_path)
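The returned tuple lets the caller keep the parsed pieces for logging or follow-up calls, e.g. (hypothetical URI and credentials):

(blob_name, container_name, host_base, download_path) = download_and_save_blob(
    'myaccount', 'mykey',
    'https://myaccount.blob.core.windows.net/scripts/install.sh',
    '/tmp')
print('Downloaded %s from container %s to %s' % (blob_name, container_name, download_path))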
Code example #14
def download_blob(storage_account_name, storage_account_key, 
                  blob_uri, seqNo, command, hutil):
    container_name = get_container_name_from_uri(blob_uri)
    blob_name = get_blob_name_from_uri(blob_uri)
    download_dir = prepare_download_dir(seqNo)
    # if blob_name is a path, extract the file_name
    last_sep = blob_name.rfind('/')
    if last_sep != -1:
        file_name = blob_name[last_sep+1:]
    else:
        file_name = blob_name
    download_path = os.path.join(download_dir, file_name)
    # The guest agent already ensures extensions are enabled one after another,
    # so blob downloads will not conflict.
    blob_service = BlobService(storage_account_name, storage_account_key)
    try:
        blob_service.get_blob_to_path(container_name, blob_name, download_path)
    except Exception as e:
        hutil.error(("Failed to download blob with uri:{0} "
                     "with error {1}").format(blob_uri, e))
        raise
Code example #15
def download_azure_blob(account_name, account_key, container_name, blob_name,
                        download_path):
    blob_service = BlobService(account_name, account_key)
    max_retry = 3
    for retry in range(1, max_retry + 1):
        try:
            blob_service.get_blob_to_path(container_name, blob_name,
                                          download_path)
            break  # success; stop retrying
        except Exception:
            hutil.error('Failed to download Azure blob, retry = ' +
                        str(retry) + ', max_retry = ' + str(max_retry))
            if retry != max_retry:
                hutil.log('Sleep 10 seconds')
                time.sleep(10)
            else:
                waagent.AddExtensionEvent(
                    name=ExtensionShortName,
                    op=DownloadOp,
                    isSuccess=False,
                    message="(03303)Failed to download file from Azure Storage"
                )
                raise Exception('Failed to download azure blob: ' + blob_name)
Code example #16
File: azure.py Project: c2h5oh/pghoard
class AzureTransfer(BaseTransfer):
    def __init__(self, account_name, account_key, container_name, prefix=None):
        # NOTE: Azure wants all paths to start with a slash
        prefix = "/{}".format(prefix.lstrip("/") if prefix else "")
        super().__init__(prefix=prefix)
        self.account_name = account_name
        self.account_key = account_key
        self.container_name = container_name
        self.conn = BlobService(account_name=self.account_name, account_key=self.account_key)
        self.container = self.get_or_create_container(self.container_name)
        self.log.debug("AzureTransfer initialized")
        # XXX: AzureTransfer isn't actively tested and its error handling is probably lacking
        self.log.warning("AzureTransfer is experimental and has not been thoroughly tested")

    def get_metadata_for_key(self, key):
        key = self.format_key_for_backend(key)
        return self._list_blobs(key)[0]["metadata"]

    def _metadata_for_key(self, key):
        return self._list_blobs(key)[0]["metadata"]

    def list_path(self, key):
        path = self.format_key_for_backend(key, trailing_slash=True)
        return self._list_blobs(path)

    def _list_blobs(self, path):
        self.log.debug("Listing path %r", path)
        items = self.conn.list_blobs(self.container_name, prefix=path, delimiter="/", include="metadata")
        result = []
        for item in items:
            result.append({
                "last_modified": dateutil.parser.parse(item.properties.last_modified),
                "metadata": item.metadata,
                "name": self.format_key_from_backend(item.name),
                "size": item.properties.content_length,
            })
        return result

    def delete_key(self, key):
        key = self.format_key_for_backend(key)
        self.log.debug("Deleting key: %r", key)
        return self.conn.delete_blob(self.container_name, key)

    def get_contents_to_file(self, key, filepath_to_store_to):
        key = self.format_key_for_backend(key)
        self.log.debug("Starting to fetch the contents of: %r to: %r", key, filepath_to_store_to)
        return self.conn.get_blob_to_path(self.container_name, key, filepath_to_store_to)

    def get_contents_to_fileobj(self, key, fileobj_to_store_to):
        key = self.format_key_for_backend(key)
        self.log.debug("Starting to fetch the contents of: %r", key)
        return self.conn.get_blob_to_file(self.container_name, key, fileobj_to_store_to)

    def get_contents_to_string(self, key):
        key = self.format_key_for_backend(key)
        self.log.debug("Starting to fetch the contents of: %r", key)
        return self.conn.get_blob_to_bytes(self.container_name, key), self._metadata_for_key(key)

    def store_file_from_memory(self, key, memstring, metadata=None):
        key = self.format_key_for_backend(key)
        # Azure requires all metadata keys and values to be strings
        metadata_to_send = {str(k): str(v) for k, v in (metadata or {}).items()}
        self.conn.put_block_blob_from_bytes(self.container_name, key, memstring,
                                            x_ms_meta_name_values=metadata_to_send)

    def store_file_from_disk(self, key, filepath, metadata=None, multipart=None):
        key = self.format_key_for_backend(key)
        # Azure requires all metadata keys and values to be strings
        metadata_to_send = {str(k): str(v) for k, v in (metadata or {}).items()}
        self.conn.put_block_blob_from_path(self.container_name, key, filepath,
                                           x_ms_meta_name_values=metadata_to_send)

    def get_or_create_container(self, container_name):
        start_time = time.time()
        self.conn.create_container(container_name)
        self.log.debug("Got/Created container: %r successfully, took: %.3fs", container_name, time.time() - start_time)
        return container_name
Code example #17
File: app.py Project: engineershreyas/SquadShots
def upload():

    file = request.files['fileInput']
    print("File is " + file.filename)


    if file:
        data = file.read()


        blob_service = BlobService(account_name='squadshots', account_key='UgxaWKAKv2ZvhHrPt0IHi4EQedPpZw35r+RXkAYB2eICPrG3TjSwk2G8gUzG/PNDDTV+4CVCYWCvZSiad5xMQQ==')
        blob_service.create_container('album')

        blob_service.put_block_blob_from_bytes(
            'album',
            file.filename + "_blob",
            data,
            x_ms_blob_content_type='image/png'
        )

        un = session.get('username')  # avoid a NameError when no user is logged in
        if un is None:
            print("not in session")

        blob_service.set_blob_metadata(container_name="album",
                                   blob_name=file.filename + "_blob",
                                   x_ms_meta_name_values={'metaun': un})

        blob_service.get_blob_to_path('album', file.filename + "_blob", 'static/output.png')
        f = open('input_person.png', 'wb')  # binary mode: the uploaded data is bytes
        f.write(data)
        f.close()


        [X, y] = read_images(OUTPUT_DIRECTORY, (256, 256))
        # Convert labels to 32bit integers. This is a workaround for 64bit machines.
        y = np.asarray(y, dtype=np.int32)

        # Create the Eigenfaces model.
        model = cv2.createEigenFaceRecognizer()
        # Learn the model. Remember our function returns Python lists,
        # so we use np.asarray to turn them into NumPy lists to make
        # the OpenCV wrapper happy:
        model.train(np.asarray(X), np.asarray(y))

        # Save the model for later use
        model.save("eigenModel.xml")



        # Create an Eigenface recogniser with a confidence threshold
        t = float(100000)
        model = cv2.createEigenFaceRecognizer(threshold=t)

        # Load the model
        model.load("eigenModel.xml")

        # Read the image we're looking for
        try:
            sampleImage = cv2.imread('static/output.png', cv2.IMREAD_GRAYSCALE)
            if sampleImage is not None:  # '!=' misbehaves on NumPy arrays
                sampleImage = cv2.resize(sampleImage, (256, 256))
            else:
                print("sample image is null")
        except IOError:
            print("IO error")

        # Look through the model and find the face it matches
        [p_label, p_confidence] = model.predict(sampleImage)

        # Print the confidence levels
        print("Predicted label = %d (confidence=%.2f)" % (p_label, p_confidence))

        # If the model found something, print the file path
        if p_label > -1:
            count = 0
            for dirname, dirnames, filenames in os.walk(OUTPUT_DIRECTORY):
                for subdirname in dirnames:
                    subject_path = os.path.join(dirname, subdirname)
                    if count == p_label:
                        for filename in os.listdir(subject_path):
                            print("subject path = " + subject_path)
                    count = count + 1

    return "uploaded"
Code example #18
File: toillib.py Project: adamnovak/hgvm
class AzureIOStore(IOStore):
    """
    A class that lets you get input from and send output to Azure Storage.
    
    """
    
    def __init__(self, account_name, container_name, name_prefix=""):
        """
        Make a new AzureIOStore that reads from and writes to the given
        container in the given account, adding the given prefix to keys. All
        paths will be interpreted as keys or key prefixes.
        
        If the name prefix does not end with a trailing slash, and is not empty,
        one will be added automatically.
        
        Account keys are retrieved from the AZURE_ACCOUNT_KEY environment
        variable or from the ~/.toilAzureCredentials file, as in Toil itself.
        
        """
        
        # Make sure azure libraries actually loaded
        assert(have_azure)
        
        self.account_name = account_name
        self.container_name = container_name
        self.name_prefix = name_prefix
        
        if self.name_prefix != "" and not self.name_prefix.endswith("/"):
            # Make sure it has the trailing slash required.
            self.name_prefix += "/"
        
        # Sneak into Toil and use the same keys it uses
        self.account_key = toil.jobStores.azureJobStore._fetchAzureAccountKey(
            self.account_name)
            
        # This will hold our Azure blob store connection
        self.connection = None
        
    def __getstate__(self):
        """
        Return the state to use for pickling. We don't want to try and pickle
        an open Azure connection.
        """
     
        return (self.account_name, self.account_key, self.container_name, 
            self.name_prefix)
        
    def __setstate__(self, state):
        """
        Set up after unpickling.
        """
        
        self.account_name = state[0]
        self.account_key = state[1]
        self.container_name = state[2]
        self.name_prefix = state[3]
        
        self.connection = None
        
    def __connect(self):
        """
        Make sure we have an Azure connection, and set one up if we don't.
        """
        
        if self.connection is None:
            RealTimeLogger.get().debug("Connecting to account {}, using "
                "container {} and prefix {}".format(self.account_name,
                self.container_name, self.name_prefix))
        
            # Connect to the blob service where we keep everything
            self.connection = BlobService(
                account_name=self.account_name, account_key=self.account_key)
            
            
    def read_input_file(self, input_path, local_path):
        """
        Get input from Azure.
        """
        
        self.__connect()
        
        
        RealTimeLogger.get().debug("Loading {} from AzureIOStore".format(
            input_path))
        
        # Download the blob. This is known to be synchronous, although it can
        # call a callback during the process.
        self.connection.get_blob_to_path(self.container_name,
            self.name_prefix + input_path, local_path)
            
    def list_input_directory(self, input_path, recursive=False):
        """
        Loop over fake /-delimited directories on Azure. The prefix may or may
        not have a trailing slash; if not, one will be added automatically.
        
        Returns the names of files and fake directories in the given input fake
        directory, non-recursively.
        
        """
        
        self.__connect()
        
        RealTimeLogger.get().info("Enumerating {} from AzureIOStore".format(
            input_path))
        
        # Work out what the directory name to list is
        fake_directory = self.name_prefix + input_path
        
        if fake_directory != "" and not fake_directory.endswith("/"):
            # We have a nonempty prefix, and we need to end it with a slash
            fake_directory += "/"
        
        # This will hold the marker that we need to send back to get the next
        # page, if there is one. See <http://stackoverflow.com/a/24303682>
        marker = None
        
        # This holds the subdirectories we found; we yield each exactly once if
        # we aren't recursing.
        subdirectories = set()
        
        while True:
        
            # Get the results from Azure. We skip the delimiter since it doesn't
            # seem to have the placeholder entries it's supposed to.
            result = self.connection.list_blobs(self.container_name, 
                prefix=fake_directory, marker=marker)
                
            for blob in result:
                # Yield each result's blob name, but directory names only once
                
                # Drop the common prefix
                relative_path = blob.name[len(fake_directory):]
                
                if (not recursive) and "/" in relative_path:
                    # We found a file in a subdirectory, and we aren't supposed
                    # to be recursing.
                    subdirectory, _ = relative_path.split("/", 1)
                    
                    if subdirectory not in subdirectories:
                        # It's a new subdirectory. Yield and remember it
                        subdirectories.add(subdirectory)
                        
                        yield subdirectory
                else:
                    # We found an actual file  
                    yield relative_path
                
            # Save the marker
            marker = result.next_marker
                
            if not marker:
                break 
    
    def write_output_file(self, local_path, output_path):
        """
        Write output to Azure. Will create the container if necessary.
        """
        
        self.__connect()
        
        RealTimeLogger.get().debug("Saving {} to AzureIOStore".format(
            output_path))
        
        try:
            # Make the container
            self.connection.create_container(self.container_name)
        except azure.WindowsAzureConflictError:
            # The container probably already exists
            pass
        
        # Upload the blob (synchronously)
        # TODO: catch no container error here, make the container, and retry
        self.connection.put_block_blob_from_path(self.container_name,
            self.name_prefix + output_path, local_path)
            
    def exists(self, path):
        """
        Returns true if the given input or output file exists in Azure already.
        
        """
        
        self.__connect()
        
        marker = None
        
        while True:
        
            # Get the results from Azure.
            result = self.connection.list_blobs(self.container_name, 
                prefix=self.name_prefix + path, marker=marker)
                
            for blob in result:
                # Look at each blob
                
                if blob.name == self.name_prefix + path:
                    # Found it
                    return True
                
            # Save the marker
            marker = result.next_marker
                
            if not marker:
                break 
        
        return False
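A usage sketch (hypothetical names; the account key is resolved from AZURE_ACCOUNT_KEY or ~/.toilAzureCredentials, as the constructor's docstring describes):

store = AzureIOStore("myaccount", "mycontainer", name_prefix="run1")
store.write_output_file("/tmp/sample1.vcf", "calls/sample1.vcf")
if store.exists("calls/sample1.vcf"):
    store.read_input_file("calls/sample1.vcf", "/tmp/restored.vcf")
for name in store.list_input_directory("calls"):
    print(name)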
Code example #19
File: azurebackend.py Project: henrysher/duplicity
class AzureBackend(duplicity.backend.Backend):
    """
    Backend for Azure Blob Storage Service
    """
    def __init__(self, parsed_url):
        duplicity.backend.Backend.__init__(self, parsed_url)

        # Import Microsoft Azure Storage SDK for Python library.
        try:
            import azure
            import azure.storage
            if hasattr(azure.storage, 'BlobService'):
                # v0.11.1 and below
                from azure.storage import BlobService
                self.AzureMissingResourceError = azure.WindowsAzureMissingResourceError
                self.AzureConflictError = azure.WindowsAzureConflictError
            else:
                # v1.0.0 and above
                import azure.storage.blob
                if hasattr(azure.storage.blob, 'BlobService'):
                    from azure.storage.blob import BlobService
                else:
                    from azure.storage.blob.blockblobservice import BlockBlobService as BlobService
                self.AzureMissingResourceError = azure.common.AzureMissingResourceHttpError
                self.AzureConflictError = azure.common.AzureConflictHttpError
        except ImportError as e:
            raise BackendException("""\
Azure backend requires Microsoft Azure Storage SDK for Python (https://pypi.python.org/pypi/azure-storage/).
Exception: %s""" % str(e))

        # TODO: validate container name
        self.container = parsed_url.path.lstrip('/')

        if 'AZURE_ACCOUNT_NAME' not in os.environ:
            raise BackendException('AZURE_ACCOUNT_NAME environment variable not set.')

        if 'AZURE_ACCOUNT_KEY' in os.environ:
            if 'AZURE_ENDPOINT_SUFFIX' in os.environ:
                self.blob_service = BlobService(account_name=os.environ['AZURE_ACCOUNT_NAME'],
                                                account_key=os.environ['AZURE_ACCOUNT_KEY'],
                                                endpoint_suffix=os.environ['AZURE_ENDPOINT_SUFFIX'])
            else:
                self.blob_service = BlobService(account_name=os.environ['AZURE_ACCOUNT_NAME'],
                                                account_key=os.environ['AZURE_ACCOUNT_KEY'])
            self._create_container()
        elif 'AZURE_SHARED_ACCESS_SIGNATURE' in os.environ:
            if 'AZURE_ENDPOINT_SUFFIX' in os.environ:
                self.blob_service = BlobService(account_name=os.environ['AZURE_ACCOUNT_NAME'],
                                                sas_token=os.environ['AZURE_SHARED_ACCESS_SIGNATURE'],
                                                endpoint_suffix=os.environ['AZURE_ENDPOINT_SUFFIX'])
            else:
                self.blob_service = BlobService(account_name=os.environ['AZURE_ACCOUNT_NAME'],
                                                sas_token=os.environ['AZURE_SHARED_ACCESS_SIGNATURE'])
        else:
            raise BackendException(
                'Neither AZURE_ACCOUNT_KEY nor AZURE_SHARED_ACCESS_SIGNATURE environment variable is set.')

        if globals.azure_max_single_put_size:
            # check if we use azure-storage>=0.30.0
            try:
                _ = self.blob_service.MAX_SINGLE_PUT_SIZE
                self.blob_service.MAX_SINGLE_PUT_SIZE = globals.azure_max_single_put_size
            # fallback for azure-storage<0.30.0
            except AttributeError:
                self.blob_service._BLOB_MAX_DATA_SIZE = globals.azure_max_single_put_size

        if globals.azure_max_block_size:
            # check if we use azure-storage>=0.30.0
            try:
                _ = self.blob_service.MAX_BLOCK_SIZE
                self.blob_service.MAX_BLOCK_SIZE = globals.azure_max_block_size
            # fallback for azure-storage<0.30.0
            except AttributeError:
                self.blob_service._BLOB_MAX_CHUNK_DATA_SIZE = globals.azure_max_block_size

    def _create_container(self):
        try:
            self.blob_service.create_container(self.container, fail_on_exist=True)
        except self.AzureConflictError:
            # Indicates that the resource could not be created because it already exists.
            pass
        except Exception as e:
            log.FatalError("Could not create Azure container: %s"
                           % unicode(e.message).split('\n', 1)[0],
                           log.ErrorCode.connection_failed)

    def _put(self, source_path, remote_filename):
        kwargs = {}
        if globals.azure_max_connections:
            kwargs['max_connections'] = globals.azure_max_connections

        # https://azure.microsoft.com/en-us/documentation/articles/storage-python-how-to-use-blob-storage/#upload-a-blob-into-a-container
        try:
            self.blob_service.create_blob_from_path(self.container, remote_filename, source_path.name, **kwargs)
        except AttributeError:  # Old versions use a different method name
            self.blob_service.put_block_blob_from_path(self.container, remote_filename, source_path.name, **kwargs)

    def _get(self, remote_filename, local_path):
        # https://azure.microsoft.com/en-us/documentation/articles/storage-python-how-to-use-blob-storage/#download-blobs
        self.blob_service.get_blob_to_path(self.container, remote_filename, local_path.name)

    def _list(self):
        # https://azure.microsoft.com/en-us/documentation/articles/storage-python-how-to-use-blob-storage/#list-the-blobs-in-a-container
        blobs = []
        marker = None
        while True:
            batch = self.blob_service.list_blobs(self.container, marker=marker)
            blobs.extend(batch)
            if not batch.next_marker:
                break
            marker = batch.next_marker
        return [blob.name for blob in blobs]

    def _delete(self, filename):
        # http://azure.microsoft.com/en-us/documentation/articles/storage-python-how-to-use-blob-storage/#delete-blobs
        self.blob_service.delete_blob(self.container, filename)

    def _query(self, filename):
        prop = self.blob_service.get_blob_properties(self.container, filename)
        try:
            info = {'size': int(prop.properties.content_length)}
        except AttributeError:
            # old versions directly returned the properties
            info = {'size': int(prop['content-length'])}
        return info

    def _error_code(self, operation, e):
        if isinstance(e, self.AzureMissingResourceError):
            return log.ErrorCode.backend_not_found
Code example #20
File: sys.py Project: gomes-/alx
class SAzure(SyncStorage):
    def __init__(self):
        super().__init__()
        self.msg_key_na = _('Key not available')
        try:
            import alxlib.key

            key = alxlib.key.Key()
            if os.path.isfile(key.get_path()):
                sys.path.insert(0, key.get_dir())

                import alxkey

                self.key = alxkey.alxkey_azure
                """self.blob = BlobService(account_name=self.key['AZURE_STORAGE_ACCOUNT_NAME'],
                                        account_key=self.key['AZURE_ACCESS_KEY'])"""
            else:
                # raise (self.msg_key_na)
                self.key = None
        except:
            pass
            # raise (self.msg_key_na)

    def connect(self):
        try:

            self.blob = BlobService(account_name=self.key['AZURE_STORAGE_ACCOUNT_NAME'],
                                    account_key=self.key['AZURE_ACCESS_KEY'])

            return self.blob.list_containers(maxresults=1)

        except:
            return None

    def connect_blob(self, az_account_name=None, az_account_key=None):

        try:
            if az_account_name is not None:
                self.key['AZURE_STORAGE_ACCOUNT_NAME'] = az_account_name
                self.key['AZURE_ACCESS_KEY'] = az_account_key

            return self.connect()

        except:
            return None

    def path_clean(self, path: str):
        try:
            i = path.index("//") + 2
            self.container = path[0:i]
            if path[len(path) - 1] != "/":
                path += "/"

            return path[i:]
        except:
            print(_("Bad Path"))
            exit(1)

    def spath(self, container, root, b):
        spath = SyncPath()
        spath.BasePath = container
        if b.name.endswith("/"):
            spath.IsDir = True
        else:
            spath.IsFile = True
        spath.AbsPath = b.name
        if len(root) > 0:
            spath.SPath = b.name[len(root) - 1:]
        else:
            spath.SPath = b.name
        spath.Size = b.properties.content_length
        import alxlib.time_help

        spath.ModifiedTS = alxlib.time_help.to_timestamp(b.properties.last_modified)
        spath.MD5 = b.properties.content_md5
        spath.sys = "azure"
        return spath

    def path_split(self, path: str):
        try:
            list = path.split("/")
            container = list[0]
            uri = ""
            if len(list) > 1:
                uri = "/".join(map(str, list[1:]))

            return container, uri
        except:
            print(_("Bad path"))
            exit(1)

    def path_list_blobs(self, container, uri):

        try:
            if len(uri)>0:
                blobs = self.blob.list_blobs(container, prefix=uri)
            else:
                blobs = self.blob.list_blobs(container)


            """for blob in blobs:
                print(blob.properties.__dict__)
                print(blob.name)
                print(blob.url)"""
            return blobs
        except Exception as e:
            print(_("Bad connection"))
            logging.warning("container {0}, path {1}".format(container, uri))
            exit(1)

    def path_list(self, path):
        try:
            logging.debug("path_list {0}".format(path))

            container, uri = self.path_split(path)
            logging.debug("Container: {0}, Uri: {1}".format(container, uri))

            self.connect()
            self.blob.create_container(container)

            blobs = self.path_list_blobs(container, uri)

            d = {}

            for b in blobs:
                spath = self.spath(container, uri, b)
                # print(b.__dict__)
                #print(str(b.properties.last_modified.__dict__))
                #print(str(spath.ModifiedTS))
                d[spath.SPath] = spath
            # print(d)
            return d
        except Exception as e:
            print(e)

    def remove(self, src: SyncPath):
        try:
            logging.debug("Removing {0}".format(src.AbsPath))
            self.connect()
            self.blob.create_container(src.BasePath)
            self.blob.delete_blob(src.BasePath, src.AbsPath)
        except:
            pass


    def copy_local2azure(self, src, base_dir):
        try:

            container, uri = self.path_split(base_dir)

            if len(src.SPath) > 0 and src.SPath[0] == "/":
                path = uri + src.SPath[1:]
            else:
                path = uri + src.SPath
            logging.debug("copy_local2azure Spath {0}. path:{1}".format(src.SPath, path))
            self.connect()
            if not src.IsDir:
                self.blob.put_block_blob_from_path(container, path, src.AbsPath)
            else:
                self.blob.put_block_blob_from_text(container, path + "/", "")
        except Exception as e:
            print("Error Copying")
            print(e)

    def copy_azure2local(self, src, base_dir):
        try:

            if len(src.SPath) > 0 and (src.SPath[0] == "/" or src.SPath[0] == "\\"):
                path = src.SPath[1:]
            else:
                path = src.SPath

            path = os.path.normpath(os.path.join(base_dir, path))
            logging.debug("copy_azure2local basedir:{0} Spath {1}, path {2}, abs: {3}".format(base_dir, src.SPath, path, src.AbsPath))

            if not os.path.isdir(path):
                os.makedirs(os.path.dirname(path), exist_ok=True)

            if not (len(src.AbsPath) > 0 and src.AbsPath[len(src.AbsPath) - 1] == "/"):
                self.blob.get_blob_to_path(src.BasePath, src.AbsPath, path)




            """container, uri = self.path_split(base_dir)

            if len(src.SPath)>0 and src.SPath[0]=="/":
                path= uri+ src.SPath[1:]
            else:
                path= uri+src.SPath
            self.connect()
            if not src.IsDir:
                self.blob.get_blob_to_path(src.BasePath, path, src.AbsPath)
            else:
                self.blob.put_block_blob_from_text(container, path, "")"""
        except Exception as e:
            print("Error copying")
            print(e)
Code example #21
class AzureTransfer(BaseTransfer):
    def __init__(self, account_name, account_key, container_name, prefix=None):
        # NOTE: Azure wants all paths to start with a slash
        prefix = "/{}".format(prefix.lstrip("/") if prefix else "")
        super().__init__(prefix=prefix)
        self.account_name = account_name
        self.account_key = account_key
        self.container_name = container_name
        self.conn = BlobService(account_name=self.account_name,
                                account_key=self.account_key)
        self.container = self.get_or_create_container(self.container_name)
        self.log.debug("AzureTransfer initialized")
        # XXX: AzureTransfer isn't actively tested and its error handling is probably lacking
        self.log.warning(
            "AzureTransfer is experimental and has not been thoroughly tested")

    def get_metadata_for_key(self, key):
        key = self.format_key_for_backend(key)
        return self._list_blobs(key)[0]["metadata"]

    def _metadata_for_key(self, key):
        return self._list_blobs(key)[0]["metadata"]

    def list_path(self, key):
        path = self.format_key_for_backend(key, trailing_slash=True)
        return self._list_blobs(path)

    def _list_blobs(self, path):
        self.log.debug("Listing path %r", path)
        items = self.conn.list_blobs(self.container_name,
                                     prefix=path,
                                     delimiter="/",
                                     include="metadata")
        result = []
        for item in items:
            result.append({
                "last_modified": dateutil.parser.parse(item.properties.last_modified),
                "metadata": item.metadata,
                "name": self.format_key_from_backend(item.name),
                "size": item.properties.content_length,
            })
        return result

    def delete_key(self, key):
        key = self.format_key_for_backend(key)
        self.log.debug("Deleting key: %r", key)
        return self.conn.delete_blob(self.container_name, key)

    def get_contents_to_file(self, key, filepath_to_store_to):
        key = self.format_key_for_backend(key)
        self.log.debug("Starting to fetch the contents of: %r to: %r", key,
                       filepath_to_store_to)
        return self.conn.get_blob_to_path(self.container_name, key,
                                          filepath_to_store_to)

    def get_contents_to_fileobj(self, key, fileobj_to_store_to):
        key = self.format_key_for_backend(key)
        self.log.debug("Starting to fetch the contents of: %r", key)
        return self.conn.get_blob_to_file(self.container_name, key,
                                          fileobj_to_store_to)

    def get_contents_to_string(self, key):
        key = self.format_key_for_backend(key)
        self.log.debug("Starting to fetch the contents of: %r", key)
        return self.conn.get_blob_to_bytes(self.container_name,
                                           key), self._metadata_for_key(key)

    def store_file_from_memory(self, key, memstring, metadata=None):
        key = self.format_key_for_backend(key)
        # Azure requires all metadata keys and values to be strings
        metadata_to_send = {str(k): str(v) for k, v in (metadata or {}).items()}
        self.conn.put_block_blob_from_bytes(
            self.container_name,
            key,
            memstring,
            x_ms_meta_name_values=metadata_to_send)

    def store_file_from_disk(self,
                             key,
                             filepath,
                             metadata=None,
                             multipart=None):
        key = self.format_key_for_backend(key)
        # Azure requires all metadata keys and values to be strings
        metadata_to_send = {str(k): str(v) for k, v in (metadata or {}).items()}
        self.conn.put_block_blob_from_path(
            self.container_name,
            key,
            filepath,
            x_ms_meta_name_values=metadata_to_send)

    def get_or_create_container(self, container_name):
        start_time = time.time()
        self.conn.create_container(container_name)
        self.log.debug("Got/Created container: %r successfully, took: %.3fs",
                       container_name,
                       time.time() - start_time)
        return container_name
Code example #22
class AzureBackend(duplicity.backend.Backend):
    u"""
    Backend for Azure Blob Storage Service
    """
    def __init__(self, parsed_url):
        duplicity.backend.Backend.__init__(self, parsed_url)

        # Import Microsoft Azure Storage SDK for Python library.
        try:
            import azure
            import azure.storage
            if hasattr(azure.storage, u'BlobService'):
                # v0.11.1 and below
                from azure.storage import BlobService
                self.AzureMissingResourceError = azure.WindowsAzureMissingResourceError
                self.AzureConflictError = azure.WindowsAzureConflictError
            else:
                # v1.0.0 and above
                import azure.storage.blob
                if hasattr(azure.storage.blob, u'BlobService'):
                    from azure.storage.blob import BlobService
                else:
                    from azure.storage.blob.blockblobservice import BlockBlobService as BlobService
                self.AzureMissingResourceError = azure.common.AzureMissingResourceHttpError
                self.AzureConflictError = azure.common.AzureConflictHttpError
        except ImportError as e:
            raise BackendException(u"""\
Azure backend requires Microsoft Azure Storage SDK for Python (https://pypi.python.org/pypi/azure-storage/).
Exception: %s""" % str(e))

        # TODO: validate container name
        self.container = parsed_url.path.lstrip(u'/')

        if u'AZURE_ACCOUNT_NAME' not in os.environ:
            raise BackendException(u'AZURE_ACCOUNT_NAME environment variable not set.')

        if u'AZURE_ACCOUNT_KEY' in os.environ:
            if u'AZURE_ENDPOINT_SUFFIX' in os.environ:
                self.blob_service = BlobService(account_name=os.environ[u'AZURE_ACCOUNT_NAME'],
                                                account_key=os.environ[u'AZURE_ACCOUNT_KEY'],
                                                endpoint_suffix=os.environ[u'AZURE_ENDPOINT_SUFFIX'])
            else:
                self.blob_service = BlobService(account_name=os.environ[u'AZURE_ACCOUNT_NAME'],
                                                account_key=os.environ[u'AZURE_ACCOUNT_KEY'])
            self._create_container()
        elif u'AZURE_SHARED_ACCESS_SIGNATURE' in os.environ:
            if u'AZURE_ENDPOINT_SUFFIX' in os.environ:
                self.blob_service = BlobService(account_name=os.environ[u'AZURE_ACCOUNT_NAME'],
                                                sas_token=os.environ[u'AZURE_SHARED_ACCESS_SIGNATURE'],
                                                endpoint_suffix=os.environ[u'AZURE_ENDPOINT_SUFFIX'])
            else:
                self.blob_service = BlobService(account_name=os.environ[u'AZURE_ACCOUNT_NAME'],
                                                sas_token=os.environ[u'AZURE_SHARED_ACCESS_SIGNATURE'])
        else:
            raise BackendException(
                u'Neither AZURE_ACCOUNT_KEY nor AZURE_SHARED_ACCESS_SIGNATURE environment variable is set.')

        if globals.azure_max_single_put_size:
            # check if we use azure-storage>=0.30.0
            try:
                _ = self.blob_service.MAX_SINGLE_PUT_SIZE
                self.blob_service.MAX_SINGLE_PUT_SIZE = globals.azure_max_single_put_size
            # fallback for azure-storage<0.30.0
            except AttributeError:
                self.blob_service._BLOB_MAX_DATA_SIZE = globals.azure_max_single_put_size

        if globals.azure_max_block_size:
            # check if we use azure-storage>=0.30.0
            try:
                _ = self.blob_service.MAX_BLOCK_SIZE
                self.blob_service.MAX_BLOCK_SIZE = globals.azure_max_block_size
            # fallback for azure-storage<0.30.0
            except AttributeError:
                self.blob_service._BLOB_MAX_CHUNK_DATA_SIZE = globals.azure_max_block_size

    def _create_container(self):
        try:
            self.blob_service.create_container(self.container, fail_on_exist=True)
        except self.AzureConflictError:
            # Indicates that the resource could not be created because it already exists.
            pass
        except Exception as e:
            log.FatalError(u"Could not create Azure container: %s"
                           % str(e.message).split(u'\n', 1)[0],
                           log.ErrorCode.connection_failed)

    def _put(self, source_path, remote_filename):
        remote_filename = fsdecode(remote_filename)
        kwargs = {}
        if globals.azure_max_connections:
            kwargs[u'max_connections'] = globals.azure_max_connections

        # https://azure.microsoft.com/en-us/documentation/articles/storage-python-how-to-use-blob-storage/#upload-a-blob-into-a-container
        try:
            self.blob_service.create_blob_from_path(self.container, remote_filename, source_path.name, **kwargs)
        except AttributeError:  # Old versions use a different method name
            self.blob_service.put_block_blob_from_path(self.container, remote_filename, source_path.name, **kwargs)

        self._set_tier(remote_filename)

    def _set_tier(self, remote_filename):
        if globals.azure_blob_tier is not None:
            try:
                self.blob_service.set_standard_blob_tier(self.container, remote_filename, globals.azure_blob_tier)
            except AttributeError:  # might not be available in old API
                pass

    def _get(self, remote_filename, local_path):
        # https://azure.microsoft.com/en-us/documentation/articles/storage-python-how-to-use-blob-storage/#download-blobs
        self.blob_service.get_blob_to_path(self.container, fsdecode(remote_filename), local_path.name)

    def _list(self):
        # https://azure.microsoft.com/en-us/documentation/articles/storage-python-how-to-use-blob-storage/#list-the-blobs-in-a-container
        blobs = []
        marker = None
        while True:
            batch = self.blob_service.list_blobs(self.container, marker=marker)
            blobs.extend(batch)
            if not batch.next_marker:
                break
            marker = batch.next_marker
        return [blob.name for blob in blobs]

    def _delete(self, filename):
        # http://azure.microsoft.com/en-us/documentation/articles/storage-python-how-to-use-blob-storage/#delete-blobs
        self.blob_service.delete_blob(self.container, fsdecode(filename))

    def _query(self, filename):
        prop = self.blob_service.get_blob_properties(self.container, fsdecode(filename))
        try:
            info = {u'size': int(prop.properties.content_length)}
        except AttributeError:
            # old versions directly returned the properties
            info = {u'size': int(prop[u'content-length'])}
        return info

    def _error_code(self, operation, e):
        if isinstance(e, self.AzureMissingResourceError):
            return log.ErrorCode.backend_not_found
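
The try/except AttributeError probes in this revision are version checks: azure-storage 0.30.0 and later expose the public MAX_SINGLE_PUT_SIZE and MAX_BLOCK_SIZE attributes, while older releases only have the private _BLOB_MAX_DATA_SIZE and _BLOB_MAX_CHUNK_DATA_SIZE fields. A minimal standalone sketch of the same pattern (the helper name and example values are illustrative, not from the source):

def set_with_fallback(obj, public_name, private_name, value):
    # Prefer the public attribute (azure-storage >= 0.30.0); fall back
    # to the private field used by older releases. Illustrative helper,
    # not part of the backend above.
    if hasattr(obj, public_name):
        setattr(obj, public_name, value)
    else:
        setattr(obj, private_name, value)

# e.g. set_with_fallback(blob_service, 'MAX_BLOCK_SIZE',
#                        '_BLOB_MAX_CHUNK_DATA_SIZE', 4 * 1024 * 1024)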
Code example #24
0
File: azurebackend.py Project: muff1nman/duplicity
class AzureBackend(duplicity.backend.Backend):
    """
    Backend for Azure Blob Storage Service
    """
    def __init__(self, parsed_url):
        duplicity.backend.Backend.__init__(self, parsed_url)

        # Import Microsoft Azure SDK for Python library.
        try:
            import azure
            from azure.storage import BlobService
        except ImportError:
            raise BackendException('Azure backend requires Microsoft Azure SDK for Python '
                                   '(https://github.com/Azure/azure-sdk-for-python).')

        if 'AZURE_ACCOUNT_NAME' not in os.environ:
            raise BackendException('AZURE_ACCOUNT_NAME environment variable not set.')

        if 'AZURE_ACCOUNT_KEY' not in os.environ:
            raise BackendException('AZURE_ACCOUNT_KEY environment variable not set.')

        account_name = os.environ['AZURE_ACCOUNT_NAME']
        account_key = os.environ['AZURE_ACCOUNT_KEY']
        self.WindowsAzureMissingResourceError = azure.WindowsAzureMissingResourceError
        self.blob_service = BlobService(account_name=account_name, account_key=account_key)
        # TODO: validate container name
        self.container = parsed_url.path.lstrip('/')
        try:
            self.blob_service.create_container(self.container, fail_on_exist=True)
        except azure.WindowsAzureConflictError:
            # Indicates that the resource could not be created because it already exists.
            pass
        except Exception as e:
            log.FatalError("Could not create Azure container: %s"
                           % unicode(e.message).split('\n', 1)[0],
                           log.ErrorCode.connection_failed)

    def _put(self, source_path, remote_filename):
        # http://azure.microsoft.com/en-us/documentation/articles/storage-python-how-to-use-blob-storage/#upload-blob
        self.blob_service.put_block_blob_from_path(self.container, remote_filename, source_path.name)

    def _get(self, remote_filename, local_path):
        # http://azure.microsoft.com/en-us/documentation/articles/storage-python-how-to-use-blob-storage/#download-blobs
        self.blob_service.get_blob_to_path(self.container, remote_filename, local_path.name)

    def _list(self):
        # http://azure.microsoft.com/en-us/documentation/articles/storage-python-how-to-use-blob-storage/#list-blob
        blobs = self.blob_service.list_blobs(self.container)
        return [blob.name for blob in blobs]

    def _delete(self, filename):
        # http://azure.microsoft.com/en-us/documentation/articles/storage-python-how-to-use-blob-storage/#delete-blobs
        self.blob_service.delete_blob(self.container, filename)

    def _query(self, filename):
        prop = self.blob_service.get_blob_properties(self.container, filename)
        return {'size': int(prop['content-length'])}

    def _error_code(self, operation, e):
        if isinstance(e, self.WindowsAzureMissingResourceError):
            return log.ErrorCode.backend_not_found
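
Note that this older revision fetches the blob listing with a single list_blobs call, so the result is silently truncated at the service's page limit (at most 5,000 blobs per response). The revisions above and below follow the continuation marker instead; a standalone sketch of that loop (the function name is illustrative):

def list_all_blobs(blob_service, container):
    # Keep requesting pages until the service returns no next_marker.
    # Mirrors the pagination loop used in the newer revisions.
    names = []
    marker = None
    while True:
        batch = blob_service.list_blobs(container, marker=marker)
        names.extend(blob.name for blob in batch)
        if not batch.next_marker:
            break
        marker = batch.next_marker
    return names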
Code example #25
0
File: azurebackend.py Project: shahwangithub/lib
class AzureBackend(duplicity.backend.Backend):
    """
    Backend for Azure Blob Storage Service
    """
    def __init__(self, parsed_url):
        duplicity.backend.Backend.__init__(self, parsed_url)

        # Import Microsoft Azure Storage SDK for Python library.
        try:
            import azure
            import azure.storage
            if hasattr(azure.storage, 'BlobService'):
                # v0.11.1 and below
                from azure.storage import BlobService
                self.AzureMissingResourceError = azure.WindowsAzureMissingResourceError
                self.AzureConflictError = azure.WindowsAzureConflictError
            else:
                # v1.0.0 and above
                from azure.storage.blob import BlobService
                self.AzureMissingResourceError = azure.common.AzureMissingResourceHttpError
                self.AzureConflictError = azure.common.AzureConflictHttpError
        except ImportError as e:
            raise BackendException("""\
Azure backend requires Microsoft Azure Storage SDK for Python (https://pypi.python.org/pypi/azure-storage/).
Exception: %s""" % str(e))

        if 'AZURE_ACCOUNT_NAME' not in os.environ:
            raise BackendException('AZURE_ACCOUNT_NAME environment variable not set.')
        if 'AZURE_ACCOUNT_KEY' not in os.environ:
            raise BackendException('AZURE_ACCOUNT_KEY environment variable not set.')
        self.blob_service = BlobService(account_name=os.environ['AZURE_ACCOUNT_NAME'],
                                        account_key=os.environ['AZURE_ACCOUNT_KEY'])

        # TODO: validate container name
        self.container = parsed_url.path.lstrip('/')
        try:
            self.blob_service.create_container(self.container, fail_on_exist=True)
        except self.AzureConflictError:
            # Indicates that the resource could not be created because it already exists.
            pass
        except Exception as e:
            log.FatalError("Could not create Azure container: %s"
                           % unicode(e.message).split('\n', 1)[0],
                           log.ErrorCode.connection_failed)

    def _put(self, source_path, remote_filename):
        # https://azure.microsoft.com/en-us/documentation/articles/storage-python-how-to-use-blob-storage/#upload-a-blob-into-a-container
        self.blob_service.put_block_blob_from_path(self.container, remote_filename, source_path.name)

    def _get(self, remote_filename, local_path):
        # https://azure.microsoft.com/en-us/documentation/articles/storage-python-how-to-use-blob-storage/#download-blobs
        self.blob_service.get_blob_to_path(self.container, remote_filename, local_path.name)

    def _list(self):
        # https://azure.microsoft.com/en-us/documentation/articles/storage-python-how-to-use-blob-storage/#list-the-blobs-in-a-container
        blobs = []
        marker = None
        while True:
            batch = self.blob_service.list_blobs(self.container, marker=marker)
            blobs.extend(batch)
            if not batch.next_marker:
                break
            marker = batch.next_marker
        return [blob.name for blob in blobs]

    def _delete(self, filename):
        # http://azure.microsoft.com/en-us/documentation/articles/storage-python-how-to-use-blob-storage/#delete-blobs
        self.blob_service.delete_blob(self.container, filename)

    def _query(self, filename):
        prop = self.blob_service.get_blob_properties(self.container, filename)
        return {'size': int(prop['content-length'])}

    def _error_code(self, operation, e):
        if isinstance(e, self.AzureMissingResourceError):
            return log.ErrorCode.backend_not_found
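
For completeness, a hedged usage sketch (not taken from either snippet): both revisions read their credentials from the environment before the backend is constructed and raise BackendException when a variable is missing, and the container name comes from the path of the duplicity target URL.

import os

# Hypothetical values; the variable names are the ones the backends check.
os.environ['AZURE_ACCOUNT_NAME'] = 'mystorageaccount'
os.environ['AZURE_ACCOUNT_KEY'] = '<base64 account key>'

# With these set, constructing the backend (e.g. via a duplicity target
# URL whose path names the container) creates the container on first use,
# ignoring the conflict error if it already exists.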