Code Example #1
def run_sample():
    try:
        # Create the BlockBlobService that is used to call the Blob service for the storage account
        block_blob_service = BlockBlobService(
            account_name='adladandiag',
            account_key=
            'PLkDD//pOQDCbJ/EzzAzfszFrLuj2RzpFkssg95IHIgmw1aY38/y0u81q1Ux6DJcp9t6XHHNnAy3WBcutIaTUg=='
        )

        # Create a container called 'netmon-capture-logs'.
        container_name = 'netmon-capture-logs'
        block_blob_service.create_container(container_name)

        # Set the permission so the blobs are public.
        block_blob_service.set_container_acl(
            container_name, public_access=PublicAccess.Container)

        # Point to the local capture file to upload.
        local_path = os.path.expanduser(r"C:\Network Traces")
        local_file_name = "test.cap"
        full_path_to_file = os.path.join(local_path, local_file_name)

        print("Temp file = " + full_path_to_file)
        print("\nUploading to Blob storage as blob " + local_file_name)

        # Upload the created file, use local_file_name for the blob name
        block_blob_service.create_blob_from_path(container_name,
                                                 local_file_name,
                                                 full_path_to_file)

    except Exception as e:
        print(e)
Code Example #2
File: upload.py Project: acumenix/nise
def upload_to_azure_container(storage_file_name, local_path, storage_file_path):
    """Upload data to a storage account.

    Args:
        storage_file_name (String): The container to upload file to
        local_path  (String): The full local file system path of the file
        storage_file_path (String): The file path to upload to within container

    Returns:
        (Boolean): True if file was uploaded

    """
    uploaded = True
    try:
        account_key = str(os.environ.get('AZURE_ACCOUNT_KEY'))
        storage_account = str(os.environ.get('AZURE_STORAGE_ACCOUNT'))
        # Create the BlockBlobService that is used to call the
        # Blob service for the storage account.
        block_blob_service = BlockBlobService(
            account_name=storage_account, account_key=account_key)

        # Upload the created file, use local_file_name for the blob name.
        block_blob_service.create_blob_from_path(
            storage_file_name, storage_file_path, local_path)
        print(f'uploaded {storage_file_name} to {storage_account}')
    # pylint: disable=broad-except
    except Exception as error:
        print(error)
        uploaded = False
    return uploaded
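A minimal usage sketch for the function above, assuming the two environment variables are set and the target container already exists (all names and paths here are placeholders):

import os

# Hypothetical values; substitute a real key, account, and container.
os.environ['AZURE_ACCOUNT_KEY'] = '<account-key>'
os.environ['AZURE_STORAGE_ACCOUNT'] = '<storage-account>'

# Uploads /tmp/report.csv into the 'reports' container as 'daily/report.csv'.
if upload_to_azure_container('reports', '/tmp/report.csv', 'daily/report.csv'):
    print('upload succeeded')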
Code Example #3
File: centralCamera.py Project: TomIhsle/projects
def upload_blob(filename, filepath):
    if os.getenv("AZURE_STORAGE_ACCOUNT") and os.getenv("AZURE_STORAGE_KEY"):
        # Instantiate blob service
        block_blob_service = BlockBlobService(account_name=os.getenv("AZURE_STORAGE_ACCOUNT"),
                                              account_key=os.getenv("AZURE_STORAGE_KEY"))

        blob_name = filename

        # Upload blob
        try:
            block_blob_service.create_blob_from_path(
                os.getenv("AZURE_STORAGE_CONTAINER_NAME"),
                blob_name,
                filepath,
                content_settings=ContentSettings(content_type='image/jpeg'))

            print('Uploaded the photo: {}'.format(blob_name))
            print('Check Storage Explorer under: {} > {} > {}'.format(os.getenv("AZURE_STORAGE_ACCOUNT"),
                                                                      os.getenv("AZURE_STORAGE_CONTAINER_NAME"),
                                                                      blob_name))
            print('It is accessible at: https://{}.blob.core.windows.net/{}/{}'.format(os.getenv("AZURE_STORAGE_ACCOUNT"),
                                                                                       os.getenv("AZURE_STORAGE_CONTAINER_NAME"),
                                                                                       blob_name))
        except Exception:
            print('Unable to upload: {}'.format(blob_name))
    else:
        print('Set your AZURE_STORAGE_ACCOUNT and AZURE_STORAGE_KEY in the .env file.')
Code Example #4
def main(req: func.HttpRequest) -> func.HttpResponse:
    acc_name = req.params.get('account_name')
    acc_key = req.params.get('account_key')
    src_container_name = req.params.get('src_container_name')
    dst_container_name = req.params.get('dst_container_name')
    blob_name = req.params.get('blob_name')

    block_blob_service = BlockBlobService(account_name=acc_name,
                                          account_key=acc_key)
    download_path = "/tmp/" + blob_name

    start = time()
    block_blob_service.get_blob_to_path(src_container_name, blob_name,
                                        download_path)
    download_time = time() - start
    logging.info("Download time : " + str(download_time))

    start = time()
    upload_path = download_path
    # Re-upload under the same blob name (the final path segment).
    block_blob_service.create_blob_from_path(dst_container_name,
                                             os.path.basename(upload_path),
                                             upload_path)
    upload_time = time() - start
    logging.info("Upload time : " + str(upload_time))

    result = "download_time : " + str(
        download_time) + "(s) upload_time : " + str(upload_time) + "(s)"

    return func.HttpResponse(result)
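A hedged sketch of invoking the function above once deployed; the host name and all parameter values are placeholders:

import requests

# Hypothetical function URL; the five expected parameters are passed as query strings.
resp = requests.get('https://<function-app>.azurewebsites.net/api/<function-name>',
                    params={'account_name': '<account>',
                            'account_key': '<key>',
                            'src_container_name': 'src',
                            'dst_container_name': 'dst',
                            'blob_name': 'data.bin'})
print(resp.text)  # e.g. download_time : 1.2(s) upload_time : 0.9(s)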
Code Example #5
def upload_cmd_tree():
    blob_file_name = 'extensionCommandTree.json'
    downloaded_file_name = 'extCmdTreeDownloaded.json'
    file_path = os.path.expanduser(os.path.join('~', '.azure', blob_file_name))

    client = BlockBlobService(account_name=STORAGE_ACCOUNT,
                              account_key=STORAGE_ACCOUNT_KEY)
    client.create_blob_from_path(container_name=STORAGE_CONTAINER,
                                 blob_name=blob_file_name,
                                 file_path=file_path)

    url = client.make_blob_url(container_name=STORAGE_CONTAINER,
                               blob_name=blob_file_name)

    download_file_path = os.path.expanduser(
        os.path.join('~', '.azure', downloaded_file_name))
    download_file(url, download_file_path)
    if filecmp.cmp(file_path, download_file_path):
        print(
            "extensionCommandTree.json uploaded successfully. URL: {}".format(
                url))
    else:
        raise Exception(
            "Failed to update extensionCommandTree.json in the storage account"
        )
Code Example #6
    def _upload_file_to_azure_storage_account(self,
                                              client: BlockBlobService,
                                              source: str,
                                              destination: str,
                                              container: str = None):
        """Upload the file to the specified Azure Storage Account.

        Assumption is that any cloud environment has access to a shared repository of artifacts.

        Args:
            client: Azure Storage Account client
            source: Local path of the file to upload
            destination: Name of the file
            container: Name of the container the file should be uploaded to
        """
        if not container:
            container = self.config["azure"]["common"][
                "artifacts_shared_storage_account_container_name"]
        logger.info(f"""uploading artifact
             | from {source}
             | to {destination}
             | in container {container}""")

        client.create_blob_from_path(container_name=container,
                                     blob_name=destination,
                                     file_path=source)
Code Example #7
def upload_file_to_container(block_blob_client: azureblob.BlockBlobService,
                             container_name: str,
                             file_path: str,
                             folder: str = None) -> batchmodels.ResourceFile:
    """
    Uploads a local file to an Azure Blob storage container.

    :param block_blob_client: A blob service client.
    :param container_name: The name of the Azure Blob storage container.
    :param file_path: The local path to the file.
    :param folder: The folder within the container to store the file; defaults to None.
    :return: A ResourceFile initialized with a SAS URL appropriate for Batch
    tasks.
    """
    if folder:
        blob_name = f"{folder}/{os.path.basename(file_path)}"
    else:
        blob_name = os.path.basename(file_path)

    print('Uploading file {} to container [{}]...'.format(
        file_path, container_name))

    block_blob_client.create_blob_from_path(container_name, blob_name,
                                            file_path)

    # Obtain the SAS token for the container.
    sas_token = get_container_sas_token(block_blob_client, container_name,
                                        azureblob.BlobPermissions.READ)

    sas_url = block_blob_client.make_blob_url(container_name,
                                              blob_name,
                                              sas_token=sas_token)

    return batchmodels.ResourceFile(file_path=blob_name, http_url=sas_url)
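The get_container_sas_token helper called above is not shown in this example. A minimal sketch of what it might look like, assuming the legacy azure-storage BlockBlobService API (the two-hour expiry window is an arbitrary choice):

import datetime
import azure.storage.blob as azureblob

def get_container_sas_token(block_blob_client: azureblob.BlockBlobService,
                            container_name: str,
                            blob_permissions: azureblob.BlobPermissions) -> str:
    # Issue an ad-hoc container-level SAS token; no stored access policy is used.
    return block_blob_client.generate_container_shared_access_signature(
        container_name,
        permission=blob_permissions,
        expiry=datetime.datetime.utcnow() + datetime.timedelta(hours=2))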
Code Example #8
def upload_file(full_path_to_file):
    try:
        acc_name = 'accname'
        # Create the BlockBlobService that is used to call the Blob service for the storage account
        block_blob_service = BlockBlobService(account_name=acc_name,
                                              account_key='acckey')

        # Create a container called 'quickstartblobs'.
        container_name = 'quickstartblobs'
        block_blob_service.create_container(container_name)

        # Set the permission so the blobs are public.
        block_blob_service.set_container_acl(
            container_name, public_access=PublicAccess.Container)

        local_file_name = "Bill.jpg"

        print("Temp file = " + full_path_to_file)
        print("\nUploading to Blob storage as blob " + local_file_name)

        # Upload the created file, use local_file_name for the blob name
        block_blob_service.create_blob_from_path(container_name,
                                                 local_file_name,
                                                 full_path_to_file)

        # List the blobs in the container
        print("\nList blobs in the container")
        generator = block_blob_service.list_blobs(container_name)
        for blob in generator:
            print("\t Blob name: " + blob.name)

        return "https://" + acc_name + ".blob.core.windows.net/" + container_name + "/" + local_file_name
    except Exception as e:
        print(e)
Code Example #9
    def push2AzureAsBlobs(self):
        ''' push the files to azure
        '''
        service = BlockBlobService(account_name=self.account_name,
                                   account_key=self.account_key)
        permission = ContainerPermissions(read=True, write=True)
        sas = service.generate_container_shared_access_signature(
            container_name=self.container_name,
            permission=permission,
            protocol='https',
            start=datetime.utcnow(),  # SAS validity times are interpreted as UTC
            expiry=datetime.utcnow() + timedelta(days=1))

        service = BlockBlobService(account_name=self.account_name,
                                   sas_token=sas)

        logfiles = self.getfilelist()
        for insfile in logfiles:
            print(insfile)
            basename = os.path.basename(insfile)
            if service.exists(container_name=self.container_name,
                              blob_name=basename):
                print('File ' + basename + ' has been uploaded before.')
            else:
                service.create_blob_from_path(
                    container_name=self.container_name,
                    blob_name=basename,
                    file_path=insfile,
                    content_settings=ContentSettings(
                        content_type=mimetypes.guess_type(basename)[0]),
                    validate_content=False)
Code Example #10
    def upload_video(self,
                     account_name='',
                     sas_token='',
                     container='',
                     blob='',
                     path=''):
        """Upload video to the provided account."""

        block_blob_service = None

        if account_name and sas_token and container and blob:
            block_blob_service = BlockBlobService(account_name=account_name,
                                                  sas_token=sas_token)
            containers = block_blob_service.list_containers()

            print("Available containers:")
            for container_entry in containers:
                print(container_entry.name)

            container_matches = any(x for x in containers
                                    if x.name == container)

            if not container_matches:
                block_blob_service.create_container(container)
        else:
            print('ERROR: No account credentials for Azure Storage specified.')
            return

        block_blob_service.create_blob_from_path(
            container,
            blob,
            path,
            content_settings=ContentSettings(content_type='video/mp4'))
Code Example #11
def run_azure():
    try:
        # Create the BlockBlobService that is used to call the Blob service for the storage account
        block_blob_service = BlockBlobService(connection_string='DefaultEndpointsProtocol=https;AccountName=flatscontainer;AccountKey=kx/bb+HLnwGFXwvT194g3Gl5EY8kM2OlG/Wb8El8b4yd/rjlcpqHHVjSJd0XG53DOxH4qiHszcU+sxogA52suA==;EndpointSuffix=core.windows.net')

        # Create a container.
        container_name = 'datafromscraper'
        block_blob_service.create_container(container_name)

        # Set the permission so the blobs are public.
        block_blob_service.set_container_acl(container_name, public_access=PublicAccess.Container)
        # Create a file in Documents to test the upload and download.
        local_path = os.path.expanduser(r"..\dane")  # <----------- path to the file
        local_file_name = "flats_final.csv"  # <---------------- file name
        full_path_to_file = os.path.join(local_path, local_file_name)

        #print("Temp file = " + full_path_to_file)
        print("\nUploading to Blob storage as blob " + local_file_name)

        # Upload the created file, use local_file_name for the blob name
        block_blob_service.create_blob_from_path(container_name, local_file_name, full_path_to_file)

        # List the blobs in the container
        print("\nList blobs in the container")
        generator = block_blob_service.list_blobs(container_name)
        for blob in generator:
            print("\t Blob name: " + blob.name)
        
    except Exception as e:
        print(e)
Code Example #12
class AzureHelper:

    __instance = None
    __block_blob_service = None

    @staticmethod
    def getInstance():
        if AzureHelper.__instance is None:
            AzureHelper()
        return AzureHelper.__instance

    def __init__(self):
        if AzureHelper.__instance is not None:
            raise Exception("AzureHelper class is a singleton!")
        else:
            AzureHelper.__instance = self
            self.__block_blob_service = BlockBlobService(
                connection_string=Config.AZURE_CONNECTION_STRING)

    def pushToAzure(self, object_name):
        # push to azure
        if os.path.isfile("output/{}.csv".format(object_name)):
            try:
                self.__block_blob_service.create_blob_from_path(
                    Config.AZURE_CONTAINER_NAME,
                    Config.AZURE_FOLDER_NAME + "/{}.csv".format(object_name),
                    "output/{}.csv".format(object_name))
                print("^^^^^^ {} object pushed to Azure Storage Blob".format(
                    object_name))
            except Exception as inst:
                print(inst)
                IOHelper.appendToLog("azure_error.log", "\n\n{}".format(inst))
Code Example #13
def run_sample():
    try:
        # Create the BlockBlobService that is used to call the Blob service for the storage account
        block_blob_service = BlockBlobService(
            account_name='pavic',
            account_key=
            'lPdOTarDD4n3xuwz5EHMNOK3q/SNpnZ/1FKYKHNJSL4V4ZPt1LlvtjTZlpCfnghYFYQi20nTpBx6GVRPKfghoQ=='
        )

        # Create a container called 'aicamvideoclips'.
        container_name = 'aicamvideoclips'
        block_blob_service.create_container(container_name)

        # Set the permission so the blobs are public.
        block_blob_service.set_container_acl(
            container_name, public_access=PublicAccess.Container)

        # Create a file in Documents to test the upload and download.
        local_path = os.path.abspath(os.path.curdir)
        local_file_name = "Capture20190404_104457.mp4"  # input("Enter file name to upload : ")
        full_path_to_file = os.path.join(local_path, local_file_name)

        # Write text to the file.
        #file = open(full_path_to_file,  'w')
        #file.write("Hello, World!")
        #file.close()

        print("Temp file = " + full_path_to_file)
        print("\nUploading to Blob storage as blob " + local_file_name)

        # Upload the created file under a date-based virtual folder.
        # Use '/' as the separator; Azure treats it as a virtual directory delimiter.
        block_blob_service.create_blob_from_path(
            container_name, "201904/10/" + local_file_name,
            full_path_to_file)

        # List the blobs in the container
        #print("\nList blobs in the container")
        #generator = block_blob_service.list_blobs(container_name)
        #for blob in generator:
        #   print("\t Blob name: " + blob.name)

        # Download the blob(s).
        # Add '_DOWNLOADED' as prefix to '.txt' so you can see both files in Documents.
        #full_path_to_file2 = os.path.join(local_path, str.replace(local_file_name ,'.mp4', '_DOWNLOADED.mp4'))
        #print("\nDownloading blob to " + "201904\\10\\" +full_path_to_file2)
        #block_blob_service.get_blob_to_path(container_name, "201904\\10\\" +local_file_name, full_path_to_file2)

        sys.stdout.write(
            "Sample finished running. When you hit <any key>, the sample will be deleted and the sample "
            "application will exit.")
        sys.stdout.flush()
        input()

        # Clean up resources. This includes the container and the temp files
        #block_blob_service.delete_container(container_name)
        #os.remove(full_path_to_file)
        #os.remove(full_path_to_file2)
    except Exception as e:
        print(e)
Code Example #14
File: lib_azure.py Project: Jahia/jelastic-packages
    def upload_file(self, file_name, object_name=None, **kwargs):
        if not self.check_if_sto_acc_exist():
            logging.info(
                "I will create storage account {} in rg {} for you".format(
                    self.sto_account, self.rg))
            self.create_sto_account()
        if not self.test_if_obj_exist(object_name=self.sto_cont_name):
            logging.info("I will create {}:{}:{} for you".format(
                self.rg, self.sto_account, self.sto_cont_name))
            self.create_sto_container()

        # if blob object name was not specified, use file_name
        if object_name is None:
            object_name = file_name

        sto_key = self.get_sto_account_key()
        blob = BlockBlobService(self.sto_account, sto_key)
        try:
            blob.create_blob_from_path(self.sto_cont_name, object_name,
                                       file_name)
            logging.info("File {} successfully uploaded to {}:{}:{}".format(
                file_name, self.sto_account, self.sto_cont_name, object_name))
        except Exception:
            logging.error("Cannot upload {} to {}:{}:{}".format(
                file_name, self.sto_account, self.sto_cont_name, object_name))
            return False
        return True
Code Example #15
def index(request):
    context = RequestContext(request)
    if request.method == 'POST':
        formobj = fileform(request.POST, request.FILES)
        if formobj.is_valid():
            modelobj = formobj.save(commit=False)
            modelobj.image = request.FILES['image']
            modelobj.save()

            mp3 = request.FILES['image']

            block_blob_service = BlockBlobService(account_name=myaccount, account_key=mykey)
            block_blob_service.create_container('harshultest2', public_access=PublicAccess.Container)
            block_blob_service.set_container_acl('harshultest2', public_access=PublicAccess.Container)

            with open('t.png','wb+') as formfile:
                for chunk in mp3.chunks():
                    formfile.write(chunk)

            block_blob_service.create_blob_from_path(
                'harshultest2',
                str(request.FILES['image']),
                't.png',
                content_settings=ContentSettings(content_type='image/png'))

            return HttpResponseRedirect(reverse('list'))
    else:
        formobj = fileform()
        return render_to_response('app/myform.html', {'formobj': formobj},context)
Code Example #16
    def save_jsonfile(self, account, container_name, folder_name,
                      local_file_name):
        try:
            block_blob_service = BlockBlobService(account.account_name,
                                                  account.account_key)
            # Set the permission so the blobs are public.
            block_blob_service.set_container_acl(
                container_name, public_access=PublicAccess.Container)

            # Build the full path to the local JSON file.
            full_path_to_file = os.path.join(folder_name, local_file_name)

            # Upload the created file, use local_file_name for the blob name
            block_blob_service.create_blob_from_path(container_name,
                                                     local_file_name,
                                                     full_path_to_file)

            # List the blobs in the container
            print("\nList blobs in the container")
            generator = block_blob_service.list_blobs(container_name)
            for blob in generator:
                print("\t Blob name: " + blob.name)

            sys.stdout.write("Don't forget to delete resources; the "
                             "application will exit.")
            sys.stdout.flush()

        except Exception as e:
            print('Error occurred while uploading to the container.', e)
Code Example #17
def process():

    data = request.get_json()

    url = data['url']

    pattern = re.compile(r'(?<=net\/).*?(?=\/)')
    match = pattern.findall(url)
    container_name = match[0]
    reg = r"(?<=" + container_name + r"/).*"
    pattern = re.compile(reg)
    match = pattern.findall(url)
    file_name1 = match[0]
    block_blob_service = BlockBlobService(
        account_name='dsconvreport',
        account_key=
        '2wIt3xVY2HR5mXfl2489ctyE1CIewgwA0am+jE85HkOfOBKc7Af0KHHb2YS9Z466T+v9KClZXYeht21M3oXFYw=='
    )

    full_path_to_file = file_name1[:-5] + "_downloaded.xlsx"

    block_blob_service.get_blob_to_path(container_name, file_name1,
                                        full_path_to_file)

    df = pd.read_excel(full_path_to_file)

    val = df.values.tolist()

    items = []
    supp_val = []
    ranks = []

    for i in val:
        if i[0] not in items:
            items.append(i[0])

    for j in items:
        for i in val:
            if i[0] == j:
                supp_val.append(i[2])
        ranks.extend(rd(supp_val, method='dense'))
        supp_val = []

    df['supplier_rank'] = ranks
    files = 'Test_Supplierdata_kundan.xlsx'
    df.to_excel(files)

    block_blob_service.create_blob_from_path(container_name,
                                             'Test_Supplierdata_kundan.xlsx',
                                             'Test_Supplierdata_kundan.xlsx')

    op_url = "https://dsconvreport.blob.core.windows.net/" + container_name + "/" + files
    results = {}  # results was not defined above; assumed to be a plain dict
    results['result'] = "Congratulations, Your File has been Uploaded Successfully"
    results['uploaded_url'] = op_url

    return jsonify({
        'output_url': op_url,
        'message': 'Your file has been downloaded and uploaded'
    })
Code Example #18
def pagehandler(url, pageresponse, soup):
    """Function to be customized for processing of a single page.

    url = URL of this page
    pageresponse = page content; response object from requests module
    soup = Beautiful Soup object created from pageresponse

    Return value = whether or not this page's links should be crawled.
    """
    print("Crawling:" + url + " ({0} bytes)".format(len(pageresponse.text)))
    #url="https://www.bellevue.edu/student-support/career-services/pdfs/resume-samples.pdf"
    #get the page content and write in a file
    # Get the filename from the URL path
    try:
        # Create the BlockBlobService that is used to call the Blob service for the storage account
        # Comment below lines if you're not using BLOB
        block_blob_service = BlockBlobService(account_name='<ACCOUNT_NAME>',
                                              account_key='<ACCOUNT_KEY>')

        # Create a container
        container_name = '<CONTAINER_NAME>'

        # Set the permission so the blobs are public.
        block_blob_service.set_container_acl(
            container_name, public_access=PublicAccess.Container)

        filename = url[url.rfind("/") + 1:]
        filename_small = filename.replace(".", "_small.")
        #print("Filename " + filename)

        ## The following code takes the body of the content between the <div> tags of a particular id
        ## This is Optional. If you do not want to use to <DIV> comment the DIV related code
        div = soup.find("div", {"id": "<DIV ID>"})

        # Check that the <div> exists before dereferencing it.
        if div is None:
            print("None found")
        else:
            div_text = div.get_text()
            ## Additional parsing if needed
            parsed_url = url.replace("<SOURCE STR>",
                                     "").replace("<CURRENT VALUE>", "<NEW VALUE>")
            print(div_text)
            f = open("<LOCAL FOLDER NAME>/" + parsed_url, "a+")
            # write the content to file created
            #f.write(pageresponse.text)
            #r = requests.get(url, stream=True)
            #r = div
            #with open("extracted_data/" + filename, "wb") as f:
            #for chunk in r.iter_content(chunk_size=1024):
            #if chunk:
            f.write(div_text)
            block_blob_service.create_blob_from_path(
                container_name, parsed_url, '<LOCAL PATH>' + parsed_url)

            f.close()
    except Exception:
        print("HTML Not processed: " + url)
    # wordcount(soup) # display unique word counts
    return True
Code Example #19
File: main_server.py Project: rheehot/azureServer
def run_sample(localFileName, container):
    try:
        # Create the BlockBlobService that is used to call the Blob service for the storage account
        block_blob_service = BlockBlobService(
            account_name='projectimage',
            account_key=
            'JRchgJJAHjSzDMHMe73/9p65tTQTY7R9v/flqDfZagSTj00JiPXlAqi44B3P4Dkr3htQL3Eq2DAG81DXS7GdTw=='
        )

        # Create a container with the given name.
        container_name = container  # do not use uppercase letters in the container name;
        # uppercase characters are rejected with an error.
        block_blob_service.create_container(container_name)
        # Set the permission so the blobs are public.
        block_blob_service.set_container_acl(
            container_name, public_access=PublicAccess.Container)
        # Create a file in Documents to test the upload and download.
        local_path = os.path.expanduser("~/BlobStorage")
        local_file_name = localFileName
        storage_url = METADATA.storage_url
        blob_url = storage_url + local_file_name  # used for image analysis

        full_path_to_file = os.path.join(local_path, local_file_name)

        block_blob_service.create_blob_from_path(container_name,
                                                 local_file_name,
                                                 full_path_to_file)

        result_image_analyze = use_describe_image_api(blob_url)
        result_tran = translate_ko(result_image_analyze)
        return result_tran

    except Exception as e:
        print(e)
Code Example #20
    def upload_blob(self, filename, filepath):
        blob_service_client = BlockBlobService(account_name=self.container_acc,
                                               account_key=self.container_key)
        blob_service_client.set_container_acl(
            self.container_name, public_access=PublicAccess.Container)
        blob_service_client.create_blob_from_path(self.container_name,
                                                  filename, filepath)
Code Example #21
def take_picture():
    print("take picture method called")
    # Turn on LED  & Lamp
    GPIO.output(config.GPIO_PIN_ADDRESS, GPIO.HIGH)
    GPIO.output(config.LAMP_ADDRESS, GPIO.HIGH)

    # Take Picture
    camera = picamera.PiCamera()
    pic_id = "/home/pi/iot-hub-python-raspberrypi-client-app/imgs/" + str(
        uuid.uuid4()) + '.jpg'
    camera.capture(pic_id)
    camera.close()  #turn camera off

    # Upload picture
    block_blob_service = BlockBlobService(account_name='yourBlobStorage',
                                          account_key='your-account-key')
    block_blob_service.create_blob_from_path(
        'mycontainer',
        os.path.basename(pic_id),  # use the file name only as the blob name, not the full local path
        pic_id,
        content_settings=ContentSettings(content_type='image/jpeg'))
    print("picture uploaded")

    #Turn off LED & Lamp
    GPIO.output(config.GPIO_PIN_ADDRESS, GPIO.LOW)
    GPIO.output(config.LAMP_ADDRESS, GPIO.LOW)
Code Example #22
File: application.py Project: ImaiTomoaki/pelion
def save_image(image, blob_name):
    with open(blob_name, 'wb') as f:
        f.write(image)
    blobService = BlockBlobService(account_name=account_name,
                                   account_key=account_key)
    blobService.create_blob_from_path(container_name, blob_name, blob_name)
    os.remove(blob_name)
Code Example #23
def upload_az(az_account_name, az_account_key, az_container, filename):
    print("[COMAE] Uploading to Azure container " + az_container)
    blobpath = os.path.basename(filename)
    block_blob_service = BlockBlobService(account_name=az_account_name,
                                          account_key=az_account_key)
    block_blob_service.create_container(az_container)
    block_blob_service.create_blob_from_path(az_container, blobpath, filename)
Code Example #24
File: upload_file_to_blob.py Project: guyrt/openfec
class BlobUploader(object):

    def __init__(self, blob_container=None, make_container_public=False):
        """
        Class to handle uploading to an azure blob connection.

        :param blob_container: Name of the target container; defaults to BLOB_CONTAINER.
        :param make_container_public: True iff you are okay with public read access to your data. Useful for teaching a course
        :return:
        """
        self.blob_container = blob_container or BLOB_CONTAINER
        self.blob_service = BlockBlobService(account_name=BLOB_ACCOUNTNAME, account_key=BLOB_ACCOUNTKEY)
        # if make_container_public:
        #     self.blob_service.create_container(BLOB_CONTAINER, public_access=PublicAccess)
        # else:
        #     self.blob_service.create_container(BLOB_CONTAINER)

    def put_json_file(self, file_obj, filename):
        """
        Put a file into azure blob store.

        Allows user to specify format. For example, one could use:
        <prefix>/YYYYMMDD.json
        """
        file_obj.seek(0)
        self.blob_service.create_blob_from_path(
            self.blob_container,
            filename,
            file_obj.name,
            content_settings=ContentSettings(content_type="application/json")
        )
Code Example #25
def main():
    img, filename = get_img()
    ############## FOR TESTING ########
    #filename = "img.jpg"
    #img = plt.imread("img.jpg")
    ##################################

    json_res = send_api_helmet(filename)
    face_detected = send_face_recognition(filename)
    print_face_img(face_detected, img)
    tab = read_answer_api_helmet(json_res, img, re.split("/", filename)[-1])
    date = datetime.today().strftime("%Y-%m-%d")
    csv_name = str(date) + ".csv"
    rows = create_row_csv(face_detected, tab)
    if len(rows) == 0:
        print("SAS closed. Unknown face detected")
    else:
        res = 0
        for row in rows:
            print(row[1] + " was detected on the photo with " + row[3])
            if row[3] == "no helmet":
                res = 1
        if res == 0:
            print("SAS open.")
        else:
            print("SAS closed.")
    print(rows)
    add_data(csv_name, rows)
    block_blob_service = BlockBlobService(account_name=ACCOUNT_NAME,
                                          account_key=ACCOUNT_KEY)
    block_blob_service.create_blob_from_path(CONTAINER_NAME_IMAGE,
                                             re.split("/", filename)[-1],
                                             filename)
    block_blob_service.create_blob_from_path(CONTAINER_NAME_CSV, csv_name,
                                             csv_name)
Code Example #26
def uploadToAzure(files):
    block_blob_service = BlockBlobService(account_name=azureAccountName,
                                          account_key=azureAccountKey)

    totalFiles = str(len(files))
    fileCount = 0

    for file in files:
        fileCount += 1
        print("Processing: " + file[1] + " (" + str(
            fileCount) + "/" + totalFiles + ")")
        filepath = os.path.join(file[0], file[1])
        archivepath = file[0].replace(cameraPath, archivePath)
        if (filepath[-4:] == ".jpg") and (os.path.getsize(filepath) > 10485):
            try:
                containerName = file[0][20:28]
                block_blob_service.create_container(containerName)
                block_blob_service.create_blob_from_path(
                    container_name=containerName,  # Extract the date
                    blob_name=file[1],  # filename
                    file_path=filepath,
                    content_settings=ContentSettings(
                        content_type=jpgContentType))
                try:
                    archiveFile(filepath, archivepath, file[1])
                except OSError as e:
                    print(e.errno)
            except Exception:
                pass
        else:
            print("Incorrect extension or file size below threshold of 10 KB. Archiving.")
            archiveFile(filepath, archivepath, file[1])
Code Example #27
def upload_icon(sas_url, file_path):
    # Break the SAS URL
    (scheme, netloc, path, params, query, fragment) = urlparse(sas_url)
    # Account is the first part of the netlocation upto the dot
    account_name = netloc[0:netloc.index('.')]
    # Container name is the path
    container_name = path.strip('/')

    # Create a block blob service
    blockblob_service = BlockBlobService(account_name=account_name,
                                         sas_token=query)

    # Get the file name of the icon
    file_name = os.path.basename(file_path)
    # Determine the content type and encoding for the file
    (content_type, content_encoding) = mimetypes.guess_type(file_name)
    content_settings = ContentSettings(content_type=content_type,
                                       content_encoding=content_encoding)

    # Upload the icon
    blockblob_service.create_blob_from_path(container_name=container_name,
                                            blob_name=file_name,
                                            file_path=file_path,
                                            content_settings=content_settings)

    # Append the icon name to the path to generate the download link
    path = path + '/' + file_name
    urlparts = (scheme, netloc, path, params, query, fragment)
    sas_download_url = urlunparse(urlparts)

    return sas_download_url
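A hedged usage sketch for upload_icon; the SAS URL below is a made-up placeholder with the shape the function expects (account in the host name, container in the path, SAS token in the query string):

# Hypothetical container-level SAS URL and local file.
sas_url = 'https://myaccount.blob.core.windows.net/icons?sv=2018-03-28&sig=REDACTED'
download_url = upload_icon(sas_url, '/tmp/app-icon.png')
print('Icon available at: ' + download_url)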
Code Example #28
    def uploadFileWithBlobSasUrl(self, file_name_full_path):
        retryRemaining = 3
        while retryRemaining > 0:
            try:
                self.telemetryLogger.info("Cloud Environment: " +
                                          self.cloudEnv)
                sas_service = BlockBlobService(
                    account_name=self.destination_storage_account,
                    sas_token=self.destination_sas_token,
                    endpoint_suffix=self.BlobUploadUrlSuffix())
                self.telemetryLogger.info('Uploading to Blob starting.')
                start_time = datetime.now()
                sas_service.create_blob_from_path(
                    self.destination_container_name,
                    self.destination_blob_name, file_name_full_path)
                self.telemetryLogger.info(
                    'Uploading to Blob completed. Time taken: ' +
                    str((datetime.now() - start_time).total_seconds() * 1000) +
                    ' ms')
                break
            except Exception as ex:
                retryRemaining -= 1
                if retryRemaining <= 0:
                    self.telemetryLogger.error(
                        'Encountered error during blob upload multiple times after exhausting all retry attempts'
                    )
                    raise BlobUploadException(ex)
                exMessage = str(ex)
                self.telemetryLogger.warning(
                    'Encountered error during blob upload: ' + exMessage)
                self.telemetryLogger.warning('Retrying. ' +
                                             str(retryRemaining) +
                                             ' attempt(s) remaining.')
Code Example #29
class RemoteStorage():
    def __init__(self, user, key, container):
        self.bs = BlockBlobService(account_name=user, account_key=key)
        self.container = container

    def get_obj(self, blob_name):
        try:
            return self.bs.get_blob_properties(self.container, blob_name)
        except AzureMissingResourceHttpError:
            return None

    def download_blob(self, blob_name, localf):
        self.bs.get_blob_to_path(self.container, blob_name, localf)

    def del_blob(self, blob_name):
        try:
            self.bs.delete_blob(self.container, blob_name)
        except AzureMissingResourceHttpError:
            # Deleting a blob that is already gone is not treated as an error.
            pass

    def upload(self, blob_name, localf):
        self.bs.create_blob_from_path(
            self.container,
            blob_name,
            localf,
            content_settings=ContentSettings(content_type='text/xml'))

    def new_docs(self):
        res = []
        for blob in self.bs.list_blobs(self.container):
            res.append(blob.name)
        return res
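A short usage sketch for the RemoteStorage wrapper above; account, key, and container values are placeholders:

# Hypothetical credentials and container name.
storage = RemoteStorage('<account>', '<account-key>', 'documents')

storage.upload('feed.xml', '/tmp/feed.xml')         # push a local file as text/xml
print(storage.new_docs())                           # list blob names in the container
storage.download_blob('feed.xml', '/tmp/copy.xml')  # fetch it back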
Code Example #30
def write_file_to_blob(file_path, blob_name, container_name, bbs=None):
    if not bbs:
        bbs = BlockBlobService(
            account_name=config["storage_account_name"],
            account_key=config["storage_account_key"],
        )
    bbs.create_blob_from_path(container_name, blob_name, file_path)
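The config mapping consulted above is assumed to be defined elsewhere in the module; a sketch of the expected shape, with placeholder values:

# Hypothetical: in the original module, config is presumably loaded from a settings file.
config = {
    "storage_account_name": "<storage-account>",
    "storage_account_key": "<account-key>",
}

write_file_to_blob("/tmp/data.csv", "data.csv", "my-container")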
Code Example #31
    def download_and_reupload(self, twilio_filename):
        """ Download file from Twilio, upload to Azure, and return the Azure location and local file. """
        blob_service = BlockBlobService(account_name=blob_accountname,
                                        account_key=blob_key)
        response = requests.get(twilio_filename)
        if response.status_code != 200:
            raise TwilioResponseError(
                "Couldn't find Twilio file {0}".format(twilio_filename))

        suffix = "wav"
        short_file_name = "{0}.{1}".format(uuid.uuid4(), suffix)
        azure_path = "/recordings/" + short_file_name

        with TmpFileCleanup() as tmp_file_store:
            local_filename = os.path.join(local_tmp_dir, short_file_name)
            tmp_file_store.tmp_files.append(local_filename)
            with open(local_filename, "wb") as f:
                f.write(response.content)
            # Now upload the local file to azure
            blob_service.create_blob_from_path(
                blob_container,
                azure_path,
                local_filename,
                content_settings=ContentSettings(content_type='audio/x-wav'))
        return azure_path
Code Example #32
def run_sample():
    try:
        # Create the BlockBlobService that is used to call the Blob service for the storage account
        block_blob_service = BlockBlobService(
            account_name='meetpythonstorage',
            account_key='abcabcabcabcabcabcabcabcabcabcabcabcabac')

        # Create a container called 'quickstartblobs'.
        container_name = 'quickstartblobs'
        block_blob_service.create_container(container_name)

        # Set the permission so the blobs are public.
        block_blob_service.set_container_acl(
            container_name, public_access=PublicAccess.Container)

        # Create a file in Documents to test the upload and download.
        local_path = os.path.abspath(os.path.curdir)
        local_file_name = input("Enter file name to upload : ")
        full_path_to_file = os.path.join(local_path, local_file_name)

        # Write text to the file.
        #file = open(full_path_to_file,  'w')
        #file.write("Hello, World!")
        #file.close()

        print("Temp file = " + full_path_to_file)
        print("\nUploading to Blob storage as blob " + local_file_name)

        # Upload the created file, use local_file_name for the blob name
        block_blob_service.create_blob_from_path(container_name,
                                                 local_file_name,
                                                 full_path_to_file)

        # List the blobs in the container
        print("\nList blobs in the container")
        generator = block_blob_service.list_blobs(container_name)
        for blob in generator:
            print("\t Blob name: " + blob.name)

        # Download the blob(s).
        # Add '_DOWNLOADED' as prefix to '.txt' so you can see both files in Documents.
        full_path_to_file2 = os.path.join(
            local_path, str.replace(local_file_name, '.txt',
                                    '_DOWNLOADED.txt'))
        print("\nDownloading blob to " + full_path_to_file2)
        block_blob_service.get_blob_to_path(container_name, local_file_name,
                                            full_path_to_file2)

        sys.stdout.write(
            "Sample finished running. When you hit <any key>, the sample will be deleted and the sample "
            "application will exit.")
        sys.stdout.flush()
        input()

        # Clean up resources. This includes the container and the temp files
        block_blob_service.delete_container(container_name)
        os.remove(full_path_to_file)
        os.remove(full_path_to_file2)
    except Exception as e:
        print(e)
Code Example #33
File: verify-azure.py Project: mbartoli/blobfs
    def test_get_put_blob(self):
        import config as config
        account_name = config.STORAGE_ACCOUNT_NAME
        account_key = config.STORAGE_ACCOUNT_KEY
        block_blob_service = BlockBlobService(account_name, account_key)
        block_blob_service.create_blob_from_path(
            'cont2',
            'sunset.png',
            'sunset.png')
        block_blob_service.get_blob_to_path('cont2', 'sunset.png', 'out-sunset.png')
Code Example #34
class _BlobStorageFileHandler(object):

    def __init__(self,
                  account_name=None,
                  account_key=None,
                  protocol='https',
                  container='logs',
                  zip_compression=False,
                  max_connections=1,
                  max_retries=5,
                  retry_wait=1.0,
                  is_emulated=False):
        self.service = BlockBlobService(account_name=account_name,
                                        account_key=account_key,
                                        is_emulated=is_emulated,
                                        protocol=protocol)
        self.container_created = False
        hostname = gethostname()
        self.meta = {'hostname': hostname.replace('_', '-'),
                     'process': os.getpid()}
        self.container = (container % self.meta).lower()
        self.meta['hostname'] = hostname
        self.zip_compression = zip_compression
        self.max_connections = max_connections
        self.max_retries = max_retries
        self.retry_wait = retry_wait

    def put_file_into_storage(self, dirName, fileName):
        """
        Ship the outdated log file to the specified blob container.
        """
        if not self.container_created:
            self.service.create_container(self.container)
            self.container_created = True
        fd, tmpfile_path = None, ''
        try:
            file_path = os.path.join(dirName, fileName)
            if self.zip_compression:
                suffix, content_type = '.zip', 'application/zip'
                fd, tmpfile_path = mkstemp(suffix=suffix)
                with os.fdopen(fd, 'wb') as f:
                    with ZipFile(f, 'w', ZIP_DEFLATED) as z:
                        z.write(file_path, arcname=fileName)
                file_path = tmpfile_path
            else:
                suffix, content_type = '', 'text/plain'
            self.service.create_blob_from_path(container_name=self.container,
                                               blob_name=fileName+suffix,
                                               file_path=file_path,
                                               content_settings=ContentSettings(content_type=content_type),
                                               max_connections=self.max_connections
                                               )  # max_retries and retry_wait no longer arguments in azure 0.33
        finally:
            if self.zip_compression and fd:
                os.remove(tmpfile_path)
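A hedged sketch of driving put_file_into_storage directly; in practice the class is presumably mixed into a rotating log handler so rolled-over files are shipped automatically (account and paths below are placeholders):

# Hypothetical account and log location. '%(hostname)s' is expanded against
# self.meta in the constructor, and the container name is then lowercased.
handler = _BlobStorageFileHandler(account_name='<account>',
                                  account_key='<account-key>',
                                  container='logs-%(hostname)s',
                                  zip_compression=True)
handler.put_file_into_storage('/var/log/myapp', 'app.log.1')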
Code Example #35
File: utils.py Project: izahn/workshops
def upload_file(STORAGE_NAME, STORAGE_KEY, NEW_CONTAINER_NAME, file, path, extension, content_type):
    """create blob service, and upload files to container"""
    
    blob_service = BlockBlobService(account_name=STORAGE_NAME, account_key=STORAGE_KEY)

    try:
        # content_type and extension are concatenated by the caller's convention,
        # e.g. content_type='image/' and extension='png'.
        blob_service.create_blob_from_path(NEW_CONTAINER_NAME, file, path,
                                           content_settings=ContentSettings(content_type=content_type + extension))
        print("{} // BLOB upload status: successful".format(file))

    except Exception:
        print("{} // BLOB upload status: failed".format(file))
Code Example #36
def run_sample():
    try:
        # Create the BlockBlobService that is used to call the Blob service for the storage account
        block_blob_service = BlockBlobService(account_name='meetpythonstorage', account_key='duOguiKnYb6ZEbJC6BftWqA2lcH67dWkmCSEJj+KxOTOHCNPeV7r4oO6feTw7gSSoFGKHryL4yqSVWlEkm6jWg==')

        # Create a container called 'quickstartblobs'.
        container_name = 'quickstartblobs'
        block_blob_service.create_container(container_name)

        # Set the permission so the blobs are public.
        block_blob_service.set_container_acl(container_name, public_access=PublicAccess.Container)

        # Create a file in Documents to test the upload and download.
        local_path = os.path.abspath(os.path.curdir)
        local_file_name = input("Enter file name to upload : ")
        full_path_to_file = os.path.join(local_path, local_file_name)

        # Write text to the file.
        #file = open(full_path_to_file,  'w')
        #file.write("Hello, World!")
        #file.close()

        print("Temp file = " + full_path_to_file)
        print("\nUploading to Blob storage as blob " + local_file_name)

        # Upload the created file, use local_file_name for the blob name
        block_blob_service.create_blob_from_path(container_name, local_file_name, full_path_to_file)

        # List the blobs in the container
        print("\nList blobs in the container")
        generator = block_blob_service.list_blobs(container_name)
        for blob in generator:
            print("\t Blob name: " + blob.name)

        # Download the blob(s).
        # Add '_DOWNLOADED' as prefix to '.txt' so you can see both files in Documents.
        full_path_to_file2 = os.path.join(local_path, str.replace(local_file_name, '.txt', '_DOWNLOADED.txt'))
        print("\nDownloading blob to " + full_path_to_file2)
        block_blob_service.get_blob_to_path(container_name, local_file_name, full_path_to_file2)

        sys.stdout.write("Sample finished running. When you hit <any key>, the sample will be deleted and the sample "
                         "application will exit.")
        sys.stdout.flush()
        input()

        # Clean up resources. This includes the container and the temp files
        block_blob_service.delete_container(container_name)
        os.remove(full_path_to_file)
        os.remove(full_path_to_file2)
    except Exception as e:
        print(e)
Code Example #37
    def spider_closed(self, spider):
        self.exporter.finish_exporting()
        file = self.files.pop(spider)
        filename = file.name
        newname = filename[:-5] + '-' + datetime.datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%S") + '.tsv'
        file.close()
        os.rename(filename, newname)
        if UPLOAD_TO_AZURE_STORAGE:
            block_blob_service = BlockBlobService(account_name=AZURE_ACCOUNT_NAME, account_key=AZURE_ACCOUNT_KEY)
            block_blob_service.create_blob_from_path(AZURE_CONTAINER,
                                                     newname,
                                                     newname,
                                                     content_settings=ContentSettings(content_type='text/tab-separated-values'))
Code Example #38
File: custom.py Project: yugangw-msft/azure-cli
def _upload_package_blob(package_file, url):
    """Upload the location file to storage url provided by autostorage"""

    uri = urlsplit(url)
    # the uri path always starts with '/', so the container name is the second block
    pathParts = uri.path.split("/", 2)
    container_name = pathParts[1]
    blob_name = pathParts[2]
    # we need to handle the scenario where the storage account is not in public Azure
    hostParts = uri.netloc.split(".", 2)
    account_name = hostParts[0]
    # the endpoint suffix needs to ignore the 'blob' part in the host name
    endpoint_suffix = hostParts[2]

    sas_service = BlockBlobService(account_name=account_name, sas_token=uri.query, endpoint_suffix=endpoint_suffix)
    sas_service.create_blob_from_path(container_name=container_name, blob_name=blob_name, file_path=package_file)
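A usage sketch for _upload_package_blob; the URL is a placeholder illustrating the autostorage SAS URL shape the parser assumes (account in the host, container and blob in the path, SAS token in the query):

# Hypothetical SAS URL: account 'batchauto', container 'apps', blob 'pkg.zip'.
url = 'https://batchauto.blob.core.windows.net/apps/pkg.zip?sv=2018-03-28&sig=REDACTED'
_upload_package_blob('/tmp/pkg.zip', url)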
Code Example #39
File: publish.py Project: LukaszStem/azure-cli
def publish(build, account, container, sas, **_) -> None:
    client = BlockBlobService(account_name=account, sas_token=sas)
    
    publishing_files = (p for p in glob.iglob(os.path.join(build, '**/*'), recursive=True))
    for source in publishing_files:
        if os.path.isdir(source):
            continue

        blob_path = os.path.join(os.environ['TRAVIS_REPO_SLUG'],
                                 os.environ['TRAVIS_BRANCH'],
                                 os.environ['TRAVIS_BUILD_NUMBER'],
                                 os.path.relpath(source, build))

        content_type, content_encoding = mimetypes.guess_type(os.path.basename(source))
        content_settings = ContentSettings(content_type, content_encoding)
        logger.info(f'Uploading {blob_path} ...')
        client.create_blob_from_path(container_name=container,
                                     blob_name=blob_path,
                                     file_path=source,
                                     content_settings=content_settings)
Code Example #40
def upload_file_to_container(block_blob_client: azureblob.BlockBlobService, container_name: str, file_path: str):
    """
    Uploads a local file to an Azure Blob storage container.

    :param block_blob_client: A blob service client.
    :type block_blob_client: `azure.storage.blob.BlockBlobService`
    :param container_name: The name of the Azure Blob storage container.
    :type container_name: str
    :param file_path: The local path to the file.
    :type file_path: str
    """
    blob_name = os.path.basename(file_path)

    logger.info(
        'Uploading file [{}] to container [{}]...'.format(
            file_path,
            container_name))

    block_blob_client.create_blob_from_path(container_name,
                                            blob_name,
                                            file_path)
Code Example #41
File: publish.py Project: LukaszStem/azure-cli
def nightly(build: str, account: str, container: str, sas: str, **_) -> None:
    client = BlockBlobService(account_name=account, sas_token=sas)

    modules_list = []
    for wheel_file in glob.iglob(os.path.join(build, 'build/*.whl')):
        package_name = os.path.basename(wheel_file).split('-', maxsplit=1)[0].replace('_', '-')
        sdist_file = next(glob.iglob(os.path.join(build, 'source', f'{package_name}*.tar.gz')))

        content_type, content_encoding = mimetypes.guess_type(os.path.basename(wheel_file))
        content_settings = ContentSettings(content_type, content_encoding)
        client.create_blob_from_path(container_name=container,
                                     blob_name=f'{package_name}/{os.path.basename(wheel_file)}',
                                     file_path=wheel_file,
                                     content_settings=content_settings)

        content_type, content_encoding = mimetypes.guess_type(os.path.basename(sdist_file))
        content_settings = ContentSettings(content_type, content_encoding)
        client.create_blob_from_path(container_name=container,
                                     blob_name=f'{package_name}/{os.path.basename(sdist_file)}',
                                     file_path=sdist_file,
                                     content_settings=content_settings)

        package_blobs = (os.path.basename(b.name) for b in client.list_blobs(container, prefix=package_name + '/') 
                                                  if b.name != f"{package_name}/")

        client.create_blob_from_text(container_name=container, 
                                     blob_name=f'{package_name}/',
                                     text=generate_package_list_in_html(f'Links for {package_name}', package_blobs),
                                     content_settings=ContentSettings('text/html'))
        
        modules_list.append(f"{package_name}/")
    
    client.create_blob_from_text(container_name=container, 
                                 blob_name='index.html',
                                 text=generate_package_list_in_html('Simple Index', modules_list),
                                 content_settings=ContentSettings('text/html'))
Code Example #42
from gpiozero import MotionSensor
from picamera import PiCamera
from datetime import datetime
from azure.storage.blob import BlockBlobService
from azure.storage.blob import ContentSettings

camera = PiCamera()
pir = MotionSensor(4)
block_blob_service = BlockBlobService(account_name='YOURACCOUNT', account_key='YOURKEY')

while True:
    pir.wait_for_motion()
    filename = "pircam-" +  datetime.now().strftime("%Y-%m-%d_%H.%M.%S.jpg")
    camera.capture(filename)
    pir.wait_for_no_motion()
    block_blob_service.create_blob_from_path(
        'YOURCONTAINER',
        filename,
        filename,
        content_settings=ContentSettings(content_type='image/jpeg'))
Code Example #43
# The Storage Account Name
storage_account_name = "mystore"
storage_account_key = "mykey"
storage_container_name = "mycontainer"
example_file_path = "..\\sampledata\\sample.log"
policy_name = "mysaspolicy"

# Create the blob service, using the name and key for your Azure Storage account
blob_service = BlockBlobService(storage_account_name, storage_account_key)

# Create the container, if it does not already exist
blob_service.create_container(storage_container_name)

# Upload an example file to the container
blob_service.create_blob_from_path(storage_container_name, "sample.log", example_file_path)

# Create a new policy that expires after a week
access_policy = AccessPolicy(
    permission=ContainerPermissions.READ + ContainerPermissions.LIST, expiry=datetime.utcnow() + timedelta(weeks=1)
)


# Get the existing identifiers (policies) for the container
identifiers = blob_service.get_container_acl(storage_container_name)
# And add the new one to the list
identifiers[policy_name] = access_policy

# Set the container to the updated list of identifiers (policies)
blob_service.set_container_acl(storage_container_name, identifiers)
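Once the stored access policy is in place, a SAS token can reference it by id instead of embedding permissions and expiry inline; a minimal sketch using the same legacy API:

# Generate a container-level SAS tied to the stored policy created above.
sas_token = blob_service.generate_container_shared_access_signature(
    storage_container_name,
    id=policy_name)

# Build a readable URL for the uploaded sample file using that token.
sas_url = blob_service.make_blob_url(storage_container_name, "sample.log",
                                     sas_token=sas_token)
print(sas_url)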
Code Example #44
File: azure.py Project: jdavisp3/pghoard
class AzureTransfer(BaseTransfer):
    def __init__(self, account_name, account_key, bucket_name, prefix=None):
        prefix = "{}".format(prefix.lstrip("/") if prefix else "")
        super().__init__(prefix=prefix)
        self.account_name = account_name
        self.account_key = account_key
        self.container_name = bucket_name
        self.conn = BlockBlobService(account_name=self.account_name, account_key=self.account_key)
        self.container = self.get_or_create_container(self.container_name)
        self.log.debug("AzureTransfer initialized, %r", self.container_name)

    def get_metadata_for_key(self, key):
        key = self.format_key_for_backend(key, remove_slash_prefix=True, trailing_slash=False)
        results = self._list_blobs(key)
        if not results:
            raise FileNotFoundFromStorageError(key)
        return results[0]["metadata"]

    def _metadata_for_key(self, key):
        return self._list_blobs(key)[0]["metadata"]

    def list_path(self, key, trailing_slash=True):  # pylint: disable=arguments-differ
        # Trailing slash needed when listing directories, without when listing individual files
        path = self.format_key_for_backend(key, remove_slash_prefix=True, trailing_slash=trailing_slash)
        return self._list_blobs(path)

    def _list_blobs(self, path):
        self.log.debug("Listing path %r", path)
        if path:
            items = self.conn.list_blobs(self.container_name, prefix=path, delimiter="/", include="metadata")
        else:  # If you give Azure an empty path, it gives you an authentication error
            items = self.conn.list_blobs(self.container_name, delimiter="/", include="metadata")
        results = []
        for item in items:
            if not isinstance(item, BlobPrefix):
                results.append({
                    "last_modified": item.properties.last_modified,
                    # Azure Storage cannot handle '-' so we turn them into underscores and back again
                    "metadata": dict((k.replace("_", "-"), v) for k, v in item.metadata.items()),
                    "name": self.format_key_from_backend(item.name),
                    "size": item.properties.content_length,
                })
        return results

    def delete_key(self, key):
        key = self.format_key_for_backend(key, remove_slash_prefix=True)
        self.log.debug("Deleting key: %r", key)
        try:
            return self.conn.delete_blob(self.container_name, key)
        except azure.common.AzureMissingResourceHttpError as ex:
            raise FileNotFoundFromStorageError(key) from ex

    def get_contents_to_file(self, key, filepath_to_store_to, *, progress_callback=None):
        key = self.format_key_for_backend(key, remove_slash_prefix=True)

        self.log.debug("Starting to fetch the contents of: %r to: %r", key, filepath_to_store_to)
        try:
            self.conn.get_blob_to_path(self.container_name, key, filepath_to_store_to)
        except azure.common.AzureMissingResourceHttpError as ex:
            raise FileNotFoundFromStorageError(key) from ex

        if progress_callback:
            progress_callback(1, 1)
        return self._metadata_for_key(key)

    def get_contents_to_fileobj(self, key, fileobj_to_store_to, *, progress_callback=None):
        key = self.format_key_for_backend(key, remove_slash_prefix=True)

        self.log.debug("Starting to fetch the contents of: %r", key)
        try:
            self.conn.get_blob_to_stream(self.container_name, key, fileobj_to_store_to)
        except azure.common.AzureMissingResourceHttpError as ex:
            raise FileNotFoundFromStorageError(key) from ex

        if progress_callback:
            progress_callback(1, 1)

        return self._metadata_for_key(key)

    def get_contents_to_string(self, key):
        key = self.format_key_for_backend(key, remove_slash_prefix=True)
        self.log.debug("Starting to fetch the contents of: %r", key)
        try:
            blob = self.conn.get_blob_to_bytes(self.container_name, key)
            return blob.content, self._metadata_for_key(key)
        except azure.common.AzureMissingResourceHttpError as ex:
            raise FileNotFoundFromStorageError(key) from ex

    def store_file_from_memory(self, key, memstring, metadata=None):
        key = self.format_key_for_backend(key, remove_slash_prefix=True)
        self.conn.create_blob_from_bytes(self.container_name, key, memstring,
                                         metadata=self.sanitize_metadata(metadata, replace_hyphen_with="_"))

    def store_file_from_disk(self, key, filepath, metadata=None, multipart=None):
        key = self.format_key_for_backend(key, remove_slash_prefix=True)
        self.conn.create_blob_from_path(self.container_name, key, filepath,
                                        metadata=self.sanitize_metadata(metadata, replace_hyphen_with="_"))

    def get_or_create_container(self, container_name):
        start_time = time.monotonic()
        self.conn.create_container(container_name)
        self.log.debug("Got/Created container: %r successfully, took: %.3fs",
                       container_name, time.monotonic() - start_time)
        return container_name
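A hedged usage sketch for this transfer class, exercising the methods defined above (credentials and key names are placeholders, and the pghoard base-class wiring is assumed to be importable):

# Sketch only: store, inspect, list and delete a key through AzureTransfer.
transfer = AzureTransfer("YOURACCOUNT", "YOURKEY", "backups", prefix="site1")
transfer.store_file_from_memory("wal/000000010000000000000001", b"data",
                                metadata={"compression-algorithm": "snappy"})
print(transfer.get_metadata_for_key("wal/000000010000000000000001"))
print(transfer.list_path("wal"))
transfer.delete_key("wal/000000010000000000000001")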
Code example #45
0
File: AzureStorage.py Project: beebeeep/cacus
class AzureStorage(plugins.IStoragePlugin):

    def configure(self, config):
        self.storage = BlockBlobService(account_name=config['account_name'], account_key=config['account_key'])
        self.container = config['container']
        try:
            container = self.storage.get_container_properties(self.container)
            log.info("Configuring Azure blob storage %s/%s", self.storage.account_name, self.container)
        except AzureMissingResourceHttpError as e:
            log.warning("Container '%s' is missing in account '%s', trying to create new", self.container, self.storage.account_name)
            try:
                self.storage.create_container(self.container)
                self.storage.set_container_acl(self.container, public_access=PublicAccess.Container)
            except Exception as e:
                log.critical("Cannot create new container: %s", e)
                raise plugins.PluginInitException("Cannot create new container")
        except AzureHttpError as e:
            log.critical("Cannot access container '%s' in account '%s': %s", self.container, self.storage.account_name, e)
            raise plugins.PluginInitException("Cannot access container")
        except Exception as e:
            log.critical("Cannot access container '%s' in account '%s': %s", self.container, self.storage.account_name, e)
            raise plugins.PluginInitException("Cannot access container")

    def delete(self, key):
        log.info("Deleting file '%s' from %s/%s", key, self.storage.account_name, self.container)
        try:
            self.storage.delete_blob(self.container, key)
        except AzureMissingResourceHttpError:
            log.error("File '%s' was not found in %s/%s", key, self.storage.account_name, self.container)
            raise common.NotFound('File not found')
        except Exception as e:
            log.error("Cannot delete '%s' from %s/%s: %s", key, self.storage.account_name, self.container, e)
            raise common.FatalError(e)

    def put(self, key, filename=None, file=None):
        storage_key = key
        try:
            if filename:
                log.debug("Uploading %s to %s", filename, self.storage.make_blob_url(self.container, storage_key))
                self.storage.create_blob_from_path(self.container, storage_key, filename, content_settings=ContentSettings(content_type='application/octet-stream'))
            elif file:
                old_pos = file.tell()
                file.seek(0)
                log.debug("Uploading from stream to %s", self.storage.make_blob_url(self.container, storage_key))
                self.storage.create_blob_from_stream(self.container, storage_key, file, content_settings=ContentSettings(content_type='application/octet-stream'))
                file.seek(old_pos)
        except Exception as e:
            # TODO: more detailed error inspection
            log.critical("Error uploading to %s/%s: %s", self.storage.account_name, self.container, e)
            raise common.FatalError(e)
        return storage_key


    def get(self, key, stream):
        # current azure python sdk barely can work with non-seekable streams,
        # so we have to implement chunking by our own
        # TODO: proper ranging? RFC 7233 says the server SHOULD return 416 once a range is
        # unsatisfiable, but Azure accepts an end position past the blob length as long as the blob is not empty
        chunk_size = 4*1024*1024
        chunk_start = 0
        chunk_end = chunk_size - 1
        while True:
            try:
                chunk = self.storage._get_blob(self.container, key, start_range=chunk_start, end_range=chunk_end)
                log.debug("Writing %s bytes from %s", len(chunk.content), chunk_start)
                stream.write(chunk.content)
            except IOError:
                # remote side closed connection
                return
            except AzureMissingResourceHttpError as e:
                raise common.NotFound(e)
            except (AzureHttpError, AzureException) as e:
                raise common.TemporaryError('Error while downloading {}: {}'.format(key, e))

            chunk_start, chunk_end, blob_size = map(int, re.match(r'^bytes\s+(\d+)-(\d+)/(\d+)$', chunk.properties.content_range).groups())
            if chunk_end == blob_size - 1:
                # no more data to stream
                break
            else:
                chunk_start = chunk_end + 1
                chunk_end += chunk_size
        return 0
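A hedged usage sketch for the chunked download above, streaming a blob into an in-memory buffer (configuration values and the blob key are placeholders):

# Sketch: configure the plugin and stream a stored key into a BytesIO via get().
import io

storage = AzureStorage()
storage.configure({'account_name': 'YOURACCOUNT', 'account_key': 'YOURKEY',
                   'container': 'mycontainer'})
buf = io.BytesIO()
storage.get('pool/main/h/hello/hello_1.0.deb', buf)
print(len(buf.getvalue()))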
Code example #46
0
class AzureBlobWriter(BaseWriter):
    """
    Writes items to azure blob containers.

        - account_name (str)
            Public access name of the Azure account.

        - account_key (str)
            Public access key to the Azure account.

        - container (str)
            Blob container name.
    """
    supported_options = {
        'account_name': {'type': six.string_types, 'env_fallback': 'EXPORTERS_AZUREWRITER_NAME'},
        'account_key': {'type': six.string_types, 'env_fallback': 'EXPORTERS_AZUREWRITER_KEY'},
        'container': {'type': six.string_types}
    }
    hash_algorithm = 'md5'
    VALID_CONTAINER_NAME_RE = r'[a-zA-Z0-9-]{3,63}'

    def __init__(self, *args, **kw):
        from azure.storage.blob import BlockBlobService
        super(AzureBlobWriter, self).__init__(*args, **kw)
        account_name = self.read_option('account_name')
        account_key = self.read_option('account_key')

        self.container = self.read_option('container')
        if '--' in self.container or not re.match(self.VALID_CONTAINER_NAME_RE, self.container):
            help_url = ('https://azure.microsoft.com/en-us/documentation'
                        '/articles/storage-python-how-to-use-blob-storage/')
            warnings.warn("Container name %s doesn't conform with naming rules (see: %s)"
                          % (self.container, help_url))

        self.azure_service = BlockBlobService(account_name, account_key)
        self.azure_service.create_container(self.container)
        self.logger.info('AzureBlobWriter has been initiated. '
                         'Writing to container {}'.format(self.container))
        self.set_metadata('files_counter', 0)
        self.set_metadata('blobs_written', [])

    def write(self, dump_path, group_key=None):
        self.logger.info('Start uploading {} to {}'.format(dump_path, self.container))
        self._write_blob(dump_path)
        self.set_metadata('files_counter', self.get_metadata('files_counter') + 1)

    @retry_long
    def _write_blob(self, dump_path):
        blob_name = dump_path.split('/')[-1]
        self.azure_service.create_blob_from_path(
            self.read_option('container'),
            blob_name,
            dump_path,
            max_connections=5,
        )
        self.logger.info('Saved {}'.format(blob_name))
        self._update_metadata(dump_path, blob_name)

    def _update_metadata(self, dump_path, blob_name):
        buffer_info = self.write_buffer.metadata[dump_path]
        file_info = {
            'blob_name': blob_name,
            'size': buffer_info['size'],
            'hash': b64encode(unhexlify(buffer_info['file_hash'])),
            'number_of_records': buffer_info['number_of_records']
        }
        self.get_metadata('blobs_written').append(file_info)

    def _check_write_consistency(self):
        from azure.common import AzureMissingResourceHttpError
        for blob_info in self.get_metadata('blobs_written'):
            try:
                blob = self.azure_service.get_blob_properties(
                    self.read_option('container'), blob_info['blob_name'])
                blob_size = blob.properties.content_length
                blob_md5 = blob.properties.content_settings.content_md5
                if str(blob_size) != str(blob_info['size']):
                    raise InconsistentWriteState(
                        'File {} has unexpected size. (expected {} - got {})'.format(
                            blob_info['blob_name'], blob_info['size'], blob_size
                        )
                    )
                if str(blob_md5) != str(blob_info['hash']):
                    raise InconsistentWriteState(
                        'File {} has unexpected hash. (expected {} - got {})'.format(
                            blob_info['blob_name'], blob_info['hash'], blob_md5
                        )
                    )

            except AzureMissingResourceHttpError:
                raise InconsistentWriteState('Missing blob {}'.format(blob_info['blob_name']))
        self.logger.info('Consistency check passed')
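The consistency check compares Azure's content_md5 property, which is a base64 encoding of the raw MD5 digest, against the hash recorded by the write buffer. A minimal sketch of computing the same value locally, assuming a file path:

# Sketch: compute the base64-encoded MD5 that Azure exposes as content_md5.
import hashlib
from base64 import b64encode

def local_content_md5(path):
    md5 = hashlib.md5()
    with open(path, 'rb') as f:
        for chunk in iter(lambda: f.read(1024 * 1024), b''):
            md5.update(chunk)
    return b64encode(md5.digest())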
Code example #47
0
File: __init__.py Project: bfaludi/azrcmd
class BlobStorage(object):
    # void
    def __init__(self, wasbs_path, dryrun=False):
        parsed = urlparse(wasbs_path)
        if parsed.scheme not in ('wasbs', 'wasb'):
            raise InvalidBlobStorePath('Remote path is not supported! Expected format: `wasb[s]://container/blob-path`')

        self.dryrun = dryrun
        self.schema, self.container, self.blob_path = parsed.scheme, parsed.netloc, parsed.path
        if self.blob_path and self.blob_path[0] == u'/':
            self.blob_path = self.blob_path[1:]

        self.blob_path = self.blob_path or None
        self.pbar = None
        self.service = BlockBlobService(
            account_name=os.environ['AZURE_STORAGE_ACCOUNT'].strip(), 
            account_key=os.environ['AZURE_STORAGE_ACCESS_KEY'].strip())

    @property
    def url(self):
        return u'{}://{}'.format(self.schema, self.container)

    @property
    def path(self):
        return os.path.join(self.url, self.blob_path)

    # Blob
    def get_blob(self):
        for blob in self.list_blobs():
            if blob.path == self.blob_path:
                return blob

    # genexp<list<Blob>>
    def list_blobs(self):
        marker = None
        while True:
            batch = self.service.list_blobs(self.container, prefix=self.blob_path, marker=marker)
            for blob in batch:
                yield Blob(self, blob)
            if not batch.next_marker:
                break
            marker = batch.next_marker

    # void
    def execute(self, executable_fn, message, end=None, **kwargs):
        # Print the original message
        print(message % kwargs, end=end)

        # If dryrun, write the message and exit
        if self.dryrun:
            print('IGNORE (--dryrun)')
            return

        try:
            executable_fn(**kwargs)
            print('OK')
        except Exception as e:
            print('FAIL\n{}'.format(e))

    # void
    def remove_fn(self, path, url=None):
        self.service.delete_blob(self.container, path)

    # void
    def remove_blobs(self, prefix=False):
        if not self.blob_path:
            print(u'You must specify the path of the blob.')
            sys.exit(1)

        if not prefix:
            return self.execute(self.remove_fn, 'Remove blob from `%(url)s` ... ', path=self.blob_path, url=self.path, end='')

        for blob in self.list_blobs():
            self.execute(self.remove_fn, 'Remove blob from `%(url)s` ... ', path=blob.path, url=blob.url, end='')

    # void
    def upload_fn(self, blob_path, file_path, rel_file_path=None, url=None):
        self.service.create_blob_from_path(self.container, blob_path, file_path, \
            max_connections=int(os.environ.get('AZURE_STORAGE_MAX_CONNECTIONS',1)), \
            progress_callback=self.show_progress)
        self.pbar.finish()
        self.pbar = None

    # tuple<str,str>
    def get_upload_path_pair(self, file_path, common_prefix=None):
        is_directory_ending = self.blob_path and self.blob_path.endswith('/')
        is_container_path = self.blob_path is None

        blob_path = os.path.join(self.blob_path or u'', os.path.split(file_path)[-1]) \
            if any([is_container_path, is_directory_ending]) and common_prefix is None \
            else self.blob_path

        if common_prefix and blob_path:
            blob_path = os.path.join(blob_path, file_path.split(common_prefix)[-1].strip('/'))
        elif common_prefix and not blob_path:
            blob_path = file_path.split(common_prefix)[-1].strip('/')
        elif common_prefix == u'' and blob_path:
            blob_path = os.path.join(blob_path, file_path.strip('/'))
        elif common_prefix == u'' and not blob_path:
            blob_path = file_path.strip('/')

        return (file_path, blob_path)

    # genexp<tuple<str,str>>
    def get_upload_path_pairs(self, file_paths):
        if len(file_paths) == 1:
            yield self.get_upload_path_pair(file_paths[0])
            return

        common_prefix = os.path.split(os.path.commonprefix(file_paths))[0]
        if self.blob_path and not self.blob_path.endswith('/'): self.blob_path += '/'
        for file_path in file_paths:
            yield self.get_upload_path_pair(file_path, common_prefix=common_prefix)

    # void
    def upload_blobs(self, file_paths):
        for file_path, blob_path in self.get_upload_path_pairs(file_paths):
            self.execute(self.upload_fn, 'Upload `%(rel_file_path)s` into `%(url)s`', \
                file_path=file_path, rel_file_path=os.path.relpath(file_path), blob_path=blob_path, \
                url=u'{}/{}'.format(self.url, blob_path))

    # void
    def show_progress(self, current, total):
        def filesize(n, base=1024.0, units=[''] + [p + 'i' for p in 'KMGTPEZY']):
            # Pick the largest binary unit that keeps the value at or above 1.
            power = min(int(log(max(n, 1), base)), len(units) - 1)
            precision = 0 if power == 0 else 1
            return '%.*f %sB' % (precision, n / base ** power, units[power])

        if self.pbar is None:
            self.pbar = ProgressBar(widgets=[
                    ' '*5,
                    'Size: {}'.format(filesize(total)),
                    ' ',
                    Percentage(), 
                    ' ',
                    Bar(),
                    ' ', 
                    ETA(),
                    ' ', 
                    FileTransferSpeed(),
                    ' '*5
                ], 
                maxval=total
            ).start()

        self.pbar.update(current)

    # void
    def download_fn(self, blob_path, file_path, **kwargs):
        self.service.get_blob_to_path(self.container, blob_path, file_path, \
            max_connections=int(os.environ.get('AZURE_STORAGE_MAX_CONNECTIONS',1)), \
            progress_callback=self.show_progress)
        self.pbar.finish()
        self.pbar = None

    # tuple<str,str>
    def get_download_path_pair(self, blob_path, file_path, common_prefix=None):
        file_path = os.path.join(file_path, os.path.split(blob_path)[-1]) \
            if os.path.exists(file_path) and os.path.isdir(file_path) and common_prefix is None \
            else file_path

        if common_prefix:
            file_path = os.path.join(file_path, blob_path.split(common_prefix)[-1].strip('/'))
        elif common_prefix == u'':
            file_path = os.path.join(file_path, blob_path.strip('/'))

        dir_path = os.path.split(file_path)[0]
        if dir_path and not os.path.exists(dir_path):
            os.makedirs(dir_path)

        return blob_path, file_path

    # genexp<tuple<str,str>>
    def get_download_path_pairs(self, file_path, prefix=False, skip_existing=False, sync=False):
        # Ignore if no blob path is defined.
        if not self.blob_path:
            raise BlobPathRequired(u'Blob path is required for `get` command.')

        # Single file download scenario.
        if not prefix:
            # Skip if the file already exists.
            if skip_existing and os.path.exists(file_path):
                return

            # Only download files that are missing or outdated (based on file size).
            if sync and os.path.exists(file_path):
                blob = self.get_blob()
                if blob and get_fresher(blob, file_path) != blob:
                    return

            # Return the calculated path of the file.
            yield self.get_download_path_pair(self.blob_path, file_path)
            return

        # List the blobs with the given prefix in the ABS.
        blob_paths, blob_paths_dict = [], {}
        for blob in self.list_blobs():
            blob_paths.append(blob.path)
            blob_paths_dict[blob.path] = blob

        # Determine the common prefix between the blobs.
        common_prefix = os.path.dirname(self.blob_path) \
            if not self.blob_path.endswith('/') \
            else self.blob_path
        resolved_file_paths = []

        for blob_path in blob_paths:
            # Determine the input, output path pairs.
            bp, fp = self.get_download_path_pair(blob_path, file_path, common_prefix=common_prefix)

            # If any of the files want to write to the same file, raise an error.
            if fp in resolved_file_paths:
                raise DirectoryRequired('Can not use the same path (`{}`) for multiple blobs!' \
                    .format(fp))

            # Ignore files that already exist.
            if skip_existing and os.path.exists(fp):
                continue

            # Only download files that are missing or outdated (based on file size).
            if sync and os.path.exists(fp) and get_fresher(blob_paths_dict[blob_path], fp) != blob_paths_dict[blob_path]:
                continue

            resolved_file_paths.append(fp)
            yield bp, fp

    # void
    def download_blobs(self, file_path, prefix=False, skip_existing=False, sync=False):
        # Iterates over the final input, output paths and download them.
        for blob_path, file_path in self.get_download_path_pairs(file_path, prefix=prefix, skip_existing=skip_existing, sync=sync):
            self.execute(self.download_fn, 'Download `%(url)s` into `%(rel_file_path)s`', \
                blob_path=blob_path, file_path=file_path, rel_file_path=os.path.relpath(file_path), \
                url=u'{}/{}'.format(self.url, blob_path))
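A hedged usage sketch for this wrapper, assuming AZURE_STORAGE_ACCOUNT and AZURE_STORAGE_ACCESS_KEY are exported and that the local paths exist:

# Sketch: upload two files under a blob prefix, then mirror them back down.
store = BlobStorage('wasbs://mycontainer/backups/')
store.upload_blobs(['/tmp/a.log', '/tmp/b.log'])
store.download_blobs('/tmp/restore', prefix=True, skip_existing=True)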
Code example #48
0
        return json_fpath

if __name__ == "__main__":

    parser = argparse.ArgumentParser(description="Processes and stores data into hbase")
    parser.add_argument("--container",dest="container")
    parser.add_argument("--pattern",dest="pattern")    
    
    args = parser.parse_args()   
    container = args.container
    pattern = args.pattern   

    print("Processing files from container : " + str(container))
    if pattern is not None:
        blob_list = block_blob_service.list_blobs(container_name=container, prefix=pattern)
    else:
        blob_list = block_blob_service.list_blobs(container_name=container)
    
    for blob in blob_list:
        print("Processing blob : " + blob.name)
        blob_name = getfilename(blob.name)
        downloadedblob = "downloaded_" + blob_name 
        # Keep the default binary open mode; open_mode='w' would corrupt non-text blobs
        block_blob_service.get_blob_to_path(container_name=container, blob_name=blob.name, file_path=downloadedblob)
        if pattern is not None:
            json_outpath = processcsvfile(fname=downloadedblob, seperator="|", outfname=blob_name,
                                          outdir='jsonfiles/' + container + "/" + pattern)
            print("uploading blob " + json_outpath)
            block_blob_service.create_blob_from_path(container_name=container,
                                                     blob_name=pattern + 'json/' + blob_name + ".json",
                                                     file_path=json_outpath)
        else:
            json_outpath = processcsvfile(fname=downloadedblob, seperator="|", outfname=blob_name,
                                          outdir='jsonfiles/' + container + "/")
            print("uploading blob " + json_outpath)
            block_blob_service.create_blob_from_path(container_name=container,
                                                     blob_name='json/' + blob_name + ".json",
                                                     file_path=json_outpath)
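For reference, a hypothetical invocation (flag names taken from the argparse setup above): python process_blobs.py --container mycontainer --pattern 2019-01, which downloads each matching blob, converts the pipe-delimited CSV to JSON, and re-uploads the result under a json/ prefix. The script name is a placeholder; processcsvfile and getfilename are helpers defined elsewhere in the source file.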
Code example #49
0
    api_key = "56395e3fd1f33296c424c2c6e3ee008a"
    lat = i.latitude
    lng = i.longitude
    forecast = forecastio.load_forecast(api_key, lat, lng)
    bydaily = forecast.daily()
    day=bydaily.data[1]
    temp = temp.append(pd.Series(
        [i.cities, day.time, day.temperatureMin, day.temperatureMax,
         day.precipProbability, day.humidity, day.windSpeed, day.summary],
        index=['Cities', 'Day', 'Min_Temperature', 'Max_Temperature',
               'Precipitation_prob', 'Humidity', 'Wind_Speed', 'Summary']),
        ignore_index=True)


# Append each day's data to the previously accumulated history

Weather = Weather.append(temp, ignore_index=True)


# Store the updated dataframe locally as a temporary CSV

Weather.to_csv('Temp_Weather.csv')


# Copy the updated weather data from the local file system to HDFS, i.e. the cluster's Blob storage

from azure.storage.blob import ContentSettings
block_blob_service.create_blob_from_path(
    'arnabcluster',
    'Project/Weather/Weather.csv',
    'Temp_Weather.csv',
    content_settings=ContentSettings(content_type='text/csv')
            )
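This snippet assumes Weather already holds the previously accumulated history; a hedged sketch of the implied download step that would precede it (same container and paths as above):

# Sketch: fetch the accumulated CSV from Blob storage before appending today's rows.
import pandas as pd

block_blob_service.get_blob_to_path('arnabcluster', 'Project/Weather/Weather.csv',
                                    'Temp_Weather.csv')
Weather = pd.read_csv('Temp_Weather.csv', index_col=0)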

Code example #50
0
File: perfectND.py Project: chpmoreno/PerfectND
                      #O,O,X,X,X,O,O,O,
                      #O,O,X,O,0,X,O,O,
                      #O,O,X,O,O,0,X,O,
                      #O,O,X,0,0,0,X,O,
                      #O,O,X,O,O,0,X,O,
                      #O,O,X,O,O,0,X,O,
                      #O,O,X,O,0,X,O,O,
                      #O,O,X,X,X,O,O,O]
        if stink == True:
            X = (255,0,0)
            indx = [2,3,4,10,13,18,22,26,30,34,38,42,46,50,53,58,59,60]
            for j in indx:
                pixels[j] = X
            
        hat.set_pixels(pixels)
                  
        with open('data.csv', 'a') as data_file:
            csv_file = csv.writer(data_file, delimiter=',')
            csv_file.writerow([timestamp, temp, temp_p, hum, pressure])
        
        
        time.sleep(60)
    except KeyboardInterrupt:
        break

# Clear sensor and quit
hat.clear()
block_blob.create_blob_from_path('example', 'final_output.csv', 'data.csv')
print '\rSensor Cleared'
sys.exit()
Code example #51
0
File: azure.py Project: edwardvalentini/pghoard
class AzureTransfer(BaseTransfer):
    def __init__(self, account_name, account_key, container_name, prefix=None):
        # NOTE: Azure wants all paths to start with a slash
        prefix = "/{}".format(prefix.lstrip("/") if prefix else "")
        super().__init__(prefix=prefix)
        self.account_name = account_name
        self.account_key = account_key
        self.container_name = container_name
        self.conn = BlockBlobService(account_name=self.account_name, account_key=self.account_key)
        self.container = self.get_or_create_container(self.container_name)
        self.log.debug("AzureTransfer initialized")
        # XXX: AzureTransfer isn't actively tested and its error handling is probably lacking
        # self.log.warning("AzureTransfer is experimental and has not been thoroughly tested")

    def get_metadata_for_key(self, key):
        key = self.format_key_for_backend(key)
        return self._list_blobs(key)[0]["metadata"]

    def _metadata_for_key(self, key):
        return self._list_blobs(key)[0]["metadata"]

    def list_path(self, key):
        path = self.format_key_for_backend(key, trailing_slash=True)
        return self._list_blobs(path)

    def _list_blobs(self, path):
        self.log.debug("Listing path %r", path)
        items = self.conn.list_blobs(self.container_name, prefix=path, delimiter="/", include="metadata")
        result = []
        for item in items:
            result.append({
                "last_modified": item.properties.last_modified.isoformat(),
                "metadata": item.metadata,
                "name": self.format_key_from_backend(item.name),
                "size": item.properties.content_length,
            })
        return result

    def delete_key(self, key):
        key = self.format_key_for_backend(key)
        self.log.debug("Deleting key: %r", key)
        return self.conn.delete_blob(self.container_name, key)

    def get_contents_to_file(self, key, filepath_to_store_to, *, progress_callback=None):
        key = self.format_key_for_backend(key)
        self.log.debug("Starting to fetch the contents of: %r to: %r", key, filepath_to_store_to)
        meta = self.conn.get_blob_to_path(self.container_name, key, filepath_to_store_to)
        if progress_callback:
            progress_callback(1, 1)
        return meta

    def get_contents_to_fileobj(self, key, fileobj_to_store_to, *, progress_callback=None):
        key = self.format_key_for_backend(key)
        self.log.debug("Starting to fetch the contents of: %r", key)
        meta = self.conn.get_blob_to_stream(self.container_name, key, fileobj_to_store_to)
        if progress_callback:
            progress_callback(1, 1)
        return meta

    def get_contents_to_string(self, key):
        key = self.format_key_for_backend(key)
        self.log.debug("Starting to fetch the contents of: %r", key)
        blob = self.conn.get_blob_to_bytes(self.container_name, key)
        return blob.content, self._metadata_for_key(key)

    def store_file_from_memory(self, key, memstring, metadata=None):
        key = self.format_key_for_backend(key)
        self.conn.create_blob_from_bytes(self.container_name, key, memstring,
                                         metadata=self.sanitize_metadata(metadata))

    def store_file_from_disk(self, key, filepath, metadata=None, multipart=None):
        key = self.format_key_for_backend(key)
        self.conn.create_blob_from_path(self.container_name, key, filepath,
                                        metadata=self.sanitize_metadata(metadata))

    def get_or_create_container(self, container_name):
        start_time = time.time()
        self.conn.create_container(container_name)
        self.log.debug("Got/Created container: %r successfully, took: %.3fs", container_name, time.time() - start_time)
        return container_name
Code example #52
0
    def file_uploaded(self, botengine, device_object, file_id, filesize_bytes, content_type, file_extension):
        """
        A device file has been uploaded
        :param botengine: BotEngine environment
        :param device_object: Device object that uploaded the file
        :param file_id: File ID to reference this file at the server
        :param filesize_bytes: The file size in bytes
        :param content_type: The content type, for example 'video/mp4'
        :param file_extension: The file extension, for example 'mp4'
        """
        # We are demonstrating video processing here, so avoid video processing on files that are not videos.
        if "video" not in content_type:
            botengine.get_logger().info("The uploaded file is not a video, skipping processing ...")
            return
            
        # Create full file path
        FILE_NAME = "video." + file_extension
        FILE_PATH= os.path.dirname(os.path.abspath(__file__))
        FULL_FILE_PATH =os.path.join(FILE_PATH, FILE_NAME)

        # Download file to local device
        botengine.download_file(file_id, FILE_NAME)

        # Create Blob service and upload file to Microsoft Blob Storage
        block_blob_service = BlockBlobService(account_name='andresdemo', account_key='YOURKEY')
        block_blob_service.create_container(CONTAINER_NAME)

        # Set Public Access to container
        # A try/except block is used due to an occasional logger exception (doesn't impact function)
        try:
            block_blob_service.set_container_acl(CONTAINER_NAME, public_access=PublicAccess.Container)
        except:
            pass
        
        # Convert the file into a blob and store it in Microsoft Azure Blob Storage
        # A try/except block is used due to an occasional logger exception (doesn't impact function)
        try:
            block_blob_service.create_blob_from_path(CONTAINER_NAME, FILE_NAME, FULL_FILE_PATH)
        except:
            pass
        
        # Get Video URL
        url = "https://" + ACCOUNT_NAME + ".blob.core.windows.net/" + CONTAINER_NAME + "/" + FILE_NAME

        # Get Access Token
        token = "";
        headers = {
            'Ocp-Apim-Subscription-Key': API_KEY,
        }
        params = urllib.urlencode({
            'allowEdit': 'True',
        })

        # HTTP GET request to Video Indexer API to acquire access token
        try:
            conn = httplib.HTTPSConnection('api.videoindexer.ai')
            conn.request("GET", "/auth/" + ACCOUNT_LOCATION + "/Accounts/" + ACCOUNT_ID + "/AccessToken?%s" % params, headers=headers)
            response = conn.getresponse()
            token = response.read()
            token = token[1:len(token)-1]
            conn.close()
        except Exception as e:
            print("[Errno {0}] {1}".format(e.errno, e.strerror))
        
        # Use Access Token to upload Video file 
        headers = {
            'Content-Type': 'multipart/form-data',
        }
        params = urllib.urlencode({
            'Content-Type': 'multipart/form-data',
            'videoUrl': url,
            'streamingPreset': 'Default',
            'privacy': "Public"
        })
        try:
            conn = httplib.HTTPSConnection('api.videoindexer.ai')
            conn.request("POST", "/" + ACCOUNT_LOCATION + "/Accounts/" + ACCOUNT_ID + "/Videos?accessToken=" +token + "&name=Sample&%s" % params, headers=headers)
            response = conn.getresponse()
            data = response.read()
            d = json.loads(data)
            conn.close()
        except Exception as e:
            print("[Errno {0}] {1}".format(e.errno, e.strerror))

        botengine.get_logger().info('Video Processing..')

        # Get Video Index
        params = urllib.urlencode({
            'accessToken': token,
            'language': 'English',
        })
        conn = httplib.HTTPSConnection('api.videoindexer.ai')
        conn.request("GET", "/" + ACCOUNT_LOCATION + "/Accounts/" + ACCOUNT_ID + "/Videos/"+d["id"]+"/Index?%s" % params)
        response = conn.getresponse()
        data = response.read()
        result = json.loads(data)
        # Set a timer to queue for results
        # Passing in Video ID and Access Token
        self.start_timer_s(botengine, 5, [d["id"], token])
        return
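When the timer fires, the bot presumably re-queries the same Index endpoint until processing completes. A hedged sketch of such a callback, reusing the httplib pattern above (the callback name and signature are assumptions about the bot framework, not a confirmed API):

    def timer_fired(self, botengine, argument):
        # Sketch only: poll the Video Indexer index until the video is processed.
        video_id, token = argument
        params = urllib.urlencode({'accessToken': token, 'language': 'English'})
        conn = httplib.HTTPSConnection('api.videoindexer.ai')
        conn.request("GET", "/" + ACCOUNT_LOCATION + "/Accounts/" + ACCOUNT_ID + "/Videos/" + video_id + "/Index?%s" % params)
        result = json.loads(conn.getresponse().read())
        conn.close()
        if result.get("state") != "Processed":
            self.start_timer_s(botengine, 5, argument)  # not ready yet, check again in 5 seconds
            return
        botengine.get_logger().info("Video insights ready: %s" % result.get("summarizedInsights"))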
Code example #53
0
from azure.storage.blob import BlockBlobService
from azure.storage.blob import PublicAccess
from azure.storage.blob import ContentSettings


AzureStorageAccount = "istagingstorage"
AzureStorageAccessKey = "qkFU/ah2v4cHvQ7oAZASb2HRGFUkJhg2xs5KBYB+2fEnYmSp6hZH9U3vEO6TujzHHdBF3HWVgqalwcUuvIBMUQ=="
AzureStorageContainerName = "peter-container"

block_blob_service = BlockBlobService(account_name=AzureStorageAccount, account_key=AzureStorageAccessKey)

block_blob_service.create_blob_from_path(
    AzureStorageContainerName,
    'out.jpg',
    'img.jpg',
    content_settings=ContentSettings(content_type='image/jpeg')
)
Code example #54
0
import sys
import datetime

import pytz
from azure.storage.blob import BlockBlobService

if __name__ == '__main__':

    parms = sys.argv

    input_block_blob_service = BlockBlobService(account_name=parms[1], sas_token=parms[2], protocol='https')
    input_block_blob_service.get_blob_to_path(parms[3], parms[4], 'input.txt')

    input_file=open("input.txt","r")
    wordcount={}
    for word in input_file.read().split():
        if word not in wordcount:
            wordcount[word] = 1
        else:
            wordcount[word] += 1

    input_file.close()

    now = datetime.datetime.now(pytz.timezone("Asia/Tokyo"))
    fmt = "%Y-%m-%d-%H-%M-%S-%Z"
    output_filename = now.strftime(fmt) + ".txt"

    output_file=open(output_filename,"w")
    for k,v in wordcount.items():
        output_file.writelines(k + " " + str(v) + "\n")

    output_file.close()

    output_block_blob_service = BlockBlobService(account_name=parms[5], sas_token=parms[7], protocol='https')
    output_block_blob_service.create_blob_from_path(parms[6], output_filename, output_filename)
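A hypothetical invocation matching the positional parameters above: python wordcount.py <in_account> <in_sas> <in_container> <in_blob> <out_account> <out_container> <out_sas> (the script name is a placeholder). Note the asymmetry at the end: the output SAS token is parms[7] while the output container is parms[6].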
Code example #55
0
#!/usr/bin/python

import sys,os
from azure.storage.blob import BlockBlobService
from azure.storage.blob import ContentSettings


block_blob_service = BlockBlobService(account_name=str(sys.argv[1]), account_key=str(sys.argv[2]))
block_blob_service.create_container('keys')

block_blob_service.create_blob_from_path(
    'keys',
    str(sys.argv[3]),
    os.path.join(os.getcwd(),str(sys.argv[3])),
    content_settings=ContentSettings(content_type='text/plain')
)
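A hypothetical invocation: python upload_key.py <account_name> <account_key> <key_filename>, where the key file sits in the current working directory and the script name is a placeholder; the blob is stored in the keys container under the file's own name.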