Beispiel #1
0
class AzureStorage:
    """Thin wrapper around BlockBlobService for a single blob container."""

    def __init__(self, connectionString, container):
        # Parse the account name out of the connection string so public
        # blob URLs can be built without extra service calls.
        self.BlobService = BlockBlobService(connection_string=connectionString)
        nameValue = UtilityHelper.connectStringToDictionary(connectionString)
        self.AccountName = nameValue['AccountName']
        self.container = container

    def getBaseURL(self):
        """Return the account's public blob endpoint, with trailing slash."""
        return 'https://' + self.AccountName + '.blob.core.windows.net/'

    def _blobURL(self, remoteBlobName):
        """Build the public URL of a blob in this container.

        Fixes the original duplication: the URL string was rebuilt inline
        in three upload methods even though getBaseURL() existed.
        """
        return self.getBaseURL() + self.container + '/' + remoteBlobName

    def uploadByLocalFile(self, localFullFileName, remoteBlobName):
        """Upload a local file and return the resulting blob URL."""
        self.BlobService.create_blob_from_path(self.container, remoteBlobName,
                                               localFullFileName)
        return self._blobURL(remoteBlobName)

    def uploadByStream(self, streamData, remoteBlobName):
        """Upload a readable stream and return the resulting blob URL."""
        self.BlobService.create_blob_from_stream(self.container,
                                                 remoteBlobName, streamData)
        return self._blobURL(remoteBlobName)

    def uploadByBytes(self, bytesData, remoteBlobName):
        """Upload raw bytes and return the resulting blob URL."""
        self.BlobService.create_blob_from_bytes(self.container, remoteBlobName,
                                                bytesData)
        return self._blobURL(remoteBlobName)

    def delete(self, blobName):
        """Delete a blob from the container."""
        self.BlobService.delete_blob(self.container, blobName)

    def copy(self, sourceBlobURL, targetBlobName):
        """Server-side copy of an existing blob URL into this container."""
        self.BlobService.copy_blob(self.container, targetBlobName,
                                   sourceBlobURL)
class BlobService(object):
    """Uploads camera images to a local "blob on edge" module, throttled so
    that at most one image is uploaded every image_upload_duration_sec."""

    def __init__(self, blobonedge_module_name, blob_account_name,
                 blob_account_key, image_container_name,
                 image_upload_duration_sec, edge_id):
        # Local (edge) blob endpoint: http://<module>:11002/<account>
        localblob_connectionstring = 'DefaultEndpointsProtocol=http;BlobEndpoint=http://' + blobonedge_module_name + ':11002/' + blob_account_name + ';AccountName=' + blob_account_name + ';AccountKey=' + blob_account_key + ';'
        # Security fix: the old code printed the whole connection string,
        # which embeds the account key. Log only non-secret endpoint info.
        print("Try to connect to blob on edge at http://" +
              blobonedge_module_name + ":11002/" + blob_account_name)
        self.blockBlobService = BlockBlobService(
            endpoint_suffix='', connection_string=localblob_connectionstring)
        print("Connected to blob on edge")
        self.blockBlobService.create_container(image_container_name)
        print('Created image container - ' + image_container_name)
        self.imageContainerName = image_container_name
        self.imageUploadDurationSec = image_upload_duration_sec
        self.edgeId = edge_id
        # Start the throttle clock so the first upload waits a full period.
        self.blobLastUploadTime = time.time()

    def upload_image_to_blob(self, image):
        """Upload *image* (a readable stream) as <edgeId>-img<timestamp>.jpg,
        but only if imageUploadDurationSec has elapsed since the last upload."""
        now = datetime.datetime.now()
        currentTime = time.time()
        print('Last Time:' + str(self.blobLastUploadTime) + '->Current:' +
              str(currentTime))
        if (currentTime - self.blobLastUploadTime >
                self.imageUploadDurationSec):
            image_file_name = self.edgeId + "-img{0:%Y%m%d%H%M%S}".format(
                now) + ".jpg"
            print('Uploading image as ' + image_file_name + ' at ' +
                  str(currentTime))
            self.blockBlobService.create_blob_from_stream(
                self.imageContainerName, image_file_name, image)
            print('Upload done')
            # Reset the throttle only after a successful upload.
            self.blobLastUploadTime = currentTime
Beispiel #3
0
def upload_file():
    """Flask view: accept one or more uploaded Excel files (field "file[]"),
    round-trip each through pandas, and store it in Azure blob storage.

    Redirects back to /upload when done; flashes an error and redirects
    if an empty filename is submitted.
    """
    if request.method == 'POST':
        uploaded_files = request.files.getlist("file[]")
        filenames = []
        for file in uploaded_files:
            if file.filename == '':
                flash('No file selected for uploading')
                return redirect(request.url)
            if file and allowed_file(file.filename):
                filename = secure_filename(file.filename)
                # Security fix: the storage account key used to be
                # hard-coded here. Read it from the environment instead
                # of committing a secret to source control.
                import os
                blobservice = BlockBlobService(
                    account_name='flaskstorage',
                    account_key=os.environ['AZURE_STORAGE_KEY'])
                # Re-export the sheet through pandas, then rewind the
                # buffer before streaming it up (the old code copied the
                # payload through bytes() and a second BytesIO for no gain).
                df = pd.read_excel(file.stream)
                data = BytesIO()
                df.to_excel(data, index=False)
                data.seek(0)
                blobservice.create_blob_from_stream('htflaskcontainer',
                                                    filename, data)
                data.close()
                filenames.append(filename)
                flash('File(s) successfully uploaded to Blob', 'filename')
        return redirect('/upload')
Beispiel #4
0
def upload_file():
    """Flask view: accept uploaded Excel files (field "file[]"), normalize
    each through pandas, and upload them to the htflaskcontainer blob
    container. Redirects to /upload when finished."""
    if request.method == 'POST':
        uploaded_files = request.files.getlist("file[]")
        filenames = []
        for file in uploaded_files:
            if file.filename == '':
                flash('No file selected for uploading')
                return redirect(request.url)
            if file and allowed_file(file.filename):
                filename = secure_filename(file.filename)
                # Security fix: replaced the hard-coded storage account key
                # with an environment lookup; the committed key should be
                # rotated.
                import os
                blobservice = BlockBlobService(
                    account_name='flaskstorage',
                    account_key=os.environ['AZURE_STORAGE_KEY'])
                # Normalize via pandas, rewind, and stream to the blob
                # (dropping the redundant bytes()/BytesIO round-trip).
                df = pd.read_excel(file.stream)
                data = BytesIO()
                df.to_excel(data, index=False)
                data.seek(0)
                blobservice.create_blob_from_stream('htflaskcontainer',
                                                    filename, data)
                data.close()
                filenames.append(filename)
                flash('File(s) successfully uploaded to Blob', 'filename')
        return redirect('/upload')
Beispiel #5
0
def pfile(answer, userId):
    """Merge *answer* into User_data.json under dic['UserInfo'][userId]
    and write the updated document back to the public blob container."""
    # Download the current user-data document; the context manager fixes
    # the original leak where the response was never closed on error.
    with urllib.request.urlopen(
            'https://blobgroup1.blob.core.windows.net/datacontainer/User_data.json'
    ) as f:
        dic = json.load(f)
    dic['UserInfo'][userId] = answer
    # Security fix: the account key used to be hard-coded in source;
    # read it from the environment and rotate the committed key.
    import os
    block_blob_service = BlockBlobService(
        account_name='blobgroup1',
        account_key=os.environ['AZURE_STORAGE_KEY'])
    # Ensure the container exists and is publicly readable.
    container_name = "datacontainer"
    block_blob_service.create_container(container_name)
    block_blob_service.set_container_acl(container_name,
                                         public_access=PublicAccess.Container)
    # Serialize the merged document and upload it from an in-memory
    # stream, rewound to the start before the transfer.
    text = json.dumps(dic)
    file_data = io.BytesIO(text.encode("UTF-8"))
    file_name = 'User_data.json'
    file_data.seek(0)
    block_blob_service.create_blob_from_stream(container_name, file_name,
                                               file_data)
Beispiel #6
0
class AzureBlobFeedStorage(BlockingFeedStorage):
    """Scrapy feed storage backend for Azure blob storage.

    Azure keys may contain '/', which confuses urlparse, so the URI is
    parsed by hand.  Expected format:

        azure://account_name:account_key@container/filename.jsonl
    """

    def __init__(self, uri):
        # Robustness fix: split credentials from the location at the FIRST
        # '@' and the account name from the key at the FIRST ':' -- the
        # old unbounded splits broke if either character recurred.
        # uri[8:] strips the "azure://" scheme prefix.
        location = uri.split('@', 1)[1]
        container = location.split('/')[0]
        filename = '/'.join(location.split('/')[1:])
        account_name, account_key = uri[8:].split('@', 1)[0].split(':', 1)

        self.account_name = account_name
        self.account_key = account_key
        self.container = container
        self.filename = filename
        self.blob_service = BlockBlobService(account_name=self.account_name,
                                             account_key=self.account_key)

    def _store_in_thread(self, file):
        # Rewind the spooled feed file Scrapy hands us, then upload whole.
        file.seek(0)
        self.blob_service.create_blob_from_stream(self.container,
                                                  self.filename, file)
Beispiel #7
0
def transfer_fileshare_to_blob(config, fileshare_uri, output_model_name):
    """Copy one file from an Azure file share into blob storage.

    NB: the transfer goes through a local temporary file.  Prediction
    CSVs land in the predictions container under a renamed blob;
    everything else goes to the trained-models container under
    "<output_model_name>/<original name>".
    """
    file_service = FileService(config.storage_account_name,
                               config.storage_account_key)
    blob_service = BlockBlobService(config.storage_account_name,
                                    config.storage_account_key)
    blob_service.create_container(config.container_trained_models)
    blob_service.create_container(config.predictions_container)

    # Strip scheme/host and any SAS query string, then break the share
    # path into share name / subdirectory / file name.
    path = fileshare_uri.split('.file.core.windows.net/')[1].split('?')[0]
    parts = path.split('/')
    share_name = parts[0]
    basename = parts[-1]
    subdirectory = '/'.join(parts[1:-1])
    blob_name = '{}/{}'.format(output_model_name, basename)

    with TemporaryFile() as tmp:
        file_service.get_file_to_stream(share_name=share_name,
                                        directory_name=subdirectory,
                                        file_name=basename,
                                        stream=tmp)
        tmp.seek(0)
        if 'predictions' in basename:
            blob_service.create_blob_from_stream(
                config.predictions_container,
                '{}_predictions_test_set.csv'.format(output_model_name), tmp)
        else:
            blob_service.create_blob_from_stream(
                config.container_trained_models, blob_name, tmp)
def uploadFile(stream=None):
    """Upload an image stream to blob storage as "image4.jpg".

    Backward compatible: with no argument it falls back to the
    module-level ``file`` object's ``.raw`` stream, exactly as the
    original code did (that global is not defined in this excerpt --
    TODO confirm where it comes from).

    NOTE(review): the credentials below are placeholders ("xxxx") and
    must be supplied before this can run.
    """
    account_key = "xxxx"  # fixed local typo: was "accountey"
    account_name = "xxxxx"
    container_name = "xxx-xxx"

    if stream is None:
        # Preserves the original reliance on a module-level `file` object.
        stream = file.raw

    blob_service = BlockBlobService(account_name=account_name,
                                    account_key=account_key)
    blob_service.create_container(container_name)
    blob_service.create_blob_from_stream(container_name, "image4.jpg", stream)
Beispiel #9
0
def _save_to_azure(dest_path, file_obj, container):
    """Upload *file_obj* to *container*/*dest_path* and return a
    long-lived read-only SAS URL (returned twice, matching the original
    contract of this helper)."""
    service = BlockBlobService(
        connection_string=settings.AZURE_STORAGE_CONNECTION_STRING)

    service.create_blob_from_stream(container, dest_path, file_obj)

    # Read-only token valid for 3000 days (24 h * 3000).
    expiry = datetime.utcnow() + timedelta(hours=24 * 3000)
    token = service.generate_blob_shared_access_signature(
        container, dest_path, BlobPermissions.READ, expiry)
    url = service.make_blob_url(container, dest_path, sas_token=token)
    return url, url
Beispiel #10
0
    def write_blob(self, sas_uri, blob_name, input_stream):
        """Upload *input_stream* into the container addressed by *sas_uri*
        and return a SAS-authenticated URL for the new blob."""
        account = self.get_account_from_uri(sas_uri)
        sas_key = self.get_sas_key_from_uri(sas_uri)
        container = self.get_container_from_uri(sas_uri)

        service = BlockBlobService(account_name=account, sas_token=sas_key)
        service.create_blob_from_stream(container, blob_name, input_stream)

        return service.make_blob_url(container, blob_name, sas_token=sas_key)
Beispiel #11
0
def push_dataframe_to_azure_blob(df, account_name, account_key, container_name,
                                 file_name):
    """Serialize *df* as CSV and upload it to the given blob container.

    Args:
        df: pandas DataFrame to upload.
        account_name / account_key: storage account credentials.
        container_name: target container name.
        file_name: blob name for the CSV.
    """
    # df.to_csv() with no target returns the CSV text; encode once and
    # wrap in a binary stream.  The old code went StringIO -> bytes ->
    # BytesIO, copying the payload twice for no benefit.
    payload = BytesIO(df.to_csv().encode('utf-8'))
    blob_service = BlockBlobService(account_name=account_name,
                                    account_key=account_key)
    blob_service.create_blob_from_stream(container_name=container_name,
                                         blob_name=file_name,
                                         stream=payload)
    payload.close()
Beispiel #12
0
class AzureStore(DataStoreInterface):
    """Data-store backend over Azure blob storage.

    A single BlockBlobService is cached process-wide under the key
    'azure' so repeated instantiations share one client.
    """

    service_cache = _ClientCache()

    def __init__(self):
        service = self.service_cache.get('azure')
        if service is None:
            self.service = BlockBlobService(**env.AZURE_STORAGE_CREDENTIALS)
        else:
            # Bug fix: the old code called service_cache.get('azure') a
            # second time here and discarded the result.
            self.service: BlockBlobService = service
        self.service_cache.put('azure', self.service)

    def get(self, sample_url: str) -> BinaryIO:
        """Download the blob behind *sample_url* into a rewound BytesIO."""
        container_name, blob_name = self._parser_url(sample_url)
        stream = io.BytesIO()
        self.service.get_blob_to_stream(container_name, blob_name, stream)
        stream.seek(0)
        return stream

    def put(self, sample_url: str, stream: BinaryIO):
        """Upload *stream* to *sample_url*, creating the container if needed."""
        container_name, blob_name = self._parser_url(sample_url)
        self.service.create_container(container_name)
        self.service.create_blob_from_stream(container_name, blob_name, stream)

    def exists(self, sample_url) -> bool:
        """Return True if the blob behind *sample_url* exists."""
        container_name, blob_name = self._parser_url(sample_url)
        return self.service.exists(container_name, blob_name)

    def delete(self, sample_url):
        """Delete the blob behind *sample_url*."""
        container_name, blob_name = self._parser_url(sample_url)
        self.service.delete_blob(container_name, blob_name)

    def get_signed_url(self, sample_url) -> str:
        """Return a 15-minute read-only HTTPS SAS URL for *sample_url*."""
        container_name, blob_name = self._parser_url(sample_url)
        signature = self.service.generate_blob_shared_access_signature(
            container_name,
            blob_name,
            permission=BlobPermissions(read=True),
            expiry=datetime.datetime.utcnow() + datetime.timedelta(minutes=15),
        )
        return self.service.make_blob_url(container_name,
                                          blob_name,
                                          protocol='https',
                                          sas_token=signature)

    @staticmethod
    def _parser_url(url) -> Tuple[str, str]:
        """Split '<AZURE_PREFIX>container/path/to/blob' into
        (container, blob); raises ValueError on a non-Azure URL."""
        if not url.startswith(AZURE_PREFIX):
            raise ValueError(f"URL should start with {AZURE_PREFIX}")
        container_name, blob_name = url[len(AZURE_PREFIX):].split('/',
                                                                  maxsplit=1)
        return container_name, blob_name
Beispiel #13
0
 def send_data_to_blob_cloud(self, data, blob_name):
     """Upload *data* to the cloud blob container as a zip attachment and
     return the new blob's URL.

     NOTE(review): this imports the private helper ``_get_path`` and calls
     the private method ``blob_service._get_host()`` -- both are internal
     azure-storage APIs and may break on SDK upgrades.
     """
     from azure.storage.blob.baseblobservice import _get_path
     # Authenticates with a SAS token (not an account key).
     blob_service = BlockBlobService(account_name=ACCOUNT_NAME,
                                     sas_token=SAS_TOKEN)
     # Content settings make browsers download the blob as a zip file.
     blob_service.create_blob_from_stream(
         CONTAINER_NAME,
         blob_name,
         data,
         content_settings=ContentSettings(
             content_type='application/zip',
             content_disposition='attachment; filename="nch-outfile.zip"'))
     # Assemble the public URL from protocol + host + container/blob path.
     return '{}://{}{}'.format(blob_service.protocol,
                               blob_service._get_host(),
                               _get_path(CONTAINER_NAME, blob_name))
class AzureStorageService():
    """CKAN dataextractor helper: uploads extracts to Azure blob storage
    and purges blobs older than the configured expiration."""

    def __init__(self):
        self.account_name = config.get(
            'ckanext.dataextractor.azure_storage_account_name', None)
        self.account_key = config.get(
            'ckanext.dataextractor.azure_storage_account_key', None)
        self.container_name = config.get(
            'ckanext.dataextractor.azure_storage_container_name', None)
        self.blob_expiration_days = config.get(
            'ckanext.dataextractor.blob_expiration_days', 10)

        if not self.account_name or not self.account_key or not self.container_name:
            # Bug fix: the old message only listed the option names with
            # no indication of what was wrong with them.
            raise ValueError(
                'azure_storage_account_name, azure_storage_account_key and '
                'azure_storage_container_name must all be configured')

        self.service = BlockBlobService(account_name=self.account_name,
                                        account_key=self.account_key)

    def _get_blob_reference(self, prefix, format):
        """Build a unique blob name: '<prefix>-<uuid hex>.<format lowered>'.

        Slashes are stripped from *prefix* so the resulting name is flat.
        """
        prefix = prefix.replace('/', '')
        return '{}{}.{}'.format(prefix + '-',
                                str(uuid.uuid4()).replace('-', ''),
                                format.lower())

    def blob_create(self, stream, format, resource_name):
        """Upload *stream* under a freshly generated blob name and return
        the blob's URL."""
        blob_name = self._get_blob_reference(resource_name, format)
        self.service.create_blob_from_stream(self.container_name, blob_name,
                                             stream)
        return self.service.make_blob_url(self.container_name, blob_name)

    def blobs_delete(self):
        """Delete blobs older than blob_expiration_days; return the count."""
        blobs = self.service.list_blobs(self.container_name)
        blobs_deleted = 0

        for blob in blobs:
            # last_modified is timezone-aware; drop tzinfo so it can be
            # compared against the naive datetime.now().
            time_diff = datetime.now() - blob.properties.last_modified.replace(
                tzinfo=None)
            if time_diff.days >= int(self.blob_expiration_days):
                blobs_deleted += 1
                self.service.delete_blob(self.container_name, blob.name)

        return blobs_deleted
Beispiel #15
0
class AzureStorage(BlobStorage):
    """Azure storage provider that utilizes the Azure blob storage.

    Args:
        connection_string: See http://azure.microsoft.com/en-us/documentation/articles/storage-configure-connection-string/
            for the connection string format.
        container_name: the name of the blob container in which all blobs
            are stored.
    """

    def __init__(self, connection_string, container_name):
        self._service = BlockBlobService(connection_string=connection_string)
        if not self._service.exists(container_name):
            raise ValueError("Container does not exist: " + container_name)
        self._container_name = container_name

    def get_object(self, blob_name):
        """Download *blob_name* and deserialize its content as JSON."""
        blob = self._service.get_blob_to_text(self._container_name, blob_name)
        return json.loads(blob.content)

    def put_object(self, obj, blob_name):
        """Serialize *obj* as UTF-8 JSON, upload it, and return its Blob."""
        data = json.dumps(obj).encode("utf-8")
        self._service.create_blob_from_bytes(self._container_name, blob_name,
                                             data)
        return Blob(blob_name, len(data))

    @contextlib.contextmanager
    def get_file(self, blob_name):
        """Context manager yielding a readable stream over *blob_name*.

        Bug fix: if AzureBlobReader's constructor raised, the old finally
        clause referenced an unbound ``stream`` and masked the original
        error with a NameError.
        """
        stream = None
        try:
            stream = AzureBlobReader(self._service, self._container_name,
                                     blob_name)
            yield stream
        finally:
            if stream is not None:
                stream.close()

    def put_file(self, fileobj, blob_name):
        """Upload *fileobj* as a stream; size is its position afterwards."""
        self._service.create_blob_from_stream(self._container_name, blob_name,
                                              fileobj)
        size = fileobj.tell()
        return Blob(blob_name, size)

    def put_avro(self, schema, records, blob_name, codec='snappy'):
        """Stream *records* as an Avro container file directly to the blob."""
        writer = AzureBlobWriter(self._service, self._container_name,
                                 blob_name)
        fastavro.writer(writer, schema, records, codec)
        writer.close()
        size = writer.tell()
        return Blob(blob_name, size)
Beispiel #16
0
def main(req: func.HttpRequest) -> func.HttpResponse:
    """Azure Function: split a PDF blob into one blob per page.

    Required headers: account_name, account_key, input_container,
    output_container, blob_name.  Always returns a JSON echo of the
    request (with the account key masked).
    """
    headers = req.headers
    account_name = headers.get('account_name')
    account_key = headers.get('account_key')
    input_container = headers.get('input_container')
    output_container = headers.get('output_container')
    blob_name = headers.get('blob_name')

    required = [account_name, account_key, input_container, output_container,
                blob_name]
    if all(value is not None for value in required):
        # Fetch the source PDF into memory.
        service = BlockBlobService(account_name=account_name,
                                   account_key=account_key)
        stream = io.BytesIO()
        service.get_blob_to_stream(container_name=input_container,
                                   blob_name=blob_name,
                                   stream=stream)
        reader = PdfFileReader(stream)

        # Write each page out as "<name minus .pdf><page index>.pdf".
        for page_index in range(reader.numPages):
            writer = PdfFileWriter()
            writer.addPage(reader.getPage(page_index))
            page_stream = io.BytesIO()
            writer.write(page_stream)
            page_stream.seek(0)
            service.create_blob_from_stream(
                container_name=output_container,
                blob_name=blob_name[:-4] + "%s.pdf" % page_index,
                stream=page_stream)

    # Echo the request parameters back; never reveal the real key.
    body = {
        'account_name': account_name,
        'input_container': input_container,
        'output_container': output_container,
        'blob_name': blob_name,
        'account_key': 'YOUR_SECRET_ACCOUNT_KEY' if account_key else None
    }
    return func.HttpResponse(json.dumps(body),
                             headers={'Content-Type': 'application/json'})
Beispiel #17
0
def save_file_to_blob(file_name):
    """Fetch NASDAQ data for *file_name* and store it under raw/nasdaq/
    in the stockrawdata container.

    Failures are logged rather than raised (best-effort, matching the
    original behavior).
    """
    from azure.storage.blob import BlockBlobService, PublicAccess
    import os
    container_name = 'stockrawdata'
    storage_name = 'stockanalysisadls'
    try:
        # Security fix: the account key used to be hard-coded in source;
        # read it from the environment and rotate the committed key.
        block_blob_service = BlockBlobService(
            account_name=storage_name,
            account_key=os.environ['AZURE_STORAGE_KEY'])
        block_blob_service.create_blob_from_stream(container_name,
                                                   'raw/nasdaq/' + file_name,
                                                   get_nasdaq_data(file_name))
    except Exception as err:
        print("Error occurred while loading data to ADLS. Error Details ::" +
              str(err))
    def WriteBlob(self, blob_name, value):
        """Create a single blob and store text (or raw bytes) in it.

        Returns "OK" on success; on failure returns the name of the stage
        that was in progress (the original error-reporting contract).
        In debug mode nothing is written.
        """
        szRet = ""
        if (LogWriter.DEBUG_MODE):
            return ("Debug モードのため書き込みをしません。")

        try:
            # szRet tracks the current stage so the caller can see where
            # a failure happened.
            szRet = "BlockBlobService"
            blob_service = BlockBlobService(self._name, self._key)

            szRet = "create_container"
            blob_service.create_container(LogWriter.LOG_CONTAINER_NAME,
                                          public_access=PublicAccess.Blob)

            szRet = "create_blob_from_bytes"

            # Strings are uploaded as text; anything else is wrapped in a
            # byte stream first.
            if (isinstance(value, str)):
                szRet = "create_blob_from_text"
                blob_service.create_blob_from_text(
                    LogWriter.LOG_CONTAINER_NAME, blob_name, value)
            else:
                szRet = "create_blob_from_stream"
                blob_service.create_blob_from_stream(
                    LogWriter.LOG_CONTAINER_NAME, blob_name, io.BytesIO(value))

            szRet = "OK"
        except Exception:
            # Bug fix: was a bare `except:`, which also swallowed
            # SystemExit and KeyboardInterrupt.
            print(r"Exception.")

        return szRet
Beispiel #19
0
def upload(request):
	"""Django view: accept an image upload, normalize it to JPEG, store it
	in Azure blob storage, and create DB metadata plus a share link.

	NOTE(review): the storage account name is hard-coded and the key is
	read from the AZ_STORAGE_KEY environment variable -- confirm both.
	"""
	if request.method == 'POST':
		form = UploadImageForm(request.POST, request.FILES)
		if form.is_valid():
			image_filename = str(uuid.uuid4())
			owner = None
			if request.user.is_authenticated:
				owner = request.user
			# Save image in Azure Blob storage
			img = Image.open(request.FILES['file'])
			#print("Image dimensions are", img.size)
			# Re-encode as RGB JPEG in memory, then rewind before upload.
			output_blob = BytesIO()
			img.convert('RGB').save(output_blob, format='JPEG')
			output_blob.seek(0)
			block_blob_service = BlockBlobService(account_name='magnifaistorage', account_key=os.getenv('AZ_STORAGE_KEY'))
			# Colorization requests go to a separate container -- presumably
			# watched by a colorizer worker; verify against the consumer.
			upload_container_name = 'uploads'
			if form.cleaned_data['should_colorize']:
				upload_container_name = 'uploads-colorize'
			block_blob_service.create_blob_from_stream(upload_container_name, image_filename + '.jpg', output_blob,
														content_settings=ContentSettings(content_type='image/jpeg'))
			#print("!!! Uploaded image to blob service.")
			# Create metadata for image in database
			metadata = ImageMetadata(filename=image_filename, should_colorize=form.cleaned_data['should_colorize'], owner=owner)
			#print("!!! Should colorize:", form.cleaned_data['should_colorize'])
			# Create shareable link for image in database
			link = ShareLink(name=str(uuid.uuid4()), image=metadata, owner=owner)
			metadata.save()
			#print("!!! Stored image metadata in database.")
			link.save()
			#print("!!! Stored shareable link data in database.")
			# Anonymous uploaders are redirected to the share link instead
			# of the owner-only image page.
			if request.user.is_authenticated:
				return redirect('modernaize:image', image_filename)
			else:
				return redirect('modernaize:share', link.name)
			#form.save()
			#return HttpResponseRedirect(reverse('modernaize:recent'))
			#return HttpResponse("Image uploaded successfully!")
	else:
		form = UploadImageForm()
	return render(request, 'modernaize/upload.html', {
		'form': form,
		'active_page': 'upload'
		})
Beispiel #20
0
class AzureBlobFeedStorage(BlockingFeedStorage):
    """Scrapy feed storage for azure://account:key@container/path URIs."""

    def __init__(self, uri):
        from azure.storage.blob import BlockBlobService

        # Robustness fix: split credentials from the location at the FIRST
        # '@' and the account name from the key at the FIRST ':' -- the
        # old unbounded splits broke if either character recurred.
        # uri[8:] strips the "azure://" scheme prefix.
        location = uri.split("@", 1)[1]
        container = location.split("/")[0]
        filename = "/".join(location.split("/")[1:])
        account_name, account_key = uri[8:].split("@", 1)[0].split(":", 1)

        self.account_name = account_name
        self.account_key = account_key
        self.container = container
        self.filename = filename
        self.blob_service = BlockBlobService(account_name=self.account_name,
                                             account_key=self.account_key)

    def _store_in_thread(self, file):
        # Rewind the spooled feed file before streaming it up.
        file.seek(0)
        self.blob_service.create_blob_from_stream(self.container,
                                                  self.filename, file)
Beispiel #21
0
class AzureBlobFeedStorage(BlockingFeedStorage):
    """
    Pulled from https://github.com/curabase/scrapy-feedexporter-azure-blob/

    Stores a Scrapy feed at azure://account_name:account_key@container/path.
    """

    def __init__(self, uri):
        # Azure keys are base64 and may contain '/', so the URI is parsed
        # by hand.  Robustness fix: split only at the FIRST '@' and ':'
        # instead of the old unbounded splits.  uri[8:] strips "azure://".
        location = uri.split('@', 1)[1]
        container = location.split('/')[0]
        filename = '/'.join(location.split('/')[1:])
        account_name, account_key = uri[8:].split('@', 1)[0].split(':', 1)

        self.account_name = account_name
        self.account_key = account_key
        self.container = container
        self.filename = filename
        self.blob_service = BlockBlobService(
            account_name=self.account_name,
            account_key=self.account_key,
        )

    def _store_in_thread(self, file):
        # Rewind before upload; Scrapy hands us a spooled temp file.
        file.seek(0)
        self.blob_service.create_blob_from_stream(self.container,
                                                  self.filename, file)
Beispiel #22
0
class BlobSaveable(Saveable):
    """Saves array images as PNG blobs under "<relpath>/<name>.png"."""

    def __init__(self, account_name, account_key, container_name):
        self.container_name = container_name
        self.blob_service = BlockBlobService(account_name=account_name,
                                             account_key=account_key)

    def save(self, relpath, name, image):
        """Encode *image* (a pixel array) as PNG, upload it, and return
        the blob's full URL."""
        # Encode the array into an in-memory PNG and rewind the buffer.
        buffer = io.BytesIO()
        Image.fromarray(image).save(buffer, format='png')
        buffer.seek(0)

        # Upload under "<relpath>/<name>.png".
        blob_name = '{}/{}.png'.format(relpath, name)
        self.blob_service.create_blob_from_stream(self.container_name,
                                                  blob_name, buffer)

        # Public path assembled from the service's protocol and endpoint.
        return '{}://{}/{}/{}'.format(self.blob_service.protocol,
                                      self.blob_service.primary_endpoint,
                                      self.container_name, blob_name)
Beispiel #23
0
def convert_file(url):
    """Fetch an image from *url*, apply an elastic-transform augmentation,
    upload the result as a randomly named PNG blob, and cache the mapping
    url -> blob URL in redis."""
    # Download and resize the source image.
    response = requests.get(url)
    source = Image.open(BytesIO(response.content)).resize(
        (IMAGE_WIDTH, IMAGE_HEIGHT))
    pixels = np.array(source)

    # Apply the augmentation to the raw pixel array.
    augmented = ElasticTransform()(image=pixels)
    result_pixels = augmented["image"]

    blob_service = BlockBlobService(account_name=account, account_key=key)

    # Re-encode the augmented pixels as an in-memory PNG and rewind.
    outfile = BytesIO()
    Image.fromarray(result_pixels).save(outfile, format='PNG')
    outfile.seek(0)

    filename = id_generator() + '.png'
    blob_service.create_blob_from_stream(container, filename, outfile)

    # Remember where this source URL's converted image now lives.
    ref = 'https://' + account + '.blob.core.windows.net/' + container + '/' + filename
    redis_client.set(url, ref)
Beispiel #24
0
class DirectoryClient:
    """Directory-style convenience wrapper over a single blob container.

    NOTE(review): __init__ ignores its connection_string parameter and
    authenticates with module-level account_name/account_key globals --
    confirm that is intentional before relying on it.
    """

    def __init__(self, connection_string, container_name):
        self.container_name = container_name
        self.client = BlockBlobService(account_name=account_name,
                                       account_key=account_key)

    def create_blob_from_stream(self, blob_name, stream):
        """Upload *stream* directly to *blob_name* (no local file needed)."""
        print('in azure_blob from stream function')
        self.client.create_blob_from_stream(self.container_name, blob_name,
                                            stream)

    def upload_file(self, source, dest):
        """Upload a single local file to a path inside the container."""
        with open(source, 'rb') as data:
            self.client.create_blob_from_path(self.container_name, dest,
                                              source)

    def download(self, source, dest):
        """Download a file or directory to a path on the local filesystem."""
        if not dest:
            raise Exception('A destination must be provided')

        blobs = self.ls_files(source, recursive=True)
        print('blobs to download:', blobs)
        if blobs:
            # Source is a directory, so dest must be treated as one too.
            if not source == '' and not source.endswith('/'):
                source += '/'
            if not dest.endswith('/'):
                dest += '/'
            # Append the directory name from source to the destination.
            dest += os.path.basename(os.path.normpath(source)) + '/'

            blobs = [source + blob for blob in blobs]
            for blob in blobs:
                blob_dest = dest + os.path.relpath(blob, source)
                self.client.get_blob_to_path(self.container_name, blob,
                                             blob_dest)
        else:
            self.client.get_blob_to_path(self.container_name, source, dest)

    def download_file(self, source, dest):
        """Download a single file to a path on the local filesystem."""
        # dest is a directory if it ends with '/' or '.', otherwise a file.
        if dest.endswith('.'):
            dest += '/'
        blob_dest = dest + os.path.basename(source) if dest.endswith(
            '/') else dest

        os.makedirs(os.path.dirname(blob_dest), exist_ok=True)
        self.client.get_blob_to_path(self.container_name, source, blob_dest)

    def ls_files(self, path, recursive=False):
        """List files under *path*, optionally recursively."""
        if not path == '' and not path.endswith('/'):
            path += '/'

        blob_iter = self.client.list_blobs(self.container_name, prefix=path)
        files = []
        for blob in blob_iter:
            relative_path = os.path.relpath(blob.name, path)
            # Keep only direct children unless a recursive listing was asked.
            if recursive or not '/' in relative_path:
                files.append(relative_path)
        return files

    def ls_dirs(self, path, recursive=False):
        """List directories under *path*, optionally recursively."""
        if not path == '' and not path.endswith('/'):
            path += '/'

        blob_iter = self.client.list_blobs(self.container_name, prefix=path)
        dirs = []
        for blob in blob_iter:
            relative_dir = os.path.dirname(os.path.relpath(blob.name, path))
            if relative_dir and (recursive or not '/'
                                 in relative_dir) and not relative_dir in dirs:
                dirs.append(relative_dir)

        return dirs

    def rm(self, path, recursive=False):
        """Remove a single file, or remove a path recursively."""
        if recursive:
            self.rmdir(path)
        else:
            print(f'Deleting {path}')
            # Bug fix: delete_blob takes (container_name, blob_name); the
            # old call passed only `path`, which was interpreted as the
            # container name.
            self.client.delete_blob(self.container_name, path)

    def rmdir(self, path):
        """Remove a directory and its contents recursively."""
        blobs = self.ls_files(path, recursive=True)
        if blobs == []:
            return

        if not path == '' and not path.endswith('/'):
            path += '/'
        blobs = [path + blob for blob in blobs]
        print(f'Deleting blobs in ' + str(self.container_name))
        for blob in blobs:
            self.client.delete_blob(blob_name=blob,
                                    container_name=self.container_name)
Beispiel #25
0
# Combine the per-threshold result frames into one sheet.
# NOTE(review): df1..df20 and ASGpos are defined earlier in the script,
# outside this excerpt.
ASGneg = pd.concat([df1, df2, df3, df4, df5, df10, df15, df20])

ASGINC = pd.concat([ASGpos, ASGneg])
ASGINC = ASGINC.rename(columns={'Unnamed: 0': 'Date'})

from io import StringIO, BytesIO
from azure.storage.blob import BlockBlobService

# NOTE(review): the storage account key is hard-coded here -- it should
# be moved to configuration/environment and the committed key rotated.
blobservice = BlockBlobService(
    account_name='flaskstorage',
    account_key=
    '4+JwE+i1NvLF/oJuqmEVb0nNEiX0+9Tnq8M6U28cA0hsjP4qlpAfaSORdOg0Kphw2CWf/Zp4uPZG+M/sfdZytQ=='
)
# Round-trip the combined frame through an in-memory Excel file and
# upload it to the mtscontainer blob container.
data = BytesIO()
ASGINC.to_excel(data, index=False)
data = bytes(data.getvalue())
data = BytesIO(data)
blobservice.create_blob_from_stream('mtscontainer', 'MTSINC.xlsx', data)
data.close()
# Disabled alternative below: write the same frame to a local Excel file.
"""
from pandas import ExcelWriter
if not os.path.exists('Generated-Excels'):
    os.makedirs('Generated-Excels')
writer = ExcelWriter('Generated-Excels/MTSINCR.xlsx')
ASGINC.to_excel(writer,'Sheet2')
writer.save()
print('done')

"""
def upload_res(path, local_path):
    """Upload *local_path* (a stream) to the 'open-pai' container at *path*.

    Returns whatever create_blob_from_stream returns.  Relies on the
    module-level connect_azure() helper for the service client.
    """
    service = connect_azure()
    container = "open-pai"
    result = service.create_blob_from_stream(container, path, local_path)
    print("Upload success!")
    return result
Beispiel #27
0
class AzureStorage(plugins.IStoragePlugin):
    """Storage plugin that keeps files in an Azure Block Blob container."""

    def configure(self, config):
        """Connect to the account and make sure the target container exists.

        The container is created (with public container-level read access)
        when it is missing.  Raises plugins.PluginInitException when the
        container can be neither accessed nor created.
        """
        self.storage = BlockBlobService(account_name=config['account_name'], account_key=config['account_key'])
        self.container = config['container']
        try:
            container = self.storage.get_container_properties(self.container)
            log.info("Configuring Azure blob storage %s/%s", self.storage.account_name, self.container)
        except AzureMissingResourceHttpError:
            # Container does not exist yet: create it and open it for reads.
            log.warning("Container '%s' is missing in account '%s', trying to create new", self.container, self.storage.account_name)
            try:
                self.storage.create_container(self.container)
                self.storage.set_container_acl(self.container, public_access=PublicAccess.Container)
            except Exception as e:
                log.critical("Cannot create new container: %s", e)
                raise plugins.PluginInitException("Cannot create new container")
        except AzureHttpError as e:
            log.critical("Cannot access container '%s' in account '%s': %s", self.container, self.storage.account_name, e)
            raise plugins.PluginInitException("Cannot access container")
        except Exception as e:
            log.critical("Cannot access container '%s' in account '%s': %s", self.container, self.storage.account_name, e)
            raise plugins.PluginInitException("Cannot access container")

    def delete(self, key):
        """Delete blob *key*; raise common.NotFound when it does not exist."""
        log.info("Deleting file '%s' from %s/%s", key, self.storage.account_name, self.container)
        try:
            self.storage.delete_blob(self.container, key)
        except AzureMissingResourceHttpError:
            log.error("File '%s' was not found in %s/%s", key, self.storage.account_name, self.container)
            raise common.NotFound('File not found')
        except Exception as e:
            log.error("Cannot delete '%s' from %s/%s: %s", key, self.storage.account_name, self.container, e)
            raise common.FatalError(e)

    def put(self, key, filename=None, file=None):
        """Upload either a local file (*filename*) or a stream (*file*) as *key*.

        Returns the storage key used.  When uploading from a stream, the
        stream position is restored after the upload.
        """
        storage_key = key
        try:
            if filename:
                log.debug("Uploading %s to %s", filename, self.storage.make_blob_url(self.container, storage_key))
                self.storage.create_blob_from_path(self.container, storage_key, filename, content_settings=ContentSettings(content_type='application/octet-stream'))
            elif file:
                old_pos = file.tell()
                file.seek(0)
                log.debug("Uploading from stream to %s", self.storage.make_blob_url(self.container, storage_key))
                self.storage.create_blob_from_stream(self.container, storage_key, file, content_settings=ContentSettings(content_type='application/octet-stream'))
                file.seek(old_pos)
        except Exception as e:
            # TODO: more detailed error inspection
            log.critical("Error uploading to %s/%s: %s", self.storage.account_name, self.container, e)
            raise common.FatalError(e)
        return storage_key

    def get(self, key, stream):
        """Stream blob *key* into *stream*, downloading 4 MiB at a time.

        current azure python sdk barely can work with non-seekable streams,
        so chunking is implemented by hand via the private _get_blob ranged
        download.
        """
        # TODO: proper ranging? RFC says server SHOULD return 406 once range is
        # unsatisfiable, but Azure is OK with end pos > blob length unless blob
        # is not empty
        chunk_size = 4*1024*1024
        chunk_start = 0
        chunk_end = chunk_size - 1
        while True:
            try:
                chunk = self.storage._get_blob(self.container, key, start_range=chunk_start, end_range=chunk_end)
                log.debug("Writing %s bytes from %s", len(chunk.content), chunk_start)
                stream.write(chunk.content)
            except IOError:
                # remote side closed connection
                return
            except AzureMissingResourceHttpError as e:
                raise common.NotFound(e)
            except (AzureHttpError, AzureException) as e:
                raise common.TemporaryError('Error while downloading {}: {}'.format(key, e))

            # Content-Range has the form "bytes <start>-<end>/<total>"; the
            # range actually served tells us where the next chunk starts and
            # when the blob is exhausted.  (An unused blob_length computed via
            # split() was removed as dead code.)
            chunk_start, chunk_end, blob_size = map(int, re.match(r'^bytes\s+(\d+)-(\d+)/(\d+)$', chunk.properties.content_range).groups())
            if chunk_end == blob_size - 1:
                # no more data to stream
                break
            else:
                chunk_start = chunk_end + 1
                chunk_end += chunk_size
        return 0
Beispiel #28
0
        print('It didnt work')

# NOTE(review): report-upload fragment; df1..df20 and ASGpos are DataFrames
# defined earlier in the file (not visible here) — verify against caller.
ASGneg = pd.concat([df1, df2, df3, df4, df5, df10, df15, df20])

# Merge the two result sets and rename the index column that a prior
# to_csv/read_csv round trip left as 'Unnamed: 0'.
ASGINC = pd.concat([ASGpos, ASGneg])
ASGINC = ASGINC.rename(columns={'Unnamed: 0': 'Date'})

from io import StringIO, BytesIO
from azure.storage.blob import BlockBlobService

# SECURITY: storage account key is hardcoded in source; it should be rotated
# and loaded from configuration/environment instead.
blobservice = BlockBlobService(
    account_name='flaskstorage',
    account_key=
    '4+JwE+i1NvLF/oJuqmEVb0nNEiX0+9Tnq8M6U28cA0hsjP4qlpAfaSORdOg0Kphw2CWf/Zp4uPZG+M/sfdZytQ=='
)
# Render the DataFrame to an in-memory xlsx, then rebuild the buffer so the
# upload starts reading from position 0 (to_excel leaves the cursor at EOF).
data = BytesIO()
ASGINC.to_excel(data, index=False)
data = bytes(data.getvalue())
data = BytesIO(data)
blobservice.create_blob_from_stream('mobilecontainer', 'MOBILEINC.xlsx', data)
data.close()
# The triple-quoted string below is dead code: a disabled local-export path.
"""
from pandas import ExcelWriter
if not os.path.exists('Generated-Excels'):
    os.makedirs('Generated-Excels')
writer = ExcelWriter('Generated-Excels/MOBILEINCR.xlsx')
ASGINC.to_excel(writer,'Sheet2')
writer.save()
print('done')
"""
Beispiel #29
0
# Fetch images via Google Custom Search and push each one into blob storage.
# All quoted values ("your_key", "your_account_name", ...) are placeholders
# the user must replace with real credentials/identifiers.
from PIL import Image
import io
import requests
api_key="your_key"
from apiclient.discovery import build
import azure
from azure.storage.blob import BlockBlobService
from azure.storage.blob import PublicAccess
from azure.storage.blob import ContentSettings

block_blob_service =BlockBlobService(account_name="your_account_name",account_key="account_key")

resource=build("customsearch","v1",developerKey=api_key).cse()
images=[]


# Custom Search returns at most 10 results per call; page through the first
# 50 hits by stepping the 1-based start index.
for i in range(1,50,10):
    result=resource.list(q="your_query",cx="cx_of_your_site",searchType="image",start=i).execute()
    images+=result["items"]

# Download each hit and store it under blob name 'external'.
# NOTE(review): the first argument to create_blob_from_stream is the
# container name, but 'account_name' is passed — looks like a placeholder
# left in the wrong slot; verify.  Each upload also reuses the same blob
# name, so every iteration overwrites the previous image.
for item in images:
    response=requests.get(item["link"])  
    block_blob_service.create_blob_from_stream('account_name','external',io.BytesIO(response.content),content_settings=ContentSettings(content_type='image/Jpeg'))
    
    
Beispiel #30
0
# NOTE(review): report-upload fragment; df1..df20 and ASGpos are DataFrames
# defined earlier in the file (not visible here) — verify against caller.
ASGneg = pd.concat([df1, df2, df3, df4, df5, df10, df15, df20])

# Merge the two result sets and rename the index column that a prior
# to_csv/read_csv round trip left as 'Unnamed: 0'.
ASGINC = pd.concat([ASGpos, ASGneg])
ASGINC = ASGINC.rename(columns={'Unnamed: 0': 'Date'})

# SECURITY: storage account key is hardcoded in source; it should be rotated
# and loaded from configuration/environment instead.
blobservice = BlockBlobService(
    account_name='flaskstorage',
    account_key=
    'M9Hax/c6wKCdVXIcmBafad35/ctWW2OQJQynRMrM29D+mfZXWW53MF0Sthsf0cmWN+/XukVg/aZQ/6XBAB4cgg=='
)
# Render the DataFrame to an in-memory xlsx, then rebuild the buffer so the
# upload starts reading from position 0 (to_excel leaves the cursor at EOF).
data = BytesIO()
ASGINC.to_excel(data, index=False)
data = bytes(data.getvalue())
data = BytesIO(data)
blobservice.create_blob_from_stream('totalcontainer', 'TOTALINC.xlsx', data)
data.close()
# The triple-quoted string below is dead code (a disabled local-export path);
# the trailing `""` on the closing line concatenates an empty string to it.
"""
from pandas import ExcelWriter
if not os.path.exists('Generated-Excels'):
    os.makedirs('Generated-Excels')
writer = ExcelWriter('Generated-Excels/TOTALINCR1.xlsx')
ASGINC.to_excel(writer,'Sheet2')
writer.save()
print('done')
""" ""
#*********************************Attrition for total*************************************************
# Reshape the series into Prophet's expected (ds, y) column layout.
# NOTE(review): `ser` comes from earlier in the file; `revdf = ser` aliases
# it, so the mutations below also modify `ser` — verify that is intended.
revdf = ser
revdf['ds'] = revdf.index
revdf = revdf.rename(columns={"Attrition": 'y'})
my_model = Prophet(interval_width=0.95, changepoint_prior_scale=5)
from azure.storage.blob import BlockBlobService
from azure.storage.blob import PublicAccess
from azure.storage.blob import ContentSettings
import requests
import io

# SECURITY: storage account key is hardcoded in source; it should be rotated
# and loaded from configuration/environment instead.
AzureStorageAccount = "istagingstorage"
AzureStorageAccessKey = "qkFU/ah2v4cHvQ7oAZASb2HRGFUkJhg2xs5KBYB+2fEnYmSp6hZH9U3vEO6TujzHHdBF3HWVgqalwcUuvIBMUQ=="
AzureStorageContainerName = "peter-container"

blobname="out2.jpg"
url = "https://www.gravatar.com/avatar/dd071a6a7c97ba637c558c5e71137c7b?s=32&d=identicon&r=PG"

block_blob_service = BlockBlobService(account_name=AzureStorageAccount, account_key=AzureStorageAccessKey)

# Download the avatar and push it into the container as out2.jpg.
r = requests.get(url, stream=True)
stream = io.BytesIO(r.content)

block_blob_service.create_blob_from_stream(AzureStorageContainerName,blobname,stream)
def _getDownloadlink():
    """Return the public HTTPS URL of the blob uploaded above.

    Reads the module-level AzureStorageAccount, AzureStorageContainerName
    and blobname defined earlier in this file.
    """
    baseurl = "https://" + AzureStorageAccount + ".blob.core.windows.net/"
    downloadlink = baseurl + AzureStorageContainerName + "/" + blobname
    return downloadlink


# BUG FIX: the original `print _getDownloadlink()` was Python 2 syntax with a
# stray single-tab indent — a SyntaxError under Python 3 (which this file
# targets: it uses f-strings elsewhere).
print(_getDownloadlink())
Beispiel #32
0
# Push a selected local file into an Azure container and print its public URL.
# Relies on names defined earlier in the file: input_filename, output_filename,
# container_name, Lfun, Ldir.
print('\n*** Pushing selected file to Azure ***\n')
print(' - path to input file: ' + input_filename)
print(' - output filename: ' + output_filename)

# Azure commands
from azure.storage.blob import BlockBlobService
from azure.storage.blob import PublicAccess
# Account credentials are kept out of source in a local CSV file.
azu_dict = Lfun.csv_to_dict(Ldir['data'] + 'accounts/azure_pm_2015.05.25.csv')
account = azu_dict['account']
key = azu_dict['key']
blob_service = BlockBlobService(account_name=account, account_key=key)
# BUG FIX: the original `blob_service.create_container` referenced the method
# without calling it (a no-op expression); actually create the container.
blob_service.create_container(container_name)
blob_service.set_container_acl(container_name, public_access=PublicAccess.Container)

# write it to Azure
try:
    # `with` guarantees the file handle is closed even if the upload raises.
    with open(input_filename, 'rb') as bname:
        blob_service.create_blob_from_stream(container_name, output_filename, bname)
    print('\n*** success ***')
    az_url = ('https://pm2.blob.core.windows.net/'
        + container_name + '/' + output_filename)
    print('\nUSE THIS URL TO ACCESS THE FILE\n')
    print(az_url)
except Exception:
    # could be FileNotFoundError from open, or an Azure error; narrowed from a
    # bare `except:` so KeyboardInterrupt/SystemExit still propagate
    print('*** FAILED TO WRITE ***')
print('\n' + 50*'*')
Beispiel #33
0
def main(req: func.HttpRequest) -> func.HttpResponse:
    """HTTP-triggered Azure Function: store submitted sign images in blob
    storage and register them with a Custom Vision project.

    Expects a JSON body of the form
    {"items": [{"type": <sign label>, "image": "data:image/png;base64,..."}]}
    and returns a JSON summary of the stored image paths (plus any error
    details) with permissive CORS headers.

    NOTE(review): depends on module-level names defined elsewhere in this
    file (storageAccount, storageAccountKey, trainingKey, apiEndpoint,
    storageContainer, base_folder, projectId, tags, check_tags) — verify.
    """
    logging.info('Python HTTP trigger function processed a request.')

    logging.info(f'Method: {req.method}')
    # CORS preflight request: answer with the allowed headers/methods only.
    if req.method == "OPTIONS":
        return func.HttpResponse(status_code=204,
                                 headers={
                                     'Access-Control-Allow-Headers':
                                     'content-type',
                                     'Access-Control-Allow-Methods': 'POST',
                                     'Access-Control-Max-Age': '180',
                                     'Access-Control-Allow-Origin': '*'
                                 })

    body = req.get_json()

    blob_service = BlockBlobService(account_name=storageAccount,
                                    account_key=storageAccountKey)

    # prep trainer
    trainer = CustomVisionTrainingClient(trainingKey, apiEndpoint)
    #check tags
    check_tags(trainer)

    records = {'images': []}
    image_list = []

    try:
        for item in body['items']:
            # sign
            sign = item['type'].strip()

            # image bits: strip the data-URL prefix, then decode the base64
            # payload into raw PNG bytes
            img = base64.b64decode(item['image'].replace(
                'data:image/png;base64,', ''))
            stream = BytesIO(img)

            # storage path + save (random UUID filename under the sign folder)
            image_name = f'{str(uuid.uuid4())}.png'
            blob_name = f'{base_folder}/{sign}/{image_name}'
            sresponse = blob_service.create_blob_from_stream(
                storageContainer, blob_name, stream)

            logging.info(f'Storage Response: {sresponse}')

            # save to custom vision, tagged with the sign's tag id
            image_list.append(
                ImageFileCreateEntry(name=image_name,
                                     contents=img,
                                     tag_ids=[tags[sign].id]))

            # return image
            path = f'{blob_service.protocol}://{blob_service.primary_endpoint}/{storageContainer}/{blob_name}'
            records['images'].append({'sign': sign, 'path': path})

        # save list
        upload_result = trainer.create_images_from_files(projectId,
                                                         images=image_list)
        if not upload_result.is_batch_successful:
            # report per-image Custom Vision failures back to the caller
            records['error'] = {'type': 'CustomVision Error', 'items': []}
            for image in upload_result.images:
                records['error']['items'].append(
                    {image.source_url: image.status})
        else:
            records['error'] = {}
    except Exception as error:
        # catch-all so the function always returns JSON; note the HTTP status
        # stays 200 with the error detail embedded in the body
        logging.exception('Python Error')
        records['error'] = {
            'code': '500',
            'message': f'{type(error).__name__}: {str(error)}',
            'type': 'Python Error'
        }

    return func.HttpResponse(body=json.dumps(records),
                             status_code=200,
                             headers={
                                 'Content-Type': 'application/json',
                                 'Access-Control-Allow-Origin': '*'
                             })