Example #1
def save_image(request, filefield):

    blob_client = BlockBlobService(
        account_name=appvar.config["BLOB_ACCT_NAME"],
        account_key=appvar.config["BLOB_KEY"])

    filestorage = request.files.get(filefield)

    if filestorage is None:
        raise FileNotFoundError("No file was uploaded for field '%s'" % filefield)

    if filestorage.filename == "":
        raise FileNotFoundError("Uploaded file has an empty filename")

    file_name, ext = os.path.splitext(filestorage.filename)
    file_name_rand = file_name + str(uuid.uuid4()) + ext

    file_name_secure = secure_filename(file_name_rand)

    content = filestorage.read()
    blob_client.create_blob_from_bytes(container_name="ocrimages",
                                       blob_name=file_name_secure,
                                       blob=content)

    return file_name_secure
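A minimal sketch of how save_image might be wired into a Flask route; the route, app object, and "photo" field name are assumptions, not part of the example:

# Hypothetical Flask wiring for save_image (assumed names)
from flask import Flask, request, jsonify

app = Flask(__name__)

@app.route("/upload", methods=["POST"])
def upload():
    # save_image pulls the file out of request.files and uploads it to
    # the "ocrimages" container, returning the randomized blob name
    blob_name = save_image(request, "photo")
    return jsonify(blob=blob_name)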
Example #2
class BlobUploader(object):
    """
    A simple helper class for uploading image data to blob storage.
    """

    def __init__(self, container_name, connection_string):
        """
        Initialize a new BlobUploader.
        """
        self.container_name = container_name
        self.block_blob_service = BlockBlobService(connection_string=connection_string)

    def upload(self, camera_id, base_name, extension, data):
        """
        Upload data to blob storage.

        :param str camera_id:
            The ID of the camera that took the image.
        :param str base_name:
            The base filename of the file in storage, without extension.
            The camera module provides this, and it should be a string
            representation of the date and time at which the image was
            captured, such as 2019-06-15T13:45:30.459Z
        :param str extension:
            The file extension specifying the type of image, such as
            "jpg", "png", etc. The leading dot character should not
            be included.
        :param bytes data:
            The content of the blob to be uploaded.
        """
        blob_name = camera_id + '/' + base_name + '.' + extension
        self.block_blob_service.create_blob_from_bytes(self.container_name,
                                                       blob_name,
                                                       data)
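A usage sketch for BlobUploader; the connection string, container, and file are placeholders:

# Upload one captured frame; the blob lands at "camera-01/<timestamp>.jpg"
uploader = BlobUploader("camera-images",
                        "DefaultEndpointsProtocol=https;AccountName=...;AccountKey=...")
with open("frame.jpg", "rb") as f:
    uploader.upload("camera-01", "2019-06-15T13:45:30.459Z", "jpg", f.read())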
Example #3
    def test_account_sas_with_question_mark_prefix(self):
        # SAS URL is calculated from storage key, so this test runs live only
        if TestMode.need_recording_file(self.test_mode):
            return

        # Arrange
        token = '?' + self.account.generate_shared_access_signature(
            Services.BLOB,
            ResourceTypes.OBJECT + ResourceTypes.CONTAINER,
            AccountPermissions.READ + AccountPermissions.WRITE +
            AccountPermissions.DELETE + AccountPermissions.CREATE,
            datetime.utcnow() + timedelta(hours=1),
        )

        service = BlockBlobService(self.account_name,
                                   sas_token=token,
                                   is_emulated=self.settings.IS_EMULATED)
        data = b'shared access signature with read/write permission on blob'
        container_name = self.get_resource_name("container")
        blob_name = 'blob1.txt'

        try:
            # Act
            service.create_container(container_name)
            service.create_blob_from_bytes(container_name, blob_name, data)
            blob = service.get_blob_to_bytes(container_name, blob_name)

            # Assert
            self.assertIsNotNone(blob)
            self.assertEqual(data, blob.content)
        finally:
            service.delete_container(container_name)
Example #4
class AzureStorage:
    def __init__(self, connectionString, container):
        self.BlobService = BlockBlobService(connection_string=connectionString)
        nameValue = UtilityHelper.connectStringToDictionary(connectionString)
        self.AccountName = nameValue['AccountName']
        self.container = container

    def getBaseURL(self):
        return 'https://' + self.AccountName + '.blob.core.windows.net/'

    def uploadByLocalFile(self, localFullFileName, remoteBlobName):
        self.BlobService.create_blob_from_path(self.container, remoteBlobName,
                                               localFullFileName)
        return self.getBaseURL() + self.container + '/' + remoteBlobName

    def uploadByStream(self, streamData, remoteBlobName):
        self.BlobService.create_blob_from_stream(self.container,
                                                 remoteBlobName, streamData)
        return self.getBaseURL() + self.container + '/' + remoteBlobName

    def uploadByBytes(self, bytesData, remoteBlobName):
        self.BlobService.create_blob_from_bytes(self.container, remoteBlobName,
                                                bytesData)
        return self.getBaseURL() + self.container + '/' + remoteBlobName

    def delete(self, blobName):
        self.BlobService.delete_blob(self.container, blobName)

    def copy(self, sourceBlobURL, targetBlobName):
        self.BlobService.copy_blob(self.container, targetBlobName,
                                   sourceBlobURL)
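A round-trip sketch for the AzureStorage wrapper above; the connection string and container are placeholders:

# Upload bytes and get back the public blob URL
storage = AzureStorage("DefaultEndpointsProtocol=https;AccountName=...;AccountKey=...",
                       "uploads")
url = storage.uploadByBytes(b"hello world", "greeting.txt")
print(url)  # https://<account>.blob.core.windows.net/uploads/greeting.txt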
Example #5
        def update(self, area, selector, content_type, buffer):
            assert area is not None, 'area is none; should already be validated'

            area_config = config.load_area(area)
            
            storage_config = config.load_storage(area_config['storage'])
            
            area = area.lower()

            # httplib.HTTPConnection.debuglevel = 1
            # http.client.HTTPConnection.debuglevel = 1

            blob_service = BlockBlobService(account_name=storage_config['name'], account_key=storage_config['key1'])

            md5_hash = base64.b64encode(hashlib.md5(buffer).digest())

            content_settings = ContentSettings(content_md5=md5_hash)
            if content_type is not None and len(content_type) > 0:
                content_settings.content_type = content_type

            blob_service.create_blob_from_bytes(
                area_config['container'],
                selector,
                buffer,
                content_settings=content_settings,
                validate_content=False
            )

            return md5_hash
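If needed, the Content-MD5 the service records can be checked against the value update() returns; a minimal sketch, assuming the legacy SDK's get_blob_properties and the same blob_service, area_config, and selector as above:

# Sketch: confirm the stored Content-MD5 matches the returned hash
props = blob_service.get_blob_properties(area_config['container'], selector)
assert props.properties.content_settings.content_md5 == md5_hash.decode('ascii')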
Example #6
def __submit_image_from_url(proof_type, url):
    """Uploads an image to an azure blob storage instance that is also
    accessible by the clerk's module. Seems like a strange architectural
    decision--the A2P API should accept the image data as part of
    the user's submission."""

    # TODO: Find a better way to get original filename from DA.
    filename = "ProofOf%s" % proof_type
    try:
        orig_filename = re.findall(r"filename%3D(.*?)(&|$)", url)[0][0]
        filename += "_%s" % orig_filename
    except Exception:
        pass

    blob_service = BlockBlobService(
        account_name=a2p_config()['blob_account_name'],
        account_key=a2p_config()['blob_account_key'])
    image_body = requests.get(url).content
    blob_service.create_blob_from_bytes('attachments', filename, image_body)

    return {
        "fileName": filename,
        "blobName": filename,
        "size": len(image_body)
    }
Example #7
def add_case():
    case_data = request.get_json()
    case_data['created_at'] = dt.datetime.now().strftime("%Y-%m-%d %H:%M:%S")

    case = CaseSchema().load(case_data)
    case_id = str(db.cases.insert_one(case.data).inserted_id)

    block_blob_service = BlockBlobService(
        account_name='cryemlab',
        account_key=
        'yo97cwqrvQLDJuQcY9fwpJqRJAQ9wPl5moUGCGxCesdyyEaByLT6L+0lCYBMZ31AqbtIAekAI429+U6UzEC/Vg=='
    )

    container_name = 'cases'
    block_blob_service.create_container(container_name)

    block_blob_service.set_container_acl(container_name,
                                         public_access=PublicAccess.Container)

    image_data = re.sub('^data:image/.+;base64,', '', case_data['image'])

    byte_data = base64.b64decode(image_data)

    block_blob_service.create_blob_from_bytes(container_name, case_id + '.jpg',
                                              byte_data)

    return '', 204
Example #8
def prokka_request(request):
    form = ProkkaForm()
    if request.method == 'POST':
        form = ProkkaForm(request.POST, request.FILES)
        if form.is_valid():
            seqids, name, other_files = form.cleaned_data
            prokka_request_object = ProkkaRequest.objects.create(
                user=request.user, seqids=seqids, status='Processing')
            if name is None:
                prokka_request_object.name = prokka_request_object.pk
            else:
                prokka_request_object.name = name
            prokka_request_object.save()
            container_name = 'prokka-{}'.format(prokka_request_object.pk)
            blob_client = BlockBlobService(
                account_name=settings.AZURE_ACCOUNT_NAME,
                account_key=settings.AZURE_ACCOUNT_KEY)
            blob_client.create_container(container_name)
            file_names = list()
            for other_file in request.FILES.getlist('other_files'):
                file_name = os.path.join(container_name, other_file.name)
                file_names.append(file_name)
                blob_client.create_blob_from_bytes(
                    container_name=container_name,
                    blob_name=other_file.name,
                    blob=other_file.read())
            prokka_request_object.other_input_files = file_names
            prokka_request_object.save()
            run_prokka.apply_async(queue='cowbat',
                                   args=(prokka_request_object.pk, ),
                                   countdown=10)
            return redirect('geneseekr:prokka_result',
                            prokka_request_pk=prokka_request_object.pk)
    return render(request, 'geneseekr/prokka_request.html', {'form': form})
Example #9
def blob():
    account_name = config.STORAGE_ACCOUNT_NAME
    account_key = config.STORAGE_ACCOUNT_KEY
    block_blob_service = BlockBlobService(account_name=account_name,
                                          account_key=account_key)

    container_name = config.BLOB_CONTAINER_NAME

    block_blob_service.create_container(container_name)

    img = load_img("/share1/public/isic/images/2.NV/ISIC_0034320.jpg",
                   target_size=(224, 224))
    img = img_to_array(img)
    imgbytes = img.tobytes()

    block_blob_service.create_blob_from_bytes(container_name, "test1",
                                              imgbytes)

    ib = block_blob_service.get_blob_to_bytes(container_name, "test1").content

    print(imgbytes == ib)

    print("\nList blobs in the container")
    generator = block_blob_service.list_blobs(container_name)
    for blob in generator:
        print("\t Blob name: " + blob.name)
Example #10
    def _save(self, data: pd.DataFrame) -> None:
        blob_service = BlockBlobService(**self._credentials)
        blob_service.create_blob_from_bytes(
            container_name=self._container_name,
            blob_name=self._filepath,
            blob=data.to_json(**self._save_args).encode(self._encoding),
            **self._blob_from_bytes_args)
Example #11
def uploadToBlobStorage(dataToUpload, customer, dataName, storage_account_name,
                        storage_account_key):
    # Create a blob container for each customer
    blockblobService = BlockBlobService(account_name=storage_account_name,
                                        account_key=storage_account_key)
    blockblobService.create_container(customer)

    # Upload the data to the correct blob container, under the correct dataname
    blockblobService.create_blob_from_bytes(customer, dataName, dataToUpload)
    print('{} for {} has been uploaded'.format(dataName, customer))
Example #12
    def write_blob_from_bytes(self, sas_uri, blob_name, input_bytes):
        sas_service = BlockBlobService(
            account_name=self.get_account_from_uri(sas_uri),
            sas_token=self.get_sas_key_from_uri(sas_uri))

        container_name = self.get_container_from_uri(sas_uri)

        sas_service.create_blob_from_bytes(container_name, blob_name, input_bytes)

        return sas_service.make_blob_url(container_name, blob_name, sas_token=self.get_sas_key_from_uri(sas_uri))
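The get_account_from_uri, get_sas_key_from_uri, and get_container_from_uri helpers are referenced but not shown; a plausible sketch for SAS URIs of the form https://<account>.blob.core.windows.net/<container>?<token>, shown here as plain functions (in the example they are methods on the same class) and an assumption rather than the original implementation:

from urllib.parse import urlparse

def get_account_from_uri(sas_uri):
    # "account.blob.core.windows.net" -> "account"
    return urlparse(sas_uri).netloc.split('.')[0]

def get_sas_key_from_uri(sas_uri):
    # everything after '?' is the SAS token
    return urlparse(sas_uri).query

def get_container_from_uri(sas_uri):
    # the first path segment is the container name
    return urlparse(sas_uri).path.lstrip('/').split('/')[0]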
Example #13
def main(req: func.HttpRequest) -> func.HttpResponse:
    logging.info('Python HTTP trigger function processed a request.')
    # checking for a POST request.
    if (req.method == "POST"):
        try:
            # check JSON body request that carries the encoded image base64 info
            req_body = req.get_json()
            logging.info(req_body)
            image_name = req_body['name']
            image = req_body['image'].replace(' ', '+')

            #decoding base64 image from json body
            decoded_image = base64.b64decode(image)
            logging.info(decoded_image)

            # Storage account credentials from application settings
            account_name = os.getenv('StorageName')
            account_key = os.getenv('StorageConnectionKey')

            # upload the picture to blob storage
            block_blob_service = BlockBlobService(account_name=account_name,
                                                  account_key=account_key)
            logging.info("Successfully connected to blob storage.")
            container_name = 'machineimages'
            blob_name = image_name + '.jpeg'
            # Create the blob, with a content type matching the .jpeg extension
            block_blob_service.create_blob_from_bytes(
                container_name,
                blob_name,
                decoded_image,
                content_settings=ContentSettings(content_type='image/jpeg'))
            logging.info("Successfully created blob.")

            # Return a successful POST response
            return func.HttpResponse("successful request")
        except ValueError as e:
            logging.error("Invalid JSON format: " + str(e))
        # AzureException must precede the generic Exception handler to be reachable
        except AzureException as ae:
            logging.error("Something went wrong with the Azure connection: " + str(ae))
        except Exception as err:
            logging.error("Something went wrong handling the request: " + str(err))
    return func.HttpResponse(
        "Please pass a name on the query string or in the request body",
        status_code=400)
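A client-side sketch for exercising this function; the function URL and image file are placeholders:

# POST a base64-encoded image to the HTTP-triggered function
import base64
import requests

with open("machine.jpeg", "rb") as f:
    payload = {"name": "machine-42",
               "image": base64.b64encode(f.read()).decode("ascii")}
resp = requests.post("https://<function-app>.azurewebsites.net/api/upload",
                     json=payload)
print(resp.status_code, resp.text)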
Example #14
def guardarDarUrl(file, filename):
    baseUrl = 'https://catalogo2018storage.blob.core.windows.net/pictures/'
    sas = '?sv=2017-07-29&ss=bf&srt=co&sp=rwdlac&se=2018-05-19T00:27:02Z&st=2018-04-01T16:27:02Z&spr=https,http&sig=iJy3%2BhD2JhuYvXTRfsXT2qTM2p08tfhNGAfb%2BG5YR6w%3D'
    # Create the BlockBlobService that is used to call the Blob service for the storage account
    block_blob_service = BlockBlobService(account_name=config('ACCOUNT_NAME',
                                                              default=''),
                                          account_key=config('ACCOUNT_KEY',
                                                             default=''))

    # Upload the file, using filename for the blob name
    block_blob_service.create_blob_from_bytes('pictures', filename, file)

    return baseUrl + filename + sas
Example #15
def __submit_image_from_url(url):
    blob_service = BlockBlobService(
        account_name='a2pca',
        account_key=__get_a2p_config()['blob_account_key'])
    image_body = requests.get(url).content
    filename = 'a2p_daupload_' + hashlib.sha224(image_body).hexdigest()
    blob_service.create_blob_from_bytes('attachments', filename, image_body)

    return {
        "fileName": filename,
        "blobName": filename,
        "size": len(image_body)
    }
Example #16
def storage(host):
    """
    Create blob using azurite.
    """
    bbs = BlockBlobService(
        account_name=USERNAME,
        account_key=KEY,
        custom_domain=f"http://{host}/devstoreaccount1",
    )
    bbs.create_container("data", timeout=1)

    bbs.create_blob_from_bytes("data", "root/a/file.txt", data)
    bbs.create_blob_from_bytes("data", "root/b/file.txt", data)
    yield bbs
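A sketch of a test consuming this fixture (assuming it is registered with @pytest.fixture, and that USERNAME, KEY, and data are defined at module level, as the example implies):

def test_lists_uploaded_blobs(storage):
    # both blobs created by the fixture should be listed
    names = [b.name for b in storage.list_blobs("data")]
    assert "root/a/file.txt" in names
    assert "root/b/file.txt" in names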
Example #17
def guardar_archivo(archivo, nombre):
    # Primary Blob service endpoint
    baseUrl = config('STORAGE_URL', default='')
    # A shared access signature (SAS) is a URI that grants restricted
    # access rights to Azure Storage resources
    sas = config('SAS', default='')
    # Create the BlockBlobService that is used to call the Blob service for the storage account
    block_blob_service = BlockBlobService(account_name=config('ACCOUNT_NAME', default=''),
                                          account_key=config('ACCOUNT_KEY', default=''))

    # Upload the file, using nombre for the blob name
    block_blob_service.create_blob_from_bytes('pictures', nombre, archivo)

    return baseUrl + nombre + sas
Example #18
def extractFrames(pathOut, filepath):

    # Path to video file
    cap = cv2.VideoCapture(filepath)
    # Request 2 frames per second from the capture (the sampling check
    # below is what actually enforces the rate)
    cap.set(cv2.CAP_PROP_FPS, 2)
    # Used as counter variable
    x = 1
    frameRate = cap.get(5)  #frame rate
    numberOfPicturesPerSecond = 2
    blockBlobService = BlockBlobService(
        account_name='stworkersafety',
        account_key=
        '7OyzTj7Y83+0/+DiuS9IVDoZcKrQ0pSjE4F4q8L/ltT+Dv4TbBXTSDrOu928L60SCzo7mq+P3fEv3B4aOL6Flw=='
    )
    # start creating frames from video
    while (cap.isOpened()):
        frameId = cap.get(1)  #current frame number
        ret, frame = cap.read()
        if (ret != True):
            break

        # create an image whenever the frame number hits the sampling interval
        if frameId % math.floor(frameRate / numberOfPicturesPerSecond) == 0:
            logging.info("create cap" + str(x))
            # convert frame to PIL image
            frame_conv = cv2.cvtColor(frame, cv2.COLOR_BGR2RGB)
            pilImage = Image.fromarray(frame_conv)
            #Calculate size = Height/2 * Width/2
            size = (round(pilImage.size[0] / 2), round(pilImage.size[1] / 2))
            #Resize using CV2
            pilImage = pilImage.resize(size, Image.ANTIALIAS)
            imgByteArr = BytesIO()
            pilImage.save(imgByteArr, format='jpeg')
            #print(type(pilImage))
            imgByteArr = imgByteArr.getvalue()

            # write image to blob for logging
            now = datetime.strftime(datetime.now(), "%Y%m%dT%H%M%S%Z")
            imageFileName = 'epm_stage/image' + str(
                int(x)) + "_img_" + now + ".jpg"
            #imageFileName= 'folder' + "/log/image" +  str(int(x)) + "_img.png"
            blockBlobService.create_blob_from_bytes('videoblob', imageFileName,
                                                    imgByteArr)
            #Write to local directory
            pilImage.save(os.path.join(pathOut, "image{:d}.jpg".format(x)))
            #cv2.imwrite(os.path.join(pathOut , "image{:d}.jpeg".format(x)),frame)
            # increment image
            x += 1
Example #19
class AzureFs(object):
    def __init__(self, name):
        self.bbs = BlockBlobService(account_name=acname,
                                    account_key=key,
                                    endpoint_suffix='core.chinacloudapi.cn')
        self.cname = name

    def put(self, content):
        x = uuid.uuid1()
        nm = str(x) + '.ms'
        self.bbs.create_blob_from_bytes(self.cname, nm, content)
        return nm

    def read(self, fid):
        return self.bbs.get_blob_to_bytes(self.cname, fid).content
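A round-trip sketch for AzureFs; it assumes the acname and key globals are configured and the container already exists:

# put() names the blob "<uuid1>.ms" and read() fetches it back
fs = AzureFs('models')
blob_id = fs.put(b'\x00\x01 binary payload')
assert fs.read(blob_id) == b'\x00\x01 binary payload'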
Example #20
class AzureStorage(BlobStorage):
    """Azure storage provider that utilizes the Azure blob storage.

    Args:
        connection_string: See http://azure.microsoft.com/en-us/documentation/articles/storage-configure-connection-string/ 
            for the connection string format.
        container_name: the name of the blob container in which all blobs
            are stored.
        
    """
    def __init__(self, connection_string, container_name):
        self._service = BlockBlobService(connection_string=connection_string)
        if not self._service.exists(container_name):
            raise ValueError("Container does not exist: " + container_name)
        self._container_name = container_name

    def get_object(self, blob_name):
        blob = self._service.get_blob_to_text(self._container_name, blob_name)
        return json.loads(blob.content)

    def put_object(self, obj, blob_name):
        data = json.dumps(obj).encode("utf-8")
        self._service.create_blob_from_bytes(self._container_name, blob_name,
                                             data)
        return Blob(blob_name, len(data))

    @contextlib.contextmanager
    def get_file(self, blob_name):
        try:
            stream = AzureBlobReader(self._service, self._container_name,
                                     blob_name)
            yield stream
        finally:
            stream.close()

    def put_file(self, fileobj, blob_name):
        self._service.create_blob_from_stream(self._container_name, blob_name,
                                              fileobj)
        size = fileobj.tell()
        return Blob(blob_name, size)

    def put_avro(self, schema, records, blob_name, codec='snappy'):
        writer = AzureBlobWriter(self._service, self._container_name,
                                 blob_name)
        fastavro.writer(writer, schema, records, codec)
        writer.close()
        size = writer.tell()
        return Blob(blob_name, size)
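A usage sketch for this provider; the connection string is a placeholder and the container must already exist:

# Store and fetch a JSON document
store = AzureStorage("DefaultEndpointsProtocol=https;AccountName=...;AccountKey=...",
                     "documents")
store.put_object({"id": 1, "tags": ["a", "b"]}, "items/1.json")
print(store.get_object("items/1.json"))  # {'id': 1, 'tags': ['a', 'b']}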
Example #21
def save_image(image, output_location, output_filename, container_name, format="png", bbs=None):
    """
    Given a PIL.Image (list of pixel values), save
    to requested filename - note that the file extension
    will determine the output file type, can be .png, .tif,
    probably others...
    """
    if not bbs:
        bbs = BlockBlobService(account_name=config["account_name"],
                               account_key=config["account_key"])
    output_path = os.path.join(output_location, output_filename)
    blob_name = remove_container_name_from_blob_path(output_path,
                                                     container_name)
    im_bytes = io.BytesIO()
    image.save(im_bytes, format=format)
    bbs.create_blob_from_bytes(container_name, blob_name, im_bytes.getvalue())
Example #22
def upload_metadata(request):
    form = RunNameForm()
    if request.method == 'POST':
        form = RunNameForm(request.POST)
        if form.is_valid():
            if not SequencingRun.objects.filter(
                    run_name=form.cleaned_data.get('run_name')).exists():
                sequencing_run, created = SequencingRun.objects.update_or_create(
                    run_name=form.cleaned_data.get('run_name'), seqids=list())
            else:
                sequencing_run = SequencingRun.objects.get(
                    run_name=form.cleaned_data.get('run_name'))
            files = [
                request.FILES.get('file[%d]' % i)
                for i in range(0, len(request.FILES))
            ]
            container_name = sequencing_run.run_name.lower().replace('_', '-')
            blob_client = BlockBlobService(
                account_name=settings.AZURE_ACCOUNT_NAME,
                account_key=settings.AZURE_ACCOUNT_KEY)
            blob_client.create_container(container_name)
            for item in files:
                blob_client.create_blob_from_bytes(
                    container_name=container_name,
                    blob_name=item.name,
                    blob=item.read())
                if item.name == 'SampleSheet.csv':
                    instance = DataFile(sequencing_run=sequencing_run,
                                        data_file=item)
                    instance.save()
                    with open(
                            'olc_webportalv2/media/{run_name}/SampleSheet.csv'.
                            format(run_name=str(sequencing_run))) as f:
                        lines = f.readlines()
                    seqid_start = False
                    seqid_list = list()
                    for i in range(len(lines)):
                        if seqid_start:
                            seqid = lines[i].split(',')[0]
                            seqid_list.append(seqid)
                        if 'Sample_ID' in lines[i]:
                            seqid_start = True
                    SequencingRun.objects.filter(pk=sequencing_run.pk).update(
                        seqids=seqid_list)
            return redirect('cowbat:upload_interop',
                            sequencing_run_pk=sequencing_run.pk)
    return render(request, 'cowbat/upload_metadata.html', {'form': form})
Example #23
    def azure_store_image_from_stream(self, img_name, file_stream):
        # connect_azure() returns a configured BlockBlobService; use a
        # lowercase local name so the class name is not shadowed
        blob_service = self.connect_azure()
        container_name = "open-pai"
        result = blob_service.create_blob_from_bytes(container_name,
                                                     'test/img/' + img_name,
                                                     file_stream)
        print("Upload success!")
        return result
Example #24
def vir_typer_upload(request, vir_typer_pk):
    vir_typer_project = get_object_or_404(VirTyperProject, pk=vir_typer_pk)
    vir_typer_samples = list(
        VirTyperRequest.objects.filter(project_name__pk=vir_typer_pk))
    sample_names = list()
    for sample in vir_typer_samples:
        sample_names.append(str(sample.LSTS_ID))
    if request.method == 'POST':
        seq_files = [
            request.FILES.get('file[%d]' % i)
            for i in range(0, len(request.FILES))
        ]
        if seq_files:
            container_name = VirTyperProject.objects.get(
                pk=vir_typer_pk).container_namer()
            blob_client = BlockBlobService(
                account_name=settings.AZURE_ACCOUNT_NAME,
                account_key=settings.AZURE_ACCOUNT_KEY)
            blob_client.create_container(container_name)
            for item in seq_files:
                blob_client.create_blob_from_bytes(
                    container_name=container_name,
                    blob_name=item.name,
                    blob=item.read())
            for sample in vir_typer_samples:
                # sample_name = '{lsts}_{sn}'.format(lsts=str(sample.LSTS_ID),
                #                                    sn=str(sample.sample_name))
                for seq_file in seq_files:
                    if str(sample.LSTS_ID) in str(seq_file):

                        vir_files = VirTyperFiles(sample_name_id=sample.pk,
                                                  sequence_file=seq_file)
                        vir_files.save()
            vir_typer_project.status = 'Processing'
            vir_typer_project.save()
            run_vir_typer.apply_async(queue='cowbat',
                                      args=(vir_typer_pk, ),
                                      countdown=10)
        return redirect('vir_typer:vir_typer_home')
    return render(
        request, 'vir_typer/vir_typer_upload_sequences.html', {
            'vir_typer_project': vir_typer_project,
            'vir_typer_samples': vir_typer_samples,
            'vir_typer_sample_names': sample_names
        })
Example #25
def upldfile():
    if request.method == 'POST':
        file = request.files['file']
        if file and allowed_file(file.filename):
            filename = secure_filename(file.filename)
            app.logger.info('FileName: ' + filename)

            block_blob_service = BlockBlobService(
                account_name=app.config['AZURE_STORAGE_ACCOUNT'],
                account_key=app.config['AZURE_STORAGE_KEY'])
            block_blob_service.create_blob_from_bytes('doc', filename,
                                                       file.read())

            #             updir = os.path.join(basedir, 'upload/')
            #             file.save(os.path.join(updir, filename))
            #             file_size = os.path.getsize(os.path.join(updir, filename))
            return jsonify(name=filename, url='https://'+app.config['AZURE_STORAGE_ACCOUNT']+'.blob.core.windows.net/' \
                           +app.config['AZURE_STORAGE_CONTAINER']+'/'+filename)
Example #26
class BlobHelper:
    def __init__(self, blob=None):
        account_name = os.environ["AzureStorageAccountName"]
        account_key = os.environ["AzureStorageAccountKey"]
        self.blob_service = BlockBlobService(
            account_name=account_name, account_key=account_key
        )
        self.blob = blob

    def create_output_blob(self, destination_container_name):
        source_url = os.environ["StorageUrl"] + self.blob.name
        destination_blob_name = self.get_destination_blob_name()

        self.blob_service.copy_blob(
            container_name=destination_container_name,
            blob_name=destination_blob_name,
            copy_source=source_url,
        )

    def get_destination_blob_name(self):
        blob_filename = self.blob.name.split("/")[1]
        datetime_str = datetime.today().strftime("%Y%m%d-%H%M%S")
        return f"{datetime_str}-{blob_filename}"

    def get_str_file(self, storage_container_name, storage_blob_name):
        compressed_file = io.BytesIO()

        self.blob_service.get_blob_to_stream(storage_container_name, storage_blob_name, compressed_file, max_connections=1)

        compressed_file.seek(0)

        compressed_gzip = gzip.GzipFile(fileobj=compressed_file)

        decompressed_file = compressed_gzip.read()

        compressed_file.close()
        compressed_gzip.close()

        file_string = decompressed_file.decode("utf-8-sig")

        return file_string

    def write_stream_file(self, storage_container_name, storage_blob_name, encoded_file):
        self.blob_service.create_blob_from_bytes(storage_container_name, storage_blob_name, encoded_file, max_connections=1)
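get_str_file expects the stored blob to be gzip-compressed UTF-8 text, so a round-trip sketch looks like this (container and blob names are placeholders, and the AzureStorageAccountName/AzureStorageAccountKey environment variables are assumed to be set):

import gzip

helper = BlobHelper()
compressed = gzip.compress("id,name\n1,alpha\n".encode("utf-8-sig"))
helper.write_stream_file("landing", "exports/data.csv.gz", compressed)
print(helper.get_str_file("landing", "exports/data.csv.gz"))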
Example #27
class AzureStorage(Storage):
    def __init__(self, option=None):
        self.block_blob_service = \
            BlockBlobService(account_name=settings.AZURE_STORAGE_ACCOUNT_NAME,
                             account_key=settings.AZURE_STORAGE_ACCOUNT_KEY)
        self.block_blob_service.create_container(
            settings.AZURE_STORAGE_DEFAULT_CONTAINER)

    def _save(self, name, content):
        content.open()
        content_stream = content.read()
        self.block_blob_service.create_blob_from_bytes(
            'media',
            name,
            content_stream,
            content_settings=(ContentSettings(
                content_type=content.file.content_type)))
        return name

    def _open(self, name, mode='rb'):
        extension_index = name.rfind('.')
        extension = ''
        if extension_index != -1:
            extension = name[extension_index:]
        tmp_file = tempfile.NamedTemporaryFile(suffix=extension, delete=False)
        self.block_blob_service.get_blob_to_stream(
            container_name=settings.AZURE_STORAGE_DEFAULT_CONTAINER,
            blob_name=name,
            stream=tmp_file,
            max_connections=2)
        tmp_file.seek(0)

        return File(tmp_file)

    def exists(self, name):
        generator = self.block_blob_service.list_blobs('media')
        for blob in generator:
            if name == blob.name:
                return True
        return False

    def url(self, name):
        return self.block_blob_service.make_blob_url(
            settings.AZURE_STORAGE_DEFAULT_CONTAINER, name)
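To route Django file uploads through this backend, settings would point at it; the module path here is an assumption:

# settings.py (sketch)
DEFAULT_FILE_STORAGE = 'myproject.storage.AzureStorage'
AZURE_STORAGE_ACCOUNT_NAME = '<account>'
AZURE_STORAGE_ACCOUNT_KEY = '<key>'
AZURE_STORAGE_DEFAULT_CONTAINER = 'media'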
Example #28
class BlobStorage():
    def __init__(self, account_name: str, sas_token: str, container: str):

        self.block_blob_service = BlockBlobService(account_name=account_name,
                                                   sas_token=sas_token)
        self.container = container

    def _read_stream_from_blob(func):
        def wrapped_func(self, file_name, *args, **kwargs):
            with BytesIO() as stream:
                self.block_blob_service.get_blob_to_stream(
                    self.container, file_name, stream)
                stream.seek(0)
                return func(self, stream, *args, **kwargs)

        return wrapped_func

    def _write_stream_to_blob(func):
        def wrapped_func(self, file_name, *args, **kwargs):
            with BytesIO() as stream:
                func(self, stream, *args, **kwargs)
                self.block_blob_service.create_blob_from_bytes(
                    self.container, file_name, stream.getvalue())

        return wrapped_func

    def _write_string_to_blob(func):
        def wrapped_func(self, file_name, *args, **kwargs):
            with StringIO() as stream:
                func(self, stream, *args, **kwargs)
                self.block_blob_service.create_blob_from_text(
                    self.container, file_name, stream.getvalue())

        return wrapped_func

    @_write_stream_to_blob
    def write_df_to_parquet(self, file_name: str, df: pd.DataFrame):
        df.to_parquet(file_name, index=False, engine='pyarrow')

    @_write_string_to_blob
    def write_df_to_csv(self, file_name: str, df: pd.DataFrame, *args,
                        **kwargs):
        df.to_csv(file_name, index=False, *args, **kwargs)
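A usage sketch: the decorators above let callers pass a blob name first and the DataFrame second, with the stream handling hidden; the account, token, and container are placeholders, and pyarrow is assumed to be installed:

import pandas as pd

store = BlobStorage('<account>', '<sas-token>', 'analytics')
df = pd.DataFrame({'x': [1, 2], 'y': [3, 4]})
store.write_df_to_csv('daily/metrics.csv', df)
store.write_df_to_parquet('daily/metrics.parquet', df)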
Example #29
def upload_sequence_data(request, sequencing_run_pk):
    sequencing_run = get_object_or_404(SequencingRun, pk=sequencing_run_pk)
    check_uploaded_seqids(sequencing_run=sequencing_run)
    if request.method == 'POST':
        container_name = sequencing_run.run_name.lower().replace('_', '-')
        blob_client = BlockBlobService(
            account_name=settings.AZURE_ACCOUNT_NAME,
            account_key=settings.AZURE_ACCOUNT_KEY)
        blob_client.create_container(container_name)
        for i in range(0, len(request.FILES)):
            item = request.FILES.get('file[%d]' % i)
            blob_client.create_blob_from_bytes(container_name=container_name,
                                               blob_name=item.name,
                                               blob=item.read())

        # return redirect('cowbat:cowbat_processing', sequencing_run_pk=sequencing_run.pk)
    return render(request, 'cowbat/upload_sequence_data.html', {
        'sequencing_run': sequencing_run,
    })
Example #30
def update_rescued():
    data = request.data
    #PERSON_GROUP_ID="victims"
    d = db.ngo_data.find_one({'type': 'safe'})
    #headers={"Content-Type":"application/octet-stream"}
    #headers["Ocp-Apim-Subscription-Key"]="501f22c3797048d2a73ae58a83ea9069"
    #BASE_URL="https://australiaeast.api.cognitive.microsoft.com/face/v1.0/"
    #cf.BaseUrl.set(BASE_URL)
    #cf.Key.set("501f22c3797048d2a73ae58a83ea9069")
    #binurl="https://australiaeast.api.cognitive.microsoft.com/face/v1.0/detect?returnFaceId=true&returnFaceLandmarks=false"
    #res=requests.post(url=binurl,headers=headers,data=data)
    #js=json.loads(res.text)
    #face_ids = [d['faceId'] for d in js]
    #identified_faces = cf.face.identify(face_ids, PERSON_GROUP_ID)
    #person_list=cf.person.lists(PERSON_GROUP_ID)
    # 'rescued_urls' may not exist yet, so use a guarded lookup
    uid = len(d.get('rescued_urls', [])) + 1
    # cursor["facial"] = "https://rvsafeimages.blob.core.windows.net/imagescontainer/"+uid+'.'+format_
    # ref.update_one({"user_id":user_id,"Disasterid":str(cursor["Disasterid"])},{"$set":cursor},upsert=False)

    block_blob_service = BlockBlobService(account_name='rvsafeimages', account_key='391TMmlvDdRWu+AsNX+ZMl1i233YQfP5dxo/xhMrPm22KtwWwwMmM9vFAJpJHrGXyBrTW4OoAInjHnby9Couug==')
    container_name = 'imagescontainer'
    # save the uploaded image bytes to blob storage
    block_blob_service.create_blob_from_bytes(container_name, 'safevictims' + str(uid) + '.jpg', data)
    urls = "https://rvsafeimages.blob.core.windows.net/imagescontainer/safevictims" + str(uid) + '.jpg'
    rescued = []
    if 'rescued_urls' in d:
        d['rescued_urls'].append(urls)
    else:
        d['rescued_urls'] = [urls]
    # for k in identified_faces:
    #     i=k['candidates']
    #     if len(i)==0:
    #         continue
    #     for t in i:
    #         if 'personId' in t:
    #             for j in person_list:
    #                 if j['personId']==t['personId']:
    #                     rescued.append([j['name'],urls])
    # rescued=rescued+d['rescued']
    # d['rescued']=rescued
    db.ngo_data.update_one({"type": "safe", "_id": ObjectId("5c3b5389d59b290b704c4012")},
                           {"$set": d}, upsert=False)
    return json.dumps({'status': 200})
Example #31
def upload_images(userid, disasterid, format_):
    data = request.get_data()
    ref = db.Victim
    # import pdb; pdb.set_trace()
    cursor = ref.find_one({'user_id': userid})
    # find_one returns None when no document matches
    if cursor is None:
        raise ValueError("Upload failed: no record for user " + userid)
    if "num_files" not in cursor:
        cursor["num_files"] = 0
    nums = int(cursor["num_files"])  #nums has to be set to 0.
    nums += 1
    cursor["num_files"] = nums
    if "blobnames" not in cursor:
        files = []
    else:
        files = cursor["blobnames"]

    try:
        # blobnames may have been stored as a string representation of a list
        files = literal_eval(files)
    except (ValueError, SyntaxError, TypeError):
        pass

    uid = userid + str(nums)
    files.append(uid + "." + format_)
    cursor["blobnames"] = files
    ref.update_one(
        {
            "_id": cursor["_id"],
            "user_id": userid,
            "Disasterid": disasterid
        }, {"$set": cursor},
        upsert=False)
    block_blob_service = BlockBlobService(
        account_name='rvsafeimages',
        account_key=
        '391TMmlvDdRWu+AsNX+ZMl1i233YQfP5dxo/xhMrPm22KtwWwwMmM9vFAJpJHrGXyBrTW4OoAInjHnby9Couug=='
    )
    container_name = 'imagescontainer'
    block_blob_service.create_blob_from_bytes(container_name,
                                              uid + "." + format_, data)
    #save to blob
    return json.dumps({"status": 200})
Example #32
class AzureTransfer(BaseTransfer):
    def __init__(self, account_name, account_key, container_name, prefix=None):
        # NOTE: Azure wants all paths to start with a slash
        prefix = "/{}".format(prefix.lstrip("/") if prefix else "")
        super().__init__(prefix=prefix)
        self.account_name = account_name
        self.account_key = account_key
        self.container_name = container_name
        self.conn = BlockBlobService(account_name=self.account_name, account_key=self.account_key)
        self.container = self.get_or_create_container(self.container_name)
        self.log.debug("AzureTransfer initialized")
        # XXX: AzureTransfer isn't actively tested and its error handling is probably lacking
        # self.log.warning("AzureTransfer is experimental and has not been thoroughly tested")

    def get_metadata_for_key(self, key):
        key = self.format_key_for_backend(key)
        return self._list_blobs(key)[0]["metadata"]

    def _metadata_for_key(self, key):
        return self._list_blobs(key)[0]["metadata"]

    def list_path(self, key):
        path = self.format_key_for_backend(key, trailing_slash=True)
        return self._list_blobs(path)

    def _list_blobs(self, path):
        self.log.debug("Listing path %r", path)
        items = self.conn.list_blobs(self.container_name, prefix=path, delimiter="/", include="metadata")
        result = []
        for item in items:
            result.append({
                "last_modified": item.properties.last_modified.isoformat(),
                "metadata": item.metadata,
                "name": self.format_key_from_backend(item.name),
                "size": item.properties.content_length,
            })
        return result

    def delete_key(self, key):
        key = self.format_key_for_backend(key)
        self.log.debug("Deleting key: %r", key)
        return self.conn.delete_blob(self.container_name, key)

    def get_contents_to_file(self, key, filepath_to_store_to, *, progress_callback=None):
        key = self.format_key_for_backend(key)
        self.log.debug("Starting to fetch the contents of: %r to: %r", key, filepath_to_store_to)
        meta = self.conn.get_blob_to_path(self.container_name, key, filepath_to_store_to)
        if progress_callback:
            progress_callback(1, 1)
        return meta

    def get_contents_to_fileobj(self, key, fileobj_to_store_to, *, progress_callback=None):
        key = self.format_key_for_backend(key)
        self.log.debug("Starting to fetch the contents of: %r", key)
        meta = self.conn.get_blob_to_file(self.container_name, key, fileobj_to_store_to)
        if progress_callback:
            progress_callback(1, 1)
        return meta

    def get_contents_to_string(self, key):
        key = self.format_key_for_backend(key)
        self.log.debug("Starting to fetch the contents of: %r", key)
        return self.conn.get_blob_to_bytes(self.container_name, key), self._metadata_for_key(key)

    def store_file_from_memory(self, key, memstring, metadata=None):
        key = self.format_key_for_backend(key)
        self.conn.create_blob_from_bytes(self.container_name, key, memstring,
                                         x_ms_meta_name_values=self.sanitize_metadata(metadata))

    def store_file_from_disk(self, key, filepath, metadata=None, multipart=None):
        key = self.format_key_for_backend(key)
        self.conn.create_blob_from_path(self.container_name, key, filepath, None)
        #                               x_ms_meta_name_values=self.sanitize_metadata(metadata))

    def get_or_create_container(self, container_name):
        start_time = time.time()
        self.conn.create_container(container_name)
        self.log.debug("Got/Created container: %r successfully, took: %.3fs", container_name, time.time() - start_time)
        return container_name
Example #33
class AzureTransfer(BaseTransfer):
    def __init__(self, account_name, account_key, bucket_name, prefix=None):
        prefix = "{}".format(prefix.lstrip("/") if prefix else "")
        super().__init__(prefix=prefix)
        self.account_name = account_name
        self.account_key = account_key
        self.container_name = bucket_name
        self.conn = BlockBlobService(account_name=self.account_name, account_key=self.account_key)
        self.container = self.get_or_create_container(self.container_name)
        self.log.debug("AzureTransfer initialized, %r", self.container_name)

    def get_metadata_for_key(self, key):
        key = self.format_key_for_backend(key, remove_slash_prefix=True, trailing_slash=False)
        results = self._list_blobs(key)
        if not results:
            raise FileNotFoundFromStorageError(key)
        return results[0]["metadata"]

    def _metadata_for_key(self, key):
        return self._list_blobs(key)[0]["metadata"]

    def list_path(self, key, trailing_slash=True):  # pylint: disable=arguments-differ
        # Trailing slash needed when listing directories, without when listing individual files
        path = self.format_key_for_backend(key, remove_slash_prefix=True, trailing_slash=trailing_slash)
        return self._list_blobs(path)

    def _list_blobs(self, path):
        self.log.debug("Listing path %r", path)
        if path:
            items = self.conn.list_blobs(self.container_name, prefix=path, delimiter="/", include="metadata")
        else:  # If you give Azure an empty path, it gives you an authentication error
            items = self.conn.list_blobs(self.container_name, delimiter="/", include="metadata")
        results = []
        for item in items:
            if not isinstance(item, BlobPrefix):
                results.append({
                    "last_modified": item.properties.last_modified,
                    # Azure Storage cannot handle '-' so we turn them into underscores and back again
                    "metadata": dict((k.replace("_", "-"), v) for k, v in item.metadata.items()),
                    "name": self.format_key_from_backend(item.name),
                    "size": item.properties.content_length,
                })
        return results

    def delete_key(self, key):
        key = self.format_key_for_backend(key, remove_slash_prefix=True)
        self.log.debug("Deleting key: %r", key)
        try:
            return self.conn.delete_blob(self.container_name, key)
        except azure.common.AzureMissingResourceHttpError as ex:
            raise FileNotFoundFromStorageError(key) from ex

    def get_contents_to_file(self, key, filepath_to_store_to, *, progress_callback=None):
        key = self.format_key_for_backend(key, remove_slash_prefix=True)

        self.log.debug("Starting to fetch the contents of: %r to: %r", key, filepath_to_store_to)
        try:
            self.conn.get_blob_to_path(self.container_name, key, filepath_to_store_to)
        except azure.common.AzureMissingResourceHttpError as ex:
            raise FileNotFoundFromStorageError(key) from ex

        if progress_callback:
            progress_callback(1, 1)
        return self._metadata_for_key(key)

    def get_contents_to_fileobj(self, key, fileobj_to_store_to, *, progress_callback=None):
        key = self.format_key_for_backend(key, remove_slash_prefix=True)

        self.log.debug("Starting to fetch the contents of: %r", key)
        try:
            self.conn.get_blob_to_stream(self.container_name, key, fileobj_to_store_to)
        except azure.common.AzureMissingResourceHttpError as ex:
            raise FileNotFoundFromStorageError(key) from ex

        if progress_callback:
            progress_callback(1, 1)

        return self._metadata_for_key(key)

    def get_contents_to_string(self, key):
        key = self.format_key_for_backend(key, remove_slash_prefix=True)
        self.log.debug("Starting to fetch the contents of: %r", key)
        try:
            blob = self.conn.get_blob_to_bytes(self.container_name, key)
            return blob.content, self._metadata_for_key(key)
        except azure.common.AzureMissingResourceHttpError as ex:
            raise FileNotFoundFromStorageError(key) from ex

    def store_file_from_memory(self, key, memstring, metadata=None):
        key = self.format_key_for_backend(key, remove_slash_prefix=True)
        self.conn.create_blob_from_bytes(self.container_name, key, memstring,
                                         metadata=self.sanitize_metadata(metadata, replace_hyphen_with="_"))

    def store_file_from_disk(self, key, filepath, metadata=None, multipart=None):
        key = self.format_key_for_backend(key, remove_slash_prefix=True)
        self.conn.create_blob_from_path(self.container_name, key, filepath,
                                        metadata=self.sanitize_metadata(metadata, replace_hyphen_with="_"))

    def get_or_create_container(self, container_name):
        start_time = time.monotonic()
        self.conn.create_container(container_name)
        self.log.debug("Got/Created container: %r successfully, took: %.3fs",
                       container_name, time.monotonic() - start_time)
        return container_name
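A usage sketch for this transfer backend; the credentials, container, and keys are placeholders:

# Store a chunk from memory, then read it back along with its metadata
transfer = AzureTransfer('<account>', '<key>', 'backups', prefix='site1')
transfer.store_file_from_memory('basebackup/chunk0', b'data', metadata={'part': '0'})
content, metadata = transfer.get_contents_to_string('basebackup/chunk0')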