Example #1
    def _save(self, name, content):
        """
        Use the Azure Storage service to write ``content`` to a remote file
        (called ``name``).
        """

        content.open()

        if hasattr(content.file, 'content_type'):
            content_type = content.file.content_type
        else:
            content_type = mimetypes.guess_type(name)[0]

        cache_control = self.get_cache_control(self.container, name,
                                               content_type)

        self._get_service().create_blob_from_stream(
            container_name=self.container,
            blob_name=name,
            stream=content,
            content_settings=ContentSettings(
                content_type=content_type,
                cache_control=cache_control,
            ),
        )

        content.close()

        return name
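Example #1 calls a get_cache_control hook that is not shown. A minimal sketch of what such a hook might look like, assuming a simple content-type-based policy (the policy itself is an assumption, not part of the original backend):

    def get_cache_control(self, container, name, content_type):
        # Hypothetical policy hook used by _save above; a real backend might
        # derive this from settings or vary it per container and blob name.
        if content_type and content_type.startswith('image/'):
            return 'public, max-age=31536000'
        return None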
Example #2
 def _azure_3_save(self, name, content_type, content_data):
     content_settings = ContentSettings(content_type=content_type)
     self.connection.create_blob_from_bytes(self.azure_container,
                                            name,
                                            content_data,
                                            content_settings=content_settings,
                                            timeout=self.azure_timeout)
Example #3
    def pdf_upload(self, debug):
        '''
            @function: Upload a PDF file.
            @args:
                1. debug: if debug is "False", the PDF file is deleted after
                          upload; if debug is "True", it is kept.
        '''

        aes_pdfname = aes.encrypt_str(
            self.pdf_config_object.pdf_company) + "/" + aes.encrypt_str(
                self.pdf_config_object.pdf_property) + "/" + self.pdf_name
        self.pdf_url = self.pdf_config_object.pdf_url + aes_pdfname
        try:
            blob_service = BlockBlobService(
                self.pdf_config_object.account_name,
                self.pdf_config_object.account_key)
            blob_service.create_blob_from_path(
                self.pdf_config_object.pdf_upload_path_name,
                aes_pdfname,
                self.pdf_config_object.pdf_path + "/" + self.pdf_name,
                content_settings=ContentSettings(
                    content_type='application/pdf'))
            if debug == "False":
                self.pdf_clean()
            return self.pdf_url
        except Exception:
            # Re-raise with the original traceback; the old ``return None``
            # after ``raise e`` was unreachable.
            raise
Example #4
 def put_file_into_storage(self, dirName, fileName):
     """
     Ship the outdated log file to the specified blob container.
     """
     if not self.container_created:
         self.service.create_container(self.container)
         self.container_created = True
     fd, tmpfile_path = None, ''
     try:
         file_path = os.path.join(dirName, fileName)
         if self.zip_compression:
             suffix, content_type = '.zip', 'application/zip'
             fd, tmpfile_path = mkstemp(suffix=suffix)
             with os.fdopen(fd, 'wb') as f:
                 with ZipFile(f, 'w', ZIP_DEFLATED) as z:
                     z.write(file_path, arcname=fileName)
             file_path = tmpfile_path
         else:
             suffix, content_type = '', 'text/plain'
         self.service.create_blob_from_path(container_name=self.container,
                                            blob_name=fileName+suffix,
                                        file_path=file_path,  # not fileName: use the full (possibly zipped) path
                                            content_settings=ContentSettings(content_type=content_type),
                                            max_connections=self.max_connections
                                            )  # max_retries and retry_wait no longer arguments in azure 0.33
     finally:
         if self.zip_compression and fd:
             os.remove(tmpfile_path)
Example #5
def blob_service_create_blob_from_bytes(account_name, storage_key,
                                        container_name, blob_name, blob):
    content_settings = ContentSettings(content_type='image/jpeg')

    block_blob_service = get_block_blob_service(account_name, storage_key)
    block_blob_service.create_blob_from_bytes(
        container_name, blob_name, blob, content_settings=content_settings)
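Example #5 assumes a get_block_blob_service factory that is not shown. A plausible one-liner against the legacy azure-storage-blob SDK used throughout these examples:

from azure.storage.blob import BlockBlobService

def get_block_blob_service(account_name, storage_key):
    # Hypothetical factory assumed above: build a block blob client from an
    # account name and access key.
    return BlockBlobService(account_name=account_name, account_key=storage_key)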
Example #6
    def upload_blob(self):
        content_settings = None
        if any([self.content_type, self.content_encoding,
                self.content_language, self.content_disposition,
                self.cache_control, self.content_md5]):
            content_settings = ContentSettings(
                content_type=self.content_type,
                content_encoding=self.content_encoding,
                content_language=self.content_language,
                content_disposition=self.content_disposition,
                cache_control=self.cache_control,
                content_md5=self.content_md5)
        if not self.check_mode:
            try:
                self.blob_client.create_blob_from_path(
                    self.container,
                    self.blob,
                    self.src,
                    metadata=self.tags,
                    content_settings=content_settings)
            except AzureHttpError as exc:
                self.fail("Error creating blob {0} - {1}".format(
                    self.blob, str(exc)))

        self.blob_obj = self.get_blob()
        self.results['changed'] = True
        self.results['actions'].append('created blob {0} from {1}'.format(
            self.blob, self.src))
        self.results['container'] = self.container_obj
        self.results['blob'] = self.blob_obj
Example #7
    def update_blob_content_settings(self):
        content_settings = ContentSettings(
            content_type=self.content_type,
            content_encoding=self.content_encoding,
            content_language=self.content_language,
            content_disposition=self.content_disposition,
            cache_control=self.cache_control,
            content_md5=self.content_md5)
        if not self.check_mode:
            try:
                self.blob_client.set_blob_properties(
                    self.container,
                    self.blob,
                    content_settings=content_settings)
            except AzureHttpError as exc:
                self.fail("Update blob content settings {0}:{1} - {2}".format(
                    self.container, self.blob, str(exc)))

        self.blob_obj = self.get_blob()
        self.results['changed'] = True
        self.results['actions'].append(
            "updated blob {0}:{1} content settings.".format(
                self.container, self.blob))
        self.results['container'] = self.container_obj
        self.results['blob'] = self.blob_obj
Example #8
    def upload(self, id, max_size=10):
        """
        Complete the file upload, or clear an existing upload.

        :param id: The resource_id.
        :param max_size: Ignored.
        """
        if self.filename:
            if self.can_use_advanced_azure:
                from azure.storage import blob as azure_blob
                from azure.storage.blob.models import ContentSettings

                blob_service = azure_blob.BlockBlobService(
                    self.driver_options['key'],
                    self.driver_options['secret']
                )
                content_settings = None
                if self.guess_mimetype:
                    content_type, _ = mimetypes.guess_type(self.filename)
                    if content_type:
                        content_settings = ContentSettings(
                            content_type=content_type
                        )

                return blob_service.create_blob_from_stream(
                    container_name=self.container_name,
                    blob_name=self.path_from_filename(
                        id,
                        self.filename
                    ),
                    stream=self.file_upload,
                    content_settings=content_settings
                )
            else:
                self.container.upload_object_via_stream(
                    self.file_upload,
                    object_name=self.path_from_filename(
                        id,
                        self.filename
                    )
                )

        elif self._clear and self.old_filename and not self.leave_files:
            # This is only set when a previously-uploaded file is replaced
            # by a link. We want to delete the previously-uploaded file.
            try:
                self.container.delete_object(
                    self.container.get_object(
                        self.path_from_filename(
                            id,
                            self.old_filename
                        )
                    )
                )
            except ObjectDoesNotExistError:
                # It's possible for the object to have already been deleted, or
                # for it to not yet exist in a committed state due to an
                # outstanding lease.
                return
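Example #8 calls self.path_from_filename without showing it. One plausible layout, as a hedged sketch (the 'resources' prefix is an assumption):

import os.path

def path_from_filename(self, id, filename):
    # Hypothetical layout: namespace each upload under its resource id so
    # uploads for different resources cannot collide.
    return os.path.join('resources', id, os.path.basename(filename))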
Example #9
    def _put_file(self, key, file):
        from azure.storage.blob.models import ContentSettings

        if self.checksum:
            content_settings = ContentSettings(content_md5=_file_md5(file))
        else:
            content_settings = ContentSettings()

        with map_azure_exceptions(key=key):
            self.block_blob_service.create_blob_from_stream(
                container_name=self.container,
                blob_name=key,
                stream=file,
                max_connections=self.max_connections,
                content_settings=content_settings,
            )
            return key
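Example #9 (and #10 below) rely on _file_md5 / _byte_buffer_md5 helpers that are not shown. Azure expects content_md5 to be the base64-encoded MD5 digest of the blob body (as Example #11 also computes), so a plausible sketch of the file variant is:

import base64
import hashlib

def _file_md5(file):
    # Hypothetical helper: stream the file through MD5, rewind so the upload
    # reads from the start, and return the base64 digest Azure expects.
    md5 = hashlib.md5()
    for chunk in iter(lambda: file.read(8192), b''):
        md5.update(chunk)
    file.seek(0)
    return base64.b64encode(md5.digest()).decode('ascii')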
Example #10
    def _put(self, key, data):
        from azure.storage.blob.models import ContentSettings

        if self.checksum:
            content_settings = ContentSettings(content_md5=_byte_buffer_md5(data))
        else:
            content_settings = ContentSettings()

        with map_azure_exceptions(key=key):
            self.block_blob_service.create_blob_from_bytes(
                container_name=self.container,
                blob_name=key,
                blob=data,
                max_connections=self.max_connections,
                content_settings=content_settings,
            )
            return key
Example #11
    def upload_blob(
        self,
        container: Container,
        filename: FileLike,
        blob_name: str = None,
        acl: str = None,
        meta_data: MetaData = None,
        content_type: str = None,
        content_disposition: str = None,
        cache_control: str = None,
        chunk_size: int = 1024,
        extra: ExtraOptions = None,
    ) -> Blob:
        if acl:
            logger.info(messages.OPTION_NOT_SUPPORTED, "acl")

        meta_data = {} if meta_data is None else meta_data
        extra = extra if extra is not None else {}

        extra_args = self._normalize_parameters(extra, self._PUT_OBJECT_KEYS)
        extra_args.setdefault("content_type", content_type)
        extra_args.setdefault("content_disposition", content_disposition)
        extra_args.setdefault("cache_control", cache_control)

        azure_container = self._get_azure_container(container.name)
        blob_name = blob_name or validate_file_or_path(filename)

        # azure does not set content_md5 on backend
        file_hash = file_checksum(filename, hash_type=self.hash_type)
        file_digest = file_hash.digest()
        checksum = base64.b64encode(file_digest).decode("utf-8").strip()
        extra_args.setdefault("content_md5", checksum)

        content_settings = ContentSettings(**extra_args)

        if isinstance(filename, str):
            self.service.create_blob_from_path(
                container_name=azure_container.name,
                blob_name=blob_name,
                file_path=filename,
                content_settings=content_settings,
                metadata=meta_data,
                validate_content=True,
            )
        else:
            self.service.create_blob_from_stream(
                container_name=azure_container.name,
                blob_name=blob_name,
                stream=filename,
                content_settings=content_settings,
                metadata=meta_data,
                validate_content=True,
            )

        azure_blob = self._get_azure_blob(azure_container.name, blob_name)
        return self._convert_azure_blob(container, azure_blob)
Example #12
def uri_put_file(creds, uri, fp, content_type=None):
    assert fp.tell() == 0
    assert uri.startswith('wabs://')

    def log_upload_failures_on_error(exc_tup, exc_processor_cxt):
        def standard_detail_message(prefix=''):
            return (prefix + '  There have been {n} attempts to upload  '
                    'file {url} so far.'.format(n=exc_processor_cxt, url=uri))

        typ, value, tb = exc_tup
        del exc_tup

        # Screen for certain kinds of known-errors to retry from
        if issubclass(typ, socket.error):
            socketmsg = value[1] if isinstance(value, tuple) else value

            logger.info(
                msg='Retrying upload because of a socket error',
                detail=standard_detail_message(
                    "The socket error's message is '{0}'.".format(socketmsg)))
        else:
            # For all otherwise untreated exceptions, report them as a
            # warning and retry anyway -- all exceptions that can be
            # justified should be treated and have error messages
            # listed.
            logger.warning(
                msg='retrying file upload from unexpected exception',
                detail=standard_detail_message(
                    'The exception type is {etype} and its value is '
                    '{evalue} and its traceback is {etraceback}'.format(
                        etype=typ,
                        evalue=value,
                        etraceback=''.join(traceback.format_tb(tb)))))

        # Help Python GC by resolving possible cycles
        del tb

    url_tup = urlparse(uri)
    kwargs = dict(content_settings=ContentSettings(content_type),
                  validate_content=True)

    conn = BlockBlobService(creds.account_name,
                            creds.account_key,
                            sas_token=creds.access_token,
                            protocol='https')
    conn.create_blob_from_bytes(url_tup.netloc, url_tup.path.lstrip('/'),
                                fp.read(), **kwargs)

    # To maintain consistency with the S3 version of this function we must
    # return an object with a certain set of attributes.  Currently, that set
    # of attributes consists of only 'size'
    return _Key(size=fp.tell())
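Per the closing comment, the _Key object only needs a size attribute to stay compatible with the S3 code path. A one-line sketch of that stand-in, assuming nothing else is consumed:

from collections import namedtuple

# Hypothetical stand-in: callers only read .size, per the comment above.
_Key = namedtuple('_Key', ['size'])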
Example #13
 def __sendFrameForStorage(self, frame):
     # Initialise result so the final return cannot hit an unbound name
     # when create_blob_from_bytes raises.
     result = None
     try:
         blobname = time.strftime("%Y%m%d-%H%M%S.jpg")
         result = self.block_blob_service.create_blob_from_bytes(
             self.imageStorageEndpoint["containername"],
             blobname,
             frame,
             content_settings=ContentSettings('image/jpeg'))
         if self.verbose:
             print("Stored blob name [" + blobname + "]")
     except Exception as e:
         print("Store blob failed with: " + str(e))
     return result
Example #14
        def _guess_content_type(file_path, original):
            if original.content_encoding or original.content_type:
                return original

            mimetypes.add_type('application/json', '.json')
            mimetypes.add_type('application/javascript', '.js')
            mimetypes.add_type('application/wasm', '.wasm')

            content_type, _ = mimetypes.guess_type(file_path)
            return ContentSettings(
                content_type=content_type,
                content_disposition=original.content_disposition,
                content_language=original.content_language,
                content_md5=original.content_md5,
                cache_control=original.cache_control)
Example #15
def uploadImageToBlob(block_blob_service, container_name, local_file_name,
                      full_path_to_file, image_metadata):

    if not block_blob_service.exists(container_name):
        block_blob_service.create_container(container_name)

        # Set the permission so the blobs are public.
        block_blob_service.set_container_acl(
            container_name, public_access=PublicAccess.Container)

    block_blob_service.create_blob_from_path(
        container_name,
        local_file_name,
        full_path_to_file,
        content_settings=ContentSettings(content_type='image/png'),
        metadata=image_metadata)
Example #16
    def _save(self, name, content):
        if hasattr(content.file, 'content_type'):
            content_type = content.file.content_type
        else:
            content_type = mimetypes.guess_type(name)[0]

        if hasattr(content, 'chunks'):
            content_data = b''.join(chunk for chunk in content.chunks())
        else:
            content_data = content.read()

        self.block_blob_service.create_blob_from_bytes(
            self.azure_container,
            name,
            content_data,
            content_settings=ContentSettings(content_type=content_type))
        return name
Example #17
    def zzztest_storage_account_creation(self):  #!!! need to update
        from azure.common.credentials import UserPassCredentials
        from azure.mgmt.resource import ResourceManagementClient
        from azure.mgmt.storage import StorageManagementClient
        from azure.storage import CloudStorageAccount
        from azure.storage.blob.models import ContentSettings
        import getpass

        username, subscription_id = [
            s.strip()
            for s in open(os.path.expanduser("~") +
                          "/azurebatch/account.txt").readlines()
        ]
        print("Azure password")
        password = getpass.getpass()
        credentials = UserPassCredentials(username, password)
        resource_client = ResourceManagementClient(credentials,
                                                   subscription_id)
        storage_client = StorageManagementClient(credentials, subscription_id)
        resource_client.resource_groups.create_or_update(
            'my_resource_group', {'location': 'westus'})
        async_create = storage_client.storage_accounts.create(
            'my_resource_group', 'my_storage_account', {
                'location': 'westus',
                'account_type': 'Standard_LRS'
            })

        async_create.wait()
        storage_keys = storage_client.storage_accounts.list_keys(
            'my_resource_group', 'my_storage_account')
        storage_keys = {v.key_name: v.value for v in storage_keys.keys}
        storage_client = CloudStorageAccount('my_storage_account',
                                             storage_keys['key1'])
        blob_service = storage_client.create_block_blob_service()
        blob_service.create_container('my_container_name')
        blob_service.create_blob_from_bytes(
            'my_container_name',
            'my_blob_name',
            b'<center><h1>Hello World!</h1></center>',
            content_settings=ContentSettings('text/html'))
        print(blob_service.make_blob_url('my_container_name', 'my_blob_name'))
Example #18
print('Downloading cloud %s/%s to local %s' %
      (container_name, source_audio_name, source_audio_name))
blob_service.get_blob_to_path(container_name, source_audio_name,
                              source_audio_name)
print('Downloading cloud %s/%s to local %s' %
      (container_name, source_video_name, source_video_name))
blob_service.get_blob_to_path(container_name, source_video_name,
                              source_video_name)

print('smash away here - just copying video file for now')
copyfile(source_video_name, smashed_name)
blob_service.create_blob_from_path(
    container_name,
    smashed_name,
    smashed_name,
    content_settings=ContentSettings(content_type=smashed_content_type))
print('URL for public download is https://%s.blob.core.windows.net/%s/%s' %
      (storage_account_name, container_name, smashed_name))

print('Uploading local %s to cloud %s/%s' %
      (smashed_name, container_name, smashed_name))
smash_video_audio(source_video_name, source_audio_name, smashed_name)
blob_service.create_blob_from_path(
    container_name,
    smashed_name,
    smashed_name,
    content_settings=ContentSettings(content_type=smashed_content_type))
print('URL for public download is https://%s.blob.core.windows.net/%s/%s' %
      (storage_account_name, container_name, smashed_name))
Example #19
def saveToBlob(block_blob_service, local_path, local_file_name, container_name):
    # Create a file in Documents to test the upload and download.
    full_path_to_file = os.path.join(local_path, local_file_name)
    print("Temp file = " + full_path_to_file)
    print("\nUploading to Blob storage as blob: " + local_file_name)
    # Upload the created file, using local_file_name for the blob name
    block_blob_service.create_blob_from_path(
        container_name,
        local_file_name,
        full_path_to_file,
        content_settings=ContentSettings(content_type='image/png'))
Example #20
    def upload_file(data_file, meta, encryption_metadata, max_concurrency):
        logger = getLogger(__name__)
        try:
            azure_metadata = {
                u'sfcdigest': meta[SHA256_DIGEST],
            }
            if (encryption_metadata):
                azure_metadata.update({
                    u'encryptiondata':
                    json.dumps({
                        u'EncryptionMode': u'FullBlob',
                        u'WrappedContentKey': {
                            u'KeyId': u'symmKey1',
                            u'EncryptedKey': encryption_metadata.key,
                            u'Algorithm': u'AES_CBC_256'
                        },
                        u'EncryptionAgent': {
                            u'Protocol': '1.0',
                            u'EncryptionAlgorithm': u'AES_CBC_128',
                        },
                        u'ContentEncryptionIV': encryption_metadata.iv,
                        u'KeyWrappingMetadata': {
                            u'EncryptionLibrary': u'Java 5.3.0'
                        }
                    }),
                    u'matdesc':
                    encryption_metadata.matdesc
                })
            azure_location = SnowflakeAzureUtil.extract_container_name_and_path(
                meta[u'stage_info'][u'location'])
            path = azure_location.path + meta[u'dst_file_name'].lstrip('/')

            client = meta[u'client']
            callback = None
            if meta[u'put_callback']:
                callback = meta[u'put_callback'](
                    data_file,
                    os.path.getsize(data_file),
                    output_stream=meta[u'put_callback_output_stream'])

            def azure_callback(current, total):
                callback(current)

            client.create_blob_from_path(
                azure_location.container_name,
                path,
                data_file,
                progress_callback=azure_callback
                if meta[u'put_callback'] else None,
                metadata=azure_metadata,
                max_connections=max_concurrency,
                content_settings=ContentSettings(
                    content_type=u'application/octet-stream',
                    content_encoding=u'utf-8',
                ))

            logger.debug(u'DONE putting a file')
            meta[u'dst_file_size'] = meta[u'upload_size']
            meta[u'result_status'] = ResultStatus.UPLOADED
        except AzureHttpError as err:
            if (err.status_code == 403
                    and "Signature not valid in the specified time frame"
                    in err.message):
                logger.debug(u"AZURE Token expired. Renew and retry")
                meta[u'result_status'] = ResultStatus.RENEW_TOKEN
                return None
            else:
                meta[u'last_error'] = err
                meta[u'result_status'] = ResultStatus.NEED_RETRY
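SnowflakeAzureUtil.extract_container_name_and_path is referenced above but not shown. One plausible reading, assuming stage locations have the form "container/prefix/":

from collections import namedtuple

AzureLocation = namedtuple('AzureLocation', ['container_name', 'path'])

def extract_container_name_and_path(stage_location):
    # Hypothetical sketch: split "container/prefix/" into container and path,
    # keeping a trailing slash so path + dst_file_name joins cleanly.
    container_name, _, path = stage_location.partition('/')
    if path and not path.endswith('/'):
        path += '/'
    return AzureLocation(container_name=container_name, path=path)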
Example #21
def main(inputDict: dict) -> str:
    ## Arguments
    fileType = inputDict['fileType']
    fileURL = inputDict["fileURL"]
    container = inputDict["container"]
    selector = inputDict["selector"]
    logging.info(f"fileType: {fileType}")
    logging.info(f"fileURL: {fileURL}")
    logging.info(f"container: {container}")
    logging.info(f"selector: {selector}")
    ## `selector` takes the format "XofY" or "ALL"
    _all_ = selector == "ALL"
    X = None
    Y = None
    if not _all_:
        X = int(selector.split("of")[0])
        Y = int(selector.split("of")[1])
    logging.info("changes 12")
    ## Set file name to be used and get container it came from
    fileName = unquote(fileURL.split("/")[-1])  # (contains file extension)
    container = fileURL.split("/")[-2]
    ## Create bbs
    bbs = BlockBlobService(
        connection_string=os.getenv("fsevideosConnectionString"))
    logging.info("bbs created")
    ## Create SAS URL
    sasURL = get_SAS_URL(fileURL=fileURL,
                         block_blob_service=bbs,
                         container=container)
    logging.info(f"sasURL: {sasURL}")
    ## Download blob to temporary location
    tempClipFilePath = "/tmp/" + fileName
    bbs.get_blob_to_path(container_name=container,
                         blob_name=fileName,
                         file_path=tempClipFilePath)
    logging.info(f'file saved to "{tempClipFilePath}"')
    ## Set the size of the clips you want (in seconds)
    chunk_length_secs = 3600
    ## Create clip
    if fileType == "MP4":
        clip = VideoFileClip(tempClipFilePath)
    elif fileType == "MP3":
        clip = AudioFileClip(tempClipFilePath)
    else:
        raise ValueError("wrong file type")

    ## Get number of chunks (files to be created)
    logging.info(f"clip.duration: {clip.duration}")
    chunk_count = ceil(clip.duration / chunk_length_secs)
    ## If not _all_, make sure Y is the expected number
    if not _all_:
        if chunk_count != Y:
            vem = (f"Based on the selector passed ({selector}), "
                   f"there should be {Y} chunks, but instead there "
                   f"are {chunk_count} chunks.")
            raise ValueError(vem)
    ## Loop through the chunks
    for a in range(chunk_count):
        A = datetime.now()
        ## Get prefix to use
        subclipPrefix = f"{a+1}of{chunk_count}"
        subclipFileName = f"{subclipPrefix}_{fileName}"
        logging.info(f"clip: {subclipPrefix}")
        ## Check how many times this clip has been created already (max 3 atts allowed)
        visDF = get_VideoIndexerSplits_rows(subclipFileName=subclipFileName)
        ## If we're on the last subclip
        if a + 1 == chunk_count:
            subclipDurationSeconds = clip.duration - (a * chunk_length_secs)
        else:
            subclipDurationSeconds = chunk_length_secs

        startSeconds = chunk_length_secs * a
        startHMS = "{:0>8}".format(str(timedelta(seconds=startSeconds)))
        endHMS = "{:0>8}".format(
            str(timedelta(seconds=startSeconds + chunk_length_secs)))

        fileOutPath = "/tmp/" + subclipFileName

        # ff = ffmpy.FFmpeg(
        #     executable="./ffmpeg",
        #     inputs={
        #             tempClipFilePath : f"-ss {startSeconds}"
        #             },
        #     outputs={
        #             fileOutPath : f"-t {subclipDurationSeconds} -c copy"
        #             }
        # )
        # logging.info(f"ff.cmd: {ff.cmd}")
        # ff.run()
        if ((_all_) or (X == a + 1)) and (len(visDF) <= 2):
            logging.info(f'startSeconds: {startSeconds}')
            logging.info(f"tempClipFilePath: {tempClipFilePath}")
            logging.info(f"subclipDurationSeconds: {subclipDurationSeconds}")
            logging.info(f"fileOutPath: {fileOutPath}")
            logging.info(f"startHMS: {startHMS}")
            logging.info(f"endHMS: {endHMS}")

            # # ffmpegCommand = f'./ffmpeg -ss {startSeconds} -i "{tempClipFilePath}" -t {subclipDurationSeconds} -c copy -bsf:a aac_adtstoasc "{fileOutPath}"'
            # ffmpegCommand = f'./ffmpeg -ss {startSeconds} -i "{tempClipFilePath}" -t {subclipDurationSeconds} -bsf:a aac_adtstoasc -acodec copy -vcodec copy "{fileOutPath}"'
            ffmpegCommand = f'./ffmpeg -i "{tempClipFilePath}" -ss {startHMS} -to {endHMS} -c copy "{fileOutPath}"'
            logging.info(f"ffmpegCommand: {ffmpegCommand}")
            # p = subprocess.Popen(ffmpegCommand)
            # p.wait()
            result = os.popen(ffmpegCommand).read()
            logging.info("command run")
            logging.info(f"result: {result}")
            ## Add attempt to SQL
            add_VideoIndexerSplits_row(subclipFileName=subclipFileName)

            ## Create subclip using moviepy
            # t_start = startSeconds
            # t_end = startSeconds+subclipDurationSeconds
            # logging.info(f"t_start: {t_start}")
            # logging.info(f"t_end: {t_end}")
            # subclip = clip.subclip(
            #     t_start=t_start,
            #     t_end=t_end
            # )
            # logging.info("subclip created")
            # # temp_file_path = tempfile.gettempdir() + "/temp-audio.m4a"
            # temp_file_path = "/tmp/temp-audio.m4a"
            # ## Save to path
            # subclip.write_videofile(
            #     filename=fileOutPath,
            #     verbose=False,
            #     logger=None,
            #     temp_audiofile=temp_file_path,
            #     remove_temp=True,
            #     audio_codec="aac"
            # )
            # logging.info("subclip written to file")
            # subclip.close()
            # logging.info("subclip closed")
            contentType = "video/mp4" if fileType == "MP4" else "audio/mpeg"
            bbs.create_blob_from_path(
                container_name=container,
                blob_name=subclipFileName,
                file_path=fileOutPath,
                content_settings=ContentSettings(content_type=contentType))
            ## Delete created file from temporary storage
            os.remove(fileOutPath)
            B = datetime.now()
            logging.info(f"{subclipPrefix} uploaded, time taken: {B-A}")

        else:
            logging.info("this ffmpeg run is not happening")

            if len(visDF) >= 4:
                logging.info("Reason: file has already been created 3 times")
            if not _all_:
                logging.info(f"Reason: re-do was requested for {X} not {a+1}")

    ## Delete original file from temporary storage
    os.remove(tempClipFilePath)

    return f"{chunk_count} files uploaded to the `{container}` container"
Example #22
    def batch_upload(self):
        def _glob_files_locally(folder_path):

            len_folder_path = len(folder_path) + 1

            for root, _, files in os.walk(folder_path):
                for f in files:
                    full_path = os.path.join(root, f)
                    yield full_path, full_path[len_folder_path:]

        def _normalize_blob_file_path(path, name):
            path_sep = '/'
            if path:
                name = path_sep.join((path, name))

            return path_sep.join(os.path.normpath(name).split(
                os.path.sep)).strip(path_sep)

        def _guess_content_type(file_path, original):
            if original.content_encoding or original.content_type:
                return original

            mimetypes.add_type('application/json', '.json')
            mimetypes.add_type('application/javascript', '.js')
            mimetypes.add_type('application/wasm', '.wasm')

            content_type, _ = mimetypes.guess_type(file_path)
            return ContentSettings(
                content_type=content_type,
                content_disposition=original.content_disposition,
                content_language=original.content_language,
                content_md5=original.content_md5,
                cache_control=original.cache_control)

        if not os.path.exists(self.batch_upload_src):
            self.fail(
                "batch upload source directory {0} does not exist".format(
                    self.batch_upload_src))

        if not os.path.isdir(self.batch_upload_src):
            self.fail("incorrect usage: {0} is not a directory".format(
                self.batch_upload_src))

        source_dir = os.path.realpath(self.batch_upload_src)
        source_files = list(_glob_files_locally(source_dir))

        content_settings = ContentSettings(
            content_type=self.content_type,
            content_encoding=self.content_encoding,
            content_language=self.content_language,
            content_disposition=self.content_disposition,
            cache_control=self.cache_control,
            content_md5=None)

        for src, blob_path in source_files:
            if self.batch_upload_dst:
                blob_path = _normalize_blob_file_path(self.batch_upload_dst,
                                                      blob_path)
            if not self.check_mode:
                try:
                    self.blob_client.create_blob_from_path(
                        self.container,
                        blob_path,
                        src,
                        metadata=self.tags,
                        content_settings=_guess_content_type(
                            src, content_settings))
                except AzureHttpError as exc:
                    self.fail("Error creating blob {0} - {1}".format(
                        src, str(exc)))
            self.results['actions'].append('created blob from {0}'.format(src))

        self.results['changed'] = True
        self.results['container'] = self.container_obj
Example #23
def StoreImage(image_id, image_bytes):
    blob_service.create_blob_from_bytes(
        IMAGE_STORAGE_CONTAINER_NAME,
        image_id,
        image_bytes,
        content_settings=ContentSettings('image/jpeg'))
Example #24
CONTAINER_NAME = 'ocean-container'

# log in
storage_client = get_client_from_cli_profile(StorageManagementClient)

# create a public storage container to hold the file
storage_keys = storage_client.storage_accounts.list_keys(RESOURCE_GROUP, STORAGE_ACCOUNT_NAME)
storage_keys = {v.key_name: v.value for v in storage_keys.keys}

storage_client = CloudStorageAccount(STORAGE_ACCOUNT_NAME, storage_keys['key1'])
blob_service = storage_client.create_block_blob_service()
blob_service.create_container(CONTAINER_NAME, public_access=PublicAccess.Container)

#%%
FILE_NAME = 'hello-ocean.html'
blob_service.create_blob_from_bytes(
    CONTAINER_NAME,
    FILE_NAME,
    b'<center><h1> Surf the Ocean again! </h1></center>',
    content_settings=ContentSettings('text/html')
)

print(blob_service.make_blob_url(CONTAINER_NAME, FILE_NAME))

#%%

for blob in blob_service.list_blobs(CONTAINER_NAME):
    print(blob.name)


Example #25
    def upload_file(data_file, meta, encryption_metadata, max_concurrency):
        try:
            azure_metadata = {
                u'sfcdigest': meta[SHA256_DIGEST],
            }
            if (encryption_metadata):
                azure_metadata.update({
                    u'encryptiondata':
                    json.dumps({
                        u'EncryptionMode': u'FullBlob',
                        u'WrappedContentKey': {
                            u'KeyId': u'symmKey1',
                            u'EncryptedKey': encryption_metadata.key,
                            u'Algorithm': u'AES_CBC_256'
                        },
                        u'EncryptionAgent': {
                            u'Protocol': '1.0',
                            u'EncryptionAlgorithm': u'AES_CBC_128',
                        },
                        u'ContentEncryptionIV': encryption_metadata.iv,
                        u'KeyWrappingMetadata': {
                            u'EncryptionLibrary': u'Java 5.3.0'
                        }
                    }),
                    u'matdesc':
                    encryption_metadata.matdesc
                })
            azure_location = SnowflakeAzureUtil.extract_container_name_and_path(
                meta[u'stage_info'][u'location'])
            path = azure_location.path + meta[u'dst_file_name'].lstrip('/')

            client = meta[u'client']
            callback = None
            if meta[u'put_azure_callback']:
                callback = meta[u'put_azure_callback'](
                    data_file,
                    os.path.getsize(data_file),
                    output_stream=meta[u'put_callback_output_stream'],
                    show_progress_bar=meta[u'show_progress_bar'])

            def azure_callback(current, total):
                callback(current)
                logger.debug(
                    "data transfer progress from sdk callback. "
                    "current: %s, total: %s", current, total)

            client.create_blob_from_path(
                azure_location.container_name,
                path,
                data_file,
                progress_callback=azure_callback
                if meta[u'put_azure_callback'] else None,
                metadata=azure_metadata,
                max_connections=max_concurrency,
                content_settings=ContentSettings(
                    content_type=HTTP_HEADER_VALUE_OCTET_STREAM,
                    content_encoding=u'utf-8',
                ))

            logger.debug(u'DONE putting a file')
            meta[u'dst_file_size'] = meta[u'upload_size']
            meta[u'result_status'] = ResultStatus.UPLOADED
        except AzureHttpError as err:
            logger.debug(
                u"Caught exception's status code: {status_code} and message: {ex_representation}"
                .format(status_code=err.status_code,
                        ex_representation=str(err)))
            if err.status_code == 403 and SnowflakeAzureUtil._detect_azure_token_expire_error(
                    err):
                logger.debug(u"AZURE Token expired. Renew and retry")
                meta[u'result_status'] = ResultStatus.RENEW_TOKEN
                return None
            else:
                meta[u'last_error'] = err
                meta[u'result_status'] = ResultStatus.NEED_RETRY
Example #26
    def properties_and_metadata_operations(self, account):
        file_blob_name = "HelloWorld.png"
        text_blob_name = "Text"

        # Create a Block Blob Service object
        blockblob_service = account.create_block_blob_service()

        container_name = 'blockblobbasicscontainer' + self.random_data.get_random_name(
            6)

        try:
            # Create a new container
            print('1. Create a container with name and custom metadata - ' +
                  container_name)
            blockblob_service.create_container(container_name,
                                               {'sample': 'azure-storage'})

            # Upload file as a block blob
            print(
                '2. Uploading BlockBlob from file with properties and custom metadata'
            )
            #Get full path on drive to file_to_upload by joining the fully qualified directory name and file name on the local drive
            full_path_to_file = os.path.join(os.path.dirname(__file__),
                                             file_blob_name)

            blockblob_service.create_blob_from_path(
                container_name,
                file_blob_name,
                full_path_to_file,
                content_settings=ContentSettings(
                    content_type='image/png'),
                metadata={'category': 'azure-samples'})

            blockblob_service.create_blob_from_text(
                container_name,
                text_blob_name,
                'Data',
                content_settings=ContentSettings(content_encoding='UTF-8',
                                                 content_language='en'),
                metadata={
                    'origin': 'usa',
                    'title': 'azure-samples'
                })

            # Get all the container properties
            print('3. Get Container metadata')

            container = blockblob_service.get_container_properties(
                container_name)

            print('    Metadata:')

            for key in container.metadata:
                print('        ' + key + ':' + container.metadata[key])

            # Get all the blob properties
            print('4. Get Blob properties')
            blob = blockblob_service.get_blob_properties(
                container_name, file_blob_name)

            print('    Metadata:')
            for key in blob.metadata:
                print('        ' + key + ':' + blob.metadata[key])

            print('    Properties:')
            print('        Content-Type:' +
                  blob.properties.content_settings.content_type)
        finally:
            # Delete the container
            print("5. Delete Container")
            if blockblob_service.exists(container_name):
                blockblob_service.delete_container(container_name)
Example #27
def main():
    logging.basicConfig(level=logging.DEBUG)
    with open(TASKDATA) as taskdata_file:
        taskdata = json.loads(taskdata_file.read())
    github = GithubAdapter(taskdata)
    gh_commit = github.get_commit()
    config = github.get_config()
    blob_service = AppendBlobService(
        account_name=taskdata["storage_account_name"],
        account_key=taskdata["storage_account_key"])
    queue_service = QueueService(
        connection_string=taskdata["queue_connection_string"])
    loop = asyncio.get_event_loop()
    ctx = Context(loop=loop,
                  config=config,
                  blob_service=blob_service,
                  queue_service=queue_service,
                  taskdata=taskdata)

    blob_service.create_container("logs",
                                  fail_on_exist=False,
                                  public_access=PublicAccess.Blob)
    blob_service.create_blob("logs",
                             ctx.pid,
                             content_settings=ContentSettings(
                                 content_type="text/plain; charset=utf-8"))
    gh_commit.create_status(
        "pending",
        target_url=blob_service.make_blob_url("logs", ctx.pid),
        description="Build started",
        context=config.get("configuration_name",
                           "configuration%s" % taskdata["config_num"]))
    os.makedirs(REPOSDIR, exist_ok=True)
    # Check if we're the only process that updates the git cache on the SMB
    # share; otherwise skip updating.
    if not os.path.exists(LOCKFILENAME):
        lock = open(LOCKFILENAME, "w")
        lock.close()
        update_git_cache(ctx)
        os.unlink(LOCKFILENAME)

    if os.path.exists(SRCDIR):
        shutil.rmtree(SRCDIR)
    os.makedirs(os.path.join(SRCDIR, "build/conf"))
    with open(os.path.join(SRCDIR, "build/conf/auto.conf"), "a") as localconf:
        localconf.write("\n%s\n" % config.get("localconf", ""))
        localconf.write(AUTOCONFIG)

    repos = get_repos(config)
    repos.append((repodirname(taskdata["gh"]["repository"]["clone_url"]),
                  taskdata["gh"]["repository"]["clone_url"], None, None))
    for reponame, repourl, reporef, _ in repos:
        refrepopath = os.path.join(REPOSDIR, reponame)
        run(ctx,
            ["git", "clone", "--reference", refrepopath, repourl, reponame],
            cwd=SRCDIR)
        if reporef:
            LOG.info("Checkout %s to %s" % (reponame, reporef))
            run(ctx, ["git", "checkout", "%s" % reporef],
                cwd=os.path.join(SRCDIR, reponame))

    # Do checkout
    if taskdata["gh"]["type"] == "pull_request":
        LOG.info("Add remote repo %s" % taskdata["gh"]["clone_url"])
        run(ctx, [
            "git", "remote", "add", "contributor", taskdata["gh"]["clone_url"]
        ],
            cwd=os.path.join(
                SRCDIR,
                repodirname(taskdata["gh"]["repository"]["clone_url"])))
        LOG.info("Fetch contributor's repo")
        run(ctx, ["git", "fetch", "contributor"],
            cwd=os.path.join(
                SRCDIR,
                repodirname(taskdata["gh"]["repository"]["clone_url"])))
    LOG.info("Checkout %s to %s" % (repodirname(
        taskdata["gh"]["repository"]["clone_url"]), taskdata["gh"]["sha"]))
    run(ctx, ["git", "checkout", taskdata["gh"]["sha"]],
        cwd=os.path.join(
            SRCDIR, repodirname(taskdata["gh"]["repository"]["clone_url"])))

    # Fetch sstate if any
    if os.path.exists(get_sstate_archive_path(ctx)):
        with tarfile.open(name=get_sstate_archive_path(ctx),
                          mode="r:gz") as sstate_tar:
            sstate_tar.extractall(path=SRCDIR)

    addlayers = []
    for dep in config["dependencies"]:
        repodir = repodirname(dep["url"])
        layers = dep.get("layers", None)
        if layers:
            addlayers.extend([
                "bitbake-layers add-layer ../%s/%s" % (repodir, layer)
                for layer in layers
            ])
        else:
            addlayers.append("bitbake-layers add-layer ../%s" % repodir)
    addlayers.append("bitbake-layers add-layer ../%s" %
                     repodirname(taskdata["gh"]["repository"]["clone_url"]))

    run_script(ctx,
               BUILDSCRIPT % ("\n".join(addlayers), config["bitbake_target"]),
               cwd=SRCDIR)
    save_sstate(ctx)

    # Github auth token has expired by now most probably => renew
    github = GithubAdapter(taskdata)
    gh_commit = github.get_commit()
    gh_commit.create_status(
        "success",
        target_url=blob_service.make_blob_url("logs", ctx.pid),
        description="Target has been built successfully",
        context=config.get("configuration_name",
                           "configuration%s" % taskdata["config_num"]))
    loop.close()
    # TODO: copy cloud-init log files to share
    taskdata["build_result"] = "success"
    queue_service.put_message(
        "buildresults",
        base64.b64encode(json.dumps(taskdata).encode("utf")).decode("utf"))
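Example #27 drives the build through a run helper that is not shown. A minimal sketch, assuming it appends each command's combined output to the build's append blob log:

import subprocess

def run(ctx, cmd, cwd=None):
    # Hypothetical sketch: execute the command, fail the build on a non-zero
    # exit (check=True), and append combined output to the "logs" blob.
    proc = subprocess.run(cmd, cwd=cwd, stdout=subprocess.PIPE,
                          stderr=subprocess.STDOUT, check=True)
    ctx.blob_service.append_blob_from_bytes("logs", ctx.pid, proc.stdout)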