Example #1
def upload_df(transactions, pickle_name, base_dir, bucket_name=None):
    # TODO: Change this routine to work with the NamedTemporaryFile class
    pickle_local_path = f'/tmp/{pickle_name}'
    pickle_remote_path = f'{base_dir}/{pickle_name}'
    transactions.to_pickle(pickle_local_path)
    if bucket_name:
        manager = S3Manager(bucket_name=bucket_name)
    else:
        manager = S3Manager()
    config = TransferConfig()
    transfer = S3Transfer(manager.s3_client, config)
    transfer.upload_file(pickle_local_path, manager.bucket_name, pickle_remote_path)
    os.remove(pickle_local_path)
Example #2
    def uploader(self):
        """Upload configuration.

        To support Google Cloud Storage, we disable multipart uploads. Every upload
        uses the calling thread and uploads in a single part.
        """
        if self._uploader is None:
            self._uploader = S3Transfer(
                self.client,
                TransferConfig(use_threads=False,
                               multipart_threshold=sys.maxsize),
            )
        return self._uploader
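Note: the TransferConfig here is the whole trick. Setting multipart_threshold to sys.maxsize means no file can ever cross the threshold, so every upload is issued as a single put_object call, and use_threads=False keeps it on the calling thread. A minimal standalone sketch of the same pattern (the bucket and file paths are hypothetical):

import sys

import boto3
from boto3.s3.transfer import S3Transfer, TransferConfig

client = boto3.client('s3')
# No file is larger than sys.maxsize bytes, so multipart never triggers.
config = TransferConfig(use_threads=False, multipart_threshold=sys.maxsize)
transfer = S3Transfer(client, config)
transfer.upload_file('/tmp/archive.tar.gz', 'my-bucket', 'backups/archive.tar.gz')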
Example #3
def download_file(self,
                  Bucket,
                  Key,
                  Filename,
                  ExtraArgs=None,
                  Callback=None,
                  Config=None):
    transfer = S3Transfer(self, Config)
    return transfer.download_file(bucket=Bucket,
                                  key=Key,
                                  filename=Filename,
                                  extra_args=ExtraArgs,
                                  callback=Callback)
Example #4
def upload(file_path, file_name):
    """Upload file located at <file_path> to s3.
    Uses <file_name> as the s3 key name (object identifier).
    """
    transfer = S3Transfer(boto3.client('s3'))
    transfer.upload_file(file_path,
                         BUCKET_NAME,
                         file_name,
                         extra_args={
                             'ACL': 'private',
                             'StorageClass': 'STANDARD',
                         },
                         callback=ProgressPercentage(file_path))
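Note: ProgressPercentage is used in this and several later examples but never defined on this page. A common definition, adapted from the callback example in the boto3 documentation, looks like this:

import os
import sys
import threading

class ProgressPercentage(object):
    def __init__(self, filename):
        self._filename = filename
        self._size = float(os.path.getsize(filename))
        self._seen_so_far = 0
        self._lock = threading.Lock()

    def __call__(self, bytes_amount):
        # May be called from transfer threads, hence the lock.
        with self._lock:
            self._seen_so_far += bytes_amount
            percentage = (self._seen_so_far / self._size) * 100
            sys.stdout.write("\r%s  %s / %s  (%.2f%%)" % (
                self._filename, self._seen_so_far, self._size, percentage))
            sys.stdout.flush()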
Example #5
def move_backups_to_s3():
    s3 = TatorS3().s3
    transfer = S3Transfer(s3)
    bucket_name = os.getenv('BUCKET_NAME')
    num_moved = 0
    for backup in os.listdir('/backup'):
        logger.info(f"Moving {backup} to S3...")
        key = f'backup/{backup}'
        path = os.path.join('/backup', backup)
        transfer.upload_file(path, bucket_name, key)
        os.remove(path)
        num_moved += 1
    logger.info(f"Finished moving {num_moved} files!")
Example #6
    def test_uses_multipart_upload_when_over_threshold(self):
        with mock.patch('boto3.s3.transfer.MultipartUploader') as uploader:
            fake_files = {
                'smallfile': b'foobar',
            }
            osutil = InMemoryOSLayer(fake_files)
            config = TransferConfig(multipart_threshold=2,
                                    multipart_chunksize=2)
            transfer = S3Transfer(self.client, osutil=osutil, config=config)
            transfer.upload_file('smallfile', 'bucket', 'key')

            uploader.return_value.upload_file.assert_called_with(
                'smallfile', 'bucket', 'key', None, {})
Example #7
def upload_file(self,
                Filename,
                Bucket,
                Key,
                ExtraArgs=None,
                Callback=None,
                Config=None):
    transfer = S3Transfer(self, Config)
    return transfer.upload_file(filename=Filename,
                                bucket=Bucket,
                                key=Key,
                                extra_args=ExtraArgs,
                                callback=Callback)
Example #8
 def __exit__(self, exc_type=None, exc_value=None, exc_traceback=None):
     from boto3.s3.transfer import S3Transfer, TransferConfig
     if exc_type is not None:
         log.exception("Error while executing policy")
     log.debug("Uploading policy logs")
     self.leave_log()
     self.compress()
     self.transfer = S3Transfer(
         self.ctx.session_factory(assume=False).client('s3'),
         config=TransferConfig(use_threads=False))
     self.upload()
     shutil.rmtree(self.root_dir)
     log.debug("Policy Logs uploaded")
Example #9
 def test_extra_args_on_uploaded_passed_to_api_call(self):
     extra_args = {'ACL': 'public-read'}
     fake_files = {
         'smallfile': b'hello world'
     }
     osutil = InMemoryOSLayer(fake_files)
     transfer = S3Transfer(self.client, osutil=osutil)
     transfer.upload_file('smallfile', 'bucket', 'key',
                          extra_args=extra_args)
     self.client.put_object.assert_called_with(
         Bucket='bucket', Key='key', Body=mock.ANY,
         ACL='public-read'
     )
Example #10
def convertor(videopath, outputpath, quality, hls_time, bucketpath):
    logging.info('HLS conversion started')
    for file in os.listdir('./' + videopath):
        basename = os.path.splitext(os.path.basename(file))[0]
        out_dir = outputpath + "/" + basename + "/"
        os.makedirs(out_dir, exist_ok=True)
        # ffmpeg expects WxH for -s (the original had 1920*1080).
        cmd = ('ffmpeg -i "./' + videopath + '/' + os.path.basename(file) + '" '
               + quality + ' -s 1920x1080 -start_number 0 -hls_time ' + hls_time
               + ' -hls_list_size 0 -f hls "./' + outputpath + '/' + basename
               + '/index.m3u8"')
        os.system(cmd)
        logging.info(cmd)
        logging.info('converted')

    # Hardcoded credentials redacted from the original; read them from the
    # environment (or rely on the default credential chain) instead.
    ACCESS_KEY_ID = os.environ['AWS_ACCESS_KEY_ID']
    ACCESS_SECRET_KEY = os.environ['AWS_SECRET_ACCESS_KEY']
    BUCKET_NAME = 'videos.kavi.in'

    client = boto3.client('s3',
                          aws_access_key_id=ACCESS_KEY_ID,
                          aws_secret_access_key=ACCESS_SECRET_KEY)
    transfer = S3Transfer(client)
    logging.info('connected with s3 server')
    logging.info('bucket name in s3: ' + bucketpath)
    for dir in os.listdir('./' + outputpath):
        for file in os.listdir('./' + outputpath + '/' + dir):
            full_path = './' + outputpath + '/' + dir + '/' + file
            transfer.upload_file(full_path, BUCKET_NAME,
                                 'hls-test/' + bucketpath + '/' + dir + '/' + file)
            logging.info(file)
            logging.info('uploaded')
Example #11
def updateProjectTotals(force=False):
    projects=Project.objects.all()
    for project in projects:
        temp_files = TemporaryFile.objects.filter(project=project)
        files = Media.objects.filter(project=project)
        if (files.count() + temp_files.count() != project.num_files) or force:
            project.num_files = files.count() + temp_files.count()
            duration_info = files.values('num_frames', 'fps')
            project.duration = sum([info['num_frames'] / info['fps'] for info in duration_info
                                    if info['num_frames'] and info['fps']])
            logger.info(f"Updating {project.name}: Num files = {project.num_files}, "
                        f"Duration = {project.duration}")
        if not project.thumb:
            media = Media.objects.filter(project=project, media_files__isnull=False).first()
            if not media:
                media = Media.objects.filter(project=project, thumbnail__isnull=False).first()
            if media:
                s3 = TatorS3().s3
                bucket_name = os.getenv('BUCKET_NAME')
                if media.thumbnail:
                    transfer = S3Transfer(s3)
                    fname = os.path.basename(media.thumbnail.url)
                    s3_key = f"{project.organization.pk}/{project.pk}/{fname}"
                    transfer.upload_file(media.thumbnail.url, bucket_name, s3_key)
                    project.thumb = s3_key
                elif media.media_files:
                    if 'thumbnail' in media.media_files:
                        if len(media.media_files['thumbnail']) > 0:
                            src_key = media.media_files['thumbnail'][0]['path']
                            fname = os.path.basename(src_key)
                            dest_key = f"{project.organization.pk}/{project.pk}/{fname}"
                            try:
                                # S3 requires source key to include bucket name.
                                s3.copy_object(Bucket=bucket_name, Key=dest_key,
                                               CopySource={'Bucket': bucket_name,
                                                           'Key': f"{bucket_name}/{src_key}"})
                            except Exception:
                                # Minio requires source key to not include bucket name.
                                s3.copy_object(Bucket=bucket_name, Key=dest_key,
                                               CopySource={'Bucket': bucket_name,
                                                           'Key': src_key})
                            project.thumb = dest_key
        users = User.objects.filter(pk__in=Membership.objects.filter(project=project)\
                            .values_list('user')).order_by('last_name')
        usernames = [str(user) for user in users]
        creator = str(project.creator)
        if creator in usernames:
            usernames.remove(creator)
            usernames.insert(0, creator)
        project.usernames = usernames
        project.save()
Example #12
    def run(self):

        with self.output().open("w") as f:
            f.write(
                os.path.expanduser("~") +
                "\\Team6_ADS_Assignment1\\Luigi\\Logs\\Temp\\UploadInitial_Log.txt"
            )

        with self.input().open('r') as json_file:
            json_txt = json.load(json_file)
        AWS_ACCESS_KEY = json_txt["AWSAccess"]
        AWS_SECRET_KEY = json_txt["AWSSecret"]
        conn = boto3.client('s3',
                            aws_access_key_id=AWS_ACCESS_KEY,
                            aws_secret_access_key=AWS_SECRET_KEY)
        transfer = S3Transfer(conn)

        response = conn.list_buckets()
        existent = []
        for bucket in response["Buckets"]:
            existent.append(bucket['Name'])

        bucket_name = 'Team6ILAssignment01'
        target_dir = os.path.expanduser(
            "~") + "\\Team6_ADS_Assignment1\\Luigi\\Data\\DirtyData\\"
        filename = None
        file_list = os.listdir(target_dir)
        for file in file_list:
            if file.endswith('.csv'):
                filename = file

        if bucket_name in existent:
            filenames = []
            for key in conn.list_objects(Bucket=bucket_name)['Contents']:
                filenames.append(key['Key'])

            if filename not in filenames:
                print('File upload started to s3!!!!!', '\n')
                transfer.upload_file(os.path.join(target_dir, filename),
                                     bucket_name, filename)
                print('File uploaded to s3!!!!!', '\n')

            else:
                print('File already present on s3!!!!!', '\n')

        else:
            conn.create_bucket(Bucket=bucket_name)
            print('File upload started to s3!!!!!', '\n')
            transfer.upload_file(os.path.join(target_dir, filename),
                                 bucket_name, filename)
            print('File uploaded to s3!!!!!', '\n')
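Note: the existence checks above page through list_buckets() and list_objects(), which gets slow for large buckets (and list_objects returns at most 1000 keys per call). A head_object probe is a cheaper alternative; a sketch under those assumptions, reusing the same conn client:

import botocore

def s3_object_exists(conn, bucket_name, key):
    # head_object raises a ClientError with code 404 when the key is absent.
    try:
        conn.head_object(Bucket=bucket_name, Key=key)
        return True
    except botocore.exceptions.ClientError as e:
        if e.response['Error']['Code'] == '404':
            return False
        raise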
Example #13
def image_height_transform(file, content_type, content_id, height=200):
    image_id = now_ms()
    filename_template = content_id + '.%s.%s.png'

    # original
    with Image(filename=file) as img:
        img.format = 'png'
        img.save(filename=os.path.join(UPLOAD_FOLDER, content_type,
                                       filename_template % (image_id, 'raw')))

    # resized
    img_width = None
    with Image(filename=file) as img:
        img.transform(resize='x' + str(height))
        img.format = 'png'
        img.save(filename=os.path.join(UPLOAD_FOLDER, content_type,
                                       filename_template % (image_id, 'xlg')))
        img_width = img.width

    if AWS_BUCKET:
        s3 = boto3.client('s3')
        transfer = S3Transfer(s3)
        transfer.upload_file(
            os.path.join(UPLOAD_FOLDER, content_type,
                         filename_template % (image_id, 'raw')),
            AWS_BUCKET,
            os.path.join(content_type, filename_template % (image_id, 'raw')),
            extra_args={
                'ACL': 'public-read',
                'ContentType': 'image/png'
            })
        os.remove(
            os.path.join(UPLOAD_FOLDER, content_type,
                         filename_template % (image_id, 'raw')))

        transfer.upload_file(
            os.path.join(UPLOAD_FOLDER, content_type,
                         filename_template % (image_id, 'xlg')),
            AWS_BUCKET,
            os.path.join(content_type, filename_template % (image_id, 'xlg')),
            extra_args={
                'ACL': 'public-read',
                'ContentType': 'image/png'
            })
        os.remove(
            os.path.join(UPLOAD_FOLDER, content_type,
                         filename_template % (image_id, 'xlg')))

    os.remove(file)

    return (image_id, img_width)
Example #14
def upload_json(obj, json_name, base_dir, bucket_name=None):
    # TODO: Change this routine to work with the NamedTemporaryFile class
    json_local_path = f'/tmp/{json_name}'
    json_remote_path = f'{base_dir}/{json_name}'
    with open(json_local_path, 'w') as f:
        json.dump(obj, f)
    if bucket_name:
        manager = S3Manager(bucket_name=bucket_name)
    else:
        manager = S3Manager()
    config = TransferConfig()
    transfer = S3Transfer(manager.s3_client, config)
    transfer.upload_file(json_local_path, manager.bucket_name, json_remote_path)
    os.remove(json_local_path)
Example #15
def download_object(request, object_name, download_dir):
    bucket_name = request.user.tenant_id
    download_path = "%s/%s" % (download_dir, object_name)
    LOG.debug('Start Download S3 Object: %s/%s' % (bucket_name, object_name))
    file_size = s3_client(request).head_object(
        Bucket=bucket_name, Key=object_name).get("ContentLength")
    transfer = S3Transfer(s3_client(request), s3_config)
    transfer.download_file(bucket_name,
                           object_name,
                           download_path,
                           callback=DownloadProgress(download_path, file_size))
    LOG.debug('Download S3 Object Complete : %s (%d MB)' %
              (download_path, round(file_size / 1024 / 1024)))
    return download_path
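Note: DownloadProgress is defined elsewhere in that project. A plausible sketch (hypothetical, mirroring the upload-side ProgressPercentage) takes the total size up front from the head_object call, since the local file does not yet exist when the callback first fires:

import sys
import threading

class DownloadProgress(object):
    def __init__(self, filename, total_size):
        self._filename = filename
        self._size = float(total_size)
        self._seen_so_far = 0
        self._lock = threading.Lock()

    def __call__(self, bytes_amount):
        with self._lock:
            self._seen_so_far += bytes_amount
            pct = (self._seen_so_far / self._size) * 100 if self._size else 0.0
            sys.stdout.write("\r%s  %d / %d bytes  (%.2f%%)" % (
                self._filename, self._seen_so_far, self._size, pct))
            sys.stdout.flush()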
Example #16
def downloadFilefromS3(bucketName, bucketKey, downloadFilePath):
    role = getTemporarySecurityCredentials()
    # get temporary credentials from role
    access_key = getAccessKey(role)
    secret_key = getSecretKey(role)
    security_token = getSecurityToken(role)
    s3Client = initiateS3Client(access_key, secret_key, security_token, region)
    try:
        transfer = S3Transfer(s3Client)
        # Download s3://bucket/key to /tmp/myfile
        transfer.download_file(bucketName, bucketKey, downloadFilePath)
        print "[Done]: File successfully downloaded to" + downloadFilePath + "."
    except boto3.exceptions.S3UploadFailedError as e:
        print "[Error] Error occurred while trying to download file from S3." + e
Example #17
def upload_model_to_s3():
    bucket_name = "hannibal-vector"

    print_line_for_user("- Starting uploading model and index files")

    index_file_name = get_compressed_files_order_by_last_modified("Index")[0]
    model_file_name = get_compressed_files_order_by_last_modified("Model")[0]

    logging.info("Uploading to Bucket name: {}".format(bucket_name))
    client = boto3.client("s3")
    transfer = S3Transfer(client)
    transfer.upload_file(index_file_name, bucket_name, index_file_name)
    transfer.upload_file(model_file_name, bucket_name, model_file_name)
    print_line_for_user("Upload completed!")
Example #18
def upload_inputs(local_files_dir,
                  user_id,
                  s3_bucket,
                  region_name,
                  aws_access_key_id=None,
                  aws_secret_access_key=None):
    """
    Testing function to upload a list of files from a local folder to an S3 bucket.
    Usage: tools.upload_inputs('pdfs/', 2017, 'testbank-nc', 'ap-southeast-1')
           tools.upload_inputs('pdfs/', 100, 'ncfinbank', 'ap-south-1', 'AKxxxxxxxxxxxxxxxxxxTQ', 'Xxxxxxxxxxysnkfgvjkbkdjfbkxxxxx')
    :param local_files_dir: directory containing files
    :param user_id: numeric value i.e the folder path for users_{}
    :param aws_access_key_id: aws access key
    :param aws_secret_access_key: aws secret key
    :param s3_bucket: s3 bucket name
    :param region_name: aws region
    :return:
    """
    if aws_access_key_id and aws_secret_access_key:
        s3 = get_service("s3",
                         service_type='client',
                         service_region_name=region_name,
                         aws_access_key_id=aws_access_key_id,
                         aws_secret_access_key=aws_secret_access_key)
    else:
        s3 = get_service("s3",
                         service_type='client',
                         service_region_name=region_name)

    pdfs_list = glob.glob(local_files_dir + '/*')
    if not pdfs_list:
        print("No files to upload")
        return

    for pdf in pdfs_list:
        try:
            if user_id:
                s3_file_path = "users_{}/{}".format(user_id,
                                                    os.path.basename(pdf))

                transfer = S3Transfer(s3)
                logger.info(
                    "Uploading file {} to path {} in bucket {} of region {}".
                    format(os.path.basename(pdf), s3_file_path, s3_bucket,
                           region_name))
                transfer.upload_file(pdf, s3_bucket, s3_file_path)
                logger.info("Uploaded successfully ")

        except Exception as error:
            logger.error(error)
Example #19
    def download_path(self, path):
        filename = path.split('/')
        self.filename = filename[3:]
        key = '/'.join(self.filename)
        bucket = filename[2]
        self.newdir = filename[3:-1]
        self.newdir = '/'.join(self.newdir)
        self.newdir = os.path.join(self.directory, self.newdir)
        local_filename = os.path.join(self.directory, key)

        downloaded = False

        # check previous downloads
        if args.resume:
            prev_directory = args.resume[0]
            prev_local_filename = os.path.join(prev_directory, key)
            if os.path.isfile(prev_local_filename):
                # print(prev_local_filename, 'is already downloaded.')
                downloaded = True

        if not downloaded:
            try:
                os.makedirs(self.newdir)
            except OSError as e:
                pass

            # check tokens
            self.check_time()

            session = boto3.session.Session(self.access_key, self.secret_key,
                                            self.session)
            s3client = session.client('s3')
            s3transfer = S3Transfer(s3client)

            try:
                s3transfer.download_file(bucket, key, local_filename)
                print('downloaded: ', path)
            except botocore.exceptions.ClientError as e:
                # If a client error is thrown, then check that it was a 404 error.
                # If it was a 404 error, then the bucket does not exist.
                error_code = int(e.response['Error']['Code'])
                if error_code == 404:
                    print('This path is incorrect:', path,
                          'Please try again.\n')
                    pass
                if error_code == 403:
                    print(
                        'This is a private bucket. Please contact NDAR for help:',
                        path, '\n')
                    pass
Example #20
 def transferFile(self):
     """
     This method is safest because it handles large and small files, 
     resuming uploads, etc
     """
     client = boto.client('s3', 'us-west-2')
     transfer = S3Transfer(client)
     result = transfer.upload_file(self.localFileName,
                                   self.bucketName,
                                   self.keyName,
                                   extra_args={'ACL': 'public-read'},
                                   callback=ProgressPercentage(
                                       self.localFileName))
     print(result)
Example #21
 def perform_download_to_path(self, remote_path: str, local_dest: str):
     """
     Download a file from the given remote path to a local destination
     :param remote_path: the remote path of the file that should be downloaded
     :param local_dest: the destination file path
     :return:
     """
     s3 = self.get_s3_resource()
     transfer = S3Transfer(client=s3.meta.client, config=AmazonS3TransferConfig())
     transfer.download_file(
         bucket=self.storage.credentials['AWS_S3_BUCKET'],
         key=remote_path,
         filename=local_dest,
     )
Example #22
 def _upload_func(self, s3_uri, func, archive):
     _, bucket, key_prefix = parse_s3(s3_uri)
     key = "%s/%s" % (key_prefix, func.name)
     transfer = S3Transfer(
         self.session_factory().client('s3'),
         config=TransferConfig(
             multipart_threshold=1024*1024*4))
     transfer.upload_file(
         archive.path,
         bucket=bucket,
         key=key,
         extra_args={
             'ServerSideEncryption': 'AES256'})
     return bucket, key
Example #23
def upload(value, storage):
    access_key_id, secret_key, bucket_name, encryption_enabled = parse_bucket_url(
        storage)
    s3 = get_resource(access_key_id, secret_key)
    # S3Transfer supports multipart uploads, callbacks, etc.
    # http://boto3.readthedocs.io/en/latest/_modules/boto3/s3/transfer.html
    transfer = S3Transfer(s3.meta.client)
    if encryption_enabled:
        transfer.upload_file(value,
                             bucket_name,
                             value,
                             extra_args={'ServerSideEncryption': 'AES256'})
    else:
        transfer.upload_file(value, bucket_name, value)
Example #24
def upload(BUCKET_NAME, FILENAME, FILEPATH, KEYPATH):

    try:
        transfer = S3Transfer(boto3.client('s3', 'us-east-1'))
        progress = ProgressPercentage_u(FILEPATH + FILENAME)
        transfer.upload_file(FILEPATH + FILENAME,
                             BUCKET_NAME,
                             KEYPATH,
                             callback=progress)

    except OSError as e:
        print("The file does not exist.")
        print(e)
Example #25
def upload_s3_file(file_path,
                   bucket,
                   file_id,
                   extra_args=None):
    # Avoid a mutable default argument; 'text/html' is the standard MIME
    # type (the original had 'html/text' reversed).
    if extra_args is None:
        extra_args = {'ContentType': 'text/html'}
    client = boto3.client('s3',
                          aws_access_key_id=AWS_ACCESS_KEY,
                          aws_secret_access_key=AWS_SECRET_KEY)

    transfer = S3Transfer(client)

    return transfer.upload_file(file_path,
                                bucket,
                                file_id,
                                extra_args=extra_args)
Example #26
def upload_file(self,
                Filename,
                Bucket,
                Key,
                ExtraArgs=None,
                Callback=None,
                Config=None):
    """Upload a file to an S3 object.

    Usage::

        import boto3
        s3 = boto3.resource('s3')
        s3.meta.client.upload_file('/tmp/hello.txt', 'mybucket', 'hello.txt')

    Similar behavior as S3Transfer's upload_file() method,
    except that parameters are capitalized. Detailed examples can be found at
    :ref:`S3Transfer's Usage <ref_s3transfer_usage>`.

    :type Filename: str
    :param Filename: The path to the file to upload.

    :type Bucket: str
    :param Bucket: The name of the bucket to upload to.

    :type Key: str
    :param Key: The name of the key to upload to.

    :type ExtraArgs: dict
    :param ExtraArgs: Extra arguments that may be passed to the
        client operation. For allowed upload arguments see
        boto3.s3.transfer.S3Transfer.ALLOWED_UPLOAD_ARGS.

    :type Callback: function
    :param Callback: A method which takes a number of bytes transferred to
        be periodically called during the upload.

    :type Config: boto3.s3.transfer.TransferConfig
    :param Config: The transfer configuration to be used when performing the
        transfer.
    """
    with S3Transfer(self, Config) as transfer:
        return transfer.upload_file(
            filename=Filename,
            bucket=Bucket,
            key=Key,
            extra_args=ExtraArgs,
            callback=Callback,
        )
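Note: the docstring points at S3Transfer.ALLOWED_UPLOAD_ARGS for the full set of accepted ExtraArgs keys; it is a plain class attribute, so it can be inspected directly. Note also the `with S3Transfer(...)` form, which shuts the underlying transfer machinery down on exit. A quick check (output abridged):

from boto3.s3.transfer import S3Transfer

# Prints the list of ExtraArgs keys upload_file accepts,
# e.g. 'ACL', 'ContentType', 'ServerSideEncryption', 'Metadata', ...
print(S3Transfer.ALLOWED_UPLOAD_ARGS)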
Example #27
 def __init__(self, api_key):
     self.api_key = api_key
     self.max_clouds_initial = 0.25
     self.max_clouds = 0.01
     self.max_shadows = 0.01
     self.max_bad_pixels = 0.25
     self.max_nodata = 0.25
     self.maximgs = 1
     self.catalog_path = "catalog/"
     self.s3_catalog_bucket = "azavea-africa-test"
     self.s3_catalog_prefix = "planet/images"
     self.products = {
         'analytic_sr': {
             'item_type': 'PSScene4Band',
             'asset_type': 'analytic_sr',
             'ext': 'tif'
         },
         'analytic': {
             'item_type': 'PSScene4Band',
             'asset_type': 'analytic',
             'ext': 'tif'
         },
         'analytic_xml': {
             'item_type': 'PSScene4Band',
             'asset_type': 'analytic_xml',
             'ext': 'xml'
         },
         'visual': {
             'item_type': 'PSScene3Band',
             'asset_type': 'visual',
             'ext': 'tif'
         }
     }
     self.client = api.ClientV1(api_key=api_key)
     self.output_filename = "output.csv"
     self.output_encoding = "utf-8"
     self.s3client = boto3.client('s3')
     self.with_analytic = False
     self.with_analytic_xml = False
     self.with_visual = False
     self.local_mode = False
     self.s3_only = False
     self.transfer = S3Transfer(self.s3client,
                                TransferConfig(use_threads=False))
     self.transfer_config = TransferConfig(use_threads=False)
     self.logger = logging.getLogger(__name__)
     self.logger.setLevel(logging.INFO)
     self.secondary_uploads_executor = FixedThreadPoolExecutor(size=5)
     self.with_immediate_cleanup = False
Example #28
def upload_to_S3(*files):
    client = boto3.client('s3', settings.BUCKET_REGION,
                          aws_access_key_id=settings.AWS_SID,
                          aws_secret_access_key=settings.AWS_SECRET)

    transfer = S3Transfer(client)
    for file in files:
        extra_args = {'ACL': 'public-read'}
        content_type = file.get('content_type', None)

        if content_type:
            extra_args.update({'ContentType': content_type})

        transfer.upload_file(file['file'], settings.BUCKET, file['bucket_path'], extra_args=extra_args)
        os.remove(file['file'])
Example #29
 def __init__(
     self,
     directory,
     access_key=None,
     secret_key=None,
     data_type="marketdata",
     **kwargs
 ):
     self.s3 = boto3.client(
         "s3", aws_access_key_id=access_key, aws_secret_access_key=secret_key
     )
     transfer_config = TransferConfig(use_threads=False)
     self.transfer = S3Transfer(self.s3, config=transfer_config)
     self.data_type = data_type
     super(S3, self).__init__(directory, **kwargs)
Example #30
 def test_callback_handlers_register_on_put_item(self):
     osutil = InMemoryOSLayer({'smallfile': b'foobar'})
     transfer = S3Transfer(self.client, osutil=osutil)
     transfer.upload_file('smallfile', 'bucket', 'key')
     events = self.client.meta.events
     events.register_first.assert_called_with(
         'request-created.s3',
         disable_upload_callbacks,
         unique_id='s3upload-callback-disable',
     )
     events.register_last.assert_called_with(
         'request-created.s3',
         enable_upload_callbacks,
         unique_id='s3upload-callback-enable',
     )