def uploadToS3():
    from boto.s3.connection import S3Connection
    from boto.s3.key import Key

    awsKey = input("Enter the AWS access key: ")
    awsSecret = input("Enter the AWS secret key: ")
    bucket_name = input("Enter the bucket name: ")

    # Connect and look up the target bucket.
    conn = S3Connection(awsKey, awsSecret)
    bucket = conn.get_bucket(bucket_name)

    # Upload each result file under a key matching its local filename.
    for filename in ("XGB_File_400trees_TrainDatafinal.csv",
                     "documents_meta.csv",
                     "xgb_results.csv",
                     "xgboost.pkl"):
        k = Key(bucket)
        k.key = filename
        k.set_contents_from_filename(filename)

    print('Upload completed.')
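
Prompting for credentials on stdin works for a quick experiment, but a safer pattern is to read them from the environment. A minimal sketch, assuming the standard AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY variables are set:

import os
from boto.s3.connection import S3Connection

# Explicit here for clarity; calling S3Connection() with no arguments
# makes boto read the same environment variables itself.
conn = S3Connection(os.environ['AWS_ACCESS_KEY_ID'],
                    os.environ['AWS_SECRET_ACCESS_KEY'])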
Example #2
def delete_img_aws(instance, **kwargs):
    conn = S3Connection(settings.AWS_ACCESS_KEY_ID,
                        settings.AWS_SECRET_ACCESS_KEY)
    b = Bucket(conn, settings.AWS_STORAGE_BUCKET_NAME)
    img_k = Key(b)
    img_thumb_k = Key(b)
    img_k.key = instance.image.name
    img_thumb_k.key = instance.image_thumb.name
    b.delete_key(img_k)
    b.delete_key(img_thumb_k)
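
The (instance, **kwargs) signature suggests this function is meant to run as a Django signal receiver. A wiring sketch, where Photo is a hypothetical model owning the image and image_thumb fields:

from django.db.models.signals import post_delete

# Photo is a placeholder; connect whichever model owns the fields.
post_delete.connect(delete_img_aws, sender=Photo)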
Example #3
    def test_s3(self):
        title = "Dino Test"
        link = "http://example.com"
        description = "My dino test"
        base_url = "http://example.com/items/"
        aws_key = os.environ['MYDINOSAUR_AWS_ACCESS_KEY']
        aws_secret = os.environ['MYDINOSAUR_AWS_SECRET_KEY']
        s3_bucket = os.environ['MYDINOSAUR_S3_BUCKET']
        dino = mydinosaur.MyS3Dinosaur(':memory:',
                                       title=title,
                                       link=link,
                                       description=description,
                                       base_url=base_url,
                                       aws_access_key=aws_key,
                                       aws_secret_key=aws_secret,
                                       s3_bucket=s3_bucket)
        dino.update("hello there")

        # make sure that stuff got uploaded
        conn = S3Connection(aws_key, aws_secret)
        bucket = conn.get_bucket(s3_bucket)

        k = Key(bucket)
        k.key = 'rss.xml'
        feed = feedparser.parse(k.get_contents_as_string())
        self.assertEqual(len(feed.entries), 1)

        k = Key(bucket)
        k.key = '1.html'
        html = k.get_contents_as_string()
        self.assertIn('<!-- MyDinosaur default html template -->', html)

        # now, update with media!
        filehandle = StringIO("Hello there.\n")
        dino.update_with_media('this is a test',
                               filehandle,
                               media_type="text/plain",
                               ext="txt")

        k = Key(bucket)
        k.key = 'rss.xml'
        feed = feedparser.parse(k.get_contents_as_string())
        self.assertEqual(len(feed.entries), 2)

        # ensure that media was uploaded
        media_url = feed.entries[0].enclosures[0].url
        key_name = urlparse(media_url).path.split('/')[-1]
        k = Key(bucket)
        k.key = key_name
        contents = k.get_contents_as_string()
        self.assertEqual(contents, "Hello there.\n")
Example #4
def test_upload_and_download_with_encryption(tmpdir):
    from toil_lib.urls import s3am_upload
    from toil_lib.urls import download_url
    from boto.s3.connection import S3Connection, Bucket, Key
    work_dir = str(tmpdir)
    # Create temporary encryption key
    key_path = os.path.join(work_dir, 'foo.key')
    subprocess.check_call([
        'dd', 'if=/dev/urandom', 'bs=1', 'count=32', 'of={}'.format(key_path)
    ])
    # Create test file
    upload_fpath = os.path.join(work_dir, 'upload_file')
    with open(upload_fpath, 'wb') as fout:
        fout.write(os.urandom(1024))
    # Upload file
    random_key = os.path.join('test/', str(uuid4()), 'upload_file')
    s3_url = os.path.join('s3://cgl-driver-projects/', random_key)
    try:
        s3_dir = os.path.split(s3_url)[0]
        s3am_upload(fpath=upload_fpath, s3_dir=s3_dir, s3_key_path=key_path)
        # Download the file
        download_url(url=s3_url,
                     name='download_file',
                     work_dir=work_dir,
                     s3_key_path=key_path)
        download_fpath = os.path.join(work_dir, 'download_file')
        assert os.path.exists(download_fpath)
        assert filecmp.cmp(upload_fpath, download_fpath)
    finally:
        # Delete the Key. Key deletion never fails so we don't need to catch any exceptions
        with closing(S3Connection()) as conn:
            b = Bucket(conn, 'cgl-driver-projects')
            k = Key(b)
            k.key = random_key
            k.delete()
Example #5
def upload_to_s3(fp, name):
    conn = _get_s3_connection()
    bucket = conn.create_bucket('muxlist')
    k = Key(bucket)
    k.key = name
    k.set_contents_from_file(fp)
    return 'http://muxlist.s3.amazonaws.com/%s' % name
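
The hard-coded return value assumes the muxlist bucket lives in the default US East region and that its objects are publicly readable. Under the same public-read assumption, boto can build the link instead:

    # expires_in=0 with query_auth=False yields a plain, unsigned URL;
    # it only resolves if the object is publicly readable.
    return k.generate_url(expires_in=0, query_auth=False)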
Example #6
def download_file_s3(aws_path, aws_config, local_folder=DATA_PATH):
    """ Download a file from an S3 bucket and save it in the local folder. """
    # remove the prefix and extract the S3 bucket, folder, and file name
    m = re.match(S3_PREFIX, aws_path)
    split = aws_path[len(m.group()):].split('/')
    s3_bucket = split.pop(0)
    s3_folder = '/'.join(split[:-1])
    keyname = split[-1]

    # create the local folder if necessary
    if local_folder is not None:
        ensure_directory(local_folder)
        path = os.path.join(local_folder, keyname)
    else:
        path = keyname

    if os.path.isfile(path):
        print('file %s already exists!' % path)
        return path

    conn = S3Connection(aws_config.access_key, aws_config.secret_key)
    bucket = conn.get_bucket(s3_bucket)

    if s3_folder:
        aws_keyname = os.path.join(s3_folder, keyname)
    else:
        aws_keyname = keyname

    print('downloading data from S3...')
    s3key = Key(bucket)
    s3key.key = aws_keyname
    s3key.get_contents_to_filename(path)

    return path
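
S3_PREFIX is referenced above but not shown. A plausible definition, assuming the function should accept s3:// (and Hadoop-style s3n://) URLs:

# Hypothetical: matches only the scheme prefix, so the remainder of the
# URL splits into bucket / folder / filename as the function expects.
S3_PREFIX = r's3n?://'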
Example #7
def delete_product_image(product):
    conn = S3Connection(settings.AWS_ACCESS_KEY_ID,
                        settings.AWS_SECRET_ACCESS_KEY)
    # The last path segment of each URL is the stored filename.
    filename1 = product.image_url.split("/")[-1]
    filename2 = product.thumbnail.url.split("/")[-1]
    filename3 = product.watermark.url.split("/")[-1]

    b = Bucket(conn, settings.S3_BUCKET)
    k = Key(b)

    k.key = 'products/' + filename1
    b.delete_key(k)

    k.key = 'products/thumbnails/' + filename2
    b.delete_key(k)

    k.key = 'products/watermarked/' + filename3
    b.delete_key(k)
Example #8
def delete_from_s3(image_name):
    """Delete image from S3 bucket"""
    conn = S3Connection(aws_access_key_id, aws_secret_access_key)
    bucket = Bucket(conn, "shopifyimagerepository")
    k = Key(bucket)
    k.key = image_name
    bucket.delete_key(k)
Example #9
def remove_profile_image(user):
    conn = S3Connection(settings.AWS_ACCESS_KEY_ID,
                        settings.AWS_SECRET_ACCESS_KEY)
    # The last path segment of each URL is the stored filename.
    filename1 = user.profile_image_url.split("/")[-1]
    filename2 = user.profile_image_crop.url.split("/")[-1]

    b = Bucket(conn, settings.S3_BUCKET)
    k = Key(b)

    k.key = 'profile_images/' + filename1
    b.delete_key(k)

    k.key = 'profile_images/crop/' + filename2
    b.delete_key(k)

    user.profile_image_url = None
    user.profile_image_crop = None
    user.save()
Example #10
def delete_from_S3(filename):
    conn = S3Connection(settings.AWS_ACCESS_KEY_ID,
                        settings.AWS_SECRET_ACCESS_KEY)
    bucket = Bucket(conn, settings.AWS_STORAGE_BUCKET_NAME)
    k = Key(bucket)
    k.key = settings.MEDIA_URL + filename
    bucket.delete_key(k)
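
Note that MEDIA_URL usually begins with a slash, while S3 keys do not, so the key built above only matches objects stored under that exact prefix. A more defensive sketch, offered as an assumption rather than a statement about how this project stores its files:

    # MEDIA_URL typically looks like '/media/'; strip the leading slash
    # so the key matches a conventional S3 key name.
    k.key = (settings.MEDIA_URL + filename).lstrip('/')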
Example #11
    def delete_file(self, data, suffix=''):
        """ Handle file deletion requests. For this, we use the Amazon Python SDK,
        boto.
        """
        from .models import FileAndUrl
        boto.set_stream_logger('boto')
        S3 = S3Connection(settings.AWS_ACCESS_KEY_ID,
                          settings.AWS_SECRET_ACCESS_KEY)
        file_id = data.get('file_id', None)
        aws_bucket = S3.get_bucket(self.s3_bucket, validate=False)

        fileuploader = FileAndUrl()
        log.info(u"file path for %s: %s",
                 file_id, fileuploader.get_file_path(file_id))
        # Delete from S3
        file_key = Key(aws_bucket, fileuploader.get_file_path(file_id))
        file_key.delete()
        # Delete from the database
        fileuploader.delete_record(file_id)
Example #12
    def add_bucket(self, bucket_name, access, zonename, create_date):
        try:
            bucket_count = len(self.conn.get_all_buckets())
            if bucket_count < self.bucket_limit:
                self.conn.create_bucket(bucket_name)
                b = self.conn.get_bucket(bucket_name)
                try:
                    # Record the creation details as metadata on an empty
                    # 'create_info' marker object.
                    k1 = Key(b)
                    k1.key = 'create_info'

                    # Note: metadata key names must not contain underscores;
                    # set_metadata('Bucket_Name', ...) failed with a 403 error
                    # when the key was created.
                    k1.set_metadata('BucketName', bucket_name)
                    k1.set_metadata('ZoneName', zonename)
                    k1.set_metadata('Access', access)
                    k1.set_metadata('CreateDate', create_date)

                    k1.set_contents_from_string('')
                except Exception as e:
                    print('Failed to store bucket metadata:', e)
                return True
            else:
                return False
        except Exception:
            return False
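
To read the recorded details back, re-fetch the key with get_key (a locally constructed Key carries no server-side metadata) and use get_metadata. A short sketch against the marker object written above:

k = b.get_key('create_info')
if k is not None:
    print(k.get_metadata('BucketName'), k.get_metadata('CreateDate'))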
Example #13
def download_file_s3(keyname,
                     aws_key,
                     aws_secret,
                     s3_bucket,
                     s3_folder=None,
                     local_folder=None):
    """ Download a file from an S3 bucket and save it at keyname.  """
    if local_folder is not None:
        ensure_directory(local_folder)
        path = os.path.join(local_folder, keyname)
    else:
        path = keyname

    if os.path.isfile(path):
        print('file %s already exists!' % path)
        return path

    conn = S3Connection(aws_key, aws_secret)
    bucket = conn.get_bucket(s3_bucket)

    if s3_folder:
        aws_keyname = os.path.join(s3_folder, keyname)
    else:
        aws_keyname = keyname

    print('downloading data from S3...')
    s3key = Key(bucket)
    s3key.key = aws_keyname
    s3key.get_contents_to_filename(path)

    return path
Example #14
def handle_DELETE(request):
    """ Handle file deletion requests. For this, we use the Amazon Python SDK,
    boto.
    """
    try:
        import boto

        boto.set_stream_logger('boto')
        S3 = S3Connection(aws_access_key_id=settings.AWS_ACCESS_KEY_ID,
                          aws_secret_access_key=settings.AWS_SECRET_ACCESS_KEY)
    except ImportError:
        print("Could not import boto, the Amazon SDK for Python.")
        print("Deleting files will not work.")
        print("Install boto with")
        print("$ pip install boto")
        return make_response(500)

    bucket_name = request.POST.get('bucket')
    key_name = request.POST.get('key')
    try:
        aws_bucket = S3.get_bucket(bucket_name, validate=False)
        aws_key = Key(aws_bucket, key_name)
        aws_key.delete()
        return make_response(200)
    except Exception as err:
        print(err)
        return make_response(500)
Example #15
 def clean_s3(self):
     if self.file_to_clean is not None:
         from boto.s3.connection import S3Connection, Bucket, Key
         conn = S3Connection(cconfig.S3_ACCESS_KEY, cconfig.S3_SECRET_KEY)
         bucket = conn.get_bucket(cconfig.S3_BUCKET)
         k = Key(bucket)
         k.key = self.file_to_clean[1:]  # presumably drops a leading '/', since S3 keys do not start with one
         bucket.delete_key(k)
Example #16
 def getGroupsTrackerKey(self):
     groups_tracker_key_name = self.getGroupsTrackerKeyName()
     bucket = getHDISBucket()
     groups_tracker_key = bucket.get_key(groups_tracker_key_name)
     if not groups_tracker_key:
         groups_tracker_key = Key(bucket)
         groups_tracker_key.key = groups_tracker_key_name
     return groups_tracker_key
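
This is the usual boto get-or-create pattern: bucket.get_key returns None for a missing key, and a locally constructed Key is only materialised on S3 once contents are written. A usage sketch from inside the same class:

tracker_key = self.getGroupsTrackerKey()
# exists() round-trips to S3; a freshly constructed Key reports False
# until something has been written to it.
contents = tracker_key.get_contents_as_string() if tracker_key.exists() else b''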
Example #17
    def uploadStrToS3(self, destDir, filename, contents):
        '''Uploads a string to an S3 file.'''
        print('Uploading %s to Amazon S3 bucket %s' % (filename, self.bucket_name))

        k2 = Key(self.bucket)
        k2.key = os.path.join(destDir, filename)
        k2.set_contents_from_string(contents, reduced_redundancy=True)
        print()  # This newline is needed to get the path of the compiled binary printed on a newline.
Example #18
    def save_product_image_to_s3(self):
        if self.slug in BUCKET_LIST:
            return

        k = Key(bucket)
        k.key = self.slug
        file_object = urllib2.urlopen(self.img)
        fp = StringIO.StringIO(file_object.read())
        k.set_contents_from_file(fp)
Example #19
    def uploadFileToS3(self, filename):
        '''Uploads file to S3.'''
        destDir = ''  # Root folder of the S3 bucket
        destpath = os.path.join(destDir, os.path.basename(filename))
        print('Uploading %s to Amazon S3 bucket %s' % (filename, self.bucket_name))

        k = Key(self.bucket)
        k.key = destpath
        k.set_contents_from_filename(filename, reduced_redundancy=True)
Example #20
    def uploadStrToS3(self, destDir, filename, contents):  # pylint: disable=invalid-name,missing-param-doc
        # pylint: disable=missing-type-doc
        """Upload a string to an S3 file."""
        print(f"Uploading {filename} to Amazon S3 bucket {self.bucket_name}")

        k2 = Key(self.bucket)  # pylint: disable=invalid-name
        k2.key = os.path.join(destDir, filename)
        k2.set_contents_from_string(contents, reduced_redundancy=True)
        print()  # This newline is needed to get the path of the compiled binary printed on a newline.
Example #21
 def transfer_files(self):
     from boto.s3.connection import S3Connection
     from boto.s3.connection import Key
     conn = S3Connection(self.extra_args['aws_access_key'],
                         self.extra_args['aws_secret_key'])
     bucket = conn.get_bucket(self.extra_args['s3_bucket'])
     for fname in self.files:
         key = Key(bucket)
         key.key = os.path.basename(fname)
         key.set_contents_from_filename(fname)
Example #22
    def uploadFileToS3(self, filename):  # pylint: disable=invalid-name,missing-param-doc,missing-type-doc
        """Upload file to S3."""
        # Root folder of the S3 bucket
        destDir = ""  # pylint: disable=invalid-name
        destpath = os.path.join(destDir, os.path.basename(filename))
        print(f"Uploading {filename} to Amazon S3 bucket {self.bucket_name}")

        k = Key(self.bucket)
        k.key = destpath
        k.set_contents_from_filename(filename, reduced_redundancy=True)
Example #23
def delete_image_from_s3(file_name):
    try:
        conn = S3Connection(AWS_ACCESS_KEY, AWS_SECRET_KEY)
        logging.info("success s3 connection")
        bucket = Bucket(conn, BUCKET)
        k = Key(bucket=bucket, name=file_name)
        k.delete()
        logging.info("success delete image from s3")
    except Exception as e:
        logging.exception(e)  # debug-level logging would hide the failure under the default config
Example #24
    def uploadFileToS3(self, filename):
        '''Uploads file to S3.'''
        destDir = ''  # Root folder of the S3 bucket
        destpath = os.path.join(destDir, os.path.basename(filename))
        print('Uploading %s to Amazon S3 bucket %s' % (filename, self.bucket_name))

        k = Key(self.bucket)
        k.key = destpath
        k.set_contents_from_filename(filename, reduced_redundancy=True)
        print()  # This newline is needed to get the path of the compiled binary printed on a newline.
Example #25
	def add_latest(self, archive_path):
		print("  Adding backup from", archive_path)
		
		print("    * Uploading to", self.latestPath)
		pbar = S3Progress(archive_path)
		Key(self.bucket, self.latestPath).set_contents_from_filename(archive_path, cb=pbar.cb_function, num_cb=pbar.num_cb)
		pbar.final()

		#self.bucket.get_key(self.latestPath).set_contents_from_filename(archive_path)
		print("    * Copying to", self.currentPath)
		self.bucket.copy_key(self.currentPath, self.bucket.name, self.latestPath)
Example #26
    def post(self, request):
        key = Key(self._bucket)

        id = request.POST['id']
        attachment = AIAttachment.objects.get(id=id)

        key.key = attachment.attachment
        self._bucket.delete_key(key)

        attachment.delete()
        return JsonResponse({'ok': True})
Example #27
 def store_image(self,
                 callback,
                 image_id,
                 request,
                 body=None,
                 filename=None,
                 **kwargs):
     bucket = self._get_bucket()
     image = Key(bucket, image_id)
     image.set_contents_from_file(body)
     callback(image.generate_url(HOUR))
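
HOUR is not defined in the snippet; since generate_url takes its validity window in seconds, a plausible definition is:

# Hypothetical constant matching the call above: the signed URL
# stays valid for one hour.
HOUR = 60 * 60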
Example #28
    def upload(self, local_filepath, aws_filepath):
        """
        Uploads `local_filepath` to `aws_filepath`.

        Returns the published URL for the file.
        """
        logging.info('Publishing %s to %s' % (local_filepath, aws_filepath))

        # Key(bucket=..., name=...) already sets the key name.
        key = Key(bucket=self.bucket, name=aws_filepath)
        key.set_contents_from_filename(local_filepath)
        key.set_acl('public-read')
        # The object is public-read, so an unsigned URL satisfies the
        # docstring's promise to return the published URL.
        return key.generate_url(0, query_auth=False)
Example #29
def s3_delete_image(data):

    try:
        from boto.s3.connection import S3Connection, Bucket, Key
        conn = S3Connection(data['S3_KEY'], data['S3_SECRET'])
        b = Bucket(conn, data['S3_BUCKET'])
        k = Key(b)
        k.key = data['S3_UPLOAD_DIRECTORY'] + '/' + data['destinationFileName']
        b.delete_key(k)

    except Exception as e:
        return e
Example #30
def delete_file_from_s3(filename):
    conn = S3Connection(
        settings.AWS_ACCESS_KEY_ID,
        settings.AWS_SECRET_ACCESS_KEY,
    )
    b = Bucket(
        conn,
        settings.AWS_STORAGE_BUCKET_NAME,
    )
    k = Key(b)
    k.key = filename
    b.delete_key(k)