# Imports assumed by this snippet (classic boto v2 S3 API, Django settings):
from boto.s3.connection import S3Connection
from boto.s3.bucket import Bucket
from boto.s3.key import Key
from django.conf import settings


def delete_product_image(product):
    """Delete a product's original, thumbnail, and watermarked images from S3."""
    conn = S3Connection(settings.AWS_ACCESS_KEY_ID, settings.AWS_SECRET_ACCESS_KEY)
    bucket = Bucket(conn, settings.S3_BUCKET)

    # The S3 key name is the last path segment of each stored URL.
    filename1 = product.image_url.split("/")[-1]
    filename2 = product.thumbnail.url.split("/")[-1]
    filename3 = product.watermark.url.split("/")[-1]

    k = Key(bucket)
    k.key = 'products/' + filename1
    bucket.delete_key(k)

    k.key = 'products/thumbnails/' + filename2
    bucket.delete_key(k)

    k.key = 'products/watermarked/' + filename3
    bucket.delete_key(k)
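# boto's Bucket.delete_key also accepts a plain key name, so the same cleanup
# can be written without an intermediate Key object. A minimal alternative
# sketch (same assumed Product fields as above):
def delete_product_image_by_name(product):
    conn = S3Connection(settings.AWS_ACCESS_KEY_ID, settings.AWS_SECRET_ACCESS_KEY)
    bucket = Bucket(conn, settings.S3_BUCKET)
    for prefix, url in (('products/', product.image_url),
                        ('products/thumbnails/', product.thumbnail.url),
                        ('products/watermarked/', product.watermark.url)):
        bucket.delete_key(prefix + url.split("/")[-1])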
def uploadToS3():
    """Prompt for credentials and upload the result files to an S3 bucket."""
    aws_key = input("Enter the AWS key: ")
    aws_secret = input("Enter the secret key: ")
    bucket_name = input("Enter the bucket name: ")

    conn = S3Connection(aws_key, aws_secret)
    bucket = conn.get_bucket(bucket_name)
    print("Uploading to bucket %s..." % bucket_name)

    # Upload each result file under its own key.
    filenames = [
        "XGB_File_400trees_TrainDatafinal.csv",
        "documents_meta.csv",
        "xgb_results.csv",
        "xgboost.pkl",
    ]
    for filename in filenames:
        k = Key(bucket)
        k.key = filename
        k.set_contents_from_filename(filename)

    print('Upload completed.')
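# Prompting for secrets is fine for a one-off script, but boto can also read
# AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY from the environment, so a
# no-argument S3Connection() avoids typing credentials into the terminal.
# A minimal sketch of that variant:
import os

def upload_to_s3_from_env(bucket_name, filenames):
    conn = S3Connection()  # credentials picked up from the environment
    bucket = conn.get_bucket(bucket_name)
    for filename in filenames:
        k = Key(bucket)
        k.key = os.path.basename(filename)
        k.set_contents_from_filename(filename)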
def remove_profile_image(user):
    """Delete a user's profile image and its crop from S3, then clear the fields."""
    conn = S3Connection(settings.AWS_ACCESS_KEY_ID, settings.AWS_SECRET_ACCESS_KEY)
    bucket = Bucket(conn, settings.S3_BUCKET)

    filename1 = user.profile_image_url.split("/")[-1]
    filename2 = user.profile_image_crop.url.split("/")[-1]

    k = Key(bucket)
    k.key = 'profile_images/' + filename1
    bucket.delete_key(k)

    k.key = 'profile_images/crop/' + filename2
    bucket.delete_key(k)

    user.profile_image_url = None
    user.profile_image_crop = None
    user.save()
def delete_img_aws(instance, **kwargs):
    conn = S3Connection(settings.AWS_ACCESS_KEY_ID, settings.AWS_SECRET_ACCESS_KEY)
    b = Bucket(conn, settings.AWS_STORAGE_BUCKET_NAME)
    img_k = Key(b)
    img_thumb_k = Key(b)
    img_k.key = instance.image.name
    img_thumb_k.key = instance.image_thumb.name
    b.delete_key(img_k)
    b.delete_key(img_thumb_k)
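# ``delete_img_aws`` has the (instance, **kwargs) shape of a Django signal
# receiver. A minimal hookup sketch (``Photo`` is an assumed model name):
from django.db.models.signals import post_delete

post_delete.connect(delete_img_aws, sender=Photo)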
    def perform_destroy(self, instance):
        conn = S3Connection(settings.AWS_ACCESS_KEY_ID, settings.AWS_SECRET_ACCESS_KEY, host='s3.ap-northeast-2.amazonaws.com')
        b = Bucket(conn, settings.AWS_STORAGE_BUCKET_NAME)
        k = Key(b)

        k.key = str(instance.left_footscan_stl)
        b.delete_key(k)

        k.key = str(instance.right_footscan_stl)
        b.delete_key(k)

        instance.delete()
    def test_s3(self):
        title = "Dino Test"
        link = "http://example.com"
        description = "My dino test"
        base_url = "http://example.com/items/"
        aws_key = os.environ['MYDINOSAUR_AWS_ACCESS_KEY']
        aws_secret = os.environ['MYDINOSAUR_AWS_SECRET_KEY']
        s3_bucket = os.environ['MYDINOSAUR_S3_BUCKET']
        dino = mydinosaur.MyS3Dinosaur(':memory:',
                                       title=title,
                                       link=link,
                                       description=description,
                                       base_url=base_url,
                                       aws_access_key=aws_key,
                                       aws_secret_key=aws_secret,
                                       s3_bucket=s3_bucket)
        dino.update("hello there")

        # make sure that stuff got uploaded
        conn = S3Connection(aws_key, aws_secret)
        bucket = conn.get_bucket(s3_bucket)

        k = Key(bucket)
        k.key = 'rss.xml'
        feed = feedparser.parse(k.get_contents_as_string())
        self.assertEqual(len(feed.entries), 1)

        k = Key(bucket)
        k.key = '1.html'
        html = k.get_contents_as_string()
        self.assertIn('<!-- MyDinosaur default html template -->', html)

        # now, update with media!
        filehandle = StringIO("Hello there.\n")
        dino.update_with_media('this is a test',
                               filehandle,
                               media_type="text/plain",
                               ext="txt")

        k = Key(bucket)
        k.key = 'rss.xml'
        feed = feedparser.parse(k.get_contents_as_string())
        self.assertEqual(len(feed.entries), 2)

        # ensure that media was uploaded
        media_url = feed.entries[0].enclosures[0].url
        key_name = urlparse(media_url).path.split('/')[-1]
        k = Key(bucket)
        k.key = key_name
        contents = k.get_contents_as_string()
        self.assertEqual(contents, "Hello there.\n")
    def delete(self):
        conn = S3Connection(settings.AWS_ACCESS_KEY, settings.AWS_SECRET_KEY)
        bucket = conn.get_bucket(
            settings.AWS_IMAGE_BUCKET,
            validate=False
        )
        key = Key(bucket)
        key.key = self.key
        optimized_key = Key(bucket)
        optimized_key.key = self.optimized_key
        thumbnail_key = Key(bucket)
        thumbnail_key.key = self.thumbnail_key
        super(Photo, self).delete()
        bucket.delete_keys([key, optimized_key, thumbnail_key])
def screenshot(request, val):
	if val == 1:
		# Take a screenshot of the requested URL and upload it to S3.
		conn = S3Connection('##', '##')
		bucket = conn.get_bucket('lheston-bucket')
		k = Key(bucket)
		k.key = '//lab3' + request + '_toS3.png'
		driver = webdriver.PhantomJS()  # or add PhantomJS to your PATH
		driver.set_window_size(1024, 768)  # optional
		driver.get(request)
		driver.save_screenshot('tempfile.png')
		driver.quit()
		with open('tempfile.png', 'rb') as screenshot_file:
			k.set_contents_from_file(screenshot_file)
		os.remove('tempfile.png')  # remove the temp file only after the upload
		return request + '_toS3.png'
	elif val == 2:
		# Delete a previously uploaded screenshot.
		conn = S3Connection('##', '##')
		bucket = Bucket(conn, 'lheston-bucket')
		bucket.delete_key('/lab3' + request)
	else:
		return 'incorrect input'
def test_bwa():
    work_dir = tempfile.mkdtemp()
    create_config(work_dir)
    create_manifest(work_dir)
    # Call Pipeline
    try:
        subprocess.check_call(
            [
                "toil-bwa",
                "run",
                os.path.join(work_dir, "jstore"),
                "--manifest",
                os.path.join(work_dir, "manifest.txt"),
                "--config",
                os.path.join(work_dir, "config.txt"),
                "--retryCount",
                "1",
            ]
        )
    finally:
        shutil.rmtree(work_dir)
        conn = S3Connection()
        b = Bucket(conn, "cgl-driver-projects")
        k = Key(b)
        k.key = "test/ci/ci_test.bam"
        k.delete()
def upload_to_s3(fp, name):
    conn = _get_s3_connection()
    bucket = conn.create_bucket('muxlist')
    k = Key(bucket)
    k.key = name
    k.set_contents_from_file(fp)
    return 'http://muxlist.s3.amazonaws.com/%s' % name
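# A minimal usage sketch for upload_to_s3 (the local filename is hypothetical):
with open('track01.mp3', 'rb') as fp:
    url = upload_to_s3(fp, 'track01.mp3')
print(url)  # http://muxlist.s3.amazonaws.com/track01.mp3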
def test_upload_and_download_with_encryption(tmpdir):
    from toil_lib.urls import s3am_upload
    from toil_lib.urls import download_url
    from boto.s3.connection import S3Connection, Bucket, Key
    work_dir = str(tmpdir)
    # Create temporary encryption key
    key_path = os.path.join(work_dir, 'foo.key')
    subprocess.check_call([
        'dd', 'if=/dev/urandom', 'bs=1', 'count=32', 'of={}'.format(key_path)
    ])
    # Create test file
    upload_fpath = os.path.join(work_dir, 'upload_file')
    with open(upload_fpath, 'wb') as fout:
        fout.write(os.urandom(1024))
    # Upload file
    random_key = os.path.join('test/', str(uuid4()), 'upload_file')
    s3_url = os.path.join('s3://cgl-driver-projects/', random_key)
    try:
        s3_dir = os.path.split(s3_url)[0]
        s3am_upload(fpath=upload_fpath, s3_dir=s3_dir, s3_key_path=key_path)
        # Download the file
        download_url(url=s3_url,
                     name='download_file',
                     work_dir=work_dir,
                     s3_key_path=key_path)
        download_fpath = os.path.join(work_dir, 'download_file')
        assert os.path.exists(download_fpath)
        assert filecmp.cmp(upload_fpath, download_fpath)
    finally:
        # Delete the Key. Key deletion never fails so we don't need to catch any exceptions
        with closing(S3Connection()) as conn:
            b = Bucket(conn, 'cgl-driver-projects')
            k = Key(b)
            k.key = random_key
            k.delete()
def download_file_s3(aws_path, aws_config, local_folder=DATA_PATH):
    """ Download a file from an S3 bucket and save it in the local folder. """
    # remove the prefix and extract the S3 bucket, folder, and file name
    m = re.match(S3_PREFIX, aws_path)
    split = aws_path[len(m.group()):].split('/')
    s3_bucket = split.pop(0)
    s3_folder = '/'.join(split[:-1])
    keyname = split[-1]

    # create the local folder if necessary
    if local_folder is not None:
        ensure_directory(local_folder)
        path = os.path.join(local_folder, keyname)
    else:
        path = keyname

    if os.path.isfile(path):
        print('file %s already exists!' % path)
        return path

    conn = S3Connection(aws_config.access_key, aws_config.secret_key)
    bucket = conn.get_bucket(s3_bucket)

    if s3_folder:
        aws_keyname = os.path.join(s3_folder, keyname)
    else:
        aws_keyname = keyname

    print('downloading data from S3...')
    s3key = Key(bucket)
    s3key.key = aws_keyname
    s3key.get_contents_to_filename(path)

    return path
def delete():
	songid = int(request.args.get('songid'))

	song = Song.query.filter_by(id=songid).first()
	votes = Vote.query.filter_by(songdata=song.songdata).all()
	for vote in votes:
		db.session.delete(vote)
	db.session.commit()
	db.session.delete(song)
	db.session.commit()

	try:
		conn = S3Connection(AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY)
		b = Bucket(conn, S3_BUCKET_NAME)
		k = Key(b)
		k.key = song.songdata.lower() + '.mp3'
		b.delete_key(k)
	except Exception:
		pass  # the DB rows are already gone; a failed S3 delete is non-fatal here
	return render_template('notice.html', message="Delete successful.", redirect="/")
def delete_from_s3(image_name):
    """Delete image from S3 bucket"""
    conn = S3Connection(aws_access_key_id, aws_secret_access_key)
    bucket = Bucket(conn, "shopifyimagerepository")
    k = Key(bucket)
    k.key = image_name
    bucket.delete_key(k)
def delete_from_S3(filename):
    conn = S3Connection(settings.AWS_ACCESS_KEY_ID,
                        settings.AWS_SECRET_ACCESS_KEY)
    bucket = Bucket(conn, settings.AWS_STORAGE_BUCKET_NAME)
    k = Key(bucket)
    k.key = settings.MEDIA_URL + filename
    bucket.delete_key(k)
    def add_bucket(self, bucket_name, access, zonename, create_date):
        try:
            bucket_count = len(self.conn.get_all_buckets())
            if bucket_count < self.bucket_limit:
                self.conn.create_bucket(bucket_name)
                b = self.conn.get_bucket(bucket_name)
                try:
                    k1 = Key(b)
                    k1.key = 'create_info'

                    # Note: metadata key names must not contain underscores;
                    # set_metadata('Bucket_Name', ...) fails with a 403 at
                    # creation time, hence the CamelCase names below.
                    k1.set_metadata('BucketName', bucket_name)
                    k1.set_metadata('ZoneName', zonename)
                    k1.set_metadata('Access', access)
                    k1.set_metadata('CreateDate', create_date)

                    k1.set_contents_from_string('')
                except Exception as e:
                    print(e)
                return True
            else:
                return False
        except Exception:
            return False
def test_upload_and_download_with_encryption(tmpdir):
    from toil_scripts.lib.urls import s3am_upload
    from toil_scripts.lib.urls import download_url
    from boto.s3.connection import S3Connection, Bucket, Key
    work_dir = str(tmpdir)
    # Create temporary encryption key
    key_path = os.path.join(work_dir, 'foo.key')
    subprocess.check_call(['dd', 'if=/dev/urandom', 'bs=1', 'count=32',
                           'of={}'.format(key_path)])
    # Create test file
    upload_fpath = os.path.join(work_dir, 'upload_file')
    with open(upload_fpath, 'wb') as fout:
        fout.write(os.urandom(1024))
    # Upload file
    s3_dir = 's3://cgl-driver-projects/test'
    s3am_upload(fpath=upload_fpath, s3_dir=s3_dir, s3_key_path=key_path)
    # Download the file
    url = 'https://s3-us-west-2.amazonaws.com/cgl-driver-projects/test/upload_file'
    download_url(url=url, name='download_file', work_dir=work_dir, s3_key_path=key_path)
    download_fpath = os.path.join(work_dir, 'download_file')
    assert os.path.exists(download_fpath)
    assert filecmp.cmp(upload_fpath, download_fpath)
    # Delete the Key
    conn = S3Connection()
    b = Bucket(conn, 'cgl-driver-projects')
    k = Key(b)
    k.key = 'test/upload_file'
    k.delete()
def download_file_s3(keyname,
                     aws_key,
                     aws_secret,
                     s3_bucket,
                     s3_folder=None,
                     local_folder=None):
    """ Download a file from an S3 bucket and save it at keyname.  """
    if local_folder is not None:
        ensure_directory(local_folder)
        path = os.path.join(local_folder, keyname)
    else:
        path = keyname

    if os.path.isfile(path):
        print('file %s already exists!' % path)
        return path

    conn = S3Connection(aws_key, aws_secret)
    bucket = conn.get_bucket(s3_bucket)

    if s3_folder:
        aws_keyname = os.path.join(s3_folder, keyname)
    else:
        aws_keyname = keyname

    print('downloading data from S3...')
    s3key = Key(bucket)
    s3key.key = aws_keyname
    s3key.get_contents_to_filename(path)

    return path
def download_file_s3(aws_path, aws_config, local_folder=DATA_DL_PATH):
    """ Download a file from an S3 bucket and save it in the local folder. """
    # remove the prefix and extract the S3 bucket, folder, and file name
    m = re.match(S3_PREFIX, aws_path)
    split = aws_path[len(m.group()):].split('/')
    s3_bucket = split.pop(0)
    s3_folder = '/'.join(split[:-1])
    keyname = split[-1]

    # create the local folder if necessary
    if local_folder is not None:
        ensure_directory(local_folder)
        path = os.path.join(local_folder, keyname)
    else:
        path = keyname

    if os.path.isfile(path):
        logger.warning('file %s already exists!' % path)
        return path

    conn = S3Connection(aws_config.access_key, aws_config.secret_key)
    bucket = conn.get_bucket(s3_bucket)

    if s3_folder:
        aws_keyname = os.path.join(s3_folder, keyname)
    else:
        aws_keyname = keyname

    logger.debug('downloading data from S3...')
    s3key = Key(bucket)
    s3key.key = aws_keyname
    s3key.get_contents_to_filename(path)
    logger.info('file saved at %s' % path)

    return path
    def clean_s3(self):
        if self.file_to_clean is not None:
            from boto.s3.connection import S3Connection, Bucket, Key
            conn = S3Connection(cconfig.S3_ACCESS_KEY, cconfig.S3_SECRET_KEY)
            bucket = conn.get_bucket(cconfig.S3_BUCKET)
            k = Key(bucket)
            k.key = self.file_to_clean[1:]
            bucket.delete_key(k)
def WriteDataStringtoS3(string, game, msg_type, S3_bucket):
    today_YYYMMDD = datetime.now().strftime('%Y%m%d')
    today_hhmmss = datetime.now().strftime('%H-%M-%S')
    S3_path = '/data/' + game + '/' + msg_type + '/' + today_YYYMMDD + '/' + today_hhmmss + '-logs.txt'

    k = Key(S3_bucket)
    k.key = S3_path
    k.set_contents_from_string(string, reduced_redundancy=True)
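# Note that ``S3_bucket`` above is an already-open boto Bucket object, not a
# bucket name. A minimal call sketch (connection, bucket, and payload are
# hypothetical):
conn = S3Connection()  # credentials from the environment
bucket = conn.get_bucket('my-gamedata-bucket')
WriteDataStringtoS3('{"event": "level_up"}', 'mygame', 'events', bucket)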
    def getGroupsTrackerKey(self):
        groups_tracker_key_name = self.getGroupsTrackerKeyName()
        bucket = getHDISBucket()
        groups_tracker_key = bucket.get_key(groups_tracker_key_name)
        if not groups_tracker_key:
            groups_tracker_key = Key(bucket)
            groups_tracker_key.key = groups_tracker_key_name
        return groups_tracker_key
import io

def add(bkt, key, img, form='JPEG'):
  # ``conn`` is assumed to be a module-level S3Connection.
  bucket = conn.get_bucket(bkt)
  newKeyObj = Key(bucket)
  newKeyObj.key = key
  newKeyObj.set_metadata('Content-Type', 'image/jpeg')
  buf = io.BytesIO()  # image bytes need a binary buffer
  img.save(buf, form)
  newKeyObj.set_contents_from_string(buf.getvalue())
  newKeyObj.set_acl('public-read')
    def uploadStrToS3(self, destDir, filename, contents):  # pylint: disable=invalid-name,missing-param-doc
        # pylint: disable=missing-type-doc
        """Upload a string to an S3 file."""
        print(f"Uploading {filename} to Amazon S3 bucket {self.bucket_name}")

        k2 = Key(self.bucket)  # pylint: disable=invalid-name
        k2.key = os.path.join(destDir, filename)
        k2.set_contents_from_string(contents, reduced_redundancy=True)
        print()  # This newline is needed to get the path of the compiled binary printed on a newline.
	def pull_s3_file(self, bucket, key, dst):
		"""
		Get a file from an S3 bucket
		"""
		conn = boto.connect_s3(self.aws_id, self.aws_key)
		b = conn.create_bucket(bucket)
		k = Key(b)
		k.key = key
		k.get_contents_to_filename(dst)
    def save_product_image_to_s3(self):
        if self.slug in BUCKET_LIST:
            return

        k = Key(bucket)
        k.key = self.slug
        file_object = urllib2.urlopen(self.img)
        fp = StringIO.StringIO(file_object.read())
        k.set_contents_from_file(fp)
    def uploadFileToS3(self, filename):  # pylint: disable=invalid-name,missing-param-doc,missing-type-doc
        """Upload file to S3."""
        # Root folder of the S3 bucket
        destDir = ""  # pylint: disable=invalid-name
        destpath = os.path.join(destDir, os.path.basename(filename))
        print(f"Uploading {filename} to Amazon S3 bucket {self.bucket_name}")

        k = Key(self.bucket)
        k.key = destpath
        k.set_contents_from_filename(filename, reduced_redundancy=True)
	def transfer_files(self):
		from boto.s3.connection import S3Connection
		from boto.s3.connection import Key
		conn = S3Connection(self.extra_args['aws_access_key'],
				self.extra_args['aws_secret_key'])
		bucket = conn.get_bucket(self.extra_args['s3_bucket'])
		for fname in self.files:
			key = Key(bucket)
			key.key = os.path.basename(fname)
			key.set_contents_from_filename(fname)
def user_thumbnail_delete(sender, instance, **kwargs):
    logging.debug('Firing pre-delete signal...')
    gif = get_object_or_404(Gif, pk=instance.id)
    filename = str(gif.thumbnail).split('/')[-1]
    s3conn = S3Connection(settings.AWS_ACCESS_KEY_ID, settings.AWS_SECRET_ACCESS_KEY)
    bucket = Bucket(s3conn, settings.AWS_STORAGE_BUCKET_NAME)
    bucket.delete_key('thumbs/' + filename)
    def upload(self):
        for destination, data, content_type, compressed in self.get_files():
            key = Key(self.bucket)
            key.content_type = content_type
            if compressed:
                key.set_metadata('content-encoding', 'gzip')

            for header, value in self.headers:
                key.set_metadata(header, value)
            key.key = destination
            key.set_contents_from_string(data)
    def post(self, request):
        key = Key(self._bucket)

        id = request.POST['id']
        attachment = AIAttachment.objects.get(id=id)

        key.key = attachment.attachment
        self._bucket.delete_key(key)

        attachment.delete()
        return JsonResponse({'ok': True})
def s3_delete_image(data):

    try:
        from boto.s3.connection import S3Connection, Bucket, Key
        conn = S3Connection(data['S3_KEY'], data['S3_SECRET'])
        b = Bucket(conn, data['S3_BUCKET'])
        k = Key(b)
        k.key = data['S3_UPLOAD_DIRECTORY'] + '/' + data['destinationFileName']
        b.delete_key(k)

    except Exception as e:
        return e
def delete_file_from_s3(filename):
    conn = S3Connection(
        settings.AWS_ACCESS_KEY_ID,
        settings.AWS_SECRET_ACCESS_KEY,
    )
    b = Bucket(
        conn,
        settings.AWS_STORAGE_BUCKET_NAME,
    )
    k = Key(b)
    k.key = filename
    b.delete_key(k)
    def upload(self, local_filepath, aws_filepath):
        """
        Uploads `local_filepath` to `aws_filepath` and makes it
        publicly readable.
        """
        logging.info('Publishing %s to %s' % (local_filepath, aws_filepath))

        key = Key(bucket=self.bucket, name=aws_filepath)
        key.set_contents_from_filename(local_filepath)
        key.set_acl('public-read')
def upload_file(conn, full_path):
    b = Bucket(conn, BUCKET)
    k = Key(b)
    k.key = full_path
    expires = datetime.utcnow() + timedelta(days=(25 * 365))
    expires = expires.strftime("%a, %d %b %Y %H:%M:%S GMT")
    k.set_metadata("Content-Type", mimetypes.guess_type(full_path)[0])
    k.set_metadata("Expires", expires)
    # 25 years in seconds (86400 seconds per day)
    k.set_metadata("Cache-Control", "max-age={0}, public".format(25 * 365 * 86400))
    k.set_contents_from_filename(full_path)
    k.set_acl('public-read')
    print("{} -> http://s3.amazonaws.com/yaluandmike/{}".format(full_path, full_path))
	def push_s3_file(self, bucket, src=None, key=None):
		"""
		Upload a file to an S3 bucket
		"""
		if not src:
			src = self.working_file
		if not key:
			key = os.path.basename(src)
		conn = boto.connect_s3(self.aws_id, self.aws_key)
		b = conn.create_bucket(bucket)
		k = Key(b)
		k.key = key
		k.set_contents_from_filename(src)
		self.working_file = '%s/%s' % (bucket, key)
		return self.working_file
def WriteStringtoS3(string, game, msg_type):

    env, game, msgtype = g_env, game, msg_type
    today_YYYMMDD, today_hhmmss = datetime.now().strftime(
        '%Y%m%d'), datetime.now().strftime('%H-%M-%S')
    S3_path = env + '/data/' + game + '/' + msgtype + '/' + today_YYYMMDD + '/' + today_hhmmss + '-logs.txt'
    S3_bucket = 'dailydosegames-gamedata-' + g_AWSAccessKeyId.lower()

    conn = S3Connection(g_AWSAccessKeyId, g_AWSSecretKey)
    bucket = conn.get_bucket(S3_bucket)
    k = Key(bucket)
    k.key = S3_path
    k.set_contents_from_string(string, reduced_redundancy=True)
def move_file(from_filename, to_filename, bucket):
    """ Move file between 'folders' in a bucket """
    conn = boto.connect_s3()

    processing_bucket = conn.get_bucket(bucket)

    processing_bucket.copy_key(to_filename, processing_bucket.name,
                               from_filename)

    k = Key(processing_bucket)

    k.key = from_filename
    processing_bucket.delete_key(k)
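# S3 has no rename operation and "folders" are only key prefixes, so
# copy-then-delete is the standard move idiom. A hypothetical call:
move_file('incoming/report.csv', 'processed/report.csv', 'my-processing-bucket')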
def delete_s3_pic(user, image):
	conn = S3Connection(AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY)
	b = Bucket(conn, AWS_STORAGE_BUCKET_NAME)
	k = Key(b)
	k.key = 'media/%s' % image.image
	b.delete_key(k)
    def post(self, request):
        key = Key(self._bucket)

        id = request.POST['id']

        attachments = AIAttachment.objects.filter(ai_pics_id=id)
        for attachment in attachments:
            key.key = attachment.attachment
            self._bucket.delete_key(key)

        aipic = AIPics.objects.get(id=id)
        aipic.delete()
        return JsonResponse({'ok': True})
def prop(request, prop_id):
    if request.method == "GET":
        domain = request.get_host()
        # GET - READ
        try:
            prop = Prop.objects.get(id=prop_id)
            response_data = {
                "success": True,
                "prop": {
                    "id": prop.id,
                    "name": prop.name,
                    "description": prop.description,
                    "url": prop.image.url
                }
            }
            return HttpResponse(json.dumps(response_data),
                                content_type="application/json")
        except ObjectDoesNotExist:
            return HttpResponse(status=404)
    elif request.method == "PUT":
        # PUT - UPDATE - later
        pass
    elif request.method == "DELETE":
        prop = Prop.objects.get(id=prop_id)
        # Unset From all scenes and delete scene_prop
        scene_props = SceneProp.objects.filter(prop_file=prop)
        for scene_prop in scene_props:
            scene = scene_prop.scene
            scene_prop.delete()
            scene.save()
        # Delete File
        if prop.image:
            if not settings.USE_AWS and prop.image.path:
                # Delete from MEDIA_ROOT
                os.remove(prop.image.path)
            elif settings.USE_AWS and prop.image.name:
                # Delete from AWS S3
                connection = S3Connection(settings.AWS_ACCESS_KEY_ID,
                                          settings.AWS_SECRET_ACCESS_KEY)
                bucket = Bucket(connection, settings.AWS_STORAGE_BUCKET_NAME)
                fileKey = Key(bucket)
                fileKey.key = prop.image.name
                bucket.delete_key(fileKey)
        # Delete From Database
        prop.delete()
        response_data = {"success": True}
        return HttpResponse(json.dumps(response_data),
                            content_type="application/json")
    else:
        return HttpResponseNotAllowed(['GET', 'PUT', 'DELETE'])
    return HttpResponse("API call for prop #" + prop_id)
def getOrCreateS3Key(key_name):
    bucket = getHDISBucket()
    key = bucket.get_key(key_name)
    if key is None:
        # The key doesn't exist yet; start with an empty JSON object.
        key = Key(bucket)
        key.key = key_name
        key_json = json.dumps({})
    else:
        key_json = key.get_contents_as_string()
    try:
        key_dict = json.loads(key_json)
    except ValueError:
        key_dict = {}
    return key, key_dict
def upload():
    s3_conn = s3()
    bucket = Bucket(s3_conn, 'distributed-web-crawler')

    k = Key(bucket)
    k.key = 'list_links_a.txt'
    k.set_contents_from_filename('input_links_a.txt')

    os.remove('input_links_a.txt')
    s3_conn.close()