コード例 #1
0
def remove_profile_image(user):
    """Delete a user's profile image (original and crop) from S3 and clear the fields.

    Removes ``profile_images/<name>`` and ``profile_images/crop/<name>`` from the
    configured bucket, then nulls both image fields on ``user`` and saves it.
    """
    conn = S3Connection(settings.AWS_ACCESS_KEY_ID, settings.AWS_SECRET_ACCESS_KEY)

    # The stored values are URLs; the S3 object name is the last path segment.
    # (Negative indexing replaces the original `lst[len(lst) - 1]` pattern.)
    filename1 = user.profile_image_url.split("/")[-1]
    filename2 = user.profile_image_crop.url.split("/")[-1]

    b = Bucket(conn, settings.S3_BUCKET)
    k = Key(b)

    k.key = 'profile_images/' + filename1
    b.delete_key(k)

    k.key = 'profile_images/crop/' + filename2
    b.delete_key(k)

    user.profile_image_url = None
    user.profile_image_crop = None
    user.save()
コード例 #2
0
def download_file_s3(aws_path, aws_config, local_folder=DATA_PATH):
    """ Download a file from an S3 bucket and save it in the local folder.

    aws_path: full S3 URI matching S3_PREFIX (e.g. "s3://bucket/folder/file").
    aws_config: credentials object exposing .access_key and .secret_key.
    local_folder: destination directory (created if needed); None saves to CWD.
    Returns the local path; an already-present file short-circuits the download.
    """
    # remove the prefix and extract the S3 bucket, folder, and file name
    m = re.match(S3_PREFIX, aws_path)  # NOTE(review): assumes the prefix matches; m is None otherwise
    split = aws_path[len(m.group()):].split('/')
    s3_bucket = split.pop(0)  # first path segment is the bucket name
    s3_folder = '/'.join(split[:-1])  # middle segments form the key prefix
    keyname = split[-1]  # final segment is the object/file name

    # create the local folder if necessary
    if local_folder is not None:
        ensure_directory(local_folder)
        path = os.path.join(local_folder, keyname)
    else:
        path = keyname

    if os.path.isfile(path):
        print 'file %s already exists!' % path
        return path

    conn = S3Connection(aws_config.access_key, aws_config.secret_key)
    bucket = conn.get_bucket(s3_bucket)

    # the key inside the bucket includes the folder prefix, if any
    if s3_folder:
        aws_keyname = os.path.join(s3_folder, keyname)
    else:
        aws_keyname = keyname

    print 'downloading data from S3...'
    s3key = Key(bucket)
    s3key.key = aws_keyname
    s3key.get_contents_to_filename(path)

    return path
コード例 #3
0
def delete_from_S3(filename):
    """Remove ``filename`` (prefixed with MEDIA_URL) from the storage bucket."""
    connection = S3Connection(settings.AWS_ACCESS_KEY_ID,
                              settings.AWS_SECRET_ACCESS_KEY)
    storage_bucket = Bucket(connection, settings.AWS_STORAGE_BUCKET_NAME)
    target = Key(storage_bucket)
    target.key = settings.MEDIA_URL + filename
    storage_bucket.delete_key(target)
コード例 #4
0
ファイル: test_urls.py プロジェクト: fnothaft/toil-lib
def test_upload_and_download_with_encryption(tmpdir):
    """Round-trip test: s3am-upload an encrypted random file to S3, download it
    back with the same key, and verify the bytes match.

    Requires AWS credentials in the environment, the s3am tool on PATH, and
    write access to the cgl-driver-projects bucket.
    """
    from toil_lib.urls import s3am_upload
    from toil_lib.urls import download_url
    from boto.s3.connection import S3Connection, Bucket, Key
    work_dir = str(tmpdir)
    # Create temporary encryption key
    key_path = os.path.join(work_dir, 'foo.key')
    subprocess.check_call([
        'dd', 'if=/dev/urandom', 'bs=1', 'count=32', 'of={}'.format(key_path)
    ])
    # Create test file
    upload_fpath = os.path.join(work_dir, 'upload_file')
    with open(upload_fpath, 'wb') as fout:
        fout.write(os.urandom(1024))
    # Upload file
    random_key = os.path.join('test/', str(uuid4()), 'upload_file')  # unique per run to avoid collisions
    s3_url = os.path.join('s3://cgl-driver-projects/', random_key)
    try:
        s3_dir = os.path.split(s3_url)[0]
        s3am_upload(fpath=upload_fpath, s3_dir=s3_dir, s3_key_path=key_path)
        # Download the file
        download_url(url=s3_url,
                     name='download_file',
                     work_dir=work_dir,
                     s3_key_path=key_path)
        download_fpath = os.path.join(work_dir, 'download_file')
        assert os.path.exists(download_fpath)
        assert filecmp.cmp(upload_fpath, download_fpath)
    finally:
        # Delete the Key. Key deletion never fails so we don't need to catch any exceptions
        with closing(S3Connection()) as conn:
            b = Bucket(conn, 'cgl-driver-projects')
            k = Key(b)
            k.key = random_key
            k.delete()
コード例 #5
0
def test_bwa():
    """Smoke-test the toil-bwa pipeline end to end via its CLI.

    Generates a config and manifest in a throwaway temp dir, runs the
    pipeline once, and always cleans up both the local scratch dir and the
    pipeline's S3 output object.
    """
    work_dir = tempfile.mkdtemp()
    create_config(work_dir)
    create_manifest(work_dir)
    # Call Pipeline
    try:
        subprocess.check_call(
            [
                "toil-bwa",
                "run",
                os.path.join(work_dir, "jstore"),
                "--manifest",
                os.path.join(work_dir, "manifest.txt"),
                "--config",
                os.path.join(work_dir, "config.txt"),
                "--retryCount",
                "1",
            ]
        )
    finally:
        # Clean up local scratch and the S3 output even when the run fails.
        shutil.rmtree(work_dir)
        conn = S3Connection()
        b = Bucket(conn, "cgl-driver-projects")
        k = Key(b)
        k.key = "test/ci/ci_test.bam"
        k.delete()
コード例 #6
0
ファイル: utilities.py プロジェクト: shaneopatrick/ATM
def download_file_s3(keyname,
                     aws_key,
                     aws_secret,
                     s3_bucket,
                     s3_folder=None,
                     local_folder=None):
    """ Download a file from an S3 bucket and save it at keyname.

    keyname: object name within the bucket (also used as the local file name).
    aws_key/aws_secret: AWS credentials.
    s3_bucket: bucket to read from; s3_folder: optional key prefix.
    local_folder: destination dir (created if needed); None saves to CWD.
    Returns the local path; an already-present file short-circuits the download.
    """
    if local_folder is not None:
        ensure_directory(local_folder)
        path = os.path.join(local_folder, keyname)
    else:
        path = keyname

    if os.path.isfile(path):
        print 'file %s already exists!' % path
        return path

    conn = S3Connection(aws_key, aws_secret)
    bucket = conn.get_bucket(s3_bucket)

    # the key inside the bucket includes the folder prefix, if any
    if s3_folder:
        aws_keyname = os.path.join(s3_folder, keyname)
    else:
        aws_keyname = keyname

    print 'downloading data from S3...'
    s3key = Key(bucket)
    s3key.key = aws_keyname
    s3key.get_contents_to_filename(path)

    return path
コード例 #7
0
ファイル: test_urls.py プロジェクト: cmarkello/toil-scripts
def test_upload_and_download_with_encryption(tmpdir):
    """Round-trip test: s3am-upload an encrypted random file to S3, download it
    with the same key, and verify the bytes match.

    Requires AWS credentials, the s3am tool, and write access to the
    cgl-driver-projects bucket.
    """
    from toil_scripts.lib.urls import s3am_upload
    from toil_scripts.lib.urls import download_url
    from boto.s3.connection import S3Connection, Bucket, Key
    work_dir = str(tmpdir)
    # Create temporary encryption key
    key_path = os.path.join(work_dir, 'foo.key')
    subprocess.check_call(['dd', 'if=/dev/urandom', 'bs=1', 'count=32',
                           'of={}'.format(key_path)])
    # Create test file
    upload_fpath = os.path.join(work_dir, 'upload_file')
    with open(upload_fpath, 'wb') as fout:
        fout.write(os.urandom(1024))
    # Upload file
    random_key = os.path.join('test/', str(uuid4()), 'upload_file')  # unique per run to avoid collisions
    s3_url = os.path.join('s3://cgl-driver-projects/', random_key)
    try:
        s3_dir = os.path.split(s3_url)[0]
        s3am_upload(fpath=upload_fpath, s3_dir=s3_dir, s3_key_path=key_path)
        # Download the file
        download_url(url=s3_url, name='download_file', work_dir=work_dir, s3_key_path=key_path)
        download_fpath = os.path.join(work_dir, 'download_file')
        assert os.path.exists(download_fpath)
        assert filecmp.cmp(upload_fpath, download_fpath)
    finally:
        # Delete the Key. Key deletion never fails so we don't need to catch any exceptions
        with closing(S3Connection()) as conn:
            b = Bucket(conn, 'cgl-driver-projects')
            k = Key(b)
            k.key = random_key
            k.delete()
コード例 #8
0
ファイル: utilities.py プロジェクト: wuqixiaobai/ATM
def download_file_s3(aws_path, aws_config, local_folder=DATA_DL_PATH):
    """ Download a file from an S3 bucket and save it in the local folder.

    aws_path: full S3 URI matching S3_PREFIX (e.g. "s3://bucket/folder/file").
    aws_config: credentials object exposing .access_key and .secret_key.
    local_folder: destination directory (created if needed); None saves to CWD.
    Returns the local path; an already-present file short-circuits the download.
    """
    # remove the prefix and extract the S3 bucket, folder, and file name
    m = re.match(S3_PREFIX, aws_path)
    split = aws_path[len(m.group()):].split('/')
    s3_bucket = split.pop(0)  # first path segment is the bucket name
    s3_folder = '/'.join(split[:-1])  # middle segments form the key prefix
    keyname = split[-1]  # final segment is the object/file name

    # create the local folder if necessary
    if local_folder is not None:
        ensure_directory(local_folder)
        path = os.path.join(local_folder, keyname)
    else:
        path = keyname

    if os.path.isfile(path):
        # Lazy %-style logger args: message is only formatted when the level
        # is enabled (the original formatted eagerly with '%').
        logger.warning('file %s already exists!', path)
        return path

    conn = S3Connection(aws_config.access_key, aws_config.secret_key)
    bucket = conn.get_bucket(s3_bucket)

    # the key inside the bucket includes the folder prefix, if any
    if s3_folder:
        aws_keyname = os.path.join(s3_folder, keyname)
    else:
        aws_keyname = keyname

    logger.debug('downloading data from S3...')
    s3key = Key(bucket)
    s3key.key = aws_keyname
    s3key.get_contents_to_filename(path)
    logger.info('file saved at %s', path)

    return path
コード例 #9
0
def delete_product_image(product):
    """Delete a product's image, thumbnail, and watermarked copy from S3.

    Removes ``products/<name>``, ``products/thumbnails/<name>``, and
    ``products/watermarked/<name>`` from the configured bucket.
    """
    conn = S3Connection(settings.AWS_ACCESS_KEY_ID, settings.AWS_SECRET_ACCESS_KEY)

    # Stored values are URLs; the S3 object name is the last path segment.
    # (Negative indexing replaces the original `lst[len(lst) - 1]` pattern.)
    filename1 = product.image_url.split("/")[-1]
    filename2 = product.thumbnail.url.split("/")[-1]
    filename3 = product.watermark.url.split("/")[-1]

    b = Bucket(conn, settings.S3_BUCKET)
    k = Key(b)

    k.key = 'products/' + filename1
    b.delete_key(k)

    k.key = 'products/thumbnails/' + filename2
    b.delete_key(k)

    k.key = 'products/watermarked/' + filename3
    b.delete_key(k)
コード例 #10
0
def delete_from_s3(image_name):
    """Delete image from S3 bucket"""
    connection = S3Connection(aws_access_key_id, aws_secret_access_key)
    repository = Bucket(connection, "shopifyimagerepository")
    target = Key(repository)
    target.key = image_name
    repository.delete_key(target)
コード例 #11
0
ファイル: base.py プロジェクト: openvenues/common_crawl
    def mapper(self, _, line):
        """MRJob mapper: fetch a Common Crawl WARC file named by ``line`` from
        the public S3 dataset bucket (up to 10 attempts), then yield the
        key/value pairs produced by process_record() for each response record.
        """
        # Input may be "score\tpath"; keep only the path portion.
        line = line.rstrip().split('\t', 1)[-1]
        
        filename = line.rsplit('/', 1)[-1]
        first_rec = None  # NOTE(review): assigned but never used
        f = open(filename, 'w')  # NOTE(review): text mode for WARC (binary) data -- 'wb' looks intended; confirm
        for i in xrange(10):
            try:
                conn = boto.connect_s3(anon=True)  # anonymous access: public dataset
                bucket = conn.get_bucket('aws-publicdatasets')
                key = Key(bucket, line)
                key.get_contents_to_file(f)
                f.close()
                records = warc.WARCFile(fileobj=gzip.open(filename, 'rb'))
                break
            except Exception as e:
                # Retry on any failure (network, S3, gzip open); note f may
                # already hold partial data from the failed attempt.
                continue
        else:
            # for/else: all 10 attempts failed without a break
            logger.error('10 attempts to get file {} failed, skipping...'.format(filename))
            return
 
        try:
            for i, record in enumerate(records):
                if record.type != 'response':
                    # Drain the payload so the WARC stream stays aligned.
                    _ = record.payload.read()
                    continue
                for key, value in self.process_record(record):
                    yield key, value
                self.increment_counter('commoncrawl', 'processed_records', 1)
        except Exception:
            logger.error(traceback.format_exc())
            self.increment_counter('errors', 'general', 1)
        finally:
            # Close (double-close is harmless) and remove the local copy.
            f.close()
            os.unlink(filename)
コード例 #12
0
ファイル: views.py プロジェクト: lheston/lab1
def screenshot(request,val):
	"""Screenshot a URL with PhantomJS and push it to S3 (val == 1), or delete
	a previously uploaded screenshot (val == 2).

	request: the URL to capture (also used to build the S3 key).
	Returns the uploaded object's display name, or 'incorrect input'.
	"""
	if val == 1 :
		conn = S3Connection('##', '##')
		bucket = conn.get_bucket('lheston-bucket')
		k = Key(bucket)
		k.key = '//lab3' + request + '_toS3.png'
		driver = webdriver.PhantomJS() # or add to your PATH
		driver.set_window_size(1024, 768) # optional
		driver.get(request)
		driver.save_screenshot('tempfile.png')
		# BUG FIX: the original had bare `driver.quit` (twice) -- an attribute
		# access that never actually quit the browser. Call it once, here.
		driver.quit()
		# Upload first, then delete: the original removed the file while the
		# handle was still open (works only on POSIX) and leaked the handle.
		with open('tempfile.png', 'rb') as file1:
			k.set_contents_from_file(file1)
		os.remove('tempfile.png')
		return str(request + '_toS3.png')
	elif val == 2:
		text = '/lab3' + request
		conn = S3Connection('##', '##')
		S3_BUCKET_NAME = 'lheston-bucket'
		bucket = Bucket(conn, S3_BUCKET_NAME)
		bucket = bucket.delete_key(text)
	else:
		return str('incorrect input')
コード例 #13
0
    def delete_file(self, data, suffix=''):
        """ Handle file deletion requests. For this, we use the Amazon Python SDK,
        boto.

        data: request payload; data['file_id'] identifies the FileAndUrl record
        whose S3 object and database row are both removed.
        """
        from .models import FileAndUrl
        boto.set_stream_logger('boto')  # route boto's debug output to the 'boto' logger
        S3 = S3Connection(settings.AWS_ACCESS_KEY_ID,
                          settings.AWS_SECRET_ACCESS_KEY)
        if boto:  # NOTE(review): always truthy -- boto was already used above; guard looks vestigial
            file_id = data.get('file_id', None)
            bucket_name = self.s3_bucket
            aws_bucket = S3.get_bucket(bucket_name, validate=False)  # validate=False skips a HEAD round-trip

            fileuploader = FileAndUrl()
            log.info(u"fileuploader.get_file_path(file_id)%s",
                     fileuploader.get_file_path(file_id))
            #Delete for S3
            file_key = Key(aws_bucket, fileuploader.get_file_path(file_id))
            file_key.delete()
            #Delete from db
            fileuploader.delete_record(file_id)

            return
        else:
            return
コード例 #14
0
def test_upload_and_download_with_encryption(tmpdir):
    """Round-trip test: s3am-upload an encrypted random file, download it via
    the public HTTPS endpoint with the same key, and compare bytes.

    Requires AWS credentials, the s3am tool, and write access to the
    cgl-driver-projects bucket. Uses a fixed key ('test/upload_file'); cleanup
    only runs when the assertions pass (no try/finally here).
    """
    from toil_scripts.lib.urls import s3am_upload
    from toil_scripts.lib.urls import download_url
    from boto.s3.connection import S3Connection, Bucket, Key
    work_dir = str(tmpdir)
    # Create temporary encryption key
    key_path = os.path.join(work_dir, 'foo.key')
    subprocess.check_call(['dd', 'if=/dev/urandom', 'bs=1', 'count=32',
                           'of={}'.format(key_path)])
    # Create test file
    upload_fpath = os.path.join(work_dir, 'upload_file')
    with open(upload_fpath, 'wb') as fout:
        fout.write(os.urandom(1024))
    # Upload file
    s3_dir = 's3://cgl-driver-projects/test'
    s3am_upload(fpath=upload_fpath, s3_dir=s3_dir, s3_key_path=key_path)
    # Download the file
    url = 'https://s3-us-west-2.amazonaws.com/cgl-driver-projects/test/upload_file'
    download_url(url=url, name='download_file', work_dir=work_dir, s3_key_path=key_path)
    download_fpath = os.path.join(work_dir, 'download_file')
    assert os.path.exists(download_fpath)
    assert filecmp.cmp(upload_fpath, download_fpath)
    # Delete the Key
    conn = S3Connection()
    b = Bucket(conn, 'cgl-driver-projects')
    k = Key(b)
    k.key = 'test/upload_file'
    k.delete()
コード例 #15
0
def upload_to_s3(fp, name):
    """Store the file-like object ``fp`` under ``name`` in the 'muxlist'
    bucket and return the object's public URL."""
    connection = _get_s3_connection()
    bucket = connection.create_bucket('muxlist')
    entry = Key(bucket)
    entry.key = name
    entry.set_contents_from_file(fp)
    return 'http://muxlist.s3.amazonaws.com/%s' % name
コード例 #16
0
def handle_DELETE(request):
    """ Handle file deletion requests. For this, we use the Amazon Python SDK,
    boto.

    Reads 'bucket' and 'key' from request.POST; returns HTTP 200 on success,
    500 on any failure.
    """
    try:

        boto.set_stream_logger('boto')
        S3 = S3Connection(aws_access_key_id=settings.AWS_ACCESS_KEY_ID,
                          aws_secret_access_key=settings.AWS_SECRET_ACCESS_KEY)
    except ImportError:
        # NOTE(review): nothing in the try block performs an import, so this
        # branch looks unreachable -- presumably a leftover from a lazy
        # `import boto` that was moved to module level. Confirm before removing.
        print("Could not import boto, the Amazon SDK for Python.")
        print("Deleting files will not work.")
        print("Install boto with")
        print("$ pip install boto")
        return make_response(500)

    bucket_name = request.POST.get('bucket')
    key_name = request.POST.get('key')
    try:
        aws_bucket = S3.get_bucket(bucket_name, validate=False)  # validate=False skips a HEAD round-trip
        aws_key = Key(aws_bucket, key_name)
        aws_key.delete()
        return make_response(200)
    except Exception as err:
        print(err)
        return make_response(500)
コード例 #17
0
ファイル: routes.py プロジェクト: RishavT/cube26musicapp
def delete():
	"""Delete a song (and all its votes) from the database, then best-effort
	delete its MP3 from S3. Always renders the success notice page."""
	songid = int(request.args.get('songid'))

	song = Song.query.filter_by(id=songid).first()
	votes = Vote.query.filter_by(songdata=song.songdata).all()
	for x in votes:
		db.session.delete(x)
	db.session.commit()
	db.session.delete(song)
	db.session.commit()

	try:
		conn = S3Connection(AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY)
		b = Bucket(conn, S3_BUCKET_NAME)
		k = Key(b)
		# BUG FIX: `songdata` was an undefined name; the resulting NameError was
		# swallowed by the bare except, so the S3 object was never deleted.
		# Use the deleted song's own songdata field.
		k.key = song.songdata.lower() + '.mp3'
		b.delete_key(k)
	except:
		# Best-effort: the DB rows are already gone even if S3 cleanup fails.
		pass
	return render_template('notice.html', message="Delete successful.", redirect="/")
コード例 #18
0
ファイル: models.py プロジェクト: mhfowler/howdoispeak_django
 def getGroupsTrackerKey(self):
     """Return the groups-tracker Key from the HDIS bucket, building a fresh
     (not yet stored) Key with the expected name when none exists."""
     name = self.getGroupsTrackerKeyName()
     bucket = getHDISBucket()
     tracker = bucket.get_key(name)
     if tracker:
         return tracker
     # No such object yet: point a new Key at the expected name.
     fresh = Key(bucket)
     fresh.key = name
     return fresh
コード例 #19
0
    def uploadStrToS3(self, destDir, filename, contents):
        '''Uploads a string to an S3 file.

        destDir: key prefix inside the bucket; filename: object name;
        contents: string body, stored with the reduced-redundancy class.
        '''
        print 'Uploading %s to Amazon S3 bucket %s' % (filename, self.bucket_name)

        k2 = Key(self.bucket)
        k2.key = os.path.join(destDir, filename)
        k2.set_contents_from_string(contents, reduced_redundancy=True)
        print  # This newline is needed to get the path of the compiled binary printed on a newline.
コード例 #20
0
ファイル: models.py プロジェクト: mhfowler/howdoispeak_django
 def getGroupsTrackerKey(self):
     """Look up the groups-tracker Key in the HDIS bucket; if it does not
     exist yet, return a new Key object pointed at the expected name."""
     key_name = self.getGroupsTrackerKeyName()
     hdis_bucket = getHDISBucket()
     existing = hdis_bucket.get_key(key_name)
     if not existing:
         # Not stored yet: construct an unsaved Key with the right name.
         existing = Key(hdis_bucket)
         existing.key = key_name
     return existing
コード例 #21
0
 def clean_s3(self):
     """Delete ``self.file_to_clean`` (with its leading slash stripped) from
     the configured S3 bucket; a None value means nothing to clean."""
     if self.file_to_clean is None:
         return
     from boto.s3.connection import S3Connection, Bucket, Key
     connection = S3Connection(cconfig.S3_ACCESS_KEY, cconfig.S3_SECRET_KEY)
     target_bucket = connection.get_bucket(cconfig.S3_BUCKET)
     entry = Key(target_bucket)
     entry.key = self.file_to_clean[1:]  # drop the leading '/'
     target_bucket.delete_key(entry)
コード例 #22
0
def WriteDataStringtoS3(string, game, msg_type, S3_bucket):
    """Write ``string`` to a timestamped log object in ``S3_bucket`` at
    /data/<game>/<msg_type>/<YYYYMMDD>/<HH-MM-SS>-logs.txt."""
    # Capture "now" once: the original called datetime.now() twice, so the
    # date and time components could disagree if the calls straddled midnight.
    now = datetime.now()
    today_YYYMMDD = now.strftime('%Y%m%d')
    today_hhmmss = now.strftime('%H-%M-%S')
    S3_path = '/data/' + game + '/' + msg_type + '/' + today_YYYMMDD + '/' + today_hhmmss + '-logs.txt'

    k = Key(S3_bucket)
    k.key = S3_path
    # Reduced-redundancy storage class: cheaper for reproducible log data.
    k.set_contents_from_string(string, reduced_redundancy=True)
コード例 #23
0
ファイル: models.py プロジェクト: Gorillaz322/RozetkaParser
    def save_product_image_to_s3(self):
        """Fetch this product's image from ``self.img`` and upload it to S3
        under ``self.slug``, skipping slugs already present in BUCKET_LIST."""
        # Plain membership test replaces the original any([key == self.slug
        # for key in BUCKET_LIST]) scan -- same result, no throwaway list.
        if self.slug in BUCKET_LIST:
            return

        k = Key(bucket)
        k.key = self.slug
        file_object = urllib2.urlopen(self.img)
        fp = StringIO.StringIO(file_object.read())
        k.set_contents_from_file(fp)
コード例 #24
0
ファイル: models.py プロジェクト: dsimandl/teamsurmandl
def delete_img_aws(instance, **kwargs):
    """Remove an instance's stored image and its thumbnail from the S3
    storage bucket (signal-handler style: extra kwargs are ignored)."""
    connection = S3Connection(settings.AWS_ACCESS_KEY_ID, settings.AWS_SECRET_ACCESS_KEY)
    storage = Bucket(connection, settings.AWS_STORAGE_BUCKET_NAME)
    # Delete the full-size image first, then the thumbnail.
    for stored_name in (instance.image.name, instance.image_thumb.name):
        entry = Key(storage)
        entry.key = stored_name
        storage.delete_key(entry)
コード例 #25
0
    def uploadFileToS3(self, filename):
        '''Uploads file to S3.

        The object is stored at the bucket root under the file's basename,
        using the reduced-redundancy storage class.
        '''
        destDir = ''  # Root folder of the S3 bucket
        destpath = os.path.join(destDir, os.path.basename(filename))
        print 'Uploading %s to Amazon S3 bucket %s' % (filename, self.bucket_name)

        k = Key(self.bucket)
        k.key = destpath
        k.set_contents_from_filename(filename, reduced_redundancy=True)
コード例 #26
0
ファイル: s3cache.py プロジェクト: pwnfuzzsec/funfuzz
    def uploadStrToS3(self, destDir, filename, contents):  # pylint: disable=invalid-name,missing-param-doc
        # pylint: disable=missing-type-doc
        """Upload a string to an S3 file."""
        print(f"Uploading (unknown) to Amazon S3 bucket {self.bucket_name}")

        entry = Key(self.bucket)  # pylint: disable=invalid-name
        entry.key = os.path.join(destDir, filename)
        entry.set_contents_from_string(contents, reduced_redundancy=True)
        # Blank line so the compiled binary's path is printed on its own line.
        print()
コード例 #27
0
	def pull_s3_file(self, bucket, key, dst):
		"""
		Get a file from an S3 bucket

		Downloads object ``key`` from ``bucket`` into local file ``dst``.
		"""
		connection = boto.connect_s3(self.aws_id, self.aws_key)
		target_bucket = connection.create_bucket(bucket)
		remote = Key(target_bucket)
		remote.key = key
		remote.get_contents_to_filename(dst)
コード例 #28
0
ファイル: models.py プロジェクト: kershner/gifcache
def user_thumbnail_delete(sender, instance, **kwargs):
    """Pre-delete signal handler: remove the Gif's thumbnail object from the
    'thumbs/' prefix of the S3 storage bucket."""
    logging.debug('Firing pre-delete signal...')
    gif = get_object_or_404(Gif, pk=instance.id)
    thumb_path = str(gif.thumbnail)
    # The object name is everything after the final '/' (the whole string
    # when there is no '/').
    filename = thumb_path.rsplit('/', 1)[-1]
    s3conn = S3Connection(settings.AWS_ACCESS_KEY_ID, settings.AWS_SECRET_ACCESS_KEY)
    storage = Bucket(s3conn, settings.AWS_STORAGE_BUCKET_NAME)
    entry = Key(storage)
    entry.key = 'thumbs/' + filename
    storage.delete_key(entry.key)
コード例 #29
0
def delete_image_from_s3(file_name):
    """Best-effort delete of ``file_name`` from the S3 bucket.

    Errors are logged and never raised, preserving the original fire-and-forget
    contract.
    """
    try:
        conn = S3Connection(AWS_ACCESS_KEY, AWS_SECRET_KEY)
        logging.info("success s3 connection")
        bucket = Bucket(conn, BUCKET)
        k = Key(bucket=bucket, name=file_name)
        k.delete()
        logging.info("success delete image from s3")
    except Exception as e:
        # Log at ERROR with traceback: the original used logging.debug(e),
        # which hid delete failures under the default (WARNING) log level.
        logging.exception(e)
コード例 #30
0
ファイル: s3cache.py プロジェクト: pwnfuzzsec/funfuzz
    def uploadFileToS3(self, filename):  # pylint: disable=invalid-name,missing-param-doc,missing-type-doc
        """Upload file to S3."""
        # Store at the bucket root under the file's basename.
        destDir = ""  # pylint: disable=invalid-name
        destination = os.path.join(destDir, os.path.basename(filename))
        print(f"Uploading (unknown) to Amazon S3 bucket {self.bucket_name}")

        entry = Key(self.bucket)
        entry.key = destination
        entry.set_contents_from_filename(filename, reduced_redundancy=True)
コード例 #31
0
ファイル: __init__.py プロジェクト: aparrish/mydinosaur
	def transfer_files(self):
		"""Upload every file in self.files to the configured S3 bucket,
		keyed by its basename."""
		from boto.s3.connection import S3Connection
		from boto.s3.connection import Key
		connection = S3Connection(self.extra_args['aws_access_key'],
				self.extra_args['aws_secret_key'])
		target = connection.get_bucket(self.extra_args['s3_bucket'])
		for fname in self.files:
			entry = Key(target)
			entry.key = os.path.basename(fname)
			entry.set_contents_from_filename(fname)
コード例 #32
0
 def transfer_files(self):
     """Push each file in self.files up to the S3 bucket named in
     self.extra_args, using the file's basename as the object key."""
     from boto.s3.connection import S3Connection
     from boto.s3.connection import Key
     connection = S3Connection(self.extra_args['aws_access_key'],
                               self.extra_args['aws_secret_key'])
     destination = connection.get_bucket(self.extra_args['s3_bucket'])
     for path in self.files:
         obj = Key(destination)
         obj.key = os.path.basename(path)
         obj.set_contents_from_filename(path)
コード例 #33
0
ファイル: s3cache.py プロジェクト: jruderman/funfuzz
    def uploadFileToS3(self, filename):
        """Uploads file to S3.

        The object lands at the bucket root under the file's basename, using
        the reduced-redundancy storage class.
        """
        destDir = ""  # Root folder of the S3 bucket
        destpath = os.path.join(destDir, os.path.basename(filename))
        print "Uploading %s to Amazon S3 bucket %s" % (filename, self.bucket_name)

        k = Key(self.bucket)
        k.key = destpath
        k.set_contents_from_filename(filename, reduced_redundancy=True)
        print  # This newline is needed to get the path of the compiled binary printed on a newline.
コード例 #34
0
ファイル: s3cache.py プロジェクト: lovesuae/funfuzz
    def uploadFileToS3(self, filename):
        '''Uploads file to S3.

        The object lands at the bucket root under the file's basename, using
        the reduced-redundancy storage class.
        '''
        destDir = ''  # Root folder of the S3 bucket
        destpath = os.path.join(destDir, os.path.basename(filename))
        print 'Uploading %s to Amazon S3 bucket %s' % (filename, self.bucket_name)

        k = Key(self.bucket)
        k.key = destpath
        k.set_contents_from_filename(filename, reduced_redundancy=True)
        print  # This newline is needed to get the path of the compiled binary printed on a newline.
コード例 #35
0
 def store_image(self,
                 callback,
                 image_id,
                 request,
                 body=None,
                 filename=None,
                 **kwargs):
     """Write ``body`` to S3 under ``image_id`` and invoke ``callback`` with
     a signed URL that is valid for HOUR seconds."""
     target_bucket = self._get_bucket()
     stored = Key(target_bucket, image_id)
     stored.set_contents_from_file(body)
     callback(stored.generate_url(HOUR))
コード例 #36
0
    def post(self, request):
        """Delete one AIAttachment's S3 object and its database record, then
        answer with a JSON success flag."""
        attachment = AIAttachment.objects.get(id=request.POST['id'])

        key = Key(self._bucket)
        key.key = attachment.attachment
        self._bucket.delete_key(key)

        attachment.delete()
        return JsonResponse({'ok': True})
コード例 #37
0
    def perform_destroy(self, instance):
        """Remove both footscan STL objects from the S3 bucket, then delete
        the model instance itself."""
        conn = S3Connection(settings.AWS_ACCESS_KEY_ID, settings.AWS_SECRET_ACCESS_KEY, host='s3.ap-northeast-2.amazonaws.com')
        storage = Bucket(conn, settings.AWS_STORAGE_BUCKET_NAME)

        # Left scan first, then right -- mirrors the original order.
        for stl_field in (instance.left_footscan_stl, instance.right_footscan_stl):
            entry = Key(storage)
            entry.key = str(stl_field)
            storage.delete_key(entry)

        instance.delete()
コード例 #38
0
def WriteStringtoS3(string, game, msg_type):
    """Write ``string`` to a timestamped log object in the account's game-data
    bucket at <env>/data/<game>/<msg_type>/<YYYYMMDD>/<HH-MM-SS>-logs.txt."""
    # Capture "now" once: the original called datetime.now() twice, so the
    # date and time parts could disagree around midnight. Also dropped the
    # self-reassignment of `game` from the original tuple unpack.
    now = datetime.now()
    S3_path = (g_env + '/data/' + game + '/' + msg_type + '/' +
               now.strftime('%Y%m%d') + '/' + now.strftime('%H-%M-%S') + '-logs.txt')
    S3_bucket = 'dailydosegames-gamedata-' + g_AWSAccessKeyId.lower()

    conn = S3Connection(g_AWSAccessKeyId, g_AWSSecretKey)
    bucket = conn.get_bucket(S3_bucket)
    k = Key(bucket)
    k.key = S3_path
    k.set_contents_from_string(string, reduced_redundancy=True)
コード例 #39
0
ファイル: s3.py プロジェクト: jupiny/EnglishDiary
def delete_file_from_s3(filename):
    """Delete ``filename`` from the project's S3 storage bucket."""
    connection = S3Connection(
        settings.AWS_ACCESS_KEY_ID,
        settings.AWS_SECRET_ACCESS_KEY,
    )
    storage = Bucket(
        connection,
        settings.AWS_STORAGE_BUCKET_NAME,
    )
    entry = Key(storage)
    entry.key = filename
    storage.delete_key(entry)
コード例 #40
0
ファイル: tasks.py プロジェクト: Prasadraj6897/04itpacs
def s3_delete_image(data):
    """Delete an uploaded image from S3 using the credentials and paths in
    ``data``. On failure the exception object is returned (not raised), so
    callers must inspect the result."""
    try:
        from boto.s3.connection import S3Connection, Bucket, Key
        connection = S3Connection(data['S3_KEY'], data['S3_SECRET'])
        target = Bucket(connection, data['S3_BUCKET'])
        entry = Key(target)
        entry.key = data['S3_UPLOAD_DIRECTORY'] + '/' + data['destinationFileName']
        target.delete_key(entry)
    except Exception as e:
        return e
コード例 #41
0
ファイル: views.py プロジェクト: ciaron/djangofine
def handle_DELETE(request):
    """ Handle file deletion requests. For this, we use the Amazon Python SDK,
    boto.

    Reads 'bucket' and 'key' from the request; 200 on success, 500 when
    boto is unavailable.
    """
    if not boto:
        return make_response(500)
    bucket_name = request.REQUEST.get('bucket')
    key_name = request.REQUEST.get('key')
    aws_bucket = S3.get_bucket(bucket_name, validate=False)
    Key(aws_bucket, key_name).delete()
    return make_response(200)
コード例 #42
0
ファイル: s3utils.py プロジェクト: dgl1230/forfriends
def delete_s3_pic(user, image):
	"""Delete the S3 object backing ``image`` from the media/ prefix of the
	storage bucket. ``user`` is accepted for signature compatibility."""
	connection = S3Connection(AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY)
	storage = Bucket(connection, AWS_STORAGE_BUCKET_NAME)
	entry = Key(storage)
	entry.key = 'media/%s' % (image.image)
	storage.delete_key(entry)
	return 
コード例 #43
0
def WriteStringtoS3(string, game, msg_type):
    """Write ``string`` to a timestamped log object in the account's game-data
    bucket at <env>/data/<game>/<msg_type>/<YYYYMMDD>/<HH-MM-SS>-logs.txt."""
    # Capture "now" once: the original called datetime.now() twice, so the
    # date and time components could disagree if the calls straddled midnight.
    # The original `env, game, msgtype = g_env, game, msg_type` unpack
    # (which reassigned `game` to itself) is folded away.
    now = datetime.now()
    S3_path = (g_env + '/data/' + game + '/' + msg_type + '/' +
               now.strftime('%Y%m%d') + '/' + now.strftime('%H-%M-%S') + '-logs.txt')
    S3_bucket = 'dailydosegames-gamedata-' + g_AWSAccessKeyId.lower()

    conn = S3Connection(g_AWSAccessKeyId, g_AWSSecretKey)
    bucket = conn.get_bucket(S3_bucket)
    k = Key(bucket)
    k.key = S3_path
    k.set_contents_from_string(string, reduced_redundancy=True)
コード例 #44
0
    def post(self, request):
        """Delete every attachment belonging to an AIPics record (removing each
        S3 object first), then delete the record and answer with JSON."""
        key = Key(self._bucket)
        pics_id = request.POST['id']

        # One reused Key object: only its name changes per attachment.
        for attachment in AIAttachment.objects.filter(ai_pics_id=pics_id):
            key.key = attachment.attachment
            self._bucket.delete_key(key)

        AIPics.objects.get(id=pics_id).delete()
        return JsonResponse({'ok': True})
コード例 #45
0
def handle_DELETE(request):
    """ Handle file deletion requests. For this, we use the Amazon Python SDK,
    boto.

    500 when boto is unavailable; otherwise deletes the named key and
    returns 200.
    """
    if boto:
        target = S3.get_bucket(request.REQUEST.get('bucket'), validate=False)
        entry = Key(target, request.REQUEST.get('key'))
        entry.delete()
        return make_response(200)
    return make_response(500)
コード例 #46
0
ファイル: UpdateDB.py プロジェクト: mdjukic/repos
def move_file(from_filename, to_filename, bucket):
    """ Move file between 'folders' in a bucket """
    connection = boto.connect_s3()
    processing_bucket = connection.get_bucket(bucket)

    # Server-side copy to the new name, then remove the original object.
    processing_bucket.copy_key(to_filename, processing_bucket.name,
                               from_filename)

    original = Key(processing_bucket)
    original.key = from_filename
    processing_bucket.delete_key(original)
コード例 #47
0
	def push_s3_file(self, bucket, src=None, key=None):
		"""
		Upload a file to an S3 bucket

		src defaults to the current working file; key defaults to src's
		basename. Updates and returns self.working_file as 'bucket/key'.
		"""
		if not src:
			src = self.working_file
		if not key:
			key = os.path.basename(src)
		connection = boto.connect_s3(self.aws_id, self.aws_key)
		target = connection.create_bucket(bucket)
		entry = Key(target)
		entry.key = key
		entry.set_contents_from_filename(src)
		self.working_file = '%s/%s' % (bucket, key)
		return self.working_file
コード例 #48
0
def prop(request, prop_id):
    """REST-style view for a single Prop.

    GET returns the prop's metadata as JSON (404 if absent); DELETE detaches
    the prop from all scenes, removes its image file (local disk or S3
    depending on settings.USE_AWS), and deletes the DB row; PUT is not
    implemented; other methods get 405.
    """
    if request.method == "GET":
        domain = request.get_host()  # NOTE(review): unused
        # GET - READ
        try:
            prop = Prop.objects.get(id=prop_id)
            response_data = {
                "success": True,
                "prop": {
                    "id": prop.id,
                    "name": prop.name,
                    "description": prop.description,
                    "url": prop.image.url
                }
            }
            return HttpResponse(json.dumps(response_data),
                                content_type="application/json")
        except ObjectDoesNotExist:
            return HttpResponse(status=404)
    elif request.method == "PUT":
        # PUT - UPDATE - later
        pass
    elif request.method == "DELETE":
        prop = Prop.objects.get(id=prop_id)
        # Unset From all scenes and delete scene_prop
        scene_props = SceneProp.objects.filter(prop_file=prop)
        for scene_prop in scene_props:
            scene = scene_prop.scene
            scene_prop.delete()
            scene.save()  # re-save the scene so it no longer references the prop
        # Delete File
        if prop.image:
            if not settings.USE_AWS and prop.image.path:
                # Delete from MEDIA_ROOT
                os.remove(prop.image.path)
            elif settings.USE_AWS and prop.image.name:
                # Delete from AWS S3
                connection = S3Connection(settings.AWS_ACCESS_KEY_ID,
                                          settings.AWS_SECRET_ACCESS_KEY)
                bucket = Bucket(connection, settings.AWS_STORAGE_BUCKET_NAME)
                fileKey = Key(bucket)
                fileKey.key = prop.image.name
                bucket.delete_key(fileKey)
        # Delete From Database
        prop.delete()
        response_data = {"success": True}
        return HttpResponse(json.dumps(response_data),
                            content_type="application/json")
    else:
        return HttpResponseNotAllowed(['GET', 'PUT', 'DELETE'])
    # Fall-through for PUT (and any branch that didn't return above).
    return HttpResponse("API call for prop #" + prop_id)
コード例 #49
0
def upload():
    """Upload input_links_a.txt to the distributed-web-crawler bucket as
    list_links_a.txt, then remove the local file and close the connection."""
    connection = s3()

    crawl_bucket = Bucket(connection, 'distributed-web-crawler')

    entry = Key(crawl_bucket)
    entry.key = 'list_links_a.txt'
    entry.set_contents_from_filename('input_links_a.txt')

    os.remove('input_links_a.txt')

    connection.close()
コード例 #50
0
ファイル: common.py プロジェクト: mhfowler/howdoispeak_django
def getOrCreateS3Key(key_name):
    """Fetch ``key_name`` from the HDIS bucket and parse its JSON body.

    Returns a (key, dict) pair. If the object is missing or unreadable, the
    key is a fresh (unsaved) Key and the dict is empty; a body that is not
    valid JSON also yields an empty dict.
    """
    bucket = getHDISBucket()
    try:
        key = bucket.get_key(key_name)
        raw = key.get_contents_as_string()
    except:
        # get_key returned None (AttributeError) or the fetch failed:
        # fall back to a brand-new Key with an empty JSON body.
        key = Key(bucket)
        key.key = key_name
        raw = json.dumps({})
    try:
        parsed = json.loads(raw)
    except:
        parsed = {}
    return key, parsed
コード例 #51
0
ファイル: __init__.py プロジェクト: cknv/beetle-s3uploader
    def upload(self):
        """Upload each generated file to the bucket with its content type,
        the configured custom headers, and gzip content-encoding metadata
        when the payload is compressed."""
        for destination, data, content_type, compressed in self.get_files():
            entry = Key(self.bucket)
            entry.content_type = content_type
            if compressed:
                entry.set_metadata('content-encoding', 'gzip')

            for header, value in self.headers:
                entry.set_metadata(header, value)
            entry.key = destination
            entry.set_contents_from_string(data)
コード例 #52
0
ファイル: app.py プロジェクト: 9px/server-examples
def s3_delete(key=None):
    """Route for deleting files off S3. Uses the SDK.

    Reads ``bucket`` and ``key`` from the request payload and deletes that
    object, responding 200 on success and 500 when boto is not installed.
    NOTE(review): the *key* URL parameter is accepted but unused — the key
    name comes from the request values; kept for route compatibility.
    """
    # Keep the try body minimal: only the import can raise ImportError,
    # which is the sole exception this handler is meant to catch.
    try:
        from boto.s3.connection import Key, S3Connection
    except ImportError:
        abort(500)
    S3 = S3Connection(app.config.get("AWS_SERVER_PUBLIC_KEY"),
        app.config.get("AWS_SERVER_SECRET_KEY"))
    request_payload = request.values
    bucket_name = request_payload.get('bucket')
    key_name = request_payload.get('key')
    # validate=False skips the extra existence check on the bucket.
    aws_bucket = S3.get_bucket(bucket_name, validate=False)
    aws_key = Key(aws_bucket, key_name)
    aws_key.delete()
    return make_response('', 200)
コード例 #53
0
ファイル: RMS_S3.py プロジェクト: GaryPate/Luigi-Plotting
    def run(self):
        """Drain the 'rmsapi' S3 bucket into two CSV outputs, then delete the keys.

        Lists every object in the bucket; once more than 59 files have
        accumulated, parses each file into an events frame or an incidents
        frame (dispatched on the key prefix), writes both frames to this
        task's two outputs, and finally deletes all listed keys from S3.
        NOTE(review): boto3Conn/s3open/s3delete and the parse* helpers are
        defined elsewhere — their exact semantics are assumed, not verified
        here.
        """
        s3 = boto3Conn()
        # Full listing of keys currently in the bucket (used for the
        # threshold check and for the final delete pass).
        s3content = s3.list_objects_v2(Bucket='rmsapi')['Contents']
        s3files = [s['Key'] for s in s3content]
        # Switch to a resource-style handle for streaming object bodies.
        s3 = s3open()
        s3read = s3.Bucket('rmsapi')

        if len(s3files) > 59:
            # Creates data frames and assigns columns
            E_cols = ['id', 'etype', 'diff', 'lon', 'lat', 'bear', 'road', 'dat']
            I_cols = ['id', 'lon', 'lat', 'itype', 'street', 'dat_str', 'dat_end', 'ended']
            df_outE = pd.DataFrame(columns=E_cols)
            df_outI = pd.DataFrame(columns=I_cols)
            timelist = []

            # Iterates over files and stores the timestamp of the file
            for file in s3read.objects.all():
                key = file.key
                body = file.get()['Body'].read()
                # Key format is assumed to be '<prefix>_<timestamp>' with '+'
                # standing in for ':' in the timestamp — TODO confirm.
                timestamp = re.findall('[^_]+', key)
                timestamp = timestamp[1].replace('+', ':')
                timelist.append(timestamp)

                # Different methods for event and incident files
                if key.startswith('event'):
                    df_outE = parseEvent(body, timestamp, df_outE)
                if key.startswith('inciden'):
                    df_outI = parseIncident(body, timestamp, df_outI)

            # Output to csv file
            with self.output()[0].open('w') as csvfile:
                df_outE['diff'] = df_outE['diff'].astype(int)
                df_outE['id'] = pd.to_numeric(df_outE['id'])
                df_outE.to_csv(csvfile, columns=E_cols, index=False)

            with self.output()[1].open('w') as csvfile:
                # Create an end time stamp for data that has not actually ended yet for Gantt file
                df_outI = endIncidents(timelist, df_outI)
                df_outI['id'] = df_outI['id'].astype(int)
                df_outI.to_csv(csvfile, columns=I_cols, index=False)

            s3del = Bucket(s3delete(),'rmsapi')
            k = Key(s3del)

            # Delete all files in S3 folder
            for file in s3files:
                k.key = file
                s3del.delete_key(k)
コード例 #54
0
def prop(request, prop_id):
    """REST-style endpoint for a single Prop.

    GET returns the prop serialized as JSON (404 if missing). DELETE
    detaches the prop from every scene, removes its image file (from
    MEDIA_ROOT or S3 depending on ``settings.USE_AWS``) and deletes the
    row. PUT is not implemented yet; other methods get a 405.
    """
    if request.method == "GET":
        # GET - READ
        try:
            prop = Prop.objects.get(id=prop_id)
        except ObjectDoesNotExist:
            return HttpResponse(status=404)
        response_data = {
            "success": True,
            "prop": {
                "id": prop.id,
                "name": prop.name,
                "description": prop.description,
                "url": prop.image.url
            }
        }
        return HttpResponse(json.dumps(response_data), content_type="application/json")
    elif request.method == "PUT":
        # PUT - UPDATE - later
        pass
    elif request.method == "DELETE":
        try:
            prop = Prop.objects.get(id=prop_id)
        except ObjectDoesNotExist:
            # Consistent with GET: unknown ids yield 404, not a 500.
            return HttpResponse(status=404)
        # Unset from all scenes and delete each scene_prop link.
        for scene_prop in SceneProp.objects.filter(prop_file=prop):
            scene = scene_prop.scene
            scene_prop.delete()
            scene.save()
        # Delete the backing image file.
        if prop.image:
            if not settings.USE_AWS and prop.image.path:
                # Delete from MEDIA_ROOT
                os.remove(prop.image.path)
            elif settings.USE_AWS and prop.image.name:
                # Delete from AWS S3
                connection = S3Connection(settings.AWS_ACCESS_KEY_ID, settings.AWS_SECRET_ACCESS_KEY)
                bucket = Bucket(connection, settings.AWS_STORAGE_BUCKET_NAME)
                fileKey = Key(bucket)
                fileKey.key = prop.image.name
                bucket.delete_key(fileKey)
        # Delete From Database
        prop.delete()
        return HttpResponse(json.dumps({"success": True}), content_type="application/json")
    else:
        return HttpResponseNotAllowed(['GET', 'PUT', 'DELETE'])
    # Reached only for PUT, which is not implemented yet.
    return HttpResponse("API call for prop #" + prop_id)
コード例 #55
0
def add(bkt, key, img, form = 'JPEG'):
  """Serialize *img* (a PIL image) in-memory and upload it publicly.

  The image is encoded with PIL format *form* (e.g. 'JPEG', 'PNG') and
  stored in bucket *bkt* under *key* with a matching Content-Type and a
  public-read ACL.
  """
  # Map the PIL format name to a standard MIME type; the previous code
  # always set the non-standard 'image/jpg' regardless of format.
  mime_types = {'JPEG': 'image/jpeg', 'PNG': 'image/png', 'GIF': 'image/gif'}
  bucket = conn.get_bucket(bkt)
  newKeyObj = Key(bucket)
  newKeyObj.key = key
  newKeyObj.set_metadata('Content-Type', mime_types.get(form.upper(), 'image/jpeg'))
  buf = s.StringIO()
  img.save(buf, form)
  newKeyObj.set_contents_from_string(buf.getvalue())
  newKeyObj.set_acl('public-read')
コード例 #56
0
ファイル: storage.py プロジェクト: lunayo/Brazaar
def upload_content(bucket=None, key_name=None, 
                    data_type=kUploadContentType.String, data=None) :
    """Upload *data* to *key_name* in *bucket*.

    *data_type* selects which boto setter is used: a raw string, an open
    file object, a filename on disk, or a stream. Returns True on
    success, False on any upload failure.
    """
    bucket = get_bucket(bucket)
    bucketKey = Key(bucket)
    bucketKey.key = key_name
    try :
        if data_type == kUploadContentType.String :
            bucketKey.set_contents_from_string(data)
        elif data_type == kUploadContentType.File :
            bucketKey.set_contents_from_file(data)
        elif data_type == kUploadContentType.FileName :
            # BUG FIX: was `kUploadContentType.FileName(data)`, which
            # *called* the enum member instead of comparing against it, so
            # filename uploads raised and always returned False.
            bucketKey.set_contents_from_filename(data)
        elif data_type == kUploadContentType.Stream :
            bucketKey.set_contents_from_stream(data)
        return True
    except Exception :
        # Best-effort contract: signal failure via the return value.
        return False