def screenshot(request, val):
    """Take a screenshot of the URL in `request` and store it in S3 (val == 1),
    or delete the previously stored screenshot object (val == 2).

    Returns the generated object name on upload, or an error string for any
    other `val`.
    """
    if val == 1:
        conn = S3Connection('##', '##')
        bucket = conn.get_bucket('lheston-bucket')
        k = Key(bucket)
        k.key = '//lab3' + request + '_toS3.png'
        driver = webdriver.PhantomJS()  # PhantomJS binary must be on PATH
        driver.set_window_size(1024, 768)  # optional
        driver.get(request)
        driver.save_screenshot('tempfile.png')
        # BUG fix: was `driver.quit` (attribute access only) -- the browser
        # process was never actually terminated.
        driver.quit()
        try:
            # BUG fix: close the screenshot file via a context manager, and
            # only remove the temp file AFTER the upload has read it
            # (previously os.remove ran before set_contents_from_file).
            with open('tempfile.png', 'rb') as file1:
                k.set_contents_from_file(file1)
        finally:
            os.remove('tempfile.png')
        return str(request + '_toS3.png')
    elif val == 2:
        # NOTE(review): delete key '/lab3<request>' does not match the
        # uploaded key '//lab3<request>_toS3.png' -- confirm intent.
        text = '/lab3' + request
        conn = S3Connection('##', '##')
        S3_BUCKET_NAME = 'lheston-bucket'
        bucket = Bucket(conn, S3_BUCKET_NAME)
        bucket = bucket.delete_key(text)
    else:
        return str('incorrect input')
def delete_from_S3(filename):
    """Remove `filename` (prefixed with MEDIA_URL) from the storage bucket."""
    connection = S3Connection(settings.AWS_ACCESS_KEY_ID,
                              settings.AWS_SECRET_ACCESS_KEY)
    storage_bucket = Bucket(connection, settings.AWS_STORAGE_BUCKET_NAME)
    target = Key(storage_bucket)
    target.key = settings.MEDIA_URL + filename
    storage_bucket.delete_key(target)
def delete():
    """Delete a song and its votes from the database, then best-effort delete
    its MP3 from S3. Renders a notice page on completion."""
    songid = int(request.args.get('songid'))
    song = Song.query.filter_by(id=songid).first()
    # Remove all votes referencing this song's data before the song itself.
    votes = Vote.query.filter_by(songdata=song.songdata).all()
    for x in votes:
        db.session.delete(x)
    db.session.commit()
    db.session.delete(song)
    db.session.commit()
    try:
        conn = S3Connection(AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY)
        b = Bucket(conn, S3_BUCKET_NAME)
        k = Key(b)
        # BUG fix: `songdata` was an undefined name; use the song's field.
        k.key = song.songdata.lower() + '.mp3'
        b.delete_key(k)
    except Exception:
        # S3 cleanup is best-effort; the DB rows are already gone.
        pass
    return render_template('notice.html', message="Delete successful.", redirect="/")
def deletePreviousS3Files(bucketName, data_path):
    """Delete every stvpt contacts object under `data_path` in `bucketName`."""
    print("Deleting S3 Stvpt Contacts Files")
    connection = S3Connection()
    bucket = Bucket(connection, bucketName)
    contacts_prefix = data_path + 'contacts/stvpt/'
    for entry in bucket.list(prefix=contacts_prefix):
        entry.delete()
def handle(self, *args, **options):
    # Management command: delete Attachment rows (created within the last
    # `no_of_days_back` days) whose files no longer exist in S3, then purge
    # reports that have neither a description nor any attachments.
    print('Loading list of S3 files')
    conn = S3Connection(settings.AWS_ACCESS_KEY_ID, settings.AWS_SECRET_ACCESS_KEY)
    bucket = Bucket(conn, settings.AWS_STORAGE_BUCKET_NAME)
    s3_files = set()
    # Snapshot every key name once so membership tests below are O(1).
    for key in bucket.list():
        s3_files.add(key.name)
    print('Loaded {} S3 files'.format(len(s3_files)))
    startdate = timezone.now() - timedelta(
        days=int(options["no_of_days_back"]))
    attachments = Attachment.objects.select_related('report')\
        .filter(report__created_at__gte=startdate)
    for attachment in attachments:
        if attachment.attachment not in s3_files:
            # File is gone from S3 -> drop the orphaned DB row.
            attachment.delete()
            sys.stdout.write('-')
        else:
            sys.stdout.write('+')
        sys.stdout.flush()
    print('Deleting empty reports')
    with connection.cursor() as cursor:
        cursor.execute(
            "delete from report_report WHERE "
            "(description is NULL or description = '') AND "
            "(select count(*) from report_attachment where report_id=report_report.id) =0"
        )
def deletePreviousS3Files(bucketName, data_path):
    """Delete every otodompl deals object under `data_path` in `bucketName`."""
    print("Deleting S3 otodompl Deals Files")
    connection = S3Connection()
    bucket = Bucket(connection, bucketName)
    deals_prefix = data_path + 'deals/otodompl/'
    for entry in bucket.list(prefix=deals_prefix):
        entry.delete()
def deletePreviousS3Files(conf_file, keyId, sKeyId):
    """Delete every stvpt contacts object from the pyrates-eu-data-ocean bucket.

    `conf_file` is parsed for backward compatibility but its contents are not
    used by this function.
    """
    print("Deleting S3 Stvpt Contacts Files")
    # FIX: close the config file handle (was `json.load(open(conf_file))`,
    # which leaked it). The parsed result was never used.
    with open(conf_file) as fh:
        json.load(fh)  # kept only to preserve the original validation side effect
    conn = S3Connection(keyId, sKeyId)
    b = Bucket(conn, 'pyrates-eu-data-ocean')
    for x in b.list(prefix='crm-automations/contacts/stvpt/'):
        x.delete()
def tearDown(self):
    """Remove the local work directory and every S3 object under this test's
    output prefix."""
    shutil.rmtree(self.workdir)
    with closing(S3Connection()) as s3:
        out_bucket = Bucket(s3, self.output_dir.netloc)
        out_prefix = self.output_dir.path[1:]
        for obj in out_bucket.list(prefix=out_prefix):
            assert obj.name.startswith(out_prefix)
            obj.delete()
def deletePreviousS3Files(conf_file, keyId, sKeyId):
    """Delete every atvro deals object from the pyrates-data-ocean bucket.

    `conf_file` is parsed for backward compatibility but its contents are not
    used by this function.
    """
    print("Deleting S3 Atvro Deals Files")
    # FIX: close the config file handle (was `json.load(open(conf_file))`,
    # which leaked it). The parsed result was never used.
    with open(conf_file) as fh:
        json.load(fh)  # kept only to preserve the original validation side effect
    conn = S3Connection(keyId, sKeyId)
    b = Bucket(conn, 'pyrates-data-ocean')
    for x in b.list(prefix='renato-teixeira/deals/atvro/'):
        x.delete()
def deletePreviousS3Files(conf_file):
    """Delete every object under the 'Aux/' prefix of the bucket named in
    `conf_file` (JSON with a 'bucket_name' entry)."""
    # FIX: use a context manager so the config file handle is closed
    # (was `json.load(open(conf_file))`, which leaked it).
    with open(conf_file) as fh:
        conf = json.load(fh)
    bucket_name = conf['bucket_name']
    conn = S3Connection()
    b = Bucket(conn, bucket_name)
    for x in b.list(prefix='Aux/'):
        x.delete()
def _assertOutput(self, num_samples=None):
    """Assert each expected sample tarball exists in S3 and is non-empty."""
    with closing(S3Connection()) as s3:
        out_bucket = Bucket(s3, self.output_dir.netloc)
        out_prefix = self.output_dir.path[1:]
        count = 1 if num_samples is None else num_samples
        for idx in range(count):
            sample_id = None if num_samples is None else idx
            tarball = self._sample_name(sample_id) + '.tar.gz'
            key = out_bucket.get_key(posixpath.join(out_prefix, tarball),
                                     validate=True)
            # FIXME: We may want to validate the output a bit more
            self.assertTrue(key.size > 0)
def delete_img_aws(instance, **kwargs):
    """Delete both the image and its thumbnail for `instance` from S3."""
    conn = S3Connection(settings.AWS_ACCESS_KEY_ID, settings.AWS_SECRET_ACCESS_KEY)
    bucket = Bucket(conn, settings.AWS_STORAGE_BUCKET_NAME)
    # Delete the full-size image first, then the thumbnail (same order as
    # before).
    for stored_name in (instance.image.name, instance.image_thumb.name):
        key = Key(bucket)
        key.key = stored_name
        bucket.delete_key(key)
def deletePreviousS3Files(conf_file):
    """Delete every object under 'vas/silver' in the verticals-raw-data
    bucket, using S3 credentials read from the JSON `conf_file`."""
    # FIX: use a context manager so the config file handle is closed
    # (was `json.load(open(conf_file))`, which leaked it).
    with open(conf_file) as fh:
        conf = json.load(fh)
    key = conf['s3_key']
    skey = conf['s3_skey']
    conn = S3Connection(key, skey)
    b = Bucket(conn, 'verticals-raw-data')
    for x in b.list(prefix='vas/silver'):
        x.delete()
def user_thumbnail_delete(sender, instance, **kwargs):
    """Pre-delete signal handler: remove the Gif's thumbnail object from S3."""
    logging.debug('Firing pre-delete signal...')
    gif = get_object_or_404(Gif, pk=instance.id)
    thumb_path = str(gif.thumbnail)
    # Keep only the final path component (whole string if there is no '/').
    filename = thumb_path.rsplit('/', 1)[-1]
    s3conn = S3Connection(settings.AWS_ACCESS_KEY_ID,
                          settings.AWS_SECRET_ACCESS_KEY)
    bucket = Bucket(s3conn, settings.AWS_STORAGE_BUCKET_NAME)
    thumb_key = Key(bucket)
    thumb_key.key = 'thumbs/' + filename
    bucket.delete_key(thumb_key.key)
def _assertOutput(self, num_samples=None, bam=False):
    """Assert each expected sample tarball exists in S3 and is non-empty."""
    with closing(S3Connection()) as s3:
        out_bucket = Bucket(s3, self.output_dir.netloc)
        out_prefix = self.output_dir.path[1:]
        count = 1 if num_samples is None else num_samples
        for idx in range(count):
            sample_id = None if num_samples is None else idx
            tarball = self._sample_name(sample_id, bam=bam) + '.tar.gz'
            key = out_bucket.get_key(posixpath.join(out_prefix, tarball),
                                     validate=True)
            # FIXME: We may want to validate the output a bit more
            self.assertTrue(key.size > 0)
def _assertOutput(self, num_samples=None):
    """Assert each expected 'FAIL.'-flagged tarball (added by bamQC) exists
    in S3 and is non-empty."""
    with closing(S3Connection()) as s3:
        out_bucket = Bucket(s3, self.output_dir.netloc)
        out_prefix = self.output_dir.path[1:]
        count = 1 if num_samples is None else num_samples
        for idx in range(count):
            sample_id = None if num_samples is None else idx
            tarball = self._sample_name(sample_id) + '.tar.gz'
            tarball = 'FAIL.' + tarball  # This flag is added by bamQC
            key = out_bucket.get_key(posixpath.join(out_prefix, tarball),
                                     validate=True)
            # FIXME: We may want to validate the output a bit more
            self.assertTrue(key.size > 0)
def deletePreviousS3Files(conf_file, bucket_name, s3_path_prefix, scai_last_execution_status=1):
    """Delete every object under `s3_path_prefix` in `bucket_name`, unless
    the previous SCAI execution status is 3 (skip as a no-op).

    S3 credentials are read from the JSON `conf_file`.
    """
    if (scai_last_execution_status != 3):
        # FIX: use a context manager so the config file handle is closed
        # (was `json.load(open(conf_file))`, which leaked it).
        with open(conf_file) as fh:
            conf = json.load(fh)
        key = conf['s3_key']
        skey = conf['s3_skey']
        conn = S3Connection(key, skey)
        b = Bucket(conn, bucket_name)
        for x in b.list(prefix=s3_path_prefix):
            x.delete()
def checkS3FileExists(bucket, path):
    """Return the string 'true' if any object exists under `path` (minus its
    leading character) in `bucket`, else 'false'."""
    conn = S3Connection()
    target_bucket = Bucket(conn, bucket)
    result = 'false'
    for entry in target_bucket.list(prefix=path[1:]):
        if len(str(entry)) > 0:
            print(path)
            result = 'true'
            break
    return result
def s3_delete_image(data):
    """Delete an uploaded image from S3 using the credentials and names in
    `data`. Returns the exception on failure, None on success."""
    try:
        from boto.s3.connection import S3Connection, Bucket, Key
        connection = S3Connection(data['S3_KEY'], data['S3_SECRET'])
        bucket = Bucket(connection, data['S3_BUCKET'])
        image_key = Key(bucket)
        image_key.key = data['S3_UPLOAD_DIRECTORY'] + '/' + data['destinationFileName']
        bucket.delete_key(image_key)
    except Exception as e:
        return e
def delete_file_from_s3(filename):
    """Remove `filename` from the project's S3 storage bucket."""
    connection = S3Connection(
        settings.AWS_ACCESS_KEY_ID,
        settings.AWS_SECRET_ACCESS_KEY,
    )
    storage_bucket = Bucket(
        connection,
        settings.AWS_STORAGE_BUCKET_NAME,
    )
    target = Key(storage_bucket)
    target.key = filename
    storage_bucket.delete_key(target)
def delete_s3_pic(user, image):
    """Delete the S3 object backing `image` (stored under the media/ prefix)."""
    connection = S3Connection(AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY)
    bucket = Bucket(connection, AWS_STORAGE_BUCKET_NAME)
    pic_key = Key(bucket)
    pic_key.key = 'media/%s' % (image.image)
    bucket.delete_key(pic_key)
    return
def prop(request, prop_id):
    """REST-ish endpoint for a single Prop.

    GET returns the prop's metadata as JSON; DELETE detaches it from all
    scenes, removes its file (local disk or S3 depending on settings), and
    deletes the row; PUT is not implemented yet.
    """
    if request.method == "GET":
        domain = request.get_host()
        # GET - READ
        try:
            prop = Prop.objects.get(id=prop_id)
            response_data = {
                "success": True,
                "prop": {
                    "id": prop.id,
                    "name": prop.name,
                    "description": prop.description,
                    "url": prop.image.url
                }
            }
            return HttpResponse(json.dumps(response_data), content_type="application/json")
        except ObjectDoesNotExist:
            return HttpResponse(status=404)
    elif request.method == "PUT":
        # PUT - UPDATE - later
        pass
    elif request.method == "DELETE":
        prop = Prop.objects.get(id=prop_id)
        # Unset From all scenes and delete scene_prop
        scene_props = SceneProp.objects.filter(prop_file=prop)
        for scene_prop in scene_props:
            scene = scene_prop.scene
            scene_prop.delete()
            # Re-save the scene so it reflects the removed prop.
            scene.save()
        # Delete File
        if prop.image:
            if not settings.USE_AWS and prop.image.path:
                # Delete from MEDIA_ROOT
                os.remove(prop.image.path)
            elif settings.USE_AWS and prop.image.name:
                # Delete from AWS S3
                connection = S3Connection(settings.AWS_ACCESS_KEY_ID, settings.AWS_SECRET_ACCESS_KEY)
                bucket = Bucket(connection, settings.AWS_STORAGE_BUCKET_NAME)
                fileKey = Key(bucket)
                fileKey.key = prop.image.name
                bucket.delete_key(fileKey)
        # Delete From Database
        prop.delete()
        response_data = {"success": True}
        return HttpResponse(json.dumps(response_data), content_type="application/json")
    else:
        return HttpResponseNotAllowed(['GET', 'PUT', 'DELETE'])
    # Fallthrough for PUT (and GET with no return above).
    return HttpResponse("API call for prop #" + prop_id)
def remove_file_from_bucket(self, kw, bucket_name=None):
    """Delete the key `kw` from `bucket_name` (defaults to self.bucket_name).

    Returns True on success; logs the error and returns False on failure.
    """
    if not bucket_name:
        bucket_name = self.bucket_name
    # BUG fix: was `Bucket(self.conn, self.bucket_name)`, which silently
    # ignored an explicitly supplied bucket_name argument.
    bucket = Bucket(self.conn, bucket_name)
    keyword = Key(bucket)
    keyword.key = kw
    try:
        bucket.delete_key(keyword)
    except Exception as error:
        current_app.logger.error(error)
        return False
    return True
def delete_product_image(product):
    """Delete a product's original image, thumbnail and watermarked copy
    from the configured S3 bucket."""
    conn = S3Connection(settings.AWS_ACCESS_KEY_ID, settings.AWS_SECRET_ACCESS_KEY)
    # Pair each S3 prefix with the URL whose basename is the object name,
    # in the same order as before: original, thumbnail, watermark.
    targets = [
        ('products/', product.image_url),
        ('products/thumbnails/', product.thumbnail.url),
        ('products/watermarked/', product.watermark.url),
    ]
    bucket = Bucket(conn, settings.S3_BUCKET)
    key = Key(bucket)
    for prefix, url in targets:
        parts = url.split("/")
        key.key = prefix + parts[len(parts) - 1]
        bucket.delete_key(key)
def checkS3FileExists(conf_file, bucket, path):
    """Return 'true' if any object exists under `path` (minus its leading
    character) in `bucket`, else 'false'.

    S3 credentials are read from the JSON `conf_file`.
    """
    # FIX: use a context manager so the config file handle is closed
    # (was `json.load(open(conf_file))`, which leaked it).
    with open(conf_file) as fh:
        conf = json.load(fh)
    key = conf['s3_key']
    skey = conf['s3_skey']
    conn = S3Connection(key, skey)
    b = Bucket(conn, bucket)
    found_file = 'false'
    for x in b.list(prefix=path[1:]):
        if (len(str(x)) > 0):
            print(path)
            found_file = 'true'
            break
    return found_file
def cleanup_and_validate():
    """Scan the S3 output prefix, verify the expected output file is present,
    and delete everything found under the prefix."""
    valid_output = False
    expected_output = posixpath.basename(sample.path)
    with closing(S3Connection()) as s3:
        out_bucket = Bucket(s3, output_dir.netloc)
        out_prefix = output_dir.path[1:]
        for key in out_bucket.list(prefix=out_prefix):
            assert key.name.startswith(out_prefix)
            if key.name[len(out_prefix):] == '/' + expected_output:
                # FIXME: We may want to validate the output a bit more
                valid_output = True
            else:
                log.warn('Unexpected output file %s/%s', output_dir.geturl(), key.name)
            # Always clean up, expected or not.
            key.delete()
    assert valid_output, 'Did not find expected output file'
def prop(request, prop_id):
    """REST-ish endpoint for a single Prop.

    GET returns the prop's metadata as JSON; DELETE detaches it from all
    scenes, removes its file (local disk or S3 depending on settings), and
    deletes the row; PUT is not implemented yet.
    """
    if request.method == "GET":
        domain = request.get_host()
        # GET - READ
        try:
            prop = Prop.objects.get(id=prop_id)
            response_data = {
                "success" : True,
                "prop" : {
                    "id" : prop.id,
                    "name" : prop.name,
                    "description" : prop.description,
                    "url" : prop.image.url
                }
            }
            return HttpResponse(json.dumps(response_data), content_type="application/json")
        except ObjectDoesNotExist:
            return HttpResponse(status=404)
    elif request.method == "PUT":
        # PUT - UPDATE - later
        pass
    elif request.method == "DELETE":
        prop = Prop.objects.get(id=prop_id)
        # Unset From all scenes and delete scene_prop
        scene_props = SceneProp.objects.filter(prop_file=prop)
        for scene_prop in scene_props:
            scene = scene_prop.scene
            scene_prop.delete()
            # Re-save the scene so it reflects the removed prop.
            scene.save()
        # Delete File
        if prop.image:
            if not settings.USE_AWS and prop.image.path:
                # Delete from MEDIA_ROOT
                os.remove(prop.image.path)
            elif settings.USE_AWS and prop.image.name:
                # Delete from AWS S3
                connection = S3Connection(settings.AWS_ACCESS_KEY_ID, settings.AWS_SECRET_ACCESS_KEY)
                bucket = Bucket(connection, settings.AWS_STORAGE_BUCKET_NAME)
                fileKey = Key(bucket)
                fileKey.key = prop.image.name
                bucket.delete_key(fileKey)
        # Delete From Database
        prop.delete()
        response_data = { "success" : True }
        return HttpResponse(json.dumps(response_data), content_type="application/json")
    else:
        return HttpResponseNotAllowed(['GET', 'PUT', 'DELETE'])
    # Fallthrough for PUT (and GET with no return above).
    return HttpResponse("API call for prop #" + prop_id)
def run(self):
    # Pull queued RMS API files from S3, parse events/incidents into two CSV
    # outputs (only once at least 60 files have accumulated), then delete the
    # processed files from the bucket.
    s3 = boto3Conn()
    s3content = s3.list_objects_v2(Bucket='rmsapi')['Contents']
    s3files = [s['Key'] for s in s3content]
    s3 = s3open()
    s3read = s3.Bucket('rmsapi')
    if len(s3files) > 59:
        # Creates data frames and assigns columns
        E_cols = ['id', 'etype', 'diff', 'lon', 'lat', 'bear', 'road', 'dat']
        I_cols = ['id', 'lon', 'lat', 'itype', 'street', 'dat_str', 'dat_end', 'ended']
        df_outE = pd.DataFrame(columns=E_cols)
        df_outI = pd.DataFrame(columns=I_cols)
        timelist = []
        # Iterates over files and stores the timestamp of the file
        for file in s3read.objects.all():
            key = file.key
            body = file.get()['Body'].read()
            # File names look like '<kind>_<timestamp>...' where '+' stands
            # in for ':' in the time portion.
            timestamp = re.findall('[^_]+', key)
            timestamp = timestamp[1].replace('+', ':')
            timelist.append(timestamp)
            # Different methods for event and incident files
            if key.startswith('event'):
                df_outE = parseEvent(body, timestamp, df_outE)
            if key.startswith('inciden'):
                df_outI = parseIncident(body, timestamp, df_outI)
        # Output to csv file
        with self.output()[0].open('w') as csvfile:
            df_outE['diff'] = df_outE['diff'].astype(int)
            df_outE['id'] = pd.to_numeric(df_outE['id'])
            df_outE.to_csv(csvfile, columns=E_cols, index=False)
        with self.output()[1].open('w') as csvfile:
            # Create an end time stamp for data that has not actually ended yet for Gantt file
            df_outI = endIncidents(timelist, df_outI)
            df_outI['id'] = df_outI['id'].astype(int)
            df_outI.to_csv(csvfile, columns=I_cols, index=False)
        s3del = Bucket(s3delete(), 'rmsapi')
        k = Key(s3del)
        # Delete all files in S3 folder
        for file in s3files:
            k.key = file
            s3del.delete_key(k)
def test_upload_and_download_with_encryption(tmpdir):
    """Round-trip test: s3am-upload a random file with a locally generated
    32-byte key, download it back, and verify the contents match."""
    from toil_lib.urls import s3am_upload
    from toil_lib.urls import download_url
    from boto.s3.connection import S3Connection, Bucket, Key
    work_dir = str(tmpdir)
    # Create temporary encryption key
    key_path = os.path.join(work_dir, 'foo.key')
    subprocess.check_call([
        'dd', 'if=/dev/urandom', 'bs=1', 'count=32',
        'of={}'.format(key_path)
    ])
    # Create test file
    upload_fpath = os.path.join(work_dir, 'upload_file')
    with open(upload_fpath, 'wb') as fout:
        fout.write(os.urandom(1024))
    # Upload file under a unique per-run key so concurrent runs don't collide.
    random_key = os.path.join('test/', str(uuid4()), 'upload_file')
    s3_url = os.path.join('s3://cgl-driver-projects/', random_key)
    try:
        s3_dir = os.path.split(s3_url)[0]
        s3am_upload(fpath=upload_fpath, s3_dir=s3_dir, s3_key_path=key_path)
        # Download the file
        download_url(url=s3_url, name='download_file', work_dir=work_dir, s3_key_path=key_path)
        download_fpath = os.path.join(work_dir, 'download_file')
        assert os.path.exists(download_fpath)
        assert filecmp.cmp(upload_fpath, download_fpath)
    finally:
        # Delete the Key. Key deletion never fails so we don't need to catch any exceptions
        with closing(S3Connection()) as conn:
            b = Bucket(conn, 'cgl-driver-projects')
            k = Key(b)
            k.key = random_key
            k.delete()
def delete_from_s3(image_name):
    """Delete image from S3 bucket"""
    connection = S3Connection(aws_access_key_id, aws_secret_access_key)
    repo_bucket = Bucket(connection, "shopifyimagerepository")
    image_key = Key(repo_bucket)
    image_key.key = image_name
    repo_bucket.delete_key(image_key)
def background(request, background_id):
    """REST-ish endpoint for a single Background.

    GET returns the background's metadata as JSON; DELETE detaches it from
    all scenes, removes its file (local disk or S3 depending on settings),
    and deletes the row; PUT is not implemented yet.
    """
    domain = request.get_host()
    if request.method == "GET":
        # GET - READ
        try:
            background = Background.objects.get(id=background_id)
            response_data = {
                "success": True,
                "background": {
                    "id": background.id,
                    "name": background.name,
                    "description": background.description,
                    "url": background.image.url
                }
            }
            return HttpResponse(json.dumps(response_data), content_type="application/json")
        except ObjectDoesNotExist:
            return HttpResponse(status=404)
    elif request.method == "PUT":
        # PUT - UPDATE - later
        pass
    elif request.method == "DELETE":
        background = Background.objects.get(id=background_id)
        # Unset From all scenes
        background.scenes.clear()
        # Delete File
        if background.image:
            if not settings.USE_AWS and background.image.path:
                # Delete from MEDIA_ROOT
                os.remove(background.image.path)
            elif settings.USE_AWS and background.image.name:
                # Delete from AWS S3
                connection = S3Connection(settings.AWS_ACCESS_KEY_ID, settings.AWS_SECRET_ACCESS_KEY)
                bucket = Bucket(connection, settings.AWS_STORAGE_BUCKET_NAME)
                fileKey = Key(bucket)
                fileKey.key = background.image.name
                bucket.delete_key(fileKey)
        # Delete from database
        background.delete()
        response_data = {"success": True}
        return HttpResponse(json.dumps(response_data), content_type="application/json")
    else:
        return HttpResponseNotAllowed(['GET', 'PUT', 'DELETE'])
    # Fallthrough for PUT (and GET with no return above).
    return HttpResponse("API call for background #" + background_id)
def delete_url(request):
    """AJAX endpoint: delete the images listed in the POSTed body from S3,
    detach them from their album, and remove their DB rows."""
    if request.is_ajax():
        # SECURITY fix: the body used to be parsed with eval(), which executes
        # arbitrary client-supplied code. ast.literal_eval accepts the same
        # literal payloads (dicts/lists/strings) without code execution.
        import ast
        req = ast.literal_eval(request.body)
        if request.method == 'POST':
            user = UserProfile.objects.get(user__username=request.user)
            conn = S3Connection(AWS_ACCESS_KEY, AWS_SECRET_KEY)
            b = Bucket(conn, AWS_BUCKET_NAME)
            k = Key(b)
            for x in req['url']:
                x = str(x)
                # S3 keys are the last 8 characters of the stored URL.
                k.key = x[-8:]
                b.delete_key(k)
                url_list = Image.objects.get(author=user, url=x, album_name=req['album'])
                album = Album.objects.get(author=user, name=req['album'])
                album.images.remove(url_list)
                album.save()
                url_list.delete()
    return HttpResponse(request)
def background(request, background_id):
    """REST-ish endpoint for a single Background.

    GET returns the background's metadata as JSON; DELETE detaches it from
    all scenes, removes its file (local disk or S3 depending on settings),
    and deletes the row; PUT is not implemented yet.
    """
    domain = request.get_host()
    if request.method == "GET":
        # GET - READ
        try:
            background = Background.objects.get(id=background_id)
            response_data = {
                "success" : True,
                "background" : {
                    "id" : background.id,
                    "name" : background.name,
                    "description" : background.description,
                    "url" : background.image.url
                }
            }
            return HttpResponse(json.dumps(response_data), content_type="application/json")
        except ObjectDoesNotExist:
            return HttpResponse(status=404)
    elif request.method == "PUT":
        # PUT - UPDATE - later
        pass
    elif request.method == "DELETE":
        background = Background.objects.get(id=background_id)
        # Unset From all scenes
        background.scenes.clear()
        # Delete File
        if background.image:
            if not settings.USE_AWS and background.image.path:
                # Delete from MEDIA_ROOT
                os.remove(background.image.path)
            elif settings.USE_AWS and background.image.name:
                # Delete from AWS S3
                connection = S3Connection(settings.AWS_ACCESS_KEY_ID, settings.AWS_SECRET_ACCESS_KEY)
                bucket = Bucket(connection, settings.AWS_STORAGE_BUCKET_NAME)
                fileKey = Key(bucket)
                fileKey.key = background.image.name
                bucket.delete_key(fileKey)
        # Delete from database
        background.delete()
        response_data = { "success" : True }
        return HttpResponse(json.dumps(response_data), content_type="application/json")
    else:
        return HttpResponseNotAllowed(['GET', 'PUT', 'DELETE'])
    # Fallthrough for PUT (and GET with no return above).
    return HttpResponse("API call for background #" + background_id)
def delete_image_from_s3(file_name):
    """Delete `file_name` from the S3 bucket, logging progress; failures are
    logged at debug level rather than raised."""
    try:
        connection = S3Connection(AWS_ACCESS_KEY, AWS_SECRET_KEY)
        logging.info("success s3 connection")
        target_bucket = Bucket(connection, BUCKET)
        image_key = Key(bucket=target_bucket, name=file_name)
        image_key.delete()
        logging.info("success delete image from s3")
    except Exception as e:
        logging.debug(e)
def handle(self, *args, **options):
    # Management command: delete AIAttachment rows (created within the last
    # `no_of_days_back` days) whose files no longer exist in S3, then purge
    # ai_pics rows that are left without any attachments.
    conn = S3Connection(settings.AWS_ACCESS_KEY_ID, settings.AWS_SECRET_ACCESS_KEY)
    bucket = Bucket(conn, settings.AWS_STORAGE_BUCKET_AI_NAME)
    s3_files = set()
    # Snapshot every key name once so membership tests below are O(1).
    for key in bucket.list():
        s3_files.add(key.name)
    startdate = timezone.now() - timedelta(days=int(options["no_of_days_back"]))
    attachments = AIAttachment.objects.select_related('ai_pics').filter(ai_pics__created_at__gte=startdate)
    for attachment in attachments:
        if attachment.attachment not in s3_files:
            # File is gone from S3 -> drop the orphaned DB row.
            print(attachment.attachment)
            attachment.delete()
    with connection.cursor() as cursor:
        cursor.execute(
            'delete from ai_pics_aipics WHERE '
            '(select count(*) from ai_pics_aiattachment where ai_pics_id=ai_pics_aipics.id) =0'
        )
def __init__(self, aws_access_key_id, aws_secret_access_key, bucket_name):
    # NOTE(review): the aws_access_key_id / aws_secret_access_key parameters
    # are ignored -- the connection is built from module-level my_* settings
    # (custom host/port, plain HTTP). Confirm this is intentional.
    #conn = boto.connect_s3(aws_access_key_id, aws_secret_access_key)
    conn = S3Connection(aws_access_key_id = my_aws_access_key_id,
                        aws_secret_access_key = my_aws_secret_access_key,
                        port=my_s3_port,
                        calling_format = calling_format,
                        host=my_s3_host,
                        path = "/",
                        is_secure=False)
    # Bound bucket handle used by the rest of the object.
    self._bucket = Bucket(conn, bucket_name)
def download():
    """Fetch list_links_a.txt from the crawler bucket, polling until it
    appears, save it locally, then delete the remote copy."""
    s3_conn = s3()
    # bucket = s3_conn.create_bucket('distributed-web-crawler')
    bucket = Bucket(s3_conn, 'distributed-web-crawler')
    done = False
    while not done:
        try:
            link_key = Key(bucket)
            link_key.key = 'list_links_a.txt'
            link_key.get_contents_to_filename('input_links_a.txt')
            bucket.delete_key(link_key)
            done = True
        except S3ResponseError:
            # Key not there yet -- keep polling.
            pass
    s3_conn.close()
def download():
    """Fetch list_links_b.txt from the crawler bucket, polling until it
    appears, save it locally, then delete the remote copy."""
    s3_conn = s3()
    # bucket = s3_conn.create_bucket('distributed-web-crawler')
    bucket = Bucket(s3_conn, 'distributed-web-crawler')
    done = False
    while not done:
        try:
            link_key = Key(bucket)
            link_key.key = 'list_links_b.txt'
            link_key.get_contents_to_filename('input_links_b.txt')
            bucket.delete_key(link_key)
            done = True
        except S3ResponseError:
            # Key not there yet -- keep polling.
            pass
    s3_conn.close()
def delete_album(request):
    """AJAX endpoint: delete an album, its images (DB rows + S3 objects),
    and return the refreshed album list as JSON."""
    if request.is_ajax():
        # SECURITY fix: request.body was parsed with eval(), which executes
        # arbitrary client-supplied code. ast.literal_eval accepts the same
        # literal payloads without code execution.
        import ast
        req = ast.literal_eval(request.body)
        if request.method == 'POST':
            user = UserProfile.objects.get(user__username=request.user)
            album_to_delete = Album.objects.get(author=user, name=req['album'])
            imgs = list(album_to_delete.images.all().order_by('-created_date'))
            conn = S3Connection(AWS_ACCESS_KEY, AWS_SECRET_KEY)
            b = Bucket(conn, AWS_BUCKET_NAME)
            k = Key(b)
            for x in imgs:
                # S3 keys are the last 8 characters of the stored URL.
                k.key = x.url[-8:]
                b.delete_key(k)
                x.delete()
            album_to_delete.delete()
            user = str(request.user)
            album_url_list = fill_albums(user, "a-z", "")
    return HttpResponse(json.dumps(album_url_list))
def delete_scene(scene):
    """Delete a scene: its SceneProp links, its thumbnail file (local disk
    or S3 depending on settings), and finally the scene row itself."""
    # Delete all scene props in the scene, saving the parent after each
    # removal (mirrors the original per-prop save).
    for scene_prop in SceneProp.objects.filter(scene=scene):
        scene = scene_prop.scene
        scene_prop.delete()
        scene.save()
    # Delete the scene thumbnail
    if not settings.USE_AWS and hasattr(scene.thumbnail, "path"):
        # Local storage: remove from MEDIA_ROOT.
        os.remove(scene.thumbnail.path)
    elif settings.USE_AWS and hasattr(scene.thumbnail, "name"):
        # S3 storage: delete the object by stored name.
        connection = S3Connection(settings.AWS_ACCESS_KEY_ID, settings.AWS_SECRET_ACCESS_KEY)
        bucket = Bucket(connection, settings.AWS_STORAGE_BUCKET_NAME)
        thumb_key = Key(bucket)
        thumb_key.key = scene.thumbnail.name
        bucket.delete_key(thumb_key)
    # Delete the scene itself
    scene.delete()
def prodcut_images_delete(request, product_id, image_id):
    """Delete one ProductImages row and its file from S3, then redirect back
    to the product's image list. Only the owning user may delete."""
    product = get_object_or_404(Product, pk=product_id)
    if product.user != request.user:
        # BUG fix: the redirect was computed but not returned, so a
        # non-owner fell through and could still delete the image.
        # NOTE(review): route name 'honme' looks like a typo for 'home' --
        # confirm against urls.py before renaming.
        return redirect('honme')
    image = get_object_or_404(ProductImages, pk=image_id)
    s3conn = S3Connection(settings.AWS_ACCESS_KEY_ID, settings.AWS_SECRET_ACCESS_KEY)
    bucket = Bucket(s3conn, settings.AWS_STORAGE_BUCKET_NAME)
    name_image = image.images.split('/')
    try:
        bucket.delete_key('upload/images/' + name_image[-1])
        image.delete()
    except Exception as e:
        print(e)
    return redirect('product_images', product_id)
def delete_scene(scene):
    """Remove a scene along with its prop links and its thumbnail file."""
    # Drop every SceneProp attached to this scene; re-save after each
    # deletion, exactly as the original did.
    scene_props = SceneProp.objects.filter(scene=scene)
    for sp in scene_props:
        scene = sp.scene
        sp.delete()
        scene.save()
    uses_aws = settings.USE_AWS
    if not uses_aws and hasattr(scene.thumbnail, 'path'):
        os.remove(scene.thumbnail.path)  # file lives under MEDIA_ROOT
    elif uses_aws and hasattr(scene.thumbnail, 'name'):
        conn = S3Connection(settings.AWS_ACCESS_KEY_ID, settings.AWS_SECRET_ACCESS_KEY)
        store = Bucket(conn, settings.AWS_STORAGE_BUCKET_NAME)
        entry = Key(store)
        entry.key = scene.thumbnail.name
        store.delete_key(entry)
    # Finally remove the scene row itself.
    scene.delete()
def cleanupBucket(self, bucket_name):
    """Best-effort cleanup: delete every key in `bucket_name`, then the
    bucket itself. Errors are ignored (the bucket may already be gone)."""
    try:
        from boto.s3.connection import S3Connection, Bucket, Key
        conn = S3Connection()
        b = Bucket(conn, bucket_name)
        for x in b.list():
            b.delete_key(x.key)
        b.delete()
    # FIX: narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
    # are no longer swallowed.
    except Exception:
        pass  # bucket must already be removed
def remove_profile_image(user):
    """Delete the user's profile image and its crop from S3, then clear the
    corresponding fields on the user record."""
    conn = S3Connection(settings.AWS_ACCESS_KEY_ID, settings.AWS_SECRET_ACCESS_KEY)
    bucket = Bucket(conn, settings.S3_BUCKET)
    key = Key(bucket)
    # Object names are the basenames of the stored URLs.
    original_parts = user.profile_image_url.split("/")
    crop_parts = user.profile_image_crop.url.split("/")
    key.key = 'profile_images/' + original_parts[len(original_parts) - 1]
    bucket.delete_key(key)
    key.key = 'profile_images/crop/' + crop_parts[len(crop_parts) - 1]
    bucket.delete_key(key)
    user.profile_image_url = None
    user.profile_image_crop = None
    user.save()
def delete():
    # Flask endpoint (Python 2): delete the file named in the 'del_filename'
    # form field from the uploads/ prefix of the configured bucket, then
    # flash a message and re-render the index page.
    file_name1=request.form.get("del_filename")
    print "Filename is "
    print file_name1
    conn = S3Connection(cfg.AWS_APP_ID, cfg.AWS_APP_SECRET)
    bucket = Bucket(conn, cfg.AWS_BUCKET)
    # secure_filename() sanitizes the user-supplied name before building the key.
    key = 'uploads/' + secure_filename(file_name1)
    k = Key(bucket=bucket, name=key)
    k.delete()
    flash("File Delete successfully")
    return render_template('index.html')
    # NOTE(review): unreachable -- the return above always executes first.
    return file_name1
class S3Aspect:
    '''A storage for Amazon S3 web service.

    Object records and tid/serial indexes are encoded into S3 key names
    (e.g. 'type:record,oid:...,tid:...') and decoded with dict_from_key.
    '''
    def __init__(self, aws_access_key_id, aws_secret_access_key, bucket_name):
        # NOTE(review): the credential parameters are ignored -- the
        # connection uses module-level my_* settings (custom host/port,
        # plain HTTP). Confirm this is intentional.
        #conn = boto.connect_s3(aws_access_key_id, aws_secret_access_key)
        conn = S3Connection(aws_access_key_id = my_aws_access_key_id,
                            aws_secret_access_key = my_aws_secret_access_key,
                            port=my_s3_port,
                            calling_format = calling_format,
                            host=my_s3_host,
                            path = "/",
                            is_secure=False)
        self._bucket = Bucket(conn, bucket_name)

    def lastCommit(self):
        ''' Get the last committed tid and serial according to S3.
        This may not be current.
        '''
        prefix = 'type:index_serial,'
        rs = self._bucket.get_all_keys(prefix=prefix, maxkeys=1)
        if not rs:
            # umm not created yet
            return None, None
        else:
            info = dict_from_key(rs[0].key)
            tid = tid_unrepr(info['tid'])
            serial = serial_unrepr(info['serial'])
            return tid, serial

    def highestOid(self):
        ''' Get the highest oid value '''
        prefix = 'type:record,'
        rs = self._bucket.get_all_keys(prefix=prefix, maxkeys=1)
        if not rs:
            # umm not created yet
            return None
        else:
            info = dict_from_key(rs[0].key)
            oid = oid_unrepr(info['oid'])
            return oid

    def getSerialForTid(self, tid):
        ''' get the serial for a tid '''
        prefix = 'type:index_tid,tid:%s,' % tid_repr(tid)
        rs = self._bucket.get_all_keys(prefix=prefix, maxkeys=1)
        if not rs:
            #either it hasn't propogated yet or has been packed away
            return None
        info = dict_from_key(rs[0].key)
        serial = serial_unrepr(info['serial'])
        return serial

    def loadPickle(self, oid, tid):
        """Actually get the data from s3.

        Retries transient S3ResponseErrors up to RETRIES times with a fixed
        SLEEPTIME pause, then makes one final unguarded attempt.
        """
        k = Key(self._bucket)
        k.key = 'type:record,oid:%s,tid:%s' % (oid_repr(oid), tid_repr(tid))
        if DEBUG:
            print 'LOAD %s' % k.key
        for n in xrange(RETRIES):
            try:
                return k.get_contents_as_string()
            except S3ResponseError, s3e:
                print " = = = = = = = S3ResponseError: = = = = = = = ="
                print s3e
                if DEBUG:
                    print 'RETRY %s' % n
                time.sleep(SLEEPTIME)
        # Last attempt: let any error propagate to the caller.
        return k.get_contents_as_string()
def process_file(aws_conn, filepath):
    # Ensure 200px and 800px renditions of `filepath` exist and are fresh,
    # upload original/thumbnail/display images to S3 when newer than the
    # uploaded copy, and return (photo_age, per-image metadata dict).
    # Returns None if the source image cannot be opened.
    mtime = get_mtime(filepath)
    name_200 = add_size_name(filepath, '200')
    name_800 = add_size_name(filepath, '800')
    mtime_200 = get_mtime(name_200)
    mtime_800 = get_mtime(name_800)
    im = None
    # Regenerate the 200px rendition if it is missing or stale.
    if mtime_200 is None or mtime_200 < mtime:
        try:
            im = Image.open(filepath)
        except:
            return None
        generate_200(im, name_200)
    # Same for the 800px rendition, reusing the already-opened image if any.
    if mtime_800 is None or mtime_800 < mtime:
        if im is None:
            try:
                im = Image.open(filepath)
            except:
                return None
        generate_800(im, name_800)
    names = {
        'original': filepath,
        'thumbnail': name_200,
        'display': name_800,
    }
    b = Bucket(aws_conn, BUCKET)
    image_result = {}
    for image_type, name in names.items():
        # The '.meta' sidecar caches the resolution and doubles as an
        # upload marker (its mtime marks the last sync).
        aws_tag_path = add_size_name(name, 's3t') + '.meta'
        aws_key_path = name[len(GALLERY_DIR):].strip('/')
        image_result[image_type] = {
            'url': 'http://s3.amazonaws.com/{}/{}'.format(
                BUCKET, aws_key_path)
        }
        if not is_newer(name, aws_tag_path):
            # Image unchanged since last sync: reuse the cached resolution.
            try:
                resolution = load_data(aws_tag_path)
                resolution['width']
            except:
                resolution = get_resolution(name)
                save_data(aws_tag_path, resolution)
            image_result[image_type].update(resolution)
            continue
        resolution = get_resolution(name)
        image_result.update(resolution)
        save_data(aws_tag_path, resolution)
        s3key = b.get_key(aws_key_path)
        mtime = get_mtime(name)
        if s3key and s3key.last_modified:
            print datetime.datetime(*parsedate(s3key.last_modified)[:6])
            print mtime
            if datetime.datetime(*parsedate(s3key.last_modified)[:6]) > mtime:
                # Remote copy is newer: just touch the marker and skip upload.
                with open(aws_tag_path, 'a'):
                    os.utime(aws_tag_path, None)
                continue
        print 'Sending {} to S3'.format(name)
        k = Key(b)
        k.key = aws_key_path
        # Far-future caching headers (~25 years).
        expires = datetime.datetime.utcnow() + datetime.timedelta(days=25 * 365)
        expires = expires.strftime("%a, %d %b %Y %H:%M:%S GMT")
        k.set_metadata("Content-Type", mimetypes.guess_type(name)[0])
        k.set_metadata("Expires", expires)
        k.set_metadata("Cache-Control", "max-age={0}, public".format(86400 * 365 * 25))
        k.set_contents_from_filename(name)
        k.set_acl('public-read')
        # Touch the marker to record the successful upload.
        with open(aws_tag_path, 'a'):
            os.utime(aws_tag_path, None)
    photo_age = get_photo_age(filepath)
    image_result['caption'] = get_caption(filepath)
    return photo_age, image_result
def process(access_key=None, secret_key=None, bucket_name=None, video_path=None, video=None):
    # Two modes of operation:
    #  * CRON mode (command-line args present): delete every flagged video
    #    whose flag timestamp has expired.
    #  * Direct-delete mode (keyword args supplied): delete one specific
    #    video plus its S3 objects, favourites and flags.
    # Returns True when the S3 deletes + DB commit succeed, False otherwise.
    for_deleting = False
    if len(sys.argv) > 1:
        a = sys.argv[1]
        ACCESS_KEY = sys.argv[2]
        SECRET_KEY = sys.argv[3]
        BUCKET_NAME = sys.argv[4]
        database.createDatabase(a)
    else:
        for_deleting = True
        ACCESS_KEY = access_key
        SECRET_KEY = secret_key
        BUCKET_NAME = bucket_name
        database.createDatabase()
    aws_s3_connection = S3Connection(ACCESS_KEY, SECRET_KEY)
    aws_s3_bucket = Bucket(aws_s3_connection, BUCKET_NAME)
    session = database.DBSession()
    object_keys = []
    # This is if the CRON job is running and is removing flagged videos
    if for_deleting is False:
        flagged_content = session.query(FlaggedVideoModel).all()
        if len(flagged_content) > 0:
            time_stamp_now = calendar.timegm(datetime.utcnow().timetuple())
            for content in flagged_content:
                if content.timeStamp <= time_stamp_now:
                    video = content.video
                    favourites_of_video = session.query(VideoFavourite).filter(VideoFavourite.video_id == video.video_id).all()
                    # Queue the video's S3 objects for bulk deletion below.
                    for key in aws_s3_bucket.list(prefix=content.video_path):
                        object_keys.append(key)
                    if len(favourites_of_video) > 0:
                        for fv in favourites_of_video:
                            session.delete(fv)
                    session.delete(content)
                    session.delete(video)
    # This is for when you are deleting a video from the timeline
    # NOTE(review): `video_path is not ''` compares identity, not equality --
    # should be `video_path != ''`; works only by CPython interning accident.
    elif for_deleting is True and video is not None and video_path is not '' and video_path is not None:
        favourites_of_video = session.query(VideoFavourite).filter(VideoFavourite.video_id == video.video_id).all()
        flags_for_video = session.query(FlaggedVideoModel).filter(FlaggedVideoModel.video_id == video.video_id).all()
        # Collect the AWS S3 objects to delete
        for key in aws_s3_bucket.list(prefix=video_path):
            object_keys.append(key)
        # Collect the Video Favourites
        if len(favourites_of_video) > 0:
            for fv in favourites_of_video:
                session.delete(fv)
        # Collect the Video Flags
        if len(flags_for_video) > 0:
            for fv in flags_for_video:
                session.delete(fv)
    try:
        # Bulk-delete the queued S3 objects, then commit all DB deletions.
        if len(object_keys) > 0:
            aws_s3_bucket.delete_keys(object_keys)
        session.commit()
        session.close()
        return True
    except exc.SQLAlchemyError:
        session.close()
        return False