def tearDown(self):
    shutil.rmtree(self.workdir)
    # Remove every object the test wrote under its output prefix in S3.
    with closing(S3Connection()) as s3:
        bucket = Bucket(s3, self.output_dir.netloc)
        prefix = self.output_dir.path[1:]
        for key in bucket.list(prefix=prefix):
            assert key.name.startswith(prefix)
            key.delete()
def cleanup_and_validate():
    valid_output = False
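    # The job is expected to have written a file named after the input sample.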
    expected_output = posixpath.basename(sample.path)
    with closing(S3Connection()) as s3:
        b = Bucket(s3, output_dir.netloc)
        path = output_dir.path[1:]
        for k in b.list(prefix=path):
            assert k.name.startswith(path)
            if k.name[len(path):] == '/' + expected_output:
                # FIXME: We may want to validate the output a bit more
                valid_output = True
            else:
                log.warning('Unexpected output file %s/%s', output_dir.geturl(), k.name)
            k.delete()
    assert valid_output, 'Did not find expected output file'
def checkS3FileExists(conf_file, bucket, path):
    # Read the S3 credentials from the JSON config file.
    with open(conf_file) as f:
        conf = json.load(f)
    conn = S3Connection(conf['s3_key'], conf['s3_skey'])
    b = Bucket(conn, bucket)
    found_file = False

    # Any key listed under the prefix means the file exists.
    for key in b.list(prefix=path[1:]):
        found_file = True
        break

    return found_file
    def handle(self, *args, **options):
        conn = S3Connection(settings.AWS_ACCESS_KEY_ID, settings.AWS_SECRET_ACCESS_KEY)
        bucket = Bucket(conn, settings.AWS_STORAGE_BUCKET_AI_NAME)

        s3_files = set()
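        # Record the name of every object currently stored in the bucket.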
        for key in bucket.list():
            s3_files.add(key.name)

        startdate = timezone.now() - timedelta(days=int(options["no_of_days_back"]))
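        # Delete attachment records from this window whose file is missing from S3.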
        attachments = AIAttachment.objects.select_related('ai_pics').filter(ai_pics__created_at__gte=startdate)
        for attachment in attachments:
            if attachment.attachment not in s3_files:
                print(attachment.attachment)
                attachment.delete()

        with connection.cursor() as cursor:
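            # Remove ai_pics rows that have no remaining attachments.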
            cursor.execute(
                'delete from ai_pics_aipics WHERE '
                '(select count(*) from ai_pics_aiattachment where ai_pics_id=ai_pics_aipics.id) =0'
            )
def remove_s3_file(filePath):
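    # Only touch S3 in production; remove every object stored under the given path prefix.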
    if settings.ENVIRONMENT_PRODUCTION:
        conn = S3Connection(settings.AWS_ACCESS_KEY_ID, settings.AWS_SECRET_ACCESS_KEY)
        bucket = Bucket(conn, settings.AWS_STORAGE_BUCKET_NAME)
        for key in bucket.list(prefix=filePath):
            key.delete()
def process(access_key=None, secret_key=None, bucket_name=None, video_path=None, video=None):

    for_deleting = False

    if len(sys.argv) > 1:
        a = sys.argv[1]
        ACCESS_KEY = sys.argv[2]
        SECRET_KEY = sys.argv[3]
        BUCKET_NAME = sys.argv[4]
        database.createDatabase(a)
    else:
        for_deleting = True
        ACCESS_KEY = access_key
        SECRET_KEY = secret_key
        BUCKET_NAME = bucket_name
        database.createDatabase()

    aws_s3_connection = S3Connection(ACCESS_KEY, SECRET_KEY)
    aws_s3_bucket = Bucket(aws_s3_connection, BUCKET_NAME)

    session = database.DBSession()

    object_keys = []
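    # Keys appended here are removed from S3 in one batch before the commit.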

    # This is if the CRON job is running and is removing flagged videos
    if for_deleting is False:
        flagged_content = session.query(FlaggedVideoModel).all()
        if len(flagged_content) > 0:
            time_stamp_now = calendar.timegm(datetime.utcnow().timetuple())
            for content in flagged_content:
                if content.timeStamp <= time_stamp_now:
                    video = content.video
                    favourites_of_video = session.query(VideoFavourite).filter(VideoFavourite.video_id == video.video_id).all()
                    for key in aws_s3_bucket.list(prefix=content.video_path):
                        object_keys.append(key)

                    if len(favourites_of_video) > 0:
                        for fv in favourites_of_video:
                            session.delete(fv)
                    session.delete(content)
                    session.delete(video)
    # This is for when you are deleting a video from the timeline
    elif for_deleting is True and video is not None and video_path:
        favourites_of_video = session.query(VideoFavourite).filter(VideoFavourite.video_id == video.video_id).all()
        flags_for_video = session.query(FlaggedVideoModel).filter(FlaggedVideoModel.video_id == video.video_id).all()

        # Collect the AWS S3 objects to delete
        for key in aws_s3_bucket.list(prefix=video_path):
            object_keys.append(key)
        # Collect the Video Favourites
        if len(favourites_of_video) > 0:
            for fv in favourites_of_video:
                session.delete(fv)
        # Collect the Video Flags
        if len(flags_for_video) > 0:
            for fv in flags_for_video:
                session.delete(fv)

    try:
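        # Remove the collected S3 objects, then commit the database changes.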
        if len(object_keys) > 0:
            aws_s3_bucket.delete_keys(object_keys)
        session.commit()
        session.close()
        return True
    except exc.SQLAlchemyError:
        session.close()
        return False
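# A minimal sketch of how process() might be invoked in its two modes; the
# script name, credential values, bucket name, and video object below are
# placeholders rather than values taken from the original code:
#
#   Timeline deletion (programmatic call with keyword arguments):
#       process(access_key='<access key>', secret_key='<secret key>',
#               bucket_name='<bucket>', video_path='<video prefix>', video=some_video)
#
#   Flagged-video purge (run as a script; argv supplies the database path and credentials):
#       python <script>.py <db_path> <access_key> <secret_key> <bucket_name>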