def delete_file(self, data, suffix=''):
    """ Handle file deletion requests. For this, we use the Amazon Python SDK,
    boto.
    """
    from .models import FileAndUrl
    if not boto:
        return
    boto.set_stream_logger('boto')
    S3 = S3Connection(settings.AWS_ACCESS_KEY_ID,
                      settings.AWS_SECRET_ACCESS_KEY)
    file_id = data.get('file_id', None)
    bucket_name = self.s3_bucket
    aws_bucket = S3.get_bucket(bucket_name, validate=False)

    fileuploader = FileAndUrl()
    log.info(u"fileuploader.get_file_path(file_id): %s",
             fileuploader.get_file_path(file_id))
    # Delete from S3
    file_key = Key(aws_bucket, fileuploader.get_file_path(file_id))
    file_key.delete()
    # Delete the record from the database
    fileuploader.delete_record(file_id)
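
# The snippets below all repeat the same boto idiom: open an S3Connection,
# wrap the bucket without a validation round-trip, point a Key at the object,
# and call delete(). A minimal standalone sketch of that pattern; the
# argument names here are illustrative, not from any snippet:
from boto.s3.connection import S3Connection, Bucket, Key

def delete_s3_key(access_key, secret_key, bucket_name, key_name):
    conn = S3Connection(access_key, secret_key)
    bucket = Bucket(conn, bucket_name)  # constructing a Bucket makes no request
    k = Key(bucket=bucket, name=key_name)
    k.delete()  # S3 DELETE is idempotent; deleting a missing key still succeeds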
def test_bwa():
    work_dir = tempfile.mkdtemp()
    create_config(work_dir)
    create_manifest(work_dir)
    # Call Pipeline
    try:
        subprocess.check_call(
            [
                "toil-bwa",
                "run",
                os.path.join(work_dir, "jstore"),
                "--manifest",
                os.path.join(work_dir, "manifest.txt"),
                "--config",
                os.path.join(work_dir, "config.txt"),
                "--retryCount",
                "1",
            ]
        )
    finally:
        shutil.rmtree(work_dir)
        conn = S3Connection()
        b = Bucket(conn, "cgl-driver-projects")
        k = Key(b)
        k.key = "test/ci/ci_test.bam"
        k.delete()
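
# test_bwa assumes helpers create_config and create_manifest that write the
# pipeline's config.txt and manifest.txt into work_dir. Their real bodies are
# pipeline-specific and not shown here; a purely hypothetical sketch of their
# shape:
def create_config(work_dir):
    with open(os.path.join(work_dir, 'config.txt'), 'w') as f:
        f.write('# pipeline options, one per line\n')

def create_manifest(work_dir):
    with open(os.path.join(work_dir, 'manifest.txt'), 'w') as f:
        f.write('# one sample per line\n')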
def handle_DELETE(request):
    """ Handle file deletion requests. For this, we use the Amazon Python SDK,
    boto.
    """
    try:
        import boto
        from boto.s3.connection import Key, S3Connection
        boto.set_stream_logger('boto')
        S3 = S3Connection(aws_access_key_id=settings.AWS_ACCESS_KEY_ID,
                          aws_secret_access_key=settings.AWS_SECRET_ACCESS_KEY)
    except ImportError:
        print("Could not import boto, the Amazon SDK for Python.")
        print("Deleting files will not work.")
        print("Install boto with")
        print("$ pip install boto")
        return make_response(500)

    bucket_name = request.POST.get('bucket')
    key_name = request.POST.get('key')
    try:
        aws_bucket = S3.get_bucket(bucket_name, validate=False)
        aws_key = Key(aws_bucket, key_name)
        aws_key.delete()
        return make_response(200)
    except Exception as err:
        print(err)
        return make_response(500)
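
# An equivalent shortcut: boto buckets expose delete_key(), so the explicit
# Key object above can be skipped. A sketch under the same settings
# assumptions as handle_DELETE:
def delete_via_bucket(bucket_name, key_name):
    s3 = S3Connection(aws_access_key_id=settings.AWS_ACCESS_KEY_ID,
                      aws_secret_access_key=settings.AWS_SECRET_ACCESS_KEY)
    bucket = s3.get_bucket(bucket_name, validate=False)
    bucket.delete_key(key_name)  # returns the deleted Key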
def test_upload_and_download_with_encryption(tmpdir):
    from toil_scripts.lib.urls import s3am_upload
    from toil_scripts.lib.urls import download_url
    from boto.s3.connection import S3Connection, Bucket, Key
    work_dir = str(tmpdir)
    # Create temporary encryption key
    key_path = os.path.join(work_dir, 'foo.key')
    subprocess.check_call(['dd', 'if=/dev/urandom', 'bs=1', 'count=32',
                           'of={}'.format(key_path)])
    # Create test file
    upload_fpath = os.path.join(work_dir, 'upload_file')
    with open(upload_fpath, 'wb') as fout:
        fout.write(os.urandom(1024))
    # Upload file
    s3_dir = 's3://cgl-driver-projects/test'
    s3am_upload(fpath=upload_fpath, s3_dir=s3_dir, s3_key_path=key_path)
    # Download the file
    url = 'https://s3-us-west-2.amazonaws.com/cgl-driver-projects/test/upload_file'
    download_url(url=url, name='download_file', work_dir=work_dir, s3_key_path=key_path)
    download_fpath = os.path.join(work_dir, 'download_file')
    assert os.path.exists(download_fpath)
    assert filecmp.cmp(upload_fpath, download_fpath)
    # Delete the Key
    conn = S3Connection()
    b = Bucket(conn, 'cgl-driver-projects')
    k = Key(b)
    k.key = 'test/upload_file'
    k.delete()
def test_upload_and_download_with_encryption(tmpdir):
    from toil_lib.urls import s3am_upload
    from toil_lib.urls import download_url
    from boto.s3.connection import S3Connection, Bucket, Key
    work_dir = str(tmpdir)
    # Create temporary encryption key
    key_path = os.path.join(work_dir, 'foo.key')
    subprocess.check_call([
        'dd', 'if=/dev/urandom', 'bs=1', 'count=32', 'of={}'.format(key_path)
    ])
    # Create test file
    upload_fpath = os.path.join(work_dir, 'upload_file')
    with open(upload_fpath, 'wb') as fout:
        fout.write(os.urandom(1024))
    # Upload file
    random_key = os.path.join('test/', str(uuid4()), 'upload_file')
    s3_url = os.path.join('s3://cgl-driver-projects/', random_key)
    try:
        s3_dir = os.path.split(s3_url)[0]
        s3am_upload(fpath=upload_fpath, s3_dir=s3_dir, s3_key_path=key_path)
        # Download the file
        download_url(url=s3_url,
                     name='download_file',
                     work_dir=work_dir,
                     s3_key_path=key_path)
        download_fpath = os.path.join(work_dir, 'download_file')
        assert os.path.exists(download_fpath)
        assert filecmp.cmp(upload_fpath, download_fpath)
    finally:
        # Delete the Key. Key deletion never fails so we don't need to catch any exceptions
        with closing(S3Connection()) as conn:
            b = Bucket(conn, 'cgl-driver-projects')
            k = Key(b)
            k.key = random_key
            k.delete()
def test_upload_and_download_with_encryption(tmpdir):
    from toil_scripts.lib.urls import s3am_upload
    from toil_scripts.lib.urls import download_url
    from boto.s3.connection import S3Connection, Bucket, Key
    work_dir = str(tmpdir)
    # Create temporary encryption key
    key_path = os.path.join(work_dir, 'foo.key')
    subprocess.check_call(['dd', 'if=/dev/urandom', 'bs=1', 'count=32',
                           'of={}'.format(key_path)])
    # Create test file
    upload_fpath = os.path.join(work_dir, 'upload_file')
    with open(upload_fpath, 'wb') as fout:
        fout.write(os.urandom(1024))
    # Upload file
    random_key = os.path.join('test/', str(uuid4()), 'upload_file')
    s3_url = os.path.join('s3://cgl-driver-projects/', random_key)
    try:
        s3_dir = os.path.split(s3_url)[0]
        s3am_upload(fpath=upload_fpath, s3_dir=s3_dir, s3_key_path=key_path)
        # Download the file
        download_url(url=s3_url, name='download_file', work_dir=work_dir, s3_key_path=key_path)
        download_fpath = os.path.join(work_dir, 'download_file')
        assert os.path.exists(download_fpath)
        assert filecmp.cmp(upload_fpath, download_fpath)
    finally:
        # Delete the Key. Key deletion never fails so we don't need to catch any exceptions
        with closing(S3Connection()) as conn:
            b = Bucket(conn, 'cgl-driver-projects')
            k = Key(b)
            k.key = random_key
            k.delete()
def delete_image_from_s3(file_name):
    try:
        conn = S3Connection(AWS_ACCESS_KEY, AWS_SECRET_KEY)
        logging.info("successfully connected to S3")
        bucket = Bucket(conn, BUCKET)
        k = Key(bucket=bucket, name=file_name)
        k.delete()
        logging.info("successfully deleted image from S3")
    except Exception as e:
        logging.error(e)
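
# Hypothetical call site; the file name is illustrative, and AWS_ACCESS_KEY,
# AWS_SECRET_KEY, and BUCKET are assumed to be module-level constants:
delete_image_from_s3('avatars/user-42.png')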
def handle_DELETE(request):
    """ Handle file deletion requests. For this, we use the Amazon Python SDK,
    boto.
    """
    if boto:
        bucket_name = request.REQUEST.get('bucket')
        key_name = request.REQUEST.get('key')
        aws_bucket = S3.get_bucket(bucket_name, validate=False)
        aws_key = Key(aws_bucket, key_name)
        aws_key.delete()
        return make_response(200)
    else:
        return make_response(500)
def delete():
    file_name1 = request.form.get("del_filename")
    print("Filename is {}".format(file_name1))

    conn = S3Connection(cfg.AWS_APP_ID, cfg.AWS_APP_SECRET)
    bucket = Bucket(conn, cfg.AWS_BUCKET)
    key = 'uploads/' + secure_filename(file_name1)
    k = Key(bucket=bucket, name=key)
    k.delete()
    flash("File deleted successfully")
    return render_template('index.html')
def s3_delete(key=None):
    """ Route for deleting files off S3. Uses the SDK. """
    try:
        from boto.s3.connection import Key, S3Connection
        S3 = S3Connection(app.config.get("AWS_SERVER_PUBLIC_KEY"), 
            app.config.get("AWS_SERVER_SECRET_KEY"))
        request_payload = request.values
        bucket_name = request_payload.get('bucket')
        key_name = request_payload.get('key')
        aws_bucket = S3.get_bucket(bucket_name, validate=False)
        aws_key = Key(aws_bucket, key_name)
        aws_key.delete()
        return make_response('', 200)
    except ImportError:
        abort(500)
    def post(self, request, business_id):
        business = get_object_or_404(Business, pk=business_id)

        if business.image and business.image.url:
            conn = S3Connection(settings.AWS_ACCESS_KEY_ID, settings.AWS_SECRET_ACCESS_KEY)
            bucket = Bucket(conn, settings.AWS_STORAGE_BUCKET_NAME)
            k = Key(bucket=bucket, name=business.image.url.split(bucket.name)[1])
            k.delete()

        file = request.FILES.get('file')
        business.image = file
        business.save()

        request.session['business_image'] = business.image.url if business.image and business.image.url else None

        return Response({}, status=status.HTTP_200_OK)
def test_exome():
    workdir = tempfile.mkdtemp()
    create_config_and_manifest(workdir)
    # Call Pipeline
    try:
        base_command = ['toil-exome', 'run',
                        '--config', os.path.join(workdir, 'config-toil-exome.yaml'),
                        os.path.join(workdir, 'jstore')]
        # Run with manifest
        subprocess.check_call(base_command + ['--manifest', os.path.join(workdir, 'manifest-toil-exome.tsv')])
    finally:
        shutil.rmtree(workdir)
        conn = S3Connection()
        b = Bucket(conn, 'cgl-driver-projects')
        k = Key(b)
        k.key = 'test/ci/exome-ci-test.tar.gz'
        k.delete()
    def post(self, request, business_id, business_location_id):
        location = get_object_or_404(BusinessLocation, pk=business_location_id)

        if location.image and location.image.url:
            conn = S3Connection(settings.AWS_ACCESS_KEY_ID, settings.AWS_SECRET_ACCESS_KEY)
            bucket = Bucket(conn, settings.AWS_STORAGE_BUCKET_NAME)
            k = Key(bucket=bucket, name=location.image.url.split(bucket.name)[1])
            k.delete()

        file = request.FILES.get('file')
        location.image = file
        location.save()

        if location.primary:
            request.session['business_image'] = location.image.url

        return Response({}, status=status.HTTP_200_OK)
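
# Both image views derive the S3 key by splitting the stored URL on the bucket
# name. Note that with a path-style URL the split keeps a leading slash, and
# boto treats '/media/img.png' and 'media/img.png' as different keys:
url = 'https://s3.amazonaws.com/my-bucket/media/img.png'  # illustrative URL
assert url.split('my-bucket')[1] == '/media/img.png'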
def handle_delete(request):
    """ Handle file deletion requests. For this, we use the Amazon Python SDK,
    boto.
    """
    key_name = request.GET['key']
    response = None

    # TODO: I see an opportunity to dance to funky beats here ... functools!!!!! @sylvain
    qs_other_data = OtherData.objects.filter(
        user=request.user, other_data_file=key_name).order_by('-pk')
    qs_digital_work = DigitalWork.objects.filter(
        user=request.user, digital_work_file=key_name).order_by('-pk')
    qs_thumbnail = Thumbnail.objects.filter(
        user=request.user, thumbnail_file=key_name).order_by('-pk')

    if qs_other_data:
        view = OtherDataEndpoint.as_view({'delete': 'destroy'})
        response = view(request, pk=qs_other_data[0].pk)
    elif qs_digital_work:
        view = DigitalWorkEndpoint.as_view({'delete': 'destroy'})
        response = view(request, pk=qs_digital_work[0].pk)
    elif qs_thumbnail:
        view = ThumbnailEndpoint.as_view({'delete': 'destroy'})
        response = view(request, pk=qs_thumbnail[0].pk)

    if response is None:
        return make_response(400, json.dumps({'success': False}))

    if response.status_code == status.HTTP_200_OK:
        aws_bucket = get_bucket()
        aws_key = Key(aws_bucket, key_name)
        aws_key.delete()
        # TODO replace with S3HttpRequest model serializer
        http_request = save_request(
            request.user, {
                'request': request,
                'payload': [{
                    'key': key_name
                }, {
                    'bucket': aws_bucket.name
                }]
            },
            multi_parts=1)
        # TODO log it
        print(http_request)
    return response
def test_bwa():
    work_dir = tempfile.mkdtemp()
    create_config(work_dir)
    create_manifest(work_dir)
    # Call Pipeline
    try:
        subprocess.check_call([
            'toil-bwa', 'run',
            os.path.join(work_dir, 'jstore'), '--manifest',
            os.path.join(work_dir, 'manifest.txt'), '--config',
            os.path.join(work_dir, 'config.txt'), '--retryCount', '1'
        ])
    finally:
        shutil.rmtree(work_dir)
        conn = S3Connection()
        b = Bucket(conn, 'cgl-driver-projects')
        k = Key(b)
        k.key = 'test/ci/ci_test.bam'
        k.delete()
def test_bwa(tmpdir):
    work_dir = str(tmpdir)
    create_config(work_dir)
    create_manifest(work_dir)
    subdir = '/mnt/ephemeral/toil-scripts/bwa'
    os.makedirs(os.path.join(subdir, 'workDir'))
    # Call Pipeline
    try:
        subprocess.check_call(['toil-bwa', 'run',
                               os.path.join(subdir, 'jstore'),
                               '--manifest', os.path.join(work_dir, 'manifest.txt'),
                               '--config', os.path.join(work_dir, 'config.txt'),
                               '--retryCount', '1',
                               '--workDir', os.path.join(subdir, 'workDir')])
    finally:
        shutil.rmtree(subdir)
        conn = S3Connection()
        b = Bucket(conn, 'cgl-driver-projects')
        k = Key(b)
        k.key = 'test/ci/ci_test.bam'
        k.delete()
def checkS3(db, api):
    # Get list for DELETE
    dellist = list(db.dellist.find())
    if len(dellist) > 0:
        # Connect to S3
        S3_ACCESS_KEY = api['S3_ACCESS_KEY']
        S3_SECRET_KEY = api['S3_SECRET_KEY']
        S3_BUCKET = api['S3_BUCKET']
        try:
            conn = boto.connect_s3(S3_ACCESS_KEY,
                                   S3_SECRET_KEY,
                                   is_secure=False,
                                   calling_format=OrdinaryCallingFormat())
            bucket = Bucket(conn, S3_BUCKET)
            info('\nConnected to S3')
        except Exception as e:
            info(e)
            info('\nFailed to connect to S3')
            return
        for rmlist in dellist:
            if rmlist['list']:
                info('Removing files from: ' + str(rmlist['ts']))
                try:
                    alldeleted = True
                    for key in rmlist['list']:
                        info(str(key))
                        info(str(S3_BUCKET))
                        k = Key(bucket=bucket, name=key)
                        if k.exists():
                            k.delete()
                        # Verify the key is actually gone before clearing the record
                        if k.exists():
                            alldeleted = False
                    if alldeleted:
                        res = db.dellist.remove({'_id': rmlist['_id']})
                        info(res)
                except Exception as e:
                    info(e)
                    info('Error while deleting images')
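
# Input shapes checkS3 relies on, inferred from the code above (all values
# illustrative):
api = {
    'S3_ACCESS_KEY': 'AKIA...',
    'S3_SECRET_KEY': '...',
    'S3_BUCKET': 'my-bucket',
}
dellist_doc = {'_id': '...', 'ts': '2017-01-01', 'list': ['img/a.jpg', 'img/b.jpg']}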
    def handle(self, *args, **options):
        all_models = apps.get_models()
        physical_files = set()
        db_files = set()

        # Get all files from the database
        for model in all_models:
            file_fields = []
            filters = Q()
            for f_ in model._meta.fields:
                if isinstance(f_, FileField):
                    file_fields.append(f_.name)
                    is_null = {'{}__isnull'.format(f_.name): True}
                    is_empty = {'{}__exact'.format(f_.name): ''}
                    # accumulate one null-or-empty clause per file field
                    filters |= Q(**is_null) | Q(**is_empty)
            # only retrieve the models which have non-empty, non-null file fields
            if file_fields:
                files = model.objects.exclude(filters).values_list(*file_fields).distinct()
                for file in files:
                    db_files.update(file)

        # Get all files from the S3 media storage, recursively
        media_root = getattr(settings, 'MEDIA_URL', None)
        conn = S3Connection(settings.AWS_ACCESS_KEY_ID, settings.AWS_SECRET_ACCESS_KEY, host=settings.AWS_S3_HOST)
        bucket = conn.get_bucket(settings.AWS_STORAGE_BUCKET_NAME)
        if media_root is not None:
            for key in bucket.list("media"):
                physical_files.add(key.name[6:])  # strip the leading "media/" prefix

        # Compute the difference and delete those files
        deletables = physical_files - db_files
        deletables = list(filter(None, deletables))

        if options['info']:
            for i in deletables:
                print(i)

        if len(deletables):
            print("These files are not associated with any object. Are you sure "
                  "you want to delete these files?\n"
                  "If you're unsure answer 'no'.", end="\n")
            res = input(bcolors.WARNING + bcolors.BOLD + "Type 'yes' to continue, or 'no' to cancel: " + bcolors.ENDC)
            if res == 'yes':
                for file_ in deletables:
                    k = Key(bucket, "media/" + file_)
                    print(k.delete())
            else:
                print(bcolors.FAIL + bcolors.BOLD + "The last operation was cancelled" + bcolors.ENDC)
        else:
            print(bcolors.OKGREEN + bcolors.BOLD + "Nothing to delete" + bcolors.ENDC)
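
# Hypothetical invocation, assuming this command lives in
# management/commands/delete_orphan_media.py and declares the --info flag in
# add_arguments (not shown in the snippet):
#
#   python manage.py delete_orphan_media --info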
def test_bwa(tmpdir):
    work_dir = str(tmpdir)
    create_config(work_dir)
    subdir = '/mnt/ephemeral/toil-scripts/bwa'
    os.makedirs(os.path.join(subdir, 'workDir'))
    # URLs for the hg38 chr6 reference fasta and its BWA index files
    ref = 'https://s3-us-west-2.amazonaws.com/cgl-pipeline-inputs/alignment/ci/hg38_chr6.fa'
    amb = 'https://s3-us-west-2.amazonaws.com/cgl-pipeline-inputs/alignment/ci/hg38_chr6.fa.amb'
    ann = 'https://s3-us-west-2.amazonaws.com/cgl-pipeline-inputs/alignment/ci/hg38_chr6.fa.ann'
    bwt = 'https://s3-us-west-2.amazonaws.com/cgl-pipeline-inputs/alignment/ci/hg38_chr6.fa.bwt'
    fai = 'https://s3-us-west-2.amazonaws.com/cgl-pipeline-inputs/alignment/ci/hg38_chr6.fa.fai'
    pac = 'https://s3-us-west-2.amazonaws.com/cgl-pipeline-inputs/alignment/ci/hg38_chr6.fa.pac'
    sa = 'https://s3-us-west-2.amazonaws.com/cgl-pipeline-inputs/alignment/ci/hg38_chr6.fa.sa'
    # Call Pipeline
    try:
        subprocess.check_call(['python', '-m', 'toil_scripts.batch_alignment.bwa_alignment',
                               os.path.join(subdir, 'jstore'),
                               '--config', os.path.join(work_dir, 'config.txt'),
                               '--retryCount', '1',
                               '--s3-dir', 's3://cgl-driver-projects/test/ci',
                               '--workDir', os.path.join(subdir, 'workDir'),
                               '--ref', ref,
                               '--amb', amb,
                               '--ann', ann,
                               '--bwt', bwt,
                               '--fai', fai,
                               '--pac', pac,
                               '--sa', sa,
                               '--library', 'test',
                               '--file-size', '1G'])
    finally:
        shutil.rmtree(subdir)
        conn = S3Connection()
        b = Bucket(conn, 'cgl-driver-projects')
        k = Key(b)
        k.key = 'test/ci/ci_test.bam'
        k.delete()
def test_rnaseq_cgl(tmpdir):
    work_dir = str(tmpdir)
    create_config_and_manifest(work_dir)
    subdir = '/mnt/ephemeral/toil-scripts/rnaseq/'
    os.makedirs(os.path.join(subdir, 'workDir'))
    sample = 's3://cgl-pipeline-inputs/rnaseq_cgl/ci/chr6_sample.tar.gz'
    # Call Pipeline
    try:
        base_command = ['toil-rnaseq', 'run',
                        '--config', os.path.join(work_dir, 'toil-rnaseq.config'),
                        os.path.join(subdir, 'jstore'),
                        '--retryCount', '1',
                        '--workDir', os.path.join(subdir, 'workDir')]
        # Run with --samples
        subprocess.check_call(base_command + ['--samples', sample])
        # Run with manifest
        subprocess.check_call(base_command + ['--manifest', os.path.join(work_dir, 'toil-rnaseq-manifest.tsv')])
    finally:
        shutil.rmtree(subdir)
        conn = S3Connection()
        b = Bucket(conn, 'cgl-driver-projects')
        k = Key(b)
        k.key = 'test/ci/chr6_sample.tar.gz'
        k.delete()
def test_exome(tmpdir):
    work_dir = str(tmpdir)
    create_config_and_manifest(work_dir)
    subdir = '/mnt/ephemeral/toil-scripts/exome/'
    os.makedirs(os.path.join(subdir, 'workDir'))
    # normal_bam = 's3://cgl-pipeline-inputs/exome/ci/chr6.normal.bam'
    # tumor_bam = 's3://cgl-pipeline-inputs/exome/ci/chr6.tumor.bam'
    # Call Pipeline
    try:
        base_command = ['toil-exome', 'run',
                        '--config', os.path.join(work_dir, 'toil-exome.config'),
                        os.path.join(subdir, 'jstore'),
                        '--workDir', os.path.join(subdir, 'workDir')]
        # Run with --samples
        # subprocess.check_call(base_command + ['--samples', sample])
        # Run with manifest
        subprocess.check_call(base_command + ['--manifest', os.path.join(work_dir, 'toil-exome-manifest.tsv')])
    finally:
        shutil.rmtree(subdir)
        conn = S3Connection()
        b = Bucket(conn, 'cgl-driver-projects')
        k = Key(b)
        k.key = 'test/ci/exome-ci-test.tar.gz'
        k.delete()
def handle_DELETE(request):
    """ Handle file deletion requests. For this, we use the Amazon Python SDK,
    boto.
    """
    try:
        import boto
        from boto.s3.connection import Key, S3Connection
        boto.set_stream_logger('boto')
        S3 = S3Connection(aws_access_key_id=settings.AWS_ACCESS_KEY_ID,
                          aws_secret_access_key=settings.AWS_SECRET_ACCESS_KEY)
    except ImportError:
        boto = None
        print("Could not import boto, the Amazon SDK for Python.")
        print("Deleting files will not work.")
        print("Install boto with")
        print("$ pip install boto")

    if boto:
        bucket_name = request.POST.get('bucket')
        key_name = request.POST.get('key')
        try:
            aws_bucket = S3.get_bucket(bucket_name, validate=False)
            aws_key = Key(aws_bucket, key_name)
            aws_key.delete()
            return make_response(200)
        except Exception as err:
            print(err)
            return make_response(500)
    else:
        return make_response(500)


def make_response(status=200, content=None):
    """ Construct an HTTP response. Fine Uploader expects 'application/json'.
    """
    response = HttpResponse()
    response.status_code = status
    response['Content-Type'] = "application/json"
    response.content = content
    return response
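
# Hypothetical usage from a view; the JSON payload is illustrative. Fine
# Uploader mainly cares about the status code and the 'application/json'
# content type:
import json

def example_view(request):
    return make_response(200, json.dumps({'success': True}))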