def create_annotation_job_request():
    # Parse redirect URL query parameters for S3 object info
    # ref: http://flask.pocoo.org/docs/1.0/reqcontext/
    bucket_name = request.args.get('bucket')
    key_name = request.args.get('key')
    results_bucket_name = app.config['AWS_S3_RESULTS_BUCKET']
    sns_job_request_topic = app.config['AWS_SNS_JOB_REQUEST_TOPIC']

    # Extract the job ID, input file name, and user ID from the S3 key
    job_id = key_name.split('/')[2].split('~')[0]
    file_name = key_name.split('/')[2].split('~')[1]
    user_id = key_name.split('/')[1]
    email = get_profile(identity_id=user_id).email

    # Persist job to database
    try:
        # ref: http://boto3.readthedocs.io/en/latest/guide/dynamodb.html
        dynamodb = boto3.resource('dynamodb',
                                  region_name=app.config['AWS_REGION_NAME'])
        ann_table = dynamodb.Table('hongleizhou_annotations')
    except ClientError as e:
        app.logger.error(e)
        return forbidden(e)

    data = {
        'job_id': job_id,
        'user_id': user_id,
        'email': email,
        'submit_time': int(time.time()),
        'input_file_name': file_name,
        's3_inputs_bucket': bucket_name,
        's3_key_input_file': key_name,
        's3_results_bucket': results_bucket_name,
        'job_status': 'PENDING'
    }
    ann_table.put_item(Item=data)
    app.logger.info('Updated DynamoDB: {}'.format(data))

    # Send message to request queue
    try:
        # ref: http://boto3.readthedocs.io/en/latest/reference/services/sns.html#SNS.Topic.publish
        data['url'] = request.url
        data['role'] = get_profile(identity_id=user_id).role
        sns = boto3.resource('sns', region_name=app.config['AWS_REGION_NAME'])
        topic = sns.Topic(sns_job_request_topic)
        topic.publish(Message=json.dumps(data))
        app.logger.info('Published message to {}: {}'.format(topic, data))
        return render_template('annotate_confirm.html', job_id=job_id)
    except ClientError as e:
        app.logger.error(e)
        return forbidden(e)
def create_annotation_job_request():
    # Get bucket name, key, and job ID from the S3 redirect URL
    bucket_name = request.args.get('bucket')
    key = request.args.get('key')

    # Extract the job ID and input file name from the S3 key
    whole_file_name = key.split('/')[2]
    job_id = whole_file_name.split("~")[0]
    input_file_name = whole_file_name.split("~")[1]
    submit_time = int(time.time())
    user_id = session['primary_identity']
    recipients = get_profile(identity_id=user_id).email
    user_role = get_profile(identity_id=user_id).role

    # Persist job to database
    db_data = {
        "job_id": job_id,
        "user_id": user_id,
        "input_file_name": input_file_name,
        "s3_inputs_bucket": bucket_name,
        "s3_key_input_file": key,
        "submit_time": submit_time,
        "job_status": "PENDING"
    }
    dynamodb = boto3.resource('dynamodb',
                              region_name=app.config['AWS_REGION_NAME'])
    ann_table = dynamodb.Table(app.config['AWS_DYNAMODB_ANNOTATIONS_TABLE'])
    try:
        ann_table.put_item(Item=db_data)
    except ClientError as e:
        app.logger.error(f"Failed to put item into database: {e}")

    # Send message to the job request topic: the same payload plus the
    # notification fields the downstream annotator needs
    client = boto3.client('sns', region_name=app.config['AWS_REGION_NAME'])
    topic_arn = app.config['AWS_SNS_JOB_REQUEST_TOPIC']
    queue_data = {**db_data, "recipients": recipients, "user_role": user_role}
    try:
        client.publish(TopicArn=topic_arn, Message=json.dumps(queue_data))
    except ClientError as e:
        app.logger.error(f"Publish to SNS failed: {e}")

    return render_template('annotate_confirm.html', job_id=job_id)
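# --- Added sketch (not from the original code): the consumer side of the SNS
# publish above. An annotator instance might read the job request off an SQS
# queue subscribed to the topic. The queue URL and region are hypothetical
# placeholders, and the envelope parsing assumes raw message delivery is
# disabled (the SQS body then wraps the payload under a 'Message' key).
import json
import boto3

def poll_job_requests(queue_url):
    sqs = boto3.client('sqs', region_name='us-east-1')
    resp = sqs.receive_message(QueueUrl=queue_url,
                               WaitTimeSeconds=20,
                               MaxNumberOfMessages=1)
    for msg in resp.get('Messages', []):
        envelope = json.loads(msg['Body'])      # SNS envelope
        job = json.loads(envelope['Message'])   # the queue_data dict above
        print(job['job_id'], job['s3_key_input_file'])
        sqs.delete_message(QueueUrl=queue_url,
                           ReceiptHandle=msg['ReceiptHandle'])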
def delete(self, id):
    from auth import get_profile
    atts = get_profile()
    if not atts['superuser']:
        return action_401()
    self.bundle = self.model.find_one({'_id': str(id)})
    result = self.delete_obj()
    if not result:
        return result
    # Remove the asset's files from disk unless another record still
    # references the same basename.
    for location in self.bundle['@graph']['ma:locator']:
        basename = location['@id']
        duplicates = self.model.find_one({
            "@graph.ma:locator": {"$elemMatch": {"@id": basename}}
        })
        if duplicates is not None:
            return result
        extension = location['ma:hasFormat'].split('/')[-1]
        filename = "{0}.{1}".format(basename, extension)
        try:
            remove(config.MEDIA_DIRECTORY + filename)
        except (IOError, OSError):
            pass
    return result
def post(self, id=None):
    # Verify that a user is logged in.
    from auth import get_profile
    userid = get_profile()['userid']
    if not userid:
        return action_401()

    data = self.request.get_json()

    # Require clips to have a start time. End time is optional.
    if 'start' not in data:
        return bundle_400('clips must have a start time!')

    # Validate media id.
    if 'mediaid' in data:
        query = {'_id': data['mediaid']}
        if not assets.find_one(query):
            return bundle_400('invalid media id!')
    else:
        return bundle_400('clips must have a valid media id!')

    # Populate the new object with all our data.
    self.bundle = self.model()
    self.set_attrs()
    self.bundle['userid'] = userid
    self.bundle['_id'] = str(ObjectId())
    return self.save_bundle()
def auth_filter(self, bundle=None):
    from auth import get_profile
    atts = get_profile()
    if not atts['superuser']:
        bundle = self.acl_filter(atts['username'], bundle)
    self.bundle = bundle
    return self.bundle
def preprocess_bundle(self):
    self.bundle["@graph"]["dc:identifier"] = "%s/%s" % (self.namespace,
                                                        str(self.bundle["_id"]))
    self.bundle["@graph"]["pid"] = str(self.bundle["_id"])
    from auth import get_profile
    atts = get_profile()
    if not atts['superuser']:
        self.bundle["@graph"]["dc:creator"] = atts['username']
def set_disallowed_atts(self):
    self.disallowed_atts = [
        "dc:identifier", "pid", "dc:type", "url", "ma:duration"
    ]
    from auth import get_profile
    atts = get_profile()
    if not atts['superuser']:
        self.disallowed_atts.append("dc:creator")
def acl_write_check(self, bundle=None):
    if super(Annotation, self).acl_write_check(bundle=self.bundle):
        return True
    from auth import get_profile
    atts = get_profile()
    collection = request.args.get('collection')
    method = request.method
    name = atts['username']

    def can_write_to_vid(vid, name):
        # Check whether the user has write access to any collection the
        # video is a member of.
        for col in vid['@graph']['ma:isMemberOf']:
            query = {
                "@graph.pid": col['@id'],
                "$or": [{"@graph.dc:rights.write": {"$in": [name]}},
                        {"@graph.dc:creator": name}]
            }
            if ags.find_one(query) is not None:
                return True
        return False

    if method == 'POST':
        if collection is not None:
            # Does the user have write access to this collection?
            query = {
                "@graph.pid": collection,
                "$or": [{"@graph.dc:rights.write": {"$in": [name]}},
                        {"@graph.dc:creator": name}]
            }
            return ags.find_one(query) is not None
        else:
            vid_id = json.loads(request.data)['media'][0]['id']
            return can_write_to_vid(assets.find_one(vid_id), name)

    pid = bundle['@graph']['pid']
    vid = assets.find_one(bundle["@graph"]["dc:relation"])
    # Default to an empty list so the membership test is safe when the video
    # has no "ma:hasPolicy" attribute.
    required = pid in vid["@graph"].get("ma:hasPolicy", [])
    if required and atts['role'] == 'faculty':
        return True
    return can_write_to_vid(vid, name)
def acl_write_check(self, bundle=None):
    from auth import get_profile
    atts = get_profile()
    if atts['superuser'] or (atts['role'] == 'faculty' and not bundle):
        return True
    if bundle:
        if (bundle["@graph"].get("dc:creator") == atts['username']
                or atts['username'] in bundle['@graph']["dc:rights"]["write"]):
            return True
    return False
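# --- Added illustration (hypothetical, inferred from the attribute accesses
# above): the bundle shape that acl_write_check expects. With
# atts = {'superuser': False, 'role': 'student', 'username': 'bob'}, the
# check returns True because 'bob' appears in dc:rights.write.
example_bundle = {
    "@graph": {
        "dc:creator": "alice",
        "dc:rights": {"write": ["bob", "carol"]},
    }
}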
def auth_filter(self, bundle=None):
    from auth import get_profile
    atts = get_profile()
    if not atts['username']:
        filtered_bundle = self.acl_filter(bundle=bundle)
    elif not atts['superuser']:
        filtered_bundle = self.acl_filter(["public", "BYU"], atts['username'],
                                          atts['role'], bundle)
    else:
        filtered_bundle = bundle if bundle else self.bundle
    return filtered_bundle
def annotate():
    # Open a connection to the S3 service
    s3 = boto3.client('s3',
                      region_name=app.config['AWS_REGION_NAME'],
                      config=Config(signature_version='s3v4'))

    bucket_name = app.config['AWS_S3_INPUTS_BUCKET']
    user_id = session['primary_identity']

    # Generate unique ID to be used as S3 key (name)
    key_name = app.config['AWS_S3_KEY_PREFIX'] + user_id + '/' + \
        str(uuid.uuid4()) + '~${filename}'

    # Redirect to a route that will call the annotator
    redirect_url = str(request.url) + "/job"

    # Get user profile
    profile = get_profile(identity_id=user_id)

    # Get the upload size limit; -1 means unlimited
    limit = -1
    if profile.role == 'free_user':
        limit = app.config['FREE_USER_FILE_LIMIT']
    session.update({'limit': limit})

    # Define policy conditions
    # NOTE: We also must include "x-amz-security-token" since we're
    # using temporary credentials via instance roles
    encryption = app.config['AWS_S3_ENCRYPTION']
    acl = app.config['AWS_S3_ACL']
    expires_in = app.config['AWS_SIGNED_REQUEST_EXPIRATION']
    fields = {
        "success_action_redirect": redirect_url,
        "x-amz-server-side-encryption": encryption,
        "acl": acl
    }
    conditions = [["starts-with", "$success_action_redirect", redirect_url],
                  {"x-amz-server-side-encryption": encryption},
                  {"acl": acl}]

    # Generate the presigned POST call
    presigned_post = s3.generate_presigned_post(Bucket=bucket_name,
                                                Key=key_name,
                                                Fields=fields,
                                                Conditions=conditions,
                                                ExpiresIn=expires_in)

    # TODO: How to redirect to sign up for premium if file size exceeds limit?
    app.logger.info(presigned_post)

    # Render the upload form which will parse/submit the presigned POST
    return render_template('annotate.html', s3_post=presigned_post)
def delete(self, id):
    # Verify that a user is logged in.
    from auth import get_profile
    userid = get_profile()['userid']
    if not userid:
        return action_401()

    query = {'_id': id, 'userid': userid}
    result = self.collection.delete_one(query)
    return jsonify({'success': result.deleted_count == 1})
def get_list(self):
    # Verify that a user is logged in.
    from auth import get_profile
    userid = get_profile()['userid']
    if not userid:
        return action_401()

    query = {'userid': userid}
    results = self.collection.find(query)
    return mongo_jsonify(list(results))
def subscribe():
    if request.method == 'GET':
        # Show subscription form
        return render_template('subscribe.html')

    # https://stripe.com/docs/checkout/flask
    if request.method == 'POST':
        # Set Stripe API key
        stripe.api_key = app.config['STRIPE_SECRET_KEY']

        # Get Stripe token
        stripe_token = request.form['stripe_token']

        # Get user email
        user_id = session['primary_identity']
        user_profile = get_profile(identity_id=user_id)
        user_email = user_profile.email

        # Create new customer in Stripe
        customer = stripe.Customer.create(email=user_email,
                                          source=stripe_token)

        # Subscribe the customer
        subscription = stripe.Subscription.create(
            customer=customer.id,
            items=[{'plan': 'premium_plan'}],
        )

        # Update user role in Globus
        update_profile(identity_id=session['primary_identity'],
                       role='premium_user')

        # Connect to SNS
        try:
            sns = boto3.client('sns',
                               region_name=app.config['AWS_REGION_NAME'])
        except Exception as e:
            print("There was an error connecting to SNS: \n" + str(e))
            return abort(500)

        # Publish user ID to SNS to request a thaw of any archived files
        try:
            topic_arn = app.config['AWS_SNS_THAW_REQUESTS_TOPIC']
            message = json.dumps(user_id)
            sns_response = sns.publish(TopicArn=topic_arn, Message=message)
        except Exception as e:
            print("There was an error publishing a message to SNS: \n" + str(e))
            return abort(500)

        # Show subscription confirmation
        return render_template('subscribe_confirm.html',
                               stripe_id=customer.id)
def annotate():
    # Create a session client to the S3 service
    s3 = boto3.client('s3',
                      region_name=app.config['AWS_REGION_NAME'],
                      config=Config(signature_version='s3v4'))

    bucket_name = app.config['AWS_S3_INPUTS_BUCKET']
    user_id = session['primary_identity']
    profile = get_profile(identity_id=user_id)
    if profile.role == 'free_user':
        free_user = 1
    else:
        free_user = 0

    # Generate unique ID to be used as S3 key (name)
    key_name = app.config['AWS_S3_KEY_PREFIX'] + user_id + '/' + \
        str(uuid.uuid4()) + '~${filename}'

    # Create the redirect URL
    redirect_url = str(request.url) + '/job'

    # Define policy fields/conditions
    # https://docs.aws.amazon.com/AmazonS3/latest/API/sigv4-HTTPPOSTConstructPolicy.html
    encryption = app.config['AWS_S3_ENCRYPTION']
    acl = app.config['AWS_S3_ACL']
    fields = {
        "success_action_redirect": redirect_url,
        "x-amz-server-side-encryption": encryption,
        "acl": acl
    }
    conditions = [["starts-with", "$success_action_redirect", redirect_url],
                  {"x-amz-server-side-encryption": encryption},
                  {"acl": acl}]

    # Generate the presigned POST call
    # https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/s3.html#S3.Client.generate_presigned_post
    try:
        presigned_post = s3.generate_presigned_post(
            Bucket=bucket_name,
            Key=key_name,
            Fields=fields,
            Conditions=conditions,
            ExpiresIn=app.config['AWS_SIGNED_REQUEST_EXPIRATION'])
    except ClientError as e:
        app.logger.error(f"Unable to generate presigned URL for upload: {e}")
        return abort(500)

    # Render the upload form which will parse/submit the presigned POST
    return render_template('annotate.html',
                           s3_post=presigned_post,
                           free_user=free_user)
def acl_write_check(self, bundle=None): from auth import get_profile atts = get_profile() if atts["superuser"] or (atts["role"] == "faculty" and not bundle): return True if bundle: if ( bundle["@graph"].get("dc:creator") == atts["username"] or atts["username"] in bundle["@graph"]["dc:rights"]["write"] ): return True return False
def acl_write_check(self, bundle=None): if super(Annotation, self).acl_write_check(bundle=self.bundle): return True from auth import get_profile atts=get_profile() collection = request.args.get('collection') method = request.method name = atts['username'] def can_write_to_vid(vid, name): # check to see if has write access to the collection for col in vid['@graph']['ma:isMemberOf']: query = { "@graph.pid": col['@id'], "$or": [ {"@graph.dc:rights.write": {"$in": [name] }}, {"@graph.dc:creator": name} ] } if ags.find_one(query) is not None: return True return False if method == 'POST': if collection is not None: # does the user have write access to this? query = { "@graph.pid": collection, "$or": [ {"@graph.dc:rights.write": {"$in": [name] }}, {"@graph.dc:creator": name} ] } return ags.find_one(query) is not None else: vid_id = json.loads(request.data)['media'][0]['id'] return can_write_to_vid(assets.find_one(vid_id), name) pid = bundle['@graph']['pid'] vid=assets.find_one(bundle["@graph"]["dc:relation"]) required = True if pid in vid["@graph"].get("ma:hasPolicy") else False if required and atts['role'] == 'faculty': return True return can_write_to_vid(vid, name)
def annotate():
    # Source: https://stackoverflow.com/questions/34348639/amazon-aws-s3-browser-based-upload-using-post
    # Create a session client to the S3 service
    s3 = boto3.client('s3',
                      region_name=app.config['AWS_REGION_NAME'],
                      config=Config(signature_version='s3v4'))

    bucket_name = app.config['AWS_S3_INPUTS_BUCKET']
    user_id = session['primary_identity']
    profile = get_profile(identity_id=user_id)
    role = profile.role

    # Generate unique ID to be used as S3 key (name)
    key_name = app.config['AWS_S3_KEY_PREFIX'] + user_id + '/' + \
        str(uuid.uuid4()) + '/${filename}'

    # Create the redirect URL
    redirect_url = str(request.url) + '/job'

    # Define policy fields/conditions
    encryption = app.config['AWS_S3_ENCRYPTION']
    acl = app.config['AWS_S3_ACL']
    fields = {
        "success_action_redirect": redirect_url,
        "x-amz-server-side-encryption": encryption,
        "acl": acl
    }
    conditions = [["starts-with", "$success_action_redirect", redirect_url],
                  {"x-amz-server-side-encryption": encryption},
                  {"acl": acl}]

    # Generate the presigned POST call
    try:
        presigned_post = s3.generate_presigned_post(
            Bucket=bucket_name,
            Key=key_name,
            Fields=fields,
            Conditions=conditions,
            ExpiresIn=app.config['AWS_SIGNED_REQUEST_EXPIRATION'])
    except ClientError as e:
        return jsonify({
            'code': 500,
            'status': 'error',
            'message': f'Failed to generate presigned post: {e}'
        })

    # Render the upload form which will parse/submit the presigned POST
    return render_template('annotate.html', s3_post=presigned_post, role=role)
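# --- Added test sketch (not part of the original app): how the presigned
# POST generated above could be exercised outside the browser form. It only
# assumes the documented shape of s3.generate_presigned_post()'s return
# value, a dict with 'url' and 'fields' keys.
import requests

def upload_with_presigned_post(presigned_post, path):
    with open(path, 'rb') as f:
        # S3 expects the policy fields first and the file part last.
        return requests.post(presigned_post['url'],
                             data=presigned_post['fields'],
                             files={'file': (path, f)})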
def annotation_details(id):
    dynamodb = boto3.resource('dynamodb', region_name=AWS_REGION)
    ann_table = dynamodb.Table(USER_TABLE)
    user_name = session.get('primary_identity')
    selected_job = ann_table.query(KeyConditionExpression=Key('job_id').eq(id))
    try:
        # Get the S3 key of the .annot.vcf results file
        key_check = selected_job['Items'][0]['s3_key_result_file']
        # Generate a presigned URL for the results file
        s3_url = boto3.client('s3', config=Config(signature_version='s3v4'))
        url = s3_url.generate_presigned_url(
            ClientMethod='get_object',
            Params={'Bucket': RESULT_BUCKET, 'Key': DIRECTORY + key_check})
        profile = get_profile(identity_id=user_name)
        completion_time = int(selected_job['Items'][0]['complete_time'])
        current_time = int(time.time())
        # interval = app.config['FREE_USER_DATA_RETENTION']
        # For testing the free-access window is 30 seconds; in production it
        # should be 1800 seconds (30 minutes).
        if (current_time - completion_time) <= 30:
            premium = True
        elif selected_job['Items'][0]['user_role'] == 'free_user':
            premium = None
        else:
            premium = True
        completion_time = time.strftime('%Y-%m-%d %H:%M:%S',
                                        time.localtime(completion_time))
    except Exception:
        # The job has no result file yet (e.g. still running)
        premium = url = None
        completion_time = None
    return render_template('annotation_details.html',
                           job=selected_job['Items'][0],
                           complete=completion_time,
                           url=url,
                           premium=premium)
def subscribe():
    user_id = session['primary_identity']
    profile = get_profile(identity_id=user_id)
    if request.method == 'GET':
        if profile.role == 'premium_user':
            return render_template('subscribe_confirm.html')
        else:
            return render_template('subscribe.html')
    elif request.method == 'POST':
        # Subscribe the user
        # https://pippinsplugins.com/stripe-integration-part-7-creating-and-storing-customers/
        stripe_token = request.form.get('stripe_token')
        try:
            stripe.api_key = app.config['STRIPE_SECRET_KEY']
            customer = stripe.Customer.create(email=profile.email,
                                              card=stripe_token,
                                              plan='premium_plan',
                                              description=user_id)
            # Log the customer
            app.logger.info(customer)
            if __add_customer__(user_id, customer) is False:
                return page_not_found('error')
            # Update database
            update_profile(identity_id=user_id, role='premium_user')
            data = {
                'type': 'user',
                'id': user_id,
                'action': 'restore',
                'url': request.url,
                'email': profile.email
            }
            # Publish subscription message
            __publish_archive_status__(data)
            return render_template('subscribe_confirm.html')
        except ClientError as e:
            return page_not_found(e)
    else:
        return page_not_found('error')
def read_override(self, obj, username, role):
    from auth import get_profile
    atts = get_profile()
    if atts['superuser']:
        return True
    for c in obj["@graph"]["ma:isMemberOf"]:
        coll = ags.find_one({"_id": c["@id"]})
        try:
            if (coll["@graph"].get("dc:creator") == atts['username']
                    or atts['username'] in coll['@graph']["dc:rights"]["read"]
                    or coll['@graph']['dc:coverage'] == 'public'):
                return True
        except TypeError:
            pass
        if is_enrolled(coll):
            return True
    return False
def create_annotation_job_request():
    # Get bucket name, key, and job ID from the S3 redirect URL
    bucket_name = str(request.args.get('bucket'))
    s3_key = str(request.args.get('key'))

    # Extract the job ID from the S3 key
    job_id = s3_key.split('/')[2]
    input_file = s3_key.split('/')[3]
    user_id = session['primary_identity']
    profile = get_profile(identity_id=session.get('primary_identity'))
    user_email = profile.email

    # Persist job to database
    # https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/dynamodb.html#DynamoDB.Table.put_item
    dynamodb = boto3.resource('dynamodb',
                              region_name=app.config['AWS_REGION_NAME'])
    ann_table = dynamodb.Table(app.config['AWS_DYNAMODB_ANNOTATIONS_TABLE'])
    data = {
        "job_id": job_id,
        "user_id": user_id,
        "user_email": user_email,
        "input_file_name": input_file,
        "s3_inputs_bucket": bucket_name,
        "s3_key_input_file": s3_key,
        "submit_time": int(time.time()),
        "user_role": profile.role,
        "job_status": "PENDING"
    }
    try:
        ann_table.put_item(Item=data)
    except ClientError:
        abort(500)

    # Send message to request queue
    # https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/sns.html#topic
    sns = boto3.resource('sns', region_name=app.config['AWS_REGION_NAME'])
    topic = sns.Topic(app.config['AWS_SNS_JOB_REQUEST_TOPIC'])
    try:
        # The Topic resource already carries its ARN, so only the message
        # body is passed here.
        topic.publish(Message=json.dumps(data))
    except ClientError:
        abort(500)

    return render_template('annotate_confirm.html', job_id=job_id)
def annotation_details(id):
    # Connect to DynamoDB
    try:
        dynamodb = boto3.resource('dynamodb',
                                  region_name=app.config['AWS_REGION_NAME'])
        table_name = app.config['AWS_DYNAMODB_ANNOTATIONS_TABLE']
        ann_table = dynamodb.Table(table_name)
    except boto3.exceptions.ResourceNotExistsError:
        return abort(500)
    except botocore.exceptions.ClientError:
        return abort(500)

    # https://boto3.amazonaws.com/v1/documentation/api/latest/guide/dynamodb.html#querying-and-scanning
    response = ann_table.query(KeyConditionExpression=Key('job_id').eq(id))
    job = response['Items'][0]

    # Check that the job belongs to the authorized user
    user_id = session['primary_identity']
    profile = get_profile(identity_id=user_id)
    if job['user_id'] != user_id:
        return render_template(
            'error.html',
            title='Not authorized',
            alert_level='danger',
            message="You are not authorized to view this job. \
        If you think you deserve to be granted access, please contact the \
        supreme leader of the mutating genome revolutionary party."), 403

    job['submit_time'] = datetime.fromtimestamp(job['submit_time'])
    free_access_expired = False
    if job['job_status'] == 'COMPLETED':
        if profile.role == 'free_user':
            # For a free user, check whether the free access window has passed
            cur_time = int(time.time())
            if cur_time - job['complete_time'] > 300:
                free_access_expired = True
        else:
            # For a premium user, check whether the result file has been
            # successfully restored
            if "existed" in job and job['existed'] == 'False':
                job['restore_message'] = \
                    "Result file is being restored. Please wait."
        job['complete_time'] = datetime.fromtimestamp(job['complete_time'])
        job['result_file_url'] = create_presigned_download_url(
            job['s3_key_result_file'])

    return render_template('annotation_details.html',
                           annotation=job,
                           free_access_expired=free_access_expired)
def get(self, id):
    if not id:
        return self.get_list()
    from auth import get_profile
    userid = get_profile()['userid']
    if not userid:
        return action_401()
    query = {'_id': str(id), 'userid': userid}
    bundle = self.get_bundle(query)
    if bundle:
        return self.serialize_bundle(bundle)
    return bundle_404()
def acl_write_check(self, bundle=None):
    from auth import get_profile
    atts = get_profile()
    if atts['superuser'] or (atts['role'] == 'faculty' and not bundle):
        return True
    if bundle:
        try:
            if (bundle["@graph"].get("dc:creator") == atts['username']
                    or atts['username'] in bundle['@graph']["dc:rights"]["write"]):
                return True
        except TypeError:
            pass
        # Fall back to write access granted via collection membership
        for member in bundle["@graph"]["ma:isMemberOf"]:
            coll = ags.find_one({"_id": member["@id"]})
            if (coll["@graph"].get("dc:creator") == atts['username']
                    or atts['username'] in coll['@graph']["dc:rights"]["write"]):
                return True
    return False
def create_annotation_job_request():
    # Parse redirect URL query parameters for S3 object info
    bucket_name = request.args.get('bucket')
    key_name = request.args.get('key')
    jobid_and_file = key_name.split('/')[-1]
    user_id = key_name.split('/')[1]
    job_id = jobid_and_file.split('~')[0]
    file_name = jobid_and_file.split('~')[-1]

    # Persist job to database
    submit_time = int(time.time())

    # Create a job item and persist it to the annotations database
    data = {
        "job_id": job_id,
        "user_id": user_id,
        "input_file_name": file_name,
        "s3_inputs_bucket": bucket_name,
        "s3_key_input_file": key_name,
        "submit_time": submit_time,
        "job_status": "PENDING"
    }
    dynamodb = boto3.resource('dynamodb',
                              region_name=app.config['AWS_REGION_NAME'])
    ann_table = dynamodb.Table(app.config['AWS_DYNAMODB_ANNOTATIONS_TABLE'])
    ann_table.put_item(Item=data)

    # Send message to request queue
    sns = boto3.client('sns', region_name=app.config['AWS_REGION_NAME'])
    response = sns.publish(TopicArn=app.config['AWS_SNS_JOB_REQUEST_TOPIC'],
                           Message=json.dumps(data))

    # Glacier: free-user jobs are also announced on the archival topic
    if get_profile(identity_id=user_id).role == 'free_user':
        data = {
            "job_id": job_id,
            "user_id": user_id,
            "input_file_name": file_name,
            "s3_inputs_bucket": bucket_name,
            "s3_key_input_file": key_name
        }
        sns_glacier = sns.publish(
            TopicArn=app.config['AWS_SNS_JOB_GLACIER_TOPIC'],
            Message=json.dumps(data))
        print(f'Glacier works: {sns_glacier}')

    return render_template('annotate_confirm.html', job_id=job_id)
def annotate():
    # Open a connection to the S3 service
    s3 = boto3.client('s3',
                      region_name=app.config['AWS_REGION_NAME'],
                      config=Config(signature_version='s3v4'))

    # Get the inputs bucket name
    bucket_name = app.config['AWS_S3_INPUTS_BUCKET']

    # Get user profile information
    user_id = session.get('primary_identity')
    profile = get_profile(identity_id=user_id)

    # Generate unique ID to be used as S3 key (name)
    key_name = app.config['AWS_S3_KEY_PREFIX'] + user_id + '/' + \
        str(uuid.uuid4()) + '~${filename}'

    # Redirect to a route that will call the annotator
    redirect_url = str(request.url) + "/job"

    # Define policy conditions
    # NOTE: We also must include "x-amz-security-token" since we're
    # using temporary credentials via instance roles
    encryption = app.config['AWS_S3_ENCRYPTION']
    acl = app.config['AWS_S3_ACL']
    expires_in = app.config['AWS_SIGNED_REQUEST_EXPIRATION']
    fields = {
        "success_action_redirect": redirect_url,
        "x-amz-server-side-encryption": encryption,
        "acl": acl
    }
    conditions = [["starts-with", "$success_action_redirect", redirect_url],
                  {"x-amz-server-side-encryption": encryption},
                  {"acl": acl}]

    # Generate the presigned POST call
    # source: http://boto3.readthedocs.io/en/latest/reference/services/s3.html#S3.Client.generate_presigned_post
    presigned_post = s3.generate_presigned_post(Bucket=bucket_name,
                                                Key=key_name,
                                                Fields=fields,
                                                Conditions=conditions,
                                                ExpiresIn=expires_in)

    # Render the upload form which will parse/submit the presigned POST
    return render_template('annotate.html', s3_post=presigned_post)
def get_expired(self, limit=0):
    '''Find any expired media and show why they are considered expired.'''
    # Only superusers should be able to review this report.
    from auth import get_profile
    atts = get_profile()
    if not atts['superuser']:
        return action_401()

    current_year = datetime.utcnow().year
    stale_date = datetime.utcnow() - timedelta(days=730)
    stale_str = datetime.strftime(stale_date, '%Y-%m-%d')

    not_hlr_query = {'$or': [{'@graph.dc:hlr': {'$exists': False}},
                             {'@graph.dc:hlr': False}]}
    exp_date_query = {'$and': [not_hlr_query,
                               {'@graph.dc:expirationdate': {'$lt': current_year}}]}
    stale_query = {'$and': [not_hlr_query,
                            {'@graph.dc:lastviewed': {'$lt': stale_str}}]}
    never_viewed_query = {'$and': [not_hlr_query,
                                   {'@graph.dc:lastviewed': {'$exists': False}}]}

    exp_date = [v for v in self.model.find(exp_date_query).limit(limit)]
    stale = [v for v in self.model.find(stale_query).limit(limit)]
    never_viewed = [v for v in self.model.find(never_viewed_query).limit(limit)]

    title = 'Expired Media'
    body = '<h1>%s Media</h1>' % title
    if len(exp_date) > 0:
        exp_date_fields = ['ma:title', 'dc:creator', 'dc:expirationdate',
                           'ma:isMemberOf']
        exp_date_headings = ['Title', 'Owner', 'Expiration Date', 'Collections']
        exp_date_table = build_html_table(exp_date, exp_date_fields,
                                          exp_date_headings)
        body += '<h2>%s</h2>%s' % ('Past Expiration Date', exp_date_table)
    if len(stale) > 0:
        stale_fields = ['ma:title', 'dc:creator', 'dc:date', 'dc:lastviewed',
                        'ma:isMemberOf']
        stale_headings = ['Title', 'Owner', 'Upload Date', 'Last Viewed',
                          'Collections']
        stale_table = build_html_table(stale, stale_fields, stale_headings)
        body += '<h2>%s</h2>%s' % ('Not Viewed Recently', stale_table)
    if len(never_viewed) > 0:
        never_viewed_fields = ['ma:title', 'dc:creator', 'dc:date',
                               'ma:isMemberOf']
        never_viewed_headings = ['Title', 'Owner', 'Upload Date', 'Collections']
        never_viewed_table = build_html_table(never_viewed,
                                              never_viewed_fields,
                                              never_viewed_headings)
        body += '<h2>%s</h2>%s' % ('Never Viewed', never_viewed_table)

    html = '<html><head><title>%s</title></head><body>%s</body></html>' % (title, body)
    return Response(html, status=200, mimetype="text/html")
def subscribe_post():
    user_id = session.get('primary_identity')
    profile = get_profile(identity_id=user_id)
    if profile.role == 'premium_user':
        abort(403)

    stripe.api_key = app.config['STRIPE_SECRET_KEY']
    token = request.form['stripe_token']
    customer = None

    # https://stripe.com/docs/api/customers/create?lang=python
    try:
        customer = stripe.Customer.create(
            source=token,  # obtained with Stripe.js
            name=profile.name,
            email=profile.email)
    except Exception as e:
        abort(500)

    # https://stripe.com/docs/api/subscriptions/create
    try:
        subscription = stripe.Subscription.create(
            customer=customer['id'],
            items=[{"plan": "premium_plan"}])
    except Exception as e:
        abort(500)

    update_profile(identity_id=session['primary_identity'],
                   role="premium_user")

    # https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/sns.html#topic
    sns = boto3.resource('sns', region_name=app.config['AWS_REGION_NAME'])
    topic = sns.Topic(app.config['AWS_SNS_RESULTS_RESTORE_TOPIC'])
    data = {'user_id': user_id}
    try:
        # The Topic resource already carries its ARN; only the message body
        # needs to be supplied.
        topic.publish(Message=json.dumps(data))
    except ClientError as e:
        logging.error(e)
        abort(500)

    return render_template('subscribe_confirm.html', stripe_id=customer['id'])
def patch(self, id):
    from auth import get_profile
    atts = get_profile()
    if self.request.json is None or 'replacement_file' not in self.request.json:
        return super(MediaAsset, self).patch(id)
    if not atts['superuser']:
        return action_401()

    found = assets.find_one({'_id': id})
    if not found:
        return bundle_404()

    file_id = None
    to_delete = []
    # Find existing filenames
    for f in found['@graph']['ma:locator']:
        if file_id is not None and f['@id'] != file_id:
            raise Exception(
                "Cannot replace file; multiple files with different IDs")
        file_id = f['@id']
        extension = f['ma:hasFormat'].split('/')[-1]
        fpath = config.MEDIA_DIRECTORY + f['@id'] + '.' + extension
        to_delete.append(fpath)

    from os import remove
    for f in to_delete:
        try:
            remove(f)
        except OSError:
            pass

    assets.update({'_id': id}, {'$set': {'@graph.ma:locator': []}})
    result = self.set_new_file(id, file_id,
                               self.request.json['replacement_file'])
    if not result[0]:
        return bundle_400(result[1])
    return self.serialize_bundle(assets.find_one({'_id': id}))
def delete(self, id):
    from auth import get_profile
    atts = get_profile()
    if not atts['superuser']:
        return action_401()
    self.bundle = self.model.find_one({'_id': str(id)})
    result = self.delete_obj()
    if not result:
        return result
    for location in self.bundle['@graph']['ma:locator']:
        basename = location['@id']
        duplicates = self.model.find_one(
            {"@graph.ma:locator": {"$elemMatch": {"@id": basename}}})
        if duplicates is not None:
            return result
        extension = location['ma:hasFormat'].split('/')[-1]
        filename = "{0}.{1}".format(basename, extension)
        try:
            remove(config.MEDIA_DIRECTORY + filename)
        except IOError:
            pass
    return result
def cancel_subscription():
    user_id = session['primary_identity']
    profile = get_profile(identity_id=user_id)
    try:
        if __delete_customer__(user_id) is False:
            return page_not_found('error')
        data = {
            'type': 'user',
            'id': user_id,
            'action': 'archive',
            'url': request.url,
            'email': profile.email
        }
        # Publish cancel-subscription message to archive the user's files
        __publish_archive_status__(data)
        update_profile(identity_id=user_id, role='free_user')
        return render_template('home.html')
    except ClientError as e:
        print(e)
        return page_not_found(e)
def set_disallowed_atts(self):
    self.disallowed_atts = ["dc:identifier", "pid", "dc:type"]
    from auth import get_profile
    atts = get_profile()
    if not atts['superuser']:
        self.disallowed_atts.append("dc:creator")
def acl_write_check(self, bundle=None):
    from auth import get_profile
    atts = get_profile()
    return atts['superuser']
def set_disallowed_atts(self):
    from auth import get_profile
    atts = get_profile()
    if not atts['superuser']:
        self.disallowed_atts = ['role', 'superuser']
def set_disallowed_atts(self):
    from auth import get_profile
    atts = get_profile()
    if not atts['superuser']:
        self.disallowed_atts.append("dc:creator")
def subscribe():
    # Get the Stripe token from the POST body
    # https://stripe.com/docs/api
    info = request.get_data()
    if not info:
        return error(404, "parameters not found!!!")
    info = info.decode('utf-8')
    stripe_token = info.split('=')[1]

    # Create new customer
    try:
        stripe.api_key = app.config['STRIPE_SECRET_KEY']
        response = stripe.Customer.create(
            description=key_pre,
            source=stripe_token  # obtained with Stripe.js
        )
    except Exception:
        return error(500, 'fail to upgrade...')

    # Update user role
    update_profile(identity_id=session['primary_identity'],
                   role='premium_user')

    try:
        dynamoDB = boto3.resource('dynamodb', region_name=region)
    except ClientError as e:
        return error(500, "fail to connect to dynamoDB: " + str(e))
    try:
        ann_table = dynamoDB.Table(app.config['AWS_DYNAMODB_ANNOTATIONS_TABLE'])
    except ClientError as e:
        return error(404, "Table not found: " + str(e))
    try:
        response_from_table = ann_table.query(
            IndexName='user_id_index',
            KeyConditionExpression=Key('user_id').eq(session['primary_identity']))
    except ClientError as e:
        return error(500, str(e))

    # Flag all of the user's jobs as belonging to a premium user
    for item in response_from_table['Items']:
        try:
            ann_table.update_item(
                Key={'job_id': item['job_id']},
                UpdateExpression="SET user_role = :ud",
                ExpressionAttributeValues={':ud': 'premium_user'},
                ConditionExpression=Key('user_role').eq('free_user'))
        except ClientError as e:
            return error(500, "fail to update job status in the target table: " + str(e))

    user_email = get_profile(identity_id=session['primary_identity']).email

    # Send job restore request
    try:
        client = boto3.client('sns', region_name=region)
    except ClientError as e:
        return error(500, str(e))
    user_info = {'user_id': session['primary_identity'],
                 'user_email': user_email}
    try:
        client.publish(Message=json.dumps({'default': json.dumps(user_info)}),
                       MessageStructure='json',
                       TopicArn=app.config['AWS_SNS_JOB_RESTORE_TOPIC'])
    except ClientError as e:
        return error(500, str(e))

    return render_template('subscribe_confirm.html', stripe_id=response['id'])
def annotation_details(id):
    try:
        dynamoDB = boto3.resource('dynamodb', region_name=region)
    except ClientError as e:
        return error(500, str(e))
    try:
        ann_table = dynamoDB.Table(table_name)
    except ClientError as e:
        return error(500, str(e))
    try:
        response = ann_table.query(KeyConditionExpression=Key('job_id').eq(id))
    except ClientError as e:
        return error(500, str(e))

    info = response['Items'][0]
    user_role = get_profile(identity_id=session['primary_identity']).role

    # Check that the current user owns the job
    if info['user_id'] != session['primary_identity']:
        return error(403, "Access denied")

    # Check whether the user is authorized to download
    download_indicator = 1
    restoring_indicator = 0
    if (int(time.time()) - info['submit_time'] > app.config['FREE_USER_DATA_RETENTION']
            and user_role == 'free_user'):
        download_indicator = 0
    if ('results_file_archive_id' in info) and user_role == 'premium_user':
        restoring_indicator = 1

    # Transform time format
    info['submit_time'] = time.strftime("%Y-%m-%d %H:%M",
                                        time.localtime(info['submit_time']))

    # If the job hasn't completed yet
    if info['job_status'] != 'COMPLETED':
        return render_template('annotation.html', information=info)

    # If the job is completed
    info['complete_time'] = time.strftime("%Y-%m-%d %H:%M",
                                          time.localtime(info['complete_time']))
    result_file = info['s3_key_result_file']

    # Generate a download URL if download is allowed
    download_url = ""
    if download_indicator == 1:
        s3_client = boto3.client('s3', region_name=region)
        # https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/s3.html#S3.Client.generate_presigned_url
        try:
            download_url = s3_client.generate_presigned_url(
                'get_object',
                Params={'Bucket': result_bucket, 'Key': result_file},
                ExpiresIn=app.config['AWS_SIGNED_REQUEST_EXPIRATION'])
        except ClientError as e:
            logging.error(e)

    # Update the information passed to the template
    info['s3_key_result_file'] = info['input_file_name'].split('.')[0] + '.annot.vcf'
    info['s3_key_log_file'] = info['input_file_name'] + '.count.log'
    info.update({'download_url': download_url})
    info.update({'upgrade_url': app.config['AWS_URL_PREFIX'] + 'subscribe'})
    info.update({'download': download_indicator})
    info.update({'restoring': restoring_indicator})
    return render_template('annotation.html', information=info)
def create_annotation_job_request():
    # Get bucket name, key, and job ID from the S3 redirect URL.
    # Check for a missing parameter before converting to str, since str()
    # would turn a missing value into the string "None".
    keys = request.args.get('key')
    if keys is None:
        return error(404, "parameters not found")
    keys = str(keys)

    # Extract the job_id and fname from the S3 key
    jID = keys.split('/')[2].split("~")
    job_id = jID[0]
    fname = jID[1]
    user_id = session['primary_identity']
    request_topic = app.config['AWS_SNS_JOB_REQUEST_TOPIC']

    # Reject anything that is not a .vcf file
    cType = fname.split(".")[1]
    if cType == 'vcf':
        cType = "text/x-vcard"
    else:
        cType = "invalid/not vcf"
        return render_template('annotate_confirm.html', job_id=None)

    # Persist job to database
    myData = {
        'job_id': job_id,
        'user_id': user_id,
        'input_file_name': fname,
        's3_inputs_bucket': input_bucket,
        's3_key_input_file': keys,
        'submit_time': int(time.time()),
        'job_status': 'PENDING',
        'user_role': get_profile(identity_id=session['primary_identity']).role
    }

    # Check the size of the uploaded input file
    try:
        s3_resource = boto3.resource('s3',
                                     region_name=app.config['AWS_REGION_NAME'])
        obj_check = s3_resource.Object(input_bucket, keys)
    except ClientError as e:
        return error(500, str(e))
    # https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/s3.html#S3.Object.content_length
    length_check = obj_check.content_length
    if (length_check > app.config['AWS_JOB_LIMIT']
            and get_profile(identity_id=session['primary_identity']).role == 'free_user'):
        return render_template('please_upgrade.html')

    # Connect to the database and retrieve the user's existing jobs
    try:
        dynamoDB = boto3.resource('dynamodb', region_name=region)
        ann_table = dynamoDB.Table(table_name)
        myItems = ann_table.query(
            IndexName='user_id_index',
            KeyConditionExpression=Key('user_id').eq(user_id))
    except ClientError as e:
        return error(500, str(e))
    myItems = myItems['Items']

    # Update DynamoDB only if the job ID is not already present
    if any(i.get('job_id') == job_id for i in myItems):
        return error(500, "job id already exists")
    try:
        ann_table.put_item(Item=myData)
    except ClientError as e:
        return error(500, "fail to put items in the target table: " + str(e))

    # Send message to request queue
    try:
        client = boto3.client('sns', region_name=region)
    except ClientError as e:
        return error(500, "fail to connect to boto3 server: " + str(e))

    # Update myData with some information about the user
    user_profile = get_profile(identity_id=user_id)
    myData.update({'user_name': str(user_profile.name)})
    myData.update({'user_email': str(user_profile.email)})
    myData.update({'user_role': str(user_profile.role)})

    # Publish the message
    try:
        client.publish(Message=json.dumps({'default': json.dumps(myData)}),
                       MessageStructure='json',
                       TopicArn=request_topic)
    except ClientError as e:
        return error(500, "fail to publish message: " + str(e))

    return render_template('annotate_confirm.html', job_id=job_id)