def update(event, context):
    """PUT handler: mark asset <asset_id> as uploaded.

    Returns 412 on an illegal state transition, 404 when the asset is
    missing, otherwise 202 with the asset's current state.
    """
    logger.debug('event: {}'.format(event))

    def _error(code, message):
        # Small helper so each failure path reads the same way.
        return {'statusCode': code, 'body': {'error_message': message}}

    try:
        asset_id = event['path']['asset_id']
        asset = AssetModel.get(hash_key=asset_id)
        asset.mark_uploaded()
    except AssertionError as e:
        return _error(httplib.PRECONDITION_FAILED,
                      'ASSET {} state incorrect: {}'.format(asset_id, e))
    except DoesNotExist:
        return _error(httplib.NOT_FOUND,
                      'ASSET {} not found'.format(asset_id))
    return {"statusCode": httplib.ACCEPTED, "body": {'status': asset.state}}
def mark_received(self):
    """Record the asset as RECEIVED after the s3 objectCreated:Put event."""
    self.state = State.RECEIVED.name
    logger.debug('mark asset received: %s', self.asset_id)
    self.save()
def mark_deleted(self):
    """Soft-delete: flag the asset DELETED without removing the record."""
    self.state = State.DELETED.name
    logger.debug('mark asset deleted: %s', self.asset_id)
    self.save()
def update(event, context):
    """PUT handler: flag asset <asset_id> as uploaded.

    202 on success; 412 if the state transition is illegal; 404 if the
    asset does not exist.
    """
    logger.debug(f'event: {event}')
    try:
        asset_id = event['path']['asset_id']
        asset = AssetModel.get(hash_key=asset_id)
        asset.mark_uploaded()
    except AssertionError as e:
        body = {'error_message': f'ASSET {asset_id} state incorrect: {e}'}
        return {'statusCode': httplib.PRECONDITION_FAILED, 'body': body}
    except DoesNotExist:
        body = {'error_message': f'ASSET {asset_id} not found'}
        return {'statusCode': httplib.NOT_FOUND, 'body': body}
    return {"statusCode": httplib.ACCEPTED, "body": {'status': asset.state}}
def handler(event, context):
    """S3-triggered: forward each uploaded attachment to SNOW, then remove
    the source object from S3 (best effort; failures are logged only)."""
    logger.debug("Event: %s", json.dumps(event))
    error = validate_environment()
    if error:
        logger.error(error)
        return
    try:
        for record in event['Records']:
            bucket = record['s3']['bucket']['name']
            key = unquote_plus(record['s3']['object']['key'])
            logger.debug(f'bucket key: {key}')
            # Key layout: <customer_ref>/<jsd_attachment_id>/<filename>
            parts = key.split('/')
            customer_ref_key, jsd_attachment_id = parts[0], parts[1]
            logger.debug(f'CustomerRefNo: {customer_ref_key}')
            tmpkey = key.replace(f'{customer_ref_key}/{jsd_attachment_id}/', '')
            logger.debug(f'filename: {tmpkey}')
            download_path = f'/tmp/{tmpkey}'
            s3_client.download_file(bucket, key, download_path)
            try:
                upload_file_to_snow(download_path, customer_ref_key, tmpkey)
            except Exception as ex:
                logger.error(f'Failed: {str(ex)}')
            finally:
                # Always clean up the source object, even if SNOW failed.
                logger.debug(f'Deleting s3 object: {key}')
                s3_client.delete_object(Bucket=bucket, Key=key)
    except Exception as exc:
        logger.error(f'Failed: {str(exc)}')
def event(event, context):
    """S3 notification handler: mark assets received on create, delete the
    record on remove.

    (A full sample S3 event record is documented in the AWS S3 event
    notification format; only eventName and object key are used here.)
    """
    logger.debug('event: {}'.format(event))
    record = event['Records'][0]
    event_name = record['eventName']
    key = record['s3']['object']['key']
    asset_id = key.replace('{}/'.format(os.environ['S3_KEY_BASE']), '')
    try:
        if event_name == 'ObjectCreated:Put':
            try:
                AssetModel.get(hash_key=asset_id).mark_received()
            except UpdateError:
                return {
                    'statusCode': httplib.BAD_REQUEST,
                    'body': {'error_message': 'Unable to update ASSET'}
                }
        elif event_name == 'ObjectRemoved:Delete':
            try:
                asset = AssetModel.get(hash_key=asset_id)
                asset.delete()
            except DeleteError:
                return {
                    'statusCode': httplib.BAD_REQUEST,
                    'body': {'error_message': 'Unable to delete ASSET {}'.format(asset)}
                }
    except DoesNotExist:
        return {
            'statusCode': httplib.NOT_FOUND,
            'body': {'error_message': 'ASSET {} not found'.format(asset_id)}
        }
    return {'statusCode': httplib.ACCEPTED}
def event(event, context):
    """S3 notification handler: derive the asset id from the object key and
    hand it off to loadModel."""
    logger.debug('event: {}'.format(event))
    first_record = event['Records'][0]
    event_name = first_record['eventName']
    key = first_record['s3']['object']['key']
    # The asset id is the key with the configured base prefix stripped.
    prefix = '{}/'.format(os.environ['S3_KEY_BASE'])
    asset_id = key.replace(prefix, '')
    loadModel(asset_id)
def upload_file_to_snow(download_path, cutomer_ref, file_name):
    """PUT a local file to the SNOW attachment endpoint as base64 JSON.

    :param download_path: local path of the file to send
    :param cutomer_ref: customer reference used in the endpoint URL
        (parameter name kept as-is — typo included — for caller compatibility)
    :param file_name: attachment file name reported to SNOW
    """
    logger.debug('Fetch snow api token')
    api_token = get_api_token()
    # Security fix: never log the bearer token — it is a credential.
    headers = {
        "X-IBM-Client-ID": SNOW_X_IBM_CLIENT_ID,
        "Authorization": f'Bearer {api_token}'
    }
    logger.debug('Starting uploading to snow')
    with open(download_path, 'rb') as out:
        encoded_string = base64.b64encode(out.read())
    payload = json.dumps({
        "callingSystem": "FINEOS-SERVICE-DESK",
        "attachments": [{
            "attachment": encoded_string.decode('utf-8'),
            "contentType": "",
            "fileName": file_name
        }]
    })
    # NOTE(review): payload embeds the full file contents; debug level only.
    logger.debug(payload)
    res = requests.request(
        method='PUT',
        url=f'{SNOW_ATTACHMENT_ENDPOINT}/{cutomer_ref}',
        headers=headers,
        data=payload,
        timeout=25)
    logger.debug(f'Upload to SNOW: {res.text}')
def generate_presigned_url(issue_key, file_name, ttl):
    """Create an S3 presigned POST URL for uploading <issue_key>/<file_name>.

    :param issue_key: JSD issue key (first path component of the S3 key)
    :param file_name: destination file name (second path component)
    :param ttl: URL lifetime in seconds (coerced with int())
    :return: (http_status, resp) where resp["ok"] reflects success
    """
    error = None
    status = 200
    resp = {
        "ok": True,
    }
    try:
        upload_url = s3.generate_presigned_post(
            Bucket=S3_BUCKET,
            Key=f'{issue_key}/{file_name}',
            ExpiresIn=int(ttl)
        )
        logger.debug('S3 presigned upload URL: {}'.format(upload_url))
        resp["upload_url"] = upload_url
        resp["issue_key"] = issue_key
    except Exception as e:
        logger.error('Could not generate S3 presigned upload URL failed: {}'.format(str(e)))
        error = str(e)
        status = 500
    # Bug fix: previously "ok" was computed before the try block, so it
    # stayed True even when presigning failed; recompute it here.
    resp["ok"] = not error
    if error:
        resp["error"] = error
        logger.error(error)
    return status, resp
def update_params(self, params):
    """Replace the model params and persist the change."""
    self.params = params
    logger.debug('model params updated: %s', self.asset_id)
    self.save()
def save(self, conditional_operator=None, **expected_values):
    """Persist the model, stamping updatedAt with the current local time.

    NOTE(review): conditional_operator and expected_values are accepted but
    never forwarded to the underlying PynamoDB save — confirm no caller
    relies on them.

    Re-raises whatever the underlying save raises, after logging.
    """
    try:
        self.updatedAt = datetime.now().astimezone()
        logger.debug('saving: {}'.format(self))
        super(AssetModel, self).save()
    except Exception as e:
        logger.error('save {} failed: {}'.format(self.asset_id, e), exc_info=True)
        # Idiom fix: bare raise re-raises with the original traceback intact.
        raise
def asset_list(event, context):
    """GET handler: return every asset in the table."""
    logger.debug('event: {}, context: {}'.format(event, context))
    items = [dict(item) for item in AssetModel.scan()]
    return {
        'statusCode': httplib.OK,
        'body': {'items': items}
    }
def mark_uploaded(self):
    """Transition to UPLOADED; legal only from RECEIVED or UPLOADED.

    :raises AssertionError: when the current state disallows the transition
    """
    legal = [State.RECEIVED.name, State.UPLOADED.name]
    if self.state not in legal:
        raise AssertionError(
            'State: "{}" must be one of {}'.format(self.state, legal))
    self.state = State.UPLOADED.name
    logger.debug('mark asset uploaded: %s', self.asset_id)
    self.save()
def mark_uploaded(self):
    """Mark this asset UPLOADED (PUT on the asset's REST path).

    Only RECEIVED or UPLOADED assets may be marked; anything else raises
    AssertionError.
    """
    valid_states = [State.RECEIVED.name, State.UPLOADED.name]
    if self.state not in valid_states:
        raise AssertionError(
            f'State: "{self.state}" must be one of {valid_states}')
    self.state = State.UPLOADED.name
    logger.debug(f'mark asset uploaded: {self.asset_id}')
    self.save()
def validate_environment():
    """Load JIRA connection settings from SSM into module globals.

    :return: accumulated error string ('' when every lookup succeeded)
    """
    error = ''
    global JIRA_SEVER, JIRA_USER, JIRA_API_KEY
    err, JIRA_SEVER = get_ssm_value(key=Parameters.JIRA_HOST.value)
    error += err
    err, JIRA_USER = get_ssm_value(key=Parameters.JIRA_USER_ID.value)
    error += err
    err, JIRA_API_KEY = get_ssm_value(key=Parameters.JIRA_APP_PASSWORD.value)
    error += err
    # Security fix: previously the API key was logged; log only non-secrets.
    logger.debug(f'JIRA_SEVER: {JIRA_SEVER} {JIRA_USER}')
    return error
def get_upload_url(self, ttl=60):
    """Return a temporary presigned PUT URL for this asset's S3 key.

    :param ttl: url duration in seconds
    """
    client = boto3.client('s3')
    params = {'Bucket': BUCKET, 'Key': self.get_key()}
    put_url = client.generate_presigned_url(
        'put_object', Params=params, ExpiresIn=ttl, HttpMethod='PUT')
    logger.debug('upload URL: {}'.format(put_url))
    return put_url
def respond(event: dict, context) -> dict:
    """Handle an API Gateway OCSP request and return the HTTP response dict.

    An OCSP GET request carries the DER-in-base64 encoded OCSP request in
    the HTTP request URL; an OCSP POST carries the DER encoded request in
    the HTTP request body.
    """
    logger.debug("APIGW event: %s", event)
    http_method = event.get('httpMethod')
    if not http_method:
        # No HTTP method: direct invocation — fall back to a bundled sample.
        with open('config/sample-ocsp-request.txt', 'r') as f:
            der = base64.b64decode(f.read().strip())
    elif http_method == 'GET':
        request_param = event['pathParameters'].get('request_b64')
        request_data = unquote(request_param)
        der = base64.b64decode(request_data)
    elif event.get('isBase64Encoded'):
        request_data = event['body']
        der = base64.b64decode(request_data)
    elif isinstance(event['body'], str):
        # Fix: logger.warn is deprecated in favour of logger.warning.
        logger.warning("Http Request Body seems invalid: %s", type(event['body']))
        logger.warning(str(event['body']))
        der = event['body'].encode()
    else:
        logger.warning("Http Request Body seems invalid: %s", type(event['body']))
        logger.warning(str(event['body']))
        der = bytes(event['body'])
    ocsp_request, error = parse_ocsp_request(der)
    if error:
        ocsp_response = error
    else:
        ocsp_response = responders.build_ocsp_response(ocsp_request)
        if ocsp_response['response_status'].native == 'successful':
            ocsp_response_data = ocsp_response.response_data
            for single_response in ocsp_response_data['responses']:
                logger.info("Certificate Revocation Status: %s",
                            single_response['cert_status'].name)
                logger.info("OCSP Next Update: %s",
                            single_response['next_update'].native)
    response = http_ocsp_response(ocsp_response, max_age=43200)
    logger.debug("APIGW Response: %s", response)
    return response
def get_upload_url(self, ttl=60):
    """Build a presigned PUT URL so a client can upload this asset directly.

    :param ttl: seconds the URL stays valid (default 60)
    :return: the presigned URL string
    """
    s3 = boto3.client('s3')
    put_url = s3.generate_presigned_url(
        'put_object',
        Params=dict(Bucket=BUCKET, Key=self.get_key()),
        ExpiresIn=ttl,
        HttpMethod='PUT',
    )
    logger.debug(f'upload URL: {put_url}')
    return put_url
def handler(event, context):
    """Generate an S3 presigned upload URL.

    issue_key: issue id or key on JSD, required in the query string.
    file_name: file name to put in the S3 bucket, required in the query string.
    """
    logger.debug("Event: %s", json.dumps(event))
    logger.info("HTTP request received, validating...")
    status, resp, queries = validate_event(event)
    if not resp["ok"]:
        return {"statusCode": status, "body": json.dumps(resp)}
    status, resp, s3_presigned_url_ttl = validate_environment()
    if resp["ok"]:
        status, resp = generate_presigned_url(
            queries['issue_key'], queries['file_name'], s3_presigned_url_ttl)
    return {"statusCode": status, "body": json.dumps(resp)}
def event(event, context):
    """S3 notification handler for asset object create/remove events."""
    logger.debug('event: {}'.format(event))
    record = event['Records'][0]
    event_name = record['eventName']
    key = record['s3']['object']['key']
    asset_id = key.replace('{}/'.format(os.environ['S3_KEY_BASE']), '')
    try:
        if event_name == 'ObjectCreated:Put':
            asset = AssetModel.get(hash_key=asset_id)
            try:
                asset.mark_received()
            except UpdateError:
                return {
                    'statusCode': httplib.BAD_REQUEST,
                    'body': {'error_message': 'Unable to update ASSET'}
                }
        elif event_name == 'ObjectRemoved:Delete':
            asset = AssetModel.get(hash_key=asset_id)
            try:
                asset.delete()
            except DeleteError:
                return {
                    'statusCode': httplib.BAD_REQUEST,
                    'body': {'error_message': 'Unable to delete ASSET {}'.format(asset)}
                }
    except DoesNotExist:
        return {
            'statusCode': httplib.NOT_FOUND,
            'body': {'error_message': 'ASSET {} not found'.format(asset_id)}
        }
    return {'statusCode': httplib.ACCEPTED}
def handler(event, context):
    """HTTP handler: copy attachments from a JSD webhook payload into S3."""
    logger.debug("Event: %s", json.dumps(event))
    logger.info("HTTP request received, validating...")
    status, resp, body = validate_event(event)
    if not resp["ok"]:
        return {"statusCode": status, "body": json.dumps(resp)}
    status, resp = validate_environment()
    if not resp["ok"]:
        return {"statusCode": status, "body": json.dumps(resp)}
    status, resp, issue_key, customer_ref_no = validate_body(body)
    if resp["ok"]:
        if 'commentId' in body:
            status, resp = download_comment_attachments_and_upload_to_s3(
                issue_key, body['body'], customer_ref_no)
        else:
            # status intentionally keeps the value from validate_body here.
            resp = upload_to_s3(body, customer_ref_no)
    return {"statusCode": status, "body": json.dumps(resp)}
def get_download_url(self, ttl=60):
    """Return a temporary presigned GET URL for this asset.

    :param ttl: url duration in seconds
    :raises AssertionError: when the asset is not in the UPLOADED state
    """
    client = boto3.client('s3')
    if self.state != State.UPLOADED.name:
        raise AssertionError(
            'Asset {} is marked as {}, must be marked {} to retrieve.'.format(
                self.asset_id, self.state, State.UPLOADED.name))
    url = client.generate_presigned_url(
        'get_object',
        Params={'Bucket': BUCKET, 'Key': self.get_key()},
        ExpiresIn=ttl,
        HttpMethod='GET')
    logger.debug('download URL: {}'.format(url))
    return url
def create(event, context):
    """POST handler: allocate a new asset record and return a presigned
    upload URL. No request body is needed."""
    logger.debug('event: {}'.format(event))
    asset = AssetModel()
    asset.asset_id = str(uuid.uuid1())
    asset.save()
    # No timeout specified: use get_upload_url's parameter default.
    upload_url = asset.get_upload_url()
    return {
        "statusCode": httplib.CREATED,
        "body": {'upload_url': upload_url, 'id': asset.asset_id}
    }
def download_file_and_upload_to_s3(file_name, attachment_id, customer_ref_no):
    """Copy one JIRA attachment into the JSD S3 bucket.

    :return: a human-readable status message; never raises (errors are
        folded into the returned message)
    """
    msg = None
    logger.debug('Uploading attachment to s3 {}'.format(attachment_id))
    try:
        url = f'{JIRA_SEVER}/secure/attachment/{attachment_id}/{file_name}'
        download_path = f'/tmp/{attachment_id}/{file_name}'
        os.makedirs(os.path.dirname(download_path), exist_ok=True)
        # Bug fix: add a timeout (requests otherwise waits forever) and fail
        # on HTTP errors instead of uploading an error page as the attachment.
        response = requests.get(url, auth=(JIRA_USER, JIRA_API_KEY),
                                stream=True, timeout=30)
        response.raise_for_status()
        with open(download_path, 'wb') as out:
            out.write(response.content)
        with open(download_path, 'rb') as data:
            s3_client.upload_fileobj(
                data, S3_JSD_BUCKET,
                f'{customer_ref_no}/{attachment_id}/{file_name}')
        msg = f'Uploaded {file_name}'
    except Exception as ex:
        msg = f'Failed to upload {file_name}: {str(ex)}'
    logger.debug(msg)
    return msg
def download_comment_attachments_and_upload_to_s3(issue_key, body, customer_ref_no):
    """Find issue attachments referenced in a comment body and copy them to S3.

    :param issue_key: JIRA/JSD issue key to read attachments from
    :param body: comment text; an attachment is selected when its filename
        appears in this text
    :return: (http_status, resp) with resp["ok"], resp["info"] / resp["error"]
    """
    error = None
    logger.debug('Handling attachments in comment...')
    headers = {"Accept": "application/json"}
    resp = {
        "ok": True,
    }
    try:
        auth = HTTPBasicAuth(JIRA_USER, JIRA_API_KEY)
        response = requests.request(
            method='GET',
            url=f'{JIRA_SEVER}/rest/api/3/issue/{issue_key}?fields=attachment',
            headers=headers,
            auth=auth,
            timeout=30)  # bug fix: avoid hanging forever on JIRA
        data = json.loads(response.text)
        attachments = [
            item for item in data['fields']['attachment']
            if item['filename'] in body
        ]
        msgs = []
        if not attachments:
            msgs.append('No attachment matched')  # fixed typo "attachemnt"
        else:
            for attachment in attachments:
                msg = download_file_and_upload_to_s3(
                    attachment['filename'], attachment['id'], customer_ref_no)
                msgs.append(msg)
        resp["info"] = msgs
    except Exception as ex:
        error = f"An error occurred: {str(ex)}"
    if error:
        # Bug fix: previously resp["ok"] stayed True even on failure.
        resp["ok"] = False
        resp["error"] = error
        logger.error(error)
    return 400 if error else 200, resp
def getModelInfo(asset_id):
    """Query the model-info service for *asset_id*.

    :param asset_id: opaque id passed through as the ``input`` query arg
    :return: the raw ``requests`` Response object (callers inspect it)
    """
    logger.debug("getting params")
    # NOTE(review): hard-coded EC2 hostname — consider moving to config.
    url = "http://ec2-35-153-231-242.compute-1.amazonaws.com:8088/?input=" + asset_id
    logger.debug(url)
    # Bug fix: a request without a timeout can hang the caller forever.
    response = requests.get(url, timeout=30)
    logger.debug(response)
    return response
def validate_event(event):
    """Validate query parameters and HTTP method.

    :return: (status, resp, queries); resp["ok"] is False when validation
        failed and resp["error"] explains why
    """
    status, error = 200, None
    queries = event.get("queryStringParameters", {})
    logger.debug("Query Parameters: %s", json.dumps(queries))
    httpMethod = event.get("httpMethod")
    params_missing = (queries is None
                      or 'issue_key' not in queries
                      or 'file_name' not in queries)
    if params_missing:
        status = 400
        error = "`issue_key` and `file_name` must be defined in querystring"
    elif httpMethod != "GET":
        status = 405
        error = "Method not allowed: {}".format(httpMethod)
    resp = {"ok": not error}
    if error:
        resp["error"] = error
        logger.error(error)
    return status, resp, queries
def get_download_url(self, ttl=60):
    """Build a presigned GET URL for downloading this asset.

    :param ttl: seconds the URL stays valid (default 60)
    :raises AssertionError: unless the asset is in the UPLOADED state
    """
    s3 = boto3.client('s3')
    if self.state != State.UPLOADED.name:
        message = 'Asset {} is marked as {}, must be marked {} to retrieve.'.format(
            self.asset_id, self.state, State.UPLOADED.name)
        raise AssertionError(message)
    get_url = s3.generate_presigned_url(
        'get_object',
        Params=dict(Bucket=BUCKET, Key=self.get_key()),
        ExpiresIn=ttl,
        HttpMethod='GET',
    )
    logger.debug(f'download URL: {get_url}')
    return get_url
def delete(event, context):
    """DELETE handler: soft-delete asset <asset_id>.

    404 when the asset does not exist, 400 when the delete fails,
    otherwise 204 with no body.
    """
    logger.debug('event: {}'.format(event))
    try:
        asset_id = event['path']['asset_id']
        asset = AssetModel.get(hash_key=asset_id)
    except DoesNotExist:
        return {
            'statusCode': httplib.NOT_FOUND,
            'body': {'error_message': 'ASSET {} not found'.format(asset_id)}
        }
    try:
        asset.mark_deleted()
    except DeleteError:
        return {
            'statusCode': httplib.BAD_REQUEST,
            'body': {'error_message': 'Unable to delete ASSET {}'.format(asset)}
        }
    return {'statusCode': httplib.NO_CONTENT}
def get(event, context):
    """GET handler: return a presigned download URL for asset <asset_id>.

    Optional query param `timeout` overrides the URL_DEFAULT_TTL env value.
    404 when the asset is missing, 403 when it is not downloadable,
    otherwise 202 with the URL.
    """
    logger.debug('event: {}'.format(event))
    try:
        # Bug fix: env values are strings; coerce so ExpiresIn gets an int.
        ttl = int(os.environ['URL_DEFAULT_TTL'])
        try:
            ttl = int(event['query']['timeout'])
        except (KeyError, ValueError):
            # Bug fix: `except KeyError or ValueError` evaluated to
            # `except KeyError` only — a non-numeric timeout used to crash.
            pass
        asset_id = event['path']['asset_id']
        asset = AssetModel.get(hash_key=asset_id)
        download_url = asset.get_download_url(ttl)
    except DoesNotExist:
        return {
            'statusCode': httplib.NOT_FOUND,
            'body': {'error_message': 'ASSET {} not found'.format(asset_id)}
        }
    except AssertionError as e:
        return {
            'statusCode': httplib.FORBIDDEN,
            'body': {'error_message': 'Unable to download: {}'.format(e)}
        }
    return {
        "statusCode": httplib.ACCEPTED,
        "body": {'download_url': download_url}
    }
def handler(event, context):
    """S3-triggered: attach each uploaded file to its JSD issue, publish it
    to the customer, and delete the source S3 object afterwards.

    Key layout: <issue_key>/<filename>.
    """
    logger.debug("Event: %s", json.dumps(event))
    error = validate_environment()
    if error:
        logger.error(error)
        return
    # Security fix: previously JIRA_API_KEY was logged; log only non-secrets.
    logger.debug(f'JIRA_SEVER: {JIRA_SEVER} {JIRA_USER}')
    for record in event['Records']:
        bucket = record['s3']['bucket']['name']
        key = unquote_plus(record['s3']['object']['key'])
        logger.debug('bucket key: {}'.format(key))
        issue_key = key.split('/')[0]
        logger.debug('issue_key: {}'.format(issue_key))
        tmpkey = key.replace(f'{issue_key}/', '')
        download_path = '/tmp/{}'.format(tmpkey)
        s3_client.download_file(bucket, key, download_path)
        try:
            cr = jsd.get_request(issue_key)
            # Upload the file as a temporary attachment first.
            temp_attachment_id = jsd.attach_temporary_file(
                cr['serviceDeskId'], download_path)
            logger.debug(
                'Temporary Attachment Id: {}'.format(temp_attachment_id))
            # Then publish the attachment so the customer can see it.
            response = jsd.add_attachment(issue_key, temp_attachment_id,
                                          public=True, comment=None)
            logger.debug(
                'Set attachment to be public for customer: {}'.format(
                    response))
        except Exception as ex:
            logger.error('Upload attachments to Jira failed: {}'.format(
                str(ex)))
        finally:
            # Always clean up the source object.
            s3_client.delete_object(Bucket=bucket, Key=key)
def get(event, context):
    """GET handler: return a presigned download URL for asset <asset_id>.

    The asset id arrives in event['path']['asset_id']; an optional
    event['query']['timeout'] overrides the URL_DEFAULT_TTL env value.
    (Sample API Gateway lambda-integration events trimmed for brevity.)
    """
    logger.debug('event: {}'.format(event))
    try:
        # Bug fix: env values are strings; coerce so ExpiresIn gets an int.
        ttl = int(os.environ['URL_DEFAULT_TTL'])
        try:
            ttl = int(event['query']['timeout'])
        except (KeyError, ValueError):
            # Bug fix: `except KeyError or ValueError` evaluated to
            # `except KeyError` only — a non-numeric timeout used to crash.
            pass
        asset_id = event['path']['asset_id']
        asset = AssetModel.get(hash_key=asset_id)
        download_url = asset.get_download_url(ttl)
    except DoesNotExist:
        return {
            'statusCode': httplib.NOT_FOUND,
            'body': {'error_message': 'ASSET {} not found'.format(asset_id)}
        }
    except AssertionError as e:
        return {
            'statusCode': httplib.FORBIDDEN,
            'body': {'error_message': 'Unable to download: {}'.format(e)}
        }
    return {
        "statusCode": httplib.ACCEPTED,
        "body": {'download_url': download_url}
    }
def create(event, context):
    """POST handler: create a dynamo entry for a new asset and return a
    presigned upload URL. No request body is needed.

    (Sample API Gateway proxy / lambda-integration events trimmed; only the
    standard lambda event shape is relied upon.)
    """
    logger.debug('event: {}'.format(event))
    asset = AssetModel()
    asset.asset_id = str(uuid.uuid1())
    asset.save()
    # No timeout specified here: use the method's parameter default.
    upload_url = asset.get_upload_url()
    return {
        "statusCode": httplib.CREATED,
        "body": {'upload_url': upload_url, 'id': asset.asset_id}
    }
def create(event, context):
    """POST handler: mint a new asset id, persist it, and hand back a
    presigned S3 upload URL (request body is ignored).

    (Sample API Gateway events trimmed; see AWS API Gateway lambda
    integration docs for the full shapes.)
    """
    logger.debug('event: {}'.format(event))
    new_asset = AssetModel()
    new_asset.asset_id = uuid.uuid1().__str__()
    new_asset.save()
    upload_url = new_asset.get_upload_url()  # default TTL from the method
    body = {'upload_url': upload_url, 'id': new_asset.asset_id}
    return {"statusCode": httplib.CREATED, "body": body}