def query_account(request):
    logger = utils.CloudLogger().log_request(request)
    try:
        # 1) Validate the token
        token = utils.extract_token(request.headers, None)
        if token is None:
            return ({
                'error': 'Bad Request',
                'message': 'Token must be provided in header or body'
            }, 400)
        token_info = utils.get_token_info(token)
        if 'error' in token_info:
            return ({
                'error': 'Invalid Token',
                'message': token_info['error_description'] if 'error_description' in token_info else 'Google rejected the client token'
            }, 401)
        if not utils.validate_token(token_info):
            return ({
                'error': 'Rejected token',
                'message': 'Token was valid but did not meet Lapdog security requirements.'
                ' Token must have email, profile, openid, and devstorage.read_write scopes.'
                ' Broad users must authenticate via a LapdogToken'
            }, 403)

        # 2) Check the service account
        default_session = utils.generate_default_session(scopes=['https://www.googleapis.com/auth/cloud-platform'])
        account_email = utils.ld_acct_in_project(token_info['email'])
        response = utils.query_service_account(default_session, account_email)
        if response.status_code >= 400:
            return ({
                'error': 'Unable to query service account',
                'message': response.text
            }, 400)
        if response.json()['email'] != account_email:
            return ({
                'error': 'Service account email did not match expected value',
                'message': response.json()['email'] + ' != ' + account_email
            }, 400)
        return account_email, 200
    except:
        logger.log_exception()
        return ({
            'error': 'Unknown Error',
            'message': traceback.format_exc()
        }, 500)
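
# Example (illustrative sketch only, not part of the deployed functions): how a
# client might call the query endpoint above once it is deployed as an HTTP
# Cloud Function. The URL pattern, the function name ("query-v1"), and the use
# of a Bearer Authorization header are assumptions about the deployment;
# substitute your own region, project, and endpoint name.
def _example_query_account(access_token):
    import requests  # assumes the requests library is available client-side

    response = requests.post(
        'https://us-central1-my-lapdog-project.cloudfunctions.net/query-v1',  # hypothetical URL
        headers={'Authorization': 'Bearer ' + access_token}
    )
    if response.status_code == 200:
        return response.text  # the pet service account email
    raise ValueError('Query failed (%d): %s' % (response.status_code, response.text))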

def register(request):
    logger = utils.CloudLogger().log_request(request)
    try:
        data = request.get_json()

        # 1) Validate the token
        if not isinstance(data, dict):
            return ({
                'error': 'Bad Request',
                'message': ('No data was provided' if data is None else 'Expected JSON dictionary in request body')
            }, 400)
        token = utils.extract_token(request.headers, data)
        if token is None:
            return ({
                'error': 'Bad Request',
                'message': 'Token must be provided in header or body'
            }, 400)
        token_info = utils.get_token_info(token)
        if 'error' in token_info:
            return ({
                'error': 'Invalid Token',
                'message': token_info['error_description'] if 'error_description' in token_info else 'Google rejected the client token'
            }, 401)
        if not utils.validate_token(token_info):
            return ({
                'error': 'Rejected token',
                'message': 'Token was valid but did not meet Lapdog security requirements.'
                ' Token must have email, profile, openid, and devstorage.read_write scopes.'
                ' Broad users must authenticate via a LapdogToken'
            }, 403)

        # 2) Validate the user's permissions for the bucket
        if 'bucket' not in data:
            return ({
                'error': 'Bad Request',
                'message': 'Missing required parameter "bucket"'
            }, 400)
        session = utils.generate_user_session(token)
        fc_auth = utils.generate_user_session(request.headers['X-Fc-Auth']) if 'X-Fc-Auth' in request.headers else None
        fc_auth_error = (
            'Authorized' if fc_auth is not None or 'cloud-platform' in token_info['scope']
            else 'Not Authorized. Repeat request with the "X-Fc-Auth" header containing application-default credentials'
        )
        if fc_auth is None:
            logger.log(
                "Missing backup Firecloud authentication",
                token_info=token_info,
                authorized='cloud-platform' in token_info['scope'],
                severity='DEBUG'
            )
        read, write = utils.validate_permissions(session, data['bucket'])
        if read is None:
            # Error; "write" will contain a message
            return ({
                'error': 'Cannot Validate Bucket Permissions',
                'message': write
            }, 400)
        if not (read and write):
            # User doesn't have full permissions to the bucket
            return ({
                'error': 'Not Authorized',
                'message': 'User lacks read/write permissions to the requested bucket'
            }, 403)

        # 2.b) Verify that the bucket belongs to this project
        if 'namespace' not in data or 'workspace' not in data:
            return ({
                'error': 'Bad Request',
                'message': 'Missing required parameters "namespace" and "workspace"'
            }, 400)
        core_session = utils.generate_core_session()
        result, message = utils.authenticate_bucket(
            data['bucket'], data['namespace'], data['workspace'],
            fc_auth if fc_auth is not None else session,
            core_session
        )
        if not result:
            return ({
                'error': 'Cannot Validate Bucket Signature',
                'message': message,
                'FC-Auth': fc_auth_error
            }, 400)

        # 3) Issue the worker (pet) service account
        default_session = utils.generate_default_session(scopes=['https://www.googleapis.com/auth/cloud-platform'])
        account_email = utils.ld_acct_in_project(token_info['email'])
        response = utils.query_service_account(default_session, account_email)
        if response.status_code == 404:
            account_name = account_email.split('@')[0]
            logger.log(
                'Issuing new pet service account',
                user=token_info['email'],
                service_account=account_email,
                severity='DEBUG'
            )
            response = default_session.post(
                'https://iam.googleapis.com/v1/projects/{project}/serviceAccounts'.format(project=os.environ.get('GCP_PROJECT')),
                headers={'Content-Type': 'application/json'},
                json={
                    'accountId': account_name,
                    'serviceAccount': {
                        'displayName': token_info['email']
                    }
                }
            )
            if response.status_code >= 400:
                return ({
                    'error': 'Unable to issue service account',
                    'message': response.text
                }, 400)
        elif response.status_code >= 400:
            return ({
                'error': 'Unable to query service account',
                'message': response.text
            }, 400)
        if response.json()['email'] != account_email:
            return ({
                'error': 'Service account email did not match expected value',
                'message': response.json()['email'] + ' != ' + account_email
            }, 400)

        # 4) Update worker bindings
        logger.log(
            "Updating service account bindings",
            account=account_email,
            bindings={
                os.environ.get("FUNCTION_IDENTITY"): 'roles/iam.serviceAccountUser',
                account_email: 'roles/iam.serviceAccountUser'
            }
        )
        response = default_session.post(
            'https://iam.googleapis.com/v1/projects/{project}/serviceAccounts/{account}:setIamPolicy'.format(
                project=os.environ.get('GCP_PROJECT'), account=account_email),
            headers={'Content-Type': 'application/json'},
            json={
                "policy": {
                    "bindings": [
                        {
                            "role": "roles/iam.serviceAccountUser",
                            "members": [
                                # Allows the cloud functions account to set this pet account on cromwell servers
                                "serviceAccount:{email}".format(email=os.environ.get("FUNCTION_IDENTITY")),
                                # Allows the pet account to set itself as the compute account on cromwell workers
                                "serviceAccount:{email}".format(email=account_email)
                            ]
                        }
                    ]
                },
                "updateMask": "bindings"
            }
        )
        if response.status_code != 200:
            return ({
                'error': 'Unable to update service account bindings',
                'message': '(%d) : %s' % (response.status_code, response.text)
            }, 400)

        # 5) Update project bindings
        logger.log(
            "Updating project-wide IAM roles",
            bindings={
                account_email: 'Pet_account',
                token_info['email']: 'Lapdog_user'
            },
            severity="INFO"
        )
        status, response = utils.update_iam_policy(
            default_session,
            {
                'serviceAccount:' + account_email: 'Pet_account',
                'user:' + token_info['email']: 'Lapdog_user'
            }
        )
        if not status:
            return ({
                'error': 'Unable to update project IAM policy',
                'message': '(%d) : %s' % (response.status_code, response.text)
            }, 400)

        # 6) Generate a key for the pet account
        logger.log('Issuing new service account key', service_account=account_email)
        response = default_session.post(
            'https://iam.googleapis.com/v1/projects/{project}/serviceAccounts/{email}/keys'.format(
                project=os.environ.get('GCP_PROJECT'), email=quote(account_email))
        )
        if response.status_code >= 400:
            return ({
                'error': 'Unable to issue service account key',
                'message': response.text
            }, 400)

        # 7) Register with Firecloud
        time.sleep(10)  # New service account keys take a few seconds before they're usable
        # Register the user's new pet service account with Firecloud.
        # We authenticate as the pet account by using the newly generated key
        pet_session = AuthorizedSession(
            google.oauth2.service_account.Credentials.from_service_account_info(
                json.loads(base64.b64decode(response.json()['privateKeyData']).decode())
            ).with_scopes([
                'https://www.googleapis.com/auth/userinfo.profile',
                'https://www.googleapis.com/auth/userinfo.email'
            ])
        )
        while True:
            try:
                response = pet_session.post(
                    "https://api.firecloud.org/register/profile",
                    headers={
                        'User-Agent': 'FISS/0.16.9',
                        'Content-Type': 'application/json'
                    },
                    json={
                        "firstName": "Service",
                        "lastName": "Account",
                        "title": "None",
                        "contactEmail": token_info['email'],
                        "institute": "None",
                        "institutionalProgram": "None",
                        "programLocationCity": "None",
                        "programLocationState": "None",
                        "programLocationCountry": "None",
                        "pi": "None",
                        "nonProfitStatus": "false"
                    },
                    timeout=10
                )
                break
            except google.auth.exceptions.RefreshError:
                logger.log_exception("Service account key not ready")
                time.sleep(10)  # Need more time for the key to propagate
        if response.status_code != 200:
            return ({
                'error': 'Unable to register account with Firecloud',
                'message': response.text
            }, 400)

        # 8) Check ProxyGroup
        # Either add the new service account to the user's Firecloud proxy group,
        # or create the group if it doesn't exist
        fc_session = fc_auth if fc_auth is not None else session
        response = fc_session.get(
            'https://api.firecloud.org/api/groups',
            headers={'User-Agent': 'FISS/0.16.9'},
            timeout=5
        )
        if response.status_code != 200:
            return ({
                'error': "Unable to enumerate user's groups",
                'message': response.text,
                'FC-Auth': fc_auth_error
            }, 400)
        target_group = utils.proxy_group_for_user(token_info['email'])
        for group in response.json():
            if group['groupName'] == target_group:
                # 9) Register the account in the group
                response = fc_session.put(
                    'https://api.firecloud.org/api/groups/{group}/member/{email}'.format(
                        group=target_group, email=quote(account_email)),
                    timeout=5
                )
                if response.status_code != 204:
                    return ({
                        'error': 'Unable to add pet account to proxy group',
                        'message': "Please manually add {email} to {group}".format(
                            group=target_group, email=quote(account_email)),
                        'FC-Auth': fc_auth_error
                    }, 400)
                return (account_email, 200)

        # 8.b) Create the group
        response = session.post(
            'https://api.firecloud.org/api/groups/{group}'.format(group=target_group),
            timeout=5
        )
        if response.status_code >= 400:
            return ({
                'error': 'Unable to create Firecloud proxy group',
                'message': response.text
            }, 400)

        # 9) Register the account in the group
        response = fc_session.put(
            'https://api.firecloud.org/api/groups/{group}/member/{email}'.format(
                group=target_group, email=quote(account_email)),
            timeout=5
        )
        if response.status_code != 204:
            return ({
                'error': 'Unable to add pet account to proxy group',
                'message': "Please manually add {email} to {group}".format(
                    group=target_group + '@firecloud.org', email=quote(account_email)),
                'FC-Auth': fc_auth_error
            }, 400)
        return (account_email, 200)
    except requests.ReadTimeout:
        logger.log_exception('Firecloud timeout')
        return ({
            'error': 'timeout to firecloud',
            'message': 'Took longer than 5 seconds for Firecloud to respond. Please try again later'
        }, 400)
    except:
        logger.log_exception()
        return ({
            'error': 'Unknown Error',
            'message': traceback.format_exc()
        }, 500)
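
# Example (illustrative sketch only): the request that register() above
# expects. "bucket", "namespace", and "workspace" are required body fields; the
# optional "X-Fc-Auth" header carries backup application-default credentials
# for Firecloud calls. The endpoint URL is an assumption for your deployment.
def _example_register(access_token, bucket, namespace, workspace, fc_auth_token=None):
    import requests

    headers = {'Authorization': 'Bearer ' + access_token}
    if fc_auth_token is not None:
        headers['X-Fc-Auth'] = fc_auth_token
    response = requests.post(
        'https://us-central1-my-lapdog-project.cloudfunctions.net/register-v1',  # hypothetical URL
        headers=headers,
        json={'bucket': bucket, 'namespace': namespace, 'workspace': workspace}
    )
    return response.status_code, response.text  # 200 -> pet account email; otherwise a JSON error payload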

def abort_submission(request):
    logger = utils.CloudLogger().log_request(request)
    try:
        data = request.get_json()

        # 1) Validate the token
        if not isinstance(data, dict):
            return ({
                'error': 'Bad Request',
                'message': ('No data was provided' if data is None else 'Expected JSON dictionary in request body')
            }, 400)
        token = utils.extract_token(request.headers, data)
        if token is None:
            return ({
                'error': 'Bad Request',
                'message': 'Token must be provided in header or body'
            }, 400)
        token_info = utils.get_token_info(token)
        if 'error' in token_info:
            return ({
                'error': 'Invalid Token',
                'message': token_info['error_description'] if 'error_description' in token_info else 'Google rejected the client token'
            }, 401)
        if not utils.validate_token(token_info):
            return ({
                'error': 'Rejected token',
                'message': 'Token was valid but did not meet Lapdog security requirements.'
                ' Token must have email, profile, openid, and devstorage.read_write scopes.'
                ' Broad users must authenticate via a LapdogToken'
            }, 403)

        # 2) Validate the user's permissions for the bucket
        if 'bucket' not in data:
            return ({
                'error': 'Bad Request',
                'message': 'Missing required parameter "bucket"'
            }, 400)
        session = utils.generate_user_session(token)
        read, write = utils.validate_permissions(session, data['bucket'])
        if read is None:
            # Error; "write" will contain a message
            return ({
                'error': 'Cannot Validate Bucket Permissions',
                'message': write
            }, 400)
        if not (read and write):
            # User doesn't have full permissions to the bucket
            return ({
                'error': 'Not Authorized',
                'message': 'User lacks read/write permissions to the requested bucket'
            }, 401)

        # 3) Check that submission.json exists and is less than 1 GiB
        if 'submission_id' not in data:
            return ({
                'error': 'Bad Request',
                'message': 'Missing required parameter "submission_id"'
            }, 400)
        submission = utils.fetch_submission_blob(session, data['bucket'], data['submission_id'])
        result, message = utils.validate_submission_file(submission)
        if not result:
            return ({'error': 'Bad Submission', 'message': message}, 400)

        # 4) Download the submission and parse the operation
        try:
            submission = json.loads(submission.download_as_string().decode())
        except:
            return ({
                'error': 'Invalid Submission',
                'message': 'Submission was not valid JSON'
            }, 400)
        if 'operation' not in submission:
            return ({
                'error': 'Invalid Submission',
                'message': 'Submission contained no operation metadata'
            }, 400)
        signature_blob = utils.getblob(
            'gs://{bucket}/lapdog-executions/{submission_id}/signature'.format(
                bucket=data['bucket'], submission_id=data['submission_id']),
            credentials=session.credentials
        )
        if not signature_blob.exists():
            return ({
                'error': 'No Signature',
                'message': 'The submission signature could not be found. Refusing to abort job'
            }, 403)
        if not utils.verify_signature(signature_blob, (data['submission_id'] + submission['operation']).encode()):
            return ({
                'error': 'Invalid Signature',
                'message': 'Could not validate submission signature. Refusing to abort job'
            }, 403)
        core_session = utils.generate_core_session()

        # 5) Generate the abort key
        logger.log("Generating new signature", data=data['submission_id'])
        utils.sign_object(
            data['submission_id'].encode(),
            utils.getblob(
                'gs://{bucket}/lapdog-executions/{submission_id}/abort-key'.format(
                    bucket=data['bucket'], submission_id=data['submission_id']),
                credentials=session.credentials
            ),
            core_session.credentials
        )
        if 'hard' in data and data['hard']:
            # 6) Abort the operation
            logger.log(
                "Hard-aborting submission",
                submission_id=data['submission_id'],
                operation_id=submission['operation'],
                severity='NOTICE'
            )
            response = core_session.post(
                "https://genomics.googleapis.com/v2alpha1/{operation}:cancel".format(
                    operation=quote(submission['operation'])  # Do not quote slashes here
                )
            )
            return response.text, response.status_code
        return ({
            'status': 'Aborting',
            'message': 'A soft-abort request has been sent.'
            ' If the submission does not abort soon, abort it with hard=True to force-kill the cromwell server'
        }, 200)
    except:
        logger.log_exception()
        return ({
            'error': 'Unknown Error',
            'message': traceback.format_exc()
        }, 500)
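
# Example (illustrative sketch only): how a client might request an abort
# through the endpoint above. The endpoint URL is an assumption for your
# deployment; "hard=True" asks the handler to cancel the underlying operation
# directly instead of only writing the soft-abort key.
def _example_abort_submission(access_token, bucket, submission_id, hard=False):
    import requests

    response = requests.post(
        'https://us-central1-my-lapdog-project.cloudfunctions.net/abort-v1',  # hypothetical URL
        headers={'Authorization': 'Bearer ' + access_token},
        json={'bucket': bucket, 'submission_id': submission_id, 'hard': hard}
    )
    return response.status_code, response.text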

def insert_resolution(request):
    """
    This function is unique. It is not deployed into each project.
    It is deployed once into my personal project, which serves as a centralized database.
    """
    logger = utils.CloudLogger().log_request(request)
    try:
        data = request.get_json()
        if not isinstance(data, dict):
            return ({
                'error': 'Bad Request',
                'message': ('No data was provided' if data is None else 'Expected JSON dictionary in request body')
            }, 400)
        token = utils.extract_token(request.headers, data)
        token_info = utils.get_token_info(token)
        if 'error' in token_info:
            return ({
                'error': 'Invalid Token',
                'message': token_info['error_description'] if 'error_description' in token_info else 'Google rejected the client token'
            }, 401)
        if not utils.validate_token(token_info):
            return ({
                'error': 'Rejected token',
                'message': 'Token was valid but did not meet Lapdog security requirements.'
                ' Token must have email, profile, openid, and devstorage.read_write scopes.'
                ' Broad users must authenticate via a LapdogToken'
            }, 403)
        if 'namespace' not in data:
            return ({
                'error': 'Missing Parameters',
                'message': 'Missing required parameter "namespace"'
            }, 400)
        user_session = utils.generate_user_session(token)
        while True:
            response = user_session.get('https://api.firecloud.org/api/profile/billing')
            if response.status_code == 200:
                break
            print(response.status_code, response.text, file=sys.stderr)
            if response.status_code == 404:
                return ({
                    'error': 'User not found',
                    'message': 'You are not registered yet with firecloud'
                }, 404)
            time.sleep(5)
        projects = {proj['projectName']: proj for proj in response.json()}
        if data['namespace'] not in projects:
            return ({
                'error': 'Bad Namespace',
                'message': 'The provided namespace "%s" could not be found' % data['namespace']
            }, 400)
        if projects[data['namespace']]['role'] not in {'Owner', 'Admin', 'Administrator'}:
            return ({
                'error': 'Insufficient Permissions',
                'message': 'The user lacks Owner/Admin privileges on the provided namespace'
            }, 401)
        if 'project' not in data:
            return ({
                'error': 'Missing Parameters',
                'message': 'Missing required parameter "project"'
            }, 400)
        response = user_session.post(
            'https://cloudresourcemanager.googleapis.com/v1/projects/{project}:getIamPolicy'.format(project=data['project'])
        )
        if response.status_code == 403:
            return ({
                'error': 'Unauthorized',
                'message': 'User lacks permissions on the provided project'
            }, 401)
        if response.status_code != 200:
            return ({
                'error': 'Unexpected response from Google API',
                'message': '(%d) : %s' % (response.status_code, response.text)
            }, 400)
        for policy in response.json()['bindings']:
            if policy['role'] == 'roles/owner':
                if ('user:' + token_info['email']) in policy['members']:
                    blob = utils.getblob(
                        'gs://lapdog-resolutions/%s' % sha512(data['namespace'].encode()).hexdigest(),
                        credentials=utils.generate_default_session().credentials
                    )
                    if blob.exists():
                        return ({
                            'error': 'Already Exists',
                            'message': 'A resolution for this namespace is already in place'
                        }, 409)
                    logger.log(
                        "Adding new resolution",
                        namespace=data['namespace'],
                        project_id=data['project'],
                        admin=token_info['email'],
                        severity='NOTICE'
                    )
                    blob.upload_from_string(data['project'].encode())
                    return (
                        'gs://lapdog-resolutions/%s' % sha512(data['namespace'].encode()).hexdigest(),
                        200
                    )
        return ({
            'error': 'Unauthorized',
            'message': 'User lacks ownership of the provided project'
        }, 400)
    except:
        logger.log_exception()
        return ({
            'error': 'Unknown Error',
            'message': traceback.format_exc()
        }, 500)

def create_submission(request):
    logger = utils.CloudLogger().log_request(request)
    try:
        data = request.get_json()

        # 1) Validate the token
        if not isinstance(data, dict):
            return ({
                'error': 'Bad Request',
                'message': ('No data was provided' if data is None else 'Expected JSON dictionary in request body')
            }, 400)
        token = utils.extract_token(request.headers, data)
        if token is None:
            return ({
                'error': 'Bad Request',
                'message': 'Token must be provided in header or body'
            }, 400)
        token_info = utils.get_token_info(token)
        if 'error' in token_info:
            return ({
                'error': 'Invalid Token',
                'message': token_info['error_description'] if 'error_description' in token_info else 'Google rejected the client token'
            }, 401)
        if not utils.validate_token(token_info):
            return ({
                'error': 'Rejected token',
                'message': 'Token was valid but did not meet Lapdog security requirements.'
                ' Token must have email, profile, openid, and devstorage.read_write scopes.'
                ' Broad users must authenticate via a LapdogToken'
            }, 403)

        # 1.b) Verify that the user has a pet account
        response = utils.query_service_account(
            utils.generate_default_session(scopes=['https://www.googleapis.com/auth/cloud-platform']),
            utils.ld_acct_in_project(token_info['email'])
        )
        if response.status_code != 200:
            return ({
                'error': 'User has not registered with this Lapdog Engine',
                'message': response.text
            }, 401)

        # 2) Validate the user's permissions for the bucket
        if 'bucket' not in data:
            return ({
                'error': 'Bad Request',
                'message': 'Missing required parameter "bucket"'
            }, 400)
        session = utils.generate_user_session(token)
        fc_auth = utils.generate_user_session(request.headers['X-Fc-Auth']) if 'X-Fc-Auth' in request.headers else None
        if fc_auth is None:
            logger.log(
                "Missing backup Firecloud authentication",
                token_info=token_info,
                authorized='cloud-platform' in token_info['scope'],
                severity='DEBUG'
            )
        read, write = utils.validate_permissions(session, data['bucket'])
        if read is None:
            # Error; "write" will contain a message
            return ({
                'error': 'Cannot Validate Bucket Permissions',
                'message': write
            }, 400)
        if not (read and write):
            # User doesn't have full permissions to the bucket
            return ({
                'error': 'Not Authorized',
                'message': 'User lacks read/write permissions to the requested bucket'
            }, 403)

        # 2.b) Verify that the bucket belongs to this project
        if 'namespace' not in data or 'workspace' not in data:
            return ({
                'error': 'Bad Request',
                'message': 'Missing required parameters "namespace" and "workspace"'
            }, 400)
        core_session = utils.generate_core_session()
        result, message = utils.authenticate_bucket(
            data['bucket'], data['namespace'], data['workspace'],
            fc_auth if fc_auth is not None else session,
            core_session
        )
        if not result:
            return ({
                'error': 'Cannot Validate Bucket Signature',
                'message': message,
                'FC-Auth': (
                    'Authorized' if fc_auth is not None or 'cloud-platform' in token_info['scope']
                    else 'Not Authorized. Repeat request with the "X-Fc-Auth" header containing application-default credentials'
                )
            }, 400)

        # 3) Check that submission.json exists and is less than 1 GiB
        if 'submission_id' not in data:
            return ({
                'error': 'Bad Request',
                'message': 'Missing required parameter "submission_id"'
            }, 400)
        submission = utils.fetch_submission_blob(session, data['bucket'], data['submission_id'])
        result, message = utils.validate_submission_file(submission)
        if not result:
            return ({'error': 'Bad Submission', 'message': message}, 400)

        # 4) Submit the pipelines request
        region = 'us-central1'
        if 'compute_region' in data:
            allowed_regions = utils.enabled_regions()
            if data['compute_region'] in allowed_regions:
                region = data['compute_region']
            else:
                return ({
                    'error': 'Invalid Region',
                    'message': 'Region not allowed. Enabled regions: ' + repr(allowed_regions)
                }, 400)
        if 'memory' in data and data['memory'] > 3072:
            # Cheapest core:memory ratio
            mtype = 'custom-%d-%d' % (math.ceil(data['memory'] / 13312) * 2, data['memory'])
        else:
            mtype = 'n1-standard-1'
        pipeline = {
            'pipeline': {
                'actions': [
                    {
                        'imageUri': 'gcr.io/broad-cga-aarong-gtex/wdl_runner:' + __CROMWELL_TAG__,
                        'commands': ['/wdl_runner/wdl_runner.sh'],
                        'environment': {
                            'SIGNATURE_ENDPOINT': 'https://{region}-{project}.cloudfunctions.net/signature-{version}'.format(
                                region=os.environ.get('FUNCTION_REGION'),
                                project=os.environ.get('GCP_PROJECT'),
                                version=__API_VERSION__['signature']),
                            'LAPDOG_PROJECT': os.environ.get('GCP_PROJECT'),
                            'WDL': 'gs://{bucket}/lapdog-executions/{submission_id}/method.wdl'.format(
                                bucket=data['bucket'], submission_id=data['submission_id']),
                            'WORKFLOW_INPUTS': 'gs://{bucket}/lapdog-executions/{submission_id}/config.tsv'.format(
                                bucket=data['bucket'], submission_id=data['submission_id']),
                            'WORKFLOW_OPTIONS': json.dumps(data['options']) if 'options' in data else '{}',
                            'LAPDOG_SUBMISSION_ID': data['submission_id'],
                            'WORKSPACE': 'gs://{bucket}/lapdog-executions/{submission_id}/workspace/'.format(
                                bucket=data['bucket'], submission_id=data['submission_id']),
                            'OUTPUTS': 'gs://{bucket}/lapdog-executions/{submission_id}/results'.format(
                                bucket=data['bucket'], submission_id=data['submission_id']),
                            'SUBMISSION_DATA_PATH': 'gs://{bucket}/lapdog-executions/{submission_id}/submission.json'.format(
                                bucket=data['bucket'], submission_id=data['submission_id']),
                            'LAPDOG_LOG_PATH': 'gs://{bucket}/lapdog-executions/{submission_id}/logs'.format(
                                bucket=data['bucket'], submission_id=data['submission_id']),
                            'PRIVATE_ACCESS': 'true' if ('no_ip' in data and data['no_ip']) else 'false',
                            'SUBMISSION_ZONES': ' '.join('{}-{}'.format(region, zone) for zone in GCP_ZONES[region]),
                            'DUMP_PATH': (
                                'gs://{bucket}/lapdog-call-cache.sql'.format(bucket=data['bucket'])
                                if 'callcache' in data and data['callcache'] else ''
                            )
                        }
                    }
                ],
                'resources': {
                    'regions': [region],
                    'virtualMachine': {
                        'machineType': mtype,
                        'preemptible': False,
                        'labels': {
                            'lapdog-execution-role': 'cromwell',
                            'lapdog-submission-id': data['submission_id']
                        },
                        'serviceAccount': {
                            'email': utils.ld_acct_in_project(token_info['email']),
                            'scopes': [
                                "https://www.googleapis.com/auth/cloud-platform",
                                "https://www.googleapis.com/auth/compute",
                                "https://www.googleapis.com/auth/devstorage.read_write",
                                "https://www.googleapis.com/auth/genomics"
                            ]
                        },
                        'bootDiskSizeGb': 20 + (max(0, data['cache_size'] - 10) if 'cache_size' in data else 0),
                        'network': {
                            'network': 'default',
                            'usePrivateAddress': ('no_ip' in data and data['no_ip'])
                        }
                    }
                },
            }
        }
        papi_url = 'https://lifesciences.googleapis.com/v2beta/projects/{}/locations/{}/pipelines:run'.format(
            os.environ.get('GCP_PROJECT'), region)
        logger.log(
            "Launching LifeSciences v2beta pipeline",
            pipeline=pipeline['pipeline'],
            url=papi_url,
            severity='NOTICE'
        )
        response = utils.generate_default_session([
            "https://www.googleapis.com/auth/cloud-platform",
            "https://www.googleapis.com/auth/compute",
            "https://www.googleapis.com/auth/genomics"
        ]).post(
            papi_url,
            headers={'Content-Type': 'application/json'},
            json=pipeline
        )
        try:
            if response.status_code == 200:
                operation = response.json()['name']
                # 5) Sign the operation
                logger.log("Generating new signature", data=(data['submission_id'] + operation))
                utils.sign_object(
                    (data['submission_id'] + operation).encode(),
                    utils.getblob(
                        'gs://{bucket}/lapdog-executions/{submission_id}/signature'.format(
                            bucket=data['bucket'], submission_id=data['submission_id']),
                        credentials=session.credentials
                    ),
                    core_session.credentials
                )
                return operation, 200
        except:
            logger.log_exception('PAPIv2 request failed')
            return ({
                'error': 'Unable to start submission',
                'message': traceback.format_exc()
            }, 500)
        return ({
            'error': 'Unable to start submission',
            'message': 'Google rejected the pipeline request (%d) : %s' % (response.status_code, response.text)
        }, 400)
    except:
        logger.log_exception()
        return ({
            'error': 'Unknown Error',
            'message': traceback.format_exc()
        }, 500)
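
# Example (illustrative sketch only): a minimal request body for the
# create_submission endpoint above. "bucket", "namespace", "workspace", and
# "submission_id" are required; "memory" (MB), "compute_region", "no_ip", and
# "callcache" are optional knobs read by the handler. The endpoint URL is an
# assumption for your deployment.
def _example_create_submission(access_token, bucket, namespace, workspace, submission_id):
    import requests

    response = requests.post(
        'https://us-central1-my-lapdog-project.cloudfunctions.net/submit-v1',  # hypothetical URL
        headers={'Authorization': 'Bearer ' + access_token},
        json={
            'bucket': bucket,
            'namespace': namespace,
            'workspace': workspace,
            'submission_id': submission_id,
            'memory': 13312,                  # optional: >3072 switches to a custom machine type
            'compute_region': 'us-central1',  # optional: must be in utils.enabled_regions()
            'no_ip': False,                   # optional: run the cromwell VM without a public IP
            'callcache': False                # optional: enable the call-cache dump path
        }
    )
    return response.status_code, response.text  # 200 -> operation name; otherwise a JSON error payload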

def quotas(request):
    logger = utils.CloudLogger().log_request(request)
    try:
        # 1) Validate the token
        token = utils.extract_token(request.headers, None)
        if token is None:
            return ({
                'error': 'Bad Request',
                'message': 'Token must be provided in header or body'
            }, 400)
        token_info = utils.get_token_info(token)
        if 'error' in token_info:
            return ({
                'error': 'Invalid Token',
                'message': token_info['error_description'] if 'error_description' in token_info else 'Google rejected the client token'
            }, 401)
        if not utils.validate_token(token_info):
            return ({
                'error': 'Rejected token',
                'message': 'Token was valid but did not meet Lapdog security requirements.'
                ' Token must have email, profile, openid, and devstorage.read_write scopes.'
                ' Broad users must authenticate via a LapdogToken'
            }, 403)

        # 2) Check the service account
        default_session = utils.generate_default_session(scopes=['https://www.googleapis.com/auth/cloud-platform'])
        account_email = utils.ld_acct_in_project(token_info['email'])
        response = utils.query_service_account(default_session, account_email)
        if response.status_code >= 400:
            return ({
                'error': 'Unable to query service account',
                'message': response.text
            }, 400)
        if response.json()['email'] != account_email:
            return ({
                'error': 'Service account email did not match expected value',
                'message': response.json()['email'] + ' != ' + account_email
            }, 400)

        # 3) Query quota usage
        project_usage = default_session.get(
            'https://www.googleapis.com/compute/v1/projects/{project}'.format(project=os.environ.get('GCP_PROJECT'))
        )
        if project_usage.status_code != 200:
            return ({
                'error': 'Invalid response from Google',
                'message': '(%d) : %s' % (project_usage.status_code, project_usage.text)
            }, 400)
        quotas = [
            {
                **quota,
                'percent': ('%0.2f%%' % (100 * quota['usage'] / quota['limit'])) if quota['limit'] > 0 else '0.00%'
            }
            for quota in project_usage.json()['quotas']
        ]
        for region_name in utils.enabled_regions():
            region_usage = default_session.get(
                'https://www.googleapis.com/compute/v1/projects/{project}/regions/{region}'.format(
                    project=os.environ.get('GCP_PROJECT'), region=region_name)
            )
            if region_usage.status_code != 200:
                return ({
                    'error': 'Invalid response from Google',
                    'message': '(%d) : %s' % (region_usage.status_code, region_usage.text)
                }, 400)
            quotas += [
                {
                    **quota,
                    'percent': ('%0.2f%%' % (100 * quota['usage'] / quota['limit'])) if quota['limit'] > 0 else '0.00%',
                    'metric': region_name + '.' + quota['metric']
                }
                for quota in region_usage.json()['quotas']
            ]
        return ({
            'raw': quotas,
            'alerts': [quota for quota in quotas if quota['limit'] > 0 and quota['usage'] / quota['limit'] >= 0.5]
        }, 200)
    except:
        logger.log_exception()
        return ({
            'error': 'Unknown Error',
            'message': traceback.format_exc()
        }, 500)
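
# Example (illustrative sketch only): reading the quota report returned by the
# endpoint above and printing the >=50% usage alerts the handler computes. The
# endpoint URL and the use of GET with a Bearer header are assumptions for your
# deployment.
def _example_print_quota_alerts(access_token):
    import requests

    response = requests.get(
        'https://us-central1-my-lapdog-project.cloudfunctions.net/quotas-v1',  # hypothetical URL
        headers={'Authorization': 'Bearer ' + access_token}
    )
    response.raise_for_status()
    for quota in response.json()['alerts']:
        # Each alert mirrors the Compute Engine quota object, plus the 'percent' string added by the handler
        print(quota['metric'], quota['usage'], '/', quota['limit'], quota['percent'])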

def oauth(request):
    """
    This function is unique. It is not deployed into each project.
    It is deployed once into my personal project, which serves as a centralized database.
    This endpoint handles incoming OAuth.
    """
    logger = utils.CloudLogger().log_request(request)
    try:
        data = request.get_json()
        if not isinstance(data, dict):
            return ({
                'error': 'Bad Request',
                'message': ('No data was provided' if data is None else 'Expected JSON dictionary in request body')
            }, 400)
        if 'grant_type' not in data:
            return ({
                'error': 'Missing Parameters',
                'message': "Missing required 'grant_type' parameter"
            }, 400)
        if 'client_id' not in data:
            return ({
                'error': 'Missing Parameters',
                'message': "Missing required 'client_id' parameter"
            }, 400)
        if data['client_id'] != utils.OAUTH_CLIENT_ID:
            return ({
                'error': 'Bad Client',
                'message': "The provided client ID did not match the server's client ID"
            }, 409)
        if data['grant_type'] == 'authorization_code':
            if 'code' not in data:
                return ({
                    'error': 'Missing Parameters',
                    'message': "Missing required 'code' parameter (required by grant_type = authorization_code)"
                }, 400)
            if 'redirect_uri' not in data:
                return ({
                    'error': 'Missing Parameters',
                    'message': "Missing required 'redirect_uri' parameter (required by grant_type = authorization_code)"
                }, 400)
            data['client_secret'] = os.environ['OAUTH_CLIENT_SECRET']
            response = requests.post(
                'https://oauth2.googleapis.com/token',
                headers={'Content-Type': 'application/x-www-form-urlencoded'},
                data=data
            )
            if response.status_code != 200:
                return ({
                    'error': 'Authorization failed',
                    'message': 'Google rejected authorization request: {}'.format(response.text)
                }, response.status_code)
            return response.json(), 200
        elif data['grant_type'] == 'refresh_token':
            if 'refresh_token' not in data:
                return ({
                    'error': 'Missing Parameters',
                    'message': "Missing required 'refresh_token' parameter (required by grant_type = refresh_token)"
                }, 400)
            data['client_secret'] = os.environ['OAUTH_CLIENT_SECRET']
            response = requests.post(
                'https://oauth2.googleapis.com/token',
                headers={'Content-Type': 'application/x-www-form-urlencoded'},
                data=data
            )
            if response.status_code != 200:
                return ({
                    'error': 'Refresh failed',
                    'message': 'Google rejected refresh request: {}'.format(response.text)
                }, response.status_code)
            return response.json(), 200
        else:
            return ({
                'error': 'Bad grant_type',
                'message': 'grant_type must be "authorization_code" or "refresh_token"'
            }, 400)
    except:
        logger.log_exception()
        return ({
            'error': 'Unknown Error',
            'message': traceback.format_exc()
        }, 500)
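
# Example (illustrative sketch only): exchanging an OAuth authorization code
# through the centralized proxy endpoint above. The endpoint URL is an
# assumption for your deployment; client_id must match utils.OAUTH_CLIENT_ID on
# the server, and redirect_uri must match the one used to obtain the code.
def _example_exchange_authorization_code(code, client_id, redirect_uri):
    import requests

    response = requests.post(
        'https://us-central1-my-central-project.cloudfunctions.net/oauth-v1',  # hypothetical URL
        json={
            'grant_type': 'authorization_code',
            'client_id': client_id,
            'code': code,
            'redirect_uri': redirect_uri
        }
    )
    response.raise_for_status()
    return response.json()  # Google's token response: access_token, refresh_token, expires_in, ...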

def webhook(request):
    """
    Admins: Use this to trigger a self-update to the given reference (commit, tag, or branch).
    Control who has access to trigger updates with the invoker permissions on the webhook.
    """
    logger = utils.CloudLogger().log_request(request)
    try:
        # 1) Check token details to ensure the user is in the whitelist
        token = utils.extract_token(request.headers, None)
        if token is None:
            return ({
                'error': 'No credentials',
                'message': 'User did not pass identity in Authorization header'
            }, 400)
        token_data = utils.get_token_info(token)
        if 'error' in token_data:
            return ({
                'error': 'Invalid Token',
                'message': token_data['error_description'] if 'error_description' in token_data else 'Google rejected the client token'
            }, 401)
        if token_data['email'] not in os.environ['INVOKERS'].split(','):
            return ({
                'error': 'Not authorized',
                'message': 'User "{}" not present in function environment configuration'.format(token_data['email'])
            }, 403)
        data = request.get_json()
        if 'ref' not in data:
            return ({
                'error': 'Missing parameters',
                'message': 'Missing required parameter "ref"'
            }, 400)
        update_payload = {
            'random': os.urandom(16).hex(),
            'timestamp': datetime.datetime.utcnow().timestamp(),
            'tag': data['ref'],
            'url': 'https://github.com/getzlab/lapdog.git'
        }

        # 2) Get all resolved namespaces and update the IAM policy for the signing key
        default_session = utils.generate_default_session()
        resolutions = [
            blob.download_as_string().decode()
            for page in utils._getblob_client(default_session.credentials).bucket('lapdog-resolutions').list_blobs(
                fields='items/name,nextPageToken').pages
            for blob in page
        ]
        policy = default_session.get(
            'https://cloudkms.googleapis.com/v1/projects/broad-cga-aarong-gtex/locations/global/keyRings/lapdog:getIamPolicy'
        )
        if policy.status_code != 200:
            return ({
                'error': 'Unable to read IAM policy',
                'message': policy.text
            }, 500)
        policy = policy.json()
        for i, binding in enumerate(policy['bindings']):
            if binding['role'] == 'projects/broad-cga-aarong-gtex/roles/signingKeyVerifier':
                policy['bindings'][i]['members'] = [
                    'serviceAccount:lapdog-functions@{}.iam.gserviceaccount.com'.format(resolution)
                    for resolution in resolutions
                ]
        logger.log(
            "Updating project-wide IAM roles",
            bindings={
                'lapdog-functions@{}.iam.gserviceaccount.com'.format(resolution): 'projects/broad-cga-aarong-gtex/roles/signingKeyVerifier'
                for resolution in resolutions
            },
            severity='INFO'
        )
        response = default_session.post(
            'https://cloudkms.googleapis.com/v1/projects/broad-cga-aarong-gtex/locations/global/keyRings/lapdog:setIamPolicy',
            headers={'Content-Type': 'application/json'},
            json={
                "policy": policy,
                "updateMask": "bindings"
            }
        )
        if response.status_code != 200:
            return ({
                'error': 'Unable to update IAM policy',
                'message': "Google rejected the policy update: (%d) : %s" % (response.status_code, response.text)
            }, 500)

        # 3) Trigger the update for all resolutions
        status = {'results': []}
        failed = 200
        logger.log('Generating new signature', data=json.dumps(update_payload))
        signature = utils._get_signature(json.dumps(update_payload).encode(), utils.UPDATE_KEY_PATH, default_session.credentials)
        max_version = int(utils.__API_VERSION__['update'][1:])
        for resolution in resolutions:
            updated = False
            for version in range(max_version, (data['__min_version__'] if '__min_version__' in data else 0), -1):
                try:
                    update_url = 'https://us-central1-{project}.cloudfunctions.net/update-v{version}'.format(
                        project=resolution, version=version)
                    if default_session.options(update_url).status_code == 204:
                        logger.log("Triggering update", project=resolution)
                        response = default_session.post(
                            update_url,
                            headers={
                                'Content-Type': 'application/json',
                                'X-Lapdog-Signature': signature.hex()
                            },
                            json=update_payload
                        )
                        failed = max(failed, response.status_code)
                        status['results'].append({
                            'project': resolution,
                            'status': 'OK' if response.status_code == 200 else 'Failed',
                            'message': response.text,
                            'code': response.status_code
                        })
                        logger.log(
                            "Update triggered",
                            project=resolution,
                            version=version,
                            status=response.status_code,
                            message=response.text,
                            severity='INFO'
                        )
                        updated = True
                        break
                except:
                    logger.log_exception("Failed to update project", project=resolution)
                    failed = max(failed, 500)
                    status['results'].append({
                        'project': resolution,
                        'status': 'Error',
                        'message': traceback.format_exc(),
                        'code': 0
                    })
                    updated = True
                    break
            if not updated:
                status['results'].append({
                    'project': resolution,
                    'status': 'Error',
                    'message': "The target endpoint does not support any self-update endpoint versions",
                    'code': 0
                })
        return status, failed
    except:
        logger.log_exception()
        return ({
            'error': 'Unknown error',
            'message': traceback.format_exc()
        }, 500)

def update(request):
    """
    Handles update requests from the master update webhook.
    """
    logger = utils.CloudLogger().log_request(request)
    try:
        # 1) Validate the request
        if 'X-Lapdog-Signature' not in request.headers:
            return ({
                'error': 'Missing Signature',
                'message': 'The required X-Lapdog-Signature header was not provided'
            }, 400)
        signature = request.headers['X-Lapdog-Signature']
        data = request.get_json()
        if not isinstance(data, dict):
            return ({
                'error': 'Bad Request',
                'message': ('No data was provided' if data is None else 'Expected JSON dictionary in request body')
            }, 400)
        result = utils.verify_signature(
            bytes.fromhex(signature),
            json.dumps(data).encode(),
            utils.UPDATE_KEY_PATH,
            _is_blob=False
        )
        if not result:
            return ({
                'error': 'Bad Signature',
                'message': 'The provided signature for this update was invalid'
            }, 403)
        if not ('tag' in data and 'url' in data and 'random' in data and 'timestamp' in data):
            return ({
                'error': 'Missing parameters',
                'message': 'Missing one or more of the required parameters "tag", "url", "timestamp", and "random"'
            }, 400)
        if (datetime.datetime.utcnow().timestamp() - data['timestamp']) > 300:
            return ({
                'error': 'Expired',
                'message': 'This update signature has expired'
            }, 403)

        # 2) Build the pipeline to boot the update VM
        regions = utils.enabled_regions()
        if len(regions) < 1:
            return ({
                'error': 'No regions',
                'message': 'There are no regions enabled in this project'
            }, 503)
        pipeline = {
            'pipeline': {
                'actions': [
                    {
                        'imageUri': 'gcr.io/broad-cga-aarong-gtex/self_update:' + utils.UPDATE_IMAGE_TAG,
                        'commands': ['/update.sh'],
                        'environment': {
                            'LAPDOG_PROJECT': os.environ.get('GCP_PROJECT'),
                            'LAPDOG_LOG_PATH': 'gs://{bucket}/update-logs/{time}-{tag}/'.format(
                                bucket=utils.ld_meta_bucket_for_project(),
                                tag=data['tag'],
                                time=int(time.time())
                            ),
                            'LAPDOG_CLONE_URL': data['url'],
                            'LAPDOG_NAMESPACE': utils.getblob(
                                'gs://{bucket}/resolution'.format(
                                    bucket=utils.ld_meta_bucket_for_project(os.environ.get('GCP_PROJECT'))
                                )
                            ).download_as_string().decode(),
                            'LAPDOG_TAG': data['tag']
                        }
                    }
                ],
                'resources': {
                    'regions': regions,
                    'virtualMachine': {
                        'machineType': 'f1-micro',
                        'preemptible': False,
                        'labels': {
                            'lapdog-execution-role': 'self-update',
                        },
                        'serviceAccount': {
                            'email': 'lapdog-update@{}.iam.gserviceaccount.com'.format(os.environ.get('GCP_PROJECT')),
                            'scopes': [
                                "https://www.googleapis.com/auth/cloud-platform",
                                "https://www.googleapis.com/auth/compute",
                                "https://www.googleapis.com/auth/devstorage.read_write",
                                "https://www.googleapis.com/auth/genomics"
                            ]
                        },
                        'bootDiskSizeGb': 20,
                        'network': {
                            'network': 'default',
                            'usePrivateAddress': False
                        }
                    }
                },
            }
        }

        # 3) Launch the pipeline
        papi_url = 'https://lifesciences.googleapis.com/v2beta/projects/{}/locations/{}/pipelines:run'.format(
            os.environ.get('GCP_PROJECT'), regions[0])
        logger.log(
            "Launching PAPIv2 pipeline",
            pipeline=pipeline['pipeline'],
            url=papi_url,
            severity='NOTICE'
        )
        response = utils.generate_default_session([
            "https://www.googleapis.com/auth/cloud-platform",
            "https://www.googleapis.com/auth/compute",
            "https://www.googleapis.com/auth/genomics"
        ]).post(
            papi_url,
            headers={'Content-Type': 'application/json'},
            json=pipeline
        )
        try:
            if response.status_code == 200:
                return response.json()['name'], 200
            return ({
                'error': 'Unable to start update',
                'message': 'Google rejected the pipeline request (%d) : %s' % (response.status_code, response.text)
            }, 400)
        except:
            logger.log_exception("PAPIv2 request failed")
            return ({
                'error': 'Unable to start update',
                'message': traceback.format_exc()
            }, 500)
    except:
        logger.log_exception()
        return ({
            'error': 'Unknown error',
            'message': traceback.format_exc()
        }, 500)