def read_token(self):
    with open('token.txt', 'r') as f:
        token = f.readline().strip()
    if utils.validate_token(token):
        self.token = token
        return True
    else:
        return False
def query_account(request):
    logger = utils.CloudLogger().log_request(request)
    try:
        # 1) Validate the token
        token = utils.extract_token(request.headers, None)
        if token is None:
            return ({'error': 'Bad Request',
                     'message': 'Token must be provided in header or body'}, 400)
        token_info = utils.get_token_info(token)
        if 'error' in token_info:
            return ({'error': 'Invalid Token',
                     'message': token_info['error_description'] if 'error_description' in token_info
                     else 'Google rejected the client token'}, 401)
        if not utils.validate_token(token_info):
            return ({'error': 'Rejected token',
                     'message': 'Token was valid but did not meet Lapdog security requirements.'
                                ' Token must have email, profile, openid, and devstorage.read_write scopes.'
                                ' Broad users must authenticate via a LapdogToken'}, 403)
        # 2) Check service account
        default_session = utils.generate_default_session(
            scopes=['https://www.googleapis.com/auth/cloud-platform'])
        account_email = utils.ld_acct_in_project(token_info['email'])
        response = utils.query_service_account(default_session, account_email)
        if response.status_code >= 400:
            return ({'error': 'Unable to query service account',
                     'message': response.text}, 400)
        if response.json()['email'] != account_email:
            return ({'error': 'Service account email did not match expected value',
                     'message': response.json()['email'] + ' != ' + account_email}, 400)
        return account_email, 200
    except:
        logger.log_exception()
        return ({'error': 'Unknown Error',
                 'message': traceback.format_exc()}, 500)
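The Lapdog handlers in this collection (query_account, register, quotas, create_submission, abort_submission, insert_resolution) all share the same preamble but never show utils.extract_token, utils.get_token_info, or utils.validate_token. The following is a minimal sketch of what those helpers could look like, inferred only from how the handlers use them; the Authorization header name, the tokeninfo endpoint call, and the exact scope strings are assumptions, not the real Lapdog implementation.

import requests

# Scopes the handlers' error message demands: email, profile, openid, devstorage.read_write (assumed URIs)
REQUIRED_SCOPES = {
    'openid',
    'https://www.googleapis.com/auth/userinfo.email',
    'https://www.googleapis.com/auth/userinfo.profile',
    'https://www.googleapis.com/auth/devstorage.read_write',
}

def extract_token(headers, data):
    # The handlers accept the token either in a header or in the JSON body.
    if 'Authorization' in headers:
        return headers['Authorization'].replace('Bearer ', '', 1)
    if isinstance(data, dict) and 'token' in data:
        return data['token']
    return None

def get_token_info(token):
    # Ask Google to describe the access token; failures come back with an 'error' key,
    # which is exactly what the handlers check for.
    return requests.get('https://www.googleapis.com/oauth2/v3/tokeninfo',
                        params={'access_token': token}, timeout=10).json()

def validate_token(token_info):
    # Reject tokens that lack any of the required scopes or carry no email claim.
    granted = set(token_info.get('scope', '').split())
    return 'email' in token_info and REQUIRED_SCOPES.issubset(granted)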
def change_email(token):
    if validate_token(user=current_user, token=token, operation=Operations.CHANGE_EMAIL):
        flash('Email updated.', 'success')
        return redirect(url_for('.index', username=current_user.username))
    else:
        flash('Invalid or expired token.', 'warning')
        return redirect(url_for('.change_email_request'))
def download(data):
    db = DbClient()
    if not validate_token(request):
        return render_template("error_page.html")
    user = get_data_by_token(request.cookies.get('token', None))
    if data == 'full':
        sensors = db.sensors.get_sensors()
        sensors = [tuple(sensor.values()) for sensor in sensors]
        utils.write_sensors_to_csv(sensors)
    return send_file('export.csv', attachment_filename='export.csv')
def confirm(token):
    if current_user.confirmed:
        return redirect(url_for('main.index'))
    if validate_token(user=current_user, token=token, operation=Operations.CONFIRM):
        flash('Account confirmed.', 'success')
        return redirect(url_for('main.index'))
    else:
        flash('Invalid or expired token.', 'danger')
        return redirect(url_for('.resend_confirm_email'))
def confirm(token):
    print('token')
    if current_user.confirmed:
        print(current_user.username)
        return redirect(url_for('main.index'))
    if validate_token(user=current_user, token=token, operation=Operations.CONFIRM):
        print('Verification succeeded')
        flash('Verification succeeded', 'success')
        return redirect(url_for('main.index'))
    else:
        print('Verification failed')
        flash('Verification failed', 'danger')
        return redirect(url_for('.resend_confirmation'))
def new_databins():
    db = DbClient()
    if not validate_token(request):
        return render_template("error_page.html")
    user = get_data_by_token(request.cookies.get('token', None))
    if request.method == 'POST':
        result = request.form
        if result['radio-stacked'] == "capacity":
            sensors = db.sensors.get_sensor_between_capacity(result['capacity'], 100)
            if not sensors:
                return render_template("databins.html")
        elif result['radio-stacked'] == 'id':
            sensors = db.sensors.get_sensor_by_id(result['Bin_ID'])
            if sensors is None:
                return render_template("databins.html")
        else:
            sensors = db.sensors.get_sensor_by_address(result['address'])
            if not sensors:
                return render_template("databins.html")
    else:
        sensors_low = db.sensors.get_sensor_between_capacity(0, 25) or []
        sensors_mid = db.sensors.get_sensor_between_capacity(26, 75) or []
        sensors_full = db.sensors.get_sensor_between_capacity(76, 100) or []
        sensors = sensors_low + sensors_mid + sensors_full
    if isinstance(sensors, dict):
        # A single sensor comes back as a dict; wrap it so the template and CSV writer see a list.
        tuple_sensors = [tuple(sensors.values())]
        sensors = [sensors]
    else:
        tuple_sensors = [tuple(sensor.values()) for sensor in sensors]
    utils.write_sensors_to_csv(tuple_sensors)
    return render_template("databins.html", sensors=sensors)
def reset_password(token):
    if current_user.is_authenticated:
        return redirect(url_for('main.index'))
    form = ResetPasswordForm()
    if form.validate_on_submit():
        user = User.query.filter_by(email=form.email.data.lower()).first()
        if user is None:
            return redirect(url_for('main.index'))
        if validate_token(user=user, token=token,
                          operation=Operations.RESET_PASSWORD,
                          new_password=form.password.data):
            flash('Password updated.', 'success')
            return redirect(url_for('.login'))
        else:
            flash('Invalid or expired link.', 'danger')
            return redirect(url_for('.forget_password'))
    return render_template('auth/reset_password.html', form=form)
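The confirm, change_email, and reset_password views above all call a shared validate_token(user=..., token=..., operation=...) helper that is not included here. Below is a minimal sketch of one way to implement it with itsdangerous 1.x's TimedJSONWebSignatureSerializer, assuming tokens were issued with the user id and operation embedded; the Operations constants, user.set_password, the new_email claim, and the db session handle are all assumptions rather than the original project's code.

from flask import current_app
from itsdangerous import TimedJSONWebSignatureSerializer as Serializer
from itsdangerous import BadSignature, SignatureExpired

def validate_token(user, token, operation, new_password=None):
    s = Serializer(current_app.config['SECRET_KEY'])
    try:
        data = s.loads(token)
    except (BadSignature, SignatureExpired):
        return False
    # The token must have been issued for this user and this operation.
    if data.get('id') != user.id or data.get('operation') != operation:
        return False
    if operation == Operations.CONFIRM:
        user.confirmed = True
    elif operation == Operations.RESET_PASSWORD:
        user.set_password(new_password)
    elif operation == Operations.CHANGE_EMAIL:
        user.email = data.get('new_email')
    else:
        return False
    db.session.commit()
    return True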
def login():
    '''
    POST login to system and generate JWT token
    :param request: flask request object
    '''
    db = DbClient()
    if validate_token(request):
        sensors = []
        sensors_low = db.sensors.get_sensor_between_capacity(0, 25)
        if sensors_low:
            sensors += sensors_low
        sensors_mid = db.sensors.get_sensor_between_capacity(26, 75)
        if sensors_mid:
            sensors += sensors_mid
        sensors_full = db.sensors.get_sensor_between_capacity(76, 100)
        if sensors_full:
            sensors += sensors_full
        sensors = [tuple(sensor.values()) for sensor in sensors]
        utils.write_sensors_to_csv(sensors)
        sensors = [[x[1], x[4], x[5], x[3], x[2], x[0]] for x in sensors]
        return render_template("index.html",
                               sensors=sensors,
                               persent_count=len(sensors),
                               total_count=len(sensors))
    if request.method == 'POST':
        try:
            data = request.values
            user = data.get('username')
            password = data.get('password')
            if not (user and password):
                logger.info(f'Login failed on {request.remote_addr}, missing credentials')
                raise InvalidCredentials(user, password)
            user_data = db.users.get_user_by_username(user)
            if not user_data:
                raise UserNotExists(user)
            elif not (str.encode(password) == decrypt(conf.PASSWORD_ENCRYPTION_KEY,
                                                      user_data['password'])):
                raise InvalidCredentials(user)
            else:
                token = generate_token(user_data['id'])
                logger.info(f'Token for user {user} created. token: {token}')
                sensors = []
                sensors_low = db.sensors.get_sensor_between_capacity(0, 25)
                if sensors_low:
                    sensors += sensors_low
                sensors_mid = db.sensors.get_sensor_between_capacity(26, 75)
                if sensors_mid:
                    sensors += sensors_mid
                sensors_full = db.sensors.get_sensor_between_capacity(76, 100)
                if sensors_full:
                    sensors += sensors_full
                sensors = [tuple(sensor.values()) for sensor in sensors]
                utils.write_sensors_to_csv(sensors)
                sensors = [[x[1], x[4], x[5], x[3], x[2], x[0]] for x in sensors]
                resp = make_response(render_template("index.html", sensors=sensors))
                resp.set_cookie('token', token)
                return resp
        except UserNotVerified as e:
            logger.warning(str(e))
            return render_template("error_page.html", error_msg=str(e))
        except UserNotExists as e:
            logger.warning(str(e))
            return render_template("error_page.html", error_msg=str(e))
        except InvalidCredentials as e:
            logger.warning(str(e))
            return render_template("error_page.html", error_msg=str(e))
        except Exception as e:
            logger.exception(f'Failed login from {request.remote_addr}')
            return render_template("error_page.html", error_msg=str(e))
    else:
        return render_template('login.html')
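The sensor-dashboard views in this collection (login, main, download, new_databins, new_calc, new_stats, new_about) depend on generate_token(user_id), validate_token(request), and get_data_by_token(token), none of which appear here. The following is a minimal PyJWT-based sketch of that trio, consistent with how the views use them (a 'token' cookie, a user_id claim read in new_calc); the conf.JWT_SECRET setting, token lifetime, and claim names are assumptions, not the project's actual implementation.

import datetime
import jwt  # PyJWT

def generate_token(user_id):
    # Issue a signed token carrying the user id and an expiry.
    payload = {
        'user_id': user_id,
        'exp': datetime.datetime.utcnow() + datetime.timedelta(hours=12),
    }
    return jwt.encode(payload, conf.JWT_SECRET, algorithm='HS256')

def get_data_by_token(token):
    # Return the decoded claims ({'user_id': ..., 'exp': ...}) or None if missing/invalid/expired.
    if not token:
        return None
    try:
        return jwt.decode(token, conf.JWT_SECRET, algorithms=['HS256'])
    except jwt.InvalidTokenError:
        return None

def validate_token(request):
    # The views store the token in a cookie after login (see login() above).
    return get_data_by_token(request.cookies.get('token', None)) is not None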
def register(request):
    logger = utils.CloudLogger().log_request(request)
    try:
        data = request.get_json()
        # 1) Validate the token
        if not isinstance(data, dict):
            return ({'error': "Bad Request",
                     'message': ("No data was provided" if data is None
                                 else "Expected JSON dictionary in request body")}, 400)
        token = utils.extract_token(request.headers, data)
        if token is None:
            return ({'error': 'Bad Request',
                     'message': 'Token must be provided in header or body'}, 400)
        token_info = utils.get_token_info(token)
        if 'error' in token_info:
            return ({'error': 'Invalid Token',
                     'message': token_info['error_description'] if 'error_description' in token_info
                     else 'Google rejected the client token'}, 401)
        if not utils.validate_token(token_info):
            return ({'error': 'Rejected token',
                     'message': 'Token was valid but did not meet Lapdog security requirements.'
                                ' Token must have email, profile, openid, and devstorage.read_write scopes.'
                                ' Broad users must authenticate via a LapdogToken'}, 403)
        # 2) Validate user's permission for the bucket
        if 'bucket' not in data:
            return ({'error': 'Bad Request',
                     'message': 'Missing required parameter "bucket"'}, 400)
        session = utils.generate_user_session(token)
        fc_auth = (utils.generate_user_session(request.headers['X-Fc-Auth'])
                   if 'X-Fc-Auth' in request.headers else None)
        fc_auth_error = (
            'Authorized'
            if fc_auth is not None or 'cloud-platform' in token_info['scope']
            else 'Not Authorized. Repeat request with the "X-Fc-Auth" header'
                 ' containing application-default credentials'
        )
        if fc_auth is None:
            logger.log("Missing backup Firecloud authentication",
                       token_info=token_info,
                       authorized='cloud-platform' in token_info['scope'],
                       severity='DEBUG')
        read, write = utils.validate_permissions(session, data['bucket'])
        if read is None:
            # Error, write will contain a message
            return ({'error': 'Cannot Validate Bucket Permissions',
                     'message': write}, 400)
        if not (read and write):
            # User doesn't have full permissions to the bucket
            return ({'error': 'Not Authorized',
                     'message': 'User lacks read/write permissions to the requested bucket'}, 403)
        # 2.b) Verify that the bucket belongs to this project
        if 'namespace' not in data or 'workspace' not in data:
            return ({'error': 'Bad Request',
                     'message': 'Missing required parameters "namespace" and "workspace"'}, 400)
        core_session = utils.generate_core_session()
        result, message = utils.authenticate_bucket(
            data['bucket'], data['namespace'], data['workspace'],
            fc_auth if fc_auth is not None else session, core_session)
        if not result:
            return ({'error': 'Cannot Validate Bucket Signature',
                     'message': message,
                     'FC-Auth': fc_auth_error}, 400)
        # 3) Issue worker account
        default_session = utils.generate_default_session(
            scopes=['https://www.googleapis.com/auth/cloud-platform'])
        account_email = utils.ld_acct_in_project(token_info['email'])
        response = utils.query_service_account(default_session, account_email)
        if response.status_code == 404:
            account_name = account_email.split('@')[0]
            logger.log('Issuing new pet service account',
                       user=token_info['email'],
                       service_account=account_email,
                       severity='DEBUG')
            response = default_session.post(
                'https://iam.googleapis.com/v1/projects/{project}/serviceAccounts'.format(
                    project=os.environ.get('GCP_PROJECT')),
                headers={'Content-Type': 'application/json'},
                json={
                    'accountId': account_name,
                    'serviceAccount': {'displayName': token_info['email']}
                })
            if response.status_code >= 400:
                return ({'error': 'Unable to issue service account',
                         'message': response.text}, 400)
        elif response.status_code >= 400:
            return ({'error': 'Unable to query service account',
                     'message': response.text}, 400)
        if response.json()['email'] != account_email:
            return ({'error': 'Service account email did not match expected value',
                     'message': response.json()['email'] + ' != ' + account_email}, 400)
        # 4) Update worker bindings
        logger.log("Updating service account bindings",
                   account=account_email,
                   bindings={
                       os.environ.get("FUNCTION_IDENTITY"): 'roles/iam.serviceAccountUser',
                       account_email: 'roles/iam.serviceAccountUser'
                   })
        response = default_session.post(
            'https://iam.googleapis.com/v1/projects/{project}/serviceAccounts/{account}:setIamPolicy'.format(
                project=os.environ.get('GCP_PROJECT'), account=account_email),
            headers={'Content-Type': 'application/json'},
            json={
                "policy": {
                    "bindings": [
                        {
                            "role": "roles/iam.serviceAccountUser",
                            "members": [
                                # Allows the cloud functions account to set this pet account on cromwell servers
                                "serviceAccount:{email}".format(email=os.environ.get("FUNCTION_IDENTITY")),
                                # Allows the service account to set itself as the compute account on cromwell workers
                                "serviceAccount:{email}".format(email=account_email)
                            ]
                        }
                    ]
                },
                "updateMask": "bindings"
            })
        if response.status_code != 200:
            return ({'error': 'Unable to update service account bindings',
                     'message': '(%d) : %s' % (response.status_code, response.text)}, 400)
        # 5) Update project bindings
        logger.log("Updating project-wide iam roles",
                   bindings={account_email: 'Pet_account',
                             token_info['email']: 'Lapdog_user'},
                   severity="INFO")
        status, response = utils.update_iam_policy(
            default_session,
            {
                'serviceAccount:' + account_email: 'Pet_account',
                'user:' + token_info['email']: 'Lapdog_user'
            })
        if not status:
            return ({'error': 'Unable to update project IAM policy',
                     'message': '(%d) : %s' % (response.status_code, response.text)}, 400)
        # 6) Generate Key
        logger.log('Issuing new service account key', service_account=account_email)
        response = default_session.post(
            'https://iam.googleapis.com/v1/projects/{project}/serviceAccounts/{email}/keys'.format(
                project=os.environ.get('GCP_PROJECT'), email=quote(account_email)))
        if response.status_code >= 400:
            return ({'error': 'Unable to issue service account key',
                     'message': response.text}, 400)
        # 7) Register with Firecloud
        time.sleep(10)  # New service account keys take a few seconds before they're usable
        # Register the user's new pet service account w/ Firecloud
        # We authenticate as the service account by using the newly generated key
        pet_session = AuthorizedSession(
            google.oauth2.service_account.Credentials.from_service_account_info(
                json.loads(base64.b64decode(response.json()['privateKeyData']).decode())
            ).with_scopes([
                'https://www.googleapis.com/auth/userinfo.profile',
                'https://www.googleapis.com/auth/userinfo.email'
            ]))
        while True:
            try:
                response = pet_session.post(
                    "https://api.firecloud.org/register/profile",
                    headers={'User-Agent': 'FISS/0.16.9',
                             'Content-Type': 'application/json'},
                    json={
                        "firstName": "Service",
                        "lastName": "Account",
                        "title": "None",
                        "contactEmail": token_info['email'],
                        "institute": "None",
                        "institutionalProgram": "None",
                        "programLocationCity": "None",
                        "programLocationState": "None",
                        "programLocationCountry": "None",
                        "pi": "None",
                        "nonProfitStatus": "false"
                    },
                    timeout=10)
                break
            except google.auth.exceptions.RefreshError:
                logger.log_exception("Service account key not ready")
                time.sleep(10)  # need more time for key to propagate
        if response.status_code != 200:
            return ({'error': "Unable to register account with firecloud",
                     'message': response.text}, 400)
        # 8) Check ProxyGroup
        # Either add the new service account to the user's Firecloud proxy group
        # Or create the group, if it doesn't exist
        fc_session = fc_auth if fc_auth is not None else session
        response = fc_session.get('https://api.firecloud.org/api/groups',
                                  headers={'User-Agent': 'FISS/0.16.9'},
                                  timeout=5)
        if response.status_code != 200:
            return ({'error': "Unable to enumerate user's groups",
                     'message': response.text,
                     'FC-Auth': fc_auth_error}, 400)
        target_group = utils.proxy_group_for_user(token_info['email'])
        for group in response.json():
            if group['groupName'] == target_group:
                # 9) Register Account in Group
                response = fc_session.put(
                    'https://api.firecloud.org/api/groups/{group}/member/{email}'.format(
                        group=target_group, email=quote(account_email)),
                    timeout=5)
                if response.status_code != 204:
                    return ({'error': 'Unable to add pet account to proxy group',
                             'message': "Please manually add {email} to {group}".format(
                                 group=target_group, email=quote(account_email)),
                             'FC-Auth': fc_auth_error}, 400)
                return (account_email, 200)
        # 8.b) Create Group
        response = session.post(
            'https://api.firecloud.org/api/groups/{group}'.format(group=target_group),
            timeout=5)
        if response.status_code >= 400:
            return ({'error': 'Unable to create Firecloud proxy group',
                     'message': response.text}, 400)
        # 9) Register Account in Group
        response = fc_session.put(
            'https://api.firecloud.org/api/groups/{group}/member/{email}'.format(
                group=target_group, email=quote(account_email)),
            timeout=5)
        if response.status_code != 204:
            return ({'error': 'Unable to add pet account to proxy group',
                     'message': "Please manually add {email} to {group}".format(
                         group=target_group + '@firecloud.org', email=quote(account_email)),
                     'FC-Auth': fc_auth_error}, 400)
        return (account_email, 200)
    except requests.ReadTimeout:
        logger.log_exception('Firecloud timeout')
        return ({'error': 'timeout to firecloud',
                 'message': 'Took longer than 5 seconds for Firecloud to respond.'
                            ' Please try again later'}, 400)
    except:
        logger.log_exception()
        return ({'error': 'Unknown Error',
                 'message': traceback.format_exc()}, 500)
def new_about():
    if not validate_token(request):
        return render_template("error_page.html")
    user = get_data_by_token(request.cookies.get('token', None))
    return render_template("about.html")
def quotas(request):
    logger = utils.CloudLogger().log_request(request)
    try:
        # 1) Validate the token
        token = utils.extract_token(request.headers, None)
        if token is None:
            return ({'error': 'Bad Request',
                     'message': 'Token must be provided in header or body'}, 400)
        token_info = utils.get_token_info(token)
        if 'error' in token_info:
            return ({'error': 'Invalid Token',
                     'message': token_info['error_description'] if 'error_description' in token_info
                     else 'Google rejected the client token'}, 401)
        if not utils.validate_token(token_info):
            return ({'error': 'Rejected token',
                     'message': 'Token was valid but did not meet Lapdog security requirements.'
                                ' Token must have email, profile, openid, and devstorage.read_write scopes.'
                                ' Broad users must authenticate via a LapdogToken'}, 403)
        # 2) Check service account
        default_session = utils.generate_default_session(
            scopes=['https://www.googleapis.com/auth/cloud-platform'])
        account_email = utils.ld_acct_in_project(token_info['email'])
        response = utils.query_service_account(default_session, account_email)
        if response.status_code >= 400:
            return ({'error': 'Unable to query service account',
                     'message': response.text}, 400)
        if response.json()['email'] != account_email:
            return ({'error': 'Service account email did not match expected value',
                     'message': response.json()['email'] + ' != ' + account_email}, 400)
        # 3) Query quota usage
        project_usage = default_session.get(
            'https://www.googleapis.com/compute/v1/projects/{project}'.format(
                project=os.environ.get('GCP_PROJECT')))
        if project_usage.status_code != 200:
            return ({'error': 'Invalid response from Google',
                     'message': '(%d) : %s' % (project_usage.status_code, project_usage.text)}, 400)
        quotas = [
            {
                **quota,
                'percent': ('%0.2f%%' % (100 * quota['usage'] / quota['limit']))
                           if quota['limit'] > 0 else '0.00%'
            }
            for quota in project_usage.json()['quotas']
        ]
        for region_name in utils.enabled_regions():
            region_usage = default_session.get(
                'https://www.googleapis.com/compute/v1/projects/{project}/regions/{region}'.format(
                    project=os.environ.get('GCP_PROJECT'), region=region_name))
            if region_usage.status_code != 200:
                return ({'error': 'Invalid response from Google',
                         'message': '(%d) : %s' % (region_usage.status_code, region_usage.text)}, 400)
            quotas += [
                {
                    **quota,
                    'percent': ('%0.2f%%' % (100 * quota['usage'] / quota['limit']))
                               if quota['limit'] > 0 else '0.00%',
                    'metric': region_name + '.' + quota['metric']
                }
                for quota in region_usage.json()['quotas']
            ]
        return ({
            'raw': quotas,
            'alerts': [quota for quota in quotas
                       if quota['limit'] > 0 and quota['usage'] / quota['limit'] >= 0.5]
        }, 200)
    except:
        logger.log_exception()
        return ({'error': 'Unknown Error',
                 'message': traceback.format_exc()}, 500)
def new_calc():
    db = DbClient()
    if not validate_token(request):
        return render_template("error_page.html")
    user = get_data_by_token(request.cookies.get('token', None))
    username = db.users.get_user_by_id(user['user_id'])['user']
    config_trashold = conf.trash_threshold
    present_treshold = conf.trash_threshold
    if request.method == 'POST':
        if request.form.get('range'):
            present_percent = request.form.get('range')
            capacity = int(present_percent)
            present_treshold = capacity
        if request.form.get('trashold'):
            result = request.form.get('trashold')
            if '%' in result:
                threshold = int(result[:-1])
            else:
                threshold = int(result)
            conf.trash_threshold = int(threshold)
            capacity = int(conf.trash_threshold)
            config_trashold = capacity
            present_treshold = capacity
    else:
        capacity = conf.trash_threshold
    pickup_sensors = db.sensors.get_sensor_over_x_capacity(capacity) or []
    remain_sensors = db.sensors.get_sensor_under_x_capacity(capacity) or []
    threshold = conf.trash_threshold
    risk_sensors = []
    total_trash_to_pickup = 0
    for sensor in remain_sensors:
        fill_avg = utils.get_avg_fill_per_sensor(
            db.statistics.get_sensor_stat_by_id(sensor['id']))
        if int(sensor['capacity']) + fill_avg >= threshold:
            risk_sensors.append(sensor)
        sensor['fill_avg'] = fill_avg
    for sensor in pickup_sensors:
        fill_avg = utils.get_avg_fill_per_sensor(
            db.statistics.get_sensor_stat_by_id(sensor['id']))
        total_trash_to_pickup += sensor['capacity']
        sensor['fill_avg'] = fill_avg
        calculate_capacity = sensor['capacity']
        days_until_full = 0
        while calculate_capacity <= capacity:
            calculate_capacity += fill_avg
            days_until_full += 1
        sensor['day_until_full'] = days_until_full
    truck_needed = round(total_trash_to_pickup / 1000)
    if truck_needed == 0:
        truck_needed = 1
    return render_template("calc.html",
                           sensors=pickup_sensors,
                           capacityint=capacity,
                           total_to_pickup=len(pickup_sensors),
                           config_trashold=config_trashold,
                           present_treshold=present_treshold,
                           risked=len(risk_sensors),
                           truck_needed=truck_needed,
                           unrisked=len(pickup_sensors) + len(remain_sensors) - len(risk_sensors),
                           username=username)
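A small worked example of the arithmetic new_calc() performs, using only the rules visible in the code above; the numbers are illustrative, not taken from the project.

def days_until_full(capacity_now, fill_avg, threshold):
    # Mirrors the while-loop in new_calc(): count days until the projected fill exceeds the threshold.
    days = 0
    level = capacity_now
    while level <= threshold:
        level += fill_avg
        days += 1
    return days

# A bin at 80% that gains about 7% per day against a 100% threshold crosses it after 3 steps
# (80 -> 87 -> 94 -> 101).
assert days_until_full(80, 7, 100) == 3
# A pickup total of 265 units gives round(265 / 1000) == 0 trucks, which new_calc() bumps to 1.
assert max(1, round(265 / 1000)) == 1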
def new_stats():
    db = DbClient()
    if not validate_token(request):
        return render_template("error_page.html")
    user = get_data_by_token(request.cookies.get('token', None))
    return render_template("analytics.html")
def create_submission(request):
    logger = utils.CloudLogger().log_request(request)
    try:
        data = request.get_json()
        # 1) Validate the token
        if not isinstance(data, dict):
            return ({'error': "Bad Request",
                     'message': ("No data was provided" if data is None
                                 else "Expected JSON dictionary in request body")}, 400)
        token = utils.extract_token(request.headers, data)
        if token is None:
            return ({'error': 'Bad Request',
                     'message': 'Token must be provided in header or body'}, 400)
        token_info = utils.get_token_info(token)
        if 'error' in token_info:
            return ({'error': 'Invalid Token',
                     'message': token_info['error_description'] if 'error_description' in token_info
                     else 'Google rejected the client token'}, 401)
        if not utils.validate_token(token_info):
            return ({'error': 'Rejected token',
                     'message': 'Token was valid but did not meet Lapdog security requirements.'
                                ' Token must have email, profile, openid, and devstorage.read_write scopes.'
                                ' Broad users must authenticate via a LapdogToken'}, 403)
        # 1.b) Verify the user has a pet account
        response = utils.query_service_account(
            utils.generate_default_session(
                scopes=['https://www.googleapis.com/auth/cloud-platform']),
            utils.ld_acct_in_project(token_info['email']))
        if response.status_code != 200:
            return ({'error': 'User has not registered with this Lapdog Engine',
                     'message': response.text}, 401)
        # 2) Validate user's permission for the bucket
        if 'bucket' not in data:
            return ({'error': 'Bad Request',
                     'message': 'Missing required parameter "bucket"'}, 400)
        session = utils.generate_user_session(token)
        fc_auth = (utils.generate_user_session(request.headers['X-Fc-Auth'])
                   if 'X-Fc-Auth' in request.headers else None)
        if fc_auth is None:
            logger.log("Missing backup Firecloud authentication",
                       token_info=token_info,
                       authorized='cloud-platform' in token_info['scope'],
                       severity='DEBUG')
        read, write = utils.validate_permissions(session, data['bucket'])
        if read is None:
            # Error, write will contain a message
            return ({'error': 'Cannot Validate Bucket Permissions',
                     'message': write}, 400)
        if not (read and write):
            # User doesn't have full permissions to the bucket
            return ({'error': 'Not Authorized',
                     'message': 'User lacks read/write permissions to the requested bucket'}, 403)
        # 2.b) Verify that the bucket belongs to this project
        if 'namespace' not in data or 'workspace' not in data:
            return ({'error': 'Bad Request',
                     'message': 'Missing required parameters "namespace" and "workspace"'}, 400)
        core_session = utils.generate_core_session()
        result, message = utils.authenticate_bucket(
            data['bucket'], data['namespace'], data['workspace'],
            fc_auth if fc_auth is not None else session, core_session)
        if not result:
            return ({'error': 'Cannot Validate Bucket Signature',
                     'message': message,
                     'FC-Auth': ('Authorized'
                                 if fc_auth is not None or 'cloud-platform' in token_info['scope']
                                 else 'Not Authorized. Repeat request with the "X-Fc-Auth" header'
                                      ' containing application-default credentials')}, 400)
        # 3) Check that submission.json exists, and is less than 1 GiB
        if 'submission_id' not in data:
            return ({'error': 'Bad Request',
                     'message': 'Missing required parameter "submission_id"'}, 400)
        submission = utils.fetch_submission_blob(session, data['bucket'], data['submission_id'])
        result, message = utils.validate_submission_file(submission)
        if not result:
            return ({'error': 'Bad Submission', 'message': message}, 400)
        # 4) Submit pipelines request
        region = 'us-central1'
        if 'compute_region' in data:
            allowed_regions = utils.enabled_regions()
            if data['compute_region'] in allowed_regions:
                region = data['compute_region']
            else:
                return ({'error': "Invalid Region",
                         'message': "Region not allowed. Enabled regions: " + repr(allowed_regions)}, 400)
        if 'memory' in data and data['memory'] > 3072:
            mtype = 'custom-%d-%d' % (
                math.ceil(data['memory'] / 13312) * 2,  # Cheapest core:memory ratio
                data['memory'])
        else:
            mtype = 'n1-standard-1'
        pipeline = {
            'pipeline': {
                'actions': [
                    {
                        'imageUri': 'gcr.io/broad-cga-aarong-gtex/wdl_runner:' + __CROMWELL_TAG__,
                        'commands': ['/wdl_runner/wdl_runner.sh'],
                        'environment': {
                            'SIGNATURE_ENDPOINT': 'https://{region}-{project}.cloudfunctions.net/signature-{version}'.format(
                                region=os.environ.get('FUNCTION_REGION'),
                                project=os.environ.get("GCP_PROJECT"),
                                version=__API_VERSION__['signature']),
                            'LAPDOG_PROJECT': os.environ.get('GCP_PROJECT'),
                            'WDL': "gs://{bucket}/lapdog-executions/{submission_id}/method.wdl".format(
                                bucket=data['bucket'], submission_id=data['submission_id']),
                            'WORKFLOW_INPUTS': "gs://{bucket}/lapdog-executions/{submission_id}/config.tsv".format(
                                bucket=data['bucket'], submission_id=data['submission_id']),
                            'WORKFLOW_OPTIONS': json.dumps(data['options']) if 'options' in data else '{}',
                            'LAPDOG_SUBMISSION_ID': data['submission_id'],
                            'WORKSPACE': "gs://{bucket}/lapdog-executions/{submission_id}/workspace/".format(
                                bucket=data['bucket'], submission_id=data['submission_id']),
                            'OUTPUTS': "gs://{bucket}/lapdog-executions/{submission_id}/results".format(
                                bucket=data['bucket'], submission_id=data['submission_id']),
                            'SUBMISSION_DATA_PATH': "gs://{bucket}/lapdog-executions/{submission_id}/submission.json".format(
                                bucket=data['bucket'], submission_id=data['submission_id']),
                            'LAPDOG_LOG_PATH': "gs://{bucket}/lapdog-executions/{submission_id}/logs".format(
                                bucket=data['bucket'], submission_id=data['submission_id']),
                            'PRIVATE_ACCESS': 'true' if ('no_ip' in data and data['no_ip']) else 'false',
                            'SUBMISSION_ZONES': " ".join(
                                '{}-{}'.format(region, zone) for zone in GCP_ZONES[region]),
                            'DUMP_PATH': ("gs://{bucket}/lapdog-call-cache.sql".format(bucket=data['bucket'])
                                          if 'callcache' in data and data['callcache'] else "")
                        }
                    }
                ],
                'resources': {
                    'regions': [region],
                    'virtualMachine': {
                        'machineType': mtype,
                        'preemptible': False,
                        'labels': {
                            'lapdog-execution-role': 'cromwell',
                            'lapdog-submission-id': data['submission_id']
                        },
                        'serviceAccount': {
                            'email': utils.ld_acct_in_project(token_info['email']),
                            'scopes': [
                                "https://www.googleapis.com/auth/cloud-platform",
                                "https://www.googleapis.com/auth/compute",
                                "https://www.googleapis.com/auth/devstorage.read_write",
                                "https://www.googleapis.com/auth/genomics"
                            ]
                        },
                        'bootDiskSizeGb': 20 + (max(0, data['cache_size'] - 10) if 'cache_size' in data else 0),
                        'network': {
                            'network': 'default',
                            'usePrivateAddress': ('no_ip' in data and data['no_ip'])
                        }
                    }
                },
            }
        }
        papi_url = 'https://lifesciences.googleapis.com/v2beta/projects/{}/locations/{}/pipelines:run'.format(
            os.environ.get('GCP_PROJECT'), region)
        logger.log("Launching LifeSciences v2Beta pipeline",
                   pipeline=pipeline['pipeline'],
                   url=papi_url,
                   severity='NOTICE')
        response = utils.generate_default_session([
            "https://www.googleapis.com/auth/cloud-platform",
            "https://www.googleapis.com/auth/compute",
            "https://www.googleapis.com/auth/genomics"
        ]).post(papi_url,
                headers={'Content-Type': 'application/json'},
                json=pipeline)
        try:
            if response.status_code == 200:
                operation = response.json()['name']
                # 5) Sign the operation
                logger.log("Generating new signature",
                           data=(data['submission_id'] + operation))
                utils.sign_object(
                    (data['submission_id'] + operation).encode(),
                    utils.getblob(
                        'gs://{bucket}/lapdog-executions/{submission_id}/signature'.format(
                            bucket=data['bucket'], submission_id=data['submission_id']),
                        credentials=session.credentials),
                    core_session.credentials)
                return operation, 200
        except:
            logger.log_exception('PAPIv2 request failed')
            return ({'error': 'Unable to start submission',
                     'message': traceback.format_exc()}, 500)
        return ({'error': 'Unable to start submission',
                 'message': 'Google rejected the pipeline request (%d) : %s' % (
                     response.status_code, response.text)}, 400)
    except:
        logger.log_exception()
        return ({'error': 'Unknown Error',
                 'message': traceback.format_exc()}, 500)
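A worked example of the machine-type selection in create_submission() above, using only the formula from the code; the request value is illustrative.

import math

def machine_type_for(memory_mb=None):
    # Requests over 3072 MB get a custom machine type at the cheapest core:memory ratio (13312 MB per 2 cores).
    if memory_mb is not None and memory_mb > 3072:
        return 'custom-%d-%d' % (math.ceil(memory_mb / 13312) * 2, memory_mb)
    return 'n1-standard-1'

# data['memory'] = 16384 gives ceil(16384 / 13312) * 2 = 2 * 2 = 4 cores.
assert machine_type_for(16384) == 'custom-4-16384'
# Anything at or under 3072 MB (or no memory request) falls back to the default machine type.
assert machine_type_for() == 'n1-standard-1'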
def main():
    db = DbClient()
    try:
        if not validate_token(request):
            return render_template(
                "error_page.html",
                error_msg="You are not logged in. Please go to the login page to access the system")
        user = get_data_by_token(request.cookies.get('token', None))
        if request.method == 'POST':
            sensors_low = []
            sensors_mid = []
            sensors_full = []
            sensors, checked_list = utils.get_sensors_by_main_filters(request)
            for sensor in sensors:
                sensor = [sensor]
                if int(sensor[0][2]) < 25:
                    sensors_low = sensors_low + sensor
                elif int(sensor[0][2]) < 75:
                    sensors_mid = sensors_mid + sensor
                else:
                    sensors_full = sensors_full + sensor
        else:
            sensors = []
            sensors_low = db.sensors.get_sensor_between_capacity(0, 25)
            if sensors_low:
                sensors += sensors_low
            sensors_mid = db.sensors.get_sensor_between_capacity(26, 75)
            if sensors_mid:
                sensors += sensors_mid
            sensors_full = db.sensors.get_sensor_between_capacity(76, 100)
            if sensors_full:
                sensors += sensors_full
            sensors = [tuple(sensor.values()) for sensor in sensors]
            utils.write_sensors_to_csv(sensors)
            sensors = [[x[1], x[4], x[5], x[3], x[2], x[0]] for x in sensors]
            return render_template("index.html",
                                   sensors=sensors,
                                   persent_count=len(sensors),
                                   total_count=len(sensors))
        utils.write_sensors_to_csv(sensors)
        sensor_count = db.sensors.get_count_sensors()['count(*)']
        sensors = [[x[1], x[4], x[5], x[3], x[2], x[0]] for x in sensors]
        return render_template("index.html",
                               sensors=sensors,
                               capacity_empty=checked_list[0],
                               capacity_mid=checked_list[1],
                               capacity_full=checked_list[2],
                               over_trashold=checked_list[3],
                               below_trashold=checked_list[4],
                               ConnectedBins=checked_list[5],
                               FailedBins=checked_list[6],
                               persent_count=len(sensors),
                               total_count=sensor_count)
    except Exception as e:
        print(traceback.format_exc())
        print(e)
def abort_submission(request):
    logger = utils.CloudLogger().log_request(request)
    try:
        data = request.get_json()
        # 1) Validate the token
        if not isinstance(data, dict):
            return ({'error': "Bad Request",
                     'message': ("No data was provided" if data is None
                                 else "Expected JSON dictionary in request body")}, 400)
        token = utils.extract_token(request.headers, data)
        if token is None:
            return ({'error': 'Bad Request',
                     'message': 'Token must be provided in header or body'}, 400)
        token_info = utils.get_token_info(token)
        if 'error' in token_info:
            return ({'error': 'Invalid Token',
                     'message': token_info['error_description'] if 'error_description' in token_info
                     else 'Google rejected the client token'}, 401)
        if not utils.validate_token(token_info):
            return ({'error': 'Rejected token',
                     'message': 'Token was valid but did not meet Lapdog security requirements.'
                                ' Token must have email, profile, openid, and devstorage.read_write scopes.'
                                ' Broad users must authenticate via a LapdogToken'}, 403)
        # 2) Validate user's permission for the bucket
        if 'bucket' not in data:
            return ({'error': 'Bad Request',
                     'message': 'Missing required parameter "bucket"'}, 400)
        session = utils.generate_user_session(token)
        read, write = utils.validate_permissions(session, data['bucket'])
        if read is None:
            # Error, write will contain a message
            return ({'error': 'Cannot Validate Bucket Permissions',
                     'message': write}, 400)
        if not (read and write):
            # User doesn't have full permissions to the bucket
            return ({'error': 'Not Authorized',
                     'message': 'User lacks read/write permissions to the requested bucket'}, 401)
        # 3) Check that submission.json exists, and is less than 1 GiB
        if 'submission_id' not in data:
            return ({'error': 'Bad Request',
                     'message': 'Missing required parameter "submission_id"'}, 400)
        submission = utils.fetch_submission_blob(session, data['bucket'], data['submission_id'])
        result, message = utils.validate_submission_file(submission)
        if not result:
            return ({'error': 'Bad Submission', 'message': message}, 400)
        # 4) Download submission and parse operation
        try:
            submission = json.loads(submission.download_as_string().decode())
        except:
            return ({'error': 'Invalid Submission',
                     'message': 'Submission was not valid JSON'}, 400)
        if 'operation' not in submission:
            return ({'error': 'Invalid Submission',
                     'message': 'Submission contained no operation metadata'}, 400)
        signature_blob = utils.getblob(
            'gs://{bucket}/lapdog-executions/{submission_id}/signature'.format(
                bucket=data['bucket'], submission_id=data['submission_id']),
            credentials=session.credentials)
        if not signature_blob.exists():
            return ({'error': 'No Signature',
                     'message': 'The submission signature could not be found. Refusing to abort job'}, 403)
        if not utils.verify_signature(
                signature_blob,
                (data['submission_id'] + submission['operation']).encode()):
            return ({'error': 'Invalid Signature',
                     'message': 'Could not validate submission signature. Refusing to abort job'}, 403)
        core_session = utils.generate_core_session()
        # 5) Generate abort key
        logger.log("Generating new signature", data=data['submission_id'])
        utils.sign_object(
            data['submission_id'].encode(),
            utils.getblob(
                'gs://{bucket}/lapdog-executions/{submission_id}/abort-key'.format(
                    bucket=data['bucket'], submission_id=data['submission_id']),
                credentials=session.credentials),
            core_session.credentials)
        if 'hard' in data and data['hard']:
            # 6) Abort operation
            logger.log("Hard-aborting submission",
                       submission_id=data['submission_id'],
                       operation_id=submission['operation'],
                       severity='NOTICE')
            response = core_session.post(
                "https://genomics.googleapis.com/v2alpha1/{operation}:cancel".format(
                    operation=quote(submission['operation'])  # Do not quote slashes here
                ))
            return response.text, response.status_code
        return ({'status': 'Aborting',
                 'message': 'A soft-abort request has been sent.'
                            ' If the submission does not abort soon, abort it with hard=True'
                            ' to force-kill the cromwell server'}, 200)
    except:
        logger.log_exception()
        return ({'error': 'Unknown Error',
                 'message': traceback.format_exc()}, 500)
                              IGNORE_FILES)
zip_file = open(CHALLENGE_ZIP_FILE_PATH, 'rb')
file = {"zip_configuration": zip_file}
data = {"GITHUB_REPOSITORY": GITHUB_REPOSITORY}
try:
    response = requests.post(url, data=data, headers=headers, files=file)
    if response.status_code != http.HTTPStatus.OK and response.status_code != http.HTTPStatus.CREATED:
        response.raise_for_status()
    else:
        print("\n" + response.json()["Success"])
except requests.exceptions.HTTPError as err:
    if response.status_code in EVALAI_ERROR_CODES:
        is_token_valid = validate_token(response.json())
        if is_token_valid:
            error = response.json()["error"]
            error_message = "\nFollowing errors occurred while validating the challenge config:\n{}".format(error)
            print(error_message)
            os.environ["CHALLENGE_ERRORS"] = error_message
    else:
        print("\nFollowing errors occurred while validating the challenge config: {}".format(err))
        os.environ["CHALLENGE_ERRORS"] = str(err)
except Exception as e:
    if VALIDATION_STEP == "True":
        error_message = "\nFollowing errors occurred while validating the challenge config: {}".format(e)
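The snippet above calls validate_token(response.json()) to decide whether an HTTP error came from a bad auth token or from the challenge config itself. A minimal sketch of that check, modeled on the error shape EvalAI returns; the exact "detail" strings and the environment-variable handling are assumptions.

import os

def validate_token(response_json):
    # Returns False (and records the error) when the EvalAI auth token is the problem.
    if "detail" in response_json:
        if response_json["detail"] == "Invalid token":
            error = "\nThe authentication token you are using is not valid. Please generate it again.\n"
            print(error)
            os.environ["CHALLENGE_ERRORS"] = error
            return False
        if response_json["detail"] == "Token has expired":
            error = "\nThe authentication token has expired. Please generate it again.\n"
            print(error)
            os.environ["CHALLENGE_ERRORS"] = error
            return False
    return True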
def insert_resolution(request):
    """
    This function is unique. It is not deployed into each project.
    It is deployed once into my personal project, which serves as a centralized database.
    """
    logger = utils.CloudLogger().log_request(request)
    try:
        data = request.get_json()
        if not isinstance(data, dict):
            return ({'error': "Bad Request",
                     'message': ("No data was provided" if data is None
                                 else "Expected JSON dictionary in request body")}, 400)
        token = utils.extract_token(request.headers, data)
        token_info = utils.get_token_info(token)
        if 'error' in token_info:
            return ({'error': 'Invalid Token',
                     'message': token_info['error_description'] if 'error_description' in token_info
                     else 'Google rejected the client token'}, 401)
        if not utils.validate_token(token_info):
            return ({'error': 'Rejected token',
                     'message': 'Token was valid but did not meet Lapdog security requirements.'
                                ' Token must have email, profile, openid, and devstorage.read_write scopes.'
                                ' Broad users must authenticate via a LapdogToken'}, 403)
        if 'namespace' not in data:
            return ({'error': 'Missing Parameters',
                     'message': 'Missing required parameter "namespace"'}, 400)
        user_session = utils.generate_user_session(token)
        while True:
            response = user_session.get('https://api.firecloud.org/api/profile/billing')
            if response.status_code == 200:
                break
            print(response.status_code, response.text, file=sys.stderr)
            if response.status_code == 404:
                return ({'error': "User not found",
                         'message': "You are not registered yet with firecloud"}, 404)
            time.sleep(5)
        projects = {proj['projectName']: proj for proj in response.json()}
        if data['namespace'] not in projects:
            return ({'error': "Bad Namespace",
                     'message': 'The provided namespace "%s" could not be found' % data['namespace']}, 400)
        if projects[data['namespace']]['role'] not in {'Owner', 'Admin', 'Administrator'}:
            return ({'error': "Insufficient Permissions",
                     'message': "The user lacks Owner/Admin privileges on the provided namespace"}, 401)
        if 'project' not in data:
            return ({'error': "Missing parameters",
                     'message': 'Missing required parameter "project"'}, 400)
        response = user_session.post(
            'https://cloudresourcemanager.googleapis.com/v1/projects/{project}:getIamPolicy'.format(
                project=data['project']))
        if response.status_code == 403:
            return ({'error': 'Unauthorized',
                     'message': "User lacks permissions on the provided project"}, 401)
        if response.status_code != 200:
            return ({'error': 'Unexpected response from Google API',
                     'message': '(%d) : %s' % (response.status_code, response.text)}, 400)
        for policy in response.json()['bindings']:
            if policy['role'] == 'roles/owner':
                if ('user:' + token_info['email']) in policy['members']:
                    blob = utils.getblob(
                        'gs://lapdog-resolutions/%s' % sha512(data['namespace'].encode()).hexdigest(),
                        credentials=utils.generate_default_session().credentials)
                    if blob.exists():
                        return ({'error': "Already Exists",
                                 'message': "A resolution for this namespace is already in place"}, 409)
                    logger.log("Adding new resolution",
                               namespace=data['namespace'],
                               project_id=data['project'],
                               admin=token_info['email'],
                               severity='NOTICE')
                    blob.upload_from_string(data['project'].encode())
                    return ('gs://lapdog-resolutions/%s' % sha512(data['namespace'].encode()).hexdigest(), 200)
        return ({'error': "Unauthorized",
                 'message': "User lacks ownership of the provided project"}, 400)
    except:
        logger.log_exception()
        return ({'error': "Unknown Error",
                 'message': traceback.format_exc()}, 500)
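insert_resolution() above stores each resolution at gs://lapdog-resolutions/<sha512(namespace)> with the project id as the blob contents. A hedged sketch of the read side, assuming the google-cloud-storage client library; the bucket name and hash scheme come from the code above, everything else is an assumption rather than the project's actual resolver.

from hashlib import sha512
from google.cloud import storage

def resolve_namespace(namespace):
    # Look up which GCP project a Firecloud namespace resolves to, or None if no resolution exists.
    bucket = storage.Client().bucket('lapdog-resolutions')
    blob = bucket.blob(sha512(namespace.encode()).hexdigest())
    if not blob.exists():
        return None
    # insert_resolution() stored the raw project id as the blob contents.
    return blob.download_as_bytes().decode()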