def timeout_handler(self, partials, signum=None, frame=None):
    """
    Custom handler for signal that stores the current check or action
    with the appropriate information and then exits using sys.exit

    Args:
        partials (dict): info about the timed-out run; expects keys
            'is_check', 'connection', 'name', 'kwargs', and 'start_time'.
        signum: signal number (unused; kept for the signal-handler signature).
        frame: current stack frame (unused; kept for the signal-handler signature).
    """
    ignored(signum, frame)
    # A timed-out check is recorded as ERROR; a timed-out action as FAIL.
    if partials['is_check']:
        result = self.CheckResult(partials['connection'], partials['name'])
        result.status = 'ERROR'
    else:
        result = self.ActionResult(partials['connection'], partials['name'])
        result.status = 'FAIL'
    result.description = 'AWS lambda execution reached the time limit. Please see check/action code.'
    kwargs = partials['kwargs']
    kwargs['runtime_seconds'] = round(time.time() - partials['start_time'], 2)
    result.kwargs = kwargs
    # persist the result before exiting so the timeout is visible to users
    result.store_result()
    # need to delete the sqs message and propagate if this is using the queue
    if kwargs.get('_run_info') and {'receipt', 'sqs_url'} <= set(kwargs['_run_info'].keys()):
        runner_input = {'sqs_url': kwargs['_run_info']['sqs_url']}
        self.sqs.delete_message_and_propogate(runner_input, kwargs['_run_info']['receipt'])
    sys.exit(f"-RUN-> TIMEOUT for execution of {partials['name']}."
             f" Elapsed time is {kwargs['runtime_seconds']} seconds;"
             f" keep under {self.CHECK_TIMEOUT}.")
def mocked_submit_genelist(*args, **kwargs):
    """Mock stand-in for submit_genelist; its call args are asserted by the caller."""
    ignored(args, kwargs)
    # We don't need to test this function's actions because we test its call args below.
    # However, we do need to run this one test from the same dynamic context,
    # so this is close enough.
    expected_filename = keyfile or KeyManager.DEFAULT_KEYDICTS_FILENAME
    assert KeyManager.keydicts_filename() == expected_filename
def send_sqs_messages(cls, queue, environ, check_vals, uuid=None):
    """ Send messages to SQS queue. Check_vals are entries within a check_group.
        Optionally, provide a uuid that will be queued as the uuid for the run;
        if not provided, datetime.utcnow is used

        Args:
            queue: boto3 sqs resource (from get_sqs_queue)
            environ (str): foursight environment name
            check_vals (list): list of formatted check vals, like those from
                check_utils.CheckHandler().get_check_schedule
            uuid (str): optional string uuid

        Returns:
            str: uuid of queued messages
    """
    # uuid used as the MessageGroupId
    run_uuid = uuid if uuid else datetime.utcnow().isoformat()
    # environ and uuid are prepended to every check_val before queueing
    for val in check_vals:
        message_body = json.dumps([environ, run_uuid] + val)
        ignored(queue.send_message(MessageBody=message_body))
    return run_uuid
def test_get_s3_utils(self, env):
    """ Sanity test for s3 utils for all envs """
    # 'data' and 'staging' are already full env names; other envs need expansion
    envname = env if env in ['data', 'staging'] else full_env_name(env)
    if _env_is_up_and_healthy(envname):
        print(f"performing init_connection for env {env}")
        conn = self.app_utils_obj.init_connection(env)
        print(f"creating s3Utils for env {env}")
        s3_obj = s3_utils.s3Utils(env=conn.ff_env)
        # the standard buckets should all resolve for a healthy env
        assert (s3_obj.sys_bucket is not None)
        assert (s3_obj.outfile_bucket is not None)
        assert (s3_obj.raw_file_bucket is not None)
        ff_keys = s3_obj.get_access_keys()
        ff_keys_keys = ff_keys.keys()
        ff_keys = None  # for security, so it doesn't show up in errors
        ignored(ff_keys)
        # only key names are checked; the secret values were dropped above
        assert ({'server', 'key', 'secret'} <= set(ff_keys_keys))
        hg_keys = s3_obj.get_higlass_key()
        hg_keys_keys = hg_keys.keys()
        hg_keys = None  # for security, so it doesn't show up in errors
        ignored(hg_keys)
        assert ({'server', 'key', 'secret'} <= set(hg_keys_keys))
    else:
        pytest.skip(f"Health page for {env} is unavailable, so test is being skipped.")
def mock_update_service(*, cluster, service, forceNewDeployment):  # noQA - AWS chose mixed case argument name
    """ No-op mock matching the ECS update_service API signature (we don't
        actually want to trigger an ECS deploy in unit testing). """
    ignored(cluster, service, forceNewDeployment)
def test_secrets_table_getitem():
    """Subscript access on a SecretsTable behaves like a dict: hit returns the value, miss raises KeyError."""
    b3 = boto3_for_some_secrets_testing()
    with mock.patch.object(secrets_utils_module, 'boto3', b3):
        secrets_table = SecretsTable.find_application_secrets_table()
        # present key fetches its value
        assert secrets_table['foo'] == some_secret_table['foo']
        # absent key raises KeyError, just like a dict
        with pytest.raises(KeyError):
            ignored(secrets_table['missing'])  # the subscript fails before ignored is reached
def test_random_nums_2(connection, **kwargs):
    """A test check that records a short list of random numbers."""
    ignored(kwargs)
    check = CheckResult(connection, 'test_random_nums_2')
    check.status = 'IGNORE'
    # same RNG call sequence as before: one randint for the count, then one per element
    check.full_output = [random.randint(1, 100) for _ in range(random.randint(1, 20))]
    check.description = 'A test check as well'
    return check
def test_function_dummy(self, *args, **kwargs):
    """Build and return a minimal CheckResult populated with placeholder fields."""
    ignored(args, kwargs)
    connection = self.app_utils_obj.init_connection(self.environ)
    check = run_result.CheckResult(connection, 'not_a_check')
    placeholder_fields = {
        'summary': 'A string summary',
        'description': 'A string description',
        'ff_link': 'A string link',
        'action': 'A string action',
        'kwargs': {},
    }
    for attribute, value in placeholder_fields.items():
        setattr(check, attribute, value)
    return check
def action_function(self, *default_args, **default_kwargs):
    """ Import decorator, used to decorate all actions. Required for action functions.
        Any kwargs provided to the decorator will be passed to the function
        if no kwargs are explicitly passed.
        Handles all exceptions within running of the action, including validation
        issues/some common errors when writing actions. Will also keep track of overall
        runtime and cancel the check with status=ERROR if runtime exceeds CHECK_TIMEOUT.
        If an exception is raised, will store the result in output and return an
        ActionResult with status FAIL.
    """
    ignored(default_args)

    def action_deco(func):
        @wraps(func)
        def wrapper(*args, **kwargs):
            start_time = time.time()
            kwargs = self.handle_kwargs(kwargs, default_kwargs)
            # fork so the child process can enforce the timeout on the parent
            parent_pid = os.getpid()
            child_pid = os.fork()
            if child_pid != 0:  # we are the parent who will execute the check
                try:
                    if 'check_name' not in kwargs or 'called_by' not in kwargs:
                        raise BadCheckOrAction('Action requires check_name and called_by in its kwargs.')
                    action = func(*args, **kwargs)
                    action.validate()
                except Exception:
                    # connection should be the first (and only) positional arg
                    action = self.ActionResult(args[0], func.__name__)
                    action.status = 'FAIL'
                    action.description = 'Action failed to run. See output.'
                    action.output = traceback.format_exc().split('\n')
                kwargs['runtime_seconds'] = round(time.time() - start_time, 2)
                action.kwargs = kwargs
                os.kill(child_pid, signal.SIGKILL)  # we finished, so kill child
                return action.store_result()
            else:  # we are the child who handles the timeout
                partials = {'name': func.__name__, 'kwargs': kwargs, 'is_check': False,
                            'start_time': start_time, 'connection': args[0]}
                self.do_timeout(parent_pid, partials)
        wrapper.check_decorator = self.ACTION_DECO
        return wrapper
    return action_deco
def embed(context, request):
    """
    API to return custom-embedded view of object posted to endpoint. If no
    parameters provided, attempt to return object with embedding done per
    default parameters.

    :param context: pyramid request context (unused)
    :param request: pyramid request object
    :return results: list of dicts of custom-embedded views of items
    :raises HTTPBadRequest: if no ids were given, more than 5 ids were given,
        or any given id was invalid
    """
    ids = []
    ignored_embeds = []
    desired_embeds = []
    requested_fields = []
    results = []
    invalid_ids = []
    embed_depth = 4  # Arbitrary standard depth to search.
    ignored(context)
    # Parameters may arrive either via query string (GET) or JSON body.
    if request.GET:
        ids += request.GET.dict_of_lists().get("id", [])
        embed_depth = int(request.GET.get("depth", embed_depth))
        ignored_embeds += request.GET.dict_of_lists().get("ignored", [])
        desired_embeds += request.GET.dict_of_lists().get("desired", [])
        requested_fields += request.GET.dict_of_lists().get("field", [])
    elif request.json:
        ids += request.json.get("ids", [])
        ignored_embeds = request.json.get("ignored", [])
        desired_embeds = request.json.get("desired", [])
        embed_depth = request.json.get("depth", embed_depth)
        requested_fields = request.json.get("fields", [])
    ids = list(set(ids))  # de-duplicate before counting against the limit
    if len(ids) > 5:
        # The enforced limit is 5 items inclusive; the message now says so
        # (it previously said "less than 5", contradicting the check above).
        raise HTTPBadRequest(
            "Too many items were given for embedding."
            " Please limit to no more than 5 items."
        )
    if not ids:
        raise HTTPBadRequest("No item identifier was provided.")
    embed_props = {
        "ignored_embeds": ignored_embeds,
        "desired_embeds": desired_embeds,
        "embed_depth": embed_depth,
        "requested_fields": requested_fields,
    }
    for item_id in ids:
        item_embed = CustomEmbed(request, item_id, embed_props)
        results.append(item_embed.result)
        invalid_ids += item_embed.invalid_ids
    # A bare string in results signals an invalid top-level id from CustomEmbed.
    invalid_ids += [item for item in results if isinstance(item, str)]
    if invalid_ids:
        raise HTTPBadRequest(
            "The following IDs were invalid: %s." % ", ".join(invalid_ids)
        )
    return results
def mocked_fetch_health_page_json(url, use_urllib=True):
    """Mock health-page fetch: derive the fourfront env name from the URL and build a mock page."""
    ignored(use_urllib)  # we don't test this
    match = re.match(r'.*(fourfront-[a-z0-9-]+)(?:[.]|$)', url)
    if not match:
        raise NotImplementedError(f"Mock can't handle URL: {url}")
    return make_mock_health_page(match.group(1))
def mocked_upload_item_data(*args, **kwargs):
    """
    Mock for upload_item_data that terminates the process, so the caller can
    assert on the exit status; its call args are tested by the caller.

    Exits with status 0 when KeyManager resolves the expected keydicts file,
    and status 1 otherwise (a branch not normally reached).
    """
    ignored(args, kwargs)
    # We don't need to test this function's actions because we test its call args below.
    # However, we do need to run this one test from the same dynamic context,
    # so this is close enough.
    if KeyManager.keydicts_filename() == (keyfile or KeyManager.DEFAULT_KEYDICTS_FILENAME):
        # sys.exit rather than the site-injected exit() helper: exit() is meant
        # for interactive use and is absent when Python runs without site (-S).
        sys.exit(0)
    else:  # This case is not expected to be reached but must be here for testing to catch
        sys.exit(1)  # pragma: no cover
def check_function(self, *default_args, **default_kwargs):
    """ Import decorator, used to decorate all checks. Sets the check_decorator
        attribute so that methods can be fetched. Any kwargs provided to the
        decorator will be passed to the function if no kwargs are explicitly passed.
        Handles all exceptions within running of the check, including validation
        issues/some common errors when writing checks. Will also keep track of overall
        runtime and cancel the check with status=ERROR if runtime exceeds CHECK_TIMEOUT.
        If an exception is raised, will store the result in full_output and return an
        ERROR CheckResult.
    """
    ignored(default_args)

    def check_deco(func):
        @wraps(func)
        def wrapper(*args, **kwargs):
            start_time = time.time()
            kwargs = self.handle_kwargs(kwargs, default_kwargs)
            # fork so the child process can enforce the timeout on the parent
            parent_pid = os.getpid()
            child_pid = os.fork()
            if child_pid != 0:  # we are the parent who will execute the check
                try:
                    check = func(*args, **kwargs)
                    check.validate()
                except Exception:
                    # connection should be the first (and only) positional arg
                    check = self.CheckResult(args[0], func.__name__)
                    check.status = 'ERROR'
                    check.description = 'Check failed to run. See full output.'
                    check.full_output = traceback.format_exc().split('\n')
                kwargs['runtime_seconds'] = round(time.time() - start_time, 2)
                check.kwargs = kwargs
                os.kill(child_pid, signal.SIGKILL)  # we finished, so kill child
                return check.store_result()
            else:  # we are the child who handles the timeout
                partials = {'name': func.__name__, 'kwargs': kwargs, 'is_check': True,
                            'start_time': start_time, 'connection': args[0]}
                self.do_timeout(parent_pid, partials)
        wrapper.check_decorator = self.CHECK_DECO
        return wrapper
    return check_deco
def test_random_nums(connection, **kwargs):
    """A test check (with an associated action) that records random numbers."""
    ignored(kwargs)
    check = CheckResult(connection, 'test_random_nums')
    check.status = 'IGNORE'
    check.action = 'add_random_test_nums'
    check.allow_action = True
    # same RNG call sequence as before: one randint for the count, then one per element
    check.full_output = [random.randint(1, 100) for _ in range(random.randint(1, 20))]
    check.description = 'A test check'
    # sleep for 2 secs because this is used to test timing out
    time.sleep(2)
    return check
def mock_list_ecs_services(*, cluster):
    """ Mock for the ECS list_services API: always returns four fixed service ARNs. """
    ignored(cluster)
    service_arns = [
        'arn:aws:ecs:us-east-2:1234566777:service/c4-ecs-trial-alpha-stack-CGAPDockerCluster-Z4m1uYa2J11O/c4-ecs-trial-alpha-stack-CGAPIndexerService-YihcMquIc354',  # noQA: E501
        'arn:aws:ecs:us-east-2:1234566777:service/c4-ecs-trial-alpha-stack-CGAPDockerCluster-Z4m1uYa2J11O/c4-ecs-trial-alpha-stack-CGAPDeploymentService-NRyGGBTSnqbQ',  # noQA: E501
        'arn:aws:ecs:us-east-2:1234566777:service/c4-ecs-trial-alpha-stack-CGAPDockerCluster-Z4m1uYa2J11O/c4-ecs-trial-alpha-stack-CGAPIngesterService-QRcdjlE5ZJS1',  # noQA: E501
        'arn:aws:ecs:us-east-2:1234566777:service/c4-ecs-trial-alpha-stack-CGAPDockerCluster-Z4m1uYa2J11O/c4-ecs-trial-alpha-stack-CGAPWSGIService-oDZbeVVWjZMq'  # noQA: E501
    ]
    response_metadata = {'RequestId': 'not-a-uuid', 'HTTPStatusCode': 200}
    return {'serviceArns': service_arns, 'ResponseMetadata': response_metadata}
def test_find_application_secrets_table():
    """find_application_secrets_table succeeds unassisted or with a unique pattern, but not an ambiguous one."""
    b3 = boto3_for_some_secrets_testing()
    with mock.patch.object(secrets_utils_module, 'boto3', b3):
        # Unassisted search works because our examples contain only one candidate.
        table = SecretsTable.find_application_secrets_table()
        assert table.name == some_secret_identity
        # An explicit pattern matching that unique identity finds the same table.
        table = SecretsTable.find_application_secrets_table(
            application_configuration_pattern=some_unique_secret_identity_token)
        assert table.name == some_secret_identity
        # There is no thing called DecoyApplicationConfiguration, and the 'Decoy'
        # pattern matches too many things for the search to succeed unassisted.
        with pytest.raises(RuntimeError):
            ignored(SecretsTable.find_application_secrets_table(
                application_configuration_pattern='Decoy'))  # the call raises before ignored runs
def lambda_handler(event, context):
    """
    AWS Lambda entry point: serve calendar data filtered for the requesting
    environment, formatted as JSON or HTML per the 'format' query parameter.

    Args:
        event: AWS Lambda event dict (API Gateway payload).
        context: AWS Lambda context object (unused here).

    Returns:
        dict: an API Gateway response ({statusCode, headers, body}), or a
        {"message": ...} dict describing any exception raised while handling.
    """
    ignored(context)  # This will be ignored unless the commented-out block below is uncommented.
    # The staging endpoint path selects the staged calendar data.
    staged = event.get("rawPath", PRD_ENDPOINT_PATH) == STG_ENDPOINT_PATH
    data = get_calendar_data(staged=staged)
    params = event.get("queryStringParameters") or {}
    # It might be a security problem to leave this turned on in production, but it may be useful to enable
    # this during development to be able to see what's coming through. -kmp 7-Jul-2020
    # vvvvvvvvvvvvvvvvvvvvvvvvvvvvvv
    # if params.get("echoevent"):
    #     return {
    #         "statusCode": 200,
    #         "headers": {
    #             "Content-Type": "application/json",
    #             "Cache-Control": "public, max-age=120",
    #             # Note that this does not do Access-Control-Allow-Origin, etc.
    #             # as this is for debugging only. -kmp 19-Mar-2020
    #         },
    #         "body": json.dumps(event, indent=2),
    #         # Maybe also this, too ...
    #         # "context": json.dumps(context)  # or repr(context)
    #     }
    # ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
    # The referer is available in a standard event packaging, in the headers,
    # https://docs.aws.amazon.com/apigateway/latest/developerguide/request-response-data-mappings.html
    try:
        now = params.get("now", None)
        debug = params.get("debug", "FALSE").upper() == "TRUE"
        application = params.get("application")
        host = event.get('headers', {}).get('host')
        referer = event.get('headers', {}).get('referer')
        environment = params.get("environment")
        # resolve which environment's calendar entries to show from request hints
        environment = resolve_environment(host=host, referer=referer, application=application,
                                          environment=environment)
        data = filter_data(data, environment, debug=debug, now=now)
        response_format = params.get("format") or "html"
        if response_format == 'json':
            result = {
                "statusCode": 200,
                "headers": {
                    "Content-Type": "application/json",
                    "Cache-Control": "public, max-age=120",
                },
                "body": json.dumps(data, indent=2),
            }
        else:
            result = {
                "statusCode": 200,
                "headers": {
                    "Content-Type": 'text/html',
                    "Cache-Control": "public, max-age=120",
                },
                "body": convert_to_html(data or [], environment)
            }
        # merge in the module's shared CORS entries (assumes CORS_HEADERS holds
        # top-level response keys — confirm against its definition)
        result = dict(result, **CORS_HEADERS)
        return result
    except Exception as e:
        # report rather than raise, so API Gateway still returns a JSON message body
        return {"message": "%s: %s" % (full_class_name(e), e)}
def is_gene_name(instance):
    """Validate a gene name.

    This SHOULD check a webservice at HGNC/MGI for validation,
    but for now it unconditionally accepts every value.
    """
    ignored(instance)
    return True
def mocked_requests_get(url, *args, **kwargs):
    """Mock for requests.get: only ingestion-submission URLs are expected here."""
    ignored(args, kwargs)
    assert "ingestion-submissions" in url
    response = MockResponse(200, json=INGESTION_FRAGMENT_WITH_UPLOAD_INFO)
    return response
def mocked_ecr_login(*, username, password, registry):
    """No-op mock for ECR login; the credentials are deliberately unused."""
    ignored(username, password, registry)
def load_data_view(context, request):
    """ expected input data

    {'local_path': path to a directory or file in file system
     'fdn_dir': inserts folder under encoded
     'store': if not local_path or fdn_dir, look for a dictionary of items here
     'overwrite' (Bool): overwrite if existing data
     'itype': (list or str): only pick some types from the source or specify type in in_file
     'iter_response': invoke the Response as an app_iter, directly calling load_all_gen
     'config_uri': user supplied configuration file}

    post can contain 2 different styles of data
    1) reference to a folder or file (local_path or fd_dir). If this is done
       itype can be optionally used to specify type of items loaded from files
    2) store in form of {'item_type': [items], 'item_type2': [items]}
       item_type should be same as insert file names i.e. file_fastq
    """
    ignored(context)
    # this is a bit weird but want to reuse load_data functionality so I'm rolling with it
    config_uri = request.json.get('config_uri', 'production.ini')
    patch_only = request.json.get('patch_only', False)
    post_only = request.json.get('post_only', False)
    # build a TestApp against the configured application so loads go through the normal machinery
    app = get_app(config_uri, 'app')
    environ = {'HTTP_ACCEPT': 'application/json', 'REMOTE_USER': '******'}
    testapp = webtest.TestApp(app, environ)
    # expected response
    request.response.status = 200
    result = {
        'status': 'success',
        '@type': ['result'],
    }
    store = request.json.get('store', {})
    local_path = request.json.get('local_path')
    fdn_dir = request.json.get('fdn_dir')
    overwrite = request.json.get('overwrite', False)
    itype = request.json.get('itype')
    iter_resp = request.json.get('iter_response', False)
    inserts = None
    from_json = False
    # source precedence: fdn_dir, then local_path, then an in-request store
    if fdn_dir:
        inserts = resource_filename('encoded', 'tests/data/' + fdn_dir + '/')
    elif local_path:
        inserts = local_path
    elif store:
        inserts = store
        from_json = True
    # if we want to iterate over the response to keep the connection alive
    # this directly calls load_all_gen, instead of load_all
    if iter_resp:
        return Response(
            content_type='text/plain',
            app_iter=LoadGenWrapper(
                load_all_gen(testapp, inserts, None, overwrite=overwrite, itype=itype,
                             from_json=from_json, patch_only=patch_only, post_only=post_only)
            )
        )
    # otherwise, it is a regular view and we can call load_all as usual
    if inserts:
        res = load_all(testapp, inserts, None, overwrite=overwrite, itype=itype, from_json=from_json)
    else:
        res = 'No uploadable content found!'
    if res:  # None if load_all is successful
        print(LOAD_ERROR_MESSAGE)
        request.response.status = 422
        result['status'] = 'error'
        result['@graph'] = str(res)
    return result
def es_url(cls, env):
    """Return the Elasticsearch URL for the given env.

    This base version is a placeholder; replace it with the correct url in the
    child class, e.g.
    "https://search-cgap-testing-6-8-vo4mdkmkshvmyddc65ux7dtaou.us-east-1.es.amazonaws.com"
    """
    ignored(env)
    return 'https://placeholder_url'
def ff_url(cls, env):
    """Return the portal URL for the given env.

    This base version is a placeholder; replace it with the correct url in the
    child class, e.g. 'https://cgap.hms.harvard.edu/'
    """
    ignored(env)
    return 'http://placeholder_url'
def build_config(cls, stage, trial_creds=None, trial_global_env_bucket=False, global_env_bucket=None,
                 security_group_ids=None, subnet_ids=None, check_runner=None, rds_name=None,
                 lambda_timeout=DEFAULT_LAMBDA_TIMEOUT):
    """ Builds the chalice config json file. See: https://aws.github.io/chalice/topics/configfile

        Args:
            stage: deployment stage (unused; both 'dev' and 'prod' stages are always written)
            trial_creds (dict): if provided, credentials for a Foursight trial deployment;
                must contain S3_ENCRYPT_KEY, CLIENT_ID, CLIENT_SECRET, ES_HOST, ENV_NAME and
                RDS_NAME, and may contain S3_ENCRYPT_KEY_ID
            trial_global_env_bucket (bool): if True, apply trial-account settings
                (shorter lambda timeout and GLOBAL_ENV_BUCKET wiring)
            global_env_bucket (str): bucket name override; falls back to the
                GLOBAL_BUCKET_ENV / GLOBAL_ENV_BUCKET environment variables
            security_group_ids (list): optional VPC security group ids for the lambdas
            subnet_ids (list): optional VPC subnet ids for the lambdas
            check_runner (str): optional CHECK_RUNNER lambda name
            rds_name (str): optional RDS name (overridden by trial_creds when given there)
            lambda_timeout (int): lambda timeout applied in the trial account setup

        Side effects: writes the chalice config file, exports poetry dependencies
        to requirements.txt, and exits the process on missing settings.
    """
    ignored(stage)
    if trial_creds:
        # key to decrypt access key
        s3_enc_secret = trial_creds['S3_ENCRYPT_KEY']
        client_id = trial_creds['CLIENT_ID']
        client_secret = trial_creds['CLIENT_SECRET']
        dev_secret = None
        es_host = trial_creds['ES_HOST']
        env_name = trial_creds['ENV_NAME']
        rds_name = trial_creds['RDS_NAME']
        s3_key_id = trial_creds.get('S3_ENCRYPT_KEY_ID')
        if not (s3_enc_secret and client_id and client_secret and es_host and rds_name):
            # message fixed: "one more more" -> "one or more"; added the missing
            # space that previously joined "environment" and "variables" together
            print(''.join(['ERROR. You are missing one or more environment ',
                           'variables needed to deploy the Foursight trial. Need:\n',
                           'S3_ENCRYPT_KEY, CLIENT_ID, CLIENT_SECRET, ES_HOST, RDS_NAME in trial_creds dict.']))
            sys.exit()
    else:
        s3_enc_secret = os.environ.get("S3_ENCRYPT_KEY")
        client_id = os.environ.get("CLIENT_ID")
        client_secret = os.environ.get("CLIENT_SECRET")
        dev_secret = os.environ.get("DEV_SECRET")
        es_host = None  # not previously passed to config
        env_name = None
        s3_key_id = None  # not supported in legacy
        if not (s3_enc_secret and client_id and client_secret and dev_secret):
            # message fixed: "one more more" -> "one or more"
            print(''.join(['ERROR. You are missing one or more environment ',
                           'variables needed to deploy Foursight.\n',
                           'Need: S3_ENCRYPT_KEY, CLIENT_ID, CLIENT_SECRET, DEV_SECRET.']))
            sys.exit()
    for curr_stage_name in ['dev', 'prod']:
        curr_stage = cls.CONFIG_BASE['stages'][curr_stage_name]
        curr_stage_environ = curr_stage['environment_variables']
        curr_stage_environ['S3_ENCRYPT_KEY'] = s3_enc_secret
        curr_stage_environ['CLIENT_ID'] = client_id
        curr_stage_environ['CLIENT_SECRET'] = client_secret
        if rds_name:
            curr_stage_environ['RDS_NAME'] = rds_name
        if dev_secret:  # still pass in main account, ignored in alpha infra - Will Aug 24 2021
            curr_stage_environ['DEV_SECRET'] = dev_secret
        if env_name:
            curr_stage_environ['ENV_NAME'] = env_name
        if es_host:
            curr_stage_environ['ES_HOST'] = es_host
        if s3_key_id:
            curr_stage_environ['S3_ENCRYPT_KEY_ID'] = s3_key_id
        if trial_global_env_bucket:
            # in the trial account setup, use a shorter timeout
            curr_stage['lambda_timeout'] = lambda_timeout
            if not global_env_bucket:
                # accept either the legacy or current env var name, but not an inconsistent pair
                global_bucket_env_from_environ = os.environ.get('GLOBAL_BUCKET_ENV')
                global_env_bucket_from_environ = os.environ.get('GLOBAL_ENV_BUCKET')
                if (global_bucket_env_from_environ and global_env_bucket_from_environ
                        and global_bucket_env_from_environ != global_env_bucket_from_environ):
                    print('ERROR. GLOBAL_BUCKET_ENV and GLOBAL_ENV_BUCKET are both set, but inconsistently.')
                    sys.exit()
                global_env_bucket = global_bucket_env_from_environ or global_env_bucket_from_environ
            if global_env_bucket:
                curr_stage_environ['GLOBAL_BUCKET_ENV'] = global_env_bucket  # legacy compatibility
                curr_stage_environ['GLOBAL_ENV_BUCKET'] = global_env_bucket
            else:
                print('ERROR. GLOBAL_ENV_BUCKET must be set or global_env_bucket= must be passed'
                      ' when building the trial config.')
                sys.exit()
        if security_group_ids:
            curr_stage['security_group_ids'] = security_group_ids
        if subnet_ids:
            curr_stage['subnet_ids'] = subnet_ids
        if check_runner:
            curr_stage_environ['CHECK_RUNNER'] = check_runner
    filename = cls.get_config_filepath()
    print(''.join(['Writing: ', filename]))
    with open(filename, 'w') as config_file:
        config_file.write(json.dumps(cls.CONFIG_BASE))
    # export poetry into requirements
    subprocess.check_call(['poetry', 'export', '-f', 'requirements.txt', '--without-hashes',
                           '-o', 'requirements.txt'])
def uuid_normalizer(*, label, item):
    """Reduce an item to its uuid when it is a dict carrying one; otherwise pass it through unchanged."""
    ignored(label)
    has_uuid = isinstance(item, dict) and 'uuid' in item
    return item['uuid'] if has_uuid else item
def mock_update_es_unknown(DomainName, ElasticsearchClusterConfig):  # noQA - mixed-case params chosen by AWS
    """Mock ES update call that always fails with an arbitrary exception."""
    ignored(DomainName, ElasticsearchClusterConfig)
    raise Exception('Literally anything')
def mock_update_es_success(DomainName, ElasticsearchClusterConfig):  # noQA - mixed-case params chosen by AWS
    """Mock ES update call that always reports an HTTP 200 success."""
    ignored(DomainName, ElasticsearchClusterConfig)
    return {'ResponseMetadata': {'HTTPStatusCode': 200}}
def handle_simulated_bundle(submission: SubmissionFolio):
    """
    This handler exists for test purposes and as an example of how to write an
    alternate processor. It wants a file that contains data like:

        {
            "success": true,
            "validation_output": ["Some validation stuff"],
            "post_output": ["Some post stuff", "More post stuff"],
            "upload_info": {},
            "result": {"answer": 42},
            "institution": "/institutions/hms-dbmi/",
            "project": "/projects/hms-dbmi/"
        }

    and does several things:

    * Checks that the submission was given a matching institution and project,
      or else it won't validate. Returns the given validation output PLUS
      information about the validation of those two fields.
    * If validation_only=true is given, then returns the validation result,
      as the result. That includes:
      * success - either true or false to validation success or failure in the
        case of validation_only, or always false (of course) in the case
        validation_only was false but the validation failed.
      * validation_output - a list of output lines explaining validation results.
        There might be such lines whether or not there was validation success.
    * If validation_only is missing or not true, then returns the indicated values for
      * success - either true or false to simulate overall success or failure
      * result - any overall result value
      * post_output - a list of text lines that represent output describing results of posts.
      * upload_info - information about any uploads that need to be done, in the format:
        [{'filename': ..., 'uuid': ...}, ...]
        where filename is the name of the filename that needs to be uploaded, and
        uuid is the uuid of the item for which it needs to be uploaded.
    """
    with submission.processing_context() as resolution:
        ignored(resolution)
        s3_client = submission.s3_client
        submission_id = submission.submission_id
        institution = get_parameter(submission.parameters, 'institution')
        project = get_parameter(submission.parameters, 'project')
        validate_only = get_parameter(submission.parameters, 'validate_only', as_type=bool, default=False)
        # run the simulated processing against the submitted file in S3
        bundle_results = simulated_processor(s3_client=s3_client, bucket=submission.bucket,
                                             key=submission.object_name, project=project,
                                             institution=institution, vapp=submission.vapp,
                                             validate_only=validate_only)
        debuglog(submission_id, "bundle_result:", json.dumps(bundle_results, indent=2))
        # persist the validation report alongside the submission in S3
        with submission.s3_output(key_name='validation_report') as fp:
            submission.show_report_lines(bundle_results.get('validation_output', []), fp)
        submission.note_additional_datum('validation_output', from_dict=bundle_results, default=[])
        submission.process_standard_bundle_results(bundle_results)
        # a missing or falsy 'success' marks the whole submission as failed
        if not bundle_results.get('success'):
            submission.fail()
def list_all_keys_w_prefix(self, prefix):
    """Given a prefix, return all keys that have that prefix.

    Abstract here; a concrete subclass must supply the implementation.
    """
    ignored(prefix)
    raise NotImplementedError
def simulated_processor(s3_client, bucket, key, project, institution, vapp,  # <- Required keyword arguments
                        validate_only=False):  # <-- Optional keyword arguments (with defaults)
    """
    This processor expects the data to contain JSON containing:

        {
            "project": <project>,          # The value to validate the given project against.
            "institution": <institution>,  # The value to validate the given institution against.
            "success": <true/false>,       # True if full processing should return success
            "result": <processing-result>, # Result to return if simulated processing happens
            "post_output": [...],          # Post output to expect if simulated processing happens
            "upload_info": [...]           # Upload info to return if simulated processing happens
        }

    Simulated validation will check that the given project is the same as the
    project in the file and the given institution is the same as the institution
    in the file.

    * If simulated validation fails, the simulated processing won't occur.
    * If validate_only is True, simulated processing won't occur, so the result,
      post_output, and upload_info will be null.
    """
    ignored(vapp)

    def simulated_validation(data, project, institution):  # Simulated Validation
        # Check that the file's own project/institution agree with the ones supplied.
        validated = True
        validation_output = data.get("validation_output", [])
        for field, value in [("project", project), ("institution", institution)]:
            if data.get(field) == value:
                validation_output.append("The %s is OK" % field)
            else:
                validation_output.append("Expected %s %s." % (field, value))
                validated = False
        return validated, validation_output

    with s3_local_file(s3_client=s3_client, bucket=bucket, key=key) as filename:
        with io.open(filename) as fp:
            data = json.load(fp)
        result = {}
        validated, validation_output = simulated_validation(data, project, institution)
        result["validation_output"] = validation_output
        if validate_only or not validated:
            result["success"] = validated
            return result
        # Loop variable renamed from 'key' to 'field': it previously shadowed the
        # s3 'key' parameter (harmless only by accident of statement order).
        for field, default in [("success", False), ("result", {}),
                               ("post_output", []), ("upload_info", [])]:
            result[field] = data.get(field, default)
        return result