def test_event_filter_open_history(self, task_manager_mock, sapper_mock, called_event, open_event_history, mock_context):
    """An event whose history shows it already open must be filtered: the task manager never fires."""
    sapper_instance = sapper_mock(open_event_history).return_value
    manager = task_manager_mock()
    outcome = handler(called_event, mock_context)
    for record in called_event['Records']:
        # records are SQS-shaped: JSON body wrapping an SNS-style Message
        event = ajson.loads(ajson.loads(record['body'])['Message'])
        manager.assert_not_called()
        sapper_instance.retrieve_state_history.assert_called_with(event['flow_id'], event['run_id'])
    assert outcome
def retrieve_documentation_versions(self, encounter_id):
    """Fetch the documentation version records for one clinical encounter.

    Raises RuntimeError when the endpoint answers with anything but 200.
    """
    version_url = self._base_stem + _url_stems['Versions']
    # NOTE(review): verify=False disables TLS certificate validation — confirm intentional
    version_response = self._session.post(version_url, data={'clientvisit_id': encounter_id}, verify=False)
    if version_response.status_code != 200:
        raise RuntimeError(f'could not get the version data for {encounter_id}, '
                           f'response code: {version_response.status_code}')
    return ajson.loads(version_response.text)['data']
def _download_object(bucket_name: str, folder_name: str, object_name: str) -> Any:
    """Read `folder_name/object_name` from the S3 bucket and deserialize its JSON body."""
    s3 = boto3.resource('s3')
    stored = s3.Object(bucket_name, f'{folder_name}/{object_name}').get()
    return ajson.loads(stored['Body'].read())
def send_task(self):
    """Invoke the wrapped lambda, recording start/failure/completion through the sapper.

    Returns the parsed lambda payload on success; returns None on any failure path.
    """
    self._sapper.mark_event_started(**self.payload)
    try:
        lambda_response = self._fire_lambda()
        raw_payload = lambda_response['Payload'].read()
        status = lambda_response['StatusCode']
        if status != 200:
            # non-200 invocation: record everything the service reported
            details = {
                'cause': lambda_response['FunctionError'],
                'failure_logs': lambda_response['LogResult'],
                'status_code': status,
                'fn_response': raw_payload
            }
            self._sapper.mark_event_failed(failure_details=details, **self.payload)
            return
        task_result = ajson.loads(raw_payload)
        self._sapper.mark_event_completed(results={'results': task_result}, **self.payload)
        return task_result
    except Exception as e:
        # NOTE(review): this branch passes failure_details as a JSON string while the
        # non-200 branch passes a dict — confirm the sapper accepts both shapes
        self._sapper.mark_event_failed(failure_details=ajson.dumps({'cause': e.args}), **self.payload)
def test_credential_to_json(self):
    """Credentials must survive a full JSON round trip and still validate."""
    credentials = CredibleLoginCredentials.retrieve('ICFS')
    serialized = ajson.dumps(credentials)
    assert serialized
    restored = ajson.loads(serialized)
    assert isinstance(restored, CredibleLoginCredentials)
    assert restored.validate()
def retrieve(self):
    """Download the JSON stored at the object's S3 uri and coerce it to the configured data type."""
    uri_match = re.search(r's3://(?P<bucket>[^/]*)/(?P<key>.*)', self._storage_uri)
    bucket, key = uri_match.group('bucket'), uri_match.group('key')
    raw_body = boto3.resource('s3').Object(bucket, key).get()['Body'].read()
    return set_property_data_type(self._data_type, ajson.loads(raw_body))
def test_event_filter_failed_history(self, task_manager_mock, sapper_mock, called_event, failed_event_history, mock_context):
    """A previously failed event is retried: the task runs and completes, with no new failure mark."""
    sapper_instance = sapper_mock(failed_event_history).return_value
    manager = task_manager_mock()
    outcome = handler(called_event, mock_context)
    for record in called_event['Records']:
        # records are SQS-shaped: JSON body wrapping an SNS-style Message
        event = ajson.loads(ajson.loads(record['body'])['Message'])
        sapper_instance.retrieve_state_history.assert_called_with(event['flow_id'], event['run_id'])
        sapper_instance.mark_event_failed.assert_not_called()
        sapper_instance.mark_event_started.assert_called_with(**event)
        manager.assert_called()
        sapper_instance.mark_event_completed.assert_called_with(results={'results': {'some_result': 'here'}}, **event)
    assert outcome
def test_generate_source_vertex(self, source_vertex_task_integration_event, mock_context, mocks):
    """The handler should produce a populated source vertex plus its schema/extraction context."""
    raw_results = handler(source_vertex_task_integration_event, mock_context)
    assert raw_results
    parsed = ajson.loads(raw_results)
    for expected_key in ('source_vertex', 'schema', 'schema_entry', 'extracted_data'):
        assert expected_key in parsed
    assert parsed['source_vertex'].vertex_properties
    assert mocks['bullhorn'].called
    assert mocks['gql'].called
def potential_connections_unit_event(request):
    """Build an SQS-style record wrapping a derive_potential_connections task event.

    request.param: (test event name, schema name, schema entry name, ...).
    """
    params = request.param
    schema_entry = _read_schema(params[1], params[2])
    schema = _read_schema(params[1])
    # JSON round trip yields a deep, mutation-safe copy of the fixture event
    task_kwargs = ajson.loads(ajson.dumps(_read_test_event(params[0])))
    task_kwargs['schema'] = schema
    task_kwargs['schema_entry'] = schema_entry
    message = ajson.dumps({
        'task_name': 'derive_potential_connections',
        'task_kwargs': task_kwargs
    })
    return {'Records': [{'body': ajson.dumps({'Message': message})}]}
def check_flow_logs(client_id, encounter_id, state_gql_endpoint):
    """Collect the single task_results value recorded for one client/encounter flow.

    Raises RuntimeError when more than one distinct result was recorded;
    returns None when the flow produced none.
    """
    flow_id = f"leech-psi-201905291215#get_client_encounter_ids-{client_id}#get_encounter-{encounter_id}"
    gql_client = GqlNotary(state_gql_endpoint)
    states, token = _paginate_flow(flow_id, gql_client)
    while token:
        page, token = _paginate_flow(flow_id, gql_client, token)
        states.extend(page)
    results = {
        prop['property_value']
        for state in states if state['state_type'] == 'EventCompleted'
        for prop in state['state_properties']
        if prop['property_name'] == 'task_results'
    }
    if len(results) > 1:
        raise RuntimeError(f'too many results for flow_id: {flow_id}')
    # at most one entry remains; the loop returns it, or falls through to None
    for result in results:
        return ajson.loads(result)
def generate_edge_integration_event(request):
    """Build an SQS-style record wrapping a generate_potential_edge task event.

    request.param: (test event name, schema name, schema entry name, edge type, ...).
    """
    params = request.param
    schema_entry = _read_schema(params[1], params[2])
    edge_type = params[3]
    rule_entry = _generate_linking_rule(schema_entry, edge_type)
    edge_schema_entry = _read_schema(params[1], edge_type)
    # JSON round trip yields a deep, mutation-safe copy of the fixture event
    task_kwargs = ajson.loads(ajson.dumps(_read_test_event(params[0])))
    task_kwargs['schema_entry'] = edge_schema_entry
    task_kwargs['rule_entry'] = rule_entry
    message = ajson.dumps({
        'task_name': 'generate_potential_edge',
        'task_kwargs': task_kwargs,
        'flow_id': 'some_flow_id'
    })
    return {'Records': [{'body': ajson.dumps({'Message': message})}]}
def find_existing_vertexes(request):
    """Build an SQS-style record wrapping a check_for_existing_vertexes task event.

    request.param: (test event name, schema name, schema entry name, edge type, ...).
    """
    params = request.param
    schema_entry = _read_schema(params[1], params[2])
    schema = _read_schema(params[1])
    # JSON round trip yields a deep, mutation-safe copy of the fixture event
    task_kwargs = ajson.loads(ajson.dumps(_read_test_event(params[0])))
    rule_entry = _generate_linking_rule(schema_entry, params[3])
    task_kwargs['schema'] = schema
    task_kwargs['schema_entry'] = schema_entry
    task_kwargs['rule_entry'] = rule_entry
    message = ajson.dumps({
        'task_name': 'check_for_existing_vertexes',
        'task_kwargs': task_kwargs
    })
    return {'Records': [{'body': ajson.dumps({'Message': message})}]}
def _parse(self):
    """Lazily deserialize the parent class's stored asset, caching the parsed value."""
    if self._parsed:
        return
    # parse once; subsequent calls short-circuit on the flag above
    self._stored_asset = ajson.loads(super().stored_asset)
    self._parsed = True
def from_encrypted_token(cls, token, username):
    """Alternate constructor: rebuild pagination state from its encrypted token."""
    decrypted = SneakyKipper('pagination').decrypt(token, {'username': username})
    fields = ajson.loads(decrypted)
    return cls(
        username,
        fields['inclusive_start'],
        fields['page_size'],
        fields['pagination_id'])
def _send_command(self, command: str, variables: Dict[str, Any] = None) -> Dict[str, Any]:
    """Send a command over the connection and return the payload under its 'data' key.

    NOTE(review): the annotation says Dict but the default is None — Optional[Dict[str, Any]]
    would be more precise; left as-is to keep the signature byte-identical.
    """
    raw_response = self._connection.send(command, variables or {})
    return ajson.loads(raw_response)['data']
def _send(self, query, variables=None):
    """Send a query to the GQL connection and return the parsed JSON response."""
    raw_response = self._gql_connection.send(query, variables or {})
    return ajson.loads(raw_response)