def test___add_request_queries_to_sqs(self, mock_set_table_field_with_value, mock_complete_subtask_execution, mock_add_to_queue):
    """Verify that a request query S3 location is forwarded to the query job SQS queue."""
    # Point the driver at a config whose query-job queue URL is a known value.
    config = MatrixInfraConfig()
    config.set({'query_job_q_url': "query_job_q_url"})
    self._driver.config = config

    query_location = "test_path"
    self._driver._add_request_query_to_sqs(query_location)

    expected_payload = {
        'request_id': self.request_id,
        's3_obj_key': query_location,
    }
    mock_add_to_queue.assert_called_once_with("query_job_q_url", expected_payload)
def dss_notification():
    """Receive a DSS bundle notification, verify its HMAC signature, and enqueue it.

    Returns a chalice Response acknowledging the subscription event.
    """
    request_body = app.current_request.json_body
    match = request_body['match']
    bundle_uuid = match['bundle_uuid']
    bundle_version = match['bundle_version']
    subscription_id = request_body['subscription_id']
    event_type = request_body['event_type']

    config = MatrixInfraConfig()
    secret = config.dss_subscription_hmac_secret_key.encode()
    # Reject notifications whose HTTP signature does not match the shared secret.
    HTTPSignatureAuth.verify(
        requests.Request(url="http://host/dss/notification",
                         method=app.current_request.method,
                         headers=app.current_request.headers),
        key_resolver=lambda key_id, algorithm: secret)

    message = {
        'bundle_uuid': bundle_uuid,
        'bundle_version': bundle_version,
        'event_type': event_type,
    }
    SQSHandler().add_message_to_queue(config.notification_q_url, message)

    return chalice.Response(
        status_code=requests.codes.ok,
        body=f"Received notification from subscription {subscription_id}: "
             f"{event_type} {bundle_uuid}.{bundle_version}")
def _post_notification(self, bundle_fqid, event_type):
    """POST a signed DSS-style notification for a bundle to the matrix service.

    :param bundle_fqid: bundle FQID of the form "<uuid>.<version>"
    :param event_type: notification event type string (e.g. "CREATE")
    """
    # Split the FQID once instead of twice (original called split('.', 1) per part).
    parts = bundle_fqid.split('.', 1)
    bundle_uuid = parts[0]
    bundle_version = parts[1]
    url = f"{self.api_url[:-3]}/dss/notification"
    config = MatrixInfraConfig()
    # Build the payload as a single literal rather than key-by-key assignment.
    data = {
        "transaction_id": "test_transaction_id",
        "subscription_id": "test_subscription_id",
        "event_type": event_type,
        "match": {
            "bundle_uuid": bundle_uuid,
            "bundle_version": bundle_version,
        },
    }
    response = requests.post(
        url=url,
        json=data,
        auth=HTTPSignatureAuth(
            key_id=DSS_SUBSCRIPTION_HMAC_SECRET_ID,
            key=config.dss_subscription_hmac_secret_key.encode()))
    print(
        f"POST NOTIFICATION TO MATRIX SERVICE: \nPOST {url}\n-> {response.status_code}"
    )
    if response.content:
        print(response.content.decode('utf8'))
def __init__(self, request_id: str):
    """Wire up AWS handlers and deployment configuration for one matrix request.

    :param request_id: correlation id of the request being processed
    """
    Logging.set_correlation_id(logger, value=request_id)

    self.request_id = request_id
    self.request_tracker = RequestTracker(request_id)

    # AWS resource handlers
    self.dynamo_handler = DynamoHandler()
    self.sqs_handler = SQSHandler()
    self.s3_handler = S3Handler(os.environ['MATRIX_QUERY_BUCKET'])

    # Deployment configuration
    self.infra_config = MatrixInfraConfig()
    self.redshift_config = MatrixRedshiftConfig()
    self.query_results_bucket = os.environ['MATRIX_QUERY_RESULTS_BUCKET']
def setUp(self):
    """Start moto mocks for every AWS service the tests touch and create the SQS queues."""
    self.dynamo_mock = mock_dynamodb2()
    self.dynamo_mock.start()
    self.s3_mock = mock_s3()
    self.s3_mock.start()
    self.sqs_mock = mock_sqs()
    self.sqs_mock.start()
    self.sts_mock = mock_sts()
    self.sts_mock.start()

    self.matrix_infra_config = MatrixInfraConfig()
    self.redshift_config = MatrixRedshiftConfig()

    self.sqs = boto3.resource('sqs')
    # These were f-strings with no placeholders (ruff F541); plain literals are identical.
    self.sqs.create_queue(QueueName="test_query_job_q_name")
    self.sqs.create_queue(QueueName="test_deadletter_query_job_q_name")
    self.sqs.create_queue(QueueName="test_notification_q_url")
import os import requests import uuid from matrix.common import constants from matrix.common import query_constructor from matrix.common.exceptions import MatrixException from matrix.common.constants import GenusSpecies, MatrixFormat, MatrixRequestStatus from matrix.common.config import MatrixInfraConfig from matrix.common.aws.lambda_handler import LambdaHandler, LambdaName from matrix.common.aws.redshift_handler import RedshiftHandler, TableName from matrix.common.aws.s3_handler import S3Handler from matrix.common.request.request_tracker import RequestTracker lambda_handler = LambdaHandler() matrix_infra_config = MatrixInfraConfig() def post_matrix(body: dict): feature = body.get("feature", constants.DEFAULT_FEATURE) fields = body.get("fields", constants.DEFAULT_FIELDS) format_ = body['format'] if 'format' in body else MatrixFormat.LOOM.value expected_formats = [mf.value for mf in MatrixFormat] # Validate input parameters if format_ not in expected_formats: return ({ 'message': "Invalid parameters supplied. " "Please supply a valid `format`. "
def retrieve_gcp_credentials():  # pragma: no cover
    """Return the GCP service-account credentials dict decoded from config.

    The credentials are stored base64-encoded in MatrixInfraConfig.
    """
    encoded_creds = MatrixInfraConfig().gcp_service_acct_creds
    return json.loads(base64.b64decode(encoded_creds).decode())
def __init__(self):
    """Create the AWS service handlers this component depends on."""
    # Queue and object-store handlers
    self.sqs_handler = SQSHandler()
    self.s3_handler = S3Handler(os.environ["MATRIX_QUERY_BUCKET"])
    # Batch compute and warehouse handlers
    self.batch_handler = BatchHandler()
    self.redshift_handler = RedshiftHandler()
    # Deployment configuration
    self.matrix_infra_config = MatrixInfraConfig()
import base64 import json import os import requests import hca from matrix.common.config import MatrixInfraConfig if __name__ == '__main__': gcp_service_acct_creds = json.loads( base64.b64decode(MatrixInfraConfig().gcp_service_acct_creds).decode()) with open('gcp_creds.json', 'w') as outfile: json.dump(gcp_service_acct_creds, outfile) try: os.environ["GOOGLE_APPLICATION_CREDENTIALS"] = os.getcwd( ) + "/gcp_creds.json" deployment_stage = os.environ['DEPLOYMENT_STAGE'] replica = "aws" jmespath_query = "(event_type==`CREATE` || event_type==`TOMBSTONE` || event_type==`DELETE`) \ && (files.library_preparation_protocol[].library_construction_method.ontology==`EFO:0008931` \ || files.library_preparation_protocol[].library_construction_method.ontology_label==`10X v2 sequencing`) \ && files.analysis_process[].type.text==`analysis`" if deployment_stage == "prod": swagger_url = "https://dss.data.humancellatlas.org/v1/swagger.json" matrix_callback = "https://matrix.data.humancellatlas.org/v0/dss/notifications" else: swagger_url = f"https://dss.{deployment_stage}.data.humancellatlas.org/v1/swagger.json" matrix_callback = f"https://matrix.{deployment_stage}.data.humancellatlas.org/v0/dss/notifications" dss_client = hca.dss.DSSClient(swagger_url=swagger_url)