def triggerEvent(data):
    """
    Submits the given data to the result bucket.

    :param data: result payload; requires a 'sample' key
    :return: an HTTP-style response dict containing the serialized message,
             or a 400 response when no sample is provided
    """
    validate(data, __RESULT_SCHEMA__)

    # Guard BEFORE touching data['sample']: the original assigned
    # data['id'] = data['sample'] ahead of this check, so a missing
    # sample raised KeyError and this 400 branch was unreachable.
    if 'sample' not in data:
        return {
            'body': json.dumps({'error': 'no sample provided'}),
            'statusCode': 400,
            'isBase64Encoded': False,
            'headers': __HTTP_HEADERS__
        }

    data['time'] = int(time.time() * 1000)  # epoch millis
    data['id'] = data['sample']

    table = Bucket(os.environ['resultTable'])

    # lookup from the stasis tables the correct file handle
    # TODO right now we are faking it
    name = get_file_handle(data['id'])

    if table.exists(name):
        existing = json.loads(table.load(name))
        # merge previously stored injections with the new ones;
        # new entries win on key collision. Use .get so a stored
        # record without 'injections' does not crash the update.
        data['injections'] = {
            **existing.get('injections', {}),
            **data['injections']
        }

    result = table.save(
        name, json.dumps(TableManager().sanitize_json_for_dynamo(data)))

    return {
        'body': json.dumps(data),
        'statusCode': result['ResponseMetadata']['HTTPStatusCode'],
        'isBase64Encoded': False,
        'headers': __HTTP_HEADERS__
    }
def test_bucket_operations_bin(requireMocking):
    """
    Round-trips a binary payload through the bucket: save, load, delete.

    :param requireMocking: fixture providing the mocked AWS environment
    :return:
    """
    key = "test.zip"
    payload = str.encode("tada")
    bucket = Bucket(os.environ["resultTable"])

    # the key must not exist before we store it
    assert not bucket.exists(key)

    bucket.save(key, payload)
    assert bucket.exists(key)
    assert bucket.load(key, binary=True) == payload

    # cleanup must actually remove the object
    bucket.delete(key)
    assert not bucket.exists(key)
def test_bucket_operations_txt(requireMocking):
    """
    Round-trips a text payload through the bucket: save, load, delete.

    :param requireMocking: fixture providing the mocked AWS environment
    :return:
    """
    key = "test.txt"
    payload = "tada"
    bucket = Bucket(os.environ["resultTable"])

    # the key must not exist before we store it
    assert not bucket.exists(key)

    bucket.save(key, payload)
    assert bucket.exists(key)
    assert bucket.load(key) == payload

    # cleanup must actually remove the object
    bucket.delete(key)
    assert not bucket.exists(key)
def get(events, context):
    """
    Downloads a finished job result. This will be a zipfile.

    :param events: lambda proxy event; expects pathParameters.job
    :param context: lambda context (unused)
    :return: an HTTP-style response dict
    """
    # guard clauses replace the original nested if/else pyramid;
    # every response payload and status code is unchanged
    if 'pathParameters' not in events:
        return {
            "statusCode": 404,
            "headers": __HTTP_HEADERS__,
            "body": json.dumps({
                "error": "not supported, need's be called from a http event!"
            })
        }

    if 'job' not in events['pathParameters']:
        return {
            "statusCode": 404,
            "headers": __HTTP_HEADERS__,
            "body": json.dumps({"error": "job is not provided!"})
        }

    job = events['pathParameters']['job']
    state: str = get_job_state(job)

    if state is None:
        return {
            "statusCode": 503,
            "headers": __HTTP_HEADERS__,
            "body": json.dumps({
                "error": "job does not exist!",
                "job": job
            })
        }

    if state != AGGREGATED_AND_UPLOADED:
        # job exists but aggregation/upload has not finished yet
        return {
            "statusCode": 503,
            "headers": __HTTP_HEADERS__,
            "body": json.dumps({
                "error": "job not ready yet!",
                "job": job,
                "state": state,
                "required_state": AGGREGATED_AND_UPLOADED
            })
        }

    db = Bucket(os.environ["dataBucket"])
    filename = "{}.zip".format(job)

    if not db.exists(filename):
        return {
            "statusCode": 404,
            "headers": __HTTP_HEADERS__,
            "body": json.dumps({
                'error': "not able to find job",
                "filename": filename,
                "job": job
            })
        }

    try:
        # base64-encode the zip so it can travel inside a JSON body
        content = base64.b64encode(
            db.load(filename, binary=True)).decode("utf-8")
        return {
            "statusCode": 200,
            "headers": __HTTP_HEADERS__,
            "body": json.dumps({
                "content": content,
                "job": job
            })
        }
    except Exception as e:
        traceback.print_exc()
        return {
            "statusCode": 503,
            "headers": __HTTP_HEADERS__,
            "body": json.dumps({
                "error": str(e),
                "job": job
            })
        }