def invoke_lambda(batches, m_id):
    """Synchronously invoke the mapper Lambda for batch *m_id*.

    Serializes the (key, range) pairs of batch ``m_id`` (1-based), invokes
    the mapper function with them, and appends the decoded response payload
    to the module-level ``mapper_outputs`` list.
    """
    xray_recorder.begin_segment('Invoke mapper Lambda')
    # batch = [k['Key'] for k in batches[m_id-1]]
    batch = [
        json.dumps({"key": k['key'].key, "range": k['range']})
        for k in batches[m_id - 1]
    ]
    xray_recorder.current_segment().put_annotation(
        "batch_for_mapper_" + str(m_id), str(batch))

    resp = lambda_client.invoke(
        FunctionName=mapper_lambda_name,
        InvocationType='RequestResponse',
        Payload=json.dumps({
            "bucket": bucket,
            "keys": batch,
            "jobBucket": job_bucket,
            "jobId": job_id,
            "mapperId": m_id
        }))

    # BUG FIX: the Lambda response payload is JSON — parse it with
    # json.loads() instead of eval(), which executed arbitrary expressions
    # from the response and mis-handles JSON literals (true/false/null).
    out = json.loads(resp['Payload'].read())
    mapper_outputs.append(out)
    print("mapper output", out)
    xray_recorder.end_segment()
def main():
    """Worker entry point: poll for work until the idle timeout expires.

    Waits for an experiment ID to be assigned, then repeatedly consumes
    messages, executes each via ``TaskFactory`` and publishes a
    ``Response``.  Shuts down after ``config.TIMEOUT`` seconds without
    activity unless ``config.IGNORE_TIMEOUT`` is set.
    """
    if config.IGNORE_TIMEOUT:
        info("Worker configured to ignore timeout, will run forever...")

    # Block until an experiment has been assigned to this worker.
    while not config.EXPERIMENT_ID:
        info("Experiment not yet assigned, waiting...")
        time.sleep(5)

    # Disable X-Ray for initial setup so we don't end up
    # with segment warnings before any message is sent
    xray.global_sdk_config.set_sdk_enabled(False)

    last_activity = datetime.datetime.utcnow()
    task_factory = TaskFactory()

    info(f"Now listening for experiment {config.EXPERIMENT_ID}, waiting for work to do...")
    while (datetime.datetime.utcnow() -
           last_activity).total_seconds() <= config.TIMEOUT or config.IGNORE_TIMEOUT:
        # Disable X-Ray before message is identified and processed
        xray.global_sdk_config.set_sdk_enabled(False)
        msg = consume()
        if msg:
            # NOTE(review): assumes task_factory.submit() opens an X-Ray
            # segment that end_segment() below closes — confirm upstream.
            result = task_factory.submit(msg)
            response = Response(request=msg, result=result)
            response.publish()
            last_activity = datetime.datetime.utcnow()
            xray_recorder.end_segment()

    info("Timeout exceeded, shutting down...")
def put_item():
    """API2 handler: join the caller's trace, call API3, and relay its reply.

    Reads the incoming trace/parent ids from the request headers, opens a
    segment continuing that trace, propagates the new segment ids to API3,
    and returns a combined JSON payload.
    """
    incoming_trace = request.headers.get(X_RAY_HEADER_TRACE)
    incoming_parent = request.headers.get(X_RAY_HEADER_PARENT)
    xray_recorder.begin_segment(name='API2', parent_id=incoming_parent,
                                traceid=incoming_trace, sampling=1)

    seg = xray_recorder.current_segment()
    outgoing_headers = {
        X_RAY_HEADER_TRACE: seg.trace_id,
        X_RAY_HEADER_PARENT: seg.id,
    }

    # Call the downstream API3 service, forwarding the trace context.
    api3_url = "http://" + os.environ['API3_HOST'] + ":5000"
    api3_data = requests.get(api3_url, headers=outgoing_headers).json()

    response = jsonify({'api2': "ok", 'api3': api3_data['api3']})
    response.status_code = 200
    response.mimetype = "application/json"

    xray_recorder.end_segment()
    return response
def construct_ctx():
    """
    Clean up context storage on each test run and begin a segment so that
    later subsegment can be attached. After each test run it cleans up
    context storage again.
    """
    # Snapshot of modules imported before the test so anything pulled in
    # by the test (or by patching) can be reloaded/unloaded afterwards.
    pre_run_modules = set(module for module in sys.modules.keys())

    xray_recorder.configure(service='test', sampling=False, context=Context())
    xray_recorder.clear_trace_entities()
    xray_recorder.begin_segment('name')
    yield
    xray_recorder.end_segment()
    xray_recorder.clear_trace_entities()

    # Reload wrapt.importer references to modules to start off clean
    reload(wrapt)
    reload(wrapt.importer)
    # Reload patcher references to already patched modules
    reload(patcher)

    # Cleanup the already imported module references in the system.
    # Longest names first so submodules are handled before their packages.
    for module_name, module in sorted(sys.modules.items(), key=lambda m: len(m[0]), reverse=True):
        if module_name not in pre_run_modules and inspect.ismodule(module):
            reload(module)
    for module_name in sorted(sys.modules.keys(), key=lambda m: len(m), reverse=True):
        if module_name not in pre_run_modules:
            del sys.modules[module_name]
def handler(event, context): # Extract out S3 object details valid_bucket = event['valid_bucket_name'] valid_key = event['valid_key'] nonexistent_bucket = event['nonexistent_bucket'] nonexistent_key = event['nonexistent_key'] # Create a segment xray_recorder.begin_segment('s3trace') # Copy valid S3 object locally to /tmp valid_subsegment = xray_recorder.begin_subsegment('valid') local_file = local_directory + '/' + valid_key print 'Copying valid object s3://{}/{} to {}...'.format(valid_bucket, valid_key, local_file) try: s3.Bucket(valid_bucket).download_file(valid_key, '{}'.format(local_file)) except Exception as e: print 'Error: {}'.format(str(e)) xray_recorder.end_subsegment() # Copy invalid S3 object locally to /tmp invalid_subsegment = xray_recorder.begin_segment('invalid') local_file = local_directory + '/' + nonexistent_key print 'Copying invalid object s3://{}/{} to {}...'.format(nonexistent_bucket, nonexistent_key, local_file) try: s3.Bucket(nonexistent_bucket).download_file(nonexistent_key, '{}'.format(local_file)) except Exception as e: print 'Error: {}'.format(str(e)) xray_recorder.end_subsegment() # End segment xray_recorder.end_segment() return '{"message": "X-Ray worked"}'
async def run_task(task, sqs_msg):
    """Execute *task* locally while keeping its queue TTL refreshed.

    Decodes the task definition, opens per-task stdout/stderr log files,
    then runs the execution coroutine concurrently with a TTL-update
    coroutine.  Returns True on completion.
    """
    global execution_is_completed_flag
    xray_recorder.begin_segment('run_task')
    logging.info("Running Task: {}".format(task))

    xray_recorder.begin_subsegment('encoding')
    # Task arguments arrive as protobuf bytes containing a UTF-8 JSON doc.
    bin_protobuf = prepare_arguments_for_execution(task)
    task_str = bin_protobuf.decode("utf-8")
    task_def = json.loads(task_str)
    submit_pre_agent_measurements(task)

    task_id = task["task_id"]
    fname_stdout = "./stdout-{task_id}.log".format(task_id=task_id)
    fname_stderr = "./stderr-{task_id}.log".format(task_id=task_id)
    f_stdout = open(fname_stdout, "w")
    f_stderr = open(fname_stderr, "w")
    xray_recorder.end_subsegment()

    execution_is_completed_flag = 0
    try:
        task_execution = asyncio.create_task(
            do_task_local_lambda_execution_thread(perf_tracker_post, task, sqs_msg, task_def))
        task_ttl_update = asyncio.create_task(do_ttl_updates_thread(task))
        await asyncio.gather(task_execution, task_ttl_update)
    finally:
        # BUG FIX: close the log files and the X-Ray segment on all paths;
        # previously an exception from either coroutine leaked both file
        # handles and left the 'run_task' segment open.
        f_stdout.close()
        f_stderr.close()
        xray_recorder.end_segment()

    logging.info("Finished Task: {}".format(task))
    return True
def __call__(self, request):
    """Django middleware hook: wrap the request in an X-Ray (sub)segment.

    Makes a sampling decision from the incoming trace header, opens a
    subsegment when running inside Lambda (the runtime owns the facade
    segment) or a full segment otherwise, records request/response HTTP
    metadata, and injects the trace header into the response.
    """
    sampling_decision = None
    meta = request.META
    xray_header = construct_xray_header(meta)
    # a segment name is required
    name = calculate_segment_name(meta.get(HOST_KEY), xray_recorder)

    sampling_req = {
        'host': meta.get(HOST_KEY),
        'method': request.method,
        'path': request.path,
        'service': name,
    }
    sampling_decision = calculate_sampling_decision(
        trace_header=xray_header,
        recorder=xray_recorder,
        sampling_req=sampling_req,
    )

    if self.in_lambda_ctx:
        # Lambda already provides the segment; we may only add subsegments.
        segment = xray_recorder.begin_subsegment(name)
    else:
        segment = xray_recorder.begin_segment(
            name=name,
            traceid=xray_header.root,
            parent_id=xray_header.parent,
            sampling=sampling_decision,
        )

    segment.save_origin_trace_header(xray_header)
    segment.put_http_meta(http.URL, request.build_absolute_uri())
    segment.put_http_meta(http.METHOD, request.method)
    if meta.get(USER_AGENT_KEY):
        segment.put_http_meta(http.USER_AGENT, meta.get(USER_AGENT_KEY))

    if meta.get(X_FORWARDED_KEY):
        # X_FORWARDED_FOR may come from untrusted source so we
        # need to set the flag to true as additional information
        segment.put_http_meta(http.CLIENT_IP, meta.get(X_FORWARDED_KEY))
        segment.put_http_meta(http.X_FORWARDED_FOR, True)
    elif meta.get(REMOTE_ADDR_KEY):
        segment.put_http_meta(http.CLIENT_IP, meta.get(REMOTE_ADDR_KEY))

    # Run the rest of the middleware chain / view, then record the result.
    response = self.get_response(request)
    segment.put_http_meta(http.STATUS, response.status_code)

    if response.has_header(CONTENT_LENGTH_KEY):
        length = int(response[CONTENT_LENGTH_KEY])
        segment.put_http_meta(http.CONTENT_LENGTH, length)
    response[http.XRAY_HEADER] = prepare_response_header(xray_header, segment)

    if self.in_lambda_ctx:
        xray_recorder.end_subsegment()
    else:
        xray_recorder.end_segment()

    return response
def session():
    """Test fixture: create database tables inside a fresh X-Ray segment.

    Yields control to the test, then ends the segment and clears all
    recorded trace entities.
    """
    recorder = xray_recorder
    recorder.configure(service='test', sampling=False, context=Context())
    recorder.clear_trace_entities()
    recorder.begin_segment('SQLAlchemyTest')
    db.create_all()
    yield
    recorder.end_segment()
    recorder.clear_trace_entities()
def _xray_hook_after(self, *args, **kwargs):
    """Post-request hook: record HTTP status (and content length when the
    header is present) on the current segment, then close it."""
    seg = xray_recorder.current_segment()
    seg.put_http_meta(http.STATUS, self._status_code)
    length_header = self._headers.get('Content-Length')
    if length_header:
        seg.put_http_meta(http.CONTENT_LENGTH, int(length_header))
    xray_recorder.end_segment()
def main():
    """Poll loop: process messages every 5 seconds, each pass traced as a
    'processor' segment."""
    while True:
        # Start a segment
        xray_recorder.begin_segment('processor')
        process_messages()
        # Close the segment.
        # BUG FIX: end_segment() takes an optional end_time, not a name;
        # the previous end_segment('processor') recorded the string
        # 'processor' as the segment's end timestamp.
        xray_recorder.end_segment()
        time.sleep(5)
def connection(engine):
    """Fixture: yield an X-Ray-instrumented SQLAlchemy session bound to a
    connection from *engine*; end the segment and clear traces afterwards."""
    conn = engine.connect()
    recorder = xray_recorder
    recorder.configure(service='test', sampling=False, context=Context())
    recorder.clear_trace_entities()
    recorder.begin_segment('SQLAlchemyTest')
    session_cls = XRaySessionMaker(bind=conn)
    Base.metadata.create_all(engine)
    yield session_cls()
    recorder.end_segment()
    recorder.clear_trace_entities()
def __exit__(self, exc_type=None, exc_value=None, exc_traceback=None):
    """Flush accumulated API-stats metadata into X-Ray and close the
    active (sub)segment on context exit."""
    stats = self.ctx.get_metadata(('api-stats',))
    stats.update(self.metadata)
    xray_recorder.put_metadata('custodian', stats)
    if self.in_lambda:
        # Inside Lambda we only own a subsegment (the runtime owns the
        # parent segment), so end it and bail out early.
        xray_recorder.end_subsegment()
        return
    xray_recorder.end_segment()
    if not self.use_daemon:
        self.emitter.flush()
    self.metadata.clear()
def __exit__(self, exc_type=None, exc_value=None, exc_traceback=None):
    """Persist collected API statistics to X-Ray, then finish the trace
    entity this context manager opened."""
    merged = self.ctx.get_metadata(('api-stats', ))
    merged.update(self.metadata)
    xray_recorder.put_metadata('custodian', merged)
    if not self.in_lambda:
        xray_recorder.end_segment()
        if not self.use_daemon:
            self.emitter.flush()
        self.metadata.clear()
        return
    # Lambda path: only a subsegment is ours to close; no flush/clear.
    xray_recorder.end_subsegment()
def session():
    """Test Fixture: build an in-memory SQLite engine, start a trace
    segment, and yield an instrumented SQLAlchemy session."""
    engine = create_engine('sqlite:///:memory:')
    recorder = xray_recorder
    recorder.configure(service='test', sampling=False, context=Context())
    recorder.clear_trace_entities()
    recorder.begin_segment('SQLAlchemyTest')
    maker = XRaySessionMaker(bind=engine)
    Base.metadata.create_all(engine)
    yield maker()
    recorder.end_segment()
    recorder.clear_trace_entities()
def put_item():
    """API3 handler: terminal service in the chain — join the caller's
    trace and reply with a simple ok payload."""
    incoming_trace = request.headers.get(X_RAY_HEADER_TRACE)
    incoming_parent = request.headers.get(X_RAY_HEADER_PARENT)
    xray_recorder.begin_segment(name='API3', parent_id=incoming_parent,
                                traceid=incoming_trace, sampling=1)

    response = jsonify({'api3': "ok"})
    response.status_code = 200
    response.mimetype = "application/json"

    xray_recorder.end_segment()
    return response
def s3_upload_file():
    """Upload the file named in the form data to the requested S3 bucket,
    tracing the request as segment '1' with an upload subsegment."""
    xray_recorder.begin_segment('1')
    xray_recorder.begin_subsegment('requesting form data/upload')

    target_bucket = request.form.get('bucket')
    target_name = request.form.get('filename')
    s3_res = boto3.resource('s3')
    #confirm if tag argument is needed.
    with open(target_name, 'rb') as fh:
        s3_res.Bucket(target_bucket).upload_fileobj(fh, Key=target_name)

    xray_recorder.end_subsegment()
    xray_recorder.end_segment()
    return "OK"
def func_setup(request, user_class):
    """Fixture: create *user_class*'s table inside an X-Ray segment.

    ``request.param`` toggles SQL statement streaming on the recorder.
    Teardown clears trace entities, drops the table, and always ends the
    segment even when table deletion fails.
    """
    xray_recorder.stream_sql = request.param
    xray_recorder.clear_trace_entities()
    xray_recorder.begin_segment('name')
    try:
        user_class.create_table()
        yield
    finally:
        xray_recorder.clear_trace_entities()
        # NOTE(review): nesting reconstructed from a collapsed source —
        # confirm the delete_table/end_segment ordering against the
        # original file.
        try:
            user_class.delete_table()
        finally:
            xray_recorder.end_segment()
def hello_world():
    """Traced demo endpoint: look up the ECS cluster name from container
    metadata, list S3 buckets, fetch a data file, and return the service
    version (deliberately with HTTP status 500)."""
    xray_recorder.begin_segment('hello_world')
    xray_recorder.begin_subsegment('get-s3-data-files')

    version = "version-05"
    cluster_name = 'not_set'
    resp = {}
    data = {}

    # Best-effort read of the ECS task metadata endpoint.
    try:
        logging.info("metadata_uri:" + os.environ['ECS_CONTAINER_METADATA_URI'])
        resp = requests.get(url=os.environ['ECS_CONTAINER_METADATA_URI'] + '/task')
        data = resp.json()
    except Exception as e:
        logging.error("error getting metadata:" + str(e))

    if 'Cluster' in data:
        # Cluster ARN looks like arn:.../<cluster-name>; keep the name part.
        cluster_name = data['Cluster'].split('/')[1]
    else:
        cluster_name = 'not_set'
    logging.info("cluster:" + cluster_name)

    s3_client = boto3.client('s3')
    bucket_listing = s3_client.list_buckets()
    logging.info("s3 response: %s", bucket_listing['Owner']['DisplayName'])

    xray_recorder.end_subsegment()

    try:
        get_file()
    except Exception as e:
        logging.error("get_File error")

    xray_recorder.end_segment()
    # NOTE(review): the 500 status appears intentional (fault demo) — confirm.
    return json.dumps({"version": version}), 500
def __exit__(self, exc_type=None, exc_value=None, exc_traceback=None):
    """Close the X-Ray (sub)segment and flush collected API-stats metadata.

    Inside Lambda only a subsegment is owned (the runtime manages the
    parent segment), so it is ended and the method returns without
    flushing or logging.
    """
    metadata = self.ctx.get_metadata(('api-stats',))
    metadata.update(self.metadata)
    xray_recorder.put_metadata('custodian', metadata)
    if self.in_lambda:
        xray_recorder.end_subsegment()
        return
    xray_recorder.end_segment()
    if not self.use_daemon:
        self.emitter.flush()
    # NOTE(review): placement reconstructed from a collapsed source —
    # assumes this log line sits at method level, not inside the
    # use_daemon branch; confirm against the original file.
    log.info(
        ('View XRay Trace https://console.aws.amazon.com/xray/home?region=%s#/'
         'traces/%s' % (self.ctx.options.region, self.segment.trace_id)))
    self.metadata.clear()
def test_function(self):
    """Invoke the handler with the raw recorded event and verify the
    returned checkout-session id."""
    xray_recorder.begin_segment("test_function")
    try:
        # 'with' replaces the manual open/close — the original called
        # file.close() twice.
        with open("event.json", "rb") as file:
            event = file.read()
        logger.warning("## EVENT")
        context = {"requestid": "1234"}
        result = handler(event, context)
        print(str(result))
        self.assertRegex(json.loads(result)["id"], "cs_test_.*", "Should match")
    finally:
        # BUG FIX: end the segment on all paths; previously end_segment()
        # sat after the try/finally and was skipped on assertion failure.
        xray_recorder.end_segment()
def test_function(self):
    """Invoke the handler with the decoded sample event and print the result."""
    xray_recorder.begin_segment('test_function')
    try:
        # 'with' replaces the manual open/close — the original called
        # file.close() twice.
        with open('event.json', 'rb') as file:
            ba = bytearray(file.read())
        event = jsonpickle.decode(ba)
        logger.warning('## EVENT')
        logger.warning(jsonpickle.encode(event))
        context = {'requestid': '1234'}
        result = handler(event, context)
        print(str(result))
    finally:
        # BUG FIX: end the segment on all paths; previously end_segment()
        # was skipped when the handler raised.
        xray_recorder.end_segment()
def test_id_generation_default_sampling_true():
    """Sampled entities must receive real (non-noop) trace/segment ids."""
    seg = xray_recorder.begin_segment('segment_name', sampling=True)
    sub = xray_recorder.begin_subsegment('subsegment_name')
    xray_recorder.end_subsegment()
    xray_recorder.end_segment()

    zero_id = '0000000000000000'
    zero_trace = '1-00000000-000000000000000000000000'
    for entity in (seg, sub):
        assert entity.id != zero_id
        assert entity.trace_id != zero_trace
    assert sub.parent_id != zero_id
def test_id_generation_noop_false():
    """With AWS_XRAY_NOOP_ID=FALSE, even unsampled entities must still get
    real (non-zero) ids."""
    os.environ['AWS_XRAY_NOOP_ID'] = 'FALSE'
    seg = xray_recorder.begin_segment('segment_name', sampling=False)
    sub = xray_recorder.begin_subsegment('subsegment_name')
    xray_recorder.end_subsegment()
    xray_recorder.end_segment()

    zero_id = '0000000000000000'
    zero_trace = '1-00000000-000000000000000000000000'
    for entity in (seg, sub):
        assert entity.id != zero_id
        assert entity.trace_id != zero_trace
    assert sub.parent_id != zero_id
def test_function(self):
    """Run the handler against in.json and check the result mentions
    FunctionCount."""
    xray_recorder.begin_segment("test_function")
    try:
        # 'with' replaces the manual open/close — the original called
        # file.close() twice.
        with open("in.json", "rb") as file:
            ba = bytearray(file.read())
        event = jsonpickle.decode(ba)
        logger.warning("## EVENT")
        logger.warning(jsonpickle.encode(event))
        context = {"requestid": "1234"}
        result = handler(event, context)
        print(str(result))
        self.assertRegex(str(result), "FunctionCount", "Should match")
    finally:
        # BUG FIX: end the segment on all paths; previously end_segment()
        # was skipped when the handler raised or the assertion failed.
        xray_recorder.end_segment()
def ddb_message():
    """Fetch the row keyed by $MESSAGE_ID from DynamoDB and return its
    'message' attribute, traced as the 'ddb_message' segment."""
    xray_recorder.begin_segment('ddb_message')
    message_id = os.environ.get('MESSAGE_ID')
    app.logger.info(message_id)
    try:
        response = table.get_item(Key={'id': int(message_id)})
        item = response['Item']
        app.logger.info("----------")
        app.logger.info(item)
        app.logger.info("----------")
        app.logger.info(json.dumps(item, indent=4, cls=DecimalEncoder))
    except Exception:
        # BUG FIX: the exception was previously swallowed, after which the
        # function fell through to `item['message']` and crashed with
        # UnboundLocalError. Log the traceback and re-raise instead.
        app.logger.info(traceback.print_exc())
        raise
    finally:
        # Close the segment on success and failure alike.
        xray_recorder.end_segment()
    return item['message']
def main():
    """Drive upload_task_issues() locally using the sample event.json."""
    xray_recorder.begin_segment('main_function')
    try:
        # read sample event — 'with' replaces the manual open/close (the
        # original called file.close() twice).
        with open('event.json', 'rb') as file:
            ba = bytearray(file.read())
        event = jsonpickle.decode(ba)
        logger.warning('## EVENT')
        logger.warning(jsonpickle.encode(event))
        # create sample context
        context = {'requestid': '1234'}
        # invoke handler
        result = upload_task_issues(event, context)
        # print response
        print('## RESPONSE')
        print(str(result))
    finally:
        # BUG FIX: end the segment on all paths; previously end_segment()
        # was skipped when the handler raised.
        xray_recorder.end_segment()
def xray_func():
    """Demo: open a segment and a subsegment, attach metadata and an
    annotation, print a message, and close both entities."""
    # xray_recorder.configure(
    #     sampling=True,
    #     context_missing='LOG_ERROR',
    #     daemon_address='0.0.0.0:2000',
    #     # plugins=('ec2_plugin'),
    #     service='my-app',
    #     context=Context()
    # )
    # Start a segment
    segment = xray_recorder.begin_segment('segment_name')
    # Start a subsegment
    subsegment = xray_recorder.begin_subsegment('subsegment_name')

    print("Hello here...")

    # Add metadata or annotation here if necessary
    # NOTE(review): this passes the *builtin* `dict` type object as the
    # metadata value — probably intended to be an actual dict; confirm.
    segment.put_metadata('key', dict, 'namespace')
    subsegment.put_annotation('key', 'value')

    xray_recorder.end_subsegment()
    # Close the segment
    xray_recorder.end_segment()
def dynamodb():
    """API1 handler: start a new trace, call API2 with the trace headers,
    and aggregate the downstream results into one JSON response."""
    xray_recorder.begin_segment(name='API1', sampling=1)
    seg = xray_recorder.current_segment()
    outgoing_headers = {
        X_RAY_HEADER_TRACE: seg.trace_id,
        X_RAY_HEADER_PARENT: seg.id,
    }

    api2_url = "http://" + os.environ['API2_HOST'] + ":5000"
    api2_data = requests.get(api2_url, headers=outgoing_headers).json()

    response = jsonify({
        'api1': 'ok',
        'api2': api2_data['api2'],
        'api3': api2_data['api3'],
    })
    response.status_code = 200

    xray_recorder.end_segment()
    return response
app.logger.info(response) return 'Your order has NOT been fulfilled' else: app.logger.info('Invalid request JSON. %s was sent', payload['Message']) #print 'Invalid Request JSON.' #print 'The data sent was %s' % payload['Message'] except Exception as e: # Looks like it wasn't. # print e #print 'This was not a fulfillment request. This microservice is expecting exactly {"'+resource+'": 1}' #print 'The data sent was %s' % payload['Message'] app.logger.error( 'Something really bad happened. This was definitely not a fulfillment request. Expected {"%s":"1"} but got %s instead', resource, payload['Message']) return 'We were unable to place your order' return 'This was not a fulfillment request. This microservice is expecting exactly {"' + resource + '": 1}' else: # We should never get here return "This is not the page you are looking for" #close xray segments xray_recorder.end_subsegment() xray_recorder.end_segment() if __name__ == '__main__': app.run(debug=True, host='0.0.0.0', port=portNum)
s3_storage_hour_cost = 1 * 0.0000521574022522109 * ( total_s3_size / 1024.0 / 1024.0 / 1024.0) # cost per GB/hr s3_put_cost = len(job_keys) * 0.005 / 1000 # S3 GET # $0.004/10000 total_s3_get_ops += len(job_keys) s3_get_cost = total_s3_get_ops * 0.004 / 10000 # Total Lambda costs total_lambda_secs += reducer_lambda_time lambda_cost = total_lambda_secs * 0.00001667 * lambda_memory / 1024.0 s3_cost = (s3_get_cost + s3_put_cost + s3_storage_hour_cost) # Print costs print("Reducer L", reducer_lambda_time * 0.00001667 * lambda_memory / 1024.0) print("Lambda Cost", lambda_cost) print("S3 Storage Cost", s3_storage_hour_cost) print("S3 Request Cost", s3_get_cost + s3_put_cost) print("S3 Cost", s3_cost) print("Total Cost: ", lambda_cost + s3_cost) print("Total Lines:", total_lines) xray_recorder.end_subsegment() #Calculate cost # Delete Reducer function xray_recorder.begin_subsegment('Delete reducers') l_reducer.delete_function() l_rc.delete_function() xray_recorder.end_subsegment() #Delete reducers xray_recorder.end_segment() #Map Reduce Driver
async def middleware(request, handler):
    """
    Main middleware function, deals with all the X-Ray segment logic
    """
    # Create X-Ray headers
    xray_header = construct_xray_header(request.headers)
    # Get name of service or generate a dynamic one from host
    name = calculate_segment_name(request.headers['host'].split(':', 1)[0],
                                  xray_recorder)

    sampling_req = {
        'host': request.headers['host'],
        'method': request.method,
        'path': request.path,
        'service': name,
    }
    sampling_decision = calculate_sampling_decision(
        trace_header=xray_header,
        recorder=xray_recorder,
        sampling_req=sampling_req,
    )

    # Start a segment
    segment = xray_recorder.begin_segment(
        name=name,
        traceid=xray_header.root,
        parent_id=xray_header.parent,
        sampling=sampling_decision,
    )
    segment.save_origin_trace_header(xray_header)

    # Store request metadata in the current segment
    segment.put_http_meta(http.URL, str(request.url))
    segment.put_http_meta(http.METHOD, request.method)

    if 'User-Agent' in request.headers:
        segment.put_http_meta(http.USER_AGENT, request.headers['User-Agent'])

    if 'X-Forwarded-For' in request.headers:
        # Forwarded client IP may come from an untrusted proxy header, so
        # the X_FORWARDED_FOR flag records that provenance.
        segment.put_http_meta(http.CLIENT_IP, request.headers['X-Forwarded-For'])
        segment.put_http_meta(http.X_FORWARDED_FOR, True)
    elif 'remote_addr' in request.headers:
        segment.put_http_meta(http.CLIENT_IP, request.headers['remote_addr'])
    else:
        segment.put_http_meta(http.CLIENT_IP, request.remote)

    try:
        # Call next middleware or request handler
        response = await handler(request)
    except HTTPException as exc:
        # Non 2XX responses are raised as HTTPExceptions; treat the
        # exception itself as the response so its status is recorded.
        response = exc
        raise
    except Exception as err:
        # Store exception information including the stacktrace to the segment
        response = None
        segment.put_http_meta(http.STATUS, 500)
        stack = stacktrace.get_stacktrace(limit=xray_recorder.max_trace_back)
        segment.add_exception(err, stack)
        raise
    finally:
        # Record response metadata (when any) and always end the segment,
        # even when the handler raised.
        if response is not None:
            segment.put_http_meta(http.STATUS, response.status)
            if 'Content-Length' in response.headers:
                length = int(response.headers['Content-Length'])
                segment.put_http_meta(http.CONTENT_LENGTH, length)
            header_str = prepare_response_header(xray_header, segment)
            response.headers[http.XRAY_HEADER] = header_str
        xray_recorder.end_segment()

    return response