def _get_wiki_url(_url, q):
    """Query the legacy wiki app at `_url` for topic `q`.

    A path-style query is reduced to its last segment; an empty query is
    replaced by a random hot topic. The outcome is recorded in X-Ray
    metadata and persisted to DynamoDB. Always returns a dict carrying
    `statusCode` (and `body` once a result or an error message exists).
    """
    base_url = _url
    query_params = {}
    result = {"statusCode": 400}
    hot_topics = ['cholas', 'cheras', 'pandyas', 'pallavas', 'sangam_era', 'kural']
    # Keep only the final path segment of the incoming query, if any.
    if q:
        q = q.split('/')[-1]
    if not q:
        q = random.choice(hot_topics)
    try:
        random_sleep()
        # Chaos hook: sometimes simulate a brittle legacy-app failure and
        # record that fact as annotations on a dedicated subsegment.
        if _trigger_exception():
            xray_recorder.put_annotation("SIMULATED_ERRORS", "True")
            xray_recorder.begin_subsegment("BRITTLE_LEGACY_APP")
            subseg = xray_recorder.current_subsegment()
            subseg.put_annotation("MANUALLY_TRIGGERRED_IN_SUBSEGMENT", "True")
            xray_recorder.end_subsegment()
            raise Exception("RANDOM_ERROR: Simulate Mystique Failure")
        upstream = requests.get(f'{base_url}/{q}', params=query_params)
        # NOTE(review): the metadata is attached before statusCode/body are
        # filled in (the dict reference is shared, so it may still mutate).
        xray_recorder.put_metadata('RESPONSE', result)
        result["statusCode"] = upstream.status_code
        payload = upstream.json()
        result["body"] = json.dumps({"message": payload["body"]["message"]})
        _ddb_put_item(result)
    except Exception as e:
        result["body"] = json.dumps({"message": str(e)})
    return result
def subProcessTwo():
    """Demo worker: tags the active X-Ray span when sampled, then fails.

    Sleeps 5 seconds and raises NameError so the error surfaces in the
    parent trace.
    """
    print("Running sub process 2")
    # Annotation/metadata calls are only useful when the trace is sampled.
    if xray_recorder.is_sampled():
        xray_recorder.put_annotation('AnnotationKey', 'AnnotationValue')
        xray_recorder.put_metadata('MetadataKey', 'MetatDataValue')
    # NOTE(review): reconstructed from a flattened line — the sleep and the
    # raise are assumed to sit at function level (unconditional); confirm
    # against the upstream sample.
    time.sleep(5)
    raise NameError('This is a NameError Exception')
def __exit__(self, exc_type=None, exc_value=None, exc_traceback=None):
    """Finish the active X-Ray span, publishing the metadata gathered so far."""
    # Fold this tracer's metadata into the context-wide api-stats bucket.
    combined = self.ctx.get_metadata(('api-stats', ))
    combined.update(self.metadata)
    xray_recorder.put_metadata('custodian', combined)
    if self.in_lambda:
        # Lambda owns the facade segment; only our subsegment may be closed.
        xray_recorder.end_subsegment()
    else:
        xray_recorder.end_segment()
        if not self.use_daemon:
            # Without a local X-Ray daemon, deliver buffered data ourselves.
            self.emitter.flush()
        self.metadata.clear()
def __exit__(self, exc_type=None, exc_value=None, exc_traceback=None):
    """Tear down tracing for this context and attach accumulated metadata."""
    stats = self.ctx.get_metadata(('api-stats',))
    stats.update(self.metadata)
    xray_recorder.put_metadata('custodian', stats)
    # Running under Lambda: the runtime manages the segment itself, so close
    # only the subsegment we opened and leave everything else untouched.
    if self.in_lambda:
        xray_recorder.end_subsegment()
        return
    xray_recorder.end_segment()
    # When not talking to a local daemon, flush buffered traces synchronously.
    if not self.use_daemon:
        self.emitter.flush()
    self.metadata.clear()
def __exit__(self, exc_type=None, exc_value=None, exc_traceback=None):
    """Close the X-Ray span opened by __enter__ and attach collected metadata."""
    # Merge per-context api-stats with metadata gathered on this tracer.
    metadata = self.ctx.get_metadata(('api-stats',))
    metadata.update(self.metadata)
    xray_recorder.put_metadata('custodian', metadata)
    if self.in_lambda:
        # Inside Lambda only a subsegment was opened (the SDK owns the facade
        # segment), so close it and skip the emitter/cleanup work below.
        xray_recorder.end_subsegment()
        return
    xray_recorder.end_segment()
    if not self.use_daemon:
        # No local X-Ray daemon: push buffered (sub)segments directly.
        self.emitter.flush()
    # NOTE(review): reconstructed from a flattened line — the log.info below
    # is assumed to run regardless of use_daemon; confirm against upstream.
    log.info(
        ('View XRay Trace https://console.aws.amazon.com/xray/home?region=%s#/'
         'traces/%s' % (self.ctx.options.region, self.segment.trace_id)))
    self.metadata.clear()
def api():
    "Our API logic, to route calls to correct microservices, and return the result"
    if request.method == 'POST':
        #This code deals with API calls, and sends to microservice chain
        #Lets update our micro service endpoints / routes.
        update_routes()
        json_log('Routes: {}'.format(repr(ROUTES)))
        # Read the API payload, and pass on to external microservices
        messageRequest = request.get_json()
        json_log("Host {} received RequestId {}, asking for services: {}".format(socket.gethostname(),messageRequest['RequestId'],','.join(messageRequest['Services'])))
        # Tag the trace so requests can be found by RequestId in the X-Ray console.
        xray_recorder.put_annotation("RequestId", str(messageRequest['RequestId']))
        xray_recorder.put_metadata("Services", messageRequest['Services'])
        xray_recorder.put_metadata("InputMessage", messageRequest['Message'])
        routerResponse = {'Responses':[]}
        errors = 0
        json_log(messageRequest)
        # Each requested service transforms 'Message' in turn; responses chain.
        for service in messageRequest['Services']:
            # Select a random microservice endpoint, from global ROUTES, updated from DynamoDB
            service_endpoint = ""
            try:
                service_endpoint = random.choice(ROUTES[service])
                json_log('RequestId {} needs Service:{} using Endpoint:{}'.format(messageRequest['RequestId'],service,service_endpoint))
                req = requests.post(service_endpoint, timeout=10, json={'RequestId':messageRequest['RequestId'],'Message':messageRequest['Message']}).json()
            except KeyError as e:
                # Unknown service name: deliberately shut the werkzeug server
                # down so the instance gets replaced (demo chaos behavior).
                json_log('Request included a service we dont support, Shutting down server','error')
                json_log('service-router may need HA config, or to run the latest code from the dashboard','warning')
                service_endpoint = "Error"
                func = request.environ.get('werkzeug.server.shutdown')
                func() #Code isnt working correctly, quit so instance will be replaced.
                #Comment out the lines from 'except KeyError' down to this comment, if you dont want the service-router to restart randomly
            except Exception as e:
                #Catch any issues with upstream microservice
                #If this code happens, our response will end up being invalid, as we didnt do each transformation
                json_log('Error:"{}" connecting to Service:{} Endpoint:{}'.format(e, service, service_endpoint),'exception')
                errors += 1
            # NOTE(review): if the POST above failed, `req` is unbound on the
            # first iteration (caught below) or STALE from a previous service
            # on later iterations — the stale value gets appended silently.
            try:
                routerResponse['Responses'].append(req)#Add the microservice response to our list of responses
                messageRequest['Message']=req['Message']
            except Exception as e:
                json_log('Bad response:"{}" from Service:{} Endpoint:{}'.format(e,service,service_endpoint),'exception')
                errors += 1
        #return an http 200, and our API output
        xray_recorder.put_metadata("ErrorCount", errors)
        xray_recorder.put_metadata("Response", routerResponse)
        if errors:
            json_log('RequestId {} completed with {} errors'.format(messageRequest['RequestId'], str(errors)))
        else:
            json_log('RequestId {} completed successfully'.format(messageRequest['RequestId']))
        return json.dumps(routerResponse), 200
    # Non-POST requests just get the current service map for debugging.
    return 'Service Map:\n {}'.format(repr(ROUTES)), 200
def get_wiki_data(needle='Python_(programming_language)', RENDER_HTML=False):
    """Look up `needle` on English Wikipedia and return title/summary/url.

    Returns a rendered HTML page when RENDER_HTML is True, otherwise an
    API-Gateway-style dict: {'statusCode': 200, 'body': {'message': pg_info}}.
    pg_info['status'] is True only when the page exists; failures are
    reported under pg_info['ERROR'].
    """
    # resp = {'statusCode': 404, }
    pg_info = {'status': False}
    try:
        # AWS XRay Annotation
        xray_recorder.put_annotation("LEGACY_APP_ON_EC2", "BEGIN_PROCESSING")
        _wiki = wikipediaapi.Wikipedia('en')
        _wiki_page = _wiki.page(str(needle))
        if not _wiki_page.exists():
            print('Hell N0!')
            pg_info['ERROR'] = f'No information available for \'{needle}\'. Be the first person to create a wiki page for this topic.'
        else:
            pg_info['title'] = _wiki_page.title
            pg_info['summary'] = _wiki_page.summary[0:100]  # first 100 chars only
            pg_info['url'] = _wiki_page.fullurl
            pg_info['status'] = True
            # AWS XRay Metadata
            # NOTE(review): reconstructed from a flattened line — put_metadata
            # is assumed to run only on the success path; confirm upstream.
            xray_recorder.put_metadata('WIKI_QUERY_INFO', pg_info)
    except Exception as e:
        print(str(e))
        pg_info['ERROR'] = str(e)
    # Deliver as web page using HTML/CSS if NEEDED, set using global variable.
    if RENDER_HTML:
        return render_template("wiki_page.html", _wiki_needle=str(needle), _wiki_page_info=pg_info)
    else:
        # return jsonify(pg_info)
        # return pg_info
        # Prep for API Gateway
        return {'statusCode': 200, 'body': {'message': pg_info}}
def lambda_handler(event, context):
    """Fan the incoming notification event out to two other Lambda functions.

    Invokes the functions named by the 'labmdafunction1'/'labmdafunction2'
    env vars (note: 'labmda' typo is intentional — it matches the deployed
    env-var names) asynchronously, wrapping each invoke in an X-Ray
    subsegment annotated with the target function ARN.
    """
    ######################################################################
    # Create, Add, and Configure Python logging handler
    # https://stackoverflow.com/questions/2266646/how-to-disable-and-re-enable-console-logging-in-python/2267567#2267567
    ######################################################################
    log = logging.getLogger("invokeonnotification-Logger")
    # log.setLevel(logging.DEBUG)
    # logging.basicConfig(format='%(levelname)s:%(message)s', level=logging.DEBUG)
    # log.addHandler(handler)
    enable_logging = os.getenv('enable_logging')
    # if enable_logging == 'True':
    #     enable_logging = True
    #     logging.Logger.disabled = False
    # else:
    #     enable_logging = False
    #     logging.Logger.disabled = True
    try:
        print("Received event: ")
        print(json.dumps(event, indent=2))
        # log.info("Received event: " + json.dumps(event, indent=2))
        lambda_client = boto3.client('lambda')
        labmdafunction1 = os.getenv('labmdafunction1')
        labmdafunction2 = os.getenv('labmdafunction2')
        AWS_XRAY_TRACING_NAME = os.getenv('AWS_XRAY_TRACING_NAME')
    except:
        # NOTE(review): bare except only logs — if initialization failed,
        # lambda_client / labmdafunction1 / labmdafunction2 are unbound and
        # the code below will raise NameError.
        log.debug("failed to initialize function data!")
    ######################################################################
    # Start X-Ray segment
    # https://docs.aws.amazon.com/xray/latest/devguide/xray-sdk-python-segment.html
    # https://docs.aws.amazon.com/xray-sdk-for-python/latest/reference/basic.html
    # ONLY SUBSEGMENTS IN AWS LAMBDA FUNCTIONS
    # SUBSEGMENTS CANNOT SET THE USER
    ######################################################################
    # segment = xray_recorder.begin_segment('invokeonnotification')
    # document = xray_recorder.current_segment()
    # segment = xray_recorder.current_segment()
    ######################################################################
    # Invoke Labmda functions within X-Ray sub-segments
    ######################################################################
    # log.debug("hello stdout world")
    # NOTE(review): this subsegment is never explicitly ended; the second
    # begin_subsegment('labmdafunction1') below nests inside it, and only
    # the nested ones are closed. The Lambda SDK may auto-close leftovers,
    # but this looks unintended — confirm.
    subsegment = xray_recorder.begin_subsegment('labmdafunction1')
    subsegment.put_annotation('function_name', labmdafunction1)
    now = datetime.now()  # current date and time
    time_now = now.strftime("%H:%M:%S.%f")
    subsegment.put_annotation("Version", "2.0")
    subsegment.put_annotation("Developer", "Adrian")
    subsegment.put_annotation("lambdafunction", "invokeonnotification")
    subsegment.put_metadata("function", __name__)
    # subsegment.put_metadata("enable_logging", enable_logging)
    subsegment.put_metadata("system time H:M:S.milliseconds", time_now)
    # subsegment.set_user("invokeonnotification")
    # f = io.BytesIO(b'test')
    # f.read()
    log.info((str(type(event))))
    # using encode() + dumps() to convert to bytes
    res_bytes = json.dumps(event).encode('utf-8')
    # printing type and binary dict
    log.info("The type after conversion to bytes is : " + str(type(res_bytes)))
    log.info("The value after conversion to bytes is : " + str(res_bytes))
    ######################################################################
    # Start the X-Ray sub-segment
    ######################################################################
    subsegment = xray_recorder.begin_subsegment('labmdafunction1')
    subsegment.put_annotation('function_arn', labmdafunction1)
    log.info("Invoking labmdafunction1")
    # InvocationType='Event' -> asynchronous; response only acknowledges receipt.
    response1 = lambda_client.invoke(FunctionName=labmdafunction1,
                                     InvocationType='Event',
                                     Payload=res_bytes)
    xray_recorder.put_metadata("response1", response1)
    xray_recorder.end_subsegment()
    ######################################################################
    # Start the X-Ray sub-segment
    ######################################################################
    subsegment = xray_recorder.begin_subsegment('labmdafunction2')
    subsegment.put_annotation('function_arn', labmdafunction2)
    log.info("Invoking labmdafunction2")
    response2 = lambda_client.invoke(FunctionName=labmdafunction2,
                                     InvocationType='Event',
                                     Payload=res_bytes)
    xray_recorder.put_metadata("response2", response2)
    xray_recorder.end_subsegment()
def _ssm_param_value(param_name):
    # Helper: return the decrypted value of one SSM parameter, or None when
    # the response carries no Parameter payload. Uses the module-level SSM
    # `client`, same as the original inline code.
    param_details = client.get_parameter(Name=param_name, WithDecryption=True)
    if 'Parameter' in param_details and len(param_details.get('Parameter')) > 0:
        return param_details.get('Parameter').get('Value')
    return None


def lambda_handler(event, context):
    """Ship S3 server-access-log lines (delivered via an S3 event) to Elastic Cloud.

    Downloads the triggering access-log object, parses every line with
    s3logparse, and indexes each parsed entry into Elasticsearch inside an
    X-Ray subsegment. Configuration (cloud_id, http_auth_username,
    http_auth_password, index_name) is read from SSM Parameter Store under
    the parent stack's prefix, falling back to environment variables when
    the SSM lookup fails.

    Fixes over the previous revision:
    - the credential log lines (which had been secret-scanner-masked to
      invalid '******' tokens) no longer write secret values to the log;
    - every parsed log entry is indexed, not just the last one.
    """
    ######################################################################
    # Create and Configure Python logging
    ######################################################################
    enable_logging = os.getenv('enable_logging')
    if enable_logging == 'True':
        enable_logging = True
        logging.Logger.disabled = False
    else:
        enable_logging = False
        logging.Logger.disabled = True
    log = logging.getLogger()
    log.setLevel(logging.DEBUG)
    log.debug("Received event: " + json.dumps(event, indent=2))

    ######################################################################
    # Get all parameters containing credentials for this app
    # If not -> use credentials from environment variables
    ######################################################################
    parent_stack_name = os.getenv('parent_stack_name')
    try:
        cloud_id = _ssm_param_value('/' + parent_stack_name + '/cloud_id')
        log.info('cloud_id=' + str(cloud_id))
        http_auth_username = _ssm_param_value(
            '/' + parent_stack_name + '/http_auth_username')
        http_auth_password = _ssm_param_value(
            '/' + parent_stack_name + '/http_auth_password')
        # SECURITY: never log credential values; record only that they loaded.
        log.info('http_auth_username/http_auth_password loaded from SSM')
        index_name = _ssm_param_value('/' + parent_stack_name + '/index_name')
        log.info('index_name=' + str(index_name))
    except Exception:
        log.debug("Encountered an error loading credentials from SSM.")
        traceback.print_exc()
        cloud_id = os.getenv('cloud_id')
        http_auth_username = os.getenv('http_auth_username')
        http_auth_password = os.getenv('http_auth_password')
        index_name = os.getenv('index_name')

    ######################################################################
    # Get the object from the event and show its content type
    ######################################################################
    bucket = event['Records'][0]['s3']['bucket']['name']
    key = urllib.parse.unquote_plus(event['Records'][0]['s3']['object']['key'],
                                    encoding='utf-8')
    try:
        response = s3.get_object(Bucket=bucket, Key=key)
        log.info("CONTENT TYPE: " + response['ContentType'])
    except Exception as e:
        log.debug(
            'Error getting object {} from bucket {}. Make sure they exist and your bucket is in the same region as this function.'
            .format(key, bucket))
        log.debug(e)
        raise e

    access_log = response['Body'].read()
    log.info(f"access_log={access_log}\n")

    # Parse the raw text via a temp file, since s3logparse consumes lines.
    f = NamedTemporaryFile(mode='w+', delete=False)
    f.write(str(access_log))
    f.close()
    log_entries = []
    with open(f.name, "r") as fh:
        for log_entry in s3logparse.parse_log_lines(fh.readlines()):
            log.info(log_entry)
            log_entries.append(log_entry)
    os.unlink(f.name)  # delete the file after usage

    ######################################################################
    # Start the X-Ray sub-segment
    ######################################################################
    subsegment = xray_recorder.begin_subsegment(
        'accesslogstoelasticcloud - send data to ElasticCloud')
    subsegment.put_annotation('function', 'accesslogstoelasticcloud')
    xray_recorder.put_metadata("access_log", access_log)

    # Now put that data in ElasticCloud!
    es = Elasticsearch(cloud_id=cloud_id,
                       http_auth=(http_auth_username, http_auth_password))
    es.info()
    # Create the index in Elasticsearch; ignore status 400 (already exists).
    es.indices.create(index=index_name, ignore=400)
    # BUG FIX: previously only the LAST parsed entry was indexed (the loop
    # variable leaked out of the parse loop above). Index every entry.
    for log_entry in log_entries:
        es.index(index=index_name, body={
            "bucket_owner": log_entry.bucket_owner,
            "bucket": log_entry.bucket,
            "timestamp": log_entry.timestamp,
            "remote_ip": log_entry.remote_ip,
            "requester": log_entry.requester,
            "request_id": log_entry.request_id,
            "operation": log_entry.operation,
            "s3_key": log_entry.s3_key,
            "request_uri": log_entry.request_uri,
            "status_code": log_entry.status_code,
            "error_code": log_entry.error_code,
            "bytes_sent": log_entry.bytes_sent,
            "object_size": log_entry.object_size,
            "total_time": log_entry.total_time,
            "turn_around_time": log_entry.turn_around_time,
            "referrer": log_entry.referrer,
            "user_agent": log_entry.user_agent,
            "version_id": log_entry.version_id
        })

    ######################################################################
    # End the X-Ray sub-segment
    ######################################################################
    xray_recorder.end_subsegment()
# Destination bucket and default key for the batch job's upload.
bucket = os.getenv('DST_BUCKET', '1233343-src')
key = 'upload.png'
LOGGER = Logger()

if __name__ == '__main__':
    # Start a segment if no segment exist
    LOGGER.info('starting segment.')
    segment = xray_recorder.begin_segment('BatchJob')
    # This will add the key value pair to segment as it is active
    LOGGER.info('adding annotation to segment.')
    xray_recorder.put_annotation('XRayOnBatch', 'What sourcery is this?')
    # BUG FIX: 'import platform' had been fused into the comment above,
    # leaving `platform` undefined at the call below.
    import platform
    # Record the interpreter version as metadata on the active segment.
    xray_recorder.put_metadata('Python Run-Time', platform.python_version())
    if xray_recorder.is_sampled():
        LOGGER.info('segment was sampled.')
        xray_recorder.put_annotation('job_id', os.getenv('AWS_BATCH_JOB_ID'))
    # Download the picture and push it to S3 under a randomized key.
    object_key = f"{id_generator()}.png"
    LOGGER.info(f"downloading picture from {url}")
    downloaded_file = requests.get(url)
    LOGGER.info(f"Uploading {object_key} to bucket: {bucket}")
    s3.put_object(Body=downloaded_file.content, Bucket=bucket, Key=object_key)
    xray_recorder.end_segment()