async def _push_to_dynatrace(context: Context, lines_batch: List[IngestLine]):
    ingest_input = "\n".join([line.to_string() for line in lines_batch])
    if context.print_metric_ingest_input:
        context.log("Ingest input is: ")
        context.log(ingest_input)
    ingest_response = await context.session.post(
        url=f"{context.dynatrace_url.rstrip('/')}/api/v2/metrics/ingest",
        headers={
            "Authorization": f"Api-Token {context.dynatrace_api_key}",
            "Content-Type": "text/plain; charset=utf-8"
        },
        data=ingest_input,
        verify_ssl=context.require_valid_certificate
    )

    if ingest_response.status == 401:
        context.dynatrace_connectivity = DynatraceConnectivity.ExpiredToken
        raise Exception("Expired token")
    elif ingest_response.status == 403:
        context.dynatrace_connectivity = DynatraceConnectivity.WrongToken
        raise Exception("Wrong token - missing 'Ingest metrics using API V2' permission")
    elif ingest_response.status == 404 or ingest_response.status == 405:
        context.dynatrace_connectivity = DynatraceConnectivity.WrongURL
        raise Exception("Wrong URL")

    ingest_response_json = await ingest_response.json()
    context.dynatrace_request_count[ingest_response.status] = \
        context.dynatrace_request_count.get(ingest_response.status, 0) + 1
    context.dynatrace_ingest_lines_ok_count += ingest_response_json.get("linesOk", 0)
    context.dynatrace_ingest_lines_invalid_count += ingest_response_json.get("linesInvalid", 0)
    context.log(f"Ingest response: {ingest_response_json}")
    await log_invalid_lines(context, ingest_response_json, lines_batch)
async def push_ingest_lines(context: Context, fetch_metric_results: List[IngestLine]):
    lines_sent = 0
    maximum_lines_threshold = context.maximum_metric_data_points_per_minute
    start_time = time.time()
    try:
        lines_batch = []
        for result in fetch_metric_results:
            lines_batch.append(result)
            lines_sent += 1
            if len(lines_batch) >= context.metric_ingest_batch_size:
                await _push_to_dynatrace(context, lines_batch)
                lines_batch = []
            if lines_sent >= maximum_lines_threshold:
                await _push_to_dynatrace(context, lines_batch)
                lines_dropped_count = len(fetch_metric_results) - maximum_lines_threshold
                context.dynatrace_ingest_lines_dropped_count = lines_dropped_count
                context.log(f"Number of metric lines exceeded maximum {maximum_lines_threshold}, dropped {lines_dropped_count} lines")
                return
        if lines_batch:
            await _push_to_dynatrace(context, lines_batch)
    except Exception as e:
        if isinstance(e, InvalidURL):
            context.dynatrace_connectivity = DynatraceConnectivity.WrongURL
        context.log(f"Failed to push ingest lines to Dynatrace due to {type(e).__name__} {e}")
    finally:
        context.push_to_dynatrace_execution_time = time.time() - start_time
        context.log(f"Finished uploading metric ingest lines to Dynatrace in {context.push_to_dynatrace_execution_time} s")
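# A minimal, self-contained sketch of the batching behaviour above, with the
# Context/IngestLine types and Dynatrace calls replaced by stubs (batch_size
# and max_lines values here are illustrative, not the real defaults): a batch
# is flushed every `batch_size` entries, and once `max_lines` entries have
# been sent the remainder is dropped. Note it mirrors the quirk that the
# threshold branch can flush an empty trailing batch.
def batch_lines(results, batch_size=1000, max_lines=2000):
    batch, sent, flushed = [], 0, []
    for result in results:
        batch.append(result)
        sent += 1
        if len(batch) >= batch_size:
            flushed.append(batch)
            batch = []
        if sent >= max_lines:
            flushed.append(batch)
            return flushed, len(results) - max_lines  # batches, dropped count
    if batch:
        flushed.append(batch)
    return flushed, 0

batches, dropped = batch_lines(range(2500), batch_size=1000, max_lines=2000)
assert [len(b) for b in batches] == [1000, 1000, 0] and dropped == 500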
async def get_and_upload(ctx: Context, project_id: str, svc_def: GCPService) -> Iterable[Entity]:
    # `fun` is not defined in this snippet: it is the wrapped extractor
    # coroutine captured from an enclosing scope (e.g. a decorator/factory).
    try:
        entities = await fun(ctx, project_id, svc_def)
    except Exception as e:
        ctx.log(f"Failed to finish entity extractor task, reason is {type(e).__name__} {e}")
        return []
    return entities
async def run_fetch_metric(
        context: Context,
        project_id: str,
        service: GCPService,
        metric: Metric
):
    try:
        return await fetch_metric(context, project_id, service, metric)
    except Exception as e:
        context.log(f"Failed to finish task for [{metric.google_metric}], reason is {type(e).__name__} {e}")
        return []
async def get_all_accessible_projects(context: Context, session: ClientSession):
    url = "https://cloudresourcemanager.googleapis.com/v1/projects"
    headers = {"Authorization": f"Bearer {context.token}"}
    response = await session.get(url, headers=headers)
    response_json = await response.json()
    all_projects = [project["projectId"] for project in response_json.get("projects", [])]
    context.log("Access to following projects: " + ", ".join(all_projects))
    return all_projects
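# The v1 projects.list endpoint pages its results with nextPageToken/pageToken;
# the helper above reads only the first page. A hedged sketch of a paginated
# variant under the same assumptions (same Context/ClientSession as above,
# same documented response shape), not the project's actual implementation:
async def get_all_accessible_projects_paged(context: Context, session: ClientSession):
    url = "https://cloudresourcemanager.googleapis.com/v1/projects"
    headers = {"Authorization": f"Bearer {context.token}"}
    params, all_projects = {}, []
    while True:
        response = await session.get(url, headers=headers, params=params)
        response_json = await response.json()
        all_projects.extend(p["projectId"] for p in response_json.get("projects", []))
        token = response_json.get("nextPageToken")
        if not token:
            return all_projects
        params["pageToken"] = token  # request the next page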
def start(args):
    ctx = Context(s3_path=args.context, cache_dir=args.cache_dir, workload_id=args.workload_id)
    package.install_packages(ctx.python_packages, ctx.bucket)

    api = ctx.apis_id_map[args.api]
    model = ctx.models[api["model_name"]]
    tf_lib.set_logging_verbosity(ctx.environment["log_level"]["tensorflow"])

    local_cache["ctx"] = ctx
    local_cache["api"] = api
    local_cache["model"] = model

    if not os.path.isdir(args.model_dir):
        aws.download_and_extract_zip(model["key"], args.model_dir, ctx.bucket)

    for column_name in model["feature_columns"] + [model["target_column"]]:
        if ctx.is_transformed_column(column_name):
            trans_impl, _ = ctx.get_transformer_impl(column_name)
            local_cache["trans_impls"][column_name] = trans_impl
            transformed_column = ctx.transformed_columns[column_name]
            input_args_schema = transformed_column["inputs"]["args"]
            # cache aggregates and constants in memory
            if input_args_schema is not None:
                local_cache["transform_args_cache"][column_name] = ctx.populate_args(input_args_schema)

    channel = implementations.insecure_channel("localhost", args.tf_serve_port)
    local_cache["stub"] = prediction_service_pb2.beta_create_PredictionService_stub(channel)

    local_cache["required_inputs"] = tf_lib.get_base_input_columns(model["name"], ctx)

    # wait a bit for tf serving to start before querying metadata
    limit = 600
    for i in range(limit):
        try:
            local_cache["metadata"] = run_get_model_metadata()
            break
        except Exception as e:
            if i == limit - 1:
                logger.exception(
                    "An error occurred, see `cx logs api {}` for more details.".format(api["name"]))
                sys.exit(1)
        time.sleep(1)

    logger.info("Serving model: {}".format(model["name"]))
    serve(app, listen="*:{}".format(args.port))
async def log_invalid_lines(context: Context, ingest_response_json: Dict, lines_batch: List[IngestLine]):
    error = ingest_response_json.get("error", None)
    if error is None:
        return

    invalid_lines = error.get("invalidLines", [])
    if invalid_lines:
        for invalid_line in invalid_lines:
            line_index = invalid_line.get("line", 0) - 1
            if line_index > -1:
                error_message = invalid_line.get("error", "")
                context.log(f"INVALID LINE: '{lines_batch[line_index].to_string()}', reason: '{error_message}'")
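# Shape of the ingest response consumed above (a representative example built
# from the fields the code reads, not a verbatim server reply): "line" is
# 1-based, which is why the helper subtracts 1 before indexing lines_batch.
_EXAMPLE_INGEST_RESPONSE = {
    "linesOk": 2,
    "linesInvalid": 1,
    "error": {
        "invalidLines": [
            {"line": 3, "error": "metric key validation failed"},  # illustrative message
        ]
    },
}
# lines_batch index of the offending line:
assert _EXAMPLE_INGEST_RESPONSE["error"]["invalidLines"][0]["line"] - 1 == 2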
async def delete_metric_descriptor(context: Context, metric_type: str):
    context.log(f"Removing old descriptor for '{metric_type}'")
    response = await context.session.request(
        "DELETE",
        url=f"https://monitoring.googleapis.com/v3/projects/{context.project_id_owner}/metricDescriptors/{metric_type}",
        headers={"Authorization": f"Bearer {context.token}"}
    )
    if response.status != 200:
        response_body = await response.json()
        context.log(f"Failed to remove descriptor for '{metric_type}' due to '{response_body}'")
async def create_metric_descriptor(context: Context, metric_descriptor: Dict, metric_type: str):
    context.log(f"Creating missing metric descriptor for '{metric_type}'")
    response = await context.session.request(
        "POST",
        url=f"https://monitoring.googleapis.com/v3/projects/{context.project_id_owner}/metricDescriptors",
        data=json.dumps(metric_descriptor),
        headers={"Authorization": f"Bearer {context.token}"}
    )
    if response.status > 202:
        response_body = await response.json()
        context.log(f"Failed to create descriptor for '{metric_type}' due to '{response_body}'")
def main():
    parser = argparse.ArgumentParser()
    na = parser.add_argument_group("required named arguments")
    na.add_argument("--workload-id", required=True, help="Workload ID")
    na.add_argument("--context", required=True,
                    help="S3 path to context (e.g. s3://bucket/path/to/context.json)")
    na.add_argument("--cache-dir", required=True, help="Local path for the context cache")
    na.add_argument("--python-packages", help="Resource ids of packages to build")
    na.add_argument("--build", action="store_true", help="Flag to determine mode (build vs install)")
    args, _ = parser.parse_known_args()

    if args.build:
        build(args)
    else:
        ctx = Context(s3_path=args.context, cache_dir=args.cache_dir, workload_id=args.workload_id)
        install_packages(ctx.python_packages, ctx.bucket)
def _on_apply_context(self, context_data):
    if 'context' in context_data:
        context = Context(context_data['context'])
        self.contexts[str(context_data['tid'])] = context

        sym = ''
        if 'pc' in context_data['context']:
            name = context_data['ptr']
            # guard against a missing 'symbol' key, as the sibling versions do
            if 'symbol' in context_data['context']['pc'] and \
                    context_data['context']['pc']['symbol']['name'] is not None:
                sym = context_data['context']['pc']['symbol']['moduleName']
                sym += ' - '
                sym += context_data['context']['pc']['symbol']['name']
        else:
            name = context_data['ptr']

        self._app_window.threads.add_context(context_data, library_onload=self.loading_library)

        if self.loading_library is None and context_data['reason'] == 0:
            self.log('hook %s %s @thread := %d' % (name, sym, context_data['tid']))

        # if len(self.contexts.keys()) > 1 and self._app_window.context_panel.have_context():
        #     return
    else:
        self._arch = context_data['arch']
        self._platform = context_data['platform']
        self._pointer_size = context_data['pointerSize']
        self.java_available = context_data['java']
        str_fmt = ('injected into := {0:d}'.format(self.pid))
        self.log(str_fmt)

    self.context_tid = context_data['tid']
    if self._loading_library is not None:
        self._loading_library = None
def _on_apply_context(self, context_data):
    reason = context_data['reason']
    if reason == -1:  # set initial context
        self._arch = context_data['arch']
        self._platform = context_data['platform']
        self._pointer_size = context_data['pointerSize']
        self.java_available = context_data['java']
        str_fmt = ('injected into := {0:d}'.format(self.pid))
        self.log_event(str_fmt)

        # unlock java on loads
        if self.java_available:
            self._app_window.hooks_panel.new_menu.addAction(
                'Java class loading', self._app_window.hooks_panel._on_add_java_on_load)
    elif 'context' in context_data:
        context = Context(context_data['context'])
        self.contexts[str(context_data['tid'])] = context

        sym = ''
        if 'pc' in context_data['context']:
            name = context_data['ptr']
            if 'symbol' in context_data['context']['pc'] and \
                    context_data['context']['pc']['symbol']['name'] is not None:
                sym = context_data['context']['pc']['symbol']['moduleName']
                sym += ' - '
                sym += context_data['context']['pc']['symbol']['name']
        else:
            name = context_data['ptr']

        if context_data['reason'] == 0:
            self.log_event('hook %s %s @thread := %d' % (name, sym, context_data['tid']))

    if not reason == -1 and self.context_tid == 0:
        self.context_tid = context_data['tid']
def _on_apply_context(self, context_data):
    if 'context' in context_data:
        context = Context(context_data['context'])
        self.contexts[str(context_data['tid'])] = context

        sym = ''
        if 'pc' in context_data['context']:
            name = context_data['ptr']
            if 'symbol' in context_data['context']['pc'] and \
                    context_data['context']['pc']['symbol']['name'] is not None:
                sym = context_data['context']['pc']['symbol']['moduleName']
                sym += ' - '
                sym += context_data['context']['pc']['symbol']['name']
        else:
            name = context_data['ptr']

        self._app_window.threads.add_context(context_data, library_onload=self.loading_library)

        if self.loading_library is None and context_data['reason'] == 0:
            self.log('hook %s %s @thread := %d' % (name, sym, context_data['tid']))

        if context_data['is_java']:
            self._app_window.show_main_tab('java-explorer')
    else:
        self._arch = context_data['arch']
        self._platform = context_data['platform']
        self._pointer_size = context_data['pointerSize']
        self.java_available = context_data['java']
        str_fmt = ('injected into := {0:d}'.format(self.pid))
        self.log(str_fmt)

    # update current context tid
    self.context_tid = context_data['tid']
    if self._loading_library is not None:
        self._loading_library = None
async def _check_x_goog_user_project_header_permissions(context: Context, project_id: str):
    if project_id in context.use_x_goog_user_project_header:
        return

    service_usage_booking = os.environ.get('SERVICE_USAGE_BOOKING', 'source')
    if service_usage_booking != 'destination':
        context.use_x_goog_user_project_header[project_id] = False
        return

    url = f"https://monitoring.googleapis.com/v3/projects/{project_id}/metricDescriptors"
    params = [('pageSize', 1)]
    headers = {
        "Authorization": f"Bearer {context.token}",
        "x-goog-user-project": project_id
    }
    resp = await context.gcp_session.get(url=url, params=params, headers=headers)
    page = await resp.json()

    if resp.status == 200:
        context.use_x_goog_user_project_header[project_id] = True
    elif resp.status == 403 and 'serviceusage.services.use' in page['error']['message']:
        context.use_x_goog_user_project_header[project_id] = False
        context.log(project_id, "Ignoring SERVICE_USAGE_BOOKING. Missing permission: 'serviceusage.services.use'")
    else:
        context.log(project_id, f"Unexpected response when checking 'x-goog-user-project' header: {str(page)}")
async def fetch_ingest_lines_task(context: Context, project_id: str, services: List[GCPService]) -> List[IngestLine]:
    fetch_metric_tasks = []
    topology_tasks = []
    topology_task_services = []

    for service in services:
        if service.name in entities_extractors:
            topology_task = entities_extractors[service.name](context, project_id, service)
            topology_tasks.append(topology_task)
            topology_task_services.append(service)
    fetch_topology_results = await asyncio.gather(*topology_tasks, return_exceptions=True)

    skipped_services = []
    for service in services:
        if service in topology_task_services:
            service_topology = fetch_topology_results[topology_task_services.index(service)]
            if not service_topology:
                skipped_services.append(service.name)
                continue  # skip fetching the metrics because there are no instances
        for metric in service.metrics:
            fetch_metric_task = run_fetch_metric(
                context=context,
                project_id=project_id,
                service=service,
                metric=metric
            )
            fetch_metric_tasks.append(fetch_metric_task)

    if skipped_services:
        skipped_services_string = ', '.join(skipped_services)
        context.log(project_id, f"Skipped fetching metrics for {skipped_services_string} due to no instances detected")

    fetch_metric_results = await asyncio.gather(*fetch_metric_tasks, return_exceptions=True)
    entity_id_map = build_entity_id_map(fetch_topology_results)
    flat_metric_results = flatten_and_enrich_metric_results(fetch_metric_results, entity_id_map)
    return flat_metric_results
async def create_metric_descriptors_if_missing(context: Context):
    try:
        dynatrace_metrics_descriptors = await context.session.request(
            'GET',
            url=f"https://monitoring.googleapis.com/v3/projects/{context.project_id_owner}/metricDescriptors",
            params=[('filter', f'metric.type = starts_with("{SELF_MONITORING_METRIC_PREFIX}")')],
            headers={"Authorization": f"Bearer {context.token}"}
        )
        dynatrace_metrics_descriptors_json = await dynatrace_metrics_descriptors.json()
        types = [metric.get("type", "")
                 for metric in dynatrace_metrics_descriptors_json.get("metricDescriptors", [])]
        for metric_type, metric_descriptor in SELF_MONITORING_METRIC_MAP.items():
            if metric_type not in types:
                context.log(f"Creating missing metric descriptor for '{metric_type}'")
                await context.session.request(
                    "POST",
                    url=f"https://monitoring.googleapis.com/v3/projects/{context.project_id_owner}/metricDescriptors",
                    data=json.dumps(metric_descriptor),
                    headers={"Authorization": f"Bearer {context.token}"}
                )
    except Exception as e:
        context.log(f"Failed to create self monitoring metrics descriptors, reason is {type(e).__name__} {e}")
def reverse_file(filename, symbol, options):
    ctx = Context()
    ctx.sectionsname = False
    ctx.color = False
    ctx.filename = filename
    ctx.entry = symbol
    ctx.quiet = True

    for o in options:
        if o == "--raw x86":
            ctx.raw_type = "x86"
        elif o == "--raw x64":
            ctx.raw_type = "x64"
        elif o.startswith("--rawbase"):
            ctx.raw_base = int(o.split(" ")[1], 16)

    sio = StringIO()
    with redirect_stdout(sio):
        reverse(ctx)

    postfix = '{0}.rev'.format('' if symbol is None else '_' + symbol)
    with open(filename.replace('.bin', postfix)) as f:
        assert_equal(sio.getvalue(), f.read())
def context(self, scope):
    # compute each collection once and reuse it; the original recomputed
    # every one of these by calling the private helpers a second time
    logins = self.__logins()
    passwords = self.__passwords()
    credentials = self.__credentials()
    pipelines = self.__pipelines()
    return Context(name=self.name,
                   scope=scope,
                   logins=logins,
                   passwords=passwords,
                   credentials=credentials,
                   pipelines=pipelines,
                   generators=self.__generators(),
                   outputs=self.outputs)
def run_job(args):
    should_ingest, features_to_validate, features_to_aggregate, features_to_transform, training_datasets = parse_args(args)

    resource_id_list = (features_to_validate + features_to_aggregate
                        + features_to_transform + training_datasets)

    try:
        ctx = Context(s3_path=args.context, cache_dir=args.cache_dir, workload_id=args.workload_id)
    except Exception as e:
        logger.exception("An error occurred, see the logs for more details.")
        sys.exit(1)

    try:
        spark = None  # For the finally clause
        spark = get_spark_session(ctx.workload_id)
        raw_df = ingest_raw_dataset(spark, ctx, features_to_validate, should_ingest)

        if len(features_to_aggregate) > 0:
            run_custom_aggregators(spark, ctx, features_to_aggregate, raw_df)

        if len(features_to_transform) > 0:
            validate_transformers(spark, ctx, features_to_transform, raw_df)

        create_training_datasets(spark, ctx, training_datasets, raw_df)

        util.log_job_finished(ctx.workload_id)
    except CortexException as e:
        e.wrap("error")
        logger.error(str(e))
        logger.exception(
            "An error occurred, see `cx logs {} {}` for more details.".format(
                ctx.id_map[resource_id_list[0]]["resource_type"],
                ctx.id_map[resource_id_list[0]]["name"]))
        sys.exit(1)
    except Exception as e:
        logger.exception(
            "An error occurred, see `cx logs {} {}` for more details.".format(
                ctx.id_map[resource_id_list[0]]["resource_type"],
                ctx.id_map[resource_id_list[0]]["name"]))
        sys.exit(1)
    finally:
        if spark is not None:
            spark.stop()
async def push_self_monitoring_time_series(context: Context, is_retry: bool = False):
    try:
        context.log("Pushing self monitoring time series to GCP Monitor...")
        await create_metric_descriptors_if_missing(context)

        time_series = create_self_monitoring_time_series(context)
        self_monitoring_response = await context.session.request(
            "POST",
            url=f"https://monitoring.googleapis.com/v3/projects/{context.project_id_owner}/timeSeries",
            data=json.dumps(time_series),
            headers={"Authorization": f"Bearer {context.token}"}
        )
        status = self_monitoring_response.status
        if status == 500 and not is_retry:
            context.log("GCP Monitor responded with 500 Internal Error, it may occur when metric descriptor is updated. Retrying after 5 seconds")
            await asyncio.sleep(5)
            await push_self_monitoring_time_series(context, True)
        elif status != 200:
            self_monitoring_response_json = await self_monitoring_response.json()
            context.log(f"Failed to push self monitoring time series, error is: {status} => {self_monitoring_response_json}")
        else:
            context.log("Finished pushing self monitoring time series to GCP Monitor")
        self_monitoring_response.close()
    except Exception as e:
        context.log(f"Failed to push self monitoring time series, reason is {type(e).__name__} {e}")
def build(args):
    ctx = Context(s3_path=args.context, cache_dir=args.cache_dir, workload_id=args.workload_id)

    python_packages_list = [ctx.pp_id_map[id] for id in args.python_packages.split(",")]
    python_packages = {
        python_package["name"]: python_package for python_package in python_packages_list
    }

    ctx.upload_resource_status_start(*python_packages_list)
    try:
        build_packages(python_packages, ctx.bucket)
        util.log_job_finished(ctx.workload_id)
    except CortexException as e:
        e.wrap("error")
        logger.exception(e)
        ctx.upload_resource_status_failed(*python_packages_list)
    except Exception as e:
        logger.exception(e)
        ctx.upload_resource_status_failed(*python_packages_list)
    else:
        ctx.upload_resource_status_success(*python_packages_list)
async def push_self_monitoring_time_series(context: Context):
    try:
        context.log("Pushing self monitoring time series to GCP Monitor...")
        await create_metric_descriptors_if_missing(context)

        time_series = create_self_monitoring_time_series(context)
        self_monitoring_response = await context.session.request(
            "POST",
            url=f"https://monitoring.googleapis.com/v3/projects/{context.project_id_owner}/timeSeries",
            data=json.dumps(time_series),
            headers={"Authorization": f"Bearer {context.token}"}
        )
        status = self_monitoring_response.status
        if status != 200:
            self_monitoring_response_json = await self_monitoring_response.json()
            context.log(f"Failed to push self monitoring time series, error is: {status} => {self_monitoring_response_json}")
        else:
            context.log("Finished pushing self monitoring time series to GCP Monitor")
        self_monitoring_response.close()
    except Exception as e:
        context.log(f"Failed to push self monitoring time series, reason is {type(e).__name__} {e}")
async def generic_paging(url: Text, ctx: Context, mapper: Callable[[Dict[Any, Any]], List[Entity]]) -> List[Entity]:
    """Apply mapper function on any page returned by gcp api url."""
    headers = {
        "Accept": "application/json",
        "Authorization": f"Bearer {ctx.token}"
    }

    get_page = True
    params: Dict[Text, Text] = {}
    entities: List[Entity] = []
    while get_page:
        resp = await ctx.gcp_session.request("GET", params=params, url=url, headers=headers)

        try:
            page = await resp.json()
        except Exception:
            error_message = await resp.text()
            error_message = ' '.join(error_message.split())
            ctx.log(f'Failed to decode JSON. {url} {error_message}')
            return entities

        if resp.status >= 400:
            ctx.log(f'Failed to retrieve information from googleapis. {url} {page}')
            return entities

        try:
            entities.extend(mapper(page))
        except Exception as ex:
            ctx.log(f"Failed to map response from googleapis. {url} {ex}")
            return entities

        get_page = "nextPageToken" in page
        if get_page:
            params["pageToken"] = page.get("nextPageToken", None)

    return entities
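# A hedged usage sketch for generic_paging: the mapper receives each raw page
# dict and returns entities. The page key ("items"), the Entity keyword
# arguments, and the zones URL below are illustrative assumptions, not taken
# from the source.
def example_mapper(page: Dict[Any, Any]) -> List[Entity]:
    return [
        Entity(id=item["id"], name=item.get("name", ""))  # hypothetical fields
        for item in page.get("items", [])
    ]

# entities = await generic_paging(
#     "https://compute.googleapis.com/compute/v1/projects/my-project/zones",
#     ctx, example_mapper)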
async def handle_event(event: Dict, event_context, project_id_owner: Optional[str]):
    # `event_context is Dict` compared against the typing alias itself and was
    # always False; an isinstance check (as in the later variant) is intended
    if isinstance(event_context, Dict):
        context = LoggingContext(event_context.get("execution_id", None))
    else:
        context = LoggingContext(None)

    selected_services = None
    if "GCP_SERVICES" in os.environ:
        selected_services_string = os.environ.get("GCP_SERVICES", "")
        selected_services = selected_services_string.split(",") if selected_services_string else []
    services = load_supported_services(context, selected_services)

    async with aiohttp.ClientSession() as session:
        setup_start_time = time.time()
        token = await create_token(context, session)

        if token is None:
            context.log("Cannot proceed without authorization token, stopping the execution")
            return
        if not isinstance(token, str):
            raise Exception(f"Failed to fetch access token, got non string value: {token}")
        context.log("Successfully obtained access token")

        if not project_id_owner:
            project_id_owner = get_project_id_from_environment()

        dynatrace_api_key = await fetch_dynatrace_api_key(session=session, project_id=project_id_owner, token=token)
        dynatrace_url = await fetch_dynatrace_url(session=session, project_id=project_id_owner, token=token)
        print_metric_ingest_input = \
            "PRINT_METRIC_INGEST_INPUT" in os.environ and os.environ["PRINT_METRIC_INGEST_INPUT"].upper() == "TRUE"

        context = Context(
            session=session,
            project_id_owner=project_id_owner,
            token=token,
            execution_time=datetime.utcnow(),
            execution_interval_seconds=60 * 1,
            dynatrace_api_key=dynatrace_api_key,
            dynatrace_url=dynatrace_url,
            print_metric_ingest_input=print_metric_ingest_input,
            scheduled_execution_id=context.scheduled_execution_id
        )

        projects_ids = await get_all_accessible_projects(context, session)
        context.setup_execution_time = (time.time() - setup_start_time)

        fetch_gcp_data_start_time = time.time()
        fetch_ingest_lines_tasks = [fetch_ingest_lines_task(context, project_id, services) for project_id in projects_ids]
        ingest_lines_per_project = await asyncio.gather(*fetch_ingest_lines_tasks, return_exceptions=True)
        ingest_lines = [ingest_line for sublist in ingest_lines_per_project for ingest_line in sublist]
        context.fetch_gcp_data_execution_time = time.time() - fetch_gcp_data_start_time
        context.log(f"Fetched GCP data in {context.fetch_gcp_data_execution_time} s")

        await push_ingest_lines(context, ingest_lines)
        await push_self_monitoring_time_series(context)

        await session.close()
def train(args):
    ctx = Context(s3_path=args.context, cache_dir=args.cache_dir, workload_id=args.workload_id)

    model = ctx.models_id_map[args.model]
    logger.info("Training")

    with util.Tempdir(ctx.cache_dir) as temp_dir:
        model_dir = os.path.join(temp_dir, "model_dir")
        ctx.upload_resource_status_start(model)

        try:
            model_impl = ctx.get_model_impl(model["name"])
            train_util.train(model["name"], model_impl, ctx, model_dir)
            ctx.upload_resource_status_success(model)

            logger.info("Caching")
            logger.info("Caching model " + model["name"])
            model_export_dir = os.path.join(model_dir, "export", "estimator")
            model_zip_path = os.path.join(temp_dir, "model.zip")
            util.zip_dir(model_export_dir, model_zip_path)
            aws.upload_file_to_s3(local_path=model_zip_path, key=model["key"], bucket=ctx.bucket)

            util.log_job_finished(ctx.workload_id)
        except CortexException as e:
            ctx.upload_resource_status_failed(model)
            e.wrap("error")
            logger.error(str(e))
            logger.exception(
                "An error occurred, see `cx logs model {}` for more details.".format(model["name"]))
            sys.exit(1)
        except Exception as e:
            ctx.upload_resource_status_failed(model)
            logger.exception(
                "An error occurred, see `cx logs model {}` for more details.".format(model["name"]))
            sys.exit(1)
async def handle_event(event: Dict, event_context, project_id_owner: Optional[str],
                       projects_ids: Optional[List[str]] = None):
    if isinstance(event_context, Dict):
        context = LoggingContext(event_context.get("execution_id", None))
    else:
        context = LoggingContext(None)

    selected_services = None
    if "GCP_SERVICES" in os.environ:
        selected_services_string = os.environ.get("GCP_SERVICES", "")
        selected_services = selected_services_string.split(",") if selected_services_string else []
        # set default featureset if featureset not present in env variable
        for i, service in enumerate(selected_services):
            if "/" not in service:
                selected_services[i] = f"{service}/default"
    services = load_supported_services(context, selected_services)

    async with init_gcp_client_session() as gcp_session, init_dt_client_session() as dt_session:
        setup_start_time = time.time()
        token = await create_token(context, gcp_session)

        if token is None:
            context.log("Cannot proceed without authorization token, stopping the execution")
            return
        if not isinstance(token, str):
            raise Exception(f"Failed to fetch access token, got non string value: {token}")
        context.log("Successfully obtained access token")

        if not project_id_owner:
            project_id_owner = get_project_id_from_environment()

        dynatrace_api_key = await fetch_dynatrace_api_key(gcp_session=gcp_session, project_id=project_id_owner, token=token)
        dynatrace_url = await fetch_dynatrace_url(gcp_session=gcp_session, project_id=project_id_owner, token=token)
        print_metric_ingest_input = \
            "PRINT_METRIC_INGEST_INPUT" in os.environ and os.environ["PRINT_METRIC_INGEST_INPUT"].upper() == "TRUE"

        context = Context(
            gcp_session=gcp_session,
            dt_session=dt_session,
            project_id_owner=project_id_owner,
            token=token,
            execution_time=datetime.utcnow(),
            execution_interval_seconds=60 * 1,
            dynatrace_api_key=dynatrace_api_key,
            dynatrace_url=dynatrace_url,
            print_metric_ingest_input=print_metric_ingest_input,
            scheduled_execution_id=context.scheduled_execution_id
        )

        if not projects_ids:
            projects_ids = await get_all_accessible_projects(context, gcp_session, token)

        setup_time = (time.time() - setup_start_time)
        context.setup_execution_time = {project_id: setup_time for project_id in projects_ids}

        context.start_processing_timestamp = time.time()

        process_project_metrics_tasks = [
            process_project_metrics(context, project_id, services)
            for project_id in projects_ids
        ]
        await asyncio.gather(*process_project_metrics_tasks, return_exceptions=True)
        context.log(f"Fetched and pushed GCP data in {time.time() - context.start_processing_timestamp} s")
        context.log(f"Processed {sum(context.dynatrace_ingest_lines_ok_count.values())} lines")

        await push_self_monitoring_time_series(context)

        await gcp_session.close()
        await dt_session.close()
def parse_args():
    # Parse arguments
    parser = ArgumentParser(description=
        'Reverse engineering for x86/ARM binaries. Generation of pseudo-C. '
        'Supported formats : ELF, PE. https://github.com/joelpx/reverse')
    parser.add_argument('filename', nargs='?', metavar='FILENAME')
    parser.add_argument('-nc', '--nocolor', action='store_true')
    parser.add_argument('-g', '--graph', action='store_true',
            help='Generate an html flow graph. See d3/index.html.')
    parser.add_argument('--nocomment', action='store_true',
            help="Don't print comments")
    parser.add_argument('--noandif', action='store_true',
            help="Print normal 'if' instead of 'andif'")
    parser.add_argument('--datasize', type=int, default=30, metavar='N',
            help='default 30, maximum of chars to display for strings or bytes array.')
    parser.add_argument('-x', '--entry', metavar='SYMBOLNAME|0xXXXXX|EP',
            help='default main. EP stands for entry point.')
    parser.add_argument('--vim', action='store_true',
            help='Generate syntax colors for vim')
    parser.add_argument('-s', '--symbols', action='store_true',
            help='Print all symbols')
    parser.add_argument('-c', '--calls', action='store_true',
            help='Print all calls which are in the section containing the address '
                 'given with -x.')
    parser.add_argument('--raw', metavar='x86|x64|arm',
            help='Consider the input file as a raw binary')
    parser.add_argument('--dump', action='store_true',
            help='Dump asm without decompilation')
    parser.add_argument('--lines', type=int, default=30, metavar='N',
            help='Max lines to dump')
    parser.add_argument('-i', '--interactive', action='store_true',
            help='Interactive mode')
    parser.add_argument('--symfile', metavar='FILENAME', type=FileType('r'),
            help=('Add user symbols for better readability of the analysis. '
                  'Line format: ADDRESS_HEXA SYMBOL_NAME'))
    parser.add_argument('-d', '--opt_debug', action='store_true')
    parser.add_argument('-ns', '--nosectionsname', action='store_true')

    args = parser.parse_args()

    ctx = Context()
    ctx.debug = args.opt_debug
    ctx.print_andif = not args.noandif
    ctx.color = not args.nocolor
    ctx.comments = not args.nocomment
    ctx.sectionsname = not args.nosectionsname
    ctx.max_data_size = args.datasize
    ctx.filename = args.filename
    ctx.raw_type = args.raw
    ctx.symfile = args.symfile
    ctx.syms = args.symbols
    ctx.calls = args.calls
    ctx.entry = args.entry
    ctx.dump = args.dump
    ctx.vim = args.vim
    ctx.interactive = args.interactive
    ctx.lines = args.lines
    ctx.graph = args.graph
    return ctx
VERSION = "0.3" import sys try: import settings except ImportError as e: print("Error: unable to load 'settings.py': %s" % e) sys.exit(1) from lib.context import Context from lib.core import Core from lib.webserver import Webserver from lib.log import get_logger import logging if __name__ == '__main__': get_logger().event("main", "started") logging.info( "Yelena, version {version} starting ...".format(version=VERSION)) context = Context() core = Core(context=context) webserver = Webserver(core=core, context=context) webserver.start() core.start()
def parse_args():
    # Parse arguments
    parser = ArgumentParser(description=
        'Reverse engineering for x86/ARM/MIPS binaries. Generation of pseudo-C. '
        'Supported formats : ELF, PE. More commands available in the interactive'
        ' mode. https://github.com/joelpx/reverse')
    parser.add_argument('filename', nargs='?', metavar='FILENAME')
    parser.add_argument('-nc', '--nocolor', action='store_true')
    parser.add_argument('-g', '--graph', action='store_true',
            help='Generate an html flow graph. See d3/index.html.')
    parser.add_argument('--nocomment', action='store_true',
            help="Don't print comments")
    parser.add_argument('--noandif', action='store_true',
            help="Print normal 'if' instead of 'andif'")
    parser.add_argument('--datasize', type=int, default=30, metavar='N',
            help='default 30, maximum of chars to display for strings or bytes array.')
    parser.add_argument('-x', '--entry', metavar='SYMBOLNAME|0xXXXXX|EP',
            help='Pseudo-decompilation, default is main. EP stands for entry point.')
    parser.add_argument('--vim', action='store_true',
            help='Generate syntax colors for vim')
    parser.add_argument('-s', '--symbols', action='store_true',
            help='Print all symbols')
    parser.add_argument('-c', '--calls', metavar='SECTION_NAME', type=str,
            help='Print all calls which are in the given section')
    parser.add_argument('--sections', action='store_true',
            help='Print all sections')
    parser.add_argument('--dump', action='store_true',
            help='Dump asm without decompilation')
    parser.add_argument('-l', '--lines', type=int, default=30, metavar='N',
            help='Max lines used with --dump')
    parser.add_argument('--bytes', action='store_true',
            help='Print instruction bytes')
    parser.add_argument('-i', '--interactive', action='store_true',
            help='Interactive mode')
    parser.add_argument('-d', '--opt_debug', action='store_true')
    parser.add_argument('-ns', '--nosectionsname', action='store_true')
    parser.add_argument('--raw', metavar='x86|x64|arm|mips|mips64',
            help='Consider the input file as a raw binary')
    parser.add_argument('--rawbase', metavar='0xXXXXX',
            help='Set base address of a raw file (default=0)')
    parser.add_argument('--rawbe', action='store_true',
            help="If not set it's in little endian")

    args = parser.parse_args()

    ctx = Context()
    ctx.debug = args.opt_debug
    ctx.print_andif = not args.noandif
    ctx.color = not args.nocolor
    ctx.comments = not args.nocomment
    ctx.sectionsname = not args.nosectionsname
    ctx.max_data_size = args.datasize
    ctx.filename = args.filename
    ctx.raw_type = args.raw
    ctx.raw_base = args.rawbase
    ctx.syms = args.symbols
    ctx.calls_in_section = args.calls
    ctx.entry = args.entry
    ctx.dump = args.dump
    ctx.vim = args.vim
    ctx.interactive = args.interactive
    ctx.lines = args.lines
    ctx.graph = args.graph
    ctx.raw_big_endian = args.rawbe
    ctx.list_sections = args.sections
    ctx.print_bytes = args.bytes

    if ctx.raw_base is not None:
        if ctx.raw_base.startswith("0x"):
            ctx.raw_base = int(ctx.raw_base, 16)
        else:
            error("--rawbase must be in hex format")
            die()
    else:
        ctx.raw_base = 0

    return ctx
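# The --rawbase handling above only accepts "0x"-prefixed values and defaults
# to 0. A standalone restatement of that validation (function name is
# illustrative; the original reports via error()/die() instead of raising):
def parse_raw_base(value):
    if value is None:
        return 0
    if value.startswith("0x"):
        return int(value, 16)
    raise ValueError("--rawbase must be in hex format")

assert parse_raw_base(None) == 0
assert parse_raw_base("0x400000") == 0x400000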
def parse_args():
    # Parse arguments
    parser = ArgumentParser(description=
        'Reverse engineering for x86/ARM binaries. Generation of pseudo-C. '
        'Supported formats : ELF, PE. https://github.com/joelpx/reverse')
    parser.add_argument('filename', nargs='?', metavar='FILENAME')
    parser.add_argument('-nc', '--nocolor', action='store_true')
    parser.add_argument('-g', '--graph', action='store_true',
            help='Generate an html flow graph. See d3/index.html.')
    parser.add_argument('--nocomment', action='store_true',
            help="Don't print comments")
    parser.add_argument('--noandif', action='store_true',
            help="Print normal 'if' instead of 'andif'")
    parser.add_argument('--datasize', type=int, default=30, metavar='N',
            help='default 30, maximum of chars to display for strings or bytes array.')
    parser.add_argument('-x', '--entry', metavar='SYMBOLNAME|0xXXXXX|EP',
            help='default main. EP stands for entry point.')
    parser.add_argument('--vim', action='store_true',
            help='Generate syntax colors for vim')
    parser.add_argument('-s', '--symbols', action='store_true',
            help='Print all symbols')
    parser.add_argument('-c', '--calls', action='store_true',
            help='Print all calls which are in the section containing the address '
                 'given with -x.')
    parser.add_argument('--raw', metavar='x86|x64|arm',
            help='Consider the input file as a raw binary')
    parser.add_argument('--dump', action='store_true',
            help='Dump asm without decompilation')
    parser.add_argument('--lines', type=int, default=30, metavar='N',
            help='Max lines to dump')
    parser.add_argument('-i', '--interactive', action='store_true',
            help='Interactive mode')
    parser.add_argument('--symfile', metavar='FILENAME', type=FileType('r'),
            help=('Add user symbols for better readability of the analysis. '
                  'Line format: ADDRESS_HEXA SYMBOL_NAME'))
    parser.add_argument('-d', '--opt_debug', action='store_true')
    parser.add_argument('-ns', '--nosectionsname', action='store_true')

    args = parser.parse_args()

    ctx = Context()
    ctx.debug = args.opt_debug
    ctx.print_andif = not args.noandif
    ctx.color = not args.nocolor
    ctx.comments = not args.nocomment
    ctx.sectionsname = not args.nosectionsname
    ctx.max_data_size = args.datasize
    ctx.filename = args.filename
    ctx.raw_type = args.raw
    ctx.symfile = args.symfile
    ctx.syms = args.symbols
    ctx.calls = args.calls
    ctx.entry = args.entry
    ctx.dump = args.dump
    ctx.vim = args.vim
    ctx.interactive = args.interactive
    ctx.lines = args.lines
    ctx.graph = args.graph
    return ctx
def parse_args():
    # Parse arguments
    parser = ArgumentParser(description=
        'Reverse engineering for x86/ARM/MIPS binaries. Generation of pseudo-C. '
        'Supported formats : ELF, PE. More commands available in the interactive'
        ' mode. https://github.com/joelpx/reverse')
    parser.add_argument('filename', nargs='?', metavar='FILENAME')
    parser.add_argument('-nc', '--nocolor', action='store_true')
    parser.add_argument('-g', '--graph', action='store_true',
            help='Generate an html flow graph. See d3/index.html.')
    parser.add_argument('--nocomment', action='store_true',
            help="Don't print comments")
    parser.add_argument('--noandif', action='store_true',
            help="Print normal 'if' instead of 'andif'")
    parser.add_argument('--datasize', type=int, default=30, metavar='N',
            help='default 30, maximum of chars to display for strings or bytes array.')
    parser.add_argument('-x', '--entry', metavar='SYMBOLNAME|0xXXXXX|EP',
            help='Pseudo-decompilation, default is main. EP stands for entry point.')
    parser.add_argument('--vim', action='store_true',
            help='Generate syntax colors for vim')
    parser.add_argument('-s', '--symbols', action='store_true',
            help='Print all symbols')
    parser.add_argument('-c', '--calls', metavar='SECTION_NAME', type=str,
            help='Print all calls which are in the given section')
    parser.add_argument('--sections', action='store_true',
            help='Print all sections')
    parser.add_argument('--dump', action='store_true',
            help='Dump asm without decompilation')
    parser.add_argument('-l', '--lines', type=int, default=30, metavar='N',
            help='Max lines used with --dump')
    parser.add_argument('--bytes', action='store_true',
            help='Print instruction bytes')
    parser.add_argument('-i', '--interactive', action='store_true',
            help='Interactive mode')
    parser.add_argument('--symfile', metavar='FILENAME', type=FileType('r'),
            help=('Add user symbols for better readability of the analysis. '
                  'Line format: ADDRESS_HEXA SYMBOL_NAME'))
    parser.add_argument('-d', '--opt_debug', action='store_true')
    parser.add_argument('-ns', '--nosectionsname', action='store_true')
    parser.add_argument('--raw', metavar='x86|x64|arm|mips|mips64',
            help='Consider the input file as a raw binary')
    parser.add_argument('--rawbase', metavar='0xXXXXX',
            help='Set base address of a raw file (default=0)')
    parser.add_argument('--rawbe', action='store_true',
            help="If not set it's in little endian")

    args = parser.parse_args()

    ctx = Context()
    ctx.debug = args.opt_debug
    ctx.print_andif = not args.noandif
    ctx.color = not args.nocolor
    ctx.comments = not args.nocomment
    ctx.sectionsname = not args.nosectionsname
    ctx.max_data_size = args.datasize
    ctx.filename = args.filename
    ctx.raw_type = args.raw
    ctx.raw_base = args.rawbase
    ctx.symfile = args.symfile
    ctx.syms = args.symbols
    ctx.calls_in_section = args.calls
    ctx.entry = args.entry
    ctx.dump = args.dump
    ctx.vim = args.vim
    ctx.interactive = args.interactive
    ctx.lines = args.lines
    ctx.graph = args.graph
    ctx.raw_big_endian = args.rawbe
    ctx.list_sections = args.sections
    ctx.print_bytes = args.bytes

    if ctx.raw_base is not None:
        if ctx.raw_base.startswith("0x"):
            ctx.raw_base = int(ctx.raw_base, 16)
        else:
            error("--rawbase must be in hex format")
            die()
    else:
        ctx.raw_base = 0

    return ctx
def on_message(self, message, data):
    if 'payload' not in message:
        print(message)
        return

    what = message['payload']
    parts = what.split(':::')
    if len(parts) < 2:
        print(what)
        return

    cmd = parts[0]
    if cmd == 'backtrace':
        if self.app.get_session_ui() is not None and self.app.get_backtrace_panel() is not None:
            try:
                self.app.get_backtrace_panel().set_backtrace(json.loads(parts[1]))
            except:
                pass
    elif cmd == 'emulator':
        # on a separate thread to allow js api recursion
        Thread(target=self.emulator.api, args=(parts[1:],)).start()
    elif cmd == 'enumerate_java_classes_start':
        if self.app.get_java_classes_panel() is not None:
            self.app.get_java_classes_panel().on_enumeration_start()
        if self.app.get_java_trace_panel() is not None:
            self.app.get_java_trace_panel().on_enumeration_start()
    elif cmd == 'enumerate_java_classes_match':
        if self.app.get_java_classes_panel() is not None:
            self.app.get_java_classes_panel().on_enumeration_match(parts[1])
        if self.app.get_java_trace_panel() is not None:
            self.app.get_java_trace_panel().on_enumeration_match(parts[1])
    elif cmd == 'enumerate_java_classes_complete':
        self.app_window.get_menu().on_java_classes_enumeration_complete()
        if self.app.get_java_classes_panel() is not None:
            self.app.get_java_classes_panel().on_enumeration_complete()
        if self.app.get_java_trace_panel() is not None:
            self.app.get_java_trace_panel().on_enumeration_complete()
    elif cmd == 'enumerate_java_methods_complete':
        self.bus.emit(parts[1], json.loads(parts[2]), parts[1])
    elif cmd == 'ftrace':
        if self.app.get_ftrace_panel() is not None:
            self.app.get_ftrace_panel().append_data(parts[1])
    elif cmd == 'enable_kernel':
        self.app_window.get_menu().enable_kernel_menu()
    elif cmd == 'hook_java_callback':
        h = Hook(Hook.HOOK_JAVA)
        h.set_ptr(1)
        h.set_input(parts[1])
        if self.java_pending_args:
            h.set_condition(self.java_pending_args['condition'])
            h.set_logic(self.java_pending_args['logic'])
            self.java_pending_args = None
        self.java_hooks[h.get_input()] = h
        self.app.get_hooks_panel().hook_java_callback(h)
    elif cmd == 'hook_native_callback':
        h = Hook(Hook.HOOK_NATIVE)
        h.set_ptr(int(parts[1], 16))
        h.set_input(self.temporary_input)
        h.set_bytes(binascii.unhexlify(parts[2]))
        self.temporary_input = ''
        if self.native_pending_args:
            h.set_condition(self.native_pending_args['condition'])
            h.set_logic(self.native_pending_args['logic'])
            self.native_pending_args = None
        self.hooks[h.get_ptr()] = h
        self.app.get_hooks_panel().hook_native_callback(h)
    elif cmd == 'hook_onload_callback':
        h = Hook(Hook.HOOK_ONLOAD)
        h.set_ptr(0)
        h.set_input(parts[1])
        self.on_loads[parts[1]] = h
        if self.app.session_ui is not None and self.app.get_hooks_panel() is not None:
            self.app.get_hooks_panel().hook_onload_callback(h)
    elif cmd == 'java_trace':
        panel = self.app.get_java_trace_panel()
        if panel is None:
            panel = self.app.get_session_ui().add_dwarf_tab(SessionUi.TAB_JAVA_TRACE)
        panel.on_event(parts[1], parts[2], parts[3])
    elif cmd == 'log':
        self.app.get_console_panel().get_js_console().log(parts[1])
    elif cmd == 'memory_scan_match':
        self.bus.emit(parts[1], parts[2], json.loads(parts[3]))
    elif cmd == 'memory_scan_complete':
        self.app_window.get_menu().on_bytes_search_complete()
        self.bus.emit(parts[1] + ' complete', 0, 0)
    elif cmd == 'onload_callback':
        self.loading_library = parts[1]
        self.app.get_console_panel().get_js_console().log(
            'hook onload %s @thread := %s' % (parts[1], parts[3]))
        self.app.get_hooks_panel().hit_onload(parts[1], parts[2])
    elif cmd == 'release':
        if parts[1] in self.contexts:
            del self.contexts[parts[1]]
        self.app.on_tid_resumed(int(parts[1]))
    elif cmd == 'set_context':
        data = json.loads(parts[1])
        if 'context' in data:
            context = Context(data['context'])
            self.contexts[str(data['tid'])] = context

            sym = ''
            if 'pc' in context.__dict__:
                name = data['ptr']
                if context.pc.symbol_name is not None:
                    sym = '(%s - %s)' % (context.pc.symbol_module_name, context.pc.symbol_name)
            else:
                name = data['ptr']

            self.app.get_contexts_lists_panel().add_context(data, library_onload=self.loading_library)
            # check if data['reason'] is 0 (REASON_HOOK)
            if self.loading_library is None and data['reason'] == 0:
                self.log('hook %s %s @thread := %d' % (name, sym, data['tid']))

            if len(self.contexts.keys()) > 1 and self.app.get_context_panel().have_context():
                return

            self.app.get_session_ui().request_session_ui_focus()
        else:
            self.arch = data['arch']
            self.pointer_size = data['pointerSize']
            self.java_available = data['java']
            self.app.get_console_panel().get_js_console().log('injected into := ' + str(self.pid))
            self.app_window.on_context_info()

        self.context_tid = data['tid']
        self.app.apply_context(data)
        if self.loading_library is not None:
            self.loading_library = None
    elif cmd == 'set_data':
        key = parts[1]
        if data:
            self.app.get_data_panel().append_data(key, hexdump(data, result='return'))
        else:
            self.app.get_data_panel().append_data(key, str(parts[2]))
    elif cmd == 'script_loaded':
        if self._spawned:
            self.device.resume(self.pid)
    elif cmd == 'tracer':
        panel = self.app.get_trace_panel()
        if panel is None:
            panel = self.app.get_session_ui().add_dwarf_tab(SessionUi.TAB_TRACE)
        if panel is not None:  # safely checked later
            panel.start()
            trace_events_parts = parts[1].split(',')
            while len(trace_events_parts) > 0:
                t = TraceEvent(trace_events_parts.pop(0),
                               trace_events_parts.pop(0),
                               trace_events_parts.pop(0),
                               trace_events_parts.pop(0))
                panel.event_queue.append(t)
    elif cmd == 'unhandled_exception':
        # todo
        pass
    elif cmd == 'update_modules':
        self.app.apply_context({'tid': parts[1], 'modules': json.loads(parts[2])})
    elif cmd == 'update_ranges':
        self.app.apply_context({'tid': parts[1], 'ranges': json.loads(parts[2])})
    elif cmd == 'watcher':
        exception = json.loads(parts[1])
        self.log('watcher hit op %s address %s @thread := %s' % (
            exception['memory']['operation'], exception['memory']['address'], parts[2]))
    elif cmd == 'watcher_added':
        if self.app.get_watchers_panel() is not None:
            self.app.get_watchers_panel().add_watcher_callback(parts[1])
    elif cmd == 'watcher_removed':
        if self.app.get_watchers_panel() is not None:
            self.app.get_watchers_panel().remove_watcher_callback(parts[1])
    else:
        print(what)
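# The agent protocol handled above frames messages as ':::'-separated fields
# inside the payload: "<cmd>:::<arg1>:::<arg2>...". A minimal sketch of
# building and splitting such a payload (tid/ptr values are made up):
import json

payload = 'set_context:::' + json.dumps({'tid': 1234, 'ptr': '0x7f0', 'reason': 0})
parts = payload.split(':::')
cmd, args = parts[0], parts[1:]
assert cmd == 'set_context' and json.loads(args[0])['tid'] == 1234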