def get_health_status():
    """Probe every configured backing service and report overall health.

    Checks Elasticsearch, Grafana (only when a Grafana host is configured),
    Postgres, and RabbitMQ (only when an AMQP url is configured).  Returns
    an HTTP 503 JSON response listing the failing services, or a 200
    ``{"status": "healthy"}`` response when everything passes.
    """
    checker = es_client.EsClient(esHost=APP_CONFIG["esHost"],
                                 grafanaHost=APP_CONFIG["grafanaHost"],
                                 app_config=APP_CONFIG)
    dao = postgres_dao.PostgresDAO(APP_CONFIG)
    failures = []
    if not checker.is_healthy():
        failures.append("Elasticsearch is not healthy;")
    if APP_CONFIG["grafanaHost"].strip() and not checker.is_grafana_healthy():
        failures.append("Grafana is not healthy;")
    if not dao.test_query_handling():
        failures.append("Postgres is not healthy;")
    if APP_CONFIG["amqpUrl"].strip():
        try:
            # The connection attempt itself is the check; the client object
            # is discarded immediately afterwards.
            _ = amqp.AmqpClient(APP_CONFIG)
        except Exception as err:
            logger.error(err)
            failures.append("Connection to Rabbitmq is not healthy;")
    status = "".join(failures)
    if status:
        logger.error("Metrics gatherer health check status failed: %s", status)
        return Response(json.dumps({"status": status}),
                        status=503,
                        mimetype='application/json')
    return jsonify({"status": "healthy"})
def start_metrics_gathering():
    """Gather metrics for the date under check, once per day.

    Does nothing outside the configured start/end time window, or when the
    metrics for the date have already been calculated.  Otherwise runs the
    gatherer, prunes old data, and records a completion marker document in
    the task-done index.
    """
    client = es_client.EsClient(esHost=APP_CONFIG["esHost"],
                                grafanaHost=APP_CONFIG["grafanaHost"],
                                app_config=APP_CONFIG)
    # Guard: tasks may only start inside the configured time-of-day window.
    if not utils.is_the_time_for_task_starting(APP_CONFIG["allowedStartTime"],
                                               APP_CONFIG["allowedEndTime"]):
        logger.debug(
            "Starting of tasks is allowed only from %s to %s. Now is %s",
            APP_CONFIG["allowedStartTime"],
            APP_CONFIG["allowedEndTime"],
            datetime.datetime.now())
        return
    date_to_check = utils.take_the_date_to_check()
    # Guard: skip when this date was already processed.
    if client.is_the_date_metrics_calculated(date_to_check):
        logger.debug("Task for today was already completed...")
        return
    logger.debug("Task started...")
    gatherer = metrics_gatherer.MetricsGatherer(APP_CONFIG)
    gatherer.gather_metrics(date_to_check, date_to_check)
    client.delete_old_info(APP_CONFIG["maxDaysStore"])
    # Record that this date has been handled so reruns become no-ops.
    client.bulk_index(
        client.task_done_index,
        [{
            '_index': client.task_done_index,
            '_source': {
                "gather_date": date_to_check.date(),
                "started_task_time": datetime.datetime.now()
            }
        }])
    logger.debug("Task finished...")
def __init__(self, app_settings):
    """Wire up the Postgres DAO, ES client, and models remover.

    :param app_settings: application config dict; must provide at least
        the "esHost" and "grafanaHost" entries read here.
    """
    self.app_settings = app_settings
    self.postgres_dao = postgres_dao.PostgresDAO(app_settings)
    self.es_client = es_client.EsClient(esHost=app_settings["esHost"],
                                        grafanaHost=app_settings["grafanaHost"],
                                        app_config=app_settings)
    self.models_remover = models_remover.ModelsRemover(app_settings)
def __init__(self, app_config, conditions_field="", model_name=""):
    """Base setup for a model-remove policy.

    :param app_config: application config dict ("esHost", "grafanaHost",
        plus the entry named by *conditions_field*).
    :param conditions_field: key in *app_config* holding the raw
        conditions string to parse.
    :param model_name: identifier of the model this policy governs.
    """
    self.app_config = app_config
    self.model_name = model_name
    self.conditions_field = conditions_field
    # NOTE(review): with the default conditions_field="" this lookup is
    # app_config[""] and would raise KeyError — presumably subclasses
    # always pass a real field name; confirm against callers.
    self.conditions = utils.parse_conditions(app_config[conditions_field])
    self.es_client = es_client.EsClient(esHost=app_config["esHost"],
                                        grafanaHost=app_config["grafanaHost"],
                                        app_config=app_config)
def __init__(self, app_config):
    """Index the available model-remove policies by model name.

    :param app_config: application config dict ("esHost", "grafanaHost",
        plus whatever the individual policies read).
    """
    self.app_config = app_config
    # One entry per policy, keyed by the model each policy governs.
    self.model_policies = {
        policy.model_name: policy
        for policy in (AutoAnalysisModelRemovePolicy(app_config),
                       SuggestModelRemovePolicy(app_config))
    }
    self.es_client = es_client.EsClient(esHost=app_config["esHost"],
                                        grafanaHost=app_config["grafanaHost"],
                                        app_config=app_config)
logging.disable(logging.NOTSET) elif APP_CONFIG["logLevel"].lower() == "info": logging.disable(logging.DEBUG) else: logging.disable(logging.INFO) logger = logging.getLogger("metricsGatherer") application = create_application() CORS(application) while True: try: if not APP_CONFIG["grafanaHost"].strip(): break _es_client = es_client.EsClient(esHost=APP_CONFIG["esHost"], grafanaHost=APP_CONFIG["grafanaHost"], app_config=APP_CONFIG) data_source_created = [] for index in [ _es_client.main_index, _es_client.rp_aa_stats_index, _es_client.rp_model_train_stats_index, _es_client.rp_suggest_metrics_index, _es_client.rp_model_remove_stats_index ]: date_field = "gather_date" if index == _es_client.rp_suggest_metrics_index: date_field = "savedDate" data_source_created.append( int( _es_client.create_grafana_data_source( APP_CONFIG["esHostGrafanaDataSource"], index,