def process_run():
    """Pick up the next not-started Scan and run all checks for each day in its range.

    Marks the scan "pending" while working, "success" on completion, and
    "error" on any failure.  Commits status transitions via
    ``metrics_session`` so other workers see the state change.
    """
    # Initialize before the try so the except handler can't hit an
    # UnboundLocalError if Scan.get_not_started_run() itself raises.
    scan = None
    try:
        scan = Scan.get_not_started_run()
        if scan is not None:
            scan.status = "pending"
            metrics_session.commit()

            # Walk the scan's date range one day at a time, running the full
            # check pipeline for every source database on each day.
            for_time = scan.start_date
            while for_time <= scan.end_date:
                conf = Conf(for_time)
                for source_db in DataSource.source_dbs():
                    run_check_for_new_tables(source_db, conf)
                    run_checks(source_db, conf)
                    run_compute_alerts(source_db, conf)
                for_time += timedelta(days=1)

            generate_grafana()
            scan.status = "success"
            metrics_session.commit()
    except Exception:
        # NOTE(review): broad swallow kept deliberately — this looks like a
        # worker-loop best-effort where one failed scan must not kill the
        # process; consider logging the traceback here.
        if scan is not None:
            scan.status = "error"
            metrics_session.commit()
def process_run():
    """Execute the next queued Run, if any, for its single target date.

    Transitions the run through 'pending' -> 'success', committing each
    status change, and refreshes Grafana dashboards when done.
    """
    run = Run.get_not_started_run()
    if run is None:
        # Nothing queued — bail out early.
        return

    run.status = 'pending'
    metrics_session.commit()

    conf = Conf(run.for_date)
    for source_db in DataSource.source_dbs():
        # Full check pipeline for this source database.
        run_check_for_new_tables(source_db, conf)
        run_checks(source_db, conf)
        run_compute_alerts(source_db, conf)

    generate_grafana()
    run.status = 'success'
    metrics_session.commit()
def create_dashboards():
    """Build Grafana dashboards for every monitored table plus a home dashboard.

    Connects to Grafana with admin credentials from settings, provisions the
    data source and notification channels, creates one dashboard per monitored
    table (persisting each dashboard URL on the table record), then creates
    and stars a home dashboard that links them all.
    """
    api = GrafanaFace(
        auth=(settings.GF_SECURITY_ADMIN_USER, settings.GF_SECURITY_ADMIN_PASSWORD),
        host=f'{settings.GRAFANA_WEB_HOST}:{settings.GRAFANA_WEB_PORT}',
    )

    create_source_in_grafana(api)
    create_notifcation_channels(api)

    all_dashboards = []
    for source_db in DataSource.source_dbs():
        for monitored in MonitoredTable.get_monitored_tables(source_db.name):
            dashboard = create_dashboard_for_table(api, source_db, monitored)
            # Remember where this table's dashboard lives so the UI can link to it.
            monitored.grafana_url = dashboard['dashboard']['url']
            all_dashboards.append(dashboard)
            metrics_session.commit()

    home = create_home_dashboard(api, all_dashboards)
    star_home_dashboard(api, home)
def main():
    """Command-line entry point: dispatch to Grafana setup, metric runs,
    sample-data generation, backfill, or admin-user creation.

    Flags are not mutually exclusive — any combination supplied is executed
    in the order below.
    """
    parser = argparse.ArgumentParser(
        description="Either create dashboards in grafana or run manual data ingestion of data metrics"
    )
    parser.add_argument(
        "--grafana",
        action="store_true",
        help="Setup grafana dashboards, based on existing metrics",
    )
    parser.add_argument("--metrics", action="store_true", help="Push metrics to redata")
    parser.add_argument(
        "--generate-sample-data",
        action="store_true",
        help="Add sample data to REDATA DB for demonstration",
    )
    parser.add_argument(
        "--create-admin-user", action="store_true", help="Generate admin user"
    )
    parser.add_argument(
        "--backfill",
        dest="backfill_days",
        action="store",
        nargs="?",
        type=int,
        help="Run backfill for last X days of metrics data",
    )
    args = parser.parse_args()

    if not any(
        (
            args.grafana,
            args.metrics,
            args.backfill_days,
            args.generate_sample_data,
            args.create_admin_user,
        )
    ):
        # Fixed typo: "arugments" -> "arguments".
        print("No arguments supplied, write -h to get list of possible commands")

    if args.grafana:
        create_dashboards()

    if args.metrics:
        # Build one Conf so all three phases run against the same timestamp
        # (the original constructed a fresh utcnow() per phase).
        conf = Conf(datetime.utcnow())
        for db in DataSource.source_dbs():
            print("run_check_for_new_table")
            run_check_for_new_tables(db, conf)
            print("run_checks")
            run_checks(db, conf)
            print("run alerts")
            run_compute_alerts(db, conf)

    if args.generate_sample_data:
        create_sample_tables_in_redata()

    if args.backfill_days:
        days = args.backfill_days
        for db in DataSource.source_dbs():
            run_check_for_new_tables(db, Conf(datetime.utcnow()))
            # Replay the check pipeline one day at a time up to now.
            past = datetime.utcnow() - timedelta(days=days)
            while past <= datetime.utcnow():
                run_checks(db, Conf(past))
                run_compute_alerts(db, Conf(past))
                past += timedelta(days=1)

    if args.create_admin_user:
        User.create_admin_user_if_not_exist()