def update_health_status():
    """Run every data source's health-check query and persist the outcome.

    For each data source, executes either a custom query taken from the
    REDASH_CUSTOM_HEALTH_QUERIES_<id> environment variable or the runner's
    noop query, then stores a SUCCESS/FAIL status record with timing info.
    """
    for data_source in models.DataSource.query:
        logger.info(u"task=update_health_status state=start ds_id=%s", data_source.id)

        ds_id = str(data_source.id)
        custom_query_env_var = "REDASH_CUSTOM_HEALTH_QUERIES_{data_source_id}".format(
            data_source_id=ds_id)
        custom_query = os.environ.get(custom_query_env_var, "")
        # An operator-supplied query takes precedence over the runner's noop query.
        query_text = custom_query or data_source.query_runner.noop_query

        runtime = None
        try:
            start_time = time.time()
            test_connection(data_source.query_runner, query_text)
            runtime = time.time() - start_time
        except NotImplementedError:
            # Runner has no test query at all; nothing meaningful to record.
            logger.info(u"Unable to compute health status without test query for %s",
                        data_source.name)
            continue
        except Exception:
            logger.warning(u"Failed health check for the data source: %s",
                           data_source.name, exc_info=1)
            statsd_client.incr('update_health_status.error')
            logger.info(u"task=update_health_status state=error ds_id=%s runtime=%.2f",
                        data_source.id, time.time() - start_time)

        # runtime stays None when the check raised, which maps to FAIL.
        status = {
            "status": "FAIL" if runtime is None else "SUCCESS",
            "last_run": start_time,
            "last_run_human": str(parse_human_time(str(start_time))),
            "runtime": runtime,
        }
        store_health_status(ds_id, data_source.name, query_text, status)
def health_status():
    """Check connectivity for every data source and record the result.

    Uses a per-data-source query from settings.CUSTOM_HEALTH_QUERIES when
    available, otherwise the runner's noop query, and hands the resulting
    SUCCESS/FAIL record to update_health_status().
    """
    for ds in models.DataSource.query:
        logger.info(u"task=health_status state=start ds_id=%s", ds.id)

        ds_id = str(ds.id)
        query_text = ds.query_runner.noop_query
        custom_queries = settings.CUSTOM_HEALTH_QUERIES
        if custom_queries and ds_id in custom_queries:
            query_text = custom_queries[ds_id]

        runtime = None
        try:
            start_time = time.time()
            ds.query_runner.test_connection(query_text)
            runtime = time.time() - start_time
        except Exception:
            logger.warning(u"Failed health check for the data source: %s",
                           ds.name, exc_info=1)
            statsd_client.incr('health_status.error')
            logger.info(
                u"task=health_status state=error ds_id=%s runtime=%.2f",
                ds.id, time.time() - start_time)

        # runtime is only set on a successful test; None means the check failed.
        update_health_status(ds_id, ds.name, query_text, {
            "status": "SUCCESS" if runtime is not None else "FAIL",
            "last_run": start_time,
            "last_run_human": str(parse_human_time(str(start_time))),
            "runtime": runtime,
        })
def test_supports_relative_timestamps(self):
    """$humanTime markers in query JSON resolve like parse_human_time does."""
    expected = parse_human_time("1 hour ago", None)
    raw_query = json.dumps({'ts': {'$humanTime': '1 hour ago'}})
    query_data = parse_query_json(raw_query, None)
    self.assertEqual(query_data['ts'], expected)
def test_supports_relative_timestamps(self):
    """$humanTime markers in query JSON resolve like parse_human_time does."""
    expected = parse_human_time("1 hour ago")
    parsed = parse_query_json(json.dumps({'ts': {'$humanTime': '1 hour ago'}}))
    self.assertEqual(parsed['ts'], expected)
def tests_use_utc_for_relative_timestamps_with_timezone(self):
    """A timezone argument must not shift relative $humanTime values (UTC kept)."""
    expected = parse_human_time("1 hour ago", None)
    raw_query = json.dumps({'ts': {'$humanTime': '1 hour ago'}})
    parsed = parse_query_json(raw_query, 'US/Eastern')
    self.assertEqual(parsed['ts'], expected)
def parse_query(query):
    """Load a YAML query string into a dict, normalising time bounds.

    String-valued StartTime/EndTime entries are converted from human-readable
    form (via parse_human_time) to integer epoch seconds; a missing or falsy
    EndTime defaults to the current time.
    """
    parsed = yaml.safe_load(query)
    for key in ("StartTime", "EndTime"):
        value = parsed.get(key)
        if isinstance(value, str):
            parsed[key] = int(parse_human_time(value).timestamp())
    if not parsed.get("EndTime"):
        parsed["EndTime"] = int(datetime.datetime.now().timestamp())
    return parsed
def datetime_parser(dct):
    """JSON object hook (Python 2): convert date-looking string values to
    datetimes, expand a $humanTime marker, then defer to the BSON hook."""
    for key, value in dct.iteritems():
        if isinstance(value, basestring):
            matches = date_regex.findall(value)
            if matches:
                dct[key] = parse(matches[0], yearfirst=True)
    if '$humanTime' in dct:
        return parse_human_time(dct['$humanTime'])
    return bson_object_hook(dct)
def datetime_parser(dct):
    """JSON object hook: convert date-looking string values to datetimes,
    expand $humanTime / $oids markers, then defer to the BSON hook."""
    for key, value in dct.items():
        if isinstance(value, str):
            matches = date_regex.findall(value)
            if matches:
                dct[key] = parse(matches[0], yearfirst=True)
    if "$humanTime" in dct:
        return parse_human_time(dct["$humanTime"])
    if "$oids" in dct:
        return parse_oids(dct["$oids"])
    return bson_object_hook(dct)
def test_supports_relative_timestamps(self):
    """$humanTime markers in query JSON resolve like parse_human_time does."""
    expected = parse_human_time("1 hour ago")
    result = parse_query_json(json_dumps({"ts": {"$humanTime": "1 hour ago"}}))
    self.assertEqual(result["ts"], expected)