Example #1
    def test_store_health_status_sets_correct_keys(self):
        current_health = redis_connection.get('data_sources:health')
        self.assertEqual(None, current_health)

        DATA_SOURCE = self.factory.create_data_source()
        QUERY_SUCCESS = "SELECT 1"
        QUERY_FAIL = "SELECT meep"
        SOME_DATA_FAIL = {"a": "b", "foo": "bar", "status": "FAIL"}
        SOME_DATA_SUCCESS = {"a": "b", "foo": "bar", "status": "SUCCESS"}
        store_health_status(str(DATA_SOURCE.id), DATA_SOURCE.name, QUERY_FAIL,
                            SOME_DATA_FAIL)
        store_health_status(str(DATA_SOURCE.id), DATA_SOURCE.name,
                            QUERY_SUCCESS, SOME_DATA_SUCCESS)
        '''
          The expected format of the cached health data is:
          {
            "<data_source_id>": {
              "metadata": "<data_source_name>",
              "queries": {
                "<query_text>": {...},
                "<query_text>": {...},
                "<query_text>": {...},
                ...
              }
            },
            ...
          }
        '''

        current_health = json.loads(
            redis_connection.get('data_sources:health'))

        # There is 1 data source.
        self.assertEqual(1, len(current_health.keys()))
        self.assertEqual(DATA_SOURCE.id, int(list(current_health.keys())[0]))

        # The data source has "metadata", "queries" and "status" keys.
        ds_id = str(DATA_SOURCE.id)
        self.assertEqual(3, len(current_health[ds_id].keys()))
        self.assertTrue("metadata" in current_health[ds_id].keys())
        self.assertTrue("queries" in current_health[ds_id].keys())
        self.assertTrue("status" in current_health[ds_id].keys())

        # There are two queries with correct data
        self.assertEqual(2, len(current_health[ds_id]["queries"]))
        self.assertTrue(
            QUERY_SUCCESS in current_health[ds_id]["queries"].keys())
        self.assertTrue(QUERY_FAIL in current_health[ds_id]["queries"].keys())
        self.assertEqual(SOME_DATA_FAIL,
                         current_health[ds_id]["queries"][QUERY_FAIL])
        self.assertEqual(SOME_DATA_SUCCESS,
                         current_health[ds_id]["queries"][QUERY_SUCCESS])
        self.assertEqual(SOME_DATA_FAIL["status"],
                         current_health[ds_id]["status"])
Example #2
def stmo_status_api():
    status = original_get_status()
    health_data = json.loads(redis_connection.get('data_sources:health') or '{}')

    # Get the top level status for each data source
    for health_data_point in health_data.values():
        data_source_name = health_data_point["metadata"]["name"]
        dashboard_label = "[Data Source Health] {name}".format(name=data_source_name)
        status[dashboard_label] = health_data_point["status"]
    return jsonify(status)
Example #3
def get_object_counts():
    status = {}
    status['queries_count'] = models.db.session.query(models.Query).count()
    if settings.FEATURE_SHOW_QUERY_RESULTS_COUNT:
        status['query_results_count'] = models.db.session.query(
            models.QueryResult).count()
        status['unused_query_results_count'] = models.QueryResult.unused(
        ).count()
    status['dashboards_count'] = models.Dashboard.query.count()
    status['widgets_count'] = models.Widget.query.count()
    status['data_sources'] = json.loads(
        redis_connection.get('data_sources:health') or '{}')
    return status
Example #4
    def get_schema(self, refresh=False):
        cache = None
        if not refresh:
            cache = redis_connection.get(self._schema_key)

        if cache is None:
            query_runner = self.query_runner
            schema = sorted(query_runner.get_schema(get_stats=refresh), key=lambda t: t['name'])

            redis_connection.set(self._schema_key, json_dumps(schema))
        else:
            schema = json_loads(cache)

        return schema
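
Example #4 is a straightforward cache-aside pattern: try Redis first, recompute and write back on a miss (or when refresh is forced). Below is a self-contained sketch of the same pattern using a plain redis-py client; the key name and the fetch_schema_from_db stand-in are assumptions for illustration, not redash code.

import json

import redis

r = redis.Redis(decode_responses=True)  # assumes a local Redis instance


def fetch_schema_from_db():
    # Stand-in for query_runner.get_schema(); returns a list of table dicts.
    return [{"name": "users", "columns": ["id", "email"]}]


def get_schema(refresh=False):
    key = "example:schema"  # hypothetical cache key
    cached = None if refresh else r.get(key)
    if cached is None:
        schema = sorted(fetch_schema_from_db(), key=lambda t: t["name"])
        r.set(key, json.dumps(schema))
        return schema
    return json.loads(cached)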
Example #5
def store_health_status(data_source_id, data_source_name, query_text, data):
    key = "data_sources:health"

    cache = json.loads(redis_connection.get(key) or "{}")
    if data_source_id not in cache:
        cache[data_source_id] = {"metadata": {"name": data_source_name}, "queries": {}}
    cache[data_source_id]["queries"][query_text] = data

    cache[data_source_id]["status"] = "SUCCESS"
    for query_status in cache[data_source_id]["queries"].values():
        if query_status["status"] == "FAIL":
            cache[data_source_id]["status"] = "FAIL"
            break

    redis_connection.set(key, json.dumps(cache))
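
The loop at the end of Example #5 implements a simple roll-up: the data source is FAIL as soon as any recorded query is FAIL, otherwise SUCCESS. A compact equivalent of that rule is sketched below (the helper name is mine). Note also that the get/modify/set sequence is not atomic, so two concurrent writers can overwrite each other's update.

def roll_up_status(queries):
    """Return "FAIL" if any recorded query failed, otherwise "SUCCESS"."""
    return "FAIL" if any(q.get("status") == "FAIL" for q in queries.values()) else "SUCCESS"

# roll_up_status({"SELECT 1": {"status": "SUCCESS"},
#                 "SELECT meep": {"status": "FAIL"}})  ->  "FAIL"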
Example #6
def remove_ghost_locks():
    """
    Removes query locks that reference a non existing RQ job.
    """
    keys = redis_connection.keys("query_hash_job:*")
    locks = {k: redis_connection.get(k) for k in keys}
    jobs = list(rq_job_ids())

    count = 0

    for lock, job_id in locks.items():
        if job_id not in jobs:
            redis_connection.delete(lock)
            count += 1

    logger.info("Locks found: {}, Locks removed: {}".format(len(locks), count))
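
Example #6 walks all query_hash_job:* lock keys and deletes those whose stored job id is no longer known to RQ. Here is a self-contained sketch of the same sweep with a plain redis-py client, taking the set of live job ids as a parameter instead of reading it from RQ (the ids are made up).

import redis

r = redis.Redis(decode_responses=True)  # assumes a local Redis instance


def remove_ghost_locks(live_job_ids):
    """Delete query locks that reference a job id not present in live_job_ids."""
    removed = 0
    for key in r.keys("query_hash_job:*"):  # same key pattern as Example #6
        if r.get(key) not in live_job_ids:
            r.delete(key)
            removed += 1
    return removed

# remove_ghost_locks({"a1b2c3", "d4e5f6"})

On a large keyspace, scan_iter("query_hash_job:*") is the usual non-blocking alternative to keys().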
Example #7
File: models.py  Project: Drunkar/redash
    def get_schema(self, refresh=False):
        key = "data_source:schema:{}".format(self.id)

        cache = None
        if not refresh:
            cache = redis_connection.get(key)

        if cache is None:
            query_runner = self.query_runner
            schema = sorted(query_runner.get_schema(get_stats=refresh), key=lambda t: t['name'])

            redis_connection.set(key, json.dumps(schema))
        else:
            schema = json.loads(cache)

        return schema
Example #8
File: monitor.py  Project: jrbenny35/redash
def get_status():
    status = {}
    info = redis_connection.info()
    status['redis_used_memory'] = info['used_memory']
    status['redis_used_memory_human'] = info['used_memory_human']
    status['version'] = __version__
    status['queries_count'] = models.db.session.query(models.Query).count()
    if settings.FEATURE_SHOW_QUERY_RESULTS_COUNT:
        status['query_results_count'] = models.db.session.query(models.QueryResult).count()
        status['unused_query_results_count'] = models.QueryResult.unused().count()
    status['dashboards_count'] = models.Dashboard.query.count()
    status['widgets_count'] = models.Widget.query.count()

    status['workers'] = []

    status['manager'] = redis_connection.hgetall('redash:status')
    status['data_sources'] = json.loads(redis_connection.get('data_sources:health') or '{}')

    queues = {}
    for ds in models.DataSource.query:
        for queue in (ds.queue_name, ds.scheduled_queue_name):
            queues.setdefault(queue, set())
            queues[queue].add(ds.name)

    status['manager']['queues'] = {}
    for queue, sources in queues.items():
        status['manager']['queues'][queue] = {
            'data_sources': ', '.join(sources),
            'size': redis_connection.llen(queue)
        }
    
    status['manager']['queues']['celery'] = {
        'size': redis_connection.llen('celery'),
        'data_sources': ''
    }

    status['database_metrics'] = []
    # have to include the fake FROM in the SQL to prevent an IndexError
    queries = [
        ['Query Results Size', "pg_size_pretty(pg_total_relation_size('query_results')) as size from (select 1) as a"],
        ['Redash DB Size', "pg_size_pretty(pg_database_size('postgres')) as size from (select 1) as a"]
    ]
    for query_name, query in queries:
        result = models.db.session.query(query).first()
        status['database_metrics'].append([query_name, result[0]])

    return status
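
The queue section of Example #8 groups data sources by queue name and then reports each queue's backlog via llen. A stripped-down sketch of that aggregation with plain data structures and redis-py follows (the queue and data source names are invented).

import redis

r = redis.Redis(decode_responses=True)  # assumes a local Redis instance

# (queue_name, data_source_name) pairs, as ds.queue_name / ds.name would yield.
pairs = [
    ("queries", "pg-main"),
    ("queries", "bigquery"),
    ("scheduled_queries", "pg-main"),
]

queues = {}
for queue, source in pairs:
    queues.setdefault(queue, set()).add(source)

summary = {
    queue: {"data_sources": ", ".join(sorted(sources)), "size": r.llen(queue)}
    for queue, sources in queues.items()
}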
Example #9
File: __init__.py  Project: xenisys/redash
    def get_schema(self, refresh=False):
        cache = None
        if not refresh:
            cache = redis_connection.get(self._schema_key)

        if cache is None:
            query_runner = self.query_runner
            schema = query_runner.get_schema(get_stats=refresh)

            try:
                out_schema = self._sort_schema(schema)
            except Exception:
                logging.exception(
                    "Error sorting schema columns for data_source {}".format(self.id)
                )
                out_schema = schema
            finally:
                redis_connection.set(self._schema_key, json_dumps(out_schema))
        else:
            out_schema = json_loads(cache)

        return out_schema
Example #10
def stmo_status_api():
    status = original_get_status()
    status['data_sources'] = json.loads(redis_connection.get('data_sources:health') or '{}')
    return jsonify(status)
Example #11
 def get(self):
     health_data = json.loads(redis_connection.get('data_sources:health') or '{}')
     return jsonify(health_data)
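
Example #11 (repeated in Example #16 below) is the handler form of the same lookup. For completeness, here is a self-contained sketch of such an endpoint with a bare Flask app and redis-py; the app and route are mine, not redash's.

import json

import redis
from flask import Flask, jsonify

app = Flask(__name__)
r = redis.Redis(decode_responses=True)  # assumes a local Redis instance


@app.route("/status/data_sources")  # hypothetical route
def data_sources_health():
    health = json.loads(r.get("data_sources:health") or "{}")
    return jsonify(health)


if __name__ == "__main__":
    app.run()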
Example #12
 def get_cached_schema(self):
     cache = redis_connection.get(self._schema_key)
     return json_loads(cache) if cache else None
Example #13
def get_latest_version():
    return redis_connection.get(REDIS_KEY)
Example #14
def _get_tables_from_cache(data_source_id, database_name):
    cache = redis_connection.get(_tables_key(data_source_id, database_name))
    return json_loads(cache) if cache else None
Example #15
def get_latest_version():
    return redis_connection.get(REDIS_KEY)
Example #16
 def get(self):
     health_data = json.loads(redis_connection.get("data_sources:health") or "{}")
     return jsonify(health_data)
Example #17
def get_latest_version():
    version = redis_connection.get(REDIS_KEY)
    return version.decode("utf-8") if version else None
Example #18
File: models.py  Project: Drunkar/redash
 def pause_reason(self):
     return redis_connection.get(self._pause_key())
Example #19
 def pause_reason(self):
     reason = redis_connection.get(self._pause_key)
     return reason.decode("utf-8") if reason else None