def public_widget(widget):
    """Serialize *widget* for the public (unauthenticated) dashboard API.

    Returns a plain dict; when the widget has a visualization, it is
    embedded under 'visualization' together with the latest query result.
    """
    res = {
        'id': widget.id,
        'width': widget.width,
        'options': json_loads(widget.options),
        'text': widget.text,
        'updated_at': widget.updated_at,
        'created_at': widget.created_at
    }

    # NOTE(review): assumes latest_query_data_id is set whenever the widget
    # has a visualization -- a missing result would make .get() return None
    # and .to_dict() raise. TODO confirm with callers.
    if widget.visualization and widget.visualization.id:
        query_data = models.QueryResult.query.get(widget.visualization.query_rel.latest_query_data_id).to_dict()
        res['visualization'] = {
            'type': widget.visualization.type,
            'name': widget.visualization.name,
            'description': widget.visualization.description,
            'options': json_loads(widget.visualization.options),
            'updated_at': widget.visualization.updated_at,
            'created_at': widget.visualization.created_at,
            'query': {
                'query': ' ',  # workaround, as otherwise the query data won't be loaded.
                'name': widget.visualization.query_rel.name,
                'description': widget.visualization.query_rel.description,
                'options': {},
                'latest_query_data': query_data
            }
        }

    return res
def get_schema(self, get_stats=False):
    """Return the configured keyspace's schema as {'name', 'columns'} dicts.

    The system table queried depends on the Cassandra release:
    2.x exposes system.schema_columns, 3.x+ system_schema.columns.

    Raises Exception when either query fails.
    """
    query = """
    select release_version from system.local;
    """
    results, error = self.run_query(query, None)
    # BUG FIX: the error was previously ignored, so a failed query made
    # json_loads(None) raise a confusing TypeError instead of a clear error
    # (sibling query runners already raise here).
    if error is not None:
        raise Exception("Failed getting schema.")
    results = json_loads(results)
    release_version = results['rows'][0]['release_version']

    query = """
    SELECT table_name, column_name
    FROM system_schema.columns
    WHERE keyspace_name ='{}';
    """.format(self.configuration['keyspace'])

    if release_version.startswith('2'):
        query = """
        SELECT columnfamily_name AS table_name, column_name
        FROM system.schema_columns
        WHERE keyspace_name ='{}';
        """.format(self.configuration['keyspace'])

    results, error = self.run_query(query, None)
    if error is not None:
        raise Exception("Failed getting schema.")
    results = json_loads(results)

    schema = {}
    for row in results['rows']:
        table_name = row['table_name']
        column_name = row['column_name']
        if table_name not in schema:
            schema[table_name] = {'name': table_name, 'columns': []}
        schema[table_name]['columns'].append(column_name)

    return schema.values()
def get_waiting_in_queue(queue_name):
    """Build job rows for every task still waiting in *queue_name*."""
    waiting = []
    for raw in redis_connection.lrange(queue_name, 0, -1):
        payload = json_loads(raw)
        headers = payload['headers']

        try:
            task_args = json_loads(headers['argsrepr'])
            # Ad-hoc executions carry the sentinel 'adhoc' instead of an id.
            if task_args.get('query_id') == 'adhoc':
                task_args['query_id'] = None
        except ValueError:
            task_args = {}

        row = {
            'state': 'waiting_in_queue',
            'task_name': headers['task'],
            'worker': None,
            'worker_pid': None,
            'start_time': None,
            'task_id': headers['id'],
            'queue': payload['properties']['delivery_info']['routing_key'],
        }
        row.update(task_args)
        waiting.append(row)

    return waiting
def process_formdata(self, valuelist):
    """Validate the submitted value parses as JSON and store the raw string."""
    if not valuelist:
        self.data = ''
        return

    raw = valuelist[0]
    try:
        # Parse only to validate; the field keeps the original string.
        json_loads(raw)
    except ValueError:
        raise ValueError(self.gettext(u'Invalid JSON'))
    self.data = raw
def get_schema(self, get_stats=False):
    """Return {'name': 'schema.table', 'columns': [...]} dicts for every
    table outside INFORMATION_SCHEMA in the configured database."""
    query = """
    SELECT col.table_schema, col.table_name, col.column_name
    FROM {database}.information_schema.columns col
    WHERE col.table_schema <> 'INFORMATION_SCHEMA'
    """.format(database=self.configuration['database'])

    results, error = self.run_query(query, None)
    if error is not None:
        raise Exception("Failed getting schema.")

    tables = {}
    for row in json_loads(results)['rows']:
        name = '{}.{}'.format(row['TABLE_SCHEMA'], row['TABLE_NAME'])
        tables.setdefault(name, {'name': name, 'columns': []})
        tables[name]['columns'].append(row['COLUMN_NAME'])

    return tables.values()
def _get_tables(self, schema):
    """Populate *schema* with tables/columns, skipping SQL Server system schemas."""
    query = """
    SELECT table_schema, table_name, column_name
    FROM INFORMATION_SCHEMA.COLUMNS
    WHERE table_schema NOT IN ('guest','INFORMATION_SCHEMA','sys','db_owner','db_accessadmin'
    ,'db_securityadmin','db_ddladmin','db_backupoperator','db_datareader'
    ,'db_datawriter','db_denydatareader','db_denydatawriter'
    );
    """

    results, error = self.run_query(query, None)
    if error is not None:
        raise Exception("Failed getting schema.")

    for row in json_loads(results)['rows']:
        # Qualify with the schema name unless it is the configured default db.
        if row['table_schema'] == self.configuration['db']:
            table_name = row['table_name']
        else:
            table_name = u'{}.{}'.format(row['table_schema'], row['table_name'])

        if table_name not in schema:
            schema[table_name] = {'name': table_name, 'columns': []}
        schema[table_name]['columns'].append(row['column_name'])

    return schema.values()
def _get_tables(self, schema):
    """Populate *schema* with user tables/columns, skipping MySQL system schemas."""
    query = """
    SELECT col.table_schema, col.table_name, col.column_name
    FROM `information_schema`.`columns` col
    WHERE col.table_schema NOT IN ('information_schema', 'performance_schema', 'mysql', 'sys');
    """

    results, error = self.run_query(query, None)
    if error is not None:
        raise Exception("Failed getting schema.")

    for row in json_loads(results)['rows']:
        # Qualify with the schema name unless it is the configured default db.
        if row['table_schema'] == self.configuration['db']:
            table_name = row['table_name']
        else:
            table_name = u'{}.{}'.format(row['table_schema'], row['table_name'])

        if table_name not in schema:
            schema[table_name] = {'name': table_name, 'columns': []}
        schema[table_name]['columns'].append(row['column_name'])

    return schema.values()
def run_query(self, query, user):
    """Execute *query* on BigQuery and return (json_data, error).

    Enforces the optional totalMBytesProcessedLimit setting by checking
    the query's estimated bytes processed first; returns an error string
    (and no data) when the limit would be exceeded, the API rejects the
    query, or the user cancels.
    """
    logger.debug("BigQuery got query: %s", query)

    bigquery_service = self._get_bigquery_service()
    jobs = bigquery_service.jobs()

    try:
        if "totalMBytesProcessedLimit" in self.configuration:
            limitMB = self.configuration["totalMBytesProcessedLimit"]
            processedMB = self._get_total_bytes_processed(jobs, query) / 1000.0 / 1000.0
            if limitMB < processedMB:
                return None, "Larger than %d MBytes will be processed (%f MBytes)" % (limitMB, processedMB)

        data = self._get_query_result(jobs, query)
        error = None

        json_data = json_dumps(data, ignore_nan=True)
    except apiclient.errors.HttpError as e:
        json_data = None
        # A 400 carries a structured JSON error payload; anything else is
        # passed through opaquely.
        if e.resp.status == 400:
            error = json_loads(e.content)['error']['message']
        else:
            error = e.content
    except KeyboardInterrupt:
        error = "Query cancelled by user."
        json_data = None

    return json_data, error
def run_query(self, query, user):
    """Run a JQL search against the configured JIRA instance.

    *query* is a JSON document; its 'queryType' key selects between a
    normal issue search ('select', the default) and a bare count
    ('count'). Returns (serialized results, error).
    """
    jql_url = '{}/rest/api/2/search'.format(self.configuration["url"])

    try:
        parsed = json_loads(query)
        query_type = parsed.pop('queryType', 'select')
        field_mapping = FieldMapping(parsed.pop('fieldMapping', {}))

        if query_type == 'count':
            # A count needs no issue bodies -- fetch as little as possible.
            parsed['maxResults'] = 1
            parsed['fields'] = ''
        else:
            parsed['maxResults'] = parsed.get('maxResults', 1000)

        response, error = self.get_response(jql_url, params=parsed)
        if error is not None:
            return None, error

        data = response.json()
        if query_type == 'count':
            results = parse_count(data)
        else:
            results = parse_issues(data, field_mapping)

        return results.to_json(), None
    except KeyboardInterrupt:
        return None, "Query cancelled by user."
def parse_tasks(task_lists, state):
    """Flatten Celery task lists into job rows tagged with *state*."""
    rows = []
    for task in itertools.chain(*task_lists.values()):
        row = {
            'state': state,
            'task_name': task['name'],
            'worker': task['hostname'],
            'queue': task['delivery_info']['routing_key'],
            'task_id': task['id'],
            'worker_pid': task['worker_pid'],
            'start_time': task['time_start'],
        }

        if task['name'] == 'redash.tasks.execute_query':
            try:
                args = json_loads(task['args'])
            except ValueError:
                args = {}

            # Ad-hoc executions carry the sentinel 'adhoc' instead of an id.
            if args.get('query_id') == 'adhoc':
                args['query_id'] = None

            row.update(args)

        rows.append(row)

    return rows
def get_schema(self, get_stats=False):
    """Return {'name': 'schema.table', 'columns': [...]} dicts built from
    INFORMATION_SCHEMA.COLUMNS."""
    query = """
    SELECT TABLE_SCHEMA, TABLE_NAME, COLUMN_NAME
    FROM INFORMATION_SCHEMA.COLUMNS
    WHERE TABLE_SCHEMA <> 'INFORMATION_SCHEMA'
    """

    results, error = self.run_query(query, None)
    if error is not None:
        raise Exception("Failed getting schema.")

    tables = {}
    for row in json_loads(results)['rows']:
        name = '{}.{}'.format(row['TABLE_SCHEMA'], row['TABLE_NAME'])
        tables.setdefault(name, {'name': name, 'columns': []})
        tables[name]['columns'].append(row['COLUMN_NAME'])

    return tables.values()
def _fetch_rows(self, query_id):
    """Load the latest cached result rows for *query_id*, enforcing access."""
    query = models.Query.get_by_id_and_org(query_id, self.current_org)
    require_access(query.data_source.groups, self.current_user, view_only)

    latest = models.QueryResult.get_by_id_and_org(query.latest_query_data_id, self.current_org)
    parsed = json_loads(latest.data)
    return parsed["rows"]
def data(self):
    """Lazily-parsed JSON payload of this result.

    Returns None when no raw data is stored. The parsed value is cached
    on the instance under DESERIALIZED_DATA_ATTR so repeated access does
    not re-parse.
    """
    if self._data is None:
        return None

    if not hasattr(self, DESERIALIZED_DATA_ATTR):
        setattr(self, DESERIALIZED_DATA_ATTR, json_loads(self._data))

    # NOTE(review): assumes DESERIALIZED_DATA_ATTR == '_deserialized_data';
    # the setattr/attribute pair must stay in sync. TODO confirm.
    return self._deserialized_data
def _get_analytics_service(self):
    """Build a Google Analytics v3 client from the base64-encoded key file."""
    scopes = ["https://www.googleapis.com/auth/analytics.readonly"]
    key_dict = json_loads(b64decode(self.configuration["jsonKeyFile"]))

    creds = ServiceAccountCredentials.from_json_keyfile_dict(key_dict, scopes)
    http = creds.authorize(httplib2.Http())
    return build("analytics", "v3", http=http, cache_discovery=False)
def serialize_dashboard(obj, with_widgets=False, user=None, with_favorite_state=True):
    """Serialize a Dashboard model to an API dict.

    When with_widgets is set, widgets whose query the given *user* cannot
    access are reduced to layout-only fields and marked 'restricted'.
    Dashboards whose owner row is missing fall back to a placeholder user
    with the bundled default avatar.
    """
    layout = json_loads(obj.layout)

    widgets = []
    if with_widgets:
        for w in obj.widgets:
            if w.visualization_id is None:
                widgets.append(serialize_widget(w))
            elif user and has_access(w.visualization.query_rel, user, view_only):
                widgets.append(serialize_widget(w))
            else:
                # No access: keep only layout-relevant fields.
                widget = project(serialize_widget(w),
                                 ('id', 'width', 'dashboard_id', 'options', 'created_at', 'updated_at'))
                widget['restricted'] = True
                widgets.append(widget)
    else:
        widgets = None

    if obj.user is not None:
        _user = obj.user.to_dict()
    else:
        # Owner is gone -- synthesize a placeholder user with the default
        # avatar served from the webpack asset manifest.
        assets = app.extensions['webpack']['assets'] or {}
        path = 'images/avatar.svg'
        profile_image_url = url_for('static', filename=assets.get(path, path))
        _user = {
            'name': 'user',
            'profile_image_url': profile_image_url,
        }

    d = {
        'id': obj.id,
        'slug': obj.slug,
        'name': obj.name,
        'user_id': obj.user_id,
        # TODO: we should properly load the users
        'user': _user,
        'layout': layout,
        'dashboard_filters_enabled': obj.dashboard_filters_enabled,
        'widgets': widgets,
        'is_archived': obj.is_archived,
        'is_draft': obj.is_draft,
        'tags': obj.tags or [],
        # TODO: bulk load favorites
        'updated_at': obj.updated_at,
        'created_at': obj.created_at,
        'version': obj.version
    }

    if with_favorite_state:
        d['is_favorite'] = models.Favorite.is_favorite(current_user.id, obj)

    return d
def test_get_dashboard(self):
    """Fetching a dashboard by slug returns 200 and the full serialization."""
    d1 = self.factory.create_dashboard()

    rv = self.make_request('get', '/api/dashboards/{0}'.format(d1.slug))
    # FIX: assertEquals is a deprecated alias (removed in Python 3.12);
    # use assertEqual, matching the sibling tests in this file.
    self.assertEqual(rv.status_code, 200)

    expected = serialize_dashboard(d1, with_widgets=True, with_favorite_state=False)
    actual = json_loads(rv.data)

    self.assertResponseEqual(expected, actual)
def _get_definitions(self, schema, query):
    """Run *query* and merge its parsed result into *schema* via build_schema."""
    results, error = self.run_query(query, None)
    if error is not None:
        raise Exception("Failed getting schema.")

    build_schema(json_loads(results), schema)
def _load_result(query_id):
    """Return the parsed latest cached results for *query_id* (access-checked)."""
    # Imported locally to avoid import cycles at module load time.
    from redash.authentication.org_resolving import current_org
    from redash import models

    parent_query = models.Query.get_by_id_and_org(query_id, current_org)
    require_access(parent_query.data_source.groups, current_user, view_only)

    latest = models.QueryResult.get_by_id_and_org(parent_query.latest_query_data_id, current_org)
    return json_loads(latest.data)
def test_get_dashboard_with_slug(self):
    """A legacy slug-based fetch responds 200 with the serialized dashboard."""
    dashboard = self.factory.create_dashboard()

    rv = self.make_request("get", "/api/dashboards/{0}?legacy".format(dashboard.slug))
    self.assertEqual(rv.status_code, 200)

    self.assertResponseEqual(
        serialize_dashboard(dashboard, with_widgets=True, with_favorite_state=False),
        json_loads(rv.data),
    )
def serialize_dashboard(obj, with_widgets=False, user=None, with_favorite_state=True):
    """Serialize a Dashboard model to an API dict.

    When with_widgets is set, widgets whose query the given *user* cannot
    access are reduced to layout-only fields and marked 'restricted'.
    Note: with_favorite_state is accepted but unused in this variant.
    """
    layout = json_loads(obj.layout)

    widgets = []
    if with_widgets:
        for w in obj.widgets:
            if w.visualization_id is None:
                widgets.append(serialize_widget(w))
            elif user and has_access(w.visualization.query_rel, user, view_only):
                widgets.append(serialize_widget(w))
            else:
                # No access: keep only layout-relevant fields.
                widget = project(
                    serialize_widget(w),
                    (
                        "id",
                        "width",
                        "dashboard_id",
                        "options",
                        "created_at",
                        "updated_at",
                    ),
                )
                widget["restricted"] = True
                widgets.append(widget)
    else:
        widgets = None

    d = {
        "id": obj.id,
        "slug": obj.name_as_slug,
        "name": obj.name,
        "user_id": obj.user_id,
        "user": {
            "id": obj.user.id,
            "name": obj.user.name,
            "email": obj.user.email,
            "profile_image_url": obj.user.profile_image_url,
        },
        "layout": layout,
        "dashboard_filters_enabled": obj.dashboard_filters_enabled,
        "widgets": widgets,
        "options": obj.options,
        "is_archived": obj.is_archived,
        "is_draft": obj.is_draft,
        "tags": obj.tags or [],
        "updated_at": obj.updated_at,
        "created_at": obj.created_at,
        "version": obj.version,
    }

    return d
def render_template(self):
    """Render the alert's custom mustache template against the latest query data."""
    if not self.template:
        return ''

    data = json_loads(self.query_rel.latest_query_data.data)
    context = {
        'rows': data['rows'],
        'cols': data['columns'],
        'state': self.state,
    }
    return mustache_render(self.template, context)
def get_databases(self):
    """List database namespaces via SHOW DATABASES."""
    results, error = self.run_query("SHOW DATABASES", None)
    if error is not None:
        raise Exception("Failed getting schema.")

    rows = json_loads(results)["rows"]
    return [row["namespace"] for row in rows]
def to_dict(self):
    """Serialize this query result; 'data' is the parsed JSON payload."""
    return dict(
        id=self.id,
        query_hash=self.query_hash,
        query=self.query_text,
        data=json_loads(self.data),
        data_source_id=self.data_source_id,
        runtime=self.runtime,
        retrieved_at=self.retrieved_at,
    )
def serialize_dashboard(obj, with_widgets=False, user=None, with_favorite_state=True):
    """Serialize a Dashboard model to an API dict.

    When with_widgets is set, widgets whose query the given *user* cannot
    access are reduced to layout-only fields and marked 'restricted'.
    When with_favorite_state is set, 'is_favorite' reflects the current
    user's favorites.
    """
    layout = json_loads(obj.layout)

    widgets = []
    if with_widgets:
        for w in obj.widgets:
            if w.visualization_id is None:
                widgets.append(serialize_widget(w))
            elif user and has_access(w.visualization.query_rel, user, view_only):
                widgets.append(serialize_widget(w))
            else:
                # No access: keep only layout-relevant fields.
                widget = project(
                    serialize_widget(w),
                    (
                        "id",
                        "width",
                        "dashboard_id",
                        "options",
                        "created_at",
                        "updated_at",
                    ),
                )
                widget["restricted"] = True
                widgets.append(widget)
    else:
        widgets = None

    d = {
        "id": obj.id,
        "slug": obj.slug,
        "name": obj.name,
        "user_id": obj.user_id,
        # TODO: we should properly load the users
        "user": obj.user.to_dict(),
        "layout": layout,
        "dashboard_filters_enabled": obj.dashboard_filters_enabled,
        "widgets": widgets,
        "is_archived": obj.is_archived,
        "is_draft": obj.is_draft,
        "tags": obj.tags or [],
        # TODO: bulk load favorites
        "updated_at": obj.updated_at,
        "created_at": obj.created_at,
        "version": obj.version,
    }

    if with_favorite_state:
        d["is_favorite"] = models.Favorite.is_favorite(current_user.id, obj)

    return d
def get_databases(self):
    """List databases; values are taken from the first result column."""
    results, error = self.run_query("SHOW DATABASES", None)
    if error is not None:
        raise Exception("Failed getting schema.")

    parsed = json_loads(results)
    # Key rows by whatever the first column is actually called, since the
    # header name is not fixed.
    first_column = parsed["columns"][0]["name"]
    return [row[first_column] for row in parsed["rows"]]
def _load_result(query_id):
    """Return parsed latest results for *query_id*; 400 when the query is
    detached from any data source."""
    # Imported locally to avoid import cycles at module load time.
    from redash.authentication.org_resolving import current_org
    from redash import models

    query = models.Query.get_by_id_and_org(query_id, current_org)
    if not query.data_source:
        abort(400, message="This query is detached from any data source. Please select a different query.")

    query_result = models.QueryResult.get_by_id_and_org(query.latest_query_data_id, current_org)
    return json_loads(query_result.data)
def _load_result(query_id, org):
    """Return parsed latest results for *query_id* within *org*.

    Raises QueryDetachedFromDataSourceError when the query has no data
    source.
    """
    # Imported locally to avoid import cycles at module load time.
    from redash import models

    query = models.Query.get_by_id_and_org(query_id, org)
    if not query.data_source:
        raise QueryDetachedFromDataSourceError(query_id)

    result = models.QueryResult.get_by_id_and_org(query.latest_query_data_id, org)
    return json_loads(result.data)
def _get_spreadsheet_service(self):
    """Create an authenticated gspread client with a timeout-aware session."""
    key_dict = json_loads(b64decode(self.configuration["jsonKeyFile"]))
    creds = ServiceAccountCredentials.from_json_keyfile_dict(
        key_dict, ["https://spreadsheets.google.com/feeds"])

    session = Session()
    session.requests_session = TimeoutSession()

    client = gspread.Client(auth=creds, session=session)
    client.login()
    return client
def make_csv_content(self):
    """Render the stored query result as a CSV string (header + rows)."""
    buf = cStringIO.StringIO()
    query_data = json_loads(self.data)

    fieldnames = [col['name'] for col in query_data['columns']]
    writer = csv.DictWriter(buf, extrasaction="ignore", fieldnames=fieldnames)
    # Swap in the project's writer -- presumably for unicode-safe output;
    # verify against utils.UnicodeWriter.
    writer.writer = utils.UnicodeWriter(buf)
    writer.writeheader()

    for row in query_data['rows']:
        writer.writerow(row)

    return buf.getvalue()
def run_query(self, query, user):
    """Execute an Elasticsearch search described by the JSON *query*.

    The JSON may carry an 'index' and optional 'result_fields'; the rest
    is sent as the search body. Returns (json_data, error).
    """
    try:
        error = None
        logger.debug(query)
        query_dict = json_loads(query)

        index_name = query_dict.pop("index", "")
        result_fields = query_dict.pop("result_fields", None)

        if not self.server_url:
            error = "Missing configuration key 'server'"
            return None, error

        url = "{0}/{1}/_search".format(self.server_url, index_name)
        mapping_url = "{0}/{1}/_mapping".format(self.server_url, index_name)

        mappings, error = self._get_query_mappings(mapping_url)
        if error:
            return None, error

        logger.debug("Using URL: %s", url)
        logger.debug("Using query: %s", query_dict)
        r = requests.get(url, json=query_dict, auth=self.auth)
        r.raise_for_status()
        logger.debug("Result: %s", r.json())

        result_columns = []
        result_rows = []
        self._parse_results(mappings, result_fields, r.json(), result_columns, result_rows)

        json_data = json_dumps({
            "columns": result_columns,
            "rows": result_rows
        })
    except KeyboardInterrupt:
        # BUG FIX: the old handler called logger.exception(e) with an
        # undefined name `e`, raising NameError and masking the cancellation.
        error = "Query cancelled by user."
        json_data = None
    except requests.HTTPError as e:
        logger.exception(e)
        error = "Failed to execute query. Return Code: {0} Reason: {1}".format(
            r.status_code, r.text)
        json_data = None
    except requests.exceptions.RequestException as e:
        logger.exception(e)
        error = "Connection refused"
        json_data = None

    return json_data, error
def serialize_dashboard(obj, with_widgets=False, user=None, with_favorite_state=True):
    """Serialize a Dashboard model including folder/type/group metadata.

    'user' in the output is the *current* user while 'created_by' is the
    dashboard owner. Widgets whose query the given *user* cannot access
    are reduced to layout-only fields and marked 'restricted'.
    Note: with_favorite_state is accepted but unused in this variant.
    """
    layout = json_loads(obj.layout)

    widgets = []
    if with_widgets:
        for w in obj.widgets:
            if w.visualization_id is None:
                widgets.append(serialize_widget(w))
            elif user and has_access(w.visualization.query_rel, user, view_only):
                widgets.append(serialize_widget(w))
            else:
                # No access: keep only layout-relevant fields.
                widget = project(serialize_widget(w),
                                 ('id', 'width', 'dashboard_id', 'options', 'created_at', 'updated_at'))
                widget['restricted'] = True
                widgets.append(widget)
    else:
        widgets = None

    d = {
        'id': obj.id,
        'slug': obj.slug,
        'name': obj.name,
        'user_id': current_user.id,
        'created_by': obj.user.to_dict(),
        'user': current_user.to_dict(),
        'layout': layout,
        'dashboard_filters_enabled': obj.dashboard_filters_enabled,
        'widgets': widgets,
        'is_archived': obj.is_archived,
        'is_draft': obj.is_draft,
        'tags': obj.tags or [],
        'updated_at': obj.updated_at,
        'created_at': obj.created_at,
        'version': obj.version,
        'background_image': obj.background_image,
        'description': obj.description,
        'type': obj.type or 'dashboard',
        'folder_id': obj.folder_id
    }

    d['groups'] = [
        g.to_dict(with_permissions_for=True)
        for g in models.DashboardGroup.get_by_dashboard(obj)
    ]

    return d
def _get_spreadsheet_service(self):
    """Create an authenticated gspread client backed by a timeout session."""
    key_dict = json_loads(b64decode(self.configuration['jsonKeyFile']))
    creds = ServiceAccountCredentials.from_json_keyfile_dict(
        key_dict, ['https://spreadsheets.google.com/feeds'])

    http_session = HTTPSession()
    http_session.requests_session = TimeoutSession()

    client = gspread.Client(auth=creds, http_session=http_session)
    client.login()
    return client
def get_definition(self, base_url):
    """Fetch and parse <base_url>adapter.json; returns (obj, error)."""
    def_url = base_url + "adapter.json"

    response, error = self.__get_json_response(def_url)
    if error is not None:
        return None, error

    body = response.content.strip()
    if not body:
        return None, "Got empty response from '{}'.".format(base_url)

    return json_loads(body), None
def test_success(self):
    """An embed fetch with a valid access token returns the public dashboard."""
    dashboard = self.factory.create_dashboard()
    token = self.factory.create_access_token()

    url = "/api/dashboards/embed/{}?access_token={}".format(dashboard.id, token)
    rv = self.make_request("get", url)

    self.assertEqual(rv.status_code, 200)
    self.assertResponseEqual(public_dashboard(dashboard), json_loads(rv.data))
def _get_bigquery_service(self):
    """Build an authorized BigQuery v2 client from the configured key file."""
    scopes = [
        "https://www.googleapis.com/auth/bigquery",
        "https://www.googleapis.com/auth/drive",
    ]
    key_dict = json_loads(b64decode(self.configuration['jsonKeyFile']))
    creds = ServiceAccountCredentials.from_json_keyfile_dict(key_dict, scopes)

    authorized_http = creds.authorize(httplib2.Http(timeout=settings.BIGQUERY_HTTP_TIMEOUT))
    return build("bigquery", "v2", http=authorized_http)
def get_query_results(user, query_id, bring_from_cache):
    """Return parsed results for *query_id*, from cache or fresh execution.

    Raises Exception when no cached result exists (cache mode) or the
    execution fails (fresh mode).
    """
    query = _load_query(user, query_id)

    if bring_from_cache:
        if query.latest_query_data_id is None:
            raise Exception("No cached result available for query {}.".format(query.id))
        results = query.latest_query_data.data
    else:
        results, error = query.data_source.query_runner.run_query(query.query_text, user)
        if error:
            raise Exception("Failed loading results for query id {}.".format(query.id))

    return json_loads(results)
def public_widget(widget):
    """Serialize *widget* for public dashboards; embeds its visualization."""
    serialized = {
        'id': widget.id,
        'width': widget.width,
        'options': json_loads(widget.options),
        'text': widget.text,
        'updated_at': widget.updated_at,
        'created_at': widget.created_at,
    }

    visualization = widget.visualization
    if visualization and visualization.id:
        serialized['visualization'] = public_visualization(visualization)

    return serialized
def _get_tables(self, schema):
    """Populate *schema* from sqlite_master plus a per-table PRAGMA lookup."""
    tables_sql = "select tbl_name from sqlite_master where type='table'"
    columns_sql = "PRAGMA table_info(%s)"

    results, error = self.run_query(tables_sql, None)
    if error is not None:
        raise Exception("Failed getting schema.")

    for row in json_loads(results)['rows']:
        table_name = row['tbl_name']
        schema[table_name] = {'name': table_name, 'columns': []}

        table_results, error = self.run_query(columns_sql % (table_name,), None)
        if error is not None:
            raise Exception("Failed getting schema.")

        for column in json_loads(table_results)['rows']:
            schema[table_name]['columns'].append(column['name'])

    return schema.values()
def get_schema(self, refresh=False):
    """Return the data source schema, cached in Redis under _schema_key.

    When *refresh* is set, the cache is bypassed and rewritten.
    """
    cached = None if refresh else redis_connection.get(self._schema_key)
    if cached is not None:
        return json_loads(cached)

    schema = sorted(self.query_runner.get_schema(get_stats=refresh),
                    key=lambda t: t['name'])
    redis_connection.set(self._schema_key, json_dumps(schema))
    return schema
def _get_tables(self, schema):
    """Populate *schema* from sqlite_master plus a per-table PRAGMA lookup."""
    tables_sql = "select tbl_name from sqlite_master where type='table'"
    columns_sql = "PRAGMA table_info(%s)"

    results, error = self.run_query(tables_sql, None)
    if error is not None:
        raise Exception("Failed getting schema.")

    for row in json_loads(results)['rows']:
        table_name = row['tbl_name']
        schema[table_name] = {'name': table_name, 'columns': []}

        table_results, error = self.run_query(columns_sql % (table_name,), None)
        if error is not None:
            raise Exception("Failed getting schema.")

        for column in json_loads(table_results)['rows']:
            schema[table_name]['columns'].append(column['name'])

    return list(schema.values())
def get_schema(self, get_stats=False):
    """Return the configured keyspace's schema as {'name', 'columns'} dicts.

    The system table queried depends on the Cassandra release:
    2.x exposes system.schema_columns, 3.x+ system_schema.columns.

    Raises Exception when either query fails.
    """
    query = """
    select release_version from system.local;
    """
    results, error = self.run_query(query, None)
    # BUG FIX: the error was previously ignored, so a failed query made
    # json_loads(None) raise a confusing TypeError instead of a clear error
    # (sibling query runners already raise here).
    if error is not None:
        raise Exception("Failed getting schema.")
    results = json_loads(results)
    release_version = results["rows"][0]["release_version"]

    query = """
    SELECT table_name, column_name
    FROM system_schema.columns
    WHERE keyspace_name ='{}';
    """.format(
        self.configuration["keyspace"]
    )

    if release_version.startswith("2"):
        query = """
        SELECT columnfamily_name AS table_name, column_name
        FROM system.schema_columns
        WHERE keyspace_name ='{}';
        """.format(
            self.configuration["keyspace"]
        )

    results, error = self.run_query(query, None)
    if error is not None:
        raise Exception("Failed getting schema.")
    results = json_loads(results)

    schema = {}
    for row in results["rows"]:
        table_name = row["table_name"]
        column_name = row["column_name"]
        if table_name not in schema:
            schema[table_name] = {"name": table_name, "columns": []}
        schema[table_name]["columns"].append(column_name)

    return list(schema.values())
def run_query(self, query, user):
    """Execute an Elasticsearch search described by the JSON *query*.

    The JSON may carry an 'index' and optional 'result_fields'; the rest
    is sent as the search body. Returns (json_data, error).
    """
    try:
        error = None
        logger.debug(query)
        query_dict = json_loads(query)

        index_name = query_dict.pop("index", "")
        result_fields = query_dict.pop("result_fields", None)

        if not self.server_url:
            error = "Missing configuration key 'server'"
            return None, error

        url = "{0}/{1}/_search".format(self.server_url, index_name)
        mapping_url = "{0}/{1}/_mapping".format(self.server_url, index_name)

        mappings, error = self._get_query_mappings(mapping_url)
        if error:
            return None, error

        logger.debug("Using URL: %s", url)
        logger.debug("Using query: %s", query_dict)
        r = requests.get(url, json=query_dict, auth=self.auth)
        r.raise_for_status()
        logger.debug("Result: %s", r.json())

        result_columns = []
        result_rows = []
        self._parse_results(mappings, result_fields, r.json(), result_columns, result_rows)

        json_data = json_dumps({
            "columns": result_columns,
            "rows": result_rows
        })
    except KeyboardInterrupt:
        # BUG FIX: the old handler called logger.exception(e) with an
        # undefined name `e`, raising NameError and masking the cancellation.
        error = "Query cancelled by user."
        json_data = None
    except requests.HTTPError as e:
        logger.exception(e)
        error = "Failed to execute query. Return Code: {0} Reason: {1}".format(r.status_code, r.text)
        json_data = None
    except requests.exceptions.RequestException as e:
        logger.exception(e)
        error = "Connection refused"
        json_data = None

    return json_data, error
def get_schema(self, get_stats=False):
    """Derive a pseudo-schema from the graph: node labels become tables,
    property keys become columns.

    Raises Exception when the discovery query fails.
    """
    query = "MATCH (n) RETURN distinct keys(n) as x, labels(n) as y"

    data, error = self.run_query(query, None)
    if error is not None:
        raise Exception("Failed getting schema.")

    results = json_loads(data)
    # FIX: removed the unused local `arr` that the original declared.
    schema = {}
    for row in results["rows"]:
        # NOTE(review): assumes every node carries at least one label; a
        # label-less node would raise IndexError here. TODO confirm.
        table_name = row["y"][0]
        if table_name not in schema:
            schema[table_name] = {'name': table_name, 'columns': []}
        # Merge this node's property keys, de-duplicated.
        schema[table_name]['columns'] = list(
            set(schema[table_name]['columns'] + row["x"]))

    return list(schema.values())
def serialize_visualization(object, with_query=True):
    """Serialize a Visualization model; optionally embed its query."""
    serialized = {
        'id': object.id,
        'type': object.type,
        'name': object.name,
        'description': object.description,
        'options': json_loads(object.options),
        'updated_at': object.updated_at,
        'created_at': object.created_at,
    }

    if with_query:
        serialized['query'] = serialize_query(object.query_rel)

    return serialized
def serialize_widget(object):
    """Serialize a Widget model; embeds its visualization when present."""
    serialized = {
        'id': object.id,
        'width': object.width,
        'options': json_loads(object.options),
        'dashboard_id': object.dashboard_id,
        'text': object.text,
        'updated_at': object.updated_at,
        'created_at': object.created_at,
    }

    if object.visualization and object.visualization.id:
        serialized['visualization'] = serialize_visualization(object.visualization)

    return serialized
def get_schema(self, refresh=False):
    """Return this data source's schema, cached in Redis under a per-id key.

    When *refresh* is set, the cache is bypassed and rewritten.
    """
    key = "data_source:schema:{}".format(self.id)

    cached = None if refresh else redis_connection.get(key)
    if cached is not None:
        return json_loads(cached)

    schema = sorted(self.query_runner.get_schema(get_stats=refresh),
                    key=lambda t: t['name'])
    redis_connection.set(key, json_dumps(schema))
    return schema
def _get_definitions(self, schema, query):
    """Run *query* and fold its (schema, table, column) rows into *schema*."""
    results, error = self.run_query(query, None)
    if error is not None:
        raise Exception("Failed getting schema.")

    for row in json_loads(results)['rows']:
        # Only non-public schemas are prefixed onto the table name.
        if row['table_schema'] == 'public':
            table_name = row['table_name']
        else:
            table_name = u'{}.{}'.format(row['table_schema'], row['table_name'])

        if table_name not in schema:
            schema[table_name] = {'name': table_name, 'columns': []}
        schema[table_name]['columns'].append(row['column_name'])
def _get_tables(self, schema):
    """Populate *schema* from ClickHouse's system.columns table."""
    query = "SELECT database, table, name FROM system.columns WHERE database NOT IN ('system')"

    results, error = self.run_query(query, None)
    if error is not None:
        raise Exception("Failed getting schema.")

    for row in json_loads(results)['rows']:
        table_name = '{}.{}'.format(row['database'], row['table'])
        schema.setdefault(table_name, {'name': table_name, 'columns': []})
        schema[table_name]['columns'].append(row['name'])

    return schema.values()
def evaluate(self):
    """Evaluate the alert condition against the latest query data.

    Returns TRIGGERED_STATE, OK_STATE, or UNKNOWN_STATE (when there are
    no rows or the configured column is absent from the first row).
    """
    data = json_loads(self.query_rel.latest_query_data.data)

    rows = data['rows']
    column = self.options['column']
    if not rows or column not in rows[0]:
        return self.UNKNOWN_STATE

    value = rows[0][column]
    op = self.options['op']
    threshold = self.options['value']

    triggered = (
        (op == 'greater than' and value > threshold) or
        (op == 'less than' and value < threshold) or
        (op == 'equals' and value == threshold)
    )
    return self.TRIGGERED_STATE if triggered else self.OK_STATE
def serialize_query_result_to_csv(query_result):
    """Render *query_result*'s stored JSON data as a CSV string.

    Column order comes from _get_column_lists; values in columns it flags
    as special are passed through their converter before writing.
    """
    s = cStringIO.StringIO()

    query_data = json_loads(query_result.data)

    fieldnames, special_columns = _get_column_lists(query_data['columns'])

    writer = csv.DictWriter(s, extrasaction="ignore", fieldnames=fieldnames)
    # Swap in the project's writer -- presumably for unicode-safe output;
    # verify against UnicodeWriter.
    writer.writer = UnicodeWriter(s)
    writer.writeheader()

    for row in query_data['rows']:
        for col_name, converter in special_columns.iteritems():
            if col_name in row:
                row[col_name] = converter(row[col_name])

        writer.writerow(row)

    return s.getvalue()
def serialize_dashboard(obj, with_widgets=False, user=None, with_favorite_state=True):
    """Serialize a Dashboard model to an API dict.

    When with_widgets is set, widgets whose query the given *user* cannot
    access are reduced to layout-only fields and marked 'restricted'.
    When with_favorite_state is set, 'is_favorite' reflects the current
    user's favorites.
    """
    layout = json_loads(obj.layout)

    widgets = []
    if with_widgets:
        for w in obj.widgets:
            if w.visualization_id is None:
                widgets.append(serialize_widget(w))
            elif user and has_access(w.visualization.query_rel, user, view_only):
                widgets.append(serialize_widget(w))
            else:
                # No access: keep only layout-relevant fields.
                widget = project(serialize_widget(w),
                                 ('id', 'width', 'dashboard_id', 'options', 'created_at', 'updated_at'))
                widget['restricted'] = True
                widgets.append(widget)
    else:
        widgets = None

    d = {
        'id': obj.id,
        'slug': obj.slug,
        'name': obj.name,
        'user_id': obj.user_id,
        # TODO: we should properly load the users
        'user': obj.user.to_dict(),
        'layout': layout,
        'dashboard_filters_enabled': obj.dashboard_filters_enabled,
        'widgets': widgets,
        'is_archived': obj.is_archived,
        'is_draft': obj.is_draft,
        'tags': obj.tags or [],
        # TODO: bulk load favorites
        'updated_at': obj.updated_at,
        'created_at': obj.created_at,
        'version': obj.version
    }

    if with_favorite_state:
        d['is_favorite'] = models.Favorite.is_favorite(current_user.id, obj)

    return d
def outdated_queries():
    """List queries the refresh manager has flagged as outdated."""
    manager_status = redis_connection.hgetall('redash:status')
    query_ids = json_loads(manager_status.get('query_ids', '[]'))

    if query_ids:
        outdated_queries = (
            models.Query.query.outerjoin(models.QueryResult)
            .filter(models.Query.id.in_(query_ids))
            .order_by(models.Query.created_at.desc())
        )
    else:
        outdated_queries = []

    record_event(current_org, current_user._get_current_object(), {
        'action': 'list',
        'object_type': 'outdated_queries',
    })

    serialized = QuerySerializer(
        outdated_queries, with_stats=True, with_last_modified_by=False
    ).serialize()

    return json_response({
        'queries': serialized,
        'updated_at': manager_status['last_refresh_at'],
    })
def serialize_query_result_to_xlsx(query_result):
    """Render *query_result*'s stored JSON data as an XLSX workbook blob."""
    output = cStringIO.StringIO()
    query_data = json_loads(query_result.data)

    book = xlsxwriter.Workbook(output, {'constant_memory': True})
    sheet = book.add_worksheet("result")

    # Header row.
    column_names = [col['name'] for col in query_data['columns']]
    for c, name in enumerate(column_names):
        sheet.write(0, c, name)

    for r, row in enumerate(query_data['rows']):
        for c, name in enumerate(column_names):
            v = row.get(name)
            # Containers can't be written as cells; flatten to text.
            if isinstance(v, (list, dict)):
                v = str(v).encode('utf-8')
            sheet.write(r + 1, c, v)

    book.close()
    return output.getvalue()