Example #1
def embed(query_id, visualization_id):

    query = models.Query.get_by_id(query_id)
    vis = query.visualizations.where(models.Visualization.id == visualization_id).first()
    qr = {}
    if vis is not None:
        vis = vis.to_dict()
        qr = query.latest_query_data
        if qr is None:
            abort(400, message="No Results for this query")
        else:
            qr = qr.to_dict()
    else:
        abort(404, message="Visualization not found.")

    client_config = {}
    client_config.update(settings.COMMON_CLIENT_CONFIG)

    return render_template(
        "embed.html",
        name=settings.NAME,
        client_config=json_dumps(client_config),
        visualization=json_dumps(vis),
        query_result=json_dumps(qr),
        analytics=settings.ANALYTICS,
    )
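
A note on the helper these examples share: every snippet routes serialization through json_dumps instead of calling json.dumps directly. A minimal sketch of such a wrapper, assuming it merely centralizes a custom encoder for types the stdlib rejects (the project's actual helper may differ):

import datetime
import decimal
import json

class ExampleJSONEncoder(json.JSONEncoder):
    # Assumed behavior for illustration, not the project's actual encoder.
    def default(self, o):
        if isinstance(o, (datetime.date, datetime.datetime)):
            return o.isoformat()
        if isinstance(o, decimal.Decimal):
            return float(o)
        return super(ExampleJSONEncoder, self).default(o)

def json_dumps(data, **kwargs):
    kwargs.setdefault('cls', ExampleJSONEncoder)
    return json.dumps(data, **kwargs)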
Example #2
def embed(query_id, visualization_id, org_slug=None):
    # TODO: add event for embed access
    query = models.Query.get_by_id_and_org(query_id, current_org)
    vis = query.visualizations.where(models.Visualization.id == visualization_id).first()
    qr = {}

    if vis is not None:
        vis = vis.to_dict()
        qr = query.latest_query_data
        if qr is None:
            abort(400, message="No Results for this query")
        else:
            qr = qr.to_dict()
    else:
        abort(404, message="Visualization not found.")

    client_config = {}
    client_config.update(settings.COMMON_CLIENT_CONFIG)

    if settings.MULTI_ORG:
        base_href = url_for('index', _external=True, org_slug=current_org.slug)
    else:
        base_href = url_for('index', _external=True)

    return render_template("embed.html",
                           name=settings.NAME,
                           base_href=base_href,
                           client_config=json_dumps(client_config),
                           visualization=json_dumps(vis),
                           query_result=json_dumps(qr),
                           analytics=settings.ANALYTICS)
Example #3
def embed(query_id, visualization_id, org_slug=None):
    # TODO: add event for embed access
    query = models.Query.get_by_id_and_org(query_id, current_org)
    require_access(query.groups, current_user, view_only)
    vis = query.visualizations.where(models.Visualization.id == visualization_id).first()
    qr = {}

    if vis is not None:
        vis = vis.to_dict()
        qr = query.latest_query_data
        if qr is None:
            abort(400, message="No Results for this query")
        else:
            qr = qr.to_dict()
    else:
        abort(404, message="Visualization not found.")

    client_config = {}
    client_config.update(settings.COMMON_CLIENT_CONFIG)

    qr = project(qr, ('data', 'id', 'retrieved_at'))
    vis = project(vis, ('description', 'name', 'id', 'options', 'query', 'type', 'updated_at'))
    vis['query'] = project(vis['query'], ('created_at', 'description', 'name', 'id', 'latest_query_data_id', 'updated_at'))

    return render_template("embed.html",
                           name=settings.NAME,
                           base_href=base_href(),
                           client_config=json_dumps(client_config),
                           visualization=json_dumps(vis),
                           query_result=json_dumps(qr),
                           analytics=settings.ANALYTICS)
Example #4
def embed(query_id, visualization_id, org_slug=None):
    query = models.Query.get_by_id_and_org(query_id, current_org)
    require_access(query.groups, current_user, view_only)
    vis = query.visualizations.where(models.Visualization.id == visualization_id).first()
    qr = {}

    parameter_values = collect_parameters_from_request(request.args)

    if vis is not None:
        vis = vis.to_dict()
        qr = query.latest_query_data
        logging.info("jonhere")
        logging.info( settings.ALLOW_PARAMETERS_IN_EMBEDS)
        if settings.ALLOW_PARAMETERS_IN_EMBEDS == True and len(parameter_values) > 0:
            #abort(404,message="jlk") 
            # run parameterized query
            #
            # WARNING: Note that the external query parameters
            #          are a potential risk of SQL injections.
            #
            results = run_query_sync(query.data_source, parameter_values, query.query)
            logging.info("jonhere2")
            logging.info("results")
            if results is None:
                abort(400, message="Unable to get results for this query")
            else:
                qr = {"data": json.loads(results)}
        elif qr is None:
            abort(400, message="No Results for this query")
        else:
            qr = qr.to_dict()
    else:
        abort(404, message="Visualization not found.")

    record_event(current_org, current_user, {
        'action': 'view',
        'object_id': visualization_id,
        'object_type': 'visualization',
        'query_id': query_id,
        'embed': True,
        'referer': request.headers.get('Referer')
    })

    client_config = {}
    client_config.update(settings.COMMON_CLIENT_CONFIG)

    qr = project(qr, ('data', 'id', 'retrieved_at'))
    vis = project(vis, ('description', 'name', 'id', 'options', 'query', 'type', 'updated_at'))
    vis['query'] = project(vis['query'], ('created_at', 'description', 'name', 'id', 'latest_query_data_id', 'updated_at'))

    return render_template("embed.html",
                           client_config=json_dumps(client_config),
                           visualization=json_dumps(vis),
                           query_result=json_dumps(qr))
Example #5
    def post(self):
        """
        Add a widget to a dashboard.

        :<json number dashboard_id: The ID for the dashboard being added to
        :<json visualization_id: The ID of the visualization to put in this widget
        :<json object options: Widget options
        :<json string text: Text box contents
        :<json number width: Width for widget display

        :>json object widget: The created widget
        """
        widget_properties = request.get_json(force=True)
        dashboard = models.Dashboard.get_by_id_and_org(widget_properties.pop('dashboard_id'), self.current_org)
        require_object_modify_permission(dashboard, self.current_user)

        widget_properties['options'] = json_dumps(widget_properties['options'])
        widget_properties.pop('id', None)
        widget_properties['dashboard'] = dashboard

        visualization_id = widget_properties.pop('visualization_id')
        if visualization_id:
            visualization = models.Visualization.get_by_id_and_org(visualization_id, self.current_org)
            require_access(visualization.query_rel, self.current_user, view_only)
        else:
            visualization = None

        widget_properties['visualization'] = visualization

        widget = models.Widget(**widget_properties)
        models.db.session.add(widget)
        models.db.session.commit()

        return serialize_widget(widget)
Example #6
    def run_query(self, query, user):
        connection = atsd_client.connect_url(self.url,
                                             self.configuration.get('username'),
                                             self.configuration.get('password'),
                                             verify=self.configuration.get('trust_certificate', False),
                                             timeout=self.configuration.get('timeout', 600))
        sql = SQLService(connection)
        query_id = str(uuid.uuid4())

        try:
            logger.debug("SQL running query: %s", query)
            data = sql.query_with_params(query, {'outputFormat': 'csv', 'metadataFormat': 'EMBED',
                                                 'queryId': query_id})

            columns, rows = generate_rows_and_columns(data)

            data = {'columns': columns, 'rows': rows}
            json_data = json_dumps(data)
            error = None

        except SQLException as e:
            json_data = None
            error = e.content
        except (KeyboardInterrupt, InterruptException):
            sql.cancel_query(query_id)
            error = "Query cancelled by user."
            json_data = None

        return json_data, error
Example #7
    def notify(self, alert, query, user, new_state, app, host, options):
        # Documentation: https://api.slack.com/docs/attachments
        fields = [
            {
                "title": "Query",
                "value": "{host}/queries/{query_id}".format(host=host, query_id=query.id),
                "short": True
            },
            {
                "title": "Alert",
                "value": "{host}/alerts/{alert_id}".format(host=host, alert_id=alert.id),
                "short": True
            }
        ]
        if new_state == "triggered":
            text = alert.name + " just triggered"
            color = "#c0392b"
        else:
            text = alert.name + " went back to normal"
            color = "#27ae60"
        
        payload = {'attachments': [{'text': text, 'color': color, 'fields': fields}]}

        if options.get('username'):
            payload['username'] = options.get('username')
        if options.get('icon_emoji'):
            payload['icon_emoji'] = options.get('icon_emoji')
        if options.get('icon_url'):
            payload['icon_url'] = options.get('icon_url')
        if options.get('channel'):
            payload['channel'] = options.get('channel')

        try:
            resp = requests.post(options.get('url'), data=json_dumps(payload), timeout=5.0)
            logging.warning(resp.text)
            if resp.status_code != 200:
                logging.error("Slack send ERROR. status_code => {status}".format(status=resp.status_code))
        except Exception:
            logging.exception("Slack send ERROR.")
Example #8
    def run_query(self, query, user):
        connection = self._get_connection()
        _wait(connection, timeout=10)

        cursor = connection.cursor()

        try:
            cursor.execute(query)
            _wait(connection)

            if cursor.description is not None:
                columns = self.fetch_columns([(i[0], types_map.get(i[1], None)) for i in cursor.description])
                rows = [dict(zip((c['name'] for c in columns), row)) for row in cursor]

                data = {'columns': columns, 'rows': rows}
                error = None
                json_data = json_dumps(data, ignore_nan=True)
            else:
                error = 'Query completed but it returned no data.'
                json_data = None
        except (select.error, OSError) as e:
            error = "Query interrupted. Please retry."
            json_data = None
        except psycopg2.DatabaseError as e:
            error = e.message
            json_data = None
        except (KeyboardInterrupt, InterruptException):
            connection.cancel()
            error = "Query cancelled by user."
            json_data = None
        finally:
            connection.close()

        return json_data, error
Example #9
def public_dashboard(token, org_slug=None):
    # TODO: verify object is a dashboard?
    if not isinstance(current_user, models.ApiUser):
        api_key = models.ApiKey.get_by_api_key(token)
        dashboard = api_key.object
    else:
        dashboard = current_user.object

    user = {
        'permissions': [],
        'apiKey': current_user.id
    }

    headers = {
        'Cache-Control': 'no-cache, no-store, max-age=0, must-revalidate'
    }

    response = render_template("public.html",
                               headless='embed' in request.args,
                               user=json.dumps(user),
                               seed_data=json_dumps({
                                 'dashboard': serializers.public_dashboard(dashboard)
                               }),
                               base_href=base_href(),
                               name=settings.NAME,
                               client_config=json.dumps(settings.COMMON_CLIENT_CONFIG),
                               analytics=settings.ANALYTICS)

    return response, 200, headers
Example #10
    def run_query(self, query, user):
        url = self.configuration['url']
        kylinuser = self.configuration['user']
        kylinpass = self.configuration['password']
        kylinproject = self.configuration['project']

        resp = requests.post(
            os.path.join(url, "api/query"),
            auth=HTTPBasicAuth(kylinuser, kylinpass),
            json={
                "sql": query,
                "offset": settings.KYLIN_OFFSET,
                "limit": settings.KYLIN_LIMIT,
                "acceptPartial": settings.KYLIN_ACCEPT_PARTIAL,
                "project": kylinproject
            }
        )

        if not resp.ok:
            return {}, resp.text or str(resp.reason)

        data = resp.json()
        columns = self.get_columns(data['columnMetas'])
        rows = self.get_rows(columns, data['results'])

        return json_dumps({'columns': columns, 'rows': rows}), None
Example #11
    def run_query(self, query, user):
        connection = snowflake.connector.connect(
            user=self.configuration['user'],
            password=self.configuration['password'],
            account=self.configuration['account'],
        )

        cursor = connection.cursor()

        try:
            cursor.execute("USE WAREHOUSE {}".format(self.configuration['warehouse']))
            cursor.execute("USE {}".format(self.configuration['database']))

            cursor.execute(query)

            columns = self.fetch_columns([(i[0], TYPES_MAP.get(i[1], None)) for i in cursor.description])
            rows = [dict(zip((c['name'] for c in columns), row)) for row in cursor]

            data = {'columns': columns, 'rows': rows}
            error = None
            json_data = json_dumps(data)
        finally:
            cursor.close()
            connection.close()

        return json_data, error
Example #12
    def run_query(self, query, user):
        connection = phoenixdb.connect(
            url=self.configuration.get('url', ''),
            autocommit=True)

        cursor = connection.cursor()

        try:
            cursor.execute(query)
            column_tuples = [(i[0], TYPES_MAPPING.get(i[1], None)) for i in cursor.description]
            columns = self.fetch_columns(column_tuples)
            rows = [dict(zip(([c['name'] for c in columns]), r)) for i, r in enumerate(cursor.fetchall())]
            data = {'columns': columns, 'rows': rows}
            json_data = json_dumps(data)
            error = None
            cursor.close()
        except Error as e:
            json_data = None
            error = 'code: {}, sql state:{}, message: {}'.format(e.code, e.sqlstate, e.message)
        except (KeyboardInterrupt, InterruptException) as e:
            error = "Query cancelled by user."
            json_data = None
        except Exception as ex:
            json_data = None
            error = unicode(ex)
        finally:
            if connection:
                connection.close()

        return json_data, error
Example #13
    def run_query(self, query, user):
        connection = None
        try:
            if self.configuration.get('username', '') and self.configuration.get('password', ''):
                auth_provider = PlainTextAuthProvider(username='{}'.format(self.configuration.get('username', '')),
                                                      password='{}'.format(self.configuration.get('password', '')))
                connection = Cluster([self.configuration.get('host', '')],
                                     auth_provider=auth_provider,
                                     port=self.configuration.get('port', ''),
                                     protocol_version=self.configuration.get('protocol', 3))
            else:
                connection = Cluster([self.configuration.get('host', '')],
                                     port=self.configuration.get('port', ''),
                                     protocol_version=self.configuration.get('protocol', 3))
            session = connection.connect()
            session.set_keyspace(self.configuration['keyspace'])
            session.default_timeout = self.configuration.get('timeout', 10)
            logger.debug("Cassandra running query: %s", query)
            result = session.execute(query)

            column_names = result.column_names

            columns = self.fetch_columns(map(lambda c: (c, 'string'), column_names))

            rows = [dict(zip(column_names, row)) for row in result]

            data = {'columns': columns, 'rows': rows}
            json_data = json_dumps(data, cls=CassandraJSONEncoder)

            error = None
        except KeyboardInterrupt:
            error = "Query cancelled by user."
            json_data = None

        return json_data, error
Example #14
    def run_query(self, query, user):
        connection = sqlite3.connect(self._dbpath)

        cursor = connection.cursor()

        try:
            cursor.execute(query)

            if cursor.description is not None:
                columns = self.fetch_columns([(i[0], None) for i in cursor.description])
                rows = [dict(zip((c['name'] for c in columns), row)) for row in cursor]

                data = {'columns': columns, 'rows': rows}
                error = None
                json_data = json_dumps(data)
            else:
                error = 'Query completed but it returned no data.'
                json_data = None
        except KeyboardInterrupt:
            # sqlite3 connections expose interrupt(), not cancel()
            connection.interrupt()
            error = "Query cancelled by user."
            json_data = None
        except Exception as e:
            # handle unicode error message
            err_class = sys.exc_info()[1].__class__
            err_args = [arg.decode('utf-8') for arg in sys.exc_info()[1].args]
            unicode_err = err_class(*err_args)
            reraise(unicode_err, None, sys.exc_info()[2])
        finally:
            connection.close()
        return json_data, error
Example #15
    def make_request(self, method, path, org=None, user=None, data=None,
                     is_json=True):
        if user is None:
            user = self.factory.user

        if org is None:
            org = self.factory.org

        if org is not False:
            path = "/{}{}".format(org.slug, path)

        if user:
            authenticate_request(self.client, user)

        method_fn = getattr(self.client, method.lower())
        headers = {}

        if data and is_json:
            data = json_dumps(data)

        if is_json:
            content_type = 'application/json'
        else:
            content_type = None

        response = method_fn(path, data=data, headers=headers, content_type=content_type)

        if response.data and is_json:
            response.json = json.loads(response.data)

        return response
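
A hypothetical call site for this helper, assuming a test case class that mixes it in (path and payload are illustrative):

    def test_create_query(self):
        response = self.make_request('post', '/api/queries', data={'name': 'test'})
        self.assertEqual(response.status_code, 200)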
Example #16
def json_representation(data, code, headers=None):
    # Flask-Restful checks only for flask.Response but flask-login uses werkzeug.wrappers.Response
    if isinstance(data, Response):
        return data
    resp = make_response(json_dumps(data), code)
    resp.headers.extend(headers or {})
    return resp
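
For context, a representation function like this is typically registered with Flask-RESTful so every resource response goes through json_dumps; a sketch of the assumed wiring:

from flask import Flask
from flask_restful import Api

app = Flask(__name__)
api = Api(app)
api.representations['application/json'] = json_representation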
Example #17
def public_dashboard(token, org_slug=None):
    # TODO: verify object is a dashboard?
    if not isinstance(current_user, models.ApiUser):
        api_key = models.ApiKey.get_by_api_key(token)
        dashboard = api_key.object
    else:
        dashboard = current_user.object

    user = {
        'permissions': [],
        'apiKey': current_user.id
    }

    headers = {
        'Cache-Control': 'no-cache, no-store, max-age=0, must-revalidate'
    }

    record_event(current_org, current_user, {
        'action': 'view',
        'object_id': dashboard.id,
        'object_type': 'dashboard',
        'public': True,
        'headless': 'embed' in request.args,
        'referer': request.headers.get('Referer')
    })

    response = render_template("public.html",
                               headless='embed' in request.args,
                               user=json.dumps(user),
                               seed_data=json_dumps({
                                 'dashboard': serializers.public_dashboard(dashboard)
                               }),
                               client_config=json.dumps(settings.COMMON_CLIENT_CONFIG))

    return response, 200, headers
Example #18
    def run_query(self, query, user):
        logger.debug("BigQuery got query: %s", query)

        bigquery_service = self._get_bigquery_service()
        jobs = bigquery_service.jobs()

        try:
            if "totalMBytesProcessedLimit" in self.configuration:
                limitMB = self.configuration["totalMBytesProcessedLimit"]
                processedMB = self._get_total_bytes_processed(jobs, query) / 1000.0 / 1000.0
                if limitMB < processedMB:
                    return None, "Larger than %d MBytes will be processed (%f MBytes)" % (limitMB, processedMB)

            data = self._get_query_result(jobs, query)
            error = None

            json_data = json_dumps(data, ignore_nan=True)
        except apiclient.errors.HttpError as e:
            json_data = None
            if e.resp.status == 400:
                error = json_loads(e.content)['error']['message']
            else:
                error = e.content
        except KeyboardInterrupt:
            error = "Query cancelled by user."
            json_data = None

        return json_data, error
Example #19
    def run_query(self, query, user):
        logger.debug("Metrica is about to execute query: %s", query)
        data = None
        query = query.strip()
        if query == "":
            error = "Query is empty"
            return data, error
        try:
            params = yaml.safe_load(query)
        except ValueError as e:
            logging.exception(e)
            error = unicode(e)
            return data, error

        if isinstance(params, dict):
            if 'url' in params:
                params = parse_qs(urlparse(params['url']).query, keep_blank_values=True)
        else:
            error = 'The query format must be JSON or YAML'
            return data, error

        try:
            data = json_dumps(parse_ym_response(self._send_query(**params)))
            error = None
        except Exception as e:
            logging.exception(e)
            error = unicode(e)
        return data, error
Example #20
    def test_supports_relative_timestamps(self):
        query = {
            'ts': {'$humanTime': '1 hour ago'}
        }

        one_hour_ago = parse_human_time("1 hour ago")
        query_data = parse_query_json(json_dumps(query))
        self.assertEqual(query_data['ts'], one_hour_ago)
Example #21
    def run_query(self, query, user):
        try:
            error = None

            code = compile_restricted(query, '<string>', 'exec')

            builtins = safe_builtins.copy()
            builtins["_write_"] = self.custom_write
            builtins["__import__"] = self.custom_import
            builtins["_getattr_"] = getattr
            builtins["getattr"] = getattr
            builtins["_setattr_"] = setattr
            builtins["setattr"] = setattr
            builtins["_getitem_"] = self.custom_get_item
            builtins["_getiter_"] = self.custom_get_iter
            builtins["_print_"] = self._custom_print

            # Layer in our own additional set of builtins that we have
            # considered safe.
            for key in self.safe_builtins:
                builtins[key] = __builtins__[key]

            restricted_globals = dict(__builtins__=builtins)
            restricted_globals["get_query_result"] = self.get_query_result
            restricted_globals["get_source_schema"] = self.get_source_schema
            restricted_globals["execute_query"] = self.execute_query
            restricted_globals["add_result_column"] = self.add_result_column
            restricted_globals["add_result_row"] = self.add_result_row
            restricted_globals["disable_print_log"] = self._custom_print.disable
            restricted_globals["enable_print_log"] = self._custom_print.enable

            # Supported data types
            restricted_globals["TYPE_DATETIME"] = TYPE_DATETIME
            restricted_globals["TYPE_BOOLEAN"] = TYPE_BOOLEAN
            restricted_globals["TYPE_INTEGER"] = TYPE_INTEGER
            restricted_globals["TYPE_STRING"] = TYPE_STRING
            restricted_globals["TYPE_DATE"] = TYPE_DATE
            restricted_globals["TYPE_FLOAT"] = TYPE_FLOAT


            # TODO: Figure out the best way to have a timeout on a script
            #       One option is to use ETA with Celery + timeouts on workers
            #       And replacement of worker process every X requests handled.

            exec((code), restricted_globals, self._script_locals)

            result = self._script_locals['result']
            result['log'] = self._custom_print.lines
            json_data = json_dumps(result)
        except KeyboardInterrupt:
            error = "Query cancelled by user."
            json_data = None
        except Exception as e:
            error = str(type(e)) + " " + str(e)
            json_data = None

        return json_data, error
Example #22
    def notify(self, alert, query, user, new_state, app, host, options):
        try:
            if new_state == "triggered":
                message = "<b><font color=\"#c0392b\">Triggered</font></b>"
            elif new_state == "ok":
                message = "<font color=\"#27ae60\">Went back to normal</font>"
            else:
                message = "Unable to determine status. Check Query and Alert configuration."

            data = {
                "cards": [
                    {
                        "header": {
                            "title": alert.name
                        },
                        "sections": [
                            {
                                "widgets": [
                                    {
                                        "textParagraph": {
                                            "text": message
                                        }
                                    }
                                ]
                            }
                        ]
                    }
                ]
            }

            if options.get("icon_url"):
                data["cards"][0]["header"]["imageUrl"] = options.get("icon_url")

            # Hangouts Chat will create a blank card if an invalid URL (no hostname) is posted.
            if host:
                data["cards"][0]["sections"][0]["widgets"].append({
                    "buttons": [
                        {
                            "textButton": {
                                "text": "OPEN QUERY",
                                "onClick": {
                                    "openLink": {
                                        "url": "{host}/queries/{query_id}".format(host=host, query_id=query.id)
                                    }
                                }
                            }
                        }
                    ]
                })

            headers = {"Content-Type": "application/json; charset=UTF-8"}
            resp = requests.post(options.get("url"), data=json_dumps(data), headers=headers, timeout=5.0)
            if resp.status_code != 200:
                logging.error("webhook send ERROR. status_code => {status}".format(status=resp.status_code))
        except Exception:
            logging.exception("webhook send ERROR.")
Example #23
    def run_query(self, query, user):
        connection = None

        try:
            server = self.configuration.get('server', '')
            user = self.configuration.get('user', '')
            password = self.configuration.get('password', '')
            db = self.configuration['db']
            port = self.configuration.get('port', 1433)
            tds_version = self.configuration.get('tds_version', '7.0')
            charset = self.configuration.get('charset', 'UTF-8')

            if port != 1433:
                server = server + ':' + str(port)

            connection = pymssql.connect(server=server, user=user, password=password, database=db, tds_version=tds_version, charset=charset)

            if isinstance(query, unicode):
                query = query.encode(charset)

            cursor = connection.cursor()
            logger.debug("SqlServer running query: %s", query)

            cursor.execute(query)
            data = cursor.fetchall()

            if cursor.description is not None:
                columns = self.fetch_columns([(i[0], types_map.get(i[1], None)) for i in cursor.description])
                rows = [dict(zip((c['name'] for c in columns), row)) for row in data]

                data = {'columns': columns, 'rows': rows}
                json_data = json_dumps(data)
                error = None
            else:
                error = "No data was returned."
                json_data = None

            cursor.close()
        except pymssql.Error as e:
            try:
                # Query errors are at `args[1]`
                error = e.args[1]
            except IndexError:
                # Connection errors are `args[0][1]`
                error = e.args[0][1]
            json_data = None
        except KeyboardInterrupt:
            connection.cancel()
            error = "Query cancelled by user."
            json_data = None
        finally:
            if connection:
                connection.close()

        return json_data, error
Example #24
    def run_query(self, query, user):
        base_url = self.configuration["url"]

        try:
            error = None
            query = query.strip()

            local_query = '/api/v1/query'
            url = base_url + local_query
            payload = {'query': query}
            response = requests.get(url, params=payload)
            response.raise_for_status()
            raw_data = response.json()['data']['result']
            columns = [
                {
                    'friendly_name': 'timestamp',
                    'type': TYPE_DATETIME,
                    'name': 'timestamp'
                },
                {
                    'friendly_name': 'value',
                    'type': TYPE_STRING,
                    'name': 'value'
                },
            ]
            columns_name = raw_data[0]['metric'].keys()
            for column_name in columns_name:
                columns.append({
                    'friendly_name': column_name,
                    'type': TYPE_STRING,
                    'name': column_name
                })
            rows = []
            for row in raw_data:
                h = {}
                for r in row['metric']:
                    h[r] = row['metric'][r]
                # value/timestamp describe the sample itself, so set them once
                # per row rather than once per metric label
                h['value'] = row['value'][1]
                h['timestamp'] = datetime.datetime.fromtimestamp(row['value'][0])
                rows.append(h)

            json_data = json_dumps(
                {
                    'rows': rows,
                    'columns': columns
                }
            )
        except requests.RequestException as e:
            return None, str(e)
        except KeyboardInterrupt:
            error = "Query cancelled by user."
            json_data = None

        return json_data, error
Example #25
    def run_query(self, query, user):
        connection = None

        try:
            server = self.configuration.get('server', '')
            user = self.configuration.get('user', '')
            password = self.configuration.get('password', '')
            db = self.configuration['db']
            port = self.configuration.get('port', 1433)
            charset = self.configuration.get('charset', 'UTF-8')
            driver = self.configuration.get('driver', '{ODBC Driver 13 for SQL Server}')

            connection_string_fmt = 'DRIVER={};PORT={};SERVER={};DATABASE={};UID={};PWD={}'
            connection_string = connection_string_fmt.format(driver,
                                                             port,
                                                             server,
                                                             db,
                                                             user,
                                                             password)
            connection = pyodbc.connect(connection_string)
            cursor = connection.cursor()
            logger.debug("SQLServerODBC running query: %s", query)
            cursor.execute(query)
            data = cursor.fetchall()

            if cursor.description is not None:
                columns = self.fetch_columns([(i[0], types_map.get(i[1], None)) for i in cursor.description])
                rows = [dict(zip((c['name'] for c in columns), row)) for row in data]

                data = {'columns': columns, 'rows': rows}
                json_data = json_dumps(data)
                error = None
            else:
                error = "No data was returned."
                json_data = None

            cursor.close()
        except pyodbc.Error as e:
            try:
                # Query errors are at `args[1]`
                error = e.args[1]
            except IndexError:
                # Connection errors are `args[0][1]`
                error = e.args[0][1]
            json_data = None
        except KeyboardInterrupt:
            connection.cancel()
            error = "Query cancelled by user."
            json_data = None
        finally:
            if connection:
                connection.close()

        return json_data, error
Example #26
    def save(self, connection=None):
        if connection is None:
            connection = redis_connection

        self.data['updated_at'] = time.time()
        key_name = self._key_name(self.data['task_id'])
        connection.set(key_name, json_dumps(self.data))
        connection.zadd(self._get_list(), time.time(), key_name)

        for l in self.ALL_LISTS:
            if l != self._get_list():
                connection.zrem(l, key_name)
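
Note that zadd(name, score, member) is the redis-py 2.x calling convention; redis-py 3.0 changed zadd to take a mapping, so under a newer client the same call would read:

        # redis-py >= 3.0 expects a {member: score} mapping
        connection.zadd(self._get_list(), {key_name: time.time()})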
Example #27
    def test_ignores_non_isodate_fields(self):
        query = {
            'test': 1,
            'test_list': ['a', 'b', 'c'],
            'test_dict': {
                'a': 1,
                'b': 2
            }
        }

        query_data = parse_query_json(json_dumps(query))
        self.assertDictEqual(query_data, query)
Example #28
def json_request(method, path, data=None):
    if data:
        response = method(path, data=json_dumps(data))
    else:
        response = method(path)

    if response.data:
        response.json = json.loads(response.data)
    else:
        response.json = None

    return response
Example #29
def notify_webhook(alert, query, html, new_state):
    try:
        data = {"event": "alert_state_change", "alert": alert.to_dict(full=False), "url_base": settings.HOST}
        headers = {"Content-Type": "application/json"}
        auth = (
            HTTPBasicAuth(settings.WEBHOOK_USERNAME, settings.WEBHOOK_PASSWORD) if settings.WEBHOOK_USERNAME else None
        )
        resp = requests.post(settings.WEBHOOK_ENDPOINT, data=json_dumps(data), auth=auth, headers=headers)
        if resp.status_code != 200:
            logger.error("webhook send ERROR. status_code => {status}".format(status=resp.status_code))
    except Exception:
        logger.exception("webhook send ERROR.")
Example #30
    def run_query(self, query, user):
        import vertica_python

        if query == "":
            json_data = None
            error = "Query is empty"
            return json_data, error

        connection = None
        try:
            conn_info = {
                'host': self.configuration.get('host', ''),
                'port': self.configuration.get('port', 5433),
                'user': self.configuration.get('user', ''),
                'password': self.configuration.get('password', ''),
                'database': self.configuration.get('database', ''),
                'read_timeout': self.configuration.get('read_timeout', 600)
            }
            
            if self.configuration.get('connection_timeout'):
                conn_info['connection_timeout'] = self.configuration.get('connection_timeout')

            connection = vertica_python.connect(**conn_info)
            cursor = connection.cursor()
            logger.debug("Vetica running query: %s", query)
            cursor.execute(query)

            # TODO - very similar to pg.py
            if cursor.description is not None:
                columns_data = [(i[0], i[1]) for i in cursor.description]

                rows = [dict(zip((c[0] for c in columns_data), row)) for row in cursor.fetchall()]
                columns = [{'name': col[0],
                            'friendly_name': col[0],
                            'type': types_map.get(col[1], None)} for col in columns_data]

                data = {'columns': columns, 'rows': rows}
                json_data = json_dumps(data)
                error = None
            else:
                json_data = None
                error = "No data was returned."

            cursor.close()
        except KeyboardInterrupt:
            error = "Query cancelled by user."
            json_data = None
        finally:
            if connection:
                connection.close()

        return json_data, error
Example #31
    def run_query(self, query, user):
        try:
            error = None

            logger.debug(query)
            query_params = json_loads(query)

            index_name = query_params["index"]
            query_data = query_params["query"]
            size = int(query_params.get("size", 500))
            limit = int(query_params.get("limit", 500))
            result_fields = query_params.get("fields", None)
            sort = query_params.get("sort", None)

            if not self.server_url:
                error = "Missing configuration key 'server'"
                return None, error

            url = "{0}/{1}/_search?".format(self.server_url, index_name)
            mapping_url = "{0}/{1}/_mapping".format(self.server_url, index_name)

            mappings, error = self._get_query_mappings(mapping_url)
            if error:
                return None, error

            if sort:
                url += "&sort={0}".format(urllib.quote_plus(sort))

            url += "&q={0}".format(urllib.quote_plus(query_data))

            logger.debug("Using URL: {0}".format(url))
            logger.debug("Using Query: {0}".format(query_data))

            result_columns = []
            result_rows = []
            if isinstance(query_data, string_types):
                _from = 0
                while True:
                    query_size = size if limit >= (_from + size) else (limit - _from)
                    total = self._execute_simple_query(url + "&size={0}".format(query_size), self.auth, _from, mappings, result_fields, result_columns, result_rows)
                    _from += size
                    if _from >= limit:
                        break
            else:
                # TODO: Handle complete ElasticSearch queries (JSON based sent over HTTP POST)
                raise Exception("Advanced queries are not supported")

            json_data = json_dumps({
                "columns": result_columns,
                "rows": result_rows
            })
        except KeyboardInterrupt:
            error = "Query cancelled by user."
            json_data = None
        except requests.HTTPError as e:
            logger.exception(e)
            error = "Failed to execute query. Return Code: {0}   Reason: {1}".format(r.status_code, r.text)
            json_data = None
        except requests.exceptions.RequestException as e:
            logger.exception(e)
            error = "Connection refused"
            json_data = None

        return json_data, error
Example #32
    def run_query(self, query, user):
        qbol.configure(
            api_token=self.configuration.get("token"),
            api_url="%s/api" % self.configuration.get("endpoint"),
        )

        try:
            query_type = self.configuration.get("query_type", "hive")

            if query_type == "quantum":
                cmd = SqlCommand.create(query=query)
            elif query_type == "hive":
                cmd = HiveCommand.create(
                    query=query, label=self.configuration.get("cluster"))
            elif query_type == "presto":
                cmd = PrestoCommand.create(
                    query=query, label=self.configuration.get("cluster"))
            else:
                raise Exception("Invalid Query Type:%s.\
                        It must be : hive / presto / quantum." %
                                self.configuration.get("query_type"))

            logging.info("Qubole command created with Id: %s and Status: %s",
                         cmd.id, cmd.status)

            while not Command.is_done(cmd.status):
                time.sleep(qbol.poll_interval)
                cmd = Command.find(cmd.id)
                logging.info("Qubole command Id: %s and Status: %s", cmd.id,
                             cmd.status)

            rows = []
            columns = []
            error = None

            if cmd.status == "done":
                fp = StringIO()
                cmd.get_results(
                    fp=fp,
                    inline=True,
                    delim="\t",
                    fetch=False,
                    qlog=None,
                    arguments=["true"],
                )

                results = fp.getvalue()
                fp.close()

                data = results.split("\r\n")
                columns = self.fetch_columns([
                    (i, TYPE_STRING) for i in data.pop(0).split("\t")
                ])
                rows = [
                    dict(
                        zip((column["name"] for column in columns),
                            row.split("\t"))) for row in data
                ]

            json_data = json_dumps({"columns": columns, "rows": rows})
        except (KeyboardInterrupt, JobTimeoutException):
            logging.info("Sending KILL signal to Qubole Command Id: %s",
                         cmd.id)
            cmd.cancel()
            raise

        return json_data, error
Example #33
    def run_query(self, query, user):
        db = self._get_db()

        logger.debug(
            "mongodb connection string: %s", self.configuration["connectionString"]
        )
        logger.debug("mongodb got query: %s", query)

        try:
            query_data = parse_query_json(query)
        except ValueError:
            return None, "Invalid query format. The query is not a valid JSON."

        if "collection" not in query_data:
            return None, "'collection' must have a value to run a query"
        else:
            collection = query_data["collection"]

        q = query_data.get("query", None)
        f = None

        aggregate = query_data.get("aggregate", None)
        if aggregate:
            for step in aggregate:
                if "$sort" in step:
                    sort_list = []
                    for sort_item in step["$sort"]:
                        sort_list.append((sort_item["name"], sort_item["direction"]))

                    step["$sort"] = SON(sort_list)

        if "fields" in query_data:
            f = query_data["fields"]

        s = None
        if "sort" in query_data and query_data["sort"]:
            s = []
            for field_data in query_data["sort"]:
                s.append((field_data["name"], field_data["direction"]))

        columns = []
        rows = []

        cursor = None
        if q or (not q and not aggregate):
            if s:
                cursor = db[collection].find(q, f).sort(s)
            else:
                cursor = db[collection].find(q, f)

            if "skip" in query_data:
                cursor = cursor.skip(query_data["skip"])

            if "limit" in query_data:
                cursor = cursor.limit(query_data["limit"])

            if "count" in query_data:
                cursor = cursor.count()

        elif aggregate:
            allow_disk_use = query_data.get("allowDiskUse", False)
            r = db[collection].aggregate(aggregate, allowDiskUse=allow_disk_use)

            # Backwards compatibility with older pymongo versions.
            #
            # Older pymongo version would return a dictionary from an aggregate command.
            # The dict would contain a "result" key which would hold the cursor.
            # Newer ones return pymongo.command_cursor.CommandCursor.
            if isinstance(r, dict):
                cursor = r["result"]
            else:
                cursor = r

        if "count" in query_data:
            columns.append(
                {"name": "count", "friendly_name": "count", "type": TYPE_INTEGER}
            )

            rows.append({"count": cursor})
        else:
            rows, columns = parse_results(cursor)

        if f:
            ordered_columns = []
            for k in sorted(f, key=f.get):
                column = _get_column_by_name(columns, k)
                if column:
                    ordered_columns.append(column)

            columns = ordered_columns

        if query_data.get("sortColumns"):
            reverse = query_data["sortColumns"] == "desc"
            columns = sorted(columns, key=lambda col: col["name"], reverse=reverse)

        data = {"columns": columns, "rows": rows}
        error = None
        json_data = json_dumps(data, cls=MongoDBJSONEncoder)

        return json_data, error
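
For reference, a hypothetical query document this runner accepts, using only keys the parsing logic above actually reads (collection and field names are illustrative):

example_query = {
    "collection": "events",
    "query": {"type": "click"},
    "fields": {"_id": 1, "type": 2},
    "sort": [{"name": "ts", "direction": -1}],
    "limit": 100,
}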
Example #34
def enqueue_query(query,
                  data_source,
                  user_id,
                  scheduled_query=None,
                  metadata=None):
    # avoid the shared-mutable-default pitfall
    metadata = metadata or {}
    query_hash = gen_query_hash(query)
    logging.info("Inserting job for %s with metadata=%s", query_hash, metadata)
    try_count = 0
    job = None

    while try_count < 5:
        try_count += 1

        pipe = redis_connection.pipeline()
        try:
            pipe.watch(_job_lock_id(query_hash, data_source.id))
            job_id = pipe.get(_job_lock_id(query_hash, data_source.id))
            if job_id:
                logging.info("[%s] Found existing job: %s", query_hash, job_id)

                job = QueryTask(job_id=job_id)

                if job.ready():
                    logging.info("[%s] job found is ready (%s), removing lock",
                                 query_hash, job.celery_status)
                    redis_connection.delete(
                        _job_lock_id(query_hash, data_source.id))
                    job = None

            if not job:
                pipe.multi()

                time_limit = None

                if scheduled_query:
                    queue_name = data_source.scheduled_queue_name
                    scheduled_query_id = scheduled_query.id
                else:
                    queue_name = data_source.queue_name
                    scheduled_query_id = None
                    time_limit = settings.ADHOC_QUERY_TIME_LIMIT

                args = (query, data_source.id, metadata, user_id,
                        scheduled_query_id)
                argsrepr = json_dumps({
                    'org_id': data_source.org_id,
                    'data_source_id': data_source.id,
                    'enqueue_time': time.time(),
                    'scheduled': scheduled_query_id is not None,
                    'query_id': metadata.get('Query ID'),
                    'user_id': user_id
                })

                result = execute_query.apply_async(args=args,
                                                   argsrepr=argsrepr,
                                                   queue=queue_name,
                                                   time_limit=time_limit)

                job = QueryTask(async_result=result)
                tracker = QueryTaskTracker.create(result.id, 'created',
                                                  query_hash, data_source.id,
                                                  scheduled_query is not None,
                                                  metadata)
                tracker.save(connection=pipe)

                logging.info("[%s] Created new job: %s", query_hash, job.id)
                pipe.set(_job_lock_id(query_hash, data_source.id), job.id,
                         settings.JOB_EXPIRY_TIME)
                pipe.execute()
            break

        except redis.WatchError:
            continue

    if not job:
        logging.error("[Manager][%s] Failed adding job for query.", query_hash)

    return job
Example #35
def refresh_queries():
    logger.info("Refreshing queries...")

    outdated_queries_count = 0
    query_ids = []

    with statsd_client.timer('manager.outdated_queries_lookup'):
        for query in models.Query.outdated_queries():
            if settings.FEATURE_DISABLE_REFRESH_QUERIES:
                logging.info("Disabled refresh queries.")
            elif query.org.is_disabled:
                logging.debug(
                    "Skipping refresh of %s because org is disabled.",
                    query.id)
            elif query.data_source is None:
                logging.info(
                    "Skipping refresh of %s because the datasource is none.",
                    query.id)
            elif query.data_source.paused:
                logging.info(
                    "Skipping refresh of %s because datasource - %s is paused (%s).",
                    query.id, query.data_source.name,
                    query.data_source.pause_reason)
            else:
                if query.options and len(query.options.get('parameters',
                                                           [])) > 0:
                    query_params = {
                        p['name']: p.get('value')
                        for p in query.options['parameters']
                    }
                    query_text = mustache_render(query.query_text,
                                                 query_params)
                else:
                    query_text = query.query_text

                enqueue_query(query_text,
                              query.data_source,
                              query.user_id,
                              scheduled_query=query,
                              metadata={
                                  'Query ID': query.id,
                                  'Username': 'Scheduled'
                              })

                query_ids.append(query.id)
                outdated_queries_count += 1

    statsd_client.gauge('manager.outdated_queries', outdated_queries_count)

    logger.info("Done refreshing queries. Found %d outdated queries: %s" %
                (outdated_queries_count, query_ids))

    status = redis_connection.hgetall('redash:status')
    now = time.time()

    redis_connection.hmset(
        'redash:status', {
            'outdated_queries_count': outdated_queries_count,
            'last_refresh_at': now,
            'query_ids': json_dumps(query_ids)
        })

    statsd_client.gauge('manager.seconds_since_refresh',
                        now - float(status.get('last_refresh_at', now)))
Example #36
    def run_query(self, query, user):
        connection = None

        try:
            server = self.configuration.get("server", "")
            user = self.configuration.get("user", "")
            password = self.configuration.get("password", "")
            db = self.configuration["db"]
            port = self.configuration.get("port", 1433)
            tds_version = self.configuration.get("tds_version", "7.0")
            charset = self.configuration.get("charset", "UTF-8")

            if port != 1433:
                server = server + ":" + str(port)

            connection = pymssql.connect(
                server=server,
                user=user,
                password=password,
                database=db,
                tds_version=tds_version,
                charset=charset,
            )

            if isinstance(query, str):
                query = query.encode(charset)

            cursor = connection.cursor()
            logger.debug("SqlServer running query: %s", query)

            cursor.execute(query)
            data = cursor.fetchall()

            if cursor.description is not None:
                columns = self.fetch_columns([(i[0], types_map.get(i[1], None))
                                              for i in cursor.description])
                rows = [
                    dict(zip((column["name"] for column in columns), row))
                    for row in data
                ]

                data = {"columns": columns, "rows": rows}
                json_data = json_dumps(data)
                error = None
            else:
                error = "No data was returned."
                json_data = None

            cursor.close()
        except pymssql.Error as e:
            try:
                # Query errors are at `args[1]`
                error = e.args[1]
            except IndexError:
                # Connection errors are `args[0][1]`
                error = e.args[0][1]
            json_data = None
        except (KeyboardInterrupt, JobTimeoutException):
            connection.cancel()
            raise
        finally:
            if connection:
                connection.close()

        return json_data, error
Example #37
def json_response(response):
    return current_app.response_class(json_dumps(response),
                                      mimetype='application/json')
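
A hypothetical Flask view using the helper (route and payload are illustrative):

@app.route('/api/ping')
def ping():
    return json_response({'status': 'ok'})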
Example #38
    def process_bind_param(self, value, dialect):
        if value is None:
            return value

        return json_dumps(value)
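
process_bind_param is one half of a SQLAlchemy TypeDecorator; the matching hook for reading values back out would plausibly look like this (a sketch, assuming json_loads is the project's deserialization counterpart):

    def process_result_value(self, value, dialect):
        # deserialize the stored JSON string when loading from the database
        if value is None:
            return value
        return json_loads(value)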
Example #39
    def test_handles_binary(self):
        self.assertEqual(json_dumps(buffer("test")), '"74657374"')
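
buffer exists only on Python 2; a hypothetical Python 3 equivalent of the same assertion, assuming the encoder hex-encodes binary buffers the same way:

    def test_handles_binary(self):
        self.assertEqual(json_dumps(memoryview(b"test")), '"74657374"')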
Example #40
    def run_custom_query(self, querystr, user):
        '''
        Example 1: the sub-query is a SQL statement:
X{
    "store_to_db": false,
    "tables": [
    {
        "table_name": "tablea",
        "datetime_column": "daytime",
        "query": {
            "context": {"useApproximateCountDistinct": false},
            "sql": "SELECT DATE_TRUNC('day', __time) as daytime,PV_SRC_GEO_LOCATION,sum(AD_CLICK_COUNT) as click, sum(AD_CLICK_COUNT*KW_AVG_COST) as cost FROM travels_demo where EVENT_TYPE='被展现'  group by PV_SRC_GEO_LOCATION,DATE_TRUNC('day', __time) order by daytime"
        },
        "nodata_procs": [
        "SQLITE:CREATE TABLE tablea (daytime DATETIME, PV_SRC_GEO_LOCATION TEXT, click INTEGER, cost NUMERIC)",
        "SQLITE:INSERT INTO tablea VALUES('2020-01-01T00:00:00.000Z', 'CHINA', 252, 848.74)"
        ]
    },
    {
        "table_name": "tableb",
        "datetime_column": "daytime",
        "query": "SQLITE:SELECT * FROM tablea"
    }
    ],
    "main_query": "SQLITE:SELECT daytime, PV_SRC_GEO_LOCATION, click, cost FROM tableb",
    "final_sql": "SELECT daytime, PV_SRC_GEO_LOCATION, click, cost FROM tableb",
    "persist_table_name": "some_long_name_table_1",
    "persist_datetime_column": "daytime",
    "sub_queries":[
    {
        "name": "exdata1",
        "query":"SQLITE:SELECT daytime, click, cost FROM tablea",
        "persist_table_name": "some_long_name_table_2",
        "persist_datetime_column": "daytime"
    }
    ]
}
        Example 2: the sub-query is a JSON query:
X{
    "tables": [
    {
        "table_name": "tablea",
        "datetime_column": "daytime",
        "query":
            {
              "aggregations": [
                {
                  "type": "doubleSum",
                  "name": "showCount",
                  "fieldName": "AD_SHOW_COUNT"
                },
                {
                  "type": "doubleSum",
                  "name": "realcost",
                  "fieldName": null,
                  "expression": "(AD_CLICK_COUNT * KW_AVG_COST)"
                },
                {
                  "type": "doubleSum",
                  "name": "a1",
                  "fieldName": "AD_CLICK_COUNT"
                }
              ],
              "postAggregations": [
                {
                  "type": "expression",
                  "name": "click_per_cost",
                  "expression": "(realcost / a1)",
                  "ordering": null
                }
              ],
              "filter": {
                "type": "selector",
                "dimension": "EVENT_TYPE",
                "value": "数据报告"
              },
              "dataSource": "travels_demo",
              "dimension": "KEYWORD",
              "granularity": "day",
              "intervals": [
                "1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z"
              ],
              "metric": "realcost",
              "queryType": "topN",
              "threshold": 30
            }
    }
    ],
    "main_query": "SQLITE:SELECT * FROM tablea"
}
        '''
        error = None
        json_data = None

        # Parse the configuration
        querystr = querystr[1:]  # strip the leading "X" marker
        try:
            input_obj = json_loads(querystr)
        except Exception:
            error = "Incorrect Json format."
        if error is not None:
            raise CustomException(error)

        # threading: whether to run the sub-queries on worker threads
        use_threading = input_obj.get("threading")
        if use_threading is None:
            use_threading = True

        # store_to_db: whether to persist query results as SQLite tables; if so,
        #   persist_table_name must also be given. Optional; defaults to False.
        store_to_db = input_obj.get("store_to_db")
        if store_to_db is None:
            store_to_db = False
        # tables: a list of intermediate helper tables, executed in order; later
        #   tables may depend on earlier ones. Optional.
        tables = input_obj.get("tables")
        if (tables is not None) and (type(tables).__name__ !="list"):
            raise CustomException("Incorrect Json data: tables must be a list.")
        # main_query: the main query; its result goes into query_result["data"].
        #   Optional.
        main_query = input_obj.get("main_query")
        if main_query is not None:
            if type(main_query).__name__ =="str":
                pass
            elif type(main_query).__name__ =="dict":
                main_query = json_dumps(main_query)
            else:
                raise CustomException("Incorrect Json data: main_query must be a string or json format.")
        # final_sql: legacy alternative to the main query, but it can only read
        #   from SQLITE; ignored when main_query is present. Optional.
        final_sqlite_query = input_obj.get("final_sql")
        if (final_sqlite_query is not None) and (type(final_sqlite_query).__name__ !="str"):
            raise CustomException("Incorrect Json data: final_sql must be a string.")
        # persist_table_name: when store_to_db is true, the table name that stores
        #   the main query's data
        # persist_datetime_column: the datetime column of the result. Optional.
        persist_table_name = None
        persist_datetime_column = None
        if store_to_db and (main_query is not None or final_sqlite_query is not None):
            persist_table_name = input_obj.get("persist_table_name")
            if persist_table_name is None or type(persist_table_name).__name__ !="str":
                raise CustomException("Incorrect Json data: persist_table_name for main query must be a string.")
            persist_datetime_column = input_obj.get("persist_datetime_column")
            if persist_datetime_column is not None and type(persist_datetime_column).__name__ !="str":
                raise CustomException("Incorrect Json data: persist_datetime_column for main query must be a string.")
        # sub_queries: sub-queries; their results go into query_result["data_ex"].
        #   Optional.
        sub_queries = input_obj.get("sub_queries")
        if (sub_queries is not None) and (type(sub_queries).__name__ !="list"):
            raise CustomException("Incorrect Json data: sub_queries must be a string.")

        # Map from each declared table name to its randomized temporary name
        table_name_map = {}
        # Open the SQLite scratch database
        sqlite_connection = sqlite3.connect(self.sqlite_dbpath)
        sqlite_cursor = sqlite_connection.cursor()
        sqlite_query_param = {"table_name_map": table_name_map, "can_create_table": False}
        try:
            # Step 1: process each helper table in order
            if tables is not None:
                for table_config in tables:
                    # JSON configuration
                    name = table_config.get("table_name")
                    if (name is None) or (type(name).__name__ !="str"):
                        raise CustomException("Incorrect Json data: table_name can't be none and must be a string.")
                    self._log_info("Processing Table[%s]" % name)
                    datetime_column = table_config.get("datetime_column")
                    if (datetime_column is not None) and (type(datetime_column).__name__ !="str"):
                        raise CustomException("Incorrect Json data in table %s: datetime_column must be a string." % name)
                    table_query = table_config.get("query")
                    if table_query is None:
                        raise CustomException("Incorrect Json data in table %s: query must exist." % name)
                    if type(table_query).__name__ =="str":
                        pass
                    elif type(table_query).__name__ =="dict":
                        table_query = json_dumps(table_query)
                    else:
                        raise CustomException("Incorrect Json data in table %s: query must be a string or json format." % name)
                    nodata_procs = table_config.get("nodata_procs")
                    if (nodata_procs is not None) and (type(nodata_procs).__name__ !="list"):
                        raise CustomException("Incorrect Json data in table %s: nodata_procs must be a list." % name)
                    # Run the table query
                    query_data, query_error = self.run_query_obj_result(table_query, user, sqlite_query_param)
                    if query_error is not None:
                        raise CustomException(query_error)
                    if (query_data is None) or query_data.get("columns") is None:
                        raise CustomException("Incorrect query data for table %s." % name)
                    # Store the result into SQLite under a randomized name
                    rand_num = random.randint(100000,999999)
                    table_name = name + str(rand_num)
                    table_name_map[name] = table_name
                    if len(query_data["columns"]) > 0:
                        self.store_data_to_sqlite(sqlite_connection, sqlite_cursor, query_data, table_name, datetime_column, drop_before_create = False)
                    # Handle queries that returned no rows
                    elif nodata_procs is not None:
                        self._log_info("Using nodata_procs to build table: %s." % name)
                        sqlite_query_param["can_create_table"] = True
                        for proc in nodata_procs:
                            if type(proc).__name__ !="str":
                                raise CustomException("Incorrect Json data in table %s: nodata_procs must be a string list." % name)
                            t = GetTableNameToCreate(proc)
                            if t is not None and t != name:
                                raise CustomException("[nodata_procs]Invalid table name(%s) to create in table %s." % (t, name))
                            query_data, query_error = self.run_query_obj_result(proc, user, sqlite_query_param)
                            if query_error is not None:
                                raise CustomException(query_error)
                        sqlite_query_param["can_create_table"] = False

            # Step 2: run the main query
            if main_query is not None:
                self._log_info("Processing Main Query:#####%s#####" % main_query)
                json_data, error = self.run_query_obj_result(main_query, user, sqlite_query_param)
                if error is not None:
                    raise CustomException(error)
                if (json_data is None) or json_data.get("columns") is None:
                    raise CustomException("Incorrect query_data for main query.")
            elif final_sqlite_query is not None:
                for (k,v) in table_name_map.items():
                    final_sqlite_query = ReplaceTableName(final_sqlite_query, k, v)
                self._log_info("Processing Final SQL:#####%s#####" % final_sqlite_query)
                sqlite_cursor.execute(final_sqlite_query)
                if sqlite_cursor.description is not None:
                    columns = self.fetch_columns([(i[0], None) for i in sqlite_cursor.description])
                    rows = [
                        dict(zip((column["name"] for column in columns), row))
                        for row in sqlite_cursor
                    ]
                    error = None
                    # The column types above are all null; rebuild them from the first row
                    columns = []
                    if len(rows) > 0:
                        row = rows[0]
                        for (column_name, column_value) in row.items():
                            columns.append(
                                {"name": column_name, "friendly_name": column_name, "type": PYTHON_TYPES_MAP[type(column_value).__name__]}
                            )
                    json_data = {"columns": columns, "rows": rows}
                else:
                    # No result set: return an empty payload instead of an error
                    error = None
                    json_data = {"columns": [], "rows": []}
            else:
                json_data = {"columns": [], "rows": []}
                error = None
            # Persist the main result if requested
            if store_to_db and error is None and len(json_data["columns"]) > 0:
                self.store_data_to_sqlite(sqlite_connection, sqlite_cursor, json_data, persist_table_name, persist_datetime_column, drop_before_create = True)
                json_data = {"columns": [], "rows": []}

            # Step 3: run the sub-queries
            if sub_queries is not None:
                json_data["data_ex"] = []
                if use_threading:
                    threads = []
                for query_config in sub_queries:
                    # JSON configuration
                    name = query_config.get("name")
                    if (name is None) or (type(name).__name__ !="str"):
                        raise CustomException("Incorrect Json data in sub_queries: name must exist and must be a string.")
                    sub_query = query_config.get("query")
                    if sub_query is None:
                        raise CustomException("Incorrect Json data in sub_query %s: query must exist." % name)
                    if type(sub_query).__name__ =="str":
                        pass
                    elif type(sub_query).__name__ =="dict":
                        sub_query = json_dumps(sub_query)
                    else:
                        raise CustomException("Incorrect Json data in sub_query %s: query must be a string or json format." % name)
                    sub_persist_table_name = None
                    sub_persist_datetime_column = None
                    if store_to_db:
                        sub_persist_table_name = query_config.get("persist_table_name")
                        if sub_persist_table_name is None or type(sub_persist_table_name).__name__ !="str":
                            raise CustomException("Incorrect Json data in sub_query %s: persist_table_name must be a string." % name)
                        sub_persist_datetime_column = query_config.get("persist_datetime_column")
                        if sub_persist_datetime_column is not None and type(sub_persist_datetime_column).__name__ !="str":
                            raise CustomException("Incorrect Json data in sub_query %s: persist_datetime_column must be a string." % name)
                    if use_threading:
                        r = Result()
                        r.config = query_config
                        t = threading.Thread(target=self._run_query_threading, args=(sub_query, user, sqlite_query_param, r))
                        threads.append({"t": t, "r": r})
                        t.start()
                    else:
                        # Run the sub-query inline
                        self._log_info("Processing Sub Query:#####%s#####" % sub_query)
                        query_data, query_error = self.run_query_obj_result(sub_query, user, sqlite_query_param)
                        if query_error is not None:
                            raise CustomException(query_error)
                        if (query_data is None) or query_data.get("columns") is None:
                            raise CustomException("Incorrect query data for sub query %s." % name)
                        # Persist
                        if store_to_db:
                            if query_error is None and len(query_data["columns"]) > 0:
                                self.store_data_to_sqlite(sqlite_connection, sqlite_cursor, query_data, sub_persist_table_name, sub_persist_datetime_column, drop_before_create = True)
                        else:
                            json_data["data_ex"].append({"name": name, "data": query_data})

                if use_threading:
                    for itor in threads:
                        itor["t"].join()
                    for itor in threads:
                        r = itor["r"]
                        query_data = r.json_data
                        query_error = r.error
                        if query_error is not None:
                            raise CustomException(query_error)
                        if (query_data is None) or query_data.get("columns") is None:
                            name = r.config["name"]
                            raise CustomException("Incorrect query data for sub query %s." % name)
                    for itor in threads:
                        r = itor["r"]
                        query_data = r.json_data
                        query_error = r.error
                        if store_to_db:
                            if query_error is None and len(query_data["columns"]) > 0:
                                sub_persist_table_name = r.config["persist_table_name"]
                                sub_persist_datetime_column = r.config.get("persist_datetime_column")
                                self.store_data_to_sqlite(sqlite_connection, sqlite_cursor, query_data, sub_persist_table_name, sub_persist_datetime_column, drop_before_create = True)
                        else:
                            name = r.config["name"]
                            json_data["data_ex"].append({"name": name, "data": query_data})

        except CustomException as e:
            error = e.read()
        except JobTimeoutException:
            error = "Query exceeded Redash query execution time limit."
        except Exception as e:
            error = str(e)
        finally:
            # Drop all temporary tables
            for (k,v) in table_name_map.items():
                drop_table_sql = "DROP TABLE IF EXISTS " + v + ";"
                self._log_info(drop_table_sql)
                sqlite_cursor.execute(drop_table_sql)
            sqlite_connection.commit()
            sqlite_connection.close()

        if error is not None:
            raise CustomException(error)
        return json_data, error
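The threaded branch above relies on a Result holder and a _run_query_threading wrapper that this listing does not show. A minimal sketch consistent with the call sites; the field names are inferred from usage, not taken from the original source:

class Result(object):
    """Mutable holder a worker thread fills in; read after join()."""
    def __init__(self):
        self.config = None     # the sub-query's JSON config (set by the caller)
        self.json_data = None  # result dict, same shape as run_query_obj_result
        self.error = None      # error message, if any

# A method on the same query-runner class as run_custom_query:
def _run_query_threading(self, query, user, sqlite_query_param, result):
    # Run one sub-query and stash the outcome on the shared Result object,
    # converting exceptions to an error string so the main thread can
    # re-raise them after join().
    try:
        result.json_data, result.error = self.run_query_obj_result(
            query, user, sqlite_query_param)
    except Exception as e:
        result.json_data, result.error = None, str(e)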
Пример #41
0
    def run_query(self, query, user):
        path = ""
        ua = ""
        args = {}
        try:
            args = yaml.safe_load(query)
            path = args['url']
            args.pop('url', None)
            ua = args['user-agent']
            args.pop('user-agent', None)

        except Exception:
            # Missing url/user-agent keys fall back to the empty defaults above
            pass

        try:
            response = requests_or_advocate.get(url=path,
                                                headers={"User-agent": ua})
            workbook = pd.read_excel(response.content, **args)

            df = workbook.copy()
            data = {'columns': [], 'rows': []}
            conversions = [
                {'pandas_type': np.integer, 'redash_type': 'integer'},
                {'pandas_type': np.inexact, 'redash_type': 'float'},
                {'pandas_type': np.datetime64, 'redash_type': 'datetime',
                 'to_redash': lambda x: x.strftime('%Y-%m-%d %H:%M:%S')},
                {'pandas_type': np.bool_, 'redash_type': 'boolean'},
                # np.object was removed in NumPy 1.24; np.object_ is the dtype class
                {'pandas_type': np.object_, 'redash_type': 'string'},
            ]
            labels = []
            for dtype, label in zip(df.dtypes, df.columns):
                for conversion in conversions:
                    if issubclass(dtype.type, conversion['pandas_type']):
                        data['columns'].append({
                            'name': label,
                            'friendly_name': label,
                            'type': conversion['redash_type']
                        })
                        labels.append(label)
                        func = conversion.get('to_redash')
                        if func:
                            df[label] = df[label].apply(func)
                        break
            data['rows'] = df[labels].replace({np.nan: None}).to_dict(orient='records')

            json_data = json_dumps(data)
            error = None
        except KeyboardInterrupt:
            error = "Query cancelled by user."
            json_data = None
        except UnacceptableAddressException:
            error = "Can't query private addresses."
            json_data = None
        except Exception as e:
            error = "Error reading {0}. {1}".format(path, str(e))
            json_data = None

        return json_data, error
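The runner expects the query text to be YAML carrying at least url and user-agent; every remaining key is forwarded to pandas.read_excel as a keyword argument. A hypothetical invocation; the URL and the runner variable are invented for illustration:

# sheet_name is a regular pandas.read_excel keyword argument
query = """
url: https://example.com/report.xlsx
user-agent: redash
sheet_name: 0
"""

json_data, error = runner.run_query(query, user=None)  # runner: an instance of the class above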