def execute_query(self, query, data_source_id, metadata):
    """Celery task body: run *query* against a data source and store the result.

    :param query: text of the query to execute.
    :param data_source_id: id of the ``models.DataSource`` to run against.
    :param metadata: dict of annotation key/values; mutated in place (Task ID,
        Query Hash, Queue are added) when the runner supports annotation.
    :returns: id of the stored ``models.QueryResult``.
    :raises Exception: re-raised with the runner's error message on failure.
    """
    start_time = time.time()

    logger.info("Loading data source (%d)...", data_source_id)

    # TODO: we should probably cache data sources in Redis
    data_source = models.DataSource.get_by_id(data_source_id)

    self.update_state(state='STARTED', meta={'start_time': start_time, 'custom_message': ''})

    logger.info("Executing query:\n%s", query)

    query_hash = gen_query_hash(query)
    query_runner = get_query_runner(data_source.type, data_source.options)

    if query_runner.annotate_query():
        metadata['Task ID'] = self.request.id
        metadata['Query Hash'] = query_hash
        metadata['Queue'] = self.request.delivery_info['routing_key']

        annotation = u", ".join([u"{}: {}".format(k, v) for k, v in metadata.iteritems()])

        # Fix: use the module-level logger (was the root `logging` module) so
        # this message is routed/formatted like every other log line in the task.
        logger.debug(u"Annotation: %s", annotation)

        annotated_query = u"/* {} */ {}".format(annotation, query)
    else:
        annotated_query = query

    with statsd_client.timer('query_runner.{}.{}.run_time'.format(data_source.type, data_source.name)):
        data, error = query_runner.run_query(annotated_query)

    run_time = time.time() - start_time
    logger.info("Query finished... data length=%s, error=%s", data and len(data), error)

    self.update_state(state='STARTED', meta={'start_time': start_time, 'error': error, 'custom_message': ''})

    # Delete query_hash — releases the per-query lock so the same query can run again.
    redis_connection.delete(QueryTask._job_lock_id(query_hash, data_source.id))

    if not error:
        query_result, updated_query_ids = models.QueryResult.store_result(
            data_source.id, query_hash, query, data, run_time, utils.utcnow())
        # Any query that got fresh results may have alerts attached to it.
        for query_id in updated_query_ids:
            check_alerts_for_query.delay(query_id)
    else:
        raise Exception(error)

    return query_result.id
def query_runner(self):
    """Build the query runner for this data source, wrapped in an SSH
    tunnel when the data source is configured to use one."""
    runner = get_query_runner(self.type, self.options)
    if not self.uses_ssh_tunnel:
        return runner
    return with_ssh_tunnel(runner, self.options.get("ssh_tunnel"))
def get_widget_rows(widget, qp):
    """Execute the query associated with the widget and return the resulting
    rows and errors (if any).

    :param widget: peewee ORM Widget instance.
    :param qp: dict of query parameters used to render the jinja2 query
        template.
    :returns: a tuple ``(data, error)``; ``error`` is ``None`` on success.
        When every attempt fails (or the retry limit is not positive),
        ``data`` is ``None`` and ``error`` describes the failure.
    """
    # Get the query runner instance for the Data Source corresponding to
    # the query about to be executed.
    query_runner = get_query_runner(
        widget.visualization.query.data_source.type,
        widget.visualization.query.data_source.options,
    )

    # The stored query is a jinja2 template so it can be instantiated with a
    # certain date range (or other parameters) at render time.
    query_template = jinja2.Template(widget.visualization.query.query)

    # Retry up to REDASH_REPORTS_MAX_RETRIES times, stopping at the first
    # successful run. Fix: initialize data/error up front so a zero or
    # negative retry limit returns a sensible tuple instead of raising
    # NameError on the return statement (the original seeded error with 0
    # purely as a "not None" sentinel).
    data, error = None, "query was not executed"
    attempts = REDASH_REPORTS_MAX_RETRIES
    while attempts > 0 and error is not None:
        data, error = query_runner.run_query(query_template.render(**qp))
        attempts -= 1

    return data, error
def execute_query(self, query, data_source_id):
    """Run *query* against the data source identified by *data_source_id*,
    store the result, and return the stored QueryResult's id."""
    # TODO: maybe this should be a class?
    started_at = time.time()

    logger.info("Loading data source (%d)...", data_source_id)
    # TODO: we should probably cache data sources in Redis
    data_source = models.DataSource.get_by_id(data_source_id)

    self.update_state(state='STARTED', meta={'start_time': started_at, 'custom_message': ''})

    logger.info("Executing query:\n%s", query)

    query_hash = gen_query_hash(query)
    runner = get_query_runner(data_source.type, data_source.options)

    if not runner.annotate_query():
        final_query = query
    else:
        # TODO: annotate with queue name
        final_query = "/* Task Id: %s, Query hash: %s */ %s" % (self.request.id, query_hash, query)

    timer_name = 'query_runner.{}.{}.run_time'.format(data_source.type, data_source.name)
    with statsd_client.timer(timer_name):
        data, error = runner.run_query(final_query)

    elapsed = time.time() - started_at
    logger.info("Query finished... data length=%s, error=%s", data and len(data), error)

    self.update_state(state='STARTED', meta={'start_time': started_at, 'error': error, 'custom_message': ''})

    # Delete query_hash
    redis_connection.delete(QueryTask._job_lock_id(query_hash, data_source.id))

    if error:
        raise Exception(error)

    query_result = models.QueryResult.store_result(
        data_source.id, query_hash, query, data, elapsed, datetime.datetime.utcnow())
    return query_result.id
def execute_query(self, query, data_source_id, metadata):
    """Celery task body: run *query* against a data source and store the result.

    Installs a SIGINT handler first so an interrupted worker can clean up.

    :param query: text of the query to execute.
    :param data_source_id: id of the ``models.DataSource`` to run against.
    :param metadata: dict of annotation key/values; mutated in place (Task ID,
        Query Hash, Queue are added) when the runner supports annotation.
    :returns: id of the stored ``models.QueryResult``.
    :raises Exception: re-raised with the runner's error message on failure.
    """
    signal.signal(signal.SIGINT, signal_handler)
    start_time = time.time()

    logger.info("Loading data source (%d)...", data_source_id)

    # TODO: we should probably cache data sources in Redis
    data_source = models.DataSource.get_by_id(data_source_id)

    self.update_state(state="STARTED", meta={"start_time": start_time, "custom_message": ""})

    logger.info("Executing query:\n%s", query)

    query_hash = gen_query_hash(query)
    query_runner = get_query_runner(data_source.type, data_source.options)

    if query_runner.annotate_query():
        metadata["Task ID"] = self.request.id
        metadata["Query Hash"] = query_hash
        metadata["Queue"] = self.request.delivery_info["routing_key"]

        annotation = u", ".join([u"{}: {}".format(k, v) for k, v in metadata.iteritems()])

        # Fix: use the module-level logger (was the root `logging` module) so
        # this message is routed/formatted like every other log line in the task.
        logger.debug(u"Annotation: %s", annotation)

        annotated_query = u"/* {} */ {}".format(annotation, query)
    else:
        annotated_query = query

    with statsd_client.timer("query_runner.{}.{}.run_time".format(data_source.type, data_source.name)):
        data, error = query_runner.run_query(annotated_query)

    run_time = time.time() - start_time
    logger.info("Query finished... data length=%s, error=%s", data and len(data), error)

    self.update_state(state="STARTED", meta={"start_time": start_time, "error": error, "custom_message": ""})

    # Delete query_hash — releases the per-query lock so the same query can run again.
    redis_connection.delete(QueryTask._job_lock_id(query_hash, data_source.id))

    if not error:
        query_result, updated_query_ids = models.QueryResult.store_result(
            data_source.id, query_hash, query, data, run_time, utils.utcnow()
        )
        # Any query that got fresh results may have alerts attached to it.
        for query_id in updated_query_ids:
            check_alerts_for_query.delay(query_id)
    else:
        raise Exception(error)

    return query_result.id
def execute_query(self, query, data_source_id, metadata):
    """Celery task body: run *query* against a data source and store the result.

    :param query: text of the query to execute.
    :param data_source_id: id of the ``models.DataSource`` to run against.
    :param metadata: dict of annotation key/values; mutated in place (Task ID,
        Query Hash, Queue are added) when the runner supports annotation.
    :returns: id of the stored ``models.QueryResult``.
    :raises Exception: re-raised with the runner's error message on failure.
    """
    start_time = time.time()

    logger.info("Loading data source (%d)...", data_source_id)

    # TODO: we should probably cache data sources in Redis
    data_source = models.DataSource.get_by_id(data_source_id)

    self.update_state(state='STARTED', meta={'start_time': start_time, 'custom_message': ''})

    logger.info("Executing query:\n%s", query)

    query_hash = gen_query_hash(query)
    query_runner = get_query_runner(data_source.type, data_source.options)

    if query_runner.annotate_query():
        metadata['Task ID'] = self.request.id
        metadata['Query Hash'] = query_hash
        metadata['Queue'] = self.request.delivery_info['routing_key']

        annotation = u", ".join([u"{}: {}".format(k, v) for k, v in metadata.iteritems()])

        # Fix: use the module-level logger (was the root `logging` module) so
        # this message is routed/formatted like every other log line in the task.
        logger.debug(u"Annotation: %s", annotation)

        annotated_query = u"/* {} */ {}".format(annotation, query)
    else:
        annotated_query = query

    with statsd_client.timer('query_runner.{}.{}.run_time'.format(data_source.type, data_source.name)):
        data, error = query_runner.run_query(annotated_query)

    run_time = time.time() - start_time
    logger.info("Query finished... data length=%s, error=%s", data and len(data), error)

    self.update_state(state='STARTED', meta={'start_time': start_time, 'error': error, 'custom_message': ''})

    # Delete query_hash — releases the per-query lock so the same query can run again.
    redis_connection.delete(QueryTask._job_lock_id(query_hash, data_source.id))

    if not error:
        query_result = models.QueryResult.store_result(
            data_source.id, query_hash, query, data, run_time, utils.utcnow())
    else:
        raise Exception(error)

    return query_result.id
def execute_query(self, query, data_source_id):
    """Execute *query* on the given data source and persist the outcome,
    returning the id of the stored QueryResult row."""
    # TODO: maybe this should be a class?
    t0 = time.time()

    logger.info("Loading data source (%d)...", data_source_id)
    # TODO: we should probably cache data sources in Redis
    ds = models.DataSource.get_by_id(data_source_id)

    self.update_state(state='STARTED', meta={'start_time': t0, 'custom_message': ''})

    logger.info("Executing query:\n%s", query)

    query_hash = gen_query_hash(query)
    qr = get_query_runner(ds.type, ds.options)

    if qr.annotate_query():
        # TODO: annotate with queue name
        query_to_run = "/* Task Id: %s, Query hash: %s */ %s" % (self.request.id, query_hash, query)
    else:
        query_to_run = query

    with statsd_client.timer('query_runner.{}.{}.run_time'.format(ds.type, ds.name)):
        data, error = qr.run_query(query_to_run)

    duration = time.time() - t0
    logger.info("Query finished... data length=%s, error=%s", data and len(data), error)

    self.update_state(state='STARTED', meta={'start_time': t0, 'error': error, 'custom_message': ''})

    # Delete query_hash
    redis_connection.delete(QueryTask._job_lock_id(query_hash, ds.id))

    if error:
        raise Exception(error)

    query_result = models.QueryResult.store_result(
        ds.id, query_hash, query, data, duration, datetime.datetime.utcnow())
    return query_result.id
def query_runner(self):
    """Instantiate the query runner configured for this data source."""
    runner = get_query_runner(self.type, self.options)
    return runner
def query_runner(self):
    """Return the query runner instance for this data source's type/options."""
    return get_query_runner(self.type, self.options)
def execute_query(self, query, data_source_id, metadata):
    """Celery task body: run *query* against a data source and store the result,
    emitting structured ``task=execute_query state=...`` progress log lines.

    Installs a SIGINT handler first so an interrupted worker can clean up.

    :param query: text of the query to execute.
    :param data_source_id: id of the ``models.DataSource`` to run against.
    :param metadata: dict of annotation key/values; mutated in place (Task ID,
        Query Hash, Queue are added) when the runner supports annotation. May
        carry 'Query ID' / 'Username' keys used purely for logging.
    :returns: id of the stored ``models.QueryResult``.
    :raises QueryExecutionError: with the runner's error message on failure.
    """
    signal.signal(signal.SIGINT, signal_handler)
    start_time = time.time()

    logger.info("task=execute_query state=load_ds ds_id=%d", data_source_id)
    data_source = models.DataSource.get_by_id(data_source_id)

    self.update_state(state='STARTED', meta={'start_time': start_time, 'custom_message': ''})

    logger.debug("Executing query:\n%s", query)

    query_hash = gen_query_hash(query)
    query_runner = get_query_runner(data_source.type, data_source.options)

    def log_progress(state):
        # One structured progress line; extracted because the identical
        # seven-field logger.info call was duplicated four times in this task.
        logger.info("task=execute_query state=%s query_hash=%s type=%s ds_id=%d task_id=%s queue=%s query_id=%s username=%s",
                    state, query_hash, data_source.type, data_source.id,
                    self.request.id, self.request.delivery_info['routing_key'],
                    metadata.get('Query ID', 'unknown'),
                    metadata.get('Username', 'unknown'))

    log_progress('before')

    if query_runner.annotate_query():
        metadata['Task ID'] = self.request.id
        metadata['Query Hash'] = query_hash
        metadata['Queue'] = self.request.delivery_info['routing_key']

        annotation = u", ".join([u"{}: {}".format(k, v) for k, v in metadata.iteritems()])

        # Fix: use the module-level logger (was the root `logging` module) so
        # this message is routed/formatted like every other log line in the task.
        logger.debug(u"Annotation: %s", annotation)

        annotated_query = u"/* {} */ {}".format(annotation, query)
    else:
        annotated_query = query

    with statsd_client.timer('query_runner.{}.{}.run_time'.format(data_source.type, data_source.name)):
        data, error = query_runner.run_query(annotated_query)

    log_progress('after')

    run_time = time.time() - start_time
    logger.info("Query finished... data length=%s, error=%s", data and len(data), error)

    self.update_state(state='STARTED', meta={'start_time': start_time, 'error': error, 'custom_message': ''})

    # Delete query_hash — releases the per-query lock so the same query can run again.
    redis_connection.delete(QueryTask._job_lock_id(query_hash, data_source.id))

    if not error:
        query_result, updated_query_ids = models.QueryResult.store_result(
            data_source.org_id, data_source.id, query_hash, query, data, run_time, utils.utcnow())
        log_progress('after_store')
        # Any query that got fresh results may have alerts attached to it.
        for query_id in updated_query_ids:
            check_alerts_for_query.delay(query_id)
        log_progress('after_alerts')
    else:
        raise QueryExecutionError(error)

    return query_result.id