def get(self, data_source_id, database_name, table_name):
    """Enqueue a background job that fetches the column list of a single
    Databricks table and return the serialized job handle for polling."""
    source = _get_databricks_data_source(
        data_source_id, user=self.current_user, org=self.current_org
    )
    column_job = get_databricks_table_columns.delay(
        source.id, database_name, table_name
    )
    return serialize_job(column_job)
def run_query(query, parameters, data_source, query_id, max_age=0):
    """Execute *query* against *data_source*, serving a cached result when
    one newer than *max_age* seconds exists.

    Returns an error response if the data source is paused or parameters
    are missing, a serialized cached result on a cache hit, or a serialized
    background job otherwise. ``max_age == 0`` disables the cache lookup.
    Aborts with 400 when parameter substitution fails.
    """
    # Refuse to run against a paused source; include the pause reason if set.
    if data_source.paused:
        if data_source.pause_reason:
            message = "{} is paused ({}). Please try later.".format(
                data_source.name, data_source.pause_reason
            )
        else:
            message = "{} is paused. Please try later.".format(data_source.name)
        return error_response(message)

    try:
        query.apply(parameters)
    except (InvalidParameterError, QueryDetachedFromDataSourceError) as err:
        abort(400, message=str(err))

    if query.missing_params:
        return error_response(
            "Missing parameter value for: {}".format(", ".join(query.missing_params))
        )

    # max_age == 0 means "never use the cache".
    cached_result = (
        None
        if max_age == 0
        else models.QueryResult.get_latest(data_source, query.text, max_age)
    )

    # Audit the execution attempt (recorded for both cache hits and misses).
    record_event(
        current_user.org,
        current_user,
        {
            "action": "execute_query",
            "cache": "hit" if cached_result else "miss",
            "object_id": data_source.id,
            "object_type": "data_source",
            "query": query.text,
            "query_id": query_id,
            "parameters": parameters,
        },
    )

    if cached_result:
        return {
            "query_result": serialize_query_result(
                cached_result, current_user.is_api_user()
            )
        }

    job = enqueue_query(
        query.text,
        data_source,
        current_user.id,
        current_user.is_api_user(),
        metadata={
            "Username": repr(current_user)
            if current_user.is_api_user()
            else current_user.email,
            "Query ID": query_id,
        },
    )
    return serialize_job(job)
def get(self, data_source_id):
    """Enqueue a schema-retrieval job for the data source and return the
    serialized job handle. Pass ``?refresh`` to force a re-fetch."""
    data_source = get_object_or_404(
        models.DataSource.get_by_id_and_org, data_source_id, self.current_org
    )
    require_access(data_source, self.current_user, view_only)

    # Presence of the query-string flag (any value) requests a refresh.
    force_refresh = request.args.get("refresh") is not None
    schema_job = get_schema.delay(data_source.id, force_refresh)
    return serialize_job(schema_job)
def get(self, data_source_id, database_name):
    """Return the cached table list for a Databricks database, or enqueue a
    job that loads the tables (with their columns) and primes the cache.

    Pass ``?refresh`` to bypass the cache and always enqueue a job.
    """
    data_source = _get_databricks_data_source(
        data_source_id, user=self.current_user, org=self.current_org
    )

    refresh = request.args.get("refresh") is not None
    if not refresh:
        cached_tables = _get_tables_from_cache(data_source_id, database_name)
        if cached_tables is not None:
            return {"schema": cached_tables, "has_columns": True}

    # Cache miss or explicit refresh: enqueue the job that fetches tables
    # with their columns and stores them under the per-database cache key.
    # NOTE: a stale `get_databricks_tables` dispatch that sat after an
    # unconditional `return serialize_job(job)` (unreachable dead code,
    # apparently a merge artifact) has been removed.
    job = get_database_tables_with_columns.delay(
        data_source.id,
        database_name,
        redis_key=_tables_key(data_source_id, database_name),
    )
    return serialize_job(job)
def get(self, data_source_id, database_name):
    """Enqueue a Databricks schema-retrieval job for one database and
    return the serialized job handle.

    Aborts with 400 when the data source is not a Databricks source, and
    with 404 when it does not exist in the caller's organization.
    """
    data_source = get_object_or_404(
        models.DataSource.get_by_id_and_org, data_source_id, self.current_org
    )
    require_access(data_source, self.current_user, view_only)

    # Idiom fix: direct inequality instead of `not ... == ...`.
    if data_source.type != "databricks":
        abort(
            400, message="Resource only available for the Databricks query runner."
        )

    job = get_databricks_schema.delay(data_source.id, database_name)
    return serialize_job(job)
def get(self, data_source_id):
    """Return the cached database list for a Databricks data source, or
    enqueue a job that fetches it and primes the cache.

    Pass ``?refresh`` to skip the cache and always enqueue a job.
    """
    source = _get_databricks_data_source(
        data_source_id, user=self.current_user, org=self.current_org
    )

    wants_refresh = request.args.get("refresh") is not None
    if not wants_refresh:
        cached = _get_databases_from_cache(data_source_id)
        if cached is not None:
            return cached

    listing_job = get_databricks_databases.delay(
        source.id, redis_key=_databases_key(data_source_id)
    )
    return serialize_job(listing_job)
def get(self, job_id, query_id=None):
    """
    Retrieve info about a running query job.

    ``query_id`` is accepted for route compatibility but not used here.
    """
    return serialize_job(Job.fetch(job_id))