def run_query(self, query, user):
    """Execute *query* against the configured ATSD instance.

    Returns a ``(json_data, error)`` pair: serialized columns/rows on
    success, otherwise ``None`` plus an error string.
    """
    conn = atsd_client.connect_url(
        self.url,
        self.configuration.get('username'),
        self.configuration.get('password'),
        verify=self.configuration.get('trust_certificate', False),
        timeout=self.configuration.get('timeout', 600))
    service = SQLService(conn)
    # Unique id lets us cancel this exact query server-side on interrupt.
    qid = str(uuid.uuid4())
    try:
        logger.debug("SQL running query: %s", query)
        response = service.query_with_params(query, {
            'outputFormat': 'csv',
            'metadataFormat': 'EMBED',
            'queryId': qid})
        columns, rows = generate_rows_and_columns(response)
        json_data = json_dumps({'columns': columns, 'rows': rows})
        error = None
    except SQLException as exc:
        json_data, error = None, exc.content
    except (KeyboardInterrupt, InterruptException):
        # Best effort: ask ATSD to stop the running statement, then report.
        service.cancel_query(qid)
        json_data, error = None, "Query cancelled by user."
    return json_data, error
def run_query(self, query, user):
    """Run an ATSD SQL query and return a ``(json_data, error)`` pair.

    On success ``json_data`` holds the serialized result set and
    ``error`` is ``None``; on failure or cancellation the roles flip.
    """
    cfg = self.configuration
    sql_service = SQLService(atsd_client.connect_url(
        self.url,
        cfg.get('username'),
        cfg.get('password'),
        verify=cfg.get('trust_certificate', False),
        timeout=cfg.get('timeout', 600)))
    # Server-side id so an interrupted query can be cancelled remotely.
    query_id = str(uuid.uuid4())
    request_params = {
        'outputFormat': 'csv',
        'metadataFormat': 'EMBED',
        'queryId': query_id,
    }
    json_data = None
    error = None
    try:
        logger.debug("SQL running query: %s", query)
        raw = sql_service.query_with_params(query, request_params)
        columns, rows = generate_rows_and_columns(raw)
        json_data = json_dumps({'columns': columns, 'rows': rows})
    except SQLException as e:
        error = e.content
    except (KeyboardInterrupt, InterruptException):
        sql_service.cancel_query(query_id)
        error = "Query cancelled by user."
    return json_data, error
def run_query(self, query, user):
    """Execute *query* on the configured ATSD instance.

    Returns ``(json_data, error)``: serialized rows/columns on success,
    otherwise ``None`` together with an error message.
    """
    get_option = self.configuration.get
    connection = atsd_client.connect_url(
        self.url,
        get_option("username"),
        get_option("password"),
        verify=get_option("trust_certificate", False),
        timeout=get_option("timeout", 600),
    )
    sql = SQLService(connection)
    # Unique query id enables targeted server-side cancellation below.
    query_id = str(uuid.uuid4())
    error = None
    json_data = None
    try:
        logger.debug("SQL running query: %s", query)
        response = sql.query_with_params(
            query,
            {"outputFormat": "csv", "metadataFormat": "EMBED", "queryId": query_id},
        )
        columns, rows = generate_rows_and_columns(response)
        json_data = json_dumps({"columns": columns, "rows": rows})
    except SQLException as err:
        error = err.content
    except (KeyboardInterrupt, InterruptException):
        sql.cancel_query(query_id)
        error = "Query cancelled by user."
    return json_data, error
target_db_connection = connect('/path/to/target.connection.properties') target_command_service = CommandsService(target_db_connection) metric_name = 'metric_name' sql_query = 'SELECT entity,metric, value, text, datetime, tags.* FROM "' + metric_name + '"' # print sql_query tags_names_to_remove = ['time_zone'] tags_values_to_remove = ['false'] default_tags_to_remove = {'_index': '1', 'status': '0'} batch_size = 1000 transformed_commands_batch = [] # read df from response with string dtype response = source_sql_service.query_with_params(sql_query) df = pandas.read_csv(StringIO(response), dtype=str, sep=',') for index, row in df.where(pandas.notnull(df), None).iterrows(): row_dict = row.to_dict() # stores fixed series fields series = {k: v for k, v in row_dict.items() if not k.startswith('tags.')} # stores series tags tags = dict( map(lambda kv: (kv[0].replace("tags.", ""), kv[1]), {k: v for k, v in row_dict.items() if k.startswith('tags.')}.items())) filter_tags = { k: v for k, v in tags.items()