def publish_tables_table(requestor, table_info):
    """
    Build the qasino_csvpublisher_tables status table (one row per
    published csv table in 'table_info') and send it via 'requestor'.
    """
    status_tablename = "qasino_csvpublisher_tables"
    table = qasino_table.QasinoTable(status_tablename)

    # Schema: (column name, qasino type).
    for col_name, col_type in (("identity", "varchar"),
                               ("tablename", "varchar"),
                               ("read_epoch", "real"),
                               ("read_time_s", "int"),
                               ("mtime", "int"),
                               ("nr_errors", "int"),
                               ("error_message", "varchar"),
                               ("nr_rows", "int"),
                               ("filepath", "varchar")):
        table.add_column(col_name, col_type)

    # Per-stat defaults used when a stat is missing for a table.
    stat_defaults = (("read_epoch", 0),
                     ("read_time_s", -1),
                     ("mtime", 0),
                     ("nr_errors", 0),
                     ("error_message", ""),
                     ("nr_rows", -1),
                     ("filepath", ""))

    for name, stats in table_info.iteritems():
        row = [Identity.get_identity(), name]
        row.extend(stats.get(key, default) for key, default in stat_defaults)
        table.add_row(row)

    logging.info("Sending table '%s' to '%s:%d' (%d rows).",
                 status_tablename, options.hostname, options.port,
                 table.get_nr_rows())

    requestor.send_table(table)
def publish_info_table(requestor, nr_tables, nr_errors):
    """
    Build the one-row qasino_csvpublisher_info summary table and send it
    via 'requestor'.
    """
    info_tablename = "qasino_csvpublisher_info"
    table = qasino_table.QasinoTable(info_tablename)

    for col_name, col_type in (("identity", "varchar"),
                               ("update_epoch", "int"),
                               ("nr_tables", "int"),
                               ("nr_errors", "int")):
        table.add_column(col_name, col_type)

    table.add_row([Identity.get_identity(), time.time(),
                   nr_tables, nr_errors])

    logging.info("Sending table '%s' to '%s:%d' (1 rows).",
                 info_tablename, options.hostname, options.port)

    requestor.send_table(table)
def insert_info_table(self, txn, db_generation_number,
                      generation_start_epoch, generation_duration_s):
    """
    Adds a status table (qasino_server_info) to the database in each
    generation.
    """
    table = qasino_table.QasinoTable("qasino_server_info")

    # All three columns are ints.
    for col_name in ("generation_number",
                     "generation_duration_s",
                     "generation_start_epoch"):
        table.add_column(col_name, "int")

    row = [str(db_generation_number),
           generation_duration_s,
           generation_start_epoch]
    table.add_row(row)

    return self.add_table_data(txn, table, Identity.get_identity())
def insert_sql_stats_table(self, txn, sql_backend_reader):
    """
    Adds a status table (qasino_server_sql_stats) to the database in
    each generation.  Note we are actually saving stats from the
    "reader" backend because that is where sql stats are logged.
    """
    table = qasino_table.QasinoTable("qasino_server_sql_stats")

    # Column names double as keys into the reader backend's stats dict.
    stat_names = ('sql_received', 'sql_completed', 'sql_errors')

    for stat_name in stat_names:
        table.add_column(stat_name, "int")

    table.add_row([sql_backend_reader.stats.get(stat_name, 0)
                   for stat_name in stat_names])

    return self.add_table_data(txn, table, Identity.get_identity())
def insert_views_table(self, txn, views):
    """
    Adds a table (qasino_server_views) to the database with one row per
    view describing whether it loaded and any error encountered.
    """
    table = qasino_table.QasinoTable("qasino_server_views")

    for col_name, col_type in (("viewname", "varchar"),
                               ("loaded", "int"),
                               ("errormsg", "varchar"),
                               ("view", "varchar")):
        table.add_column(col_name, col_type)

    for name, data in views.iteritems():
        table.add_row([name,
                       str(int(data['loaded'])),
                       str(data['error']),
                       data['view']])

    return self.add_table_data(txn, table, Identity.get_identity())
def insert_connections_table(self, txn):
    """
    Adds a table (qasino_server_connections) to the database with one
    row per known connection (identity, table count, last update time).
    """
    table = qasino_table.QasinoTable("qasino_server_connections")

    for col_name, col_type in (("identity", "varchar"),
                               ("nr_tables", "int"),
                               ("last_update_epoch", "int")):
        table.add_column(col_name, col_type)

    for conn_identity, conn_data in self.connections.items():
        row = [conn_identity,
               str(len(conn_data["tables"])),
               conn_data["last_update_epoch"]]
        table.add_row(row)

    return self.add_table_data(txn, table, Identity.get_identity())
def read_json_table(filehandle, options): """ Read json table from the input file handle. """ json_str = filehandle.read() try: import json json_obj = json.loads(json_str) except Exception as e: print "Failed to parse input json: {}".format(e) return table = qasino_table.QasinoTable() table.from_obj({"table": json_obj}) return table
def insert_update_stats_table(self, txn):
    """
    Adds a status table (qasino_server_update_stats) to the database in
    each generation, populated from this backend's own stats dict.
    """
    table = qasino_table.QasinoTable("qasino_server_update_stats")
    table.add_column("updates_received", "int")
    table.add_column("updates_completed", "int")
    table.add_column("update_errors", "int")
    table.add_column("inserts_received", "int")
    table.add_column("inserts_completed", "int")
    table.add_row([
        self.stats.get('updates_received', 0),
        # BUG FIX: key was misspelled 'updsates_completed', so this
        # column always reported 0 regardless of the real counter.
        self.stats.get('updates_completed', 0),
        self.stats.get('update_errors', 0),
        self.stats.get('inserts_received', 0),
        self.stats.get('inserts_completed', 0)
    ])
    return self.add_table_data(txn, table, Identity.get_identity())
def send_dummy_table(zmq_requestor, schema_version):
    """
    Build a two-row test table named by options.tablename and send it
    via the given requestor.  Schema version 0 has five columns; any
    other version adds a sixth varchar column ("foo").
    """
    table = qasino_table.QasinoTable(options.tablename)

    base_columns = [("identity", "varchar"),
                    ("the", "int"),
                    ("quick", "int"),
                    ("brown", "varchar"),
                    ("fox", "varchar")]
    timestamp = strftime("%Y-%m-%d %H:%M:%S GMT", gmtime())

    if int(schema_version) == 0:
        for col_name, col_type in base_columns:
            table.add_column(col_name, col_type)
        table.add_row([Identity.get_identity(), 34, 5, "yes", "no"])
        table.add_row([Identity.get_identity(), 1000, 321, "zanzabar",
                       timestamp])
    else:
        for col_name, col_type in base_columns + [("foo", "varchar")]:
            table.add_column(col_name, col_type)
        table.add_row([Identity.get_identity(), 34, 5, "yes", "no",
                       "here I am!"])
        table.add_row([Identity.get_identity(), 1000, 321, "zanzabar",
                       timestamp, ""])

    if options.persist:
        table.set_property("persist", 1)
    if options.static:
        table.set_property("static", 1)

    zmq_requestor.send_table(table)
def insert_tables_table(self, txn, sql_backend_writer,
                        sql_backend_writer_static):
    """
    Adds a table (qasino_server_tables) listing every table known to
    both the regular and static writer backends, plus a row describing
    this table itself.
    """
    table = qasino_table.QasinoTable("qasino_server_tables")

    for col_name, col_type in (("tablename", "varchar"),
                               ("nr_rows", "int"),
                               ("nr_updates", "int"),
                               ("last_update_epoch", "int"),
                               ("static", "int")):
        table.add_column(col_name, col_type)

    # Collect rows from both backends.
    for backend in (sql_backend_writer, sql_backend_writer_static):
        backend.add_tables_table_rows(table)

    # the chicken or the egg - how do we add ourselves?
    self_row = ["qasino_server_tables",
                table.get_nr_rows() + 1,
                1,
                time.time(),
                0]
    table.add_row(self_row)

    return sql_backend_writer.add_table_data(txn, table,
                                             util.Identity.get_identity())
def read_table(self, filehandle, tablename,
               colnames_lineno=1, types_lineno=2, options_lineno=-1,
               tablename_lineno=-1, skip_linenos=set()):
    """
    Read a CSV table from 'filehandle'.

    The *_lineno arguments give the 0-based line numbers holding the
    column names, column types, table options and table name; -1 means
    that header line is not present.  Lines in 'skip_linenos' are
    ignored.  Every other line is parsed as a CSV data row, with cells
    coerced per the declared column types.

    Returns (table, None) on success or (None, error_message) on any
    parse failure.
    """
    table = qasino_table.QasinoTable(tablename)

    column_names = None
    column_types = None

    # BUG FIX: bind lineno before the loop so the except handler below
    # cannot hit a NameError if iteration fails before the first line.
    lineno = 0

    try:
        for lineno, line in enumerate(filehandle):

            line = line.rstrip('\n\r')

            if lineno in skip_linenos:
                continue

            elif tablename_lineno == lineno:
                table.set_tablename(line)

            elif options_lineno == lineno:
                try:
                    parsed = csv.reader([line]).next()
                    for option_pair in parsed:
                        # If its a name value pair, its an option (otherwise
                        # its just a version number which we ignore)
                        m = re.search(r'^(\S+)=(\S+)$', option_pair,
                                      flags=re.IGNORECASE)
                        if m != None:
                            if m.group(1) == 'static' and self.istrue(m.group(2)):
                                table.set_property('static', 1)
                            elif m.group(1) == 'update' and self.istrue(m.group(2)):
                                table.set_property('update', 1)
                            elif m.group(1) == 'persist' and self.istrue(m.group(2)):
                                table.set_property('persist', 1)
                            elif m.group(1) == 'keycols':
                                table.set_property('keycols', m.group(2))
                            elif m.group(1) == 'identity':
                                table.set_property('identity', m.group(2))
                except Exception as inst:
                    # BUG FIX: format string had one placeholder but two
                    # arguments, which itself raised a TypeError.
                    raise Exception("Unable to parse options on line %d: %s"
                                    % (lineno + 1, inst))

            elif types_lineno == lineno:
                try:
                    parsed = csv.reader([line]).next()
                    column_types = [self.csv_to_qasino_type_map[x.strip()]
                                    for x in parsed]
                except Exception as inst:
                    raise Exception(
                        "Unsupported type in type list '%s' or parse error"
                        % inst)

                if column_names != None and len(column_names) != len(column_types):
                    # BUG FIX: was '% lineno + 1' — the %-format bound first
                    # and then tried to add 1 to a string (TypeError).
                    raise Exception(
                        "Number of type names does not match number of column names! (line %d)"
                        % (lineno + 1))

                table.set_column_types(column_types)

            elif colnames_lineno == lineno:
                column_names = csv.reader([line]).next()

                if column_types != None and len(column_names) != len(column_types):
                    # BUG FIX: same %-vs-+ precedence problem as above.
                    raise Exception(
                        "Number of column names does not match number of column types! (line %d)"
                        % (lineno + 1))

                table.set_column_names(column_names)

            # Data
            else:
                input_row = csv.reader([line]).next()

                ## Parse all the data into rows.
                output_row = list()

                # Read each column in data.
                for column_index, column_cell in enumerate(input_row):

                    # Get the type so we know if it should be an int or not
                    try:
                        column_type = column_types[column_index]
                    except:
                        raise Exception(
                            "Could not find type for column %d on line %d!"
                            % (column_index, lineno + 1))

                    try:
                        # Accept "null" fields as is (regardless of if they
                        # are an int/real etc)
                        if column_cell is None or column_cell == '':
                            output_row.append(column_cell)
                        elif column_type == 'INTEGER':
                            output_row.append(int(column_cell))
                        elif column_type == 'REAL':
                            output_row.append(float(column_cell))
                        else:
                            output_row.append(removeNonAscii(column_cell))
                    except Exception as e:
                        raise Exception("Parse error on line %d: %s"
                                        % (lineno + 1, str(e)))

                if table.add_row(output_row) == -1:
                    raise Exception("Wrong number of rows on line %d: '%s'"
                                    % (lineno + 1, line))

            # END if .. else ..
        # END for each line

    except Exception as inst:
        logging.error('Csv read error on line %d: %s', lineno + 1, inst)
        return (None, str(inst))

    table.set_column_names(column_names)
    table.set_column_types(column_types)

    return (table, None)
def gotMessage(self, messageId, *messageParts):
    # Dispatch one ZMQ request.  messageParts[0] must be a JSON object with
    # an "op" field.  Every branch replies via self.reply() except the
    # successful "query" path (the callback replies) and the
    # "generation_signal" op (no reply is sent at all — presumably the
    # sender does not wait for one; TODO confirm).
    try:
        obj = json.loads(messageParts[0])
    except Exception as e:
        logging.info(
            "ZmqReceiver: ERROR failed to get/parse content of POST: %s",
            str(e))
        response_meta = {
            "response_op": "error",
            "error_message": "Failed to parse JSON message: %s" % str(e),
            "identity": Identity.get_identity()
        }
        self.reply(messageId, json.dumps(response_meta))
        return

    # Default response; each branch below overwrites it before replying.
    response_meta = {
        "response_op": "error",
        "identity": Identity.get_identity(),
        "error_message": "Unspecified error"
    }

    if obj == None or obj["op"] == None:
        logging.error("ZmqReceiver: Error, unrecognized message.")
        response_meta = {
            "response_op": "error",
            "error_message": "Unrecognized request",
            "identity": Identity.get_identity()
        }
        self.reply(messageId, json.dumps(response_meta))

    elif obj["op"] == "get_table_list":
        #logging.info("ZmqReceiver: Got request for table list.")
        # Reply is two JSON parts: metadata, then the table list itself.
        response_meta = {
            "response_op": "tables_list",
            "identity": Identity.get_identity()
        }
        response_data = self.data_manager.get_table_list()
        self.reply(messageId, json.dumps(response_meta),
                   json.dumps(response_data))

    elif obj["op"] == "add_table_data":
        #logging.info("ZmqReceiver: Got request to add data.")
        #print "Got request: ", obj
        table = qasino_table.QasinoTable()
        err = table.from_obj(obj)
        if err is not None:
            errmsg = "Invalid input format: " + str(err)
            logging.info("ZmqReceiver: " + errmsg)
            response_meta = {
                "response_op": "error",
                "identity": Identity.get_identity(),
                "error_message": errmsg
            }
        else:
            response_meta = {
                "response_op": "ok",
                "identity": Identity.get_identity()
            }
            try:
                # Tables flagged "static" are routed to the static writer
                # backend; everything else goes to the regular writer.
                if table.get_property("static"):
                    self.data_manager.sql_backend_writer_static.async_add_table_data(
                        table, table.get_property("identity"))
                else:
                    self.data_manager.sql_backend_writer.async_add_table_data(
                        table, table.get_property("identity"))
            except Exception as e:
                # NOTE(review): this branch uses util.Identity while every
                # other branch in this method uses Identity directly —
                # confirm both names are imported in this module.
                response_meta = {
                    "response_op": "error",
                    "identity": util.Identity.get_identity(),
                    "error_message": str(e)
                }
        self.reply(messageId, json.dumps(response_meta))

    elif obj["op"] == "generation_signal":
        logging.info("ZmqReceiver: Got generation signal.")
        # Currently unused..

    elif obj["op"] == "query":
        #logging.info("ZmqReceiver: Got request for table list.")
        use_write_db = True if "use_write_db" in obj and obj[
            "use_write_db"] else False
        if "sql" not in obj:
            response_meta = {
                "response_op": "error",
                "error_message": "Must specify sql",
                "identity": Identity.get_identity()
            }
            self.reply(messageId, json.dumps(response_meta))
        else:
            try:
                self.process_sql_statement(obj["sql"], messageId,
                                           use_write_db=use_write_db)
            except Exception as e:
                logging.error(
                    'ZmqReceiver: Invalid message received from client: error="%s", msg="%s"',
                    str(e), str(obj))
                response_meta = {
                    "response_op": "error",
                    "error_message": str(e),
                    "identity": Identity.get_identity()
                }
                self.reply(messageId, json.dumps(response_meta))
            # Response is handled in the callback.
            return

    else:
        logging.error("ZmqReceiver: Error, unrecognized op '%s'", obj["op"])
        response_meta = {
            "response_op": "error",
            "identity": Identity.get_identity(),
            "error_message": "Unrecognized op '%s'" % obj["op"]
        }
        self.reply(messageId, json.dumps(response_meta))
def render_GET(self, request):
    """
    Handle an HTTP GET request.

    Supported ops (request.args['op'][0]):
      - name_value_update: upsert a single '<tablename>.<column>' value
        keyed by the caller's identity.
      - query: run a sql statement; 'format' may be 'text' or 'html'
        (html output is not implemented yet and falls back to json).

    Returns a JSON (or plain-text) response body, or NOT_DONE_YET when
    the reply is produced asynchronously by a callback.
    """
    request.setHeader("Content-Type", "application/json")

    if 'op' not in request.args:
        logging.error("HttpReceiver: No op specified for GET request: %s",
                      ','.join(request.args))
        response_meta = {
            "response_op": "error",
            "identity": util.Identity.get_identity(),
            "error_message": "No op specified"
        }
        return json.dumps(response_meta)

    ## A simple name value update op.
    if request.args['op'][0] == "name_value_update":
        logging.info("HttpReceiver: Name value update.")

        if 'name' in request.args and 'value' in request.args:
            identity = request.args['identity'][
                0] if 'identity' in request.args else 'unknown'

            # Parse the name into '<tablename>.<column>'
            name = request.args['name'][0]
            m = re.search(r'^([\w_]+)\.([\w_]+)$', name)
            if m == None:
                # BUG FIX: corrected typos "HttpReciever" / "namve".
                logging.info(
                    "HttpReceiver: Invalid name in name value update: '%s'",
                    name)
                response_meta = {
                    "response_op": "error",
                    "error_message": "Invalid name in name value update",
                    "identity": util.Identity.get_identity()
                }
                return json.dumps(response_meta)

            tablename = m.group(1)
            columnname = m.group(2)
            value = request.args['value'][0]

            # Build a one-row keyed "update" table and hand it to the writer.
            table = qasino_table.QasinoTable(tablename)
            table.add_column('identity', 'varchar')
            table.add_column(columnname, 'varchar')
            table.add_row([identity, value])
            table.set_property('update', 1)
            table.set_property('persist', 1)
            table.set_property('keycols', 'identity')

            self.data_manager.sql_backend_writer.async_add_table_data(
                table, identity)

            response_meta = {
                "response_op": "ok",
                "identity": util.Identity.get_identity()
            }
            return json.dumps(response_meta)

        # BUG FIX: missing name/value previously fell through to the
        # misleading "Unknown op" path; report the real problem.
        response_meta = {
            "response_op": "error",
            "error_message": "Must specify 'name' and 'value' params",
            "identity": util.Identity.get_identity()
        }
        return json.dumps(response_meta)

    ## Query op
    elif request.args['op'][0] == "query":

        # Check 'format' arg to determine output type.
        JSON = 1
        TEXT = 2
        HTML = 3  ## TODO

        # Renamed from 'format' to avoid shadowing the builtin.
        output_format = JSON
        try:
            if request.args['format'][0] == 'text':
                output_format = TEXT
            elif request.args['format'][0] == 'html':
                output_format = HTML
        except:
            pass

        # It is an error if no sql specified.
        if 'sql' not in request.args:
            logging.info("HttpReceiver: GET Query received with no sql.")
            if output_format == TEXT:
                request.setHeader("Content-Type", "text/plain")
                return "Must specify 'sql' param."
            else:
                response_meta = {
                    "response_op": "error",
                    "error_message": "Must specify 'sql' param",
                    "identity": util.Identity.get_identity()
                }
                return json.dumps(response_meta)

        sql = request.args['sql'][0]

        try:
            if output_format == TEXT:
                self.process_sql_statement_for_text(sql, request)
            else:
                self.process_sql_statement(sql, request)
            # The callback writes the response.
            return NOT_DONE_YET
        except Exception as e:
            logging.error('HttpReceiver: Error processing sql: %s: %s',
                          str(e), sql)
            if output_format == TEXT:
                request.setHeader("Content-Type", "text/plain")
                return "Error processing sql: %s" % str(e)
            else:
                response_meta = {
                    "response_op": "error",
                    "error_message": "Error processing sql: %s" % str(e),
                    "identity": util.Identity.get_identity()
                }
                return json.dumps(response_meta)

    logging.error("HttpReceiver: Unknown op: %s", request.args['op'][0])
    response_meta = {
        "response_op": "error",
        "identity": util.Identity.get_identity(),
        "error_message": "Unrecognized operation"
    }
    return json.dumps(response_meta)
def render_POST(self, request):
    """
    Handle an HTTP POST request.

    Supported ops (request.args['op'][0]):
      - add_csv_table_data: body is CSV with fixed header line positions
        (options=0, tablename=1, colnames=2, types=3, line 4 skipped).
      - get_table_list, add_table_data, query: body is a JSON object.

    Returns a JSON response body, or NOT_DONE_YET when the reply is
    produced asynchronously by a callback.
    """
    request.setHeader("Content-Type", "application/json")

    obj = dict()

    if 'op' not in request.args:
        logging.error("HttpReceiver: No op specified for POST request: %s",
                      ','.join(request.args))
        response_meta = {
            "response_op": "error",
            "identity": util.Identity.get_identity(),
            "error_message": "No op specified"
        }
        return json.dumps(response_meta)

    ## Add CSV table data op
    if request.args['op'][0] == "add_csv_table_data":
        logging.info("HttpReceiver: Add table data (CSV).")

        # BUG FIX: local was named 'csv', shadowing the csv module.
        csv_reader = csv_table_reader.CsvTableReader()
        (table, error) = csv_reader.read_table(request.content, None,
                                               skip_linenos={4},
                                               options_lineno=0,
                                               types_lineno=3,
                                               tablename_lineno=1,
                                               colnames_lineno=2)
        if table == None:
            logging.info(
                "HttpReceiver: Failure parsing csv input: {}".format(error))
            response_meta = {
                "response_op": "error",
                "identity": util.Identity.get_identity(),
                "error_message": str(error)
            }
            return json.dumps(response_meta)

        response_meta = {
            "response_op": "ok",
            "identity": util.Identity.get_identity()
        }
        try:
            # "static" tables go to the static writer backend.
            if table.get_property("static"):
                self.data_manager.sql_backend_writer_static.async_add_table_data(
                    table, table.get_property("identity"))
            else:
                self.data_manager.sql_backend_writer.async_add_table_data(
                    table, table.get_property("identity"))
        except Exception as e:
            response_meta = {
                "response_op": "error",
                "identity": util.Identity.get_identity(),
                "error_message": str(e)
            }
        return json.dumps(response_meta)

    ### The rest of the ops expect a JSON post body.
    try:
        # request.content might be a temp file if the content length is over
        # some threshold.  Fortunately json.load() will take a file ptr or a
        # StringIO obj.
        obj = json.load(request.content)
    except Exception as e:
        logging.info(
            "HttpReceiver: ERROR failed to get/parse content of POST: %s",
            str(e))
        response_meta = {
            "response_op": "error",
            "identity": util.Identity.get_identity(),
            "error_message": "Could not parse POST body: %s" % str(e)
        }
        return json.dumps(response_meta)

    ## Table list op
    if request.args['op'][0] == "get_table_list":
        response_meta = {
            "response_op": "tables_list",
            "identity": util.Identity.get_identity()
        }
        response_data = self.data_manager.get_table_list()
        return json.dumps(response_meta) + json.dumps(response_data)

    ## Add table data op
    if request.args['op'][0] == "add_table_data":
        table = qasino_table.QasinoTable()
        err = table.from_obj(obj)
        if err is not None:
            errmsg = "Invalid input format: " + str(err)
            logging.info("HttpReceiver: " + errmsg)
            response_meta = {
                "response_op": "error",
                "identity": util.Identity.get_identity(),
                "error_message": errmsg
            }
        else:
            response_meta = {
                "response_op": "ok",
                "identity": util.Identity.get_identity()
            }
            try:
                if table.get_property("static"):
                    self.data_manager.sql_backend_writer_static.async_add_table_data(
                        table, table.get_property("identity"))
                else:
                    self.data_manager.sql_backend_writer.async_add_table_data(
                        table, table.get_property("identity"))
            except Exception as e:
                response_meta = {
                    "response_op": "error",
                    "identity": util.Identity.get_identity(),
                    "error_message": str(e)
                }
        return json.dumps(response_meta)

    ## Query op
    if request.args['op'][0] == "query":
        if 'sql' not in obj:
            response_meta = {
                "response_op": "error",
                "error_message": "Must specify 'sql' param",
                "identity": util.Identity.get_identity()
            }
            logging.info("HttpReceiver: Query received with no sql.")
            return json.dumps(response_meta)
        try:
            self.process_sql_statement(obj["sql"], request)
            # The callback writes the response.
            return NOT_DONE_YET
        except Exception as e:
            logging.error('HttpReceiver: Error processing sql: %s: %s',
                          str(e), obj["sql"])
            response_meta = {
                "response_op": "error",
                "error_message": str(e),
                "identity": util.Identity.get_identity()
            }
            return json.dumps(response_meta)

    response_meta = {
        "response_op": "error",
        "identity": util.Identity.get_identity(),
        "error_message": "Unrecognized operation"
    }
    return json.dumps(response_meta)