def send_dummy_table(zmq_requestor, schema_version):
    table = qasino_table.QasinoTable(options.tablename)

    if int(schema_version) == 0:
        table.add_column("identity", "varchar")
        table.add_column("the", "int")
        table.add_column("quick", "int")
        table.add_column("brown", "varchar")
        table.add_column("fox", "varchar")
        table.add_row([Identity.get_identity(), 34, 5, "yes", "no"])
        table.add_row([Identity.get_identity(), 1000, 321, "zanzabar",
                       strftime("%Y-%m-%d %H:%M:%S GMT", gmtime())])
    else:
        table.add_column("identity", "varchar")
        table.add_column("the", "int")
        table.add_column("quick", "int")
        table.add_column("brown", "varchar")
        table.add_column("fox", "varchar")
        table.add_column("foo", "varchar")
        table.add_row([Identity.get_identity(), 34, 5, "yes", "no", "here I am!"])
        table.add_row([Identity.get_identity(), 1000, 321, "zanzabar",
                       strftime("%Y-%m-%d %H:%M:%S GMT", gmtime()), ""])

    if options.persist:
        table.set_property("persist", 1)

    if options.static:
        table.set_property("static", 1)

    zmq_requestor.send_table(table)
def update_table_stats(self, tablename, nr_rows, identity=None, now=None, sum=False):
    # Default arguments are evaluated once at definition time, so resolve
    # the identity and timestamp per call.
    if identity is None:
        identity = Identity.get_identity()
    if now is None:
        now = time.time()

    # Keep track of how many updates a table has received.
    if tablename not in self.tables:
        self.tables[tablename] = {"updates": 1,
                                  "nr_rows": nr_rows,
                                  "last_update_epoch": now}
    else:
        if sum:
            self.tables[tablename]["nr_rows"] += nr_rows
        self.tables[tablename]["updates"] += 1
        self.tables[tablename]["last_update_epoch"] = now

    # Keep track of which "identities" have added to a table.
    if identity not in self.connections:
        self.connections[identity] = {"tables": {tablename: nr_rows},
                                      "last_update_epoch": now}
    else:
        self.connections[identity]["last_update_epoch"] = now
        if sum and tablename in self.connections[identity]["tables"]:
            self.connections[identity]["tables"][tablename] += nr_rows
        else:
            self.connections[identity]["tables"][tablename] = nr_rows
def send_table(self, table):
    request_options = {'headers': {'Content-Type': 'application/json'}}

    if self.skip_ssl_verify:
        request_options['verify'] = False

    if self.username and self.password:
        request_options['auth'] = (self.username, self.password)

    url = '{}://{}:{}/request?op=add_table_data'.format(
        self.url_proto, self.hostname, self.port)

    jsondata = table.get_json(op="add_table_data",
                              identity=Identity.get_identity())
    #print jsondata

    request_options['data'] = jsondata

    try:
        response = self.conn.post(url, **request_options)
        response.raise_for_status()
    except Exception as e:
        return "ERROR: HttpRequestor: Request failed: url={}: {}".format(url, e)

    return None
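# A minimal usage sketch (not part of the original module): build a
# QasinoTable and publish it with HttpRequestor.send_table().  The hostname,
# credentials, table name and row values below are hypothetical.
def example_publish_one_table():
    requestor = http_requestor.HttpRequestor("qasino.example.com", 443,
                                             username="user",
                                             password="secret",
                                             skip_ssl_verify=False)

    table = qasino_table.QasinoTable("example_table")
    table.add_column("identity", "varchar")
    table.add_column("value", "int")
    table.add_row([Identity.get_identity(), 42])

    # send_table() returns None on success or an error string on failure.
    err = requestor.send_table(table)
    if err:
        logging.error(err)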
def publish_tables_table(requestor, table_info):
    this_tablename = "qasino_csvpublisher_tables"

    table = qasino_table.QasinoTable(this_tablename)
    table.add_column("identity", "varchar")
    table.add_column("tablename", "varchar")
    table.add_column("read_epoch", "real")
    table.add_column("read_time_s", "int")
    table.add_column("mtime", "int")
    table.add_column("nr_errors", "int")
    table.add_column("error_message", "varchar")
    table.add_column("nr_rows", "int")
    table.add_column("filepath", "varchar")

    for tablename, table_stats in table_info.iteritems():
        table.add_row([Identity.get_identity(),
                       tablename,
                       table_stats.get("read_epoch", 0),
                       table_stats.get("read_time_s", -1),
                       table_stats.get("mtime", 0),
                       table_stats.get("nr_errors", 0),
                       table_stats.get("error_message", ""),
                       table_stats.get("nr_rows", -1),
                       table_stats.get("filepath", "")])

    logging.info("Sending table '%s' to '%s:%d' (%d rows).",
                 this_tablename, options.hostname, options.port,
                 table.get_nr_rows())

    requestor.send_table(table)
def send_generation_signal(self, generation_number, generation_duration_s):
    msg = {"op": "generation_signal",
           "identity": Identity.get_identity(),
           "generation_number": generation_number}

    if generation_duration_s:
        msg["generation_duration_s"] = generation_duration_s

    self.publish(json.dumps(msg), "GENSIG")
def insert_info_table(self, txn, db_generation_number,
                      generation_start_epoch, generation_duration_s):
    """
    Adds a status table (qasino_server_info) to the database in each generation.
    """
    table = qasino_table.QasinoTable("qasino_server_info")
    table.add_column("generation_number", "int")
    table.add_column("generation_duration_s", "int")
    table.add_column("generation_start_epoch", "int")
    table.add_row([str(db_generation_number),
                   generation_duration_s,
                   generation_start_epoch])

    return self.add_table_data(txn, table, Identity.get_identity())
def publish_info_table(requestor, nr_tables, nr_errors):
    tablename = "qasino_csvpublisher_info"

    table = qasino_table.QasinoTable(tablename)
    table.add_column("identity", "varchar")
    table.add_column("update_epoch", "int")
    table.add_column("nr_tables", "int")
    table.add_column("nr_errors", "int")
    table.add_row([Identity.get_identity(), time.time(), nr_tables, nr_errors])

    logging.info("Sending table '%s' to '%s:%d' (1 row).",
                 tablename, options.hostname, options.port)

    requestor.send_table(table)
def insert_views_table(self, txn, views):
    """
    Adds a table (qasino_server_views) to the database with per view info.
    """
    table = qasino_table.QasinoTable("qasino_server_views")
    table.add_column("viewname", "varchar")
    table.add_column("loaded", "int")
    table.add_column("errormsg", "varchar")
    table.add_column("view", "varchar")

    for viewname, viewdata in views.iteritems():
        table.add_row([viewname,
                       str(int(viewdata["loaded"])),
                       str(viewdata["error"]),
                       viewdata["view"]])

    return self.add_table_data(txn, table, Identity.get_identity())
def insert_sql_stats_table(self, txn, sql_backend_reader):
    """
    Adds a status table (qasino_server_sql_stats) to the database in each
    generation.  Note we are actually saving stats from the "reader" backend
    because that is where sql stats are logged.
    """
    table = qasino_table.QasinoTable("qasino_server_sql_stats")
    table.add_column("sql_received", "int")
    table.add_column("sql_completed", "int")
    table.add_column("sql_errors", "int")
    table.add_row([sql_backend_reader.stats.get('sql_received', 0),
                   sql_backend_reader.stats.get('sql_completed', 0),
                   sql_backend_reader.stats.get('sql_errors', 0)])

    return self.add_table_data(txn, table, Identity.get_identity())
def insert_connections_table(self, txn):
    """
    Adds a table (qasino_server_connections) to the database with per
    connection info.
    """
    table = qasino_table.QasinoTable("qasino_server_connections")
    table.add_column("identity", "varchar")
    table.add_column("nr_tables", "int")
    table.add_column("last_update_epoch", "int")

    for connection, connection_data in self.connections.items():
        table.add_row([connection,  # identity
                       str(len(connection_data["tables"])),
                       connection_data["last_update_epoch"]])

    return self.add_table_data(txn, table, Identity.get_identity())
def insert_update_stats_table(self, txn):
    """
    Adds a status table (qasino_server_update_stats) to the database in each
    generation.
    """
    table = qasino_table.QasinoTable("qasino_server_update_stats")
    table.add_column("updates_received", "int")
    table.add_column("updates_completed", "int")
    table.add_column("update_errors", "int")
    table.add_column("inserts_received", "int")
    table.add_column("inserts_completed", "int")
    table.add_row([self.stats.get('updates_received', 0),
                   self.stats.get('updates_completed', 0),
                   self.stats.get('update_errors', 0),
                   self.stats.get('inserts_received', 0),
                   self.stats.get('inserts_completed', 0)])

    return self.add_table_data(txn, table, Identity.get_identity())
def sql_complete_callback(self, result, query_id, query_start, messageId):
    """
    Called when a sql statement completes.
    """
    # To start just our identity.
    response_meta = {"identity": Identity.get_identity()}

    retval = result["retval"]

    error_message = ''
    if "error_message" in result:
        error_message = str(result["error_message"])

    # Success?
    if retval == 0 and "data" in result:
        response_meta["response_op"] = "result_table"
        response_meta["table"] = result["data"]
        if "max_widths" in result:
            response_meta["max_widths"] = result["max_widths"]

    # Or error?
    if retval != 0:
        logging.info("ZmqReceiver: (%d) SQL error: %s", query_id, error_message)
        response_meta["response_op"] = "error"
        response_meta["error_message"] = error_message
    else:
        logging.info("ZmqReceiver: (%d) SQL completed (%.02f seconds)",
                     query_id, time.time() - query_start)

    # Send the response!
    self.reply(messageId, json.dumps(response_meta))
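# For reference, the reply envelope assembled above takes roughly one of two
# shapes (a sketch with illustrative values, not captured output):
#
#   success: {"identity": "server.example.com",
#             "response_op": "result_table",
#             "table": { ... },          # query result rows/columns
#             "max_widths": { ... }}     # optional column display widths
#
#   error:   {"identity": "server.example.com",
#             "response_op": "error",
#             "error_message": "..."}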
def send_table(self, table):
    deferred = self.sendMsg(table.get_json(op="add_table_data",
                                           identity=Identity.get_identity()))
    deferred.addCallback(self.message_received)
def gotMessage(self, messageId, *messageParts):
    try:
        obj = json.loads(messageParts[0])
    except Exception as e:
        logging.info("ZmqReceiver: ERROR failed to get/parse content of POST: %s", str(e))
        response_meta = {"response_op": "error",
                         "error_message": "Failed to parse JSON message: %s" % str(e),
                         "identity": Identity.get_identity()}
        self.reply(messageId, json.dumps(response_meta))
        return

    response_meta = {"response_op": "error",
                     "identity": Identity.get_identity(),
                     "error_message": "Unspecified error"}

    if obj is None or obj.get("op") is None:
        logging.error("ZmqReceiver: Error, unrecognized message.")
        response_meta = {"response_op": "error",
                         "error_message": "Unrecognized request",
                         "identity": Identity.get_identity()}
        self.reply(messageId, json.dumps(response_meta))

    elif obj["op"] == "get_table_list":
        #logging.info("ZmqReceiver: Got request for table list.")
        response_meta = {"response_op": "tables_list",
                         "identity": Identity.get_identity()}
        response_data = self.data_manager.get_table_list()
        self.reply(messageId, json.dumps(response_meta), json.dumps(response_data))

    elif obj["op"] == "add_table_data":
        #logging.info("ZmqReceiver: Got request to add data.")
        #print "Got request: ", obj
        table = qasino_table.QasinoTable()
        err = table.from_obj(obj)

        if err is not None:
            errmsg = "Invalid input format: " + str(err)
            logging.info("ZmqReceiver: " + errmsg)
            response_meta = {"response_op": "error",
                             "identity": Identity.get_identity(),
                             "error_message": errmsg}
        else:
            response_meta = {"response_op": "ok",
                             "identity": Identity.get_identity()}
            try:
                if table.get_property("static"):
                    self.data_manager.sql_backend_writer_static.async_add_table_data(
                        table, table.get_property("identity"))
                else:
                    self.data_manager.sql_backend_writer.async_add_table_data(
                        table, table.get_property("identity"))
            except Exception as e:
                response_meta = {"response_op": "error",
                                 "identity": Identity.get_identity(),
                                 "error_message": str(e)}

        self.reply(messageId, json.dumps(response_meta))

    elif obj["op"] == "generation_signal":
        logging.info("ZmqReceiver: Got generation signal.")
        # Currently unused..

    elif obj["op"] == "query":
        use_write_db = True if "use_write_db" in obj and obj["use_write_db"] else False

        if "sql" not in obj:
            response_meta = {"response_op": "error",
                             "error_message": "Must specify sql",
                             "identity": Identity.get_identity()}
            self.reply(messageId, json.dumps(response_meta))
        else:
            try:
                self.process_sql_statement(obj["sql"], messageId,
                                           use_write_db=use_write_db)
            except Exception as e:
                logging.error('ZmqReceiver: Invalid message received from client: error="%s", msg="%s"',
                              str(e), str(obj))
                response_meta = {"response_op": "error",
                                 "error_message": str(e),
                                 "identity": Identity.get_identity()}
                self.reply(messageId, json.dumps(response_meta))

            # Response is handled in the callback.
            return

    else:
        logging.error("ZmqReceiver: Error, unrecognized op '%s'", obj["op"])
        response_meta = {"response_op": "error",
                         "identity": Identity.get_identity(),
                         "error_message": "Unrecognized op '%s'" % obj["op"]}
        self.reply(messageId, json.dumps(response_meta))
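# For reference, requests handled above are JSON objects keyed by "op".
# Two illustrative examples (a sketch; values are hypothetical, not captured
# traffic):
#
#   {"op": "get_table_list", "identity": "client.example.com"}
#
#   {"op": "query",
#    "sql": "select * from qasino_server_info;",
#    "use_write_db": false}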
def main():
    global options

    logging.basicConfig(format="%(asctime)s %(message)s",
                        datefmt="%Y-%m-%d %H:%M:%S",
                        level=logging.INFO)

    parser = OptionParser()
    parser.add_option("-I", "--identity", dest="identity",
                      help="Use IDENTITY as identity", metavar="IDENTITY")
    parser.add_option("-H", "--hostname", dest="hostname", default='localhost',
                      help="Send table to HOSTNAME qasino server", metavar="HOSTNAME")
    parser.add_option("-p", "--port", dest="port",
                      default=constants.ZMQ_RPC_PORT, type=int,
                      help="Use PORT for qasino server", metavar="PORT")
    parser.add_option("-u", "--username", dest="username",
                      help="HTTPS auth username")
    parser.add_option("-w", "--password", dest="password",
                      help="HTTPS auth password")
    parser.add_option("-P", "--pubsub-port", dest="pubsub_port",
                      default=constants.ZMQ_PUBSUB_PORT, type=int,
                      help="Use PORT for qasino pubsub connection", metavar="PORT")
    parser.add_option("-i", "--index", dest="indexes", action="append",
                      help="Path to an index file to process")
    parser.add_option("-f", "--index-list", dest="index_list",
                      help="Path to a file with a list of index files to process in it")
    parser.add_option("-T", "--table", dest="tables", action="append",
                      help="Tables to limit publishing to")
    parser.add_option("-t", "--table-list", dest="table_list",
                      help="Path to a file with a list of tables to limit publishing to")
    parser.add_option("-d", "--send-delay-max", dest="send_delay_max", default=15,
                      help="Max delay to add when it's time to send tables.")
    parser.add_option("-x", "--interval", dest="interval", default=None, type=int,
                      help="Interval to send updates (this will turn off subscribing).")
    parser.add_option("-s", "--use-https", dest="use_https", default=False,
                      action="store_true",
                      help="Use HTTP over SSL/TLS protocol to publish table.")
    parser.add_option("-k", "--skip-ssl-verify", dest="skip_ssl_verify",
                      default=False, action="store_true",
                      help="Don't verify SSL certificates.")
    parser.add_option("-g", "--gen-signal-timeout", dest="gen_signal_timeout",
                      default=120, type=int,
                      help="Timeout after which we restart the generation signal subscription.")

    (options, args) = parser.parse_args()

    logging.info("Qasino csv publisher starting")

    if options.identity is not None:
        Identity.set_identity(options.identity)

    logging.info("Identity is %s", Identity.get_identity())

    if options.hostname is None:
        logging.info("Please specify a hostname to connect to.")
        exit(1)

    zmq_factory = ZmqFactory()

    # Create a request object (either ZMQ or HTTPS).

    if options.use_https:
        import http_requestor

        # Change the default port if we're https
        if options.port == constants.ZMQ_RPC_PORT:
            options.port = constants.HTTPS_PORT

        logging.info("Connecting to {}:{} with HTTPS to send tables.".format(
            options.hostname, options.port))

        # Disable extraneous logging in requests.
        requests_log = logging.getLogger("requests")
        requests_log.setLevel(logging.WARNING)

        requestor = http_requestor.HttpRequestor(options.hostname, options.port,
                                                 username=options.username,
                                                 password=options.password,
                                                 skip_ssl_verify=options.skip_ssl_verify)
    else:
        # Use zmq requestor
        import zmq_requestor

        logging.info("Connecting to {}:{} with ZeroMQ to send tables.".format(
            options.hostname, options.port))

        requestor = zmq_requestor.ZmqRequestor(options.hostname, options.port,
                                               zmq_factory)

    # Determine the update trigger (interval or signal).
    if options.interval is None or options.interval < 10:
        logging.info("Connecting to {}:{} on pubsub ZeroMQ channel to listen for generation signals.".format(
            options.hostname, options.pubsub_port))

        # Create a zeromq pub sub subscriber.
        import zmq_subscriber
        subscriber = zmq_subscriber.ZmqSubscriber(options.hostname,
                                                  options.pubsub_port,
                                                  zmq_factory)

        # Read and send the table when a generation signal comes in.
        global last_gen_signal_time
        last_gen_signal_time = time.time()
        subscriber.subscribe_generation_signal(initiate_read_and_send_tables,
                                               requestor, options)

        # Set a timeout so we can restart the subscribe if we haven't heard
        # from the server in a while.
        reactor.callLater(5, check_for_gen_signal_timeout, options, subscriber,
                          requestor, zmq_factory)
    else:
        # Read and send the table at a fixed interval.
        logging.info("Sending data on fixed interval ({} seconds).".format(
            options.interval))

        request_metadata_task = task.LoopingCall(read_and_send_tables,
                                                 requestor, options)
        request_metadata_task.start(int(options.interval))

    # To read and send immediately, uncomment:
    # read_and_send_tables(requestor, options)

    # Run the event loop
    reactor.run()

    logging.info("Qasino csv publisher exiting")
logging.basicConfig(format="%(asctime)s %(message)s",
                    datefmt="%Y-%m-%d %H:%M:%S",
                    level=logging.INFO)

logging.info("Sending dummy table on port %d", constants.HTTP_PORT)

table = {}

if int(options.schema_version) == 0:
    table = {"tablename": "dummy",
             "column_names": ["identity", "the", "quick", "brown", "fox"],
             "column_types": ["varchar", "int", "int", "varchar", "varchar"],
             "rows": [[Identity.get_identity(), 34, 5, "yes", "no"],
                      [Identity.get_identity(), 1000, 321, "zanzabar",
                       strftime("%Y-%m-%d %H:%M:%S GMT", gmtime())]]}
else:
    table = {"tablename": "dummy",
             "column_names": ["identity", "the", "quick", "brown", "fox", "foo"],
             "column_types": ["varchar", "int", "int", "varchar", "varchar", "varchar"],
             "rows": [[Identity.get_identity(), 34, 5, "yes", "no", "here I am!"],
                      [Identity.get_identity(), 1000, 321, "zanzabar",
                       strftime("%Y-%m-%d %H:%M:%S GMT", gmtime()), ""]]}

URL = 'http://%s:%d/request?op=add_table_data' % (options.hostname, constants.HTTP_PORT)

msg = {"op": "add_table_data",
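# The msg literal above is truncated in this excerpt, so the full request
# body is not shown.  Below is a minimal sketch of how such a message could
# be completed and POSTed with the requests library; wrapping the table dict
# in a "table" field alongside an identity is an assumption for illustration,
# not taken from the source.
import json
import requests

msg = {"op": "add_table_data",
       "identity": Identity.get_identity(),  # assumed field
       "table": table}                       # assumed field

response = requests.post(URL, data=json.dumps(msg),
                         headers={'Content-Type': 'application/json'})
response.raise_for_status()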
def request_metadata(self):
    msg = {"op": "get_table_list", "identity": Identity.get_identity()}

    #logging.info("ZmqRequestor: Requesting table list from %s.", self.remote_host)

    deferred = self.sendMsg(json.dumps(msg))
    deferred.addCallback(self.message_received)
parser.add_option("-d", "--data-format", dest="data_format", default='json', help="Read data in as either 'json' or 'csv'.") parser.add_option("-f", "--filename", dest="filename", help="Read data from FILENAME.") (options, args) = parser.parse_args() print "Qasino publish starting" if options.identity != None: Identity.set_identity(options.identity) print "Identity is {}".format(Identity.get_identity()) if options.hostname == None: print "Please specify a hostname to connect to." exit(1) # invalid combos if (options.use_zmq and options.use_https) or (options.use_http and options.use_https) or (options.use_http and options.use_zmq): print "Pick one of --use-https, --use-https or --use-zmq" exit(1); # default to https if not options.use_http and not options.use_https and not options.use_zmq: options.use_https = True # set the default port