def _supervisor_reset():
    """Stop and restart the supervisor daemon.

    Used between tests involving the supervisor and events processes,
    because triples from the former tests might still be written if the
    daemons are still running.
    """
    daemon_factory.supervisor_stop()
    daemon_factory.supervisor_startup()
    # So the scripts started in daemon mode write their events in a shared graph.
    lib_common.set_events_credentials()
def setUp(self):
    """Per-test setup: reset the supervisor, clear events, start a local agent."""
    # If a Survol agent does not run on this machine with this port,
    # this starts a local one.
    _supervisor_reset()
    lib_kbase.clear_all_events()
    self._rdf_test_agent, self._agent_url = start_cgiserver(RemoteRdf4TestServerPort)
    print("AgentUrl=", self._agent_url)
    # A shared database is needed because several processes use it simultaneously.
    # When reading the events, this uses the default SQLAlchemy database
    # also used by the CGI scripts.
    lib_common.set_events_credentials()
def Main():
    """Display the supervisor daemons status page.

    Queries the supervisor control interface for the daemons data and
    renders it with Jinja2 when available, otherwise with plain output.
    On failure an HTML error page is emitted instead.
    """
    lib_common.set_events_credentials()
    url_supervisor_control = daemon_factory.supervisorctl_url()
    # Lazy %-args: the message is formatted only if this level is enabled.
    logging.info("url_supervisor_control=%s", url_supervisor_control)
    try:
        urls_daemons_dict = _get_daemons_data()
    except Exception as exc:
        logging.error("Caught exc=%s", exc)
        lib_common.ErrorMessageHtml(
            "Supervisor %s display: Caught:%s" % (url_supervisor_control, exc))
        # ErrorMessageHtml presumably terminates the script — TODO confirm.
        # Guard anyway: without this return, falling through would raise a
        # NameError on the undefined urls_daemons_dict below.
        return
    if lib_util.GetJinja2():
        MainJinja(url_supervisor_control, urls_daemons_dict)
    else:
        MainNoJinja(url_supervisor_control, urls_daemons_dict)
def Main():
    """Retrieve all pending events into the output graph, then clear them.

    This can process remote hosts because it does not call any script,
    just shows them.
    """
    lib_common.set_events_credentials()
    cgiEnv = lib_common.ScriptEnvironment()
    logging.debug("Starting.")
    grph = cgiEnv.GetGraph()
    logging.debug("About to get events")
    num_triples = lib_kbase.retrieve_all_events_to_graph_then_clear(grph)
    # Lazy %-args instead of eager "%" interpolation.
    logging.debug("num_triples=%d", num_triples)
    cgiEnv.OutCgiRdf()
def Main():
    """CGI entry point: receive an RDF graph on stdin and store it as events.

    Reads exactly CONTENT_LENGTH bytes from stdin, parses them as an RDF
    graph, writes the triples to the shared events store, then replies with
    a JSON status document including timing information. Any failure is
    reported in the JSON reply rather than as a CGI error.
    """
    logging.getLogger().setLevel(logging.DEBUG)
    lib_common.set_events_credentials()
    time_start = time.time()
    http_content_length = int(os.environ['CONTENT_LENGTH'])
    # https://stackoverflow.com/questions/49171591/inets-httpd-cgi-script-how-do-you-retrieve-json-data
    # The script MUST NOT attempt to read more than CONTENT_LENGTH bytes,
    # even if more data is available.
    logging.debug("http_content_length=%d time_start=%f", http_content_length, time_start)
    extra_error_status = ""
    try:
        rdflib_graph = _get_graph_from_stdin(http_content_length)
        time_loaded = time.time()
        triples_number = len(rdflib_graph)
        files_updates_total_number = lib_kbase.write_graph_to_events(None, rdflib_graph)
        time_stored = time.time()
        logging.debug("time_stored=%f files_updates_total_number=%d",
                      time_stored, files_updates_total_number)
        server_result = {
            'success': 'true',
            'time_start': '%f' % time_start,
            'time_loaded': '%f' % time_loaded,
            'time_stored': '%f' % time_stored,
            'triples_number': '%d' % triples_number}
    except Exception as exc:
        logging.error("Exception=%s", exc)
        server_result = {
            'success': 'false',
            'time_start': '%f' % time_start,
            'error_message': '%s:%s:%s' % (str(exc), extra_error_status, traceback.format_exc())}
    json_output = json.dumps(server_result)
    sys.stdout.write('Content-Type: application/json\n')
    # Content-Length must count bytes, not characters. json.dumps defaults to
    # ASCII-only output so the two are equal today, but encode explicitly so
    # the header stays correct if non-ASCII ever reaches the payload.
    sys.stdout.write('Content-Length: %d\n\n' % len(json_output.encode('utf-8')))
    sys.stdout.write(json_output)
def Main():
    """Retrieve the stored events attached to one entity node into the graph.

    This can process remote hosts because it does not call any script,
    just shows them.
    """
    lib_common.set_events_credentials()
    cgi_env = lib_common.ScriptEnvironment()
    entity_id = cgi_env.m_entity_id
    name_space, entity_type = cgi_env.get_namespace_type()
    graph = cgi_env.GetGraph()
    # NOTE(review): this truthiness test looks possibly inverted — the node
    # below is built from the very entity_type rejected here. Behavior kept
    # as-is; confirm whether ErrorMessageHtml terminates the script.
    if entity_type:
        lib_common.ErrorMessageHtml(
            __file__ + " objects events retrieval not supported yet.")
    entity_node = lib_uris.gUriGen.node_from_args(entity_type, *entity_id)
    num_triples = lib_kbase.retrieve_events_to_graph(graph, entity_node)
    logging.debug("num_triples=%d", num_triples)
    cgi_env.OutCgiRdf()
def setUpModule():
    """Module-level test setup: start the supervisor daemon once."""
    daemon_factory.supervisor_startup()
    # So the scripts started in daemon mode write their events in a shared graph.
    lib_common.set_events_credentials()