def client_process1(self, q):
    """Child-process body: create and persist 10 WebSite objects.

    Reports the outcome through *q*: ``["OK", <list of object-id strings>]``
    on success, ``"FAIL"`` otherwise.

    :param q: multiprocessing queue used to send the result to the parent.
    """
    try:
        from dataclay.api import init, finish
        logger.debug('**Starting init 1**')
        init()

        # Imports. Imports must be located here in order to simulate
        # "import" order in a real scenario.
        # VERY IMPORTANT: Imports must be located AFTER init
        from model.classes import WebSite, WebPage, URI

        # Test. From now on, the Functional Test itself.
        web_sites_ids_str = list()
        for i in range(0, 10):
            alias = "bsc%s" % str(i)
            web_site = WebSite(alias)
            try:
                web_site.make_persistent(alias=alias)
            except Exception:
                # Best-effort: log the failure but keep persisting the rest.
                traceback.print_exc()
            web_sites_ids_str.append(str(web_site.get_object_id()))
        finish()
        q.put(["OK", web_sites_ids_str])
    except Exception:
        # Was a bare `except:`; narrowed so SystemExit/KeyboardInterrupt
        # propagate, and the cause is now printed instead of silently lost.
        import traceback
        traceback.print_exc()
        q.put("FAIL")
def client_process2(self, q):
    """Child-process body: fetch the persisted ``Mapa`` by alias and walk its nodes.

    Puts ``"OK"`` on *q* on success, ``"FAIL"`` otherwise.

    :param q: multiprocessing queue used to send the result to the parent.
    """
    try:
        from dataclay.api import init, finish
        logger.info('**Starting init 2**')
        init()

        # Imports. Imports must be located here in order to simulate
        # "import" order in a real scenario.
        # VERY IMPORTANT: Imports must be located AFTER init
        from model.classes import Mapa, Node
        self.session_initialized = True

        # Test. From now on, the Functional Test itself.
        m = Mapa.get_by_alias("mapa")
        logger.info("Map obtained ")
        mapa = m.mapa
        logger.info("** Getter of mapa done with num elements: %s" % str(len(mapa)))
        for nid, node in mapa.items():
            logger.info("** Found node %s" % str(nid))
        finish()
        q.put("OK")
    except Exception:
        # Was a bare `except:`; narrowed, and the traceback is now printed
        # so a FAIL result can actually be diagnosed.
        import traceback
        traceback.print_exc()
        q.put("FAIL")
def tearDown(self):
    """Finish all services started for simulation.

    Closes the dataClay session only if a test actually opened one,
    then shuts down the whole mock simulation.
    """
    if self.session_initialized:
        # Lazy import: dataclay.api may only be importable once the
        # simulation environment is up.
        from dataclay.api import finish
        finish()
    self.mock.finishSimulation()
    logger.debug("Finished tear down of test")
def step_impl(context, user_name):
    """Finish a session.

    :param context: the current feature context
    :type context: context
    :param user_name: user name
    :type user_name: string
    """
    # Imported lazily, matching the rest of the step definitions.
    from dataclay.api import finish
    finish()
def persist_and_exit(self):
    """Exit hook: flush all in-memory objects to disk and shut the execution environment down.

    The statement order matters: GC is stopped before flushing so nothing is
    collected mid-flush, and the session is closed only after EE state is stored.
    """
    logger.info("Performing exit hook --persisting files")
    self.execution_environment.prepareThread()
    # Stop the garbage collector first so objects cannot be collected
    # while they are being flushed below.
    self.execution_environment.get_runtime().stop_gc()
    logger.info("Flushing all objects to disk")
    self.execution_environment.get_runtime().flush_all()
    logger.info("Stopping runtime")
    self.execution_environment.store_ee_info()
    # Close the dataClay session after all state has been persisted.
    from dataclay.api import finish
    finish()
    clean_runtime()
def finishSimulation(self):
    """Tear down the full mock dataClay: client session, Python EEs, Java side, files and logging.

    The sequence is ordering-sensitive: client first, then Python EEs, then the
    JVM, and the log listener last so it can drain all shutdown messages.
    """
    from dataclay import api
    logger.debug('**[PythonMockDataClay]** Finish client if started')
    # Only close the session if some test actually initialized it.
    if api.is_initialized():
        api.finish()
    self.finishPythonExecutionEnvironments()
    logger.debug('**[PythonMockDataClay]** Finishing JVM...')
    self.finishJavaMockDataClay()
    self.cleanFiles()
    # A None sentinel tells the log-listener process to stop; join() waits
    # for it to finish draining the queue.
    self.multiprocess_queue.put_nowait(None)
    self.log_listener.join()
    logger.debug('**[PythonMockDataClay]** Finished')
def clean_scenario(context):
    """Clean feature scenario (stubs, temporary files...).

    Closes the dataClay session, runs the external cleanup script and
    resets the registered test users.

    :param context: the current feature context
    :type context: context
    """
    # NOTE: the previous docstring documented a `scenario` parameter that
    # does not exist in the signature; it has been removed.
    from dataclay.api import finish
    finish()
    # Fixed, trusted command string (no user input), so os.system is acceptable here.
    cmd = "/bin/bash resources/utils/clean_scenario.sh"
    print(cmd)
    os.system(cmd)
    ALL_TEST_USERS.clear()
def main():
    """Entry point: initialize dataClay + COMPSs workers, run the trackers, then shut down.

    If a single CLI argument other than "False" is given, the run also
    coordinates start/end notifications over MQTT.
    """
    import sys
    import time
    from dataclay.api import init, finish
    from dataclay.exceptions.exceptions import DataClayException

    mqtt_wait = False
    if len(sys.argv) == 2:
        mqtt_wait = (sys.argv[1] != "False")

    init()
    # Imports of registered classes must happen AFTER init().
    from CityNS.classes import DKB, ListOfObjects

    # Register MQTT client to subscribe to MQTT server in 192.168.7.42
    if mqtt_wait:
        client = register_mqtt()
        client.loop_start()

    # initialize all computing units in all workers
    num_cus = 8
    for _ in range(num_cus):
        init_task()
    compss_barrier()

    # Publish to the MQTT broker that the execution has started
    if mqtt_wait:
        publish_mqtt(client)

    # EAFP: reuse the knowledge base if it already exists, otherwise build it.
    try:
        kb = DKB.get_by_alias("DKB")
    except DataClayException:
        kb = DKB()
        list_objects = ListOfObjects()
        list_objects.make_persistent()
        kb.list_objects = list_objects
        kb.make_persistent("DKB")

    start_time = time.time()
    # execute_trackers(["192.168.50.103"], kb)
    execute_trackers([("/tmp/pipe_yolo2COMPSs", "/tmp/pipe_COMPSs2yolo")], kb)
    # pipe_paths = [("/tmp/pipe_yolo2COMPSs", "/tmp/pipe_COMPSs2yolo"), ("/tmp/pipe_write", "/tmp/pipe_read")]
    # print("ExecTime: " + str(time.time() - start_time))
    # print("ExecTime per Iteration: " + str((time.time() - start_time) / NUM_ITERS))

    if mqtt_wait:
        # Wait for all MQTT completion callbacks. Previously this was a
        # `pass` busy-wait pinning a CPU core; sleep between polls instead.
        while CD_PROC < NUM_ITERS:
            time.sleep(0.1)
    print("Exiting Application...")
    finish()
def execute_from_command_line(argv=None):
    """Given the calling arguments to the manage.py script, do stuff.

    :param argv: Typically, sys.argv. Should be explicitly set by caller.
    :return: Nothing.
    """
    # Perform implicit initialization of connections (client.properties only, no storage.properties).
    # If DATACLAYCLIENTCONFIG is unset we fall back to the conventional local path.
    # NOTE: the old `assert client_properties_path` here was dead code — getenv
    # with a non-empty default can never return a falsy value, and asserts are
    # stripped under `python -O` anyway — so it has been removed.
    client_properties_path = os.getenv("DATACLAYCLIENTCONFIG", "./cfgfiles/client.properties")
    init_connection(client_properties_path)

    _execute_from_command_line(argv)

    # Do the cleanup to avoid __del__ messages of gRPC library
    finish()
def restartDataClay(self):
    """Restart the dataClay simulation in place.

    Stops the client session and Python EEs, restarts services WITHOUT
    wiping the databases, and cycles the log listener across the restart.
    The statement order is significant.
    """
    from dataclay import api
    logger.debug('**[PythonMockDataClay]** Finish client if started')
    # Only close the session if a test actually initialized it.
    if api.is_initialized():
        api.finish()
    self.finishPythonExecutionEnvironments()
    # Keep persisted data: only the services themselves are restarted.
    self.mock_dataclay.finishServicesWithoutCleaningDBs()
    logger.debug('**[PythonMockDataClay]** Wait to restart...')
    # None is the stop sentinel for the log-listener process; join, then
    # start a fresh listener for the restarted simulation.
    self.multiprocess_queue.put_nowait(None)
    self.log_listener.join()
    self.startLogListener()
    self.prepareClientPropertiesFile()
    self.mock_dataclay.restartDataClaySimulation()
    self.startPythonExecutionEnvironments()
def client_process2(self, q, web_sites_ids_str):
    """Child-process body: re-fetch the 10 WebSites by alias and check their object ids.

    Puts ``"OK"`` on *q* when every id matches, ``"FAIL"`` otherwise.

    :param q: multiprocessing queue used to send the result to the parent.
    :param web_sites_ids_str: object-id strings produced by the first client process.
    """
    try:
        from dataclay.api import init, finish
        logger.debug('**Starting init 2 **')
        init()

        # Imports. Imports must be located here in order to simulate
        # "import" order in a real scenario.
        # VERY IMPORTANT: Imports must be located AFTER init
        from model.classes import WebSite, WebPage, URI

        for i in range(0, 10):
            web_site_2 = WebSite.get_by_alias("bsc%s" % str(i))
            self.assertEqual(web_sites_ids_str[i], str(web_site_2.get_object_id()))
        finish()
        q.put("OK")
    except Exception:
        # Was a bare `except:`; narrowed, and the traceback is printed so
        # an assertion failure is visible in the parent's logs.
        import traceback
        traceback.print_exc()
        q.put("FAIL")
def main():
    """Entry point: ensure the "DKB" knowledge base exists, run the trackers, report timing."""
    # `import sys` was unused here and has been removed.
    import time
    from dataclay.api import init, register_dataclay, finish
    from dataclay.exceptions.exceptions import DataClayException

    init()
    # Imports of registered classes must happen AFTER init().
    from CityNS.classes import DKB
    # register_dataclay("192.168.7.32", 11034)

    # EAFP: create and persist the knowledge base only if it is not aliased yet.
    try:
        DKB.get_by_alias("DKB")
    except DataClayException:
        DKB().make_persistent("DKB")

    start_time = time.time()
    execute_trackers()
    print("ExecTime: " + str(time.time() - start_time))
    print("Exiting Application...")
    finish()
def client_process1(self, q):
    """Child-process body: get-or-create the "mapa" object and add one Node to it.

    Puts ``"OK"`` on *q* on success, ``"FAIL"`` otherwise.

    :param q: multiprocessing queue used to send the result to the parent.
    """
    try:
        from dataclay.api import init, finish
        logger.info('**Starting init 1**')
        init()

        # Imports. Imports must be located here in order to simulate
        # "import" order in a real scenario.
        # VERY IMPORTANT: Imports must be located AFTER init
        from model.classes import Mapa, Node
        self.session_initialized = True

        # Test. From now on, the Functional Test itself.
        # EAFP: reuse the persisted map if the alias exists, otherwise create it.
        try:
            m = Mapa.get_by_alias("mapa")
            logger.info("Already in the DB")
        except Exception:
            m = Mapa()
            m.make_persistent(alias="mapa")
            logger.info(
                "Not found, creating Mapa and making it persistent")

        # Node with current location added by jetson
        # it should be done with get_current_location() in order to get pos1 and pos2
        n = Node(1, 1, 1, 1.5, 1.5)
        n.make_persistent()
        m.add(n)
        print("NODE created and added to Mapa")
        time.sleep(5)
        finish()
        q.put("OK")
    except Exception:
        # Was a bare `except:`; narrowed, and the traceback is printed so a
        # FAIL result can be diagnosed from the parent.
        import traceback
        traceback.print_exc()
        q.put("FAIL")
def init_graph_db(self):
    """Retrieve hwloc and cpu_info for each machine and add files to the Data Directory.

    Collects the device ids of this agent's device (and, for a leader, of
    every child device) for which files were generated, then records the
    comma-separated list in the config file and closes the dataClay session.
    """
    LOG.info("Generating hwloc and cpu_info files")

    # get dataClay agent_id; bail out early if it was never configured.
    # NOTE(review): reads via self.cnf but writes via self.conf_manager below
    # — presumably two handles to the same config; verify.
    agent_id = self.cnf.get_variable(CONFIG_SECTION_GENERAL, CONFIG_VARIABLE_DC_AGENT)
    if agent_id is None:
        LOG.error(
            "'dataclay_agentid' has not been set in the 'general' section of the config file"
        )
        return

    # get this device's agent and collect ids of devices with generated files
    this_agent = Agent.get_by_alias(agent_id)
    device_ids = list()
    if self.generate_files(this_agent.device):
        device_ids.append(this_agent.device.device_id)

    # get child devices - if leader
    if this_agent.is_leader:
        for child in this_agent.children:
            if self.generate_files(child.device):
                device_ids.append(child.device.device_id)

    # write the device list to the config file
    device_list = ','.join(str(dev_id) for dev_id in device_ids)
    LOG.info("DataClay device list: " + device_list)
    self.conf_manager.set_variable(CONFIG_SECTION_PHYSICAL,
                                   CONFIG_VARIABLE_MACHINES,
                                   device_list)

    # cleanup
    api.finish()
#!/usr/bin/env python2
import sys
import traceback

from dataclay.api import init, finish

# Init dataClay session
init()

# Class imports must happen AFTER init().
from CityNS.classes import City

if __name__ == "__main__":
    # Best-effort: persist one City; log but tolerate failure so the
    # session is still closed cleanly below.
    try:
        city = City()
        city.make_persistent("my-pycity")
    except Exception:
        # Was a bare `except:`; narrowed so SystemExit/KeyboardInterrupt propagate.
        traceback.print_exc()
    # Close session
    finish()
    # sys.exit instead of the interactive-only `exit` builtin.
    sys.exit(0)
#owner = User.get_by_alias(user_key) #assert my_user == owner # Some other component(s) will do: behaviour_info = collect_behaviour_info() security_info = collect_security_info() sharing_info = collect_sharing_model_info() my_agent.set_behaviour_info(behaviour_info) my_agent.set_security_info(security_info) my_agent.set_sharing_model_info(sharing_info) my_agent.is_leader = True # You may want to test that instead... note that it will fail the get_static_info => that's by design! # my_agent.is_cloud = True # Test all test_all_not_cloud(my_agent) # Invented skeleton. I am not sure how the Cloud Agent should be managed if my_agent.is_leader: run_mf2c_as_leader(my_agent) else: run_mf2c_as_normal(my_agent) # Testing methods as cloud agent my_agent.is_cloud = True test_all_cloud(my_agent) api.finish()