def eval_bot(params):
    """Evaluate the tuned agent against the baseline agent over all scenarios.

    One dialogue is simulated per scenario between the baseline agent and an
    agent configured from ``params``; success rate and negotiation margins are
    aggregated into a hyperopt-style result dict.

    NOTE(review): relies on module-level ``args``, ``agents``, ``scenarios``,
    ``default_config``, ``max_turns`` -- confirm they are defined at call time.
    """
    success = []
    margins = []
    agent_id = args.agent_id
    baseline_agent_id = 1 - agent_id
    config = Config(**params)
    for scenario in scenarios:
        baseline_agent = agents[baseline_agent_id].new_session(
            baseline_agent_id, scenario.kbs[baseline_agent_id], default_config)
        agent = agents[agent_id].new_session(
            agent_id, scenario.kbs[agent_id], config)
        controller = Controller(scenario, [baseline_agent, agent])
        ex = controller.simulate(max_turns, verbose=False)
        if StrategyAnalyzer.has_deal(ex):
            success.append(1)
            final_price = ex.outcome['offer']['price']
            margin = StrategyAnalyzer.get_margin(
                ex, final_price, 1,
                scenario.kbs[agent_id].facts['personal']['Role'],
                remove_outlier=False)
            margins.append(margin)
        else:
            success.append(0)
    # BUG FIX: the original computed sem(margins) unconditionally, which
    # raises / yields NaN when no dialogue reached a deal, even though the
    # 'loss' entry already guarded the empty case.  Guard every statistic.
    has_margins = len(margins) > 0
    return {
        'loss': -1. * np.mean(margins) if has_margins else 1000,
        'status': STATUS_OK,
        'ste': sem(margins) if has_margins else 0.,
        'success_rate': np.mean(success) if success else 0.,
    }
def generate_examples(num_examples, scenario_db, examples_path, max_examples,
                      remove_fail, max_turns):
    """Simulate up to ``max_examples`` dialogues and dump them to JSON.

    num_examples -- starting offset into the scenario list (also the counter
        of accepted examples as the loop proceeds).
    remove_fail -- when True, incomplete dialogues are dropped instead of kept.

    NOTE(review): relies on module-level ``agents`` and ``args``.
    """
    examples = []
    num_failed = 0
    scenarios = scenario_db.scenarios_list
    for i in range(max_examples):
        scenario = scenarios[num_examples % len(scenarios)]
        sessions = [
            agents[0].new_session(0, scenario.kbs[0]),
            agents[1].new_session(1, scenario.kbs[1])
        ]
        controller = Controller(scenario, sessions)
        ex = controller.simulate(max_turns, verbose=args.verbose)
        if not controller.complete():
            num_failed += 1
            if remove_fail:
                continue
        examples.append(ex)
        num_examples += 1
        # Checkpoint after every accepted dialogue so a crash loses nothing.
        # BUG FIX: the original used Python-2-only ``print >> out, ...``,
        # a SyntaxError under Python 3, while this same function already
        # calls the Python 3 ``print()`` function below.
        with open(examples_path, 'w') as out:
            out.write(json.dumps([e.to_dict() for e in examples]) + '\n')
    if num_failed == 0:
        print('All {} dialogues succeeded!'.format(num_examples))
    else:
        print('Number of failed dialogues:', num_failed)
def __init__(self, **kwargs):
    """Initialize the OpenStack controller and log its constructor kwargs."""
    # Lazy %-style args hand the pretty-printed kwargs to the logger.
    logging.info('openstack.Controller.__init__: entry:\n%s',
                 pprint.PrettyPrinter().pformat(kwargs))
    CoreController.__init__(self, **kwargs)
    self._process_services_list = self.process_initial_servers
    # Topology is determined later; could be 'all-in-one' or 'multi-node'.
    self._topology = 'unknown'
class AbstractGui():
    """Base GUI class: a lazily-created singleton wrapping a Controller."""

    instance = None  # cached singleton, created on first get_instance()

    def __init__(self, test=False, db_instance=None):
        self.is_test = test
        self.con = Controller(test=test, db_instance=db_instance)

    @classmethod
    def get_instance(cls, test=False, db_instance=None):
        """Return the cached instance, constructing it on first use."""
        if cls.instance:
            return cls.instance
        cls.instance = cls(test=test, db_instance=db_instance)
        return cls.instance

    @classmethod
    def get_controller(cls):
        """Shortcut to the singleton's Controller."""
        return cls.get_instance().con

    def purge_all_data(self):
        """Drop every table/collection via the Controller."""
        self.con.drop_db()

    def exit(self):
        """Close the database connection."""
        self.con.close_db()

    def run(self):
        """To be implemented by subclasses."""
        pass
def setUp(self):
    """Populate a full test database and hand it to the parent setUp."""
    controller = Controller(True)
    self.db_creator = DatabaseCreator.get_instance(con=controller)
    self.db_creator.fill_full_db()
    super().setUp(db_instance=controller.get_db())
def setUp(self):
    """Init the DB, ensure the visualization directory exists, start threads."""
    database.initialize()
    # Create the output directory for profile visualizations on first run.
    if not isdir(profile_visualizations_path):
        mkdir(profile_visualizations_path)
    # Record the window start so later queries can be time-bounded.
    self.start = datetime.utcnow()
    initialize_threading()
    self.controller = Controller(workflows_path=config.test_workflows_path)
def __init__(self):
    """Notification controller: wires display/screen events to its handlers."""
    Controller.__init__(self)
    self.activeNotification = None
    self.enabled = False
    self.notificationLib = None
    self.errorNotificationLib = None
    # Event wiring: screen availability triggers (re)initialization; the
    # remaining events drive showing, refreshing, and clearing notifications.
    self.bind(ScreenAvaibleEvent(), self.initNotifications)
    self.bind(ShowNotificationEvent(), self.show)
    self.bind(DisplayRefreshEvent(), self.update)
    self.bind(PauseEvent(), self.removeNotification)
def setUp(self):
    """Load the simple data-manipulation playbook into a fresh controller."""
    case_database.initialize()
    workflows_dir = path.join(".", "tests", "testWorkflows",
                              "testGeneratedWorkflows")
    self.controller = Controller(workflows_path=workflows_dir)
    playbook = path.join(config.test_workflows_path,
                         'simpleDataManipulationWorkflow.playbook')
    self.controller.load_workflows_from_file(path=playbook)
    self.id_tuple = ('simpleDataManipulationWorkflow', 'helloWorldWorkflow')
    self.workflow_name = construct_workflow_name_key(*self.id_tuple)
    self.testWorkflow = self.controller.get_workflow(*self.id_tuple)
def start(self):
    """
    Parse the program arguments and initiate the vulnerability analysis.
    """
    # Hand every CLI option straight through to the Controller, which
    # drives the whole analysis run.
    controller = Controller(self.args.networks, self.args.add_networks,
                            self.args.omit_networks, self.args.update_modules,
                            self.args.config, self.args.ports,
                            self.args.output, self.args.input,
                            self.user_results, self.args.separate_networks,
                            self.verbose)
    controller.run()
def __init__(self):
    """Parse CLI arguments, load the config file, and build the Controller."""
    self.print_banner()
    self.parser = argparse.ArgumentParser()
    self.arguments = self.get_arguments()
    self.script_path = os.path.dirname(os.path.realpath(__file__))
    # Holds all important program parameters.
    # NOTE(review): defaultdict(None) as the inner factory means missing
    # inner keys still raise KeyError -- confirm that is intended.
    self.properties = defaultdict(lambda: defaultdict(None))
    self.check_config_exist()
    self.config_path = self.arguments.config_file if self.arguments.config_file else 'config.ini'
    self.config = self.read_config(self.config_path)
    # Set the parameters that have no console switch.
    self._test()
    self.check_arguments()
    self.merge_args_and_config()
    if self.arguments.update_config:
        self.save_current_config()
    # Copy the merged config into `properties` for more flexible storage of
    # the values.
    for section in self.config.sections():
        for option in self.config[section]:
            self.properties[section][option] = self.config.get(
                section, option)
    # So imports resolve relative to main.py's location.
    sys.path.append(self.script_path)
    self.controller = Controller(self.properties)
def _init_controller(my_index, partner_type, scenario, chat_id):
    """Build a Controller pairing a human session with a partner session.

    NOTE(review): this is a closure -- it reads ``self`` from the enclosing
    scope rather than receiving it as a parameter.
    """
    my_session = self.systems[HumanSystem.name()].new_session(my_index, scenario.get_kb(my_index))
    # The partner takes the opposite seat (1 - my_index).
    partner_session = self.systems[partner_type].new_session(1 - my_index, scenario.get_kb(1 - my_index))
    controller = Controller(scenario, [my_session, partner_session], chat_id=chat_id)
    return controller, my_session, partner_session
def _get_controller(self, scenario, split='train'):
    """Build a Controller for `scenario`, flipping agent roles half the time."""
    # Swap the two KBs on a deep copy so the caller's scenario is untouched.
    if random.random() < 0.5:
        scenario = copy.deepcopy(scenario)
        first_kb, second_kb = scenario.kbs
        scenario.kbs = (second_kb, first_kb)
    sessions = [self.agents[i].new_session(i, scenario.kbs[i]) for i in (0, 1)]
    return Controller(scenario, sessions)
class TestController(unittest.TestCase):
    """Unit tests for Controller construction and workflow loading."""

    def setUp(self):
        self.controller = Controller(name="testController")

    def test_create_controller(self):
        """A fresh controller has its name, empty state, and a scheduler."""
        self.assertEqual(self.controller.name, "testController")
        self.assertEqual(self.controller.instances, {})
        self.assertIsNone(self.controller.tree)
        self.assertIsInstance(self.controller.scheduler, GeventScheduler)
        self.assertEqual(self.controller.ancestry, ["testController"])

    def test_load_workflow_from_file(self):
        """Loading a single workflow registers it under the overridden playbook."""
        workflow_file = '{0}{1}{2}.workflow'.format(paths.workflows_path, sep, "test")
        expected_key = _WorkflowKey("testPlaybook", "helloWorldWorkflow")
        loaded = self.controller.load_workflow_from_file(
            path=workflow_file,
            workflow_name="helloWorldWorkflow",
            playbook_override="testPlaybook")
        self.assertTrue(loaded)
        self.assertTrue(expected_key in self.controller.workflows)

    def test_load_workflows_from_file(self):
        """A tiered workflow file yields both parent and child workflows."""
        tiered_file = config.test_workflows_path + "tieredWorkflow.workflow"
        self.controller.load_workflows_from_file(
            path=tiered_file, playbook_override="testPlaybook")
        for workflow in ("parentWorkflow", "childWorkflow"):
            self.assertTrue(
                _WorkflowKey("testPlaybook", workflow) in self.controller.workflows)

    def test_load_all_workflows_from_directory(self):
        """Every workflow discovered in the directory ends up registered."""
        directory = config.test_workflows_path
        expected_keys = [
            _WorkflowKey(workflow_file.split('.')[0], workflow_name)
            for workflow_file in helpers.locate_workflows_in_directory(directory)
            for workflow_name in helpers.get_workflow_names_from_file(
                os.path.join(config.test_workflows_path, workflow_file))
        ]
        self.controller.load_all_workflows_from_directory(path=directory)
        for key in expected_keys:
            self.assertTrue(key in self.controller.workflows)
def generate_examples(agents, agent_names, scenarios, num_examples, max_turns):
    """Simulate dialogues, letting each agent play both seats per scenario."""
    examples = []
    for i in range(num_examples):
        scenario = scenarios[i % len(scenarios)]
        # Each agent needs to play both buyer and seller.
        for j in (0, 1):
            ordered_agents = [agents[j], agents[1 - j]]
            ordered_names = [agent_names[j], agent_names[1 - j]]
            sessions = [ordered_agents[k].new_session(k, scenario.kbs[k])
                        for k in (0, 1)]
            controller = Controller(scenario, sessions,
                                    session_names=ordered_names)
            examples.append(controller.simulate(max_turns, verbose=args.verbose))
    return examples
def __init__(self):
    """Set up the audio mixer, load sound effects, and register handlers."""
    Controller.__init__(self)
    pygame.mixer.init()
    res = Resource()
    self.config = Config()
    self.musicOn = False
    # Short sound effects keyed by the game event that triggers them.
    self.effects = {name: res.audio(name + ".wav")
                    for name in ("bubble", "click", "delete")}
    self.bgSound = res.audio("bg.wav", music=True)
    self.bind(CellAddedEvent(), self.onCellAddedDoBubble)
    self.bind(CellRemovedEvent(), self.onCellRemovedDoDelete)
    self.bind(PauseEvent(), self.onPauseDoClick)
    self.bind(GameStartEvent(), self.onGameStartDoBackground)
def report(self, url):
    """Build the top-apps report from the CSV at `url`, persist, and return it."""
    controller = Controller()
    frame = controller.load_csv(file_=url)
    top_news, top_books, top_musics = controller.get_top_apps(frame)
    generated = controller.generate_report(top_news=top_news,
                                           top_books=top_books,
                                           top_musics=top_musics)
    report_file = controller.generate_csv(generated)
    saved = controller.save_db(frame)
    return report_file, saved
def server_process_main(stop_handler, scmStatus=None):
    """Start the monitor server's Controller RPC thread and block until stopped.

    stop_handler -- object the Controller watches for shutdown requests.
    scmStatus -- optional Windows SCM status reporter (start pending/started).
    """
    if scmStatus is not None:
        scmStatus.reportStartPending()

    _init_logging(main_config)
    controller = Controller(main_config, stop_handler)
    logger.info('Starting Server RPC Thread: %s' % ' '.join(sys.argv))
    controller.start()

    print "Server out at: " + main_config.ams_monitor_out_file()
    print "Server log at: " + main_config.ams_monitor_log_file()

    save_pid(os.getpid(), PID_OUT_FILE)

    if scmStatus is not None:
        scmStatus.reportStarted()

    # For some reason this is needed to catch system signals like SIGTERM
    # TODO fix if possible
    signal.pause()

    # The controller thread finishes when the stop event is signaled
    controller.join()
    remove_file(PID_OUT_FILE)
    pass
def test_workflowExecutionEvents(self):
    """Executing a workflow records exactly six subscribed execution events."""
    workflow_name = construct_workflow_name_key('multiactionWorkflowTest',
                                                'multiactionWorkflow')
    c = Controller(name="testExecutionEventsController")
    c.load_workflows_from_file(path=config.test_workflows_path +
                               "multiactionWorkflowTest.playbook")
    # Subscribe the case to the four workflow-level event types.
    subs = {
        'testExecutionEventsController': Subscription(
            subscriptions={
                workflow_name: Subscription(events=[
                    "App Instance Created", "Step Execution Success",
                    "Next Step Found", "Workflow Shutdown"
                ])
            })
    }
    case_subscription.set_subscriptions({
        'testExecutionEvents':
        case_subscription.CaseSubscriptions(subscriptions=subs)
    })
    c.execute_workflow('multiactionWorkflowTest', 'multiactionWorkflow')
    shutdown_pool()
    # Query the case DB for the recorded event history and check its length.
    execution_events_case = case_database.case_db.session.query(case_database.Case) \
        .filter(case_database.Case.name == 'testExecutionEvents').first()
    execution_event_history = execution_events_case.events.all()
    self.assertEqual(
        len(execution_event_history), 6,
        'Incorrect length of event history. '
        'Expected {0}, got {1}'.format(6, len(execution_event_history)))
def test_ffkExecutionEventsCase(self):
    """Step/next/flag/filter subscriptions capture exactly five events, and
    successful step events carry the expected result payload."""
    c = Controller(name="testStepFFKEventsController")
    c.load_workflows_from_file(path=config.test_workflows_path +
                               "basicWorkflowTest.playbook")
    workflow_name = construct_workflow_name_key('basicWorkflowTest',
                                                'helloWorldWorkflow')
    # Nested subscription chain: step -> next-step -> flag -> filter.
    filter_sub = Subscription(events=['Filter Error'])
    flag_sub = Subscription(events=['Flag Success', 'Flag Error'],
                            subscriptions={'length': filter_sub})
    next_sub = Subscription(
        events=['Next Step Taken', 'Next Step Not Taken'],
        subscriptions={'regMatch': flag_sub})
    step_sub = Subscription(events=[
        'Function Execution Success', 'Input Validated',
        'Conditionals Executed'
    ], subscriptions={'1': next_sub})
    subs = {
        'testStepFFKEventsController': Subscription(
            subscriptions={
                workflow_name: Subscription(
                    subscriptions={'start': step_sub})
            })
    }
    # Global subscriptions apply across all steps/next-steps/flags/filters.
    global_subs = case_subscription.GlobalSubscriptions(
        step=[
            'Function Execution Success', 'Input Validated',
            'Conditionals Executed'
        ],
        next_step=['Next Step Taken', 'Next Step Not Taken'],
        flag=['Flag Success', 'Flag Error'],
        filter=['Filter Error'])
    case_subscription.set_subscriptions({
        'testStepFFKEventsEvents': case_subscription.CaseSubscriptions(
            subscriptions=subs, global_subscriptions=global_subs)
    })
    c.execute_workflow('basicWorkflowTest', 'helloWorldWorkflow')
    shutdown_pool()
    step_ffk_events_case = case_database.case_db.session.query(case_database.Case) \
        .filter(case_database.Case.name == 'testStepFFKEventsEvents').first()
    step_ffk_event_history = step_ffk_events_case.events.all()
    self.assertEqual(
        len(step_ffk_event_history), 5,
        'Incorrect length of event history. '
        'Expected {0}, got {1}'.format(5, len(step_ffk_event_history)))
    # Only STEP-level events carry a data payload worth inspecting.
    step_json = [
        step.as_json() for step in step_ffk_event_history
        if step.as_json()['message'] == 'STEP'
    ]
    for step in step_json:
        if step['type'] == 'Function executed successfully':
            self.assertDictEqual(step['data'],
                                 {'result': 'REPEATING: Hello World'})
        else:
            self.assertEqual(step['data'], '')
def server_process_main(stop_handler, scmStatus=None):
    """Start the server's Controller RPC thread and block until it finishes.

    stop_handler -- object the Controller watches for shutdown requests.
    scmStatus -- optional Windows SCM status reporter.
    """
    if scmStatus is not None:
        scmStatus.reportStartPending()

    config = Configuration()
    _init_logging(config)
    controller = Controller(config, stop_handler)
    logger.info("Starting Server RPC Thread: %s" % " ".join(sys.argv))
    controller.start()

    print "Server out at: " + SERVER_OUT_FILE
    print "Server log at: " + SERVER_LOG_FILE

    save_pid(os.getpid(), PID_OUT_FILE)

    if scmStatus is not None:
        scmStatus.reportStarted()

    # The controller thread finishes when the stop event is signaled
    controller.join()
    remove_file(PID_OUT_FILE)
    pass
def makeService(self, options):
    """Build the top-level Twisted service tree.

    Opens the node and template databases, then attaches two TCP sites:
    one for node registration and one for external control requests.
    Returns the assembled MultiService.
    """
    config = Parser(options['config'])
    top_service = service.MultiService()

    db_bmnodes = config.dbnodes
    nodeschema_name = 'nodes.sql'
    self.opendb(db_bmnodes, nodeschema_name)
    db_templates = config.dbtemplates
    templateschema_name = 'templates.sql'
    self.opendb(db_templates, templateschema_name)

    # Shared queue for dead nodes, consumed by both servers.
    dead_nodes_queue = Queue.Queue()

    ### server for node registration ###
    reg_port = config.node_registration_port
    reg = Register(port=reg_port, dbnodes=db_bmnodes,
                   queue=dead_nodes_queue)
    node = server.Site(reg, logPath="/dev/null")
    node.noisy = False
    node_iface = internet.TCPServer(reg_port, node)
    node_iface.setServiceParent(top_service)

    ### server for external ###
    listening_port = config.ext_listening_port
    contr = Controller(port=listening_port, dbnodes=db_bmnodes,
                       dbtemplates=db_templates,
                       datafile_dir=config.datafile_dir,
                       queue=dead_nodes_queue)
    ext = server.Site(contr, logPath="/dev/null")
    ext.noisy = False
    ext_iface = internet.TCPServer(listening_port, ext)
    ext_iface.setServiceParent(top_service)

    ### return top service ###
    return top_service
def run(self):
    """Re-run banner/argument/config processing and (re)create the Controller."""
    self.print_banner()
    self.arguments = self.get_arguments()
    self.check_config_exist()
    self.config_path = self.arguments.config_file if self.arguments.config_file else self.config_path
    self.config = self.read_config(self.config_path)
    # Set the parameters that have no console switch.
    #self._test()
    self.check_arguments()
    self.merge_args_and_config()
    if self.arguments.update_config:
        self.save_current_config()
    # Copy the merged config into `properties` for more flexible storage of
    # the values.
    for section in self.config.sections():
        for option in self.config[section]:
            self.properties[section][option] = self.config.get(
                section, option)
    self.controller = Controller(self.properties)
def __init__(self):
    """Initializes a new Context object. This acts as an interface for objects
    to access other event specific variables that might be needed.
    """
    self.apps = self.get_apps()
    # Imports are deferred to construction time -- presumably to avoid
    # circular imports at module load; confirm before moving them up.
    from server.app import app
    from server.appdevice import Device, App
    from server.database import User, Role, userRoles, db, user_datastore
    from server.triggers import Triggers
    from server.casesubscription import CaseSubscription
    from core.controller import Controller
    # Re-export the server/database handles on the context for easy access.
    self.User = User
    self.Role = Role
    self.Device = Device
    self.App = App
    self.Triggers = Triggers
    self.CaseSubscription = CaseSubscription
    self.flask_app = app
    self.user_roles = userRoles
    self.db = db
    self.user_datastore = user_datastore
    self.controller = Controller()
def test_ffkExecutionEvents(self):
    """Step/next/flag/filter subscriptions (filter success included) capture
    exactly six events for the hello-world workflow."""
    workflow_name = construct_workflow_name_key('basicWorkflowTest',
                                                'helloWorldWorkflow')
    c = Controller(name="testStepFFKEventsController")
    c.load_workflows_from_file(path=config.test_workflows_path +
                               "basicWorkflowTest.playbook")
    # Nested subscription chain: step -> next-step -> flag -> filter.
    filter_sub = Subscription(events=['Filter Success', 'Filter Error'])
    flag_sub = Subscription(events=['Flag Success', 'Flag Error'],
                            subscriptions={'length': filter_sub})
    next_sub = Subscription(
        events=['Next Step Taken', 'Next Step Not Taken'],
        subscriptions={'regMatch': flag_sub})
    step_sub = Subscription(events=[
        "Function Execution Success", "Input Validated",
        "Conditionals Executed"
    ], subscriptions={'1': next_sub})
    subs = {
        'testStepFFKEventsController': Subscription(
            subscriptions={
                workflow_name: Subscription(
                    subscriptions={'start': step_sub})
            })
    }
    case_subscription.set_subscriptions({
        'testStepFFKEventsEvents':
        case_subscription.CaseSubscriptions(subscriptions=subs)
    })
    c.execute_workflow('basicWorkflowTest', 'helloWorldWorkflow')
    shutdown_pool()
    step_ffk_events_case = case_database.case_db.session.query(case_database.Case) \
        .filter(case_database.Case.name == 'testStepFFKEventsEvents').first()
    step_ffk_event_history = step_ffk_events_case.events.all()
    self.assertEqual(
        len(step_ffk_event_history), 6,
        'Incorrect length of event history. '
        'Expected {0}, got {1}'.format(6, len(step_ffk_event_history)))
def test_stepExecutionEvents(self):
    """Subscribing to the three step-level events records exactly three."""
    workflow_name = construct_workflow_name_key('basicWorkflowTest',
                                                'helloWorldWorkflow')
    c = Controller(name="testStepExecutionEventsController")
    c.load_workflows_from_file(path=config.test_workflows_path +
                               "basicWorkflowTest.playbook")
    # Subscribe only the 'start' step of the workflow.
    subs = {
        'testStepExecutionEventsController': Subscription(
            subscriptions={
                workflow_name: Subscription(
                    subscriptions={
                        'start': Subscription(events=[
                            "Function Execution Success", "Input Validated",
                            "Conditionals Executed"
                        ])
                    })
            })
    }
    case_subscription.set_subscriptions({
        'testStepExecutionEvents':
        case_subscription.CaseSubscriptions(subscriptions=subs)
    })
    c.execute_workflow('basicWorkflowTest', 'helloWorldWorkflow')
    shutdown_pool()
    step_execution_events_case = case_database.case_db.session.query(case_database.Case) \
        .filter(case_database.Case.name == 'testStepExecutionEvents').first()
    step_execution_event_history = step_execution_events_case.events.all()
    self.assertEqual(
        len(step_execution_event_history), 3,
        'Incorrect length of event history. '
        'Expected {0}, got {1}'.format(3, len(step_execution_event_history)))
def server_process_main(stop_handler, scmStatus=None):
    """Start the server's Controller RPC thread and block until it finishes.

    Unlike the sibling variants, the PID file is written before the
    Controller is started here.

    stop_handler -- object the Controller watches for shutdown requests.
    scmStatus -- optional Windows SCM status reporter.
    """
    if scmStatus is not None:
        scmStatus.reportStartPending()

    save_pid(os.getpid(), PID_OUT_FILE)

    config = Configuration()
    _init_logging(config)
    controller = Controller(config, stop_handler)
    logger.info('Starting Server RPC Thread: %s' % ' '.join(sys.argv))
    controller.start()

    print "Server out at: " + SERVER_OUT_FILE
    print "Server log at: " + SERVER_LOG_FILE

    if scmStatus is not None:
        scmStatus.reportStarted()

    # The controller thread finishes when the stop event is signaled
    controller.join()
    remove_file(PID_OUT_FILE)
    pass
# of the message catalog locale.bindtextdomain(APP_NAME, localedir) locale.bind_textdomain_codeset(APP_NAME, "UTF-8") # to load in current locale properly for sorting etc try: locale.setlocale(locale.LC_ALL, "") except locale.Error, e: pass def start(): logger.info("1") try: setup_locale_and_gettext() except Exception, e: logger.exception("Error loading the internationalitation: %s", e) try: cr = Controller.get_instance() cr.start() except Exception, e: logger.exception("Error starting cloudsn: %s", e) #We not traduce this notification because the problem can be gettext notification.notify("Error starting cloudsn", str(e), utils.get_error_pixbuf()) if __name__ == "__main__": logger.debug("0") start()
def __init__(self, optimizer):
    """Initialize the base Controller with the given optimizer.

    target_y -- presumably the target output value the controller drives
    toward; defaults to 0 -- TODO confirm against callers.
    """
    Controller.__init__(self, optimizer)
    self.target_y = 0  # default
locale.bindtextdomain(APP_NAME, localedir)
locale.bind_textdomain_codeset(APP_NAME, "UTF-8")

# Apply the current locale so sorting etc. behave correctly.
try:
    locale.setlocale(locale.LC_ALL, "")
except locale.Error, e:
    pass


def start():
    """Set up locale/gettext, then start the Controller singleton."""
    logger.info("1")
    try:
        setup_locale_and_gettext()
    except Exception, e:
        logger.exception("Error loading the internationalitation: %s", e)
    try:
        cr = Controller.get_instance()
        cr.start()
    except Exception, e:
        logger.exception("Error starting cloudsn: %s", e)
        # This notification is deliberately not translated: gettext itself
        # may be the component that failed.
        notification.notify("Error starting cloudsn", str(e),
                            utils.get_error_pixbuf())


if __name__ == "__main__":
    logger.debug("0")
    start()
core.utils.close_log() sys.exit(0) if os.name == 'posix': signal.signal(signal.SIGQUIT, sig_handler) signal.signal(signal.SIGTERM, sig_handler) signal.signal(signal.SIGINT, sig_handler) signal.signal(signal.SIGUSR1, sig_handler) signal.signal(signal.SIGUSR2, sig_handler) elif os.name == 'nt': signal.signal(signal.SIGABRT, sig_handler) signal.signal(signal.SIGTERM, sig_handler) signal.signal(signal.SIGINT, sig_handler) controller = Controller() controller.init() controller_thread = threading.Thread(target=controller.run, name='controller') controller_thread.setDaemon(True) controller_thread.start() import webui.app webui.app.set_controller(controller) core.utils.log("started") try: webui.app.run() finally: # stop controller if webui is stopped
def setUp(self):
    """Create a fresh case database, controller, and worker pool per test."""
    case_database.initialize()
    self.controller = Controller(workflows_path=config.test_workflows_path)
    # Record the window start so executed-step queries can be time-bounded.
    self.start = datetime.utcnow()
    initialize_threading()
class TestSimpleWorkflow(unittest.TestCase):
    """End-to-end execution tests for the simple test playbooks."""

    @classmethod
    def setUpClass(cls):
        # Load test apps, APIs, flags, and filters once for the whole class.
        App.registry = {}
        import_all_apps(path=config.test_apps_path, reload=True)
        core.config.config.load_app_apis(apps_path=config.test_apps_path)
        core.config.config.flags = import_all_flags('tests.util.flagsfilters')
        core.config.config.filters = import_all_filters(
            'tests.util.flagsfilters')
        core.config.config.load_flagfilter_apis(path=config.function_api_path)

    def setUp(self):
        case_database.initialize()
        self.controller = Controller(workflows_path=config.test_workflows_path)
        # Window start for time-bounded executed-step queries.
        self.start = datetime.utcnow()
        initialize_threading()

    def tearDown(self):
        database.case_db.tear_down()
        subscription.clear_subscriptions()

    def test_simple_workflow_execution(self):
        """A one-step workflow executes its 'start' step with the expected result."""
        workflow_name = construct_workflow_name_key('basicWorkflowTest',
                                                    'helloWorldWorkflow')
        setup_subscriptions_for_step(workflow_name, ['start'])
        self.controller.execute_workflow('basicWorkflowTest',
                                         'helloWorldWorkflow')
        shutdown_pool()
        steps = executed_steps('defaultController', workflow_name, self.start,
                               datetime.utcnow())
        self.assertEqual(len(steps), 1)
        step = steps[0]
        ancestry = step['ancestry'].split(',')
        self.assertEqual(ancestry[-1], "start")
        result = json.loads(step['data'])
        self.assertDictEqual(result['result'], {
            'result': "REPEATING: Hello World",
            'status': 'Success'
        })

    def test_multi_action_workflow(self):
        """Both steps of the multi-action workflow run and report results."""
        workflow_name = construct_workflow_name_key('multiactionWorkflowTest',
                                                    'multiactionWorkflow')
        step_names = ['start', '1']
        setup_subscriptions_for_step(workflow_name, step_names)
        self.controller.execute_workflow('multiactionWorkflowTest',
                                         'multiactionWorkflow')
        shutdown_pool()
        steps = executed_steps('defaultController', workflow_name, self.start,
                               datetime.utcnow())
        self.assertEqual(len(steps), 2)
        names = [step['ancestry'].split(',')[-1] for step in steps]
        orderless_list_compare(self, names, step_names)
        name_result = {
            'start': {
                'result': {
                    "message": "HELLO WORLD"
                },
                'status': 'Success'
            },
            '1': {
                'result': "REPEATING: Hello World",
                'status': 'Success'
            }
        }
        for step in steps:
            name = step['ancestry'].split(',')[-1]
            self.assertIn(name, name_result)
            result = json.loads(step['data'])
            if type(name_result[name]) == dict:
                self.assertDictEqual(result['result'], name_result[name])
            else:
                self.assertEqual(result['result'], name_result[name])

    def test_error_workflow(self):
        """On step error, execution jumps to the 'error' step; '1' is skipped."""
        workflow_name = construct_workflow_name_key(
            'multistepError', 'multiactionErrorWorkflow')
        step_names = ['start', '1', 'error']
        setup_subscriptions_for_step(workflow_name, step_names)
        self.controller.execute_workflow('multistepError',
                                         'multiactionErrorWorkflow')
        shutdown_pool()
        steps = executed_steps('defaultController', workflow_name, self.start,
                               datetime.utcnow())
        self.assertEqual(len(steps), 2)
        names = [step['ancestry'].split(',')[-1] for step in steps]
        orderless_list_compare(self, names, ['start', 'error'])
        name_result = {
            'start': {
                'result': {
                    "message": "HELLO WORLD"
                },
                'status': 'Success'
            },
            'error': {
                'status': 'Success',
                'result': 'REPEATING: Hello World'
            }
        }
        for step in steps:
            name = step['ancestry'].split(',')[-1]
            self.assertIn(name, name_result)
            result = json.loads(step['data'])
            self.assertDictEqual(result['result'], name_result[name])

    def test_workflow_with_dataflow(self):
        """Data flows between steps: step '2' sees results from prior steps."""
        workflow_name = construct_workflow_name_key('dataflowTest',
                                                    'dataflowWorkflow')
        step_names = ['start', '1', '2']
        setup_subscriptions_for_step(workflow_name, step_names)
        self.controller.execute_workflow('dataflowTest', 'dataflowWorkflow')
        shutdown_pool()
        steps = executed_steps('defaultController', workflow_name, self.start,
                               datetime.utcnow())
        self.assertEqual(len(steps), 3)
        names = [step['ancestry'].split(',')[-1] for step in steps]
        orderless_list_compare(self, names, ['start', '1', '2'])
        name_result = {
            'start': {
                'result': 6,
                'status': 'Success'
            },
            '1': {
                'result': 6,
                'status': 'Success'
            },
            '2': {
                'result': 15,
                'status': 'Success'
            }
        }
        for step in steps:
            name = step['ancestry'].split(',')[-1]
            self.assertIn(name, name_result)
            result = json.loads(step['data'])
            self.assertDictEqual(result['result'], name_result[name])

    def test_workflow_with_dataflow_step_not_executed(self):
        """Subscribing to only two steps records only those two executions."""
        workflow_name = construct_workflow_name_key('dataflowTest',
                                                    'dataflowWorkflow')
        step_names = ['start', '1']
        setup_subscriptions_for_step(workflow_name, step_names)
        self.controller.execute_workflow('dataflowTest', 'dataflowWorkflow')
        shutdown_pool()
        steps = executed_steps('defaultController', workflow_name, self.start,
                               datetime.utcnow())
        self.assertEqual(len(steps), 2)
        names = [step['ancestry'].split(',')[-1] for step in steps]
        orderless_list_compare(self, names, ['start', '1'])
def __init__(self, test=False, db_instance=None):
    """Create the backing Controller and remember whether this is a test run."""
    self.is_test = test
    self.con = Controller(test=test, db_instance=db_instance)
def fake_controller(hass_mock):
    """Fixture: a Controller with empty args for isolated tests."""
    controller = Controller()
    controller.args = {}
    return controller
class TestWorkflowManipulation(unittest.TestCase):
    """CRUD tests for workflows/steps plus pause/resume timing behaviour."""

    @classmethod
    def setUpClass(cls):
        # Load test apps, APIs, flags, and filters once; start the pool.
        App.registry = {}
        import_all_apps(path=config.test_apps_path, reload=True)
        core.config.config.load_app_apis(apps_path=config.test_apps_path)
        core.config.config.flags = import_all_flags('tests.util.flagsfilters')
        core.config.config.filters = import_all_filters(
            'tests.util.flagsfilters')
        core.config.config.load_flagfilter_apis(path=config.function_api_path)
        initialize_threading()

    @classmethod
    def tearDownClass(cls):
        shutdown_pool()

    def setUp(self):
        case_database.initialize()
        self.controller = Controller(workflows_path=path.join(
            ".", "tests", "testWorkflows", "testGeneratedWorkflows"))
        self.controller.load_workflows_from_file(
            path=path.join(config.test_workflows_path,
                           'simpleDataManipulationWorkflow.playbook'))
        self.id_tuple = ('simpleDataManipulationWorkflow',
                         'helloWorldWorkflow')
        self.workflow_name = construct_workflow_name_key(*self.id_tuple)
        self.testWorkflow = self.controller.get_workflow(*self.id_tuple)

    def tearDown(self):
        self.controller.workflows = None
        case_database.case_db.tear_down()
        case_subscription.clear_subscriptions()
        # reload(socket) -- presumably undoes gevent monkey-patching from the
        # pause/resume tests; confirm before removing.
        reload(socket)

    def __execution_test(self):
        # Shared helper: run the loaded workflow and verify both step results.
        step_names = ['start', '1']
        setup_subscriptions_for_step(self.testWorkflow.name, step_names)
        start = datetime.utcnow()
        # Check that the workflow executed correctly post-manipulation
        self.controller.execute_workflow(*self.id_tuple)
        steps = executed_steps('defaultController', self.testWorkflow.name,
                               start, datetime.utcnow())
        self.assertEqual(len(steps), 2)
        names = [step['ancestry'].split(',')[-1] for step in steps]
        orderless_list_compare(self, names, step_names)
        name_result = {
            'start': "REPEATING: Hello World",
            '1': "REPEATING: This is a test."
        }
        for step in steps:
            name = step['ancestry'].split(',')[-1]
            self.assertIn(name, name_result)
            self.assertEqual(step['data']['result'], name_result[name])

    """ CRUD - Workflow """

    def test_create_workflow(self):
        """Creating from the empty template adds a workflow with no steps."""
        self.assertEqual(len(self.controller.workflows), 2)
        # Create Empty Workflow
        self.controller.create_workflow_from_template('emptyWorkflow',
                                                      'emptyWorkflow')
        self.assertEqual(len(self.controller.workflows), 3)
        self.assertEqual(
            self.controller.get_workflow('emptyWorkflow',
                                         'emptyWorkflow').steps, {})
        xml = self.controller.get_workflow('emptyWorkflow',
                                           'emptyWorkflow').to_xml()
        self.assertEqual(len(xml.findall(".//steps/*")), 0)

    def test_remove_workflow(self):
        """Removing a workflow restores the original workflow key set."""
        initial_workflows = list(self.controller.workflows.keys())
        self.controller.create_workflow_from_template('emptyWorkflow',
                                                      'emptyWorkflow')
        self.assertEqual(len(self.controller.workflows), 3)
        success = self.controller.remove_workflow('emptyWorkflow',
                                                  'emptyWorkflow')
        self.assertTrue(success)
        self.assertEqual(len(self.controller.workflows), 2)
        key = _WorkflowKey('emptyWorkflow', 'emptyWorkflow')
        self.assertNotIn(key, self.controller.workflows)
        orderless_list_compare(self, list(self.controller.workflows.keys()),
                               initial_workflows)

    def test_update_workflow(self):
        """Renaming re-keys the workflow without changing the total count."""
        self.controller.create_workflow_from_template('emptyWorkflow',
                                                      'emptyWorkflow')
        self.controller.update_workflow_name('emptyWorkflow', 'emptyWorkflow',
                                             'newPlaybookName',
                                             'newWorkflowName')
        old_key = _WorkflowKey('emptyWorkflow', 'emptyWorkflow')
        new_key = _WorkflowKey('newPlaybookName', 'newWorkflowName')
        self.assertEqual(len(self.controller.workflows), 3)
        self.assertNotIn(old_key, self.controller.workflows)
        self.assertIn(new_key, self.controller.workflows)

    def test_display_workflow(self):
        """repr() of a workflow is a literal dict with steps and options."""
        workflow = ast.literal_eval(self.testWorkflow.__repr__())
        self.assertEqual(len(workflow["steps"]), 1)
        self.assertTrue(workflow["options"])

    """ CRUD - Next """

    def test_update_next(self):
        """Editing a conditional's name is reflected in the serialized XML."""
        step = self.testWorkflow.steps["start"]
        self.assertEqual(step.conditionals[0].name, "1")
        step.conditionals[0].name = "2"
        self.assertEqual(step.conditionals[0].name, "2")
        xml = self.testWorkflow.to_xml()
        # Check XML
        self.assertEqual(
            xml.find(".//steps/step/[@id='start']/next").get("step"), "2")

    def test_display_next(self):
        """repr() of a conditional exposes its flags and name."""
        conditional = ast.literal_eval(
            self.testWorkflow.steps["start"].conditionals[0].__repr__())
        self.assertTrue(conditional["flags"])
        self.assertEqual(conditional["name"], "1")

    def test_to_from_cytoscape_data(self):
        """Round-tripping through cytoscape data reproduces the same steps."""
        self.controller.load_workflows_from_file(path=path.join(
            config.test_workflows_path, 'multiactionWorkflowTest.playbook'))
        workflow = self.controller.get_workflow('multiactionWorkflowTest',
                                                'multiactionWorkflow')
        original_steps = {
            step_name: step.as_json()
            for step_name, step in workflow.steps.items()
        }
        cytoscape_data = workflow.get_cytoscape_data()
        workflow.steps = {}
        workflow.from_cytoscape_data(cytoscape_data)
        derived_steps = {
            step_name: step.as_json()
            for step_name, step in workflow.steps.items()
        }
        self.assertDictEqual(derived_steps, original_steps)

    def test_name_parent_rename(self):
        """Reconstructing ancestry appends the workflow's own name."""
        workflow = Workflow(parent_name='workflow_parent', name='workflow')
        new_ancestry = ['workflow_parent_update']
        workflow.reconstruct_ancestry(new_ancestry)
        new_ancestry.append('workflow')
        self.assertListEqual(new_ancestry, workflow.ancestry)

    def test_name_parent_step_rename(self):
        """Ancestry reconstruction propagates down to a contained step."""
        workflow = Workflow(parent_name='workflow_parent', name='workflow')
        step = Step(name="test_step",
                    action='helloWorld',
                    app='HelloWorld',
                    ancestry=workflow.ancestry)
        workflow.steps["test_step"] = step
        new_ancestry = ["workflow_parent_update"]
        workflow.reconstruct_ancestry(new_ancestry)
        new_ancestry.append("workflow")
        new_ancestry.append("test_step")
        self.assertListEqual(new_ancestry,
                             workflow.steps["test_step"].ancestry)

    def test_name_parent_multiple_step_rename(self):
        """Ancestry reconstruction propagates to every contained step."""
        workflow = Workflow(parent_name='workflow_parent', name='workflow')
        step_one = Step(name="test_step_one",
                        action='helloWorld',
                        app='HelloWorld',
                        ancestry=workflow.ancestry)
        step_two = Step(name="test_step_two",
                        action='helloWorld',
                        app='HelloWorld',
                        ancestry=workflow.ancestry)
        workflow.steps["test_step_one"] = step_one
        workflow.steps["test_step_two"] = step_two
        new_ancestry = ["workflow_parent_update"]
        workflow.reconstruct_ancestry(new_ancestry)
        new_ancestry.append("workflow")
        new_ancestry.append("test_step_one")
        self.assertListEqual(new_ancestry,
                             workflow.steps["test_step_one"].ancestry)
        new_ancestry.remove("test_step_one")
        new_ancestry.append("test_step_two")
        self.assertListEqual(new_ancestry,
                             workflow.steps["test_step_two"].ancestry)

    def test_simple_risk(self):
        """total_risk is the sum of the per-step risk values."""
        workflow = Workflow(name='workflow')
        workflow.create_step(name="stepOne",
                             action='helloWorld',
                             app='HelloWorld',
                             risk=1)
        workflow.create_step(name="stepTwo",
                             action='helloWorld',
                             app='HelloWorld',
                             risk=2)
        workflow.create_step(name="stepThree",
                             action='helloWorld',
                             app='HelloWorld',
                             risk=3)
        self.assertEqual(workflow.total_risk, 6)

    def test_accumulated_risk_with_error(self):
        """Each failing step adds its risk fraction to accumulated_risk."""
        workflow = Workflow(name='workflow')
        step1 = Step(name="step_one", app='HelloWorld', action='Buggy',
                     risk=1)
        step2 = Step(name="step_two", app='HelloWorld', action='Buggy',
                     risk=2)
        step3 = Step(name="step_three", app='HelloWorld', action='Buggy',
                     risk=3.5)
        workflow.steps = {
            'step_one': step1,
            'step_two': step2,
            'step_three': step3
        }
        workflow.total_risk = 6.5
        instance = Instance.create(app_name='HelloWorld',
                                   device_name='test_device_name')
        # Name-mangled private method invoked directly to drive single steps.
        workflow._Workflow__execute_step(workflow.steps["step_one"], instance)
        self.assertAlmostEqual(workflow.accumulated_risk, 1.0 / 6.5)
        workflow._Workflow__execute_step(workflow.steps["step_two"], instance)
        self.assertAlmostEqual(workflow.accumulated_risk,
                               (1.0 / 6.5) + (2.0 / 6.5))
        workflow._Workflow__execute_step(workflow.steps["step_three"],
                                         instance)
        self.assertAlmostEqual(workflow.accumulated_risk, 1.0)

    def test_pause_and_resume_workflow(self):
        """Pausing at step '1' for 1.5s stretches total runtime into (2.5, 5)."""
        from gevent import monkey
        monkey.patch_all()
        self.controller.load_workflows_from_file(path=path.join(
            config.test_workflows_path, 'pauseWorkflowTest.playbook'))
        waiter = Event()
        uid = None

        def step_2_finished_listener(sender, **kwargs):
            # Wakes the main test once the final step has run.
            if sender.name == '2':
                waiter.set()

        def pause_resume_thread():
            # Pause immediately, wait, then resume; `uid` is read at call time.
            self.controller.pause_workflow('pauseWorkflowTest',
                                           'pauseWorkflow', uid)
            gevent.sleep(1.5)
            self.controller.resume_workflow('pauseWorkflowTest',
                                            'pauseWorkflow', uid)

        def step_1_about_to_begin_listener(sender, **kwargs):
            if sender.name == '1':
                gevent.spawn(pause_resume_thread)

        FunctionExecutionSuccess.connect(step_2_finished_listener)
        StepInputValidated.connect(step_1_about_to_begin_listener)

        start = default_timer()
        uid = self.controller.execute_workflow('pauseWorkflowTest',
                                               'pauseWorkflow')
        waiter.wait(timeout=5)
        duration = default_timer() - start
        self.assertTrue(2.5 < duration < 5)

    def test_pause_and_resume_workflow_breakpoint(self):
        """Breakpoint at step '2' plus delayed resume also stretches runtime."""
        from gevent import monkey
        monkey.patch_all()
        self.controller.load_workflows_from_file(path=path.join(
            config.test_workflows_path, 'pauseWorkflowTest.playbook'))
        waiter = Event()

        def step_2_finished_listener(sender, **kwargs):
            if sender.name == '2':
                waiter.set()

        def pause_resume_thread():
            self.controller.add_workflow_breakpoint_steps(
                'pauseWorkflowTest', 'pauseWorkflow', ['2'])
            gevent.sleep(1.5)
            self.controller.resume_breakpoint_step('pauseWorkflowTest',
                                                   'pauseWorkflow')

        def step_1_about_to_begin_listener(sender, **kwargs):
            if sender.name == '1':
                gevent.spawn(pause_resume_thread)

        FunctionExecutionSuccess.connect(step_2_finished_listener)
        StepInputValidated.connect(step_1_about_to_begin_listener)

        start = default_timer()
        self.controller.execute_workflow('pauseWorkflowTest', 'pauseWorkflow')
        waiter.wait(timeout=5)
        duration = default_timer() - start
        self.assertTrue(2.5 < duration < 5)

    def test_change_step_input(self):
        """Passing start_input overrides the step's 'call' argument."""
        import json

        input_list = [{'key': 'call', 'value': 'CHANGE INPUT'}]
        input_arg = {arg['key']: arg['value'] for arg in input_list}
        result = {'value': None}

        def step_finished_listener(sender, **kwargs):
            # Capture the executed step's data payload for assertion.
            result['value'] = kwargs['data']

        FunctionExecutionSuccess.connect(step_finished_listener)
        self.testWorkflow.execute(start_input=input_arg)
        self.assertDictEqual(
            json.loads(result['value']), {
                'result': {
                    'result': 'REPEATING: CHANGE INPUT',
                    'status': 'Success'
                }
            })
def __init__(self, optimizer): Controller.__init__(self, optimizer) self.i = 0
core.utils.close_log() sys.exit(0) if os.name == 'posix': signal.signal(signal.SIGQUIT, sig_handler) signal.signal(signal.SIGTERM, sig_handler) signal.signal(signal.SIGINT, sig_handler) signal.signal(signal.SIGUSR1, sig_handler) signal.signal(signal.SIGUSR2, sig_handler) elif os.name == 'nt': signal.signal(signal.SIGABRT, sig_handler) signal.signal(signal.SIGTERM, sig_handler) signal.signal(signal.SIGINT, sig_handler) controller = Controller() controller.init() controller_thread = threading.Thread(target=controller.run, name='controller') controller_thread.setDaemon(True) controller_thread.start() import webui.app webui.app.set_controller(controller) core.utils.log("started") try: webui.app.run() finally: # stop controller if webui is stopped # controller.stop()