def eval_bot(params):
    """Evaluate the candidate agent (``args.agent_id``) against the baseline
    agent on every scenario and score it for hyper-parameter search.

    Args:
        params: dict of keyword arguments used to build a ``Config`` for the
            candidate agent; the baseline agent always uses ``default_config``.

    Returns:
        dict in hyperopt objective format: ``loss`` (negated mean margin, or a
        large penalty of 1000 when no dialogue reached a deal), ``status``,
        ``ste`` (standard error of the margins, 0.0 when there are none), and
        ``success_rate`` (fraction of dialogues that reached a deal).
    """
    success = []
    margins = []
    agent_id = args.agent_id            # slot of the candidate agent (0 or 1)
    baseline_agent_id = 1 - agent_id    # the other slot plays the baseline
    config = Config(**params)
    for scenario in scenarios:
        baseline_agent = agents[baseline_agent_id].new_session(
            baseline_agent_id, scenario.kbs[baseline_agent_id], default_config)
        agent = agents[agent_id].new_session(agent_id, scenario.kbs[agent_id], config)
        controller = Controller(scenario, [baseline_agent, agent])
        ex = controller.simulate(max_turns, verbose=False)
        if StrategyAnalyzer.has_deal(ex):
            success.append(1)
            final_price = ex.outcome['offer']['price']
            margin = StrategyAnalyzer.get_margin(
                ex,
                final_price,
                1,
                scenario.kbs[agent_id].facts['personal']['Role'],
                remove_outlier=False)
            margins.append(margin)
        else:
            success.append(0)
    # Guard the no-deal case explicitly: the original computed sem(margins)
    # unconditionally, which warns and yields NaN on an empty list even though
    # 'loss' already fell back to the 1000 penalty.
    if margins:
        loss = -1. * np.mean(margins)
        ste = sem(margins)
    else:
        loss = 1000
        ste = 0.0
    return {
        'loss': loss,
        'status': STATUS_OK,
        'ste': ste,
        'success_rate': np.mean(success),
    }
def test_ffkExecutionEventsCase(self):
    """Check that step/next-step/flag/filter events raised while executing
    ``helloWorldWorkflow`` are recorded in the case database.

    Builds a nested Subscription tree (controller -> workflow -> step ->
    next-step -> flag -> filter), registers it for the
    'testStepFFKEventsEvents' case together with global subscriptions, runs
    the workflow, and expects exactly 5 logged events. STEP-type events for
    successful function execution must carry the repeated hello-world
    payload; all other events log empty data.
    """
    c = Controller(name="testStepFFKEventsController")
    c.load_workflows_from_file(path=config.test_workflows_path + "basicWorkflowTest.playbook")
    workflow_name = construct_workflow_name_key('basicWorkflowTest', 'helloWorldWorkflow')
    # Innermost subscription: filter-level events only.
    filter_sub = Subscription(events=['Filter Error'])
    # The 'length' flag forwards down to the filter subscription.
    flag_sub = Subscription(events=['Flag Success', 'Flag Error'], subscriptions={'length': filter_sub})
    next_sub = Subscription(
        events=['Next Step Taken', 'Next Step Not Taken'],
        subscriptions={'regMatch': flag_sub})
    step_sub = Subscription(events=[
        'Function Execution Success', 'Input Validated',
        'Conditionals Executed'
    ],
                            subscriptions={'1': next_sub})
    subs = {
        'testStepFFKEventsController': Subscription(
            subscriptions={
                workflow_name: Subscription(
                    subscriptions={'start': step_sub})
            })
    }
    # Global subscriptions register the same events for every element type.
    global_subs = case_subscription.GlobalSubscriptions(
        step=[
            'Function Execution Success', 'Input Validated',
            'Conditionals Executed'
        ],
        next_step=['Next Step Taken', 'Next Step Not Taken'],
        flag=['Flag Success', 'Flag Error'],
        filter=['Filter Error'])
    case_subscription.set_subscriptions({
        'testStepFFKEventsEvents': case_subscription.CaseSubscriptions(
            subscriptions=subs, global_subscriptions=global_subs)
    })
    c.execute_workflow('basicWorkflowTest', 'helloWorldWorkflow')
    shutdown_pool()  # drain the executor pool so all events are persisted
    step_ffk_events_case = case_database.case_db.session.query(case_database.Case) \
        .filter(case_database.Case.name == 'testStepFFKEventsEvents').first()
    step_ffk_event_history = step_ffk_events_case.events.all()
    self.assertEqual(
        len(step_ffk_event_history), 5,
        'Incorrect length of event history. '
        'Expected {0}, got {1}'.format(5, len(step_ffk_event_history)))
    # Only STEP-message events are inspected for payloads.
    step_json = [
        step.as_json() for step in step_ffk_event_history
        if step.as_json()['message'] == 'STEP'
    ]
    for step in step_json:
        if step['type'] == 'Function executed successfully':
            self.assertDictEqual(step['data'], {'result': 'REPEATING: Hello World'})
        else:
            self.assertEqual(step['data'], '')
def generate_examples(num_examples, scenario_db, examples_path, max_examples, remove_fail, max_turns):
    """Simulate up to ``max_examples`` dialogues between the two global
    ``agents`` and dump the batch as JSON to ``examples_path``.

    Args:
        num_examples: running counter; also selects the next scenario
            (scenarios are cycled round-robin by this counter).
        scenario_db: provides ``scenarios_list`` to draw scenarios from.
        examples_path: output file for the JSON-serialized dialogues.
        max_examples: number of simulation attempts to run.
        remove_fail: when True, dialogues that did not complete are dropped
            from the output instead of being kept.
        max_turns: per-dialogue turn budget passed to the controller.
    """
    examples = []
    num_failed = 0
    scenarios = scenario_db.scenarios_list
    #scenarios = [scenario_db.scenarios_map['S_8COuPdjZZkYgrzhb']]
    #random.shuffle(scenarios)
    for i in range(max_examples):
        scenario = scenarios[num_examples % len(scenarios)]
        sessions = [
            agents[0].new_session(0, scenario.kbs[0]),
            agents[1].new_session(1, scenario.kbs[1])
        ]
        controller = Controller(scenario, sessions)
        ex = controller.simulate(max_turns, verbose=args.verbose)
        if not controller.complete():
            num_failed += 1
            if remove_fail:
                continue  # discard the failed dialogue entirely
        examples.append(ex)
        num_examples += 1
    # Python 2 print-to-file syntax; writes the whole batch in one call.
    # NOTE(review): placement after the loop assumed from the continue/print
    # flow — confirm against the original formatting.
    with open(examples_path, 'w') as out:
        print >> out, json.dumps([e.to_dict() for e in examples])
    if num_failed == 0:
        print('All {} dialogues succeeded!'.format(num_examples))
    else:
        print('Number of failed dialogues:', num_failed)
def server_process_main(stop_handler, scmStatus=None):
    """Entry point for the monitor server process (Python 2).

    Initializes logging, starts the Controller RPC thread, records the PID,
    then blocks on signal.pause() until a stop signal arrives and the
    controller thread exits; the PID file is removed on the way out.

    Args:
        stop_handler: object the Controller uses to detect shutdown requests.
        scmStatus: optional Windows service-control status reporter; notified
            before and after startup when provided.
    """
    if scmStatus is not None:
        scmStatus.reportStartPending()
    _init_logging(main_config)
    controller = Controller(main_config, stop_handler)
    logger.info('Starting Server RPC Thread: %s' % ' '.join(sys.argv))
    controller.start()
    # Python 2 print statements (this module targets Python 2).
    print "Server out at: " + main_config.ams_monitor_out_file()
    print "Server log at: " + main_config.ams_monitor_log_file()
    save_pid(os.getpid(), PID_OUT_FILE)
    if scmStatus is not None:
        scmStatus.reportStarted()
    # For some reason this is needed to catch system signals like SIGTERM
    # TODO fix if possible
    signal.pause()
    #The controller thread finishes when the stop event is signaled
    controller.join()
    remove_file(PID_OUT_FILE)
    pass
def setUp(self):
    """Prepare the test fixture: database, visualization output directory,
    start timestamp, worker threads, and a Controller bound to the test
    workflows path."""
    database.initialize()
    # The visualization output directory must exist before tests write to it.
    if not isdir(profile_visualizations_path):
        mkdir(profile_visualizations_path)
    self.start = datetime.utcnow()  # naive UTC timestamp marking test start
    initialize_threading()
    self.controller = Controller(workflows_path=config.test_workflows_path)
def _init_controller(my_index, partner_type, scenario, chat_id):
    """Create a Controller pairing a human session with a partner session.

    NOTE(review): this function references ``self`` without taking it as a
    parameter — it only works if defined inside a method (closure over
    ``self``); confirm at the definition site.

    Args:
        my_index: agent slot (0 or 1) for the human participant.
        partner_type: key into ``self.systems`` selecting the partner system.
        scenario: provides per-agent knowledge bases via ``get_kb``.
        chat_id: identifier forwarded to the Controller.

    Returns:
        Tuple of (controller, my_session, partner_session).
    """
    my_session = self.systems[HumanSystem.name()].new_session(my_index, scenario.get_kb(my_index))
    partner_session = self.systems[partner_type].new_session(1 - my_index, scenario.get_kb(1 - my_index))
    controller = Controller(scenario, [my_session, partner_session], chat_id=chat_id)
    return controller, my_session, partner_session
def setUp(self):
    """Build a fully populated test database and hand it to the base fixture."""
    controller = Controller(True)
    creator = DatabaseCreator.get_instance(con=controller)
    creator.fill_full_db()
    self.db_creator = creator
    super().setUp(db_instance=controller.get_db())
def test_workflowExecutionEvents(self):
    """Verify that workflow-level events for the multi-action workflow are
    logged to the case database.

    Subscribes the 'testExecutionEvents' case to four workflow events and
    expects exactly 6 entries in the event history after execution.
    """
    workflow_name = construct_workflow_name_key('multiactionWorkflowTest', 'multiactionWorkflow')
    c = Controller(name="testExecutionEventsController")
    c.load_workflows_from_file(path=config.test_workflows_path + "multiactionWorkflowTest.playbook")
    subs = {
        'testExecutionEventsController': Subscription(
            subscriptions={
                workflow_name: Subscription(events=[
                    "App Instance Created", "Step Execution Success",
                    "Next Step Found", "Workflow Shutdown"
                ])
            })
    }
    case_subscription.set_subscriptions({
        'testExecutionEvents':
            case_subscription.CaseSubscriptions(subscriptions=subs)
    })
    c.execute_workflow('multiactionWorkflowTest', 'multiactionWorkflow')
    shutdown_pool()  # drain the executor pool so all events are persisted
    execution_events_case = case_database.case_db.session.query(case_database.Case) \
        .filter(case_database.Case.name == 'testExecutionEvents').first()
    execution_event_history = execution_events_case.events.all()
    self.assertEqual(
        len(execution_event_history), 6,
        'Incorrect length of event history. '
        'Expected {0}, got {1}'.format(6, len(execution_event_history)))
def __init__(self):
    """Bootstrap the application: parse CLI arguments, load the config file,
    merge the two, optionally persist the merged config, and start the
    Controller with the resulting properties."""
    self.print_banner()
    self.parser = argparse.ArgumentParser()
    self.arguments = self.get_arguments()
    self.script_path = os.path.dirname(os.path.realpath(__file__))
    # Holds all important program parameters.
    # NOTE(review): defaultdict(None) has no default factory, so the inner
    # mapping behaves like a plain dict — confirm this is intended.
    self.properties = defaultdict(lambda: defaultdict(None))
    self.check_config_exist()
    self.config_path = self.arguments.config_file if self.arguments.config_file else 'config.ini'
    self.config = self.read_config(self.config_path)
    # Set parameters without using the console.
    self._test()
    self.check_arguments()
    self.merge_args_and_config()
    if self.arguments.update_config:
        self.save_current_config()
    # Fill the properties object for more flexibility in which values are stored.
    for section in self.config.sections():
        for option in self.config[section]:
            self.properties[section][option] = self.config.get(
                section, option)
    # Allow imports by path relative to main.py.
    sys.path.append(self.script_path)
    self.controller = Controller(self.properties)
def _get_controller(self, scenario, split='train'):
    """Return a Controller over fresh sessions for the two agents, swapping
    the scenario's KBs with probability 0.5 so each agent plays both roles."""
    if random.random() < 0.5:
        # Deep-copy before mutating so the shared scenario object stays intact.
        scenario = copy.deepcopy(scenario)
        scenario.kbs = (scenario.kbs[1], scenario.kbs[0])
    first_session = self.agents[0].new_session(0, scenario.kbs[0])
    second_session = self.agents[1].new_session(1, scenario.kbs[1])
    return Controller(scenario, [first_session, second_session])
def report(self, url):
    """Load the CSV at ``url``, build the top-apps report, export it to a
    CSV file, and persist the raw data.

    Returns:
        Tuple of (generated CSV file, save-to-db result).
    """
    ctl = Controller()
    frame = ctl.load_csv(file_=url)
    top_news, top_books, top_musics = ctl.get_top_apps(frame)
    generated = ctl.generate_report(top_news=top_news,
                                    top_books=top_books,
                                    top_musics=top_musics)
    csv_file = ctl.generate_csv(generated)
    saved = ctl.save_db(frame)
    return csv_file, saved
def setUp(self):
    """Initialize the case database and a Controller loaded with the simple
    data-manipulation test workflow; cache its name key and workflow object."""
    case_database.initialize()
    generated_dir = path.join(".", "tests", "testWorkflows", "testGeneratedWorkflows")
    self.controller = Controller(workflows_path=generated_dir)
    playbook = path.join(config.test_workflows_path, 'simpleDataManipulationWorkflow.playbook')
    self.controller.load_workflows_from_file(path=playbook)
    self.id_tuple = ('simpleDataManipulationWorkflow', 'helloWorldWorkflow')
    self.workflow_name = construct_workflow_name_key(*self.id_tuple)
    self.testWorkflow = self.controller.get_workflow(*self.id_tuple)
def start(self):
    """
    Parse the program arguments and initiate the vulnerability analysis.
    """
    opts = self.args
    controller = Controller(opts.networks,
                            opts.add_networks,
                            opts.omit_networks,
                            opts.update_modules,
                            opts.config,
                            opts.ports,
                            opts.output,
                            opts.input,
                            self.user_results,
                            opts.separate_networks,
                            self.verbose)
    controller.run()
def generate_examples(agents, agent_names, scenarios, num_examples, max_turns):
    """Simulate dialogues, cycling round-robin through scenarios.

    Each scenario is played twice per iteration with the agents' order
    swapped, so every agent takes both the buyer and the seller role.

    Returns:
        List of simulated dialogue examples (two per iteration).
    """
    examples = []
    for idx in range(num_examples):
        scenario = scenarios[idx % len(scenarios)]
        # Play once per ordering of the two agents.
        for lead in (0, 1):
            ordered_agents = (agents[lead], agents[1 - lead])
            ordered_names = [agent_names[lead], agent_names[1 - lead]]
            sessions = [
                ordered_agents[0].new_session(0, scenario.kbs[0]),
                ordered_agents[1].new_session(1, scenario.kbs[1]),
            ]
            controller = Controller(scenario, sessions, session_names=ordered_names)
            examples.append(controller.simulate(max_turns, verbose=args.verbose))
    return examples
def test_ffkExecutionEvents(self):
    """Verify step/next-step/flag/filter events for ``helloWorldWorkflow``
    are recorded when subscribed per-case (no global subscriptions here).

    Builds the nested Subscription tree controller -> workflow -> step ->
    next-step -> flag -> filter and expects exactly 6 logged events.
    """
    workflow_name = construct_workflow_name_key('basicWorkflowTest', 'helloWorldWorkflow')
    c = Controller(name="testStepFFKEventsController")
    c.load_workflows_from_file(path=config.test_workflows_path + "basicWorkflowTest.playbook")
    # Innermost subscription: filter-level events.
    filter_sub = Subscription(events=['Filter Success', 'Filter Error'])
    # The 'length' flag forwards down to the filter subscription.
    flag_sub = Subscription(events=['Flag Success', 'Flag Error'], subscriptions={'length': filter_sub})
    next_sub = Subscription(
        events=['Next Step Taken', 'Next Step Not Taken'],
        subscriptions={'regMatch': flag_sub})
    step_sub = Subscription(events=[
        "Function Execution Success", "Input Validated",
        "Conditionals Executed"
    ],
                            subscriptions={'1': next_sub})
    subs = {
        'testStepFFKEventsController': Subscription(
            subscriptions={
                workflow_name: Subscription(
                    subscriptions={'start': step_sub})
            })
    }
    case_subscription.set_subscriptions({
        'testStepFFKEventsEvents':
            case_subscription.CaseSubscriptions(subscriptions=subs)
    })
    c.execute_workflow('basicWorkflowTest', 'helloWorldWorkflow')
    shutdown_pool()  # drain the executor pool so all events are persisted
    step_ffk_events_case = case_database.case_db.session.query(case_database.Case) \
        .filter(case_database.Case.name == 'testStepFFKEventsEvents').first()
    step_ffk_event_history = step_ffk_events_case.events.all()
    self.assertEqual(
        len(step_ffk_event_history), 6,
        'Incorrect length of event history. '
        'Expected {0}, got {1}'.format(6, len(step_ffk_event_history)))
def test_stepExecutionEvents(self):
    """Verify the three step-level events of the 'start' step are recorded;
    expects exactly 3 entries in the case's event history."""
    workflow_name = construct_workflow_name_key('basicWorkflowTest', 'helloWorldWorkflow')
    c = Controller(name="testStepExecutionEventsController")
    c.load_workflows_from_file(path=config.test_workflows_path + "basicWorkflowTest.playbook")
    # Subscribe only the 'start' step of the workflow to the step events.
    subs = {
        'testStepExecutionEventsController': Subscription(
            subscriptions={
                workflow_name: Subscription(
                    subscriptions={
                        'start': Subscription(events=[
                            "Function Execution Success", "Input Validated",
                            "Conditionals Executed"
                        ])
                    })
            })
    }
    case_subscription.set_subscriptions({
        'testStepExecutionEvents':
            case_subscription.CaseSubscriptions(subscriptions=subs)
    })
    c.execute_workflow('basicWorkflowTest', 'helloWorldWorkflow')
    shutdown_pool()  # drain the executor pool so all events are persisted
    step_execution_events_case = case_database.case_db.session.query(case_database.Case) \
        .filter(case_database.Case.name == 'testStepExecutionEvents').first()
    step_execution_event_history = step_execution_events_case.events.all()
    self.assertEqual(
        len(step_execution_event_history), 3,
        'Incorrect length of event history. '
        'Expected {0}, got {1}'.format(3, len(step_execution_event_history)))
def makeService(self, options):
    """Build the top-level Twisted MultiService for this daemon.

    Opens (and schema-initializes) the node and template databases, then
    wires two TCP web services: one for node registration and one for
    external control, sharing a queue of dead nodes between them.

    Args:
        options: parsed twistd options; ``options['config']`` is the path
            to the configuration file.

    Returns:
        The assembled ``service.MultiService``.
    """
    config = Parser(options['config'])
    top_service = service.MultiService()
    db_bmnodes = config.dbnodes
    nodeschema_name = 'nodes.sql'
    self.opendb(db_bmnodes, nodeschema_name)
    db_templates = config.dbtemplates
    templateschema_name = 'templates.sql'
    self.opendb(db_templates, templateschema_name)
    # shared queue for dead nodes
    dead_nodes_queue = Queue.Queue()
    ### server for node registration ###
    reg_port = config.node_registration_port
    reg = Register(port=reg_port, dbnodes=db_bmnodes, queue=dead_nodes_queue)
    node = server.Site(reg, logPath="/dev/null")
    node.noisy = False  # suppress per-connection twisted logging
    node_iface = internet.TCPServer(reg_port, node)
    node_iface.setServiceParent(top_service)
    ### server for external ###
    listening_port = config.ext_listening_port
    contr = Controller(port=listening_port,
                       dbnodes=db_bmnodes,
                       dbtemplates=db_templates,
                       datafile_dir=config.datafile_dir,
                       queue=dead_nodes_queue)
    ext = server.Site(contr, logPath="/dev/null")
    ext.noisy = False  # suppress per-connection twisted logging
    ext_iface = internet.TCPServer(listening_port, ext)
    ext_iface.setServiceParent(top_service)
    ### return top service
    return top_service
def run(self):
    """Run the bootstrap sequence: parse arguments, load and merge the
    configuration, optionally persist it, then (re)create the Controller."""
    self.print_banner()
    self.arguments = self.get_arguments()
    self.check_config_exist()
    self.config_path = self.arguments.config_file if self.arguments.config_file else self.config_path
    self.config = self.read_config(self.config_path)
    # Set parameters without using the console.
    #self._test()
    self.check_arguments()
    self.merge_args_and_config()
    if self.arguments.update_config:
        self.save_current_config()
    # Fill the properties object for more flexibility in which values are stored.
    for section in self.config.sections():
        for option in self.config[section]:
            self.properties[section][option] = self.config.get(
                section, option)
    self.controller = Controller(self.properties)
def server_process_main(stop_handler, scmStatus=None):
    """Entry point for the server process (Python 2 variant).

    Records the PID, initializes logging, starts the Controller RPC thread,
    then blocks until the controller thread exits on the stop event; the PID
    file is removed on the way out.

    Args:
        stop_handler: object the Controller uses to detect shutdown requests.
        scmStatus: optional Windows service-control status reporter; notified
            before and after startup when provided.
    """
    if scmStatus is not None:
        scmStatus.reportStartPending()
    save_pid(os.getpid(), PID_OUT_FILE)
    config = Configuration()
    _init_logging(config)
    controller = Controller(config, stop_handler)
    logger.info('Starting Server RPC Thread: %s' % ' '.join(sys.argv))
    controller.start()
    # Python 2 print statements (this module targets Python 2).
    print "Server out at: " + SERVER_OUT_FILE
    print "Server log at: " + SERVER_LOG_FILE
    if scmStatus is not None:
        scmStatus.reportStarted()
    #The controller thread finishes when the stop event is signaled
    controller.join()
    remove_file(PID_OUT_FILE)
    pass
def __init__(self):
    """Initializes a new Context object. This acts as an interface for objects
    to access other event specific variables that might be needed.
    """
    self.apps = self.get_apps()
    # Function-level imports — presumably to avoid circular imports between
    # the server package and this module; confirm before moving to top level.
    from server.app import app
    from server.appdevice import Device, App
    from server.database import User, Role, userRoles, db, user_datastore
    from server.triggers import Triggers
    from server.casesubscription import CaseSubscription
    from core.controller import Controller
    # Re-export the imported handles as attributes so callers can reach them
    # through the context instead of importing server modules directly.
    self.User = User
    self.Role = Role
    self.Device = Device
    self.App = App
    self.Triggers = Triggers
    self.CaseSubscription = CaseSubscription
    self.flask_app = app
    self.user_roles = userRoles
    self.db = db
    self.user_datastore = user_datastore
    self.controller = Controller()
from core.controller import Controller

# Script entry point: construct the Controller and run it directly.
if __name__ == '__main__':
    Controller().run()
def setUp(self):
    """Initialize the case database, a Controller over the test workflows,
    a start timestamp, and the worker thread pool."""
    case_database.initialize()
    self.controller = Controller(workflows_path=config.test_workflows_path)
    self.start = datetime.utcnow()  # naive UTC timestamp marking test start
    initialize_threading()
def __init__(self, test=False, db_instance=None):
    """Wrap a Controller, remembering whether this is a test instance.

    Args:
        test: propagated to the Controller as its test-mode flag.
        db_instance: optional database handle forwarded to the Controller.
    """
    self.is_test = test
    self.con = Controller(test=test, db_instance=db_instance)
def fake_controller(hass_mock):
    """Return a Controller with empty args, for use as a test double."""
    controller = Controller()
    controller.args = {}
    return controller
core.utils.close_log() sys.exit(0) if os.name == 'posix': signal.signal(signal.SIGQUIT, sig_handler) signal.signal(signal.SIGTERM, sig_handler) signal.signal(signal.SIGINT, sig_handler) signal.signal(signal.SIGUSR1, sig_handler) signal.signal(signal.SIGUSR2, sig_handler) elif os.name == 'nt': signal.signal(signal.SIGABRT, sig_handler) signal.signal(signal.SIGTERM, sig_handler) signal.signal(signal.SIGINT, sig_handler) controller = Controller() controller.init() controller_thread = threading.Thread(target=controller.run, name='controller') controller_thread.setDaemon(True) controller_thread.start() import webui.app webui.app.set_controller(controller) core.utils.log("started") try: webui.app.run() finally: # stop controller if webui is stopped # controller.stop()
def setUp(self):
    """Create a fresh default Controller for each test."""
    self.controller = Controller()
def setUp(self):
    """Create a fresh Controller named 'testController' for each test."""
    self.controller = Controller(name="testController")
price_tracker = pickle.load(f) # load schema schema = Schema(self.SCHEMA_PATH) # load system self.system = PytorchNeuralSystem(args, schema, price_tracker, self.MODEL_PATH, False) # load scenario db with open(self.DATA_PATH) as f: raw = json.load(f) raw = [r["scenario"] for r in raw] # HACK self.scenario_db = ScenarioDB.from_dict(schema, raw, Scenario) def from_uuid(self, agent, uuid): """Return a session object given a uuid and agent number""" scenario = self.scenario_db.get(uuid) kb = scenario.get_kb(agent) return scenario, self.system.new_session(agent, kb), kb if __name__ == "__main__": from sessions.cmd_session import CmdSession from core.controller import Controller loader = Loader() scenario, session, kb = loader.from_uuid(0, "S_To118PXuNicOd8SO") cmd_session = CmdSession(1, kb) controller = Controller(scenario, [session, cmd_session]) controller.simulate()
from flask import Flask, render_template, request from core.controller import Controller import json app = Flask(__name__) appname = "homecontrol" ctrl = Controller() @app.route('/') def index(): switches = ctrl.get_switches() r, g, b = ctrl.get_colors() return render_template('index.html', area_states=switches, r=r, g=g, b=b, rgbmodes=ctrl.get_rgb_modes(), active_rgbmode=ctrl.get_rgb_mode()) @app.route('/light/switch') def light_area(): switches = ctrl.get_switches() off = set([name for name, state in switches]).difference(request.args.keys()) on = set([name for name, state in switches]).intersection(request.args.keys()) for area in off:
from core.controller import Controller
from core.model import Model
from core.view import View

# Script entry point: build a 50x50 model, wire the view and controller
# together (each holds a reference to the other), and start the main loop.
if __name__ == '__main__':
    model = Model(50, 50)
    view = View(model)
    ctrl = Controller(view, model)
    view.set_controller(ctrl)
    ctrl.start()