def __init__(self, *, entrance: Queue, name: str = 'Session', aq: MPQueue):
    super().__init__()
    self.daemon = True
    self.name = name
    self.cockpit = Cockpit()
    self.cockpit.update(lat=self.deg2dm(deg=35.297318), lng=self.deg2dm(deg=139.757328), sog=22)
    self.enemy: Dict[int, Enemy] = {}
    self.aton: Dict[int, AtoN] = {}
    self.entrance = entrance
    self.counter: int = 0
    self.deltas: int = 0
    self.fragment: Dict[int, List[str]] = {}
    while True:
        ws = websocket.create_connection(url='ws://0.0.0.0:%d/' % (Constants.wsport,))
        if ws:
            self.ws = ws
            break
        else:
            time.sleep(1)
    self.logger = logging.getLogger('Log')
    self.aq = aq
    # self.children = [self.dispatcher, self.archive]
    self.dispatcher = Dispatcher()
    self.locker = Lock()
    self.cycle = Cycle(cockpit=self.cockpit, enemy=self.enemy, locker=self.locker, ws=self.ws)
    self.cycle.start()
def test_uses_default_if_no_match(self):
    dispatcher = Dispatcher('func')
    dispatcher.add_target(Pattern(5), lambda x: x * x)
    dispatcher.add_default(lambda x: x * 2)
    self.assertEqual(dispatcher(5), 25)
    self.assertEqual(dispatcher(3), 6)
def test_dispatches_correct_method(self):
    dispatcher = Dispatcher('func')
    dispatcher.add_target(Pattern(lambda num: num % 2 == 0), lambda x: 'even')
    dispatcher.add_target(Pattern(lambda num: num % 2 != 0), lambda x: 'odd')
    self.assertEqual(dispatcher(2), 'even')
    self.assertEqual(dispatcher(3), 'odd')
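# A minimal sketch (not from the source) of a Dispatcher/Pattern pair that the
# two tests above would pass against. The names Dispatcher, Pattern, add_target,
# add_default and the callable interface are taken from the tests; the internals
# (literal-or-predicate matching, first-match-wins ordering) are assumptions.
class Pattern:
    def __init__(self, matcher):
        # Accept either a literal value or a predicate callable.
        self.matcher = matcher

    def matches(self, value):
        if callable(self.matcher):
            return bool(self.matcher(value))
        return value == self.matcher


class Dispatcher:
    def __init__(self, name):
        self.name = name
        self.targets = []       # list of (Pattern, handler), checked in order
        self.default = None     # fallback handler when no pattern matches

    def add_target(self, pattern, handler):
        self.targets.append((pattern, handler))

    def add_default(self, handler):
        self.default = handler

    def __call__(self, value):
        for pattern, handler in self.targets:
            if pattern.matches(value):
                return handler(value)
        if self.default is not None:
            return self.default(value)
        raise LookupError("no matching pattern for %r" % (value,))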
def __init__(self, server, parent=None):
    Dispatcher.__init__(self, server, parent)
    self.vm = VmManager()
    self.vm.loadAllMachines()
    # externalToVm --> {clientId: [(vmId, vmInfo), ]}
    self.externalToVm = {}
    self.callbacks = {
        Message.CMD_START_VM: self.startVM,
        Message.CMD_CLOSE_VM: self.closeVM,
        Message.CMD_RESET_VM: self.resetVM,
        Message.CMD_QUERY_VMS: self.queryVms,
        Message.CMD_QUERY_GPS: self.queryGPS,
        Message.CMD_UPDATE_VM: self.updateVm,
        Message.CMD_UPDATE_GPS: self.updateGps,
        Message.CMD_ADD_GPS: self.addGps,
        Message.CMD_DELETE_GPS: self.deleteGps,
        Message.CMD_ADD_ACCOUNT: self.addAccount,
        Message.CMD_QUERY_ACCOUNT: self.queryAccount,
        Message.CMD_DELETE_ACCOUNT: self.deleteAccount,
    }
    self.server.clientDisconnected.connect(self.clearVmForClient)
    EventManager.bind('DataCenter.gpsLoaded', self._sendGpsData)
    EventManager.bind('DataCenter.gpsUpdated', self._updateGpsData)
    EventManager.bind('DataCenter.gpsAdded', self._addGpsData)
    EventManager.bind('DataCenter.gpsDeleted', self._deleteGpsData)
    EventManager.bind('DataCenter.accountAdded', self._addAccountData)
    EventManager.bind('DataCenter.accountLoaded', self._sendAccountData)
    EventManager.bind('DataCenter.accountDeleted', self._deleteAccountData)
class Client(NetStream, QtCore.QObject):
    def __init__(self, headMode=8, serverIP='127.0.0.1', serverPort=4829, sleepInterval=0.1, parent=None):
        NetStream.__init__(self, headMode)
        QtCore.QObject.__init__(self)
        print "Client Init ", serverIP, serverPort
        self.initData(serverIP, serverPort, sleepInterval, parent)
        self.setup()
        self.parent.parent.connect(self, QtCore.SIGNAL('serverCrashedAlert()'),
                                   self.parent.parent.serverCrashedAlert)

    def initData(self, serverIP, serverPort, sleepInterval, parent):
        self.serverIP = serverIP
        self.serverPort = serverPort
        self.sleepInterval = sleepInterval
        self.dispatcher = Dispatcher()
        self.parent = parent
        self.isAlive = True
        self.hasBegan = False

    def killClient(self):
        self.isAlive = False

    def setup(self):
        self.setupDispatcher()
        self.setupClient()

    def setupDispatcher(self):
        self.dispatcher.setParent(self)
        services = {
            1001: LoginService(self.dispatcher),
            1002: HallService(self.dispatcher),
            1003: RoomService(self.dispatcher),
        }
        self.dispatcher.registers(services)

    def setupClient(self):
        print self.serverPort, "\n", self.serverIP
        self.connect(self.serverIP, self.serverPort)
        self.nodelay(0)
        self.nodelay(1)

    def sendToServer(self, serviceID, commandID, data):
        message = {}
        message['create_time'] = time.time()
        message['service_id'] = serviceID
        message['command_id'] = commandID
        message['data'] = data
        try:
            messageString = json.dumps(message)
        except TypeError, e:
            print "Error while dumping json"
            print e
            print message
        print "Sending Message:", message
        self.send(messageString)
def test_never_transitions(self):
    """
    Given these transitions:
        NEW: [T1, T2],
        T1.final_state: [T3],
        T2.final_state: [T4],
    verify that the chain makes no progress and the callback is not called
    when no transition validates.
    """
    # reinitialize so that neither T1 nor T2 ever validates
    T1.is_valid = lambda x: False
    T2.is_valid = lambda x: False
    dispatcher_config['chains'][0]['transitions'] = {
        NEW: [T1, T2],
        T1.final_state: [T3],
        T2.final_state: [T4],
    }
    dispatcher = Dispatcher(dispatcher_config)
    rsc_map = [
        ('rsc1', '123'),
        ('rsc2', '456'),
    ]
    chain = dispatcher.get_or_create_resource_chain('sample_chain', rsc_map)
    self.assertEqual(chain.state, NEW)
    # Neither T1 nor T2 validates, so the chain stays in NEW and the
    # callback is never invoked
    cb1 = mock.Mock()
    chain.execute(callback=cb1)
    self.assertEqual(chain.state, NEW)
    cb1.assert_not_called()
def __init__(self, clientManager, parent=None):
    Dispatcher.__init__(self, clientManager, parent)
    self.callbacks = {
        Message.CMD_CLIENT_VALIDATED: self.loadResources,
        Message.CMD_QUERY_VMS_OK: self.loadVms,
        Message.CMD_UPDATE_VM_OK: self.handleUpdateVmOk,
        Message.CMD_VM_START_OK: self.handleVmStartOk,
        Message.CMD_VM_START_FAIL: self.handleVmStartFail,
        Message.CMD_VM_UPDATED: self.updateVm,
        Message.CMD_QUERY_GPS_OK: self.loadGps,
        Message.CMD_ADD_GPS_OK: self.handleAddGpsOk,
        Message.CMD_UPDATE_GPS_OK: self.handleUpdateGpsOk,
        Message.CMD_GPS_UPDATED: self.updateGps,
        Message.CMD_GPS_ADDED: self.loadGps,
        Message.CMD_GPS_DELETED: self.deleteGps,
        Message.CMD_DELETE_GPS_OK: self.handleDeleteGpsOk,
        Message.CMD_ACCOUNT_ADDED: self.loadAccounts,
        Message.CMD_QUERY_ACCOUNT: self.loadAccounts,
        Message.CMD_QUERY_ACCOUNT_OK: self.loadAccounts,
        Message.CMD_ADD_ACCOUNT_OK: self.handleAddAccountOk,
        Message.CMD_DELETE_ACCOUNT_OK: self.handleDeleteAccountsOk,
        Message.CMD_ACCOUNT_DELETED: self.deleteAccounts,
    }
def __init__(self, *, serialPort: str = '', baudrate: int = 0, mcip: str = '', mcport: int = 0):
    self.fragment = []
    self.seq = 0
    self.engine = Dispatcher()
    self.quePoint = queue.Queue()
    self.counter = 0
    self.w = Thread(target=self.welcome, daemon=True)
    self.w.start()
    if serialPort:
        logger.debug('+++ use Serial')
        self.serialPort = serialPort
        self.baudrate = baudrate
        self.s = Thread(target=self.fromSerial, daemon=True)
        self.s.start()
    if mcip:
        logger.debug('+++ use UDP(multicast)')
        self.mcip = mcip
        self.mcport = mcport
        self.u = Thread(target=self.fromUDP, daemon=True)
        self.u.start()
def process_file(self, wav_file_path, attack=False, peak=False):
    wav_file, json_file = Importer.load(wav_file_path)
    if not wav_file:
        print("Error for path " + wav_file_path)
        return
    if not json_file and not attack:
        print("Error for path " + wav_file_path)
        return
    # if attack and not json_file:
    if attack and peak:
        X = Dispatcher.peak_extract(wav_file, json_file)
        if json_file:
            y = [json_file[i] for i in sorted(json_file.keys())]
            y.pop(0)
        else:
            y = None
    else:
        X, y = Dispatcher.timestamped_no_peak_check(wav_file, json_file)
    if X is None:
        print("X not present, exiting...")
        return
    X = Extractor.transform_mfcc(X, self.winlen, self.winstep, self.numcep, self.nfilt, self.nfft)
    self.total_X.extend(X)
    if y:
        self.total_y.extend(y)
def test_hgw_frontend_oauth_token_renewal(self, mocked_kafka_consumer, mocked_kafka_producer):
    """
    Tests that, when the hgw frontend token expires, the dispatcher requests another one
    """
    messages = [
        MockMessage(key=ACTIVE_CHANNEL_ID.encode('utf-8'), topic=SOURCES[0]['source_id'].encode('utf-8'),
                    value=b'first_message', offset=0),
        MockMessage(key=ACTIVE_CHANNEL_ID.encode('utf-8'), topic=SOURCES[0]['source_id'].encode('utf-8'),
                    value=b'second_message', offset=1),
    ]
    token_res = {
        'access_token': 'OUfprCnmdJbhYAIk8rGMex4UBLXyf3',
        'token_type': 'Bearer',
        'expires_in': 36000,
        'expires_at': 1516236829.8031383,
        'scope': ['read', 'write']
    }
    mocked_kafka_consumer().__iter__ = Mock(return_value=iter(messages))
    self.counter = 0

    def get_url(*args):
        """
        Simulates the get. The first call to the hgw frontend raises TokenExpiredError,
        which means the token expired and the dispatcher has to fetch another one
        """
        res = MagicMock()
        if args[1].startswith(HGW_FRONTEND_URI) and self.counter == 0:
            self.counter += 1
            raise TokenExpiredError()
        else:
            res.status_code = 200
        if args[1].startswith(CONSENT_MANAGER_URI):
            # simulates the consent manager with the minimum data needed to reach the point
            # of getting the hgw_frontend token
            res.json.return_value = {
                'destination': DESTINATION,
                'status': 'AC'
            }
        return res

    d = Dispatcher('kafka:9093', None, None, None, True)
    with patch.object(OAuth2Session, 'fetch_token', return_value=token_res) as fetch_token, \
            patch.object(OAuth2Session, 'get', get_url):
        # NOTE: the first fetch_token calls (one to the consent manager and one to the
        # hgw frontend) are not mocked since they occur in Dispatcher.__init__; when we call
        # __init__, OAuth2Session is not mocked, so assert_has_calls doesn't register them
        d.run()
        calls = [
            call(token_url="{}/oauth2/token/".format(HGW_FRONTEND_URI),
                 client_id=HGW_FRONTEND_OAUTH_CLIENT_ID,
                 client_secret=HGW_FRONTEND_OAUTH_CLIENT_SECRET)
        ]
        # check that fetch_token is called a second time with the hgw frontend parameters
        fetch_token.assert_has_calls(calls)
def __init__(self):
    self.width = 3
    self.height = 3
    Deferrer.__init__(self)
    Dispatcher.__init__(self)
    self.data = util.make2dArray(self.width, self.height, None)
    self.curPlayer = 1
def __init__(self, robot):
    Algorithm.__init__(self, robot)
    self.calibration_time = 5000
    sweep_time = self.calibration_time / 4
    self.st_start = State("Start")
    self.st_calibrate_right = State("right")
    self.st_calibrate_left = State("left")
    self.st_calibrate_centre = State("recentre")
    self.st_done = State("Done")
    self.st_start.add_transition(Transition(self.st_calibrate_right))
    self.st_calibrate_right.add_transition(
        TransitionTimed(sweep_time, self.st_calibrate_left))
    self.st_calibrate_left.add_transition(
        TransitionTimed(2 * sweep_time, self.st_calibrate_centre))
    self.st_calibrate_centre.add_transition(
        TransitionTimed(sweep_time, self.st_done))
    self.fsm = FSM(self.st_start)
    self.dsp = Dispatcher(self.fsm)
    self.dsp.link_action(self.st_calibrate_right, self.calibrate_right)
    self.dsp.link_action(self.st_calibrate_left, self.calibrate_left)
    self.dsp.link_action(self.st_calibrate_centre, self.calibrate_right)
    self.dsp.link_action(self.st_done, self.robot.stop)
def main(args=None):
    """ Main CLI Entrypoint """
    parser = argparse.ArgumentParser(description='Manages Kubeconfig files')
    parser.add_argument('-k', '--kubeconfig', dest='kubeconfig', help='Path to Kubeconfig file')
    # Positional Arguments
    parser.add_argument('operation', nargs='?', default='help')
    parser.add_argument('--name', dest='name', help='Cluster Name')
    parser.add_argument('--uuid', dest='uuid', help='Cluster UUID')
    # Register os_client_config argparse arguments
    cloud_config = os_client_config.OpenStackConfig()
    cloud_config.register_argparse_arguments(parser, sys.argv)
    args = parser.parse_args()
    # Try to get a cloud from os_client_config
    cloud = None
    try:
        cloud = cloud_config.get_one_cloud(argparse=args)
    except MissingRequiredOptions as ex:
        # We may not need this, don't fail
        LOG.warn("Unable to validate openstack credentials. Bad things may happen soon... "
                 "Check this error out: \n" + ex.message)
    kcfg = Kubeconfig(kcfg_path=args.kubeconfig)
    dis = Dispatcher(cloud, kcfg)
    dis.do(args.operation, args)
def __init__(self, destaddr, output=None):
    Dispatcher.__init__(self)
    self.eventloop = EventLoop()
    self.starttime = None
    self.output = output
    self.ready = True
    self.max_ttl = 16
    self.current_ttl = 16
    self.min_ttl = 0
    self.ident = -1
    try:
        sock = socket.socket(socket.AF_INET, socket.SOCK_RAW, socket.IPPROTO_UDP)
        sock.setsockopt(socket.IPPROTO_IP, socket.IP_HDRINCL, 1)
        self.set_socket(sock)
    except socket.error, err:
        if _sockerror(err.args[0]) == 'EPERM':
            logging.root.error('You need sudo privileges to use raw sockets')
        else:
            log_str = 'Socket Error: %s' % _sockerror(err.args[0])
            logging.root.error(log_str)
        sys.exit(-1)
def _test_chain_execution(session):
    def job1(outbox, params):
        print 'did 1'
        outbox.send('do_2')

    def job2(outbox, params):
        print 'did 2'
        outbox.send('do_3')

    def job3(outbox, params):
        print 'did 3'
        outbox.send('done')

    disp = Dispatcher()
    disp.register('do_1', job1)
    disp.register('do_2', job2)
    disp.register('do_3', job3)
    disp.start()
    try:
        messenger = Messenger(session)  # db.get_session()
        assert not messenger.recv()
        messenger.send('do_1')
        messenger.subscribe('done')
        import time
        time.sleep(1)
        assert messenger.recv()
    finally:
        disp.stop()
def test_dispatch():
    Commands = namedtuple("Commands", ["identifiers"])
    Message = namedtuple("Message", ["username", "author_id"])

    def command_func(message, database, arg1, arg2, arg3):
        return message, database, arg1, arg2, arg3

    commands = Commands({"existing_command": command_func})
    ds = Dispatcher(queue.Queue(), "database", "consumer", commands)
    message = Message("username123", "123455")

    # Correct args
    response = ds.dispatch("existing_command", message, "arg1", "arg2", "arg3")
    assert response == (message, "database", "arg1", "arg2", "arg3")

    # Wrong args, correct command
    try:
        response = ds.dispatch("existing_command", message, "arg1", "arg2")
        assert False, "Should have raised a TypeError."
    except TypeError:
        pass

    # Wrong command
    try:
        response = ds.dispatch("wrong_command", message, "database")
        assert False, "Should have raised a KeyError."
    except KeyError:
        pass
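# A minimal sketch (not from the source) of the dispatch method the test above
# exercises. Only the names visible in the test (the commands.identifiers dict,
# the database positional argument) are taken from it; the constructor layout
# and error propagation are assumptions consistent with the assertions.
class Dispatcher:
    def __init__(self, inbox, database, consumer, commands):
        self.inbox = inbox          # expected to be a queue.Queue of incoming work
        self.database = database
        self.consumer = consumer
        self.commands = commands    # object exposing an `identifiers` dict

    def dispatch(self, identifier, message, *args):
        # An unknown identifier raises KeyError and a wrong argument count
        # raises TypeError, matching the expectations in test_dispatch.
        handler = self.commands.identifiers[identifier]
        return handler(message, self.database, *args)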
def lineReceived(self, data):
    """Called to dispatch a fully-delimited CRLF command"""
    try:
        reqData = json.loads(data)
    except:
        return
    if "request" in reqData:
        Dispatcher.get().dispatchCommand(self, reqData)
def setUp(self):
    self._head1 = MsgCSLogin('test', 0)
    self._head2 = MsgCSMoveto(3, 5)
    self._dispatcher = Dispatcher()
    self._dispatcher.register(100, TestService())
    self.count = 0
def initData(self, serverIP, serverPort, sleepInterval, parent):
    self.serverIP = serverIP
    self.serverPort = serverPort
    self.sleepInterval = sleepInterval
    self.dispatcher = Dispatcher()
    self.parent = parent
    self.isAlive = True
    self.hasBegan = False
def test_tasks_with_same_hosts_are_sent_to_same_queue(self):
    workers = ['hosts_' + str(x) for x in range(5)]
    dispatcher = Dispatcher(workers)
    host = "host_000006"
    first_pick = dispatcher.select_worker(host)
    second_pick = dispatcher.select_worker(host)
    self.assertEqual(first_pick, second_pick,
                     "Dispatcher should pick same worker for same host")
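# A minimal sketch (not from the source) of a select_worker implementation
# consistent with this test and with the duplicate-check test further below:
# the same host always maps to the same worker. The md5-modulo scheme is an
# assumption; any hash that is stable across calls would satisfy the tests.
import hashlib

class Dispatcher:
    def __init__(self, workers):
        self.workers = list(workers)

    def select_worker(self, host):
        # md5 is stable across processes, unlike the built-in hash() under
        # PYTHONHASHSEED randomization.
        digest = hashlib.md5(host.encode("utf-8")).hexdigest()
        return self.workers[int(digest, 16) % len(self.workers)]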
def get(self, request):
    if request.method == 'GET':
        # Initialize Dispatcher
        dispatcher = Dispatcher(request, self.trapi_version)
        # Get merged meta KG
        meta_knowledge_graph = dispatcher.get_meta_knowledge_graph()
        return JsonResponse(meta_knowledge_graph.to_dict())
def get(self, request):
    if request.method == 'GET':
        # Initialize dispatcher
        dispatcher = Dispatcher(request, self.trapi_version)
        # Get all chp app curies
        curies_db = dispatcher.get_curies()
        return JsonResponse(curies_db)
def __init__(self, connection):
    Dispatcher.__init__(self, 3, SASL_FRAME)
    self.connection = connection
    self.mechanisms = None
    self.mechanism = None
    self.username = None
    self.password = None
    self.output_redirect = False
    self.outcome = None
def start():
    global dispatcher
    if dispatcher.isAlive():
        logging.warning("Cannot start dispatcher : Dispatcher is already running")
        return "Cannot start dispatcher : Dispatcher is already running"
    else:
        dispatcher = Dispatcher(job_queue=job_queue)
        dispatcher.start()
        return "Dispatcher started... %s" % dispatcher
def __init__(self, canvas, x, y, width, height, options={}):
    Dispatcher.__init__(self)
    self.obj_id = None
    self.canvas = canvas
    self.x = x
    self.y = y
    self.width = width
    self.height = height
    self.options = options
def start_flask():
    global job_queue
    global dispatcher
    # Create IN job queue & job dispatcher
    job_queue = Queue()
    # d = Dispatcher(job_queue=job_queue, log_queue=loggingqueue.queue)  # process-based logging
    dispatcher = Dispatcher(job_queue=job_queue)
    dispatcher.start()
    app.run(debug=True, use_reloader=False)
def __init__(self, sockaddr=DEFAULT_ADDR):
    asynchat.async_chat.__init__(self)
    Dispatcher.__init__(self)
    self._host, self._port = sockaddr
    self._receiver_thread = None
    self._receiver_thread_exiting = threading.Event()
    self._receiver_thread_exited = False
    self._ibuffer = ""
    # IQfeed separates rows with newline
    self.set_terminator("\r\n")
def run(cityMap):
    import copy
    cityMap = copy.deepcopy(cityMap)
    hospitalsOutput = cityMap.placeHospitals()
    dispatcher = Dispatcher(cityMap)
    ambulanceOutput = dispatcher.startDipatch()
    output = hospitalsOutput + '\n' + ambulanceOutput
    return output
def __init__(self, sockaddr=DEFAULT_ADDR, log=None):
    asynchat.async_chat.__init__(self)
    Dispatcher.__init__(self)
    self._host, self._port = sockaddr
    self._receiver_thread = threading.Thread(None, _start_receive_loop)
    self._close_event = threading.Event()
    self._ibuffer = ""
    self._log = log
    # IQfeed separates rows with newline
    self.set_terminator("\n")
def run(self, driver=True, asynch=True):
    disp = Dispatcher(self.eval_func, asynch)
    while True:
        if disp.run():
            # jobs done
            if driver:
                if drive_optimization(population_size=self.population_size,
                                      dim=self.dim,
                                      lower_bounds=self.lower_bounds,
                                      upper_bounds=self.upper_bounds):
                    continue
        # wait between 5 and 15 seconds to prevent several VMs from accessing GAE simultaneously
        time.sleep(random.randrange(5, 15))
def main():
    report = Report(BEARER_TOKEN, BUDGET_ID, CATEGORY_ID)
    print('Building report...')
    message = report.build_report_string()
    dispatcher = Dispatcher(SENDER_EMAIL_ID, SENDER_EMAIL_PASSWORD, RECIPIENTS)
    print('Sending report...')
    print(message)
    dispatcher.email(REPORT_TITLE, message)
    print('Report sent to ' + RECIPIENTS)
def on_window_map_event(event, param):
    print 'Window mapped'
    thread1 = Thread(Dispatcher(message_fun, buffer), "thread1", 0.9)
    thread2 = Thread(Dispatcher(message_fun, buffer), "thread2", 0.9)
    thread3 = Thread(Dispatcher(message_fun, buffer), "thread3", 0.9)
    thread4 = Thread(Dispatcher(message_fun, buffer), "thread4", 0.5)
    gobject.timeout_add(100, timerfunc)
    thread1.start()
    thread2.start()
    thread3.start()
    thread4.start()
def __init__(self, tasks):
    Dispatcher.__init__(self, tasks, SyncReporter(tasks))

    # create the remote directory if it does not exist
    # (this creates remote-dir and remote-dir/satt)
    for t in tasks:
        m = '{0}@{1}'.format(configs['ssh-user'], t.getMachine())
        dbg('Creating remote directory on {0}'.format(t.getMachine()))
        subprocess.call(['ssh', m, 'mkdir', '-p', '{0}/satt'.format(configs['remote-dir'])])
def __init__(self, port, parent=None):
    QThread.__init__(self, parent)
    self.port = port
    self.serverSocket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    self.serverSocket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    self.serverSocket.bind(('', self.port))
    self.serverSocket.listen(5)
    self.serverSocket.setblocking(False)
    self.dispatcher = Dispatcher(self)
    # clients --> {clientId: Socket object}
    self.clients = {}
    EventManager.bind('Message.broadcast', self.broadcast)
class IA:
    def __init__(self, name, **kargs):
        # Logger initialisation
        self.logger = getLogger("ia")
        f = open(name + ".ini")
        fileConfig(f)
        f.close()
        self.logger.info("Starting %s robot" % name)
        assert(name in ["petit", "gros"])
        module = __import__("robots." + name)
        self.robot = getattr(getattr(module, name), name.capitalize() + "Robot")()
        Robot.copy_from(self.robot)
        # Override the robot's attributes with those passed in as arguments here; useful for testing
        for argument in kargs:
            setattr(self.robot, argument, kargs[argument])
        self.can_sock = socket.socket()
        self.ui_sock = socket.socket()
        self.inter = socket.socket()
        self.logger.debug("Trying to connect to the CAN")
        self.can_sock.connect((self.robot.can_ip, self.robot.can_port))
        self.logger.debug("Trying to connect to the UI")
        self.ui_sock.connect((self.robot.ui_ip, self.robot.ui_port))
        self.logger.debug("Trying to connect to the INTERCOMM")
        self.inter.connect((self.robot.inter_ip, self.robot.inter_port))
        self.can = Can(self.can_sock)
        self.ui = UI(self.ui_sock)
        self.inter = InterCom(self.inter)
        self.dispatcher = Dispatcher(self.robot, self.can, self.ui)
        self.can.dispatcher = self.dispatcher
        self.ui.dispatcher = self.dispatcher
        self.inter.dispatcher = self.dispatcher
        self.dispatcher.start()  # Better if started before can and ui
        self.can.start()
        self.ui.start()
        self.inter.start()
        self.logger.info("IA initialized")
        self.ui.join()
        self.logger.info("IA stopped")
def __init__(self, canvas, x, y, num_x, num_y, rec_width, rec_height, reccls, rec_options={}):
    Dispatcher.__init__(self)
    self.canvas = canvas
    self.x = x
    self.y = y
    self.num_x = num_x
    self.num_y = num_y
    self.rec_width = rec_width
    self.rec_height = rec_height
    self.reccls = reccls
    self.rec_options = rec_options
    self.map_rec = {}
    Grid.regcb(self)
def test_consume_topic_subscription(self, mocked_kafka_consumer, mocked_kafka_producer):
    """
    Tests that the consumer is subscribed to the topic
    :return:
    """
    d = Dispatcher('kafka:9093', None, None, None, True)
    d.run()
    sources_id = [s['source_id'] for s in SOURCES]
    mocked_kafka_consumer().partitions_for_topic.assert_has_calls(
        [call(s) for s in sources_id])
    mocked_kafka_consumer().subscribe.assert_called()
class Plugin:
    def __init__(self, path):
        self.config = Configuration(path)
        self.dispatcher = Dispatcher()

    def initialize(self):
        self.config.load()
        self.dispatcher.setConfig(self.config)

    def run(self):
        self.dispatcher.run()
def start(self):
    # Global activity flag
    self.isactive = True
    # Dispatcher-controlled event that worker threads wait on before they start
    self.wakeSignal = threading.Event()
    # create worker threads
    self.workers = self._create_worker(self.num_workers)
    # create dispatcher thread
    self.dispatcher = Dispatcher(self)
    # start worker threads
    for worker in self.workers:
        worker.start()
    # start dispatcher thread
    self.dispatcher.start()
def tests_worker_doesnt_pick_tasks_for_only_one_host(self):
    workers = ['worker_' + str(x) for x in range(5)]
    hosts = ["host_00000" + str(x) for x in range(100)]
    dispatcher = Dispatcher(workers)
    selection = defaultdict(list)
    for host in hosts:
        worker = dispatcher.select_worker(host)
        selection[worker].append(host)
    worker_1 = selection['worker_1']
    duplicate_check = {}
    for i in worker_1:
        self.assertEqual(None, duplicate_check.get(i))
        duplicate_check[i] = 1
def main():
    tasksq = multiprocessing.Queue()
    mgr = multiprocessing.Manager()
    e = mgr.Event()
    scheduler = Scheduler(e, tasksq)  # Reads the probs to be executed and queues them in tasksq
    dispatcher = Dispatcher(tasksq)
    o = Oscillator(e, 1)  # TODO: this should be a configuration
    o.start()
    scheduler.start()
    dispatcher.start()
    o.join()
    scheduler.join()
def __init__(self, destaddr):
    Dispatcher.__init__(self)
    self.eventloop = EventLoop()
    self.destaddr = destaddr
    self.srcaddr = ("0.0.0.0", 1)
    try:
        sock = socket.socket(socket.AF_INET, socket.SOCK_RAW, socket.IPPROTO_ICMP)
        self.set_socket(sock)
        self.bind(self.srcaddr)
    except socket.error, err:
        log_str = "Socket Error: %s" % _sockerror(err.args[0])
        logging.root.exception(log_str)
        sys.exit(-1)
def handle_read(self):
    data = self.recv(4096)
    if data is None:
        return
    else:
        self.message += data
    while True:
        index = self.message.find(MessageMark.END_MARK)
        if index != -1:
            handle_part = self.message[:index]
            # Handle message
            Dispatcher.dispatch(self, handle_part)
            self.message = self.message[index + len(MessageMark.END_MARK):]
        else:
            break
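# A standalone illustration (not from the source) of the END_MARK framing the
# handle_read loop above relies on. MessageMark.END_MARK and the surrounding
# class are assumptions; the "\r\n" value below is only a placeholder delimiter.
END_MARK = "\r\n"

def split_messages(buffered, chunk):
    """Append a received chunk to the buffer and return (complete_messages, remainder)."""
    buffered += chunk
    complete = []
    while True:
        index = buffered.find(END_MARK)
        if index == -1:
            break
        complete.append(buffered[:index])
        buffered = buffered[index + len(END_MARK):]
    return complete, buffered

# Two-and-a-half messages arriving across two reads:
msgs, rest = split_messages("", "hello\r\nwor")
assert msgs == ["hello"] and rest == "wor"
msgs, rest = split_messages(rest, "ld\r\npartial")
assert msgs == ["world"] and rest == "partial"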
class Main():
    def __init__(self, gru_file=None):
        self.menu = Menu(self, config, game=None)
        self.library = Library(self, config)
        self.collider = Collider(self.menu, self.library, game=None)
        self.dispatcher = Dispatcher(self.collider, self.library, self.menu, game=None)
        self.clock = pygame.time.Clock()
        self.fps = Fps(self.clock, config)
        if gru_file is not None:
            game = Game(gru_file)
            self.load(game)
        else:
            self.game = None

    def load(self, game):
        self.library.clear()
        self.game = game
        self.collider.game = game
        self.menu.game = game

    def clear(self):
        self.game = None
        self.collider.game = None
        self.menu.game = None

    def draw(self):
        tick = self.clock.get_time()
        self.menu.draw()
        if self.game is None:
            self.library.draw()
        else:
            self.game.draw(tick)

    def start(self):
        while 1:
            self.clock.tick()
            mrect = Rect(mouse.get_pos(), (1, 1))
            self.collider.check()
            self.draw()
            self.fps.draw()
            for ev in event.get():
                self.dispatcher.process(ev)
            display.update()
def __init__(self, name, **kargs):
    self.logger = getLogger("ia")
    module = __import__("robots." + name)  # import robots.<name>
    self.robot = getattr(getattr(module, name), name.capitalize() + "Robot")()  # create <Name>Robot
    self.comm = Comm(self.robot)
    self.dispatcher = Dispatcher(self.robot, self.comm)
    self.dispatcher.start()
    self.logger.info("IA started !")
    from events.internal import InternalEvent
    event = InternalEvent("start")
    self.dispatcher.add_event(event)
def __init__(self) -> None:
    """Initialize a Simulation."""
    self._events = PriorityQueue()
    self._dispatcher = Dispatcher()
    self._monitor = Monitor()
def msg_process(msg, tstamp):
    js = json.loads(msg)
    storeData = Store()
    storeData.storeRequest(js['originationNumber'], js['messageBody'])
    print("Passed to store")
    d = Dispatcher(js['originationNumber'], js['messageBody'])
    print("Dispatcher Generated")
def __init__(self, num_floors=10, num_elevators=2, elevator_speed=1.0, elevator_wait_time=1.0):
    self._elevator_speed = elevator_speed
    self._elevator_wait_time = elevator_wait_time
    self._elevators = []
    self._dispatcher = Dispatcher(self._elevators)
    self._num_floors = num_floors
    for i in range(num_elevators):
        self._elevators.append(
            Elevator(str(i), self._elevator_speed, self._elevator_wait_time))
class IA:
    def __init__(self, name, **kargs):
        self.logger = getLogger("ia")
        module = __import__("robots." + name)  # import robots.<name>
        self.robot = getattr(getattr(module, name), name.capitalize() + "Robot")()  # create <Name>Robot
        self.comm = Comm(self.robot)
        self.dispatcher = Dispatcher(self.robot, self.comm)
        self.dispatcher.start()
        self.logger.info("IA started !")
        from events.internal import InternalEvent
        event = InternalEvent("start")
        self.dispatcher.add_event(event)
def get(self):
    response = "XML Data Get"
    try:
        xml_data = self.request.get('xml')
    except:
        xml_data = None
        self.response.out.write("No XML Found")
    else:
        try:
            c = Dispatcher()
            response = c.dispatch(xml_data)
        except:
            response = "Error in dispatcher"
    # response = "hello process request"
    self.response.out.write(response)
def __init__(self, application_data):
    super(MainWindow, self).__init__()
    # event dispatcher
    self.dispatcher = Dispatcher.instance()
    # translations
    self.translate = QtCore.QCoreApplication.translate
    # application data
    self.application_data = application_data
    self.interval = 'daily'
    # application settings
    self.settings = QtCore.QSettings('OttoBackup', 'settings')
    # thread which runs rsnapshot
    self.worker = Worker(self.settings)
    # is rsnapshot still running?
    self.busy = False
    # connect to events
    self.dispatcher.error.connect(self.command_error)
    self.dispatcher.command_complete.connect(self.command_complete)
    self.dispatcher.rsnapshot_firstset.connect(self.on_rsnapshot_firstset)
    # Install the custom output stream
    sys.stdout = EmittingStream(text_written=self.log_command)
    # init ui
    self.init_ui()
    # check required settings
    self.check_settings()
def main():
    taxi1 = Taxi('Kia')
    dr1 = Driver('Armen', 1)
    dr2 = Driver('Karen', 2, 'Free')
    dr3 = Driver('Aram', 3, 'Busy')
    dr_list = Drivers()
    dr_list.addDriver(dr1)
    dr_list.addDriver(dr2)
    dr_list.addDriver(dr3)
    cus1 = Customer('Mari', 1)
    cus2 = Customer('Ani', 2)
    cus_list = Customers()
    cus_list.addCustomer(cus1)
    cus_list.addCustomer(cus2)
    print(cus_list.customers)
    dis = Dispatcher(dr_list)
    taxi1.drivers = dr_list
    taxi1.customers = cus_list.customers
    taxi1.dispatcher = dis
    print(taxi1.customers)
    cus1.request_taxi()
    ride1 = taxi1.start_ride()
    print(ride1.created_at)
def __init__(self, factory):
    Dispatcher.__init__(self, 0, AMQP_FRAME)
    self.factory = factory
    self.container_id = None
    self.open_rcvd = False
    self.open_sent = False
    self.close_rcvd = False
    self.close_sent = False
    self.exception = None
    # incoming channel -> session
    self.incoming = {}
    # outgoing channel -> session
    self.outgoing = {}
    self.max_frame_size = 4294967295
def init_dispatcher(self):
    if self.dispatcher:
        return
    if self.dispatcher_mode == 'external':
        raise RuntimeError, 'dispatcher mode is %s' % self.dispatcher_mode
    self.dispatcher = Dispatcher(self.hq.get_domaininfo(), self.jobname,
                                 mapper=self.mapper,
                                 scheduler=self.scheduler,
                                 inq=self.inq.rqfile)