def haunt(tMin, tMax):
    configure()
    global hauntMode
    global duration
    running = 1
    timer_start = time.time()
    while running:
        # wait a random interval before the next scare
        schedule = random.randint(tMin, tMax)
        if duration > 0:
            # clamp the last wait so the total run time never exceeds duration
            if time.time() - timer_start + schedule >= duration:
                schedule = duration - (time.time() - timer_start)
                running = 0
        time.sleep(schedule)
        if hauntMode == 0:
            # sound only
            randomNoise()
        if hauntMode == 1:
            # randomly pick sound or relay
            coinflip = random.randint(1, 2)
            if coinflip == 1:
                randomNoise()
            if coinflip == 2:
                relayTrigger()
        if hauntMode == 2:
            # fire sound and relay at the same time, each in its own process
            doSound = Process(target=randomNoise)
            doRelay = Process(target=relayTrigger)
            doSound.start()
            doRelay.start()
            doSound.join()
            doRelay.join()
        if duration > 0:
            if time.time() - timer_start >= duration:
                running = 0

def manager(self):
    try:
        putter_process = Process(target=self.put_queue)
        getter_process = Process(target=self.get_queue)
        putter_process.start()
        getter_process.start()
        putter_process.join()
    except Exception as e:
        raise Exception(e.args[0])

def observe(self):
    army_entri_p = Process(target=self.enemy_entrance, args=(0, [200, 200]))
    army_entri_p.start()
    i = 1
    while True:
        self.draw_background()
        # pygame.time.delay(40)
        time.sleep(0.1)
        # playerstatus = self.player
        key_pressed = pygame.key.get_pressed()
        pygame.key.get_repeat()
        if key_pressed[pygame.K_UP]:
            self.player.moveUp()
        if key_pressed[pygame.K_DOWN]:
            self.player.moveDown()
        if key_pressed[pygame.K_LEFT]:
            self.player.moveLeft()
        if key_pressed[pygame.K_RIGHT]:
            self.player.moveRight()
        for event in pygame.event.get():
            if event.type == pygame.QUIT:
                pygame.quit()
                exit()
            elif event.type == pygame.KEYDOWN:
                if event.key == pygame.K_LEFT:
                    self.player.changeBody(Player.Body.Left)
                elif event.key == pygame.K_RIGHT:
                    self.player.changeBody(Player.Body.Right)
                elif event.key == pygame.K_z:
                    self.player.is_attack = True
                elif event.key == pygame.K_LSHIFT:
                    self.player.speed = param.slow_speed
                    Process(target=self.open_field, args=()).start()
                    print('A.T. Field fully open!')
                    # self.open_field()
            elif event.type == pygame.KEYUP:
                if (event.key == pygame.K_LEFT and not key_pressed[pygame.K_RIGHT]) \
                        or (event.key == pygame.K_RIGHT and not key_pressed[pygame.K_LEFT]):
                    self.player.changeBody(Player.Body.Center)
                elif event.key == pygame.K_z:
                    self.player.is_attack = False
                elif event.key == pygame.K_LSHIFT:
                    # self.close_field()
                    self.player.speed = param.speed
                    Process(target=self.close_field, args=()).start()
                    print('A.T. Field closed!')
        self.attack_check()
        # self.safe_check()
        self.show_player()
        self.show_enemys()
        # print('onDraw()')
        pygame.display.update()

def scoreDuplicates(record_pairs: RecordPairs, data_model, classifier, num_cores: int = 1):
    if num_cores < 2:
        from multiprocessing.dummy import Process, Queue
        SimpleQueue = Queue
    else:
        from .backport import Process, SimpleQueue, Queue  # type: ignore

    first, record_pairs = peek(record_pairs)
    if first is None:
        raise BlockingError("No records have been blocked together. "
                            "Is the data you are trying to match like "
                            "the data you trained on?")

    record_pairs_queue: _Queue = Queue(2)
    score_queue: _SimpleQueue = SimpleQueue()
    result_queue: _SimpleQueue = SimpleQueue()
    n_map_processes = max(num_cores, 1)
    score_records = ScoreDupes(data_model, classifier,
                               record_pairs_queue, score_queue)
    map_processes = [Process(target=score_records)
                     for _ in range(n_map_processes)]

    for process in map_processes:
        process.start()

    reduce_process = Process(target=mergeScores,
                             args=(score_queue, result_queue, n_map_processes))
    reduce_process.start()

    fillQueue(record_pairs_queue, record_pairs, n_map_processes)

    result = result_queue.get()
    if isinstance(result, Exception):
        raise ChildProcessError

    if result:
        scored_pairs_file, dtype, size = result
        scored_pairs = numpy.memmap(scored_pairs_file,
                                    dtype=dtype,
                                    shape=(size, ))
    else:
        dtype = numpy.dtype([('pairs', object, 2),
                             ('score', 'f4', 1)])
        scored_pairs = numpy.array([], dtype=dtype)

    reduce_process.join()
    for process in map_processes:
        process.join()

    return scored_pairs

def UpdateAll(self):
    self.ScheduleBase.UpdateSchedule(self.date)
    if DownloadScheduleFromSite(self.date):
        update_process = Process(target=self.UpdateClasses)
        update_process.start()
        return True
    elif path.exists(Config.PATH + f'work/source/{self.date}.png'):
        update_process = Process(target=self.UpdateClasses)
        update_process.start()
        return True
    return False

def run_both():
    run = 20
    for i in range(run):
        receiver = Process(target=run_receiver)
        sender = Process(target=run_sender)
        receiver.start()
        sender.start()
        receiver.join()
        sender.join()
        time.sleep(1)

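# A minimal, self-contained sketch of the run_receiver / run_sender pair that
# run_both above drives. These are assumptions, not the original project's
# workers: a UDP ping on localhost (port 9999 is illustrative) so the loop
# above can be exercised end to end.
import socket
import time

def run_receiver():
    sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    sock.bind(('127.0.0.1', 9999))
    sock.settimeout(5)  # give up rather than hang if the sender dies
    try:
        data, addr = sock.recvfrom(1024)
        print('received {0!r} from {1}'.format(data, addr))
    finally:
        sock.close()

def run_sender():
    time.sleep(0.5)  # crude wait so the receiver has bound its socket
    sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    sock.sendto(b'ping', ('127.0.0.1', 9999))
    sock.close()
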
def scoreDuplicates(records, data_model, classifier, num_cores=1, threshold=0):
    if num_cores < 2:
        from multiprocessing.dummy import Process, Queue
        SimpleQueue = Queue
    else:
        from .backport import Process, SimpleQueue, Queue

    first, records = peek(records)
    if first is None:
        raise ValueError("No records have been blocked together. "
                         "Is the data you are trying to match like "
                         "the data you trained on?")

    record_pairs_queue = Queue(2)
    score_queue = SimpleQueue()
    result_queue = SimpleQueue()
    n_map_processes = max(num_cores - 1, 1)
    score_records = ScoreRecords(data_model, classifier, threshold)
    map_processes = [Process(target=score_records,
                             args=(record_pairs_queue, score_queue))
                     for _ in range(n_map_processes)]
    for process in map_processes:
        process.start()

    reduce_process = Process(target=mergeScores,
                             args=(score_queue, result_queue, n_map_processes))
    reduce_process.start()

    fillQueue(record_pairs_queue, records, n_map_processes)

    result = result_queue.get()
    if isinstance(result, Exception):
        raise ChildProcessError

    if result:
        scored_pairs_file, dtype, size = result
        scored_pairs = numpy.memmap(scored_pairs_file,
                                    mode='r',
                                    dtype=dtype,
                                    shape=(size,))
    else:
        scored_pairs = []

    reduce_process.join()
    for process in map_processes:
        process.join()

    return scored_pairs

def state_estimation_start(user_id):
    """
    Actually start the process of state estimation. This saves a file
    called 'params.json' in /tmp/uncurl/<user_id> containing all parameters
    used in state estimation.
    """
    path = os.path.join(current_app.config['USER_DATA_DIR'], user_id)
    gene_names_file = os.path.join(path, 'gene_names.txt')
    if not os.path.exists(gene_names_file):
        gene_names_file = None
    # TODO: deal with init here - make note if it's qualitative or
    # quantitative
    # run qualNorm???
    init_path = os.path.join(path, 'init.txt')
    if not os.path.exists(init_path):
        init_path = None
    # load json params
    with open(os.path.join(path, 'preprocess.json')) as f:
        preprocess = json.load(f)
    for key in request.form.keys():
        preprocess[key] = request.form[key]
    # params.json contains all input parameters to the state estimation,
    # as well as all stats from preprocess.json.
    with open(os.path.join(path, 'params.json'), 'w') as f:
        json.dump(preprocess, f)
    P = Process(target=state_estimation_thread,
                args=(user_id, gene_names_file, init_path, path, preprocess,
                      current_app.config.copy()))
    P.start()
    return redirect(url_for('views.state_estimation_result', user_id=user_id))

def state_estimation_input():
    user_id = str(uuid.uuid4())
    if 'username' in request.form:
        if len(request.form['username']) > 0:
            # make username a safe string
            keep_chars = set(['-', '_', ' '])
            username = request.form['username'].strip()[:25]
            username = ''.join(
                [c for c in username if c.isalnum() or (c in keep_chars)])
            user_id = user_id + '-' + username
    base_path = os.path.join(current_app.config['USER_DATA_DIR'], user_id)
    os.makedirs(base_path)
    # save request.form
    with open(os.path.join(base_path, 'inputs.json'), 'w') as f:
        f.write(json.dumps(request.form))
    # TODO: if file is large, start a new thread. otherwise just
    # run the thing
    request_file = request.files
    request_form = request.form
    data_paths, gene_paths, output_filenames, init, shapes = load_upload_data(
        request_file, request_form, base_path)
    # TODO: deal with init
    P = Process(target=state_estimation_preproc,
                args=(user_id, base_path, data_paths, gene_paths,
                      output_filenames, init, shapes))
    P.start()
    # state_estimation_preproc(user_id, path)
    return redirect(url_for('views.state_estimation_result', user_id=user_id))

def test_pause_unpause(self):
    """
    Verify that the pause method actually works. In this case, working
    means that the process doesn't send any more HTTP requests, a fact
    that is verified with the "fake" count plugin.
    """
    core_start = Process(target=self.w3afcore.start, name='TestRunner')
    core_start.daemon = True
    core_start.start()

    # Let the core start, and the count plugin send some requests.
    time.sleep(5)
    count_before_pause = self.count_plugin.count
    self.assertGreater(self.count_plugin.count, 0)

    # Pause and measure
    self.w3afcore.pause(True)
    count_after_pause = self.count_plugin.count

    time.sleep(2)
    count_after_sleep = self.count_plugin.count

    all_equal = count_before_pause == count_after_pause == count_after_sleep
    self.assertTrue(all_equal)

    # Unpause and verify that all requests were sent
    self.w3afcore.pause(False)
    core_start.join()

    self.assertEqual(self.count_plugin.count, self.count_plugin.loops)

def start(self, visitor):
    if not self.is_running():
        BtTicker._visitor = visitor
        BtTicker._process = Process(target=BtTicker._run, args=(self, True))
        BtTicker._process.start()
        return True

def runReadData(printBool, maxIter=50):
    '''
    Run localization on real sensor data.
    :param printBool: [bool] whether to print output
    :param maxIter: [int] maximum number of iterations
    :return:
    '''
    snesorDict = {'imu': 'LSM6DS3TR-C', 'magSensor': 'AK09970d'}
    readObj = ReadData(snesorDict)  # create the object that reads the data
    outputData = multiprocessing.Array('f', [0] * len(snesorDict) * 24)
    magBg = multiprocessing.Array('f', [0] * 6)
    state0 = multiprocessing.Array('f', [0, 0, 0.01, 1, 0, 0, 0])

    readObj.send()
    pRec = Process(target=readObj.receive, args=(outputData, magBg, None))
    # pRec.daemon = True
    pRec.start()
    time.sleep(2)

    pTrack3D = multiprocessing.Process(target=track3D, args=(state0, ))
    pTrack3D.daemon = True
    pTrack3D.start()

    while True:
        measureData = np.concatenate((outputData[:3], outputData[6:9]))
        LM(state0, measureData, 7, maxIter, printBool)
        time.sleep(0.1)

def new_process(cls, *args, **kwargs):
    process = Process(target=BaseCoroutine.start_coroutine,
                      args=(cls, ) + args, kwargs=kwargs)
    process.daemon = True
    process.start()
    return process

def start_websockify():
    print('start vnc proxy..')
    t = Process(target=worker, args=())
    t.start()
    print('vnc proxy started..')

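# A minimal sketch of the worker start_websockify above launches. This is an
# assumption, not the original implementation: it shells out to the websockify
# CLI, bridging WebSocket clients on port 6080 to a VNC server on port 5900
# (both port numbers are illustrative).
import subprocess

def worker():
    subprocess.call(['websockify', '6080', 'localhost:5900'])
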
def main():
    workers = []
    workers.append(Process(target=process_counters_msg,
                           args=('amqp://*****:*****@193.168.15.156/netscan',
                                 'http_list', 'amq.direct')))
    for p in workers:
        p.start()
    for p in workers:
        p.join()

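# A minimal sketch of a consumer like the process_counters_msg worker above.
# It assumes pika 1.x and illustrative queue semantics; it is not the original
# worker. It connects with the AMQP URI, binds the queue to the exchange, and
# handles messages until interrupted.
import pika

def process_counters_msg(amqp_uri, queue_name, exchange):
    connection = pika.BlockingConnection(pika.URLParameters(amqp_uri))
    channel = connection.channel()
    channel.queue_declare(queue=queue_name, durable=True)
    channel.queue_bind(queue=queue_name, exchange=exchange,
                       routing_key=queue_name)

    def on_message(ch, method, properties, body):
        print('received: {0!r}'.format(body))

    channel.basic_consume(queue=queue_name, on_message_callback=on_message,
                          auto_ack=True)
    channel.start_consuming()
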
def main():
    snesorDict = {'imu': 'LSM6DS3TR-C'}
    readObj = ReadData(snesorDict)
    # outputDataSigma = multiprocessing.Array('f', [0] * len(snesorDict) * 24)
    outputDataSigma = None
    magBg = multiprocessing.Array('f', [0] * 6)
    outputData = multiprocessing.Array('f', [0] * len(snesorDict) * 24)
    state = multiprocessing.Array('f', [0, 0, 0, 1, 0, 0, 0])

    # Wait a second to let the port initialize
    # readObj.send()

    # receive data in a new process
    pRec = Process(target=readObj.receive,
                   args=(outputData, magBg, outputDataSigma))
    pRec.daemon = True
    pRec.start()

    pTrack3D = multiprocessing.Process(target=track3D, args=(state, ))
    pTrack3D.daemon = True
    pTrack3D.start()

    mp = MahonyPredictor(q=state[3:], Kp=100, Ki=0.01, dt=0.002)
    while True:
        # print("a={}, w={}".format(np.round(outputData[:3], 2), np.round(outputData[3:6], 2)))
        mp.getGyroOffset(outputData[3:6])
        mp.IMUupdate(outputData[:3], outputData[3:6])
        state[3:] = mp.q
        time.sleep(0.08)

def test_pause_stop(self):
    '''
    Verify that the pause method actually works. In this case, working
    means that the process doesn't send any more HTTP requests after we
    pause, and that stop works when paused.
    '''
    core_start = Process(target=self.w3afcore.start, name='TestRunner')
    core_start.daemon = True
    core_start.start()

    # Let the core start, and the count plugin send some requests.
    time.sleep(5)
    count_before_pause = self.count_plugin.count
    self.assertGreater(self.count_plugin.count, 0)

    # Pause and measure
    self.w3afcore.pause(True)
    count_after_pause = self.count_plugin.count

    time.sleep(2)
    count_after_sleep = self.count_plugin.count

    all_equal = count_before_pause == count_after_pause == count_after_sleep
    self.assertTrue(all_equal)

    # Stop while paused
    self.w3afcore.stop()
    core_start.join()

    # No more requests sent after pause
    self.assertEqual(self.count_plugin.count, count_after_sleep)

def __init__(self, *args, **kwargs):
    super().__init__(*args, **kwargs)
    self.active = True
    self.withdraw()
    self.main_window = Window.MainWindow(self)
    self.main_window.abtn.config(command=self.do_something)
    self.main_window.protocol("WM_DELETE_WINDOW", self.on_closing)
    self.main_window.btn_exit.config(command=self.on_closing)
    self.main_window.abtn.config(command=self.connect_all)
    self.devices = []
    for name, adr in SRXAddresses:
        dev = SRXdev.SRXDevice(name, adr, username, password)
        sub_fr = Window.SubFrame(self.main_window, name, adr, ' N/A ')
        action_with_arg = partial(self.connect_device, dev, sub_fr)
        sub_fr.abtn.config(command=action_with_arg)
        action1_with_arg = partial(self.get_curr_gw, dev, sub_fr)
        sub_fr.cbtn.config(command=action1_with_arg)
        action2_with_arg = partial(self.reset_ospf, dev, sub_fr)
        sub_fr.dbtn.config(command=action2_with_arg)
        sub_fr.setBad()
        self.devices.append([dev, sub_fr])
    process = Process(target=self.check_status, args=())
    process.start()

def _cmd_start(self, params):
    """
    Start the core in a different thread, monitor keystrokes in the
    main thread.

    :return: None
    """
    # Check if the console output plugin is enabled or not, and warn.
    output_plugins = self._w3af.plugins.get_enabled_plugins('output')
    if 'console' not in output_plugins:
        msg = "Warning: You disabled the console output plugin. If you"\
              " start a new scan, the discovered vulnerabilities won't"\
              " be printed to the console, we advise you to enable at"\
              " least one output plugin in order to be able to actually"\
              " see the scan output."
        print(msg)

    # Note that I'm NOT starting this in a new multiprocess Process.
    # Please note the multiprocessing.dummy, this is required because
    # I want to start new threads inside this thread and there is a bug
    # with that http://bugs.python.org/issue10015
    self._scan_thread = Process(target=self._real_start)
    self._scan_thread.name = 'ConsoleScanThread'
    self._scan_thread.daemon = True
    self._scan_thread.start()

    # let the core thread start
    time.sleep(1)

    try:
        if self._w3af.status.get_status() != 'Not running.':
            self.show_progress_on_request()
    except KeyboardInterrupt:
        om.out.console('User pressed Ctrl+C, stopping scan.')
        self._w3af.stop()

def test_stop(self):
    """
    Verify that the stop method actually works. In this case, working
    means that the process doesn't send any more HTTP requests after we
    stop().

    This test seems to be failing @ CircleCI because of a test dependency
    issue. If run alone in your workstation it will PASS, but if run at
    CircleCI the count plugin doesn't seem to start.
    """
    core_start = Process(target=self.w3afcore.start, name='TestRunner')
    core_start.daemon = True
    core_start.start()

    # Let the core start, and the count plugin send some requests.
    time.sleep(5)
    count_before_stop = self.count_plugin.count
    self.assertGreater(count_before_stop, 0)

    # Stop now
    self.w3afcore.stop()
    core_start.join()

    count_after_stop = self.count_plugin.count
    self.assertEqual(count_after_stop, count_before_stop)

def start(self, message):
    self.__result = ''
    self.__working = True
    self.__process = Process(target=self.__work__, args=(message, ),
                             name=self.__name + '_thread')
    self.__process.start()

def test_pause_unpause(self):
    output = Queue.Queue()
    self.uri_opener.pause(True)

    def send(uri_opener, output):
        url = URL(get_moth_http())
        try:
            http_response = uri_opener.GET(url)
            output.put(http_response)
        except:
            output.put(None)

    th = Process(target=send, args=(self.uri_opener, output))
    th.daemon = True
    th.start()

    self.assertRaises(Queue.Empty, output.get, True, 2)

    self.uri_opener.pause(False)
    http_response = output.get()
    self.assertNotIsInstance(http_response, types.NoneType,
                             'Error in send thread.')

    th.join()

    self.assertEqual(http_response.get_code(), 200)
    self.assertIn(self.MOTH_MESSAGE, http_response.body)

def run(self):
    jobs = []
    for i in range(int(ceil(self.count_vacancy / 25))):
        p = Process(target=self.get_graph_maker())
        jobs.append(p)
        p.start()
    # join only after every worker has been started, so the jobs
    # run concurrently instead of one at a time
    for p in jobs:
        p.join()

def send_emails(modeladmin, request, queryset):
    messages = Queue()
    for user in queryset:
        process = Process(target=send_email, args=(user, messages))
        process.start()
        messages.get().send()
        process.join()

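# A minimal sketch of a send_email worker compatible with the admin action
# above. The subject and body are illustrative assumptions: the child builds
# the user's message and hands it back through the shared queue, and the
# parent calls .send() on it.
from django.core.mail import EmailMessage

def send_email(user, messages):
    email = EmailMessage(
        subject='Notification',
        body='Hello, {0}!'.format(user.get_username()),
        to=[user.email],
    )
    messages.put(email)
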
def main():
    workers = []
    workers.append(Process(target=process_port_scan,
                           args=(rabbituri, task_queue, 'amq.direct')))
    for p in workers:
        p.start()
    for p in workers:
        p.join()

def report(pk, num):
    # Hacky way to enforce a db commit from outside the atomic transaction.
    # Spawning a separate celery task won't work, as celery will queue
    # spawns after the commit of the underlying transaction.
    from multiprocessing.dummy import Process
    p = Process(target=update_status, args=(pk, num))
    p.start()
    p.join()

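# A minimal sketch of the update_status helper report() above runs. The Report
# model and its status field are illustrative assumptions: the point is that
# the dummy Process runs the write on another thread, whose separate database
# connection is not inside the caller's atomic block.
def update_status(pk, num):
    Report.objects.filter(pk=pk).update(status=num)
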
def _terminate(self):
    om.out.information('The user terminated the spider_man session.')

    def stop(after):
        time.sleep(after)
        self.parent_process.stop()

    Process(target=stop, args=(2,)).start()

def parmap(f, X):
    pipe = [Pipe() for x in X]
    proc = [Process(target=spawn(f), args=(c, x))
            for x, (p, c) in izip(X, pipe)]
    for p in proc:
        p.start()
    for p in proc:
        p.join()
    return [p.recv() for (p, c) in pipe]

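# A minimal sketch of the spawn helper parmap above assumes: wrap f so the
# child computes f(x) and ships the result back through its end of the pipe.
def spawn(f):
    def fun(pipe, x):
        pipe.send(f(x))
        pipe.close()
    return fun

# Usage, given the imports parmap needs (Pipe, Process, izip):
# parmap(lambda x: x ** 2, [1, 2, 3])  ->  [1, 4, 9]
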
def run_worker():
    # update_paths()
    redis_conn = redis.StrictRedis(host=os.environ["SCHEDULER_SERVICE_HOST"],
                                   port=6379, encoding="utf-8")

    download_thread = Process(target=download_file_process)
    polymer_thread = Process(target=process_polymer_process)
    upload_thread = Process(target=upload_file_process)

    download_thread.start()
    polymer_thread.start()
    upload_thread.start()

    # Read from pickle files
    # if os.path.exists("/download_queue.pkl"):
    #     with open("/download_queue.pkl", "r") as f:
    #         old_data = pickle.load(f)
    #     for o in old_data:
    #         download_file_queue.put(o)
    #
    # if os.path.exists("/run_polymer_queue.pkl"):
    #     with open("/run_polymer_queue.pkl", "r") as f:
    #         old_data = pickle.load(f)
    #     for o in old_data:
    #         run_polymer_queue.put(o)
    #
    # if os.path.exists("/upload_queue.pkl"):
    #     with open("/upload_queue.pkl", "r") as f:
    #         old_data = pickle.load(f)
    #     for o in old_data:
    #         upload_output_queue.put(o)

    while True:
        next_folder = redis_conn.blpop("geo-queue", timeout=1)
        if next_folder is None:
            continue
        queue_name, folder_to_process_raw = next_folder
        folder_to_process = folder_to_process_raw.decode("utf-8")
        logging.info("Got folder: {0}".format(folder_to_process))
        # logging.info("Putting in Download: {0}".format(download_file_queue.qsize()))
        download_file_queue.put(folder_to_process, block=True, timeout=None)

def new_send_email(email):
    if not IS_ACTIVE:
        new_send_email.queue = Manager().Queue()
        process = Process(target=process_sent_queue,
                          args=(new_send_email.queue, ))
        process.daemon = True
        process.start()
        models.IS_ACTIVE = True
    new_send_email.queue.put(email)

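# A minimal sketch of the process_sent_queue consumer that new_send_email
# above expects (an assumption, not the project's code): drain the shared
# queue forever, sending each queued email object.
def process_sent_queue(queue):
    while True:
        email = queue.get()  # blocks until an email is queued
        email.send()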