def MakeModeling(baseDir, dirName, outputFileName="", key="", pdfDir=None, mcscale=1.0, histName="hist.root", interactive=False):
    """Build the histogram ProcessManager for a single MC input and, unless
    running interactively, write the stacked result to *outputFileName*.

    Returns the configured ProcessManager so the caller can inspect it.
    NOTE(review): pdfDir is currently unused in this variant — confirm intent.
    """
    # The manager performs the stacking of MC against data.
    job = ProcessManager()

    # Register the lone MC process (scaled by mcscale, drawn in yellow).
    job.AddProcess(
        "file1",
        file=baseDir + "/" + histName,
        dir=dirName,
        color=ROOT.kYellow,
        scale=mcscale,
    )

    # Batch mode: persist the stacks to a freshly recreated ROOT file.
    if not interactive:
        out = TFile(outputFileName, "RECREATE")
        job.makeStacks(out, key)
    return job
def evalInput(self, input):
    """Launch the target binary, feed *input* to it, resume execution, and
    parse the ",<int>." framed reply from its output.

    Returns the parsed integer, or -1 when the target cannot be launched.
    """
    launch = LaunchArguments([self.path], True)
    try:
        mgr = ProcessManager(launch, None)
    except PtraceError as e:
        warning(e)
        return -1  # launch failed: sentinel for the caller

    proc = mgr.getCurrentProcess()
    payload = input + "\n"
    if DO_SYSCALL:
        # Wrap in a bytes triple-quote literal so no newline is added.
        proc.writeToBuf('b"""%s"""' % payload)
    else:
        proc.in_pipe.write(payload)

    try:
        mgr.cont()
    except KeyboardInterrupt:
        pass

    reply = proc.read(0x1000)
    # Output is framed as ",<digits>." — anything else is a protocol error.
    assert reply.startswith(b",") and reply.endswith(b".")
    return int(reply[1:-1])
def __init__(self, path_to_fuzzme: str):
    """Launch *path_to_fuzzme* under the ProcessManager, break at main, and
    run to the breakpoint."""
    super().__init__(path_to_fuzzme)
    # BUG FIX: original passed undefined name `path`; the constructor
    # argument is `path_to_fuzzme` (matching the sibling __init__ that
    # builds LaunchArguments([path_to_fuzzme], False)).
    args = LaunchArguments([path_to_fuzzme], False)
    self.manager = manager = ProcessManager(args, None)
    # Counter of child-exit signals observed but not yet handled.
    self.pending_SIGCHLDs = 0
    manager.addBreakpoint("b main")
    manager.cont()
    # Show where the tracee stopped (should be main's entry).
    print(manager.getCurrentProcess().where())
def __init__(self, launch_args: LaunchArguments, startupfile=None, inputsock=False):
    """Wire up the process manager, the pollable user-input queue, and its
    reader threads (stdin always; socket reader only when inputsock is set)."""
    poll = PaulaPoll()
    self.inputPoll = poll
    self.manager = ProcessManager(launch_args, poll)

    # All user input funnels through one pollable queue registered on the poll.
    queue = PollableQueue()
    self.stdinQ = queue
    poll.register(queue.fileno(), "userinput")

    self.reader_thread = InputReader(queue, startupfile)
    self.sock_reader = InputSockReader(queue) if inputsock else None

    # Populated lazily elsewhere.
    self.hyxTalker = None
    self._errmsg_suffix = ""
def __init__(self, log, config, max_log_time=MAX_TEMP_TIME if False else MAX_LOG_TIME, max_temp_time=MAX_TEMP_TIME):
    """Set up test-node state.

    log           -- logger, exposed both as testnode_log and log
    config        -- settings mapping; must provide 'working_directory'
    max_log_time  -- log retention window
    max_temp_time -- temp-file retention window
    """
    self.testnode_log = log
    self.log = log
    self.config = config or {}
    self.process_manager = ProcessManager(log)
    # BUG FIX: read through self.config (normalised to a dict above) instead
    # of the raw `config` argument — the `or {}` guard explicitly allows
    # config to be None, which would make config['working_directory'] raise
    # TypeError rather than a meaningful KeyError.
    self.working_directory = self.config['working_directory']
    self.node_test_suite_dict = {}
    self.file_handler = None
    self.max_log_time = max_log_time
    self.max_temp_time = max_temp_time
    self.url_access = "https://[0::0]:0123"  # Ipv6 + port of the node
def _packet_from_json(jsonobj):
    """Map one rtl_433 JSON record onto the telemetry packet schema.

    Subtype 49 adds wind direction and rainfall; subtype 56 adds temperature
    and humidity. Wind speed is read for every record.
    """
    pkt = dict()
    pkt['dateTime'] = jsonobj['time']
    pkt['model'] = jsonobj['model']
    pkt['id'] = jsonobj['id']
    pkt['channel'] = jsonobj['channel']
    pkt['battery'] = jsonobj['battery_ok']
    pkt['wind_speed'] = jsonobj['wind_avg_km_h']
    msg_type = jsonobj['subtype']
    if msg_type == 49:
        pkt['wind_dir'] = jsonobj['wind_dir_deg']
        pkt['rain_mm'] = jsonobj['rain_mm']
    elif msg_type == 56:
        pkt['temperature'] = jsonobj['temperature_C']
        pkt['humidity'] = jsonobj['humidity']
    return pkt


def main():
    """Bridge rtl_433 output to IoT Hub: poll decoded lines, convert each to
    a telemetry packet, and send every packet that differs from the previous
    one. Runs until interrupted with Ctrl-C."""
    # BUG FIX: bind `manager` before the try so the KeyboardInterrupt handler
    # cannot hit NameError when setup itself is interrupted.
    manager = None
    try:
        client = iothub_client_init()
        manager = ProcessManager()
        manager.start_process(DEFAULT_COMMAND)
        while True:
            lines = manager.get_stdout()
            for line in lines:
                prevMsg = None  # dedupe window resets for each stdout batch
                for txt in line:
                    pkt = _packet_from_json(json.loads(txt))
                    if pkt != prevMsg:
                        prevMsg = pkt
                        message = Message(json.dumps(pkt))
                        print(message)
                        print("Sending message...")
                        try:
                            client.send_message(message)
                        # BUG FIX: bare `except:` also swallowed SystemExit /
                        # KeyboardInterrupt; keep the best-effort send but
                        # catch only ordinary errors.
                        except Exception:
                            print("Sending failed")
                    # Throttle between records (placement inferred from the
                    # collapsed original — confirm against upstream history).
                    time.sleep(.1)
    except KeyboardInterrupt:
        if manager is not None:
            manager.stop_process()
def __init__(self, path_to_fuzzme: str):
    """Launch *path_to_fuzzme* under the ProcessManager, break at the
    target's `read` symbol, and run to the breakpoint."""
    super().__init__(path_to_fuzzme)
    args = LaunchArguments([path_to_fuzzme], False)
    self.manager = manager = ProcessManager(args, None)
    self.root_proc = manager.getCurrentProcess()
    # BUG FIX: original referenced undefined name `path`; the binary path
    # is the constructor argument `path_to_fuzzme`.
    self.root_proginfo = ProgramInfo(path_to_fuzzme, self.root_proc.getPid(), self.root_proc)
    # Stop just before the target consumes input.
    break_at = self.root_proginfo.getAddrOf("read")
    manager.addBreakpoint("b %d" % break_at)
    manager.cont()
    print(self.root_proc.where())
    # Map of input prefixes -> process snapshots; root process keyed by "".
    self.pref_dict = {"": self.manager.getCurrentProcess()}
    self.spawned_procs = 0
def MakeModeling(baseDir, dirName, outputFileName="", key="", pdfDir=None, mcscale=1.0, histName1="", histName2="", interactive=False, debug=False):
    """Build the histogram ProcessManager for two background-model inputs
    and, unless interactive, write the stacked result to *outputFileName*.

    Returns the configured ProcessManager.
    NOTE(review): pdfDir and debug are unused in this view — confirm intent.
    """
    # The manager performs the stacking of the models against data.
    job = ProcessManager()

    # Background modelling inputs: first file unscaled (black), second file
    # scaled by mcscale (yellow).
    for name, hist, color, scale in (
        ("dir1", histName1, ROOT.kBlack, 1.0),
        ("dir2", histName2, ROOT.kYellow, mcscale),
    ):
        job.AddProcess(name, file=baseDir + "/" + hist, dir=dirName, color=color, scale=scale)

    # Batch mode: persist the stacks to a freshly recreated ROOT file.
    if not interactive:
        out = TFile(outputFileName, "RECREATE")
        job.makeStacks(out, key)
    return job
# CLI: how many children to fork and how often to sleep/recover.
parser = ArgumentParser()
parser.add_argument("num_children")
parser.add_argument("sleepint")
args = parser.parse_args()
num_children = int(args.num_children)
sleep_every = int(args.sleepint)

# Print the process-count rlimit so fork failures are explainable.
print(resource.getrlimit(resource.RLIMIT_NPROC))

# Launch the demo target under the ptrace ProcessManager and run to main.
args = ["demo/vuln"]  # NOTE(review): rebinds `args` (was the parsed CLI namespace)
launch_args = LaunchArguments(args, random=False)
dummy_poll = PaulaPoll()
manager = ProcessManager(launch_args, dummy_poll)
root_proc = manager.getCurrentProcess()
root_proc.insertBreakpoint("main")
root_proc.cont()

# Timing bookkeeping for the fork benchmark that follows (beyond this view).
new_procs = []
time_dict = dict()
time_dict["start"] = default_timer()
#time_dict["launch"] = default_timer()
# root_proc.getrlimit(1)
#new_procs = [root_proc.forkProcess() for _ in range(num_children)]
RECOVER_EVERY = sleep_every
def t265_update():
    """Drain one pose sample from the T265 queue and publish field/robot
    odometry to the NetworkTables odometry table.

    Returns True when a sample was consumed, False when the queue was empty.
    """
    try:
        xyz_rpy_queue.get_nowait()
        # Shared array: assumed layout [x, y, z, roll, pitch, yaw] — TODO confirm.
        xyz_rpy = xyzrpy_value[0:6]
        # Planar update: x, y and yaw only.
        pose_tracker.update_t265_pose(xyz_rpy[0], xyz_rpy[1], xyz_rpy[5])
        for v, c in zip(pose_tracker.field_xyt, 'xyt'):
            odom_table.putNumber(f'field_{c}', v)
        for v, c in zip(pose_tracker.robot_xyt, 'xyt'):
            odom_table.putNumber(f'robot_{c}', v)
        return True
    except Empty:
        return False


# Supervise the T265 camera process; t265_update is polled as its callback.
t265_process_manager = ProcessManager(
    lambda: T265Process(xyz_rpy_queue, xyzrpy_value, encoder_v_queue),
    t265_update,
)
time.sleep(4)  # must have t265 launch first to work


def cv_update():
    # NOTE(review): this function continues beyond the visible chunk.
    global last_target_found_time
    try:
        # One vision sample per call.
        target_found, target_dis, target_relative_dir_left, \
            target_t265_azm, camera_xyt = target_queue.get_nowait()
        if not target_found:
            pose_tracker.clear_calibration()
            # Only declare the target lost after the hold window expires.
            if time.time() > last_target_found_time + Constants.HOLD_TARGET_TIME:
                odom_table.putBoolean('target_found', False)
    # Tail of the config loader (its `def` lies above this view):
    # report and parse the TOML config selected on the command line.
    print(f'Using config file {args.config}')
    config = toml.load(args.config)
    return config


config = process_args()

# Flask app with CORS; both Flask's handler and the root logger write to the
# configured log file with the configured format.
app = flask.Flask(__name__)
CORS(app)
flog.default_handler.setFormatter(logging.Formatter(config['log_format']))
logging.basicConfig(filename=config['log_file'],
                    format=config['log_format'],
                    level=logging.DEBUG)
print(f'Flask logger is redirected to {config["log_file"]}')

# Background jobs driving the hardware.
pm = ProcessManager()
pm.add_job('motor', spin_motors)
pm.add_job('servo', move_servo)


@app.route('/api/v1/healthcheck', methods=['GET'])
def healthcheck():
    # Liveness probe.
    return 'ok', 200


@app.route('/api/v1/wifistrength', methods=['GET'])
def wifi_strength():
    return check_wifi(), 200


# Handler body continues beyond this view.
@app.route('/api/v1/motor', methods=['POST'])
    # Tail of spam() (its `def` lies above this view): publish the list.
    spamQ.put(myList)


def eggs(eggsQ):
    # Simulate slow work, then report completion on the queue.
    time.sleep(5)
    eggsQ.put('I\'m done cooking eggs!')


def bacon(myString, baconQ):
    # Report the length of myString on the queue.
    x = len(myString)
    baconQ.put(myString + ' is ' + str(x) + ' chars long!')


if __name__ == '__main__':
    #Create ProcessManager object
    pManager = ProcessManager()
    #Create Process string IDs
    spamID = 'SPAM'
    eggsID = 'EGGS'
    baconID = 'tasty'
    # One result queue per worker.
    spamOut = mp.Queue()
    eggsOut = mp.Queue()
    stringyOut = mp.Queue()
    #Start processes
    groceryList = ['milk', 'cheese', 'and for dessert:']
    #Create spam process
    pSpam = mp.Process(target=spam, args=(3.14, groceryList, spamOut))
    pSpam.start()
    #pManager.startProcess(spamID, spam, (3.14, groceryList), (spamOut,))
    #Create eggs process  (continues beyond this view)
    pEggs = mp.Process(target=eggs, args=(eggsOut, ))
        # Tail of read_args() (its `def` lies above this view): a matching
        # scheduler name was found, so build round-robin with the quantum.
        rr = build_round_robin(args.quantum, sc)
        schedulers.append(rr)
        scheduler_found = True
        break
    # Fall back to FIFO when no scheduler was recognised.
    args.scheduler = schedulers or [fifo]
    return args


if __name__ == "__main__":
    args = read_args()
    # Build the process table from the input file; every process shares the
    # configured I/O time.
    process_list = [Process(*p) for p in read_process(args.filename)]
    for p in process_list:
        p.time_io = args.time_io
    process_manager = ProcessManager(process_list)
    process_manager.enable_log(args.verbose)
    # Run the full workload once under each requested scheduler.
    for sc in args.scheduler:
        process_manager.scheduler = sc
        process_manager.run()
    # Interactive mode: step the simulation one clock per prompt.
    if args.iterative:
        process_manager.enable_log(True)
        process_manager.begin()
        while True:
            cmd = input(">")  # NOTE(review): cmd unused in the visible code
            process_manager.execute()
            process_manager.next_clock()