def array_process():
    """Demonstrate producer/consumer communication through a Manager Queue.

    The parent process creates the Queue and hands the proxy to the pool
    workers; plain multiprocessing.Queue cannot be passed to Pool workers,
    which is why a Manager-backed queue is used here.
    """
    manager = Manager()
    # Parent process creates the Queue and passes it to each child process.
    # FIX: the original called manager.Array() with no arguments, which raises
    # TypeError (Array needs a typecode and a sequence); the surrounding code
    # and original comment clearly intend a Queue.
    q = manager.Queue()
    p = Pool()
    pw = p.apply_async(write, args=(q,))
    # Give the writer a head start before the reader attaches.
    time.sleep(0.5)
    pr = p.apply_async(read, args=(q,))
    p.close()
    p.join()
def __init__(self, img_in_path, col_images_path, n_pic):
    """Prepare input image, output canvas, and tile library for a photo collage.

    Args:
        img_in_path: path of the image to rebuild out of tiles.
        col_images_path: directory containing candidate tile images.
        n_pic: maximum number of tile images to load.
    """
    self.img_in = Image.open(img_in_path)  # load input image
    n_in, m_in = self.img_in.size  # size of input picture
    self.n_col, self.m_col = 28, 28  # size of the collage tile images
    # Crop-resize so both dimensions are exact multiples of the tile size.
    self.img_in = self.img_in.resize((n_in // self.n_col * self.n_col,
                                      m_in // self.m_col * self.m_col))
    n_new, m_new = self.img_in.size  # size of the resized image
    self.img_out = np.array(Image.new(size=(n_new, m_new), mode="RGB"))  # output canvas
    col_images_list = os.listdir(col_images_path)
    # Tile library converted to Lab color space for perceptual matching.
    self.col_images_array = [color.rgb2lab(np.array(Image.open(f"{col_images_path}/{im}")))
                             for im in col_images_list][:n_pic]
    manager = Manager()
    # FIX: the original `manager.Array(ctypes.c_double)` was missing the
    # required initializer sequence, and the following line
    # (`self.img_in.np.array(self.img_in))`) was a syntax error.  The apparent
    # intent is to expose the image pixels as a shared double array so worker
    # processes can read them -- TODO confirm consumers expect a flat layout.
    img_data = np.asarray(self.img_in, dtype=np.float64).ravel()
    self.img_in = manager.Array(ctypes.c_double, img_data)
    self.n_new, self.m_new = n_new, m_new
class TestManager:
    """Drive a set of testers through lock-stepped steps, one process each.

    Synchronization protocol (all primitives are Manager proxies so they can
    cross process boundaries):
      * ``process_done`` -- counting semaphore released by a tester when it
        finishes a step; the manager acquires it once per tester it waits for.
      * one ``next_step`` semaphore per tester -- released by the manager to
        let that tester proceed to its next step.
      * ``queue`` -- carries (tester_id, name, status) results back; status is
        True (continue), False-y (done), or an exception instance (abort all).
      * ``sub_proc`` -- PIDs of helper subprocesses to kill on abort.
    """

    def __init__(self):
        self.manager = Manager()
        self.lock = self.manager.Lock()
        # Starts at 0; released once per tester before each step begins.
        self.process_done = self.manager.Semaphore(0)
        self.queue = self.manager.Queue()
        self.sub_proc = self.manager.Queue()
        self._setup()

    def _setup(self):
        # Per-run bookkeeping, reset here so the manager could be reused.
        self.testers = []
        self.next_steps = []
        self.proc_ids = []
        self.subprocToKill = []

    def add_tester(self, tester):
        # Register a tester; processes are only created in start_processes().
        self.testers.append(tester)

    def start_processes(self, rand_sleep):
        """create process for each tester"""
        # Shared array of child PIDs ('l' = signed long), indexed by tester id.
        self.pids = self.manager.Array('l', range(len(self.testers)))
        for id in range(len(self.testers)):
            # Pre-release so the first wait loop in run() does not block.
            self.process_done.release()
            # Private "go" semaphore for this tester.
            next_s = self.manager.Semaphore(0)
            p = Process(target=self.testers[id].run,
                        args=(self.process_done, next_s, rand_sleep, self.lock,
                              self.sub_proc, self.pids, id, self.queue))
            self.proc_ids.append(p)
            self.next_steps.append(next_s)
            p.start()
            self.pids[id] = p.pid

    def wait_for_processes(self):
        """wait for all process to finish"""
        for p in self.proc_ids:
            p.join()
            p.terminate()
        # Acquire/release pairs with any tester still holding the lock.
        self.lock.acquire()
        self.lock.release()

    def run(self, rand_sleep=True):
        """Execute tester steps"""
        self.start_processes(rand_sleep)
        step = -1  # -1 is the warm-up pass before any results are read
        will_continue = range(len(self.next_steps))
        wait_for = range(len(self.next_steps))
        while True:
            if step >= 0:
                print("\n\n=================== TestManager step", step,
                      "testers:", wait_for, file=sys.stderr)
            # Wait for every tester we released last round to report in.
            for _ in wait_for:
                self.process_done.acquire()
                if step >= 0:
                    proc, name, status = self.queue.get()
                    print(("Received ", proc, name, status), file=sys.stderr)
                    if status == True:
                        # Tester has more steps; schedule it for the next round.
                        will_continue.append(proc)
                    elif isinstance(status, BaseException):
                        # A tester failed: tear everything down, then re-raise.
                        print("Error in tester", proc, name, "step", step)
                        for p in self.proc_ids:
                            p.terminate()
                        # Kill any helper subprocesses the testers spawned.
                        while not self.sub_proc.empty():
                            pid = self.sub_proc.get()
                            try:
                                os.kill(pid, signal.SIGKILL)
                            except:
                                # Process may already be gone; best-effort kill.
                                pass
                        raise status
            if len(will_continue) == 0:
                break
            # Release exactly the testers that asked to continue.
            for id in will_continue:
                self.next_steps[id].release()
            wait_for = will_continue[:]
            will_continue = []
            step += 1
        self.wait_for_processes()
def func1(shareList, shareValue, shareDict, lock):
    """Under the lock: bump the shared counter, seed two dict entries,
    and add one to every element of the shared list."""
    with lock:
        shareValue.value += 1
        shareDict[1] = '1'
        shareDict[2] = '2'
        for idx, item in enumerate(shareList):
            shareList[idx] = item + 1


if __name__ == '__main__':
    # All shared state lives in one Manager server process; the workers
    # receive proxies and mutate the same underlying objects.
    manager = Manager()
    list1 = manager.list([1, 2, 3, 4, 5])
    dict1 = manager.dict()
    array1 = manager.Array('i', range(10))
    value1 = manager.Value('i', 1)
    lock = manager.Lock()

    # Twenty workers all hammer the same shared objects; the lock in func1
    # serializes their updates.
    proc = []
    for _ in range(20):
        proc.append(Process(target=func1, args=(list1, value1, dict1, lock)))

    for p in proc:
        p.start()
    for p in proc:
        p.join()

    print(list1)
    print(dict1)
    print(array1)
    print(value1)
        # Tail of a try/except whose opening statement lies outside this
        # excerpt: on Ctrl+C (or any other error) persist the collected data
        # and leave the enclosing loop.
        except KeyboardInterrupt:
            # save dataframe to csv file before exiting
            data_saver.save_data_to_csv()
            break
        except:
            # NOTE(review): bare except also saves on ANY other error but
            # swallows the exception type -- consider logging it.
            data_saver.save_data_to_csv()
            break


if __name__ == '__main__':
    use_ros = True  # Simulation
    use_input_traj = False  # control from tk GUI
    manager = Manager()
    # Shared 12-float vector laid out as: Kp (3), Kd (3), xyz (3), RPY (3).
    shared_variables = manager.Array('f', [1,1,1, 0,0,0, 0,0,0.425, 0,0,0 ])  # Kp,Kd, xyz, RPY
    # UNCOMMENT BELOW VARIABLES IF YOU WANT TO RECORD
    shared_q_cur = manager.dict()      # current joint positions, keyed by time
    shared_q_des = manager.dict()      # desired joint positions
    shared_q_dot_cur = manager.dict()  # current joint velocities
    shared_q_dot_des = manager.dict()  # desired joint velocities
    shared_t = manager.Value('d', 0)   # shared simulation clock
    process_control = Process(target = control_main,
                              args = [shared_variables, use_ros,use_input_traj,
                                      shared_t, shared_q_cur, shared_q_des,
                                      shared_q_dot_cur, shared_q_dot_des  #uncomment if you want to record data
                                      ])
    process_GUI = Process(target = tk_reconfigure_xyz_rpy, args = [shared_variables])
    process_save_data = Process(target = saving_data,
                                args = [shared_t, shared_q_cur, shared_q_des,
                                        shared_q_dot_cur, shared_q_dot_des])
    process_control.start()
    # GUI is only used when not driven by ROS.
    # NOTE(review): process_save_data is created but never started here --
    # presumably started elsewhere or intentionally disabled; verify.
    if not use_ros:
        process_GUI.start()
def main(): ## Use argparse to pass options/variables to main process and child processes (Used parts of the code on http://www.pyimagesearch.com/2015/09/14/ball-tracking-with-opencv/ as reference for the argument parser) # Add default arguments ap = argparse.ArgumentParser() ap.add_argument( "-d", "--daemon", help="daemonize program", type=bool, default=False ) ## no gui is loaded, if this is set the program can be run from the commandline without X ap.add_argument("-D", "--debug", help="Enable general program debugging", type=bool, default=False) ap.add_argument("-DS", "--sdebug", help="Enable sensor debugging", type=bool, default=False) ap.add_argument("-DC", "--cdebug", help="Enable camera debugging", type=bool, default=False) ap.add_argument("-cw", "--cwidth", help="Camera Resolution width", type=int, default=320) ap.add_argument("-ch", "--cheight", help="Camera Resolution height", type=int, default=240) ap.add_argument("-pw", "--pwidth", help="Processing Resolution width", type=int, default=320) ap.add_argument("-ph", "--pheight", help="Processing Resolution height", type=int, default=240) ap.add_argument("-b", "--buffer", help="tracing buffer (red line in gui tracing object)", type=int, default=64) args = vars(ap.parse_args()) # Print settings (arguments) on startup print "Starting %s %s" % (str(pName), str(pVersion)) print "-=====[ Options ]=====-" print "--daemon: %s" % str(args["daemon"]) print "--debug: %s" % str(args["debug"]) print "--sdebug: %s" % str(args["sdebug"]) print "--cdebug: %s" % str(args["cdebug"]) print "--cwidth: %s" % str(args["cwidth"]) print "--cheight: %s" % str(args["cheight"]) print "--pwidth: %s" % str(args["pwidth"]) print "--pheight: %s" % str(args["pheight"]) print "--buffer: %s" % str(args["buffer"]) # Read print time.sleep(2) ## Init Data arrays for trasferring data over smp threads (somewhat simple IPC) # Init dataManager dataManager = Manager( ) ## Use Manager from multiprocessing to synchronize data objects (basically pass 
proxies to threads instead of actual arrays, the server is in the parent program) # Array objects, used for transporting data using proxies to communicate between threads (IPC part) UltrasoneDataOut = dataManager.Array('f', range(4)) ImageProcessorDataOut = dataManager.Array('f', range(9)) PerIntelDataIn = dataManager.Array('f', range(7)) PerIntelDataOut = dataManager.Array('f', range(7)) ## Initialize and start processes # UltrasoneThread ultra = Ultrasone.Ultrasone( args) # Create ultra object from Ultrasone class with args ultrasoneThread = Process( target=ultra.run, args=(UltrasoneDataOut, ) ) # Create ultrasoneThread to run function "run" of Ultrasone.Ultrasone. Parse UltrasoneDataOut proxy to store values ultrasoneThread.start() # Now start the thread! # CameraThread imageProcessor = ImageProcessor.ImageProcessor( args ) # Create imageProcessor object from imageProcessor class with args imageProcessorThread = Process( target=imageProcessor.run, args=(ImageProcessorDataOut, ) ) # Create imageProcessorThread to run function "run" of imageProcessor.ImageProcessor. Parse ImageProcessorDataOut proxy to store values imageProcessorThread.start() # Now start this thread also! # PerimeterIntelThread perIntel = PerimeterIntel.PerimeterIntel( args) # Create perIntel object from PerimeterIntel class with args perIntelThread = Process( target=perIntel.run, args=( PerIntelDataIn, PerIntelDataOut, ) ) ## Create PerIntelThread thread to run function "run" of PerimeterIntel.PerimeterIntel. Pass PerIntelDataIn and PerIntelDataOut proxies to process and store values perIntelThread.start() # Fire! ## Timing settings for displaying statistics on console and transferring data to proxies # Last time the display was updated lastDisplayUpdate = 0 # Last time in us that the proxies were updated. 
lastDataUpdate = 0 # Print stats every 500ms displayUpdateSpeed = 0.500 # Update data proxies every 10ms dataUpdateSpeed = 0.01 ## Main loop try: while True: ## Main loop for displaying statistics and data handling transmissions between threads time.sleep(0.001) ## avoiding flooding the CPU with the while loop # Datahandler - Will fire directly and everytime lastDataUpdate + 0.01 us is lower then curent epoch time.. if lastDataUpdate + dataUpdateSpeed < time.time(): # Get current epoch time in us lastDataUpdate = time.time() # Pass data from various sources to PerIntelDataIn: PerIntelDataIn[0] = UltrasoneDataOut[ 0] # Left Ultrasone sensor values PerIntelDataIn[1] = UltrasoneDataOut[ 1] # Front Ultrasone sensor values PerIntelDataIn[2] = UltrasoneDataOut[ 2] # Right Ultrasone sensor values PerIntelDataIn[3] = ImageProcessorDataOut[4] # Object detected PerIntelDataIn[4] = ImageProcessorDataOut[5] # Object radius PerIntelDataIn[5] = ImageProcessorDataOut[ 6] # Object location X PerIntelDataIn[6] = ImageProcessorDataOut[ 7] # Object location Y ## make sure not to delay the main loop, sure i could use sleep but that's a loss of cycles.... but you allready knew that. if lastDisplayUpdate + displayUpdateSpeed < time.time( ): ## make sure not to delay the main loop, sure i could use sleep but that's a loss of cycles.... but you allready knew that. lastDisplayUpdate = time.time( ) # Update lastDisplayUpdate with the current epoch time in us # Print statistics. 
print "lastDisplayUpdate: %s next: %s)" % (lastDisplayUpdate, ( time.time() + displayUpdateSpeed)) print "BOT Statistics:" print "Ultrasone distance L:%i cm F:%s cm R:%i cm" % (int( UltrasoneDataOut[0]), int( UltrasoneDataOut[2]), int(UltrasoneDataOut[1])) print "Ultrasone samples per second: %s" % ( ((1 / UltrasoneDataOut[3]) * 3) * 2 ) ## 1/looptime *3 (amount of samples per run) *2 (amount of sensors) = samples per second print "Camera Resolution: %sx%s" % (ImageProcessorDataOut[0], ImageProcessorDataOut[1]) print "Image Processing Resolution: %sx%s" % ( ImageProcessorDataOut[2], ImageProcessorDataOut[3]) print "Image Processing speed: %s fps" % ( 1 / ImageProcessorDataOut[8] ) ## 1/looptime = opencv thread fps print "Obstructions: LEFT:%s FRONT:%s RIGHT:%s" % ( PerIntelDataOut[0], PerIntelDataOut[1], PerIntelDataOut[2]) print "" print "Object Statistics:" print "Object detected: %s" % ImageProcessorDataOut[4] print "Object Radius: %s" % ImageProcessorDataOut[5] print "Object Location: x:%s y:%s" % (ImageProcessorDataOut[6], ImageProcessorDataOut[7]) ## Try to handle and cleanup incase someone hits CTRL+C except KeyboardInterrupt: print "CTRL+C Shutting down..." sensorThread.join() imageProcessorThread.join() perIntelThread.join()
if args.number: NUMBER_DOIS = args.number data = readDOIsFile(FILENAME) dois_list = data.split("\n") dois_list = subsetDOIs(dois_list, NUMBER_DOIS) num_cores = multiprocessing.cpu_count() # num_cores = 1 # executing metrics evaluations # setting up multi process/progressbars maxrows = num_cores + 1 m = Manager() p = Pool(num_cores, initializer=multi_progress.init, initargs=(RLock(), m.Lock())) position_holders = m.Array('i', [0] * maxrows) list_results = [] metrics_info = [] metrics = test_metric.getMetrics() metrics_pbar = tqdm(metrics, total=len(metrics), position=1) for metric in metrics: principle = metric["principle"].rsplit('/', 1)[-1] metrics_pbar.update(1) metrics_pbar.set_description('Retrieving [' + principle + '] metric informations...') metrics_info.append( test_metric.processFunction(test_metric.getMetricInfo, [metric["@id"]],