Exemple #1
0
    def run(self, input_file, output_file):
        """Run the full pipeline: a preprocess reader, one model worker per
        configured GPU, and a postprocess writer, then wait for all of them.

        :param input_file: path of the file to read (str)
        :param output_file: path of the file to write (str)
        """
        assert (type(output_file) == str)
        assert (type(input_file) == str)

        self._init()
        self.config.input_file = input_file
        self.config.output_file = output_file

        file_reader = Process(target=self._engine_preprocess,
                              args=(input_file, ))
        # fix: 'deamon' -> 'daemon' (the typo created a harmless extra
        # attribute; the process was never actually daemonized)
        file_reader.daemon = True
        file_reader.start()

        runtime_list = []
        for num_gpu in self.config.gpu:
            runtime = Process(target=self._engine_model, args=(num_gpu, ))
            runtime.daemon = True
            runtime.start()
            runtime_list.append(runtime)
        file_writer = Process(target=self._engine_postprocess,
                              args=(output_file, ))
        file_writer.daemon = True
        file_writer.start()

        # Terminate: join every model worker, not just the last one created.
        file_reader.join()
        for runtime in runtime_list:
            runtime.join()
        file_writer.join()
        self.logger.warning("ALL JOBS DONE")
def main():
    """Prompt for a summoner name/platform, start the sync manager and HTTP
    listener, seed the request queue, and run the scenario manager until
    interrupted."""
    global request_queue
    summoner_name = ''
    while len(summoner_name) == 0:
        summoner_name = input('Summoner Name: ')

    platform_slug = input('Platform slug (default=na1): ')
    if len(platform_slug) == 0:
        platform_slug = 'na1'
    platform_slug = platform_slug.strip().lower()

    start_sync_manager()

    with open(config_path, 'r') as f:
        config = json.loads(f.read())

    server = http.server.HTTPServer((config['server']['host'], config['server']['port'] + 1),
                                    MyResponseListener)

    summoner_by_name = 'https://{slug}.api.riotgames.com/lol/summoner/v3/summoners/by-name/{name}'
    start_url = summoner_by_name.format(slug=platform_slug, name=summoner_name)
    request_queue.put((start_url, "start"))

    scenario_args = (config, platform_slug, request_queue, response_queue,
                     gamemode_summaries, champion_summaries, requested_matches)
    sm = Process(target=scenario_manager, args=scenario_args, name='Scenario_Manager')
    # fix: 'deamon' -> 'daemon' (typo; the flag was never actually set)
    sm.daemon = True
    sm.start()

    try:
        server.serve_forever()
    except KeyboardInterrupt:
        print('Stopping scenario...')
Exemple #3
0
def fork_train_process():
    """Create a daemon process that receives training messages from the main
    process, queues them, and trains the models one by one.

    Returns the started consumer process.
    """
    logger = logging.getLogger("ProcessTrain")

    def handle_message(msg):
        """Handle one message sent by the main process.

        Args:
            msg (dict): message to process; carries robot_code and version.
        """
        # Train the NLU model
        train_robot(**msg)
        # Notify the main process to reload the model
        internal_push_nlu(**msg)
        # Notify the Xiaoyu platform that training succeeded
        notify_training_complete(**msg)

    def consummer():
        """Queue consumer: handle messages forever, logging any failure."""
        while True:
            msg = global_queue.get()
            try:
                handle_message(msg)
            except Exception:
                logger.error(msg)
                logger.error(traceback.format_exc())

    process = Process(target=consummer)
    # fix: 'deamon' -> 'daemon' (typo; the flag was never actually set)
    process.daemon = True
    process.start()
    return process
Exemple #4
0
 def upload(self):
     """Walk every configured sync directory and upload its files, skipping
     excluded glob patterns; on POSIX each upload runs in a subprocess."""
     if self.sync_dirs:
         for sync_dir in self.sync_dirs:
             sync_dir = sync_dir.strip(".")
             for dirName, subdirList, fileList in os.walk(
                     "%s%s" % (self.base, sync_dir)):
                 thisdir = "".join(dirName.rsplit(self.base)).strip("/")
                 fileList = [
                     os.path.join(dirName, filename)
                     for filename in fileList
                 ]
                 if self.excludes:
                     for ignore in self.excludes:
                         fileList = [
                             n for n in fileList
                             if not fnmatch.fnmatch(n, ignore)
                         ]
                 count = 0
                 for fname in fileList:
                     dest_key = self.generate_dest_key(fname, thisdir)
                     if os.name != 'nt':
                         rv = Process(target=self.skip_or_send,
                                      args=(fname, dest_key))
                         # fix: 'deamon' -> 'daemon' (typo; never took effect)
                         rv.daemon = True
                         rv.start()
                         # Throttle: wait for one worker every 20 files.
                         if count % 20 == 0:
                             rv.join()
                     else:
                         self.skip_or_send(fname, dest_key)
                     count += 1
                 # Wait for the last spawned worker, if any were spawned.
                 if 'rv' in vars():
                     rv.join()
def run_interactive_learning(stories: Text = None,
                             finetune: bool = False,
                             skip_visualization: bool = False,
                             server_args: Dict[Text, Any] = None,
                             additional_arguments: Dict[Text, Any] = None):
    """Start the interactive learning with the model of the agent."""

    server_args = server_args or {}

    if not skip_visualization:
        p = Process(target=start_visualization, args=("story_graph.dot", ))
        # fix: 'deamon' -> 'daemon' (typo; the flag was never actually set)
        p.daemon = True
        p.start()
    else:
        p = None

    app = run.configure_app(enable_api=True)
    endpoints = AvailableEndpoints.read_endpoints(server_args.get("endpoints"))

    # before_server_start handlers make sure the agent is loaded before the
    # interactive learning IO starts
    if server_args.get("core"):
        app.register_listener(
            partial(run.load_agent_on_start, server_args.get("core"),
                    endpoints, server_args.get("nlu")), 'before_server_start')
    else:
        app.register_listener(
            partial(train_agent_on_start, server_args, endpoints,
                    additional_arguments), 'before_server_start')

    _serve_application(app, stories, finetune, skip_visualization)

    if not skip_visualization:
        p.terminate()
        p.join()
Exemple #6
0
def main(cfg):
    """Start the capture pipeline: frame collector, frame parsers, and the
    live view / DB collector, then wait for parsers and viewer to finish.

    :param cfg: configuration mapping with 'general', 'dbconf' and 'monitor'
        sections.
    """
    # LIVE CONFIGURATION - WHAT TO RECORD
    extras = retrieveRTPMonitored()

    mmanage = multiprocessing.Manager()
    statsDict = mmanage.dict()

    framesQueue = Queue()

    collectorProc = Process(target=startCollector, args=(framesQueue, cfg['general']['interface']))
    collectorProc.daemon = True
    collectorProc.start()
    print("[", collectorProc.pid, "] Started main thread")
    time.sleep(3)

    viewStoreProc = Process(target=viewAndStore, args=(statsDict, cfg['dbconf']['dbfile'],))

    parsersProc = Process(target=AnalyzerThreads, args=(cfg['general']['num_of_processes'], framesQueue, statsDict, mmanage, cfg['monitor']['ports']))
    # fix: 'deamon' -> 'daemon', matching collectorProc above
    parsersProc.daemon = True
    parsersProc.start()
    print("[", parsersProc.pid, "] Started frame parsers")
    viewStoreProc.start()
    print("[", viewStoreProc.pid, "] Started live view stats and dbcollector")

    parsersProc.join()
    viewStoreProc.join()
Exemple #7
0
 def _transform_parallel(self):
     """Apply `self.transform` to the shared training array in parallel,
     partitioning rows across worker processes."""
     # fix: when self.num_workers was set, the local `num_workers` was never
     # bound and the code below raised NameError.
     if self.num_workers is None:
         num_workers = multiprocessing.cpu_count()
     else:
         num_workers = self.num_workers
     # partition the data
     shape = self.train_data.shape
     row_num = shape[0]  # suppose dimension as N.C.W.H
     rows_per_worker = ceil(row_num / num_workers)
     workers = []
     index_list = list(range(0, row_num, rows_per_worker))
     for n, start in enumerate(index_list):
         if n == len(index_list) - 1:
             end = row_num
         else:
             end = index_list[n + 1]
         args = (self._shm_train_raw, shape, (start, end), self.transform)
         a = np.ctypeslib.as_array(self._shm_train_raw.get_obj())
         a = a.reshape(self.train_data.shape)
         proc = Process(target=_transform_proc, args=args)
         # fix: 'deamon' -> 'daemon' (typo; the flag was never actually set)
         proc.daemon = True
         proc.start()
         workers.append(proc)
     # wait for all subprocess finish
     for proc in workers:
         proc.join()
     # reshape
     self._shm_train_nda = self._shm_train_nda.reshape(self.final_shape)
Exemple #8
0
 def calculate_quality_list(self, voi, gantry, couch, calculate_from=0, stepsize=1.0, avoid=None, gradient=True):
     """Compute angle-quality results for every gantry angle in parallel.

     fix: `avoid` default changed from a shared mutable `[]` to None
     (backward compatible); daemon flag is now set before start() and with
     the correct spelling ('deamon' was a typo).
     """
     avoid = [] if avoid is None else avoid
     q = Queue(32767)
     process = []
     d = voi.get_voi_cube()
     d.cube = np.array(d.cube, dtype=np.float32)
     voi_cube = DensityProjections(d)
     result = []
     for gantry_angle in gantry:
         p = Process(
             target=self.calculate_angle_quality_thread,
             args=(voi, gantry_angle, couch, calculate_from, stepsize, q, avoid, voi_cube, gradient))
         # daemon must be set before start(); setting it after raises/ignores
         p.daemon = True
         p.start()
         process.append(p)
         if len(process) > 2:
             tmp = q.get()
             result.append(tmp)
             # fix: don't remove items from a list while iterating it
             process = [p for p in process if p.is_alive()]
     while not len(result) == len(gantry) * len(couch):
         tmp = q.get()
         result.append(tmp)
     return result
        def start_conn_process():
            """Build the DUT connection config and spawn the communication
            process; returns the started Process."""
            # Create device info here as it may change after restart.
            config = {
                "digest" : "serial",
                "port" : self.mbed.port,
                "baudrate" : self.mbed.serial_baud,
                "program_cycle_s" : self.options.program_cycle_s,
                "reset_type" : self.options.forced_reset_type,
                "target_id" : self.options.target_id,
                "serial_pooling" : self.options.pooling_timeout,
                "forced_reset_timeout" : self.options.forced_reset_timeout,
                "sync_behavior" : self.options.sync_behavior,
                "platform_name" : self.options.micro,
                "image_path" : self.mbed.image_path,
            }

            if self.options.global_resource_mgr:
                grm_module, grm_host, grm_port = self.options.global_resource_mgr.split(':')

                config.update({
                    "conn_resource" : 'grm',
                    "grm_module" : grm_module,
                    "grm_host" : grm_host,
                    "grm_port" : grm_port,
                })

            # DUT-host communication process
            args = (event_queue, dut_event_queue, config)
            p = Process(target=conn_process, args=args)
            # fix: 'deamon' -> 'daemon' (typo; the flag was never actually set)
            p.daemon = True
            p.start()
            return p
 def start_conn_process():
     """Spawn the DUT-host communication process and return it."""
     # DUT-host communication process
     args = (event_queue, dut_event_queue, config)
     p = Process(target=conn_process, args=args)
     # fix: 'deamon' -> 'daemon' (typo; the flag was never actually set)
     p.daemon = True
     p.start()
     return p
Exemple #11
0
        def start_conn_process():
            """Build the DUT connection config and spawn the communication
            process; returns the started Process."""
            # Create device info here as it may change after restart.
            config = {
                "digest": "serial",
                "port": self.mbed.port,
                "baudrate": self.mbed.serial_baud,
                "program_cycle_s": self.options.program_cycle_s,
                "reset_type": self.options.forced_reset_type,
                "target_id": self.options.target_id,
                "serial_pooling": self.options.pooling_timeout,
                "forced_reset_timeout": self.options.forced_reset_timeout,
                "sync_behavior": self.options.sync_behavior,
                "platform_name": self.options.micro,
                "image_path": self.mbed.image_path,
            }

            if self.options.global_resource_mgr:
                grm_module, grm_host, grm_port = self.options.global_resource_mgr.split(
                    ':')

                config.update({
                    "conn_resource": 'grm',
                    "grm_module": grm_module,
                    "grm_host": grm_host,
                    "grm_port": grm_port,
                })

            # DUT-host communication process
            args = (event_queue, dut_event_queue, config)
            p = Process(target=conn_process, args=args)
            # fix: 'deamon' -> 'daemon' (typo; the flag was never actually set)
            p.daemon = True
            p.start()
            return p
 def calculate_quality_list(self,
                            voi,
                            gantry,
                            couch,
                            calculate_from=0,
                            stepsize=1.0,
                            avoid=None,
                            gradient=True):
     """Compute angle-quality results for every gantry angle in parallel.

     fix: `avoid` default changed from a shared mutable `[]` to None
     (backward compatible); daemon flag is now set before start() and with
     the correct spelling ('deamon' was a typo).
     """
     avoid = [] if avoid is None else avoid
     q = Queue(32767)
     process = []
     d = voi.get_voi_cube()
     d.cube = np.array(d.cube, dtype=np.float32)
     voi_cube = DensityProjections(d)
     result = []
     for gantry_angle in gantry:
         p = Process(target=self.calculate_angle_quality_thread,
                     args=(voi, gantry_angle, couch, calculate_from,
                           stepsize, q, avoid, voi_cube, gradient))
         # daemon must be set before start()
         p.daemon = True
         p.start()
         process.append(p)
         if len(process) > 2:
             tmp = q.get()
             result.append(tmp)
             # fix: don't remove items from a list while iterating it
             process = [p for p in process if p.is_alive()]
     while not len(result) == len(gantry) * len(couch):
         tmp = q.get()
         result.append(tmp)
     return result
Exemple #13
0
def worker_proc(tur_arg=None, authdata=None, sleeper=None, multipools=None, work=None):
    """Spawn `multipools` uploader processes and enqueue one sentinel (None)
    per worker so each consumer knows when to stop.

    NOTE: this snippet is Python 2 code (`xrange`); kept as-is.
    """
    for wp in range(multipools):
        j = Process(target=uploader.UploadAction, args=(tur_arg, authdata, work))
        # fix: 'deamon' -> 'daemon' (typo; the flag was never actually set)
        j.daemon = True
        j.start()

    for i in xrange(multipools):
        work.put(None)
Exemple #14
0
    def export_bodies(self, folder, width=None, height=None, cX=None, cY=None, workers=5):
        """Parallel Implementation of bodies Export.

        Splits the video into `workers` chunks, exports each chunk's frames
        as JPEGs in a subprocess, then saves the JSON metadata.
        """
        file_format = "{:09d}_{:09d}.jpg"
        video_name, ext = os.path.splitext(self.video_name)

        os.makedirs(folder, exist_ok=True)
        img_folder = os.path.join(folder, video_name)
        # cvs_filename = os.path.join(folder, "dataset.csv")
        json_filename = os.path.join(folder, video_name + ".json")
        os.makedirs(img_folder, exist_ok=True)

        body_info = []

        video_size = len(self)
        chunksize = video_size // workers
        lock = Lock()

        ws = list()
        pbar = list()

        # First workers-1 chunks are equal-sized ...
        for w in range(workers - 1):
            start = w * chunksize
            end = (w + 1) * chunksize
            pbar.append(tqdm(total=chunksize))

            p = Process(target=process_video, args=(self[start:end], self.video_path, start, end, img_folder, file_format, lock, pbar[w]))
            # fix: 'deamon' -> 'daemon' (typo; the flag was never actually set)
            p.daemon = True
            ws.append(p)
            p.start()

        # ... the last chunk takes whatever rows remain.
        start = (workers - 1) * chunksize
        end = len(self)
        pbar.append(tqdm(total=end - start))
        p = Process(target=process_video, args=(self[start:end], self.video_path, start, end, img_folder, file_format, lock, pbar[workers - 1]))
        p.daemon = True
        ws.append(p)
        p.start()

        for w in ws:
            w.join()
        self.save(json_filename)
        return
Exemple #15
0
	def confirm(self):
		"""Read both input fields and launch the communication process."""
		v = [self.block1.get(), self.block2.get()]
		# main(v[0], v[1])
		p = Process(
			target=self.commu.decor, args=(new_mian, v[0], v[1], "10.1")
		)
		# fix: 'deamon' -> 'daemon' (typo; the flag was never actually set)
		p.daemon = True
		p.start()
		self.commu.process_communication(self.major_msgframe)
Exemple #16
0
def bus_watcher():
    """Start a one-shot background notifier for a bus-arrival request and
    return a confirmation message."""
    data = request.json
    route = data['route']
    dep_station = data['dep_station']
    at = data['at']

    t = Process(target=notify_rt_oneshot, args=(route, dep_station, at))
    # fix: 'deamon' -> 'daemon' (typo; the flag was never actually set)
    t.daemon = True
    t.start()

    return "Enabled notify for bus %s from %s when it's at %s" % (
        route, dep_station, at)
Exemple #17
0
    def __init__(self):
        """Boot the hexapod; if a PS3 pad is connected, start joystick
        control in a background process first."""
        print("Making Hexapod ready, please wait..")
        ps3 = PlaystationService()
        if ps3.ps3Connected:
            p2 = Process(target=ps3.joystickcontrole)
            # fix: 'deamon' -> 'daemon' (typo; the flag was never actually set)
            p2.daemon = True
            p2.start()

        hexapod = Hexapod()
        hexapod.boot_up()
        time.sleep(2)
        print("Hexapod is ready to go.")
Exemple #18
0
def spawnProcesses(nbr, target, args):
    """Create (but do not start) `nbr` daemon Process objects for `target`.

    NOTE: because of the __main__ guard, processes are only created when the
    defining module runs as a script; when imported, an empty list is
    returned. This mirrors the original behavior.
    """
    processes = []
    if __name__ == '__main__':
        for x in range(nbr):
            p = Process(target=target, args=args)
            # fix: 'deamon' -> 'daemon' (typo; the flag was never actually set)
            p.daemon = True
            processes.append(p)

    return processes
Exemple #19
0
    def __init__(self):
        """Connect to MQTT and, if a PS3 pad is present, start joystick
        control; on Ctrl-C terminate the MQTT process."""
        print("Making TubberCar ready, please wait..")
        try:
            mqtt = MqttService()
            p1 = Process(target=mqtt.connectandsubscribe,
                         args=(
                             self.subscribeTB,
                             'TubberCar',
                         ))
            # fix: 'deamon' -> 'daemon' (typo; the flag was never actually set)
            p1.daemon = True
            p1.start()
            time.sleep(5)

            ps3 = PlaystationService()
            if (ps3.ps3Connected):
                p2 = Process(target=ps3.joystickcontrole)
                p2.daemon = True
                p2.start()
            time.sleep(2)
        except KeyboardInterrupt:
            print("Exiting program.")
            # fix: multiprocessing.Process has no stop(); terminate() is the
            # supported way to kill the child.
            p1.terminate()
Exemple #20
0
 def start_conn_process():
     """Build the DUT connection config and spawn the communication
     process; returns the started Process."""
     # Create device info here as it may change after restart.
     config = {
         "digest" : "serial",
         "port" : self.mbed.port,
         "baudrate" : self.mbed.serial_baud,
         "program_cycle_s" : self.options.program_cycle_s,
         "reset_type" : self.options.forced_reset_type
     }
     # DUT-host communication process
     args = (event_queue, dut_event_queue, self.prn_lock, config)
     p = Process(target=conn_process, args=args)
     # fix: 'deamon' -> 'daemon' (typo; the flag was never actually set)
     p.daemon = True
     p.start()
     return p
 def newAlarm(self,seconds,name):
     if self.alarmList[name]!=None:
         if self.alarmList[name].is_alive():
             return False
     ala=Process(target=startout, args=(seconds,name))
     ala.deamon=True
     ala.start()
     self.alarmList[name]=ala
     print '--------------alarmList Below-------------'
     for each in self.alarmList.keys():
         if self.alarmList[each]!=None:
             print each
             print self.alarmList[each].pid
     print '--------------alarmList above-------------'
     return True
Exemple #22
0
 def start_conn_process():
     """Build the DUT connection config and spawn the communication
     process; returns the started Process."""
     # Create device info here as it may change after restart.
     config = {
         "digest": "serial",
         "port": self.mbed.port,
         "baudrate": self.mbed.serial_baud,
         "program_cycle_s": self.options.program_cycle_s,
         "reset_type": self.options.forced_reset_type
     }
     # DUT-host communication process
     args = (event_queue, dut_event_queue, self.prn_lock, config)
     p = Process(target=conn_process, args=args)
     # fix: 'deamon' -> 'daemon' (typo; the flag was never actually set)
     p.daemon = True
     p.start()
     return p
Exemple #23
0
def main_funtion(path, core, res):
    """Export maps from every mxd file under a folder using subprocesses.

    path: folder containing the mxd files
    core (int): number of processes to start; 3-4 recommended, 7+ possible
        on newer / high-performance CPUs
    res (int): export resolution
    :return: None
    """
    sets_lists, msg = address_clip(path, core)
    for a_msg in msg:  # print grouping information
        print(a_msg + ";\n")
    for set_li in sets_lists:
        time.sleep(0.5)
        # Launch one export process per group.
        p = Process(target=export_jpeg, args=(set_li, res))
        # fix: 'deamon' -> 'daemon' (typo; the flag was never actually set)
        p.daemon = True
        p.start()
Exemple #24
0
    def add_module(self, module_fn):
        """Spawn a worker process running `module_fn`, register its pipe,
        and return the index of the new module."""
        remote, work_remote = Pipe()
        p = Process(target=self.run,
                    args=(work_remote, remote, module_fn, self._managed_memory,
                          self._seed))
        # fix: 'deamon' -> 'daemon' (typo; the flag was never actually set)
        p.daemon = True
        p.start()
        # The child owns its end of the pipe; close the parent's copy.
        work_remote.close()

        self._remotes.append(remote)
        self._ps.append(p)

        self._train_res = None

        return len(self._ps) - 1
Exemple #25
0
    def run(self):
        """Run `max_process` workers that append into a shared manager list
        and return the populated list."""
        manager = Manager()
        result = manager.list()

        processes = []
        for i in range(self.max_process):
            proc = Process(target=self._worker, args=(result, ))
            # fix: 'deamon' -> 'daemon' (typo; the flag was never actually set)
            proc.daemon = True
            processes.append(proc)

        for proc in processes:
            proc.start()

        for proc in processes:
            proc.join()

        return result
Exemple #26
0
def Demo():
    """Demo loop: feed scripted key commands to the worker process until
    interrupted. Only runs when the defining module is __main__."""
    if __name__ == "__main__":
        command_queue = Queue()

        t = Process(target=_mp_worker, args=(command_queue,))
        # fix: 'deamon' -> 'daemon' (typo; the flag was never actually set)
        t.daemon = True
        t.start()

        try:
            while True:
                command_queue.put_nowait([SCANCODE.W, 0.080, 0.1])
                command_queue.put_nowait([SCANCODE.W, 0.080, 0.5])
                command_queue.put_nowait([SCANCODE.A, 0.251, 0.5])
                command_queue.put_nowait([SCANCODE.D, 0.251, 0.5])
                command_queue.put_nowait([SCANCODE.S, 0.551, 0.5])
                time.sleep(5)
        except KeyboardInterrupt:
            print("Goodbye!")
Exemple #27
0
def multi():
    """Run one dataset producer and four consumer workers; terminate all
    processes on Ctrl-C."""
    processes = []
    dataset = Queue()
    datasetProducer = Process(target=put_dataset, args=(dataset, ))
    datasetProducer.daemon = True
    processes.append(datasetProducer)
    datasetProducer.start()
    for core in range(4):
        work = Process(target=worker, args=(dataset, ))
        # fix: 'deamon' -> 'daemon', matching datasetProducer above
        work.daemon = True
        processes.append(work)
        work.start()
    try:
        datasetProducer.join()
    except KeyboardInterrupt:
        for process in processes:
            process.terminate()
        print('\n\n------------------------\nALL PROCESSES TERMINATED\n')
Exemple #28
0
 def fileserver(self):
     """Serve the configured directory over HTTP in a background process.

     Port and directory come from the plugin Parameters; errors are logged
     (EADDRINUSE) or forwarded to senderror().
     """
     sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
     try:
         Port = int(Parameters["Mode3"])
         Domoticz.Log("Starting file server on port " + str(Port))
         Filelocation = Parameters["Mode2"]
         os.chdir(Filelocation)
         Handler = http.server.SimpleHTTPRequestHandler
         socketserver.TCPServer.allow_reuse_address = True
         server = socketserver.TCPServer(("", Port), Handler)
         p = Process(target=server.serve_forever)
         # fix: 'deamon' -> 'daemon' (typo; the flag was never actually set)
         p.daemon = True
         p.start()
         Domoticz.Log("Files in the '" + Filelocation +
                      "' directory are now available on port " + str(Port))
     except socket.error as e:
         if e.errno == errno.EADDRINUSE:
             Domoticz.Log("Port " + str(Port) + " is already in use")
         else:
             senderror(e)
    def submit(self, func: Callable, fn_args: Any, p_kwargs: Dict,
               timeout: float, callback_timeout: Callable[[Any],
                                                          Any], daemon: bool):
        """
        Submits a callable to be executed with the given arguments.
        Schedules the callable to be executed as func(*args, **kwargs) in a new
         process.
        :param func: the function to execute
        :param fn_args: the arguments to pass to the function. Can be one argument
                or a tuple of multiple args.
        :param p_kwargs: the kwargs to pass to the function
        :param timeout: after this time, the process executing the function
                will be killed if it did not finish
        :param callback_timeout: this function will be called with the same
                arguments, if the task times out.
        :param daemon: run the child process as daemon
        :return: the result of the function, or None if the process failed or
                timed out
        """
        p_args = fn_args if isinstance(fn_args, tuple) else (fn_args, )
        queue = Queue()
        p = Process(target=self._process_run,
                    args=(
                        queue,
                        func,
                        fn_args,
                    ),
                    kwargs=p_kwargs)

        if daemon:
            # fix: 'deamon' -> 'daemon' (typo; the flag was never actually set)
            p.daemon = True

        p.start()
        p.join(timeout=timeout)
        if not queue.empty():
            return queue.get()
        if callback_timeout:
            callback_timeout(*p_args, **p_kwargs)
        # Reap the child if it outlived the timeout.
        if p.is_alive():
            p.terminate()
            p.join()
Exemple #30
0
def main():
    """Set up a bidirectional relay between two endpoints (bind/bind,
    connect/connect, or bind/connect) and run until killed."""
    # Setup queue data structures for relayed data
    freeze_support()
    q_1_2 = Queue()
    q_2_1 = Queue()
    kill_flag = Event()

    # Get args
    bind1, bind2, conn1, conn2, v, t = get_args()

    # Setup relay
    if bind1 and bind2:
        host1 = Process(target=bind_relay, args=(bind1, q_2_1, q_1_2, t, kill_flag))
        host2 = Process(target=bind_relay, args=(bind2, q_1_2, q_2_1, t, kill_flag))
    elif conn1 and conn2:
        host1 = Process(target=connect_relay, args=(conn1, q_2_1, q_1_2, t, kill_flag))
        host2 = Process(target=connect_relay, args=(conn2, q_1_2, q_2_1, t, kill_flag))
    else:
        host1 = Process(target=bind_relay, args=(bind1, q_2_1, q_1_2, t, kill_flag))
        host2 = Process(target=connect_relay, args=(conn1, q_1_2, q_2_1, t, kill_flag))
    try:
        # Start relay
        host1.daemon = True
        # fix: 'deamon' -> 'daemon', matching host1 above
        host2.daemon = True
        host1.start()
        host2.start()
        while True:
            if kill_flag.is_set():
                # We've been told to die by another thread.
                print("[!] - Terminating relays. Please wait.")
                sleep(3)
                exit(0)
            sleep(1)
    except KeyboardInterrupt:
        print("\n[!] - Interrupted - please wait.")
        kill_flag.set()
        sleep(3)
        exit(0)
    except Exception as e:
        print("Exception in main : {}".format(e))
        exit(0)
Exemple #31
0
            plt.set_cmap("jet")

            if f0 > 0:
                extent = [  (f0 - samp_rate/2)/1e6,
                            (f0 + samp_rate/2)/1e6,
                            1e3 * len(data) * float(fft_step) / samp_rate,
                            0]
                im = plt.imshow(data,interpolation='bilinear', extent=extent, aspect='auto')
            else:
                im = plt.imshow(data,interpolation='bilinear', aspect='auto')
            plt.colorbar(im)

        # dump
        if png != "":
            plt.savefig(png,dpi=100)

        if npy != "":
            np.save(npy, data)

        if show:
            #plt.show()
            #raw_input("press return to continue")
            #plt.ion()
            plt.draw()
            plt.pause(.1)

plot_queue = Queue()
p = Process(target=plot_process)
# fix: 'deamon' -> 'daemon' (typo; the flag was never actually set)
p.daemon = True
p.start()
					continue
			if port == 0:
				print "[MANAGER] Error in assigning ports"
				continue
			print "[MANAGER] Received simulation with sim_id=" + sim_id + ", starting now on port " + str(port)
			starter = Starter()
			starter_thread = Thread(target=starter.main, args=(sim_id, port, ports, semaphore))
			starter_thread.start()

class Tests:
	# Consumes test requests from a queue and runs each one sequentially.
	# NOTE: Python 2 snippet (print statements); kept byte-identical.
	def __init__(self, t):
		# t: queue-like object yielding (team_name, agent_name) tuples.
		print "[HANDLE TESTS] Thread started"
		self.tests = t

	def handle_tests(self):
		# Block forever on the queue; run each requested test to completion
		# (join) before taking the next request.
		while 1:
			team_name, agent_name = self.tests.get(block=True)
			print "[TESTS] Received test request with agent_name: " + agent_name + " team_name:" + team_name + ", starting now.."
			test = Test()
			test_thread = Thread(target=test.run, args=(team_name, agent_name,))
			test_thread.start()
			test_thread.join()


if __name__ == "__main__":
	# Run the manager in a child process and wait for it to finish.
	manager = Manager()
	manager_thread = Process(target=manager.main)
	# fix: 'deamon' -> 'daemon' (typo; the flag was never actually set)
	manager_thread.daemon = True
	manager_thread.start()
	manager_thread.join()
Exemple #33
0
def multiprocess_dama(sid, img_data, code_type):
    """Solve a captcha via worker processes with escalating retries.

    Starts one worker immediately and launches a second/third worker if no
    answer arrives after 20s/40s; gives up after 60s. Results come back over
    a Pipe. Returns the worker's task tuple on success, or a
    ('crawl_error', ...) tuple on timeout.
    """
    log_name = 'multiprocess_dama'
    data = {'sid': sid}
    parent_conn, child_conn = Pipe()
    begin_time = time.time()
    proccess_flag = 0
    p1 = Process(target=dama_proccess,
                 args=(
                     sid,
                     img_data,
                     code_type,
                     proccess_flag,
                     child_conn,
                 ))  # spawn the first worker process
    # fix: 'deamon' -> 'daemon' (typo; the flag was never actually set)
    p1.daemon = True
    p1.start()  # run the process
    proccess_flag = 1
    print('A进程启动完毕')
    data['p1'] = 'p1'
    data['A进程启动完毕'] = 'A进程启动完毕'
    while True:
        end_time = time.time()
        #        print (end_time-begin_time)
        if end_time - begin_time > 20 and proccess_flag == 1:
            p2 = Process(target=dama_proccess,
                         args=(
                             sid,
                             img_data,
                             code_type,
                             proccess_flag,
                             child_conn,
                         ))  # spawn a second worker process
            p2.daemon = True
            p2.start()  # run the process
            proccess_flag = 2
            print('B进程启动完毕')
            data['B进程启动完毕'] = 'B进程启动完毕'
        if end_time - begin_time > 40 and proccess_flag == 2:
            p3 = Process(target=dama_proccess,
                         args=(
                             sid,
                             img_data,
                             code_type,
                             proccess_flag,
                             child_conn,
                         ))  # spawn a third worker process
            # fix: daemon must be set BEFORE start() (and spelled correctly)
            p3.daemon = True
            p3.start()  # run the process
            proccess_flag = 3
            print('C进程启动完毕')
            data['C进程启动完毕'] = 'C进程启动完毕'

        data['proccess_flag'] = proccess_flag
        if parent_conn.poll():
            task = parent_conn.recv()
            if len(task) > 2:
                print('收到管道信息'.format(task))
                data['收到管道信息'] = task
                parent_conn.close()
                child_conn.close()
                logger(log_name, task[0], task[1], **data)
                return task
        if end_time - begin_time > 60:
            break
        time.sleep(1)
        print(end_time - begin_time)
    logger(log_name, 'crawl_error', '打码平台超时', **data)
    return 'crawl_error', u"打码平台超时", "", None
Exemple #34
0
    to_write = JoinableQueue()

    def writer(q):
        """Consume (fname, DataFrame) pairs until a None sentinel arrives,
        appending to each CSV after its header has been written once."""
        # Call to_write.get() until it returns None
        has_header_dict = {}
        for fname, df in iter(q.get, None):
            if fname in has_header_dict:
                df.to_csv(fname, header=False, mode="a")
            else:
                df.to_csv(fname, header=True, mode="w")
                has_header_dict[fname] = True
            q.task_done()
        q.task_done()

    write_thread = Process(target=writer, args=(to_write, ))
    # fix: 'deamon' -> 'daemon' (typo; the flag was never actually set)
    write_thread.daemon = True
    write_thread.start()

    def process_date(date, write_header=False, write_queue=to_write):
        date_files = glob.glob(args.file + "/*_" + str(date) +
                               ".feather")  # [:NFILES]

        # Read feather files
        run_data = [pd.read_feather(f) for f in date_files]
        tot_df = pd.concat(run_data)

        # force GC to free up lingering cuda allocs
        del run_data
        gc.collect()

        # Get start date of simulation
Exemple #35
0
            try:
                time.sleep(600)
            except:
                if cfg['global']['verbose']:
                    print "Have to end the Sync loop... :("
                return
            
#Including Website
execfile('./website/main.py')

#webservice = Process(target=service_web)
#webservice.deamon = True
#webservice.start()
#if cfg['global']['verbose']:
#    print "Serverstarted. You can use your browser to configure: http://%s:%s" % \
#                                (cfg['webserver']['ip'], cfg['webserver']['port']) 
#
syncservice = Process(target=service_sync)
# fix: 'deamon' -> 'daemon' (typo; the flag was never actually set)
syncservice.daemon = True
syncservice.start()

def quit(signum, frame):
    # Signal handler: print a goodbye message (when verbose) and exit.
    # NOTE: Python 2 snippet (print statement); kept byte-identical.
    if cfg['global']['verbose']:
        print "\nThanks for using always Backup"
    try:
        sys.exit(0)
    except:
        pass

# Install the handler for Ctrl-C (SIGINT).
signal.signal(signal.SIGINT, quit)
Exemple #36
0
    while True:
        log.info("Hello from process one")
        time.sleep(1)


def process_two():
    # Demo worker: configures logging then logs/prints a greeting once per
    # second, forever.
    # NOTE(review): `self` is referenced inside a module-level function here;
    # this raises NameError unless a global `self` exists — confirm intent.
    logger.worker_configurer(self.settings['LOGGER'])
    log = logging.getLogger('ShipShape')

    while True:
        msg = "Hello hello from process two"
        print(msg)
        log.info(msg)
        time.sleep(1)


p1 = Process(target=process_one)
p2 = Process(target=process_two)
logging_proc = Process(name="Logger_Listener",
                       target=logger.LoggerListener,
                       args=(q, ))
# fix: 'deamon' -> 'daemon', matching p1/p2 below
logging_proc.daemon = True
logging_proc.start()
p1.daemon = True
p2.daemon = True
p1.start()
p2.start()

# Keep the main process alive so the daemon children keep running.
while True:
    time.sleep(1)
Exemple #37
0
def main(running):
    """Run the action-detection server pipeline until `running[0]` is falsy.

    Wires server -> tube manager -> spatial -> NN -> compositional detectors
    through queues, then forwards and/or saves the resulting packets.
    """
    server = NetServer(
        name='act',
        address=const.LOCAL_ADDR,
        port=const.LOCAL_PORT,
        buffer_size=const.QUEUE_SIZE,
    )
    server_proc = Process(target=server.run)
    server_proc.start()

    client = NetClient(
        client_name='act',
        server_addr=const.SERVER_ADDR,
        buffer_size=const.QUEUE_SIZE,
    )
    if const.UPLOAD_DATA:
        client_proc = Process(target=client.run)
        client_proc.start()

    data_savers = {}

    tube_queue = Q(const.QUEUE_SIZE)
    act_spatial_queue = Q(const.QUEUE_SIZE)
    act_nn_queue = Q(const.QUEUE_SIZE)
    act_comp_queue = Q(const.QUEUE_SIZE)
    filter_queue = Q(const.QUEUE_SIZE)

    tm = ServerPktManager(
        in_queue=server.data_queue,
        out_queue=tube_queue,
        track_list=const.TRACK_LABELS,
        overlap_list=const.ATTACH_LABELS,
    )
    tm_proc = Thread(target=tm.run)
    # fix: 'deamon' -> 'daemon' (typo; the flag was never actually set)
    tm_proc.daemon = True
    tm_proc.start()

    spatial_act = SpatialActDetector(
        in_queue=tube_queue,
        out_queue=act_spatial_queue,
    )
    spatial_proc = Process(target=spatial_act.run)
    spatial_proc.daemon = True
    spatial_proc.start()

    nn_act = NNActDetector(
        in_queue=act_spatial_queue,
        out_queue=act_nn_queue,
        model_path=const.NN_ACT_MODEL_PATH,
        batch_size=const.NN_BATCH,
        tube_size=const.TUBE_SIZE,
        filter_queue=filter_queue,
    )
    nn_act_proc = Process(target=nn_act.run)
    nn_act_proc.daemon = True
    nn_act_proc.start()

    comp_act = CompActDetector(
        in_queue=act_nn_queue,
        out_queue=act_comp_queue,
        filter_queue=filter_queue,
    )
    comp_act_proc = Process(target=comp_act.run)
    comp_act_proc.daemon = True
    comp_act_proc.start()

    out_queue = act_comp_queue
    print('server starts')

    while running[0]:
        server_pkt = out_queue.read()
        if server_pkt is None:
            sleep(0.01)
            continue

        cid = server_pkt.cam_id
        if cid not in data_savers:
            data_savers[cid] = DataWriter(file_path=RES_FOLDER +
                                          '{}.npy'.format(cid))
            print('creat data_saver for video {}'.format(cid))

        logging.info("Cam-{}, Frame-{}, Acts-{}".format(
            server_pkt.cam_id, server_pkt.get_first_frame_id(),
            server_pkt.get_action_info()))
        logging.info("--------------------------------")

        data_pkts = server_pkt.to_data_pkts()
        for p in data_pkts:
            if const.UPLOAD_DATA:
                client.send_data(p)

            if const.SAVE_DATA:
                data_savers[cid].save_data(frame_id=p.frame_id, meta=p.meta)

    if const.SAVE_DATA:
        for cid in data_savers:
            data_savers[cid].save_to_file()

    server.stop()
    print("server finished!")