Example #1
	def start( self ):
		self.dataQ = Queue()
		self.messageQ = Queue()
		self.shutdownQ = Queue()	# Queue to tell the Impinj monitor to shut down.
		self.readerStatusCB()
		
		if self.useHostName.GetValue():
			self.impinjProcess = Process(
				name='ImpinjProcess', target=ImpinjServer,
				args=(
					self.dataQ, self.messageQ, self.shutdownQ,
					ImpinjHostNamePrefix + self.impinjHostName.GetValue() + ImpinjHostNameSuffix, ImpinjInboundPort,
					self.getAntennaStr(),
					self.readerStatusCB,
				)
			)
		else:
			self.impinjProcess = Process(
				name='ImpinjProcess', target=ImpinjServer,
				args=(
					self.dataQ, self.messageQ, self.shutdownQ,
					self.impinjHost.GetAddress(), ImpinjInboundPort,
					self.getAntennaStr(),
					self.readerStatusCB,
				)
			)
		self.impinjProcess.daemon = True
		
		self.crossMgrProcess = Process( name='CrossMgrProcess', target=CrossMgrServer,
			args=(self.dataQ, self.messageQ, self.shutdownQ, self.getCrossMgrHost(), CrossMgrPort) )
		self.crossMgrProcess.daemon = True
		
		self.impinjProcess.start()
		self.crossMgrProcess.start()
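
Example #1 signals shutdown by pushing a sentinel onto a dedicated queue (Examples #2, #19, #20 and #22 follow the same convention). ImpinjServer's loop is not shown, so the polling logic in this minimal sketch is an assumption about its shape:

from multiprocessing import Process, Queue
from queue import Empty

def worker(dataQ, messageQ, shutdownQ):
    # Poll shutdownQ between units of work; any message means "stop".
    while True:
        try:
            shutdownQ.get(timeout=0.25)
            return  # shutdown requested
        except Empty:
            pass
        messageQ.put('heartbeat')  # stand-in for real reader traffic

if __name__ == '__main__':
    dataQ, messageQ, shutdownQ = Queue(), Queue(), Queue()
    p = Process(target=worker, args=(dataQ, messageQ, shutdownQ), daemon=True)
    p.start()
    shutdownQ.put(True)  # ask the worker to exit
    p.join()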
Example #2
    def start(self):
        self.dataQ = Queue()
        self.messageQ = Queue()
        self.shutdownQ = Queue()  # Queue to tell the Alien monitor to shut down.

        self.alienProcess = Process(
            name='AlienProcess',
            target=AlienServer,
            args=(self.dataQ, self.messageQ, self.shutdownQ,
                  self.getNotifyHost(), NotifyPort, HeartbeatPort,
                  self.getAntennaStr(), self.listenForHeartbeat.GetValue(),
                  self.cmdHost.GetAddress(), self.cmdPort.GetValue()))
        self.alienProcess.daemon = True

        self.crossMgrProcess = Process(name='CrossMgrProcess',
                                       target=CrossMgrServer,
                                       args=(self.dataQ, self.messageQ,
                                             self.shutdownQ,
                                             self.getCrossMgrHost(),
                                             CrossMgrPort))
        self.crossMgrProcess.daemon = True

        self.alienProcess.start()
        self.crossMgrProcess.start()
Example #3
    def __init__(self, common_params=None, dataset_params=None):
        """
    Args:
      common_params: A dict
      dataset_params: A dict
    """
        if common_params:
            self.image_size = int(common_params['image_size'])
            self.batch_size = int(common_params['batch_size'])

        if dataset_params:
            self.data_path = str(dataset_params['path'])
            self.thread_num = int(int(dataset_params['thread_num']) / 2)
            self.thread_num2 = int(int(dataset_params['thread_num']) / 2)
        #record and image_label queue
        self.record_queue = Queue(maxsize=10000)
        self.image_queue = Queue(maxsize=5000)

        self.batch_queue = Queue(maxsize=100)

        self.record_list = []

        # filling the record_list
        input_file = open(self.data_path, 'r')

        for line in input_file:
            line = line.strip()
            self.record_list.append(line)

        self.record_point = 0
        self.record_number = len(self.record_list)
        self.num_batch_per_epoch = int(self.record_number / self.batch_size)

        t_record_producer = Process(target=self.record_producer)
        t_record_producer.daemon = True
        t_record_producer.start()

        # for i in range(self.thread_num):
        #     t = Process(target=self.record_customer)
        #     t.daemon = True
        #     t.start()

        # for i in range(self.thread_num2):
        #     t = Process(target=self.image_customer)
        #     t.daemon = True
        #     t.start()
        self.record_process = Process(target=self.record_customer)
        self.record_process.daemon = True
        self.record_process.start()

        self.image_cus_process = Process(target=self.image_customer)
        self.image_cus_process.daemon = True
        self.image_cus_process.start()
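
The constructor wires up a bounded three-stage pipeline (record_queue → image_queue → batch_queue), but record_producer, record_customer and image_customer themselves are not shown. The free-function sketch below is an assumption about their shape:

from multiprocessing import Process, Queue

def record_producer(record_list, record_queue):
    # Cycle through the record list forever, feeding the next stage.
    while True:
        for rec in record_list:
            record_queue.put(rec)

def record_customer(record_queue, image_queue):
    # Stand-in for reading/decoding the image named by each record.
    while True:
        rec = record_queue.get()
        image_queue.put(('decoded', rec))

if __name__ == '__main__':
    record_queue = Queue(maxsize=10000)
    image_queue = Queue(maxsize=5000)
    Process(target=record_producer, args=(['a.jpg', 'b.jpg'], record_queue),
            daemon=True).start()
    Process(target=record_customer, args=(record_queue, image_queue),
            daemon=True).start()
    print(image_queue.get())

The maxsize bounds give the pipeline backpressure: a stage blocks on put() once its downstream queue is full.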
Example #4
    def __init__(self, data_path, in_dir):
        """
    Args:
      common_params: A dict
      dataset_params: A dict
    """
        self.input_size = 224
        self.batch_size = 32

        self.data_path = data_path
        self.gray_dir = in_dir
        self.thread_num = 20
        self.thread_num2 = 20
        #record and image_label queue
        self.record_queue = Queue(maxsize=16000)
        self.image_queue = Queue(maxsize=4000)

        self.batch_queue = Queue(maxsize=100)

        self.record_list = []
        input_file = open(self.data_path, 'r')
        print('Dataset {}'.format(self.data_path))

        for line in input_file:
            line = line.strip()
            # name = os.path.split(line)[1]
            # class_name = name.split('_')[0]
            # self.record_list.append(class_name+'/'+name)
            self.record_list.append(line)
        # self.record_list.sort()

        self.record_point = 0
        self.record_number = len(self.record_list)
        print('Total: {}'.format(self.record_number))

        t_record_producer = Process(target=self.record_producer)
        t_record_producer.daemon = True
        t_record_producer.start()

        for i in range(self.thread_num):
            t = Process(target=self.record_customer)
            t.daemon = True
            t.start()

        for i in range(self.thread_num2):
            t = Process(target=self.image_customer)
            t.daemon = True
            t.start()
Example #5
    def __init__(self, config, processor_class):
        if config.DEBUG:
            from threading import Thread as Process
        else:
            from multiprocessing import Process
        self._config = config
        self._clients = {}
        self._idle_workers = SimpleQueue()
        self._max_queue_size = getattr(config, 'TCP_MAX_QUEUE_SIZE', None)
        self._running_workers = SimpleQueue()
        self._waiting_tasks = SimpleQueue(self._max_queue_size)
        self._worker_processes = []

        max_buffer_size = getattr(config, 'TCP_MAX_BUFFER_SIZE', None)
        self._worker_server = CallbackTcpServer(
            on_connect=self._on_worker_connect,
            max_buffer_size=max_buffer_size)
        self._worker_server.listen(**config.WORK_ENDPOINT)
        self._connection_server = CallbackTcpServer(
            on_connect=self._on_client_connect,
            max_buffer_size=max_buffer_size)
        for listen_port in config.LISTEN_ENDPOINTS:
            self._connection_server.listen(**listen_port)
        for i in range(config.WORKER_COUNT):
            processor = processor_class(i, config)
            p = Process(target=processor.run)
            p.processor = processor
            self._worker_processes.append(p)
            p.start()
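
The DEBUG branch swaps in threading.Thread under the name Process, which works because Thread and Process share the target/args/start/join interface; the same conditional-import trick reappears in Examples #6, #18 and #21. A standalone illustration (the DEBUG flag and work function are placeholders):

DEBUG = True  # placeholder flag

if DEBUG:
    from threading import Thread as Process  # in-process, easier to debug
else:
    from multiprocessing import Process  # real OS processes

def work(n):
    print('working on', n)

if __name__ == '__main__':
    workers = [Process(target=work, args=(i,)) for i in range(2)]
    for w in workers:
        w.start()
    for w in workers:
        w.join()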
Example #6
def run(prod_list, topics=None, test_message=None):

    tmessage = get_test_message(test_message)
    if tmessage:
        from threading import Thread as Process
        from posttroll.message import Message
    else:
        from multiprocessing import Process

    with open(prod_list) as fid:
        config = yaml.load(fid.read(), Loader=BaseLoader)
    topics = topics or config['product_list'].pop('subscribe_topics', None)

    if not tmessage:
        listener = ListenerContainer(topics=topics)

    while True:
        try:
            if tmessage:
                msg = Message(rawstr=tmessage)
            else:
                msg = listener.output_queue.get(True, 5)
        except KeyboardInterrupt:
            if not tmessage:
                listener.stop()
            return
        except queue_empty:
            continue

        proc = Process(target=process, args=(msg, prod_list))
        proc.start()
        proc.join()
        if tmessage:
            break
Example #7
    def escuchar(self):
        try:
            sock = socket(AF_INET, SOCK_STREAM)  # Create a socket
            sock.bind((self.host, self.port))  # Bind the socket to the host and port
            sock.listen(5)  # Listen with a backlog of 5
            print("Server is running on " + str(self.port))
            print("Waiting for clients....")
            trds = []
            for i in range(5):
                conn, addr = sock.accept()  # Accept the connection
                self.clients.append(conn)  # Append the socket object to the clients list
                t = Process(target=self.clientHandler, args=(conn, addr))  # One process per connecting client
                trds.append(t)  # Keep track of it in the trds list
                t.start()  # Start the process
            for t in trds:
                t.join()  # Wait for each process to finish
        except KeyboardInterrupt:
            print("Exiting..")
            sock.close()  # Close the socket on interrupt
            return
Example #8
    def reimport(self, parameters):
        """import_dataset's equivalent without the Django request validation and transformation to Importer's parameter dict.
        Used mostly by Syncer or if reimport is added to GUI.

        :param parameters: Dataset Importer parameters which have been validated during a previous run.
        :type parameters: dict
        """
        parameters = self._preprocess_reimport(parameters=parameters)
        Process(target=_import_dataset, args=(parameters, self._n_processes, self._process_batch_size)).start()
Example #9
 def background_run(self):
     """Run the client in the background."""
     self.streaming, self.started, self.can_add_streams = True, True, False
     self._clear_killer()  # Clear the kill queue
     streaming_process = Process(target=self._bg_run, name="StreamProcess")
     # streaming_process.daemon = True
     streaming_process.start()
     self.processes.append(streaming_process)
     Logger.info("Started streaming")
Example #10
    def __init__(self, data_path, nthread=12):
        """
    Args:
      common_params: A dict
      dataset_params: A dict
    """
        self.image_size = 256
        self.batch_size = 16
        self.data_path = data_path
        self.thread_num = nthread
        self.thread_num2 = nthread
        self.record_queue = Queue(maxsize=30000)
        self.image_queue = Queue(maxsize=15000)
        self.batch_queue = Queue(maxsize=300)
        self.record_list = []
        self.data_dir = '/srv/glusterfs/xieya/data/places365_standard'

        # filling the record_list
        input_file = open(self.data_path, 'r')

        for line in input_file:
            self.record_list.append(os.path.join(self.data_dir,
                                                 line[:-1]))  # Strip newline.

        self.record_point = 0
        self.record_number = len(self.record_list)
        self.data_size = self.record_number

        self.num_batch_per_epoch = int(self.record_number / self.batch_size)

        t_record_producer = Process(target=self.record_producer)
        t_record_producer.daemon = True
        t_record_producer.start()

        for i in range(self.thread_num):
            t = Process(target=self.record_customer)
            t.daemon = True
            t.start()

        for i in range(self.thread_num2):
            t = Process(target=self.image_customer)
            t.daemon = True
            t.start()
Example #11
 def _start_visdom(port):
     p = Process(target=visdom.server.start_server,
                 kwargs={
                     "port": port,
                     "base_url": ''
                 })
     p.start()
     atexit.register(p.terminate)
     time.sleep(20)
     return True
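
Registering p.terminate with atexit is what keeps the visdom child from outliving the script. A minimal sketch of that cleanup pattern, with a placeholder server loop:

import atexit
import time
from multiprocessing import Process

def serve():
    # Placeholder for a long-running server loop.
    while True:
        time.sleep(1)

if __name__ == '__main__':
    p = Process(target=serve)
    p.start()
    atexit.register(p.terminate)  # kill the child when the interpreter exits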
Example #12
def _colorize_data_train(data_path, in_dir, out_dir):
    ds = DataSet(data_path, in_dir)
    input_tensor = tf.placeholder(tf.float32,
                                  shape=(_BATCH_SIZE, _INPUT_SIZE, _INPUT_SIZE,
                                         1))
    model = demo._get_model(input_tensor)
    saver = tf.train.Saver()

    config = tf.ConfigProto(allow_soft_placement=True)
    config.gpu_options.allow_growth = True

    save_queue = Queue(100)

    lock = RLock()

    def save_fn():
        while True:
            img_l_batch, img_313_rs_batch, img_name_batch = save_queue.get()
            for idx in range(_BATCH_SIZE):
                img_l = img_l_batch[idx]
                img_rgb, _ = utils.decode(img_l, img_313_rs_batch[idx:idx + 1],
                                          _T)
                io.imsave(os.path.join(out_dir, img_name_batch[idx]), img_rgb)

            lock.acquire()
            global i
            i += _BATCH_SIZE
            lock.release()

    # NB: save_fn mutates the global counter i; that only propagates back to
    # this loop if Process is an alias for threading.Thread, as in several
    # examples above.
    for _ in range(20):
        t = Process(target=save_fn)
        t.daemon = True
        t.start()

    with tf.Session(config=config) as sess:
        saver.restore(sess, _CKPT_PATH)

        start_time = monotonic.monotonic()
        while i < ds.record_number:
            lock.acquire()
            if i % (_BATCH_SIZE * _LOG_FREQ) == 0:
                print("Image count: {0} Time: {1}".format(
                    i,
                    monotonic.monotonic() - start_time))
                start_time = monotonic.monotonic()
            lock.release()

            img_l_batch, img_l_rs_batch, img_name_batch = ds.batch()

            img_313_rs_batch = sess.run(
                model, feed_dict={input_tensor: img_l_rs_batch})

            save_queue.put((img_l_batch, img_313_rs_batch, img_name_batch))
            print("Save queue size: {}".format(save_queue.qsize()))
    print("Colorized: {}".format(i))
    def __init__(self):

        self.image_size = 224
        self.batch_size = 128
        self.data_path = 'E:/chengzirui/dataset/images256/'
        self.thread_num = 10
        self.thread_num2 = 10
        # record and image_label queue
        self.record_queue = Queue(maxsize=10000)
        self.image_queue = Queue(maxsize=5000)

        self.batch_queue = Queue(maxsize=150)

        self.record_list = []

        # filling the record_list
        input_file = open('data_list/places_train.txt', 'r')

        for line in input_file:
            line = line.strip()
            self.record_list.append(line)

        self.record_point = 0
        # Total number of records
        self.record_number = len(self.record_list)

        self.num_batch_per_epoch = int(self.record_number / self.batch_size)

        t_record_producer = Process(target=self.record_producer)
        t_record_producer.daemon = True
        t_record_producer.start()

        for i in range(self.thread_num):
            t = Process(target=self.record_customer)
            t.daemon = True
            t.start()

        for i in range(self.thread_num2):
            t = Process(target=self.image_customer)
            t.daemon = True
            t.start()
Example #14
    def __init__(self, common_params=None, dataset_params=None):
        """
        Args:
          common_params: A dict
          dataset_params: A dict
        """
        if common_params:
            self.image_size = int(common_params['image_size'])
            self.batch_size = int(common_params['batch_size'])

        if dataset_params:
            self.data_path = str(dataset_params['path'])
            self.thread_num = int(dataset_params['thread_num'])

        # Create record and batch queue for multi-threading
        self.record_queue = Queue(maxsize=10000)
        self.batch_queue = Queue(maxsize=100)

        # Fill in the record_list
        self.record_list = []
        input_file = open(self.data_path, 'r')

        for line in input_file:
            line = line.strip()
            self.record_list.append(line)

        self.record_point = 0
        self.record_number = len(self.record_list)
        self.num_batch_per_epoch = int(self.record_number / self.batch_size)

        # Keep adding record into record_queue
        t_record_producer = Process(target=self.record_producer)
        t_record_producer.daemon = True
        t_record_producer.start()

        # (Multi-threads) Read/Process images and batch them
        for i in range(self.thread_num):
            t = Process(target=self.image_customer)
            t.daemon = True
            t.start()
Example #15
    def __init__(self, w=None, h=None, max_side=None):
        self.w = w
        self.h = h
        self.max_side = max_side
        self.queue = Queue(maxsize=20)
        self.queue_pts = Queue(maxsize=20)

        threads = [
            Process(target=worker_display, args=(self.queue_pts, )),
        ]

        for t in threads:
            t.daemon = True
            t.start()
Example #16
    def __init__(self,
                 exp_name="main",
                 server="http://localhost",
                 port=8080,
                 auto_close=True,
                 auto_start=False,
                 auto_start_ports=(8080, 8000),
                 **kwargs):
        """
        Creates a new NumpyVisdomLogger object.

        Args:
            exp_name: The name of the visdom environment
            server: The address of the visdom server
            port: The port of the visdom server
            auto_close: Close all objects and kill the process at the end of the python script
            auto_start: Flag, if it should try to start a visdom server on the given ports
            auto_start_ports: Ordered list of ports to try to start a visdom server on (only the
                first available port is used)
        """
        super(NumpyVisdomLogger, self).__init__(**kwargs)

        if auto_start:
            auto_port = start_visdom(auto_start_ports)
            if auto_port != -1:
                port = auto_port
                server = "http://localhost"

        self.name = exp_name
        self.server = server
        self.port = port

        self.vis = ExtraVisdom(env=self.name,
                               server=self.server,
                               port=self.port)

        self._value_counter = defaultdict(dict)
        self._3d_histograms = dict()

        self._queue = Queue()
        self._process = Process(target=self.__show, args=(self._queue, ))

        if auto_close:
            # atexit.register(self.close_all)
            if not IS_WINDOWS:
                atexit.register(self.exit)
            atexit.register(self.save_vis)

        self._process.start()
Example #17
 def start(self, file_loc, audio_src):
     # from rec_thread import rec_thread
     try:
         from billiard import forking_enable
         forking_enable(0)
     except ImportError:
         pass
     self.should_close.clear()
     self.process = Process(target=rec_thread,
                            args=(file_loc, audio_src, self.should_close))
     self.process.start()
     try:
         forking_enable(1)
     except NameError:  # billiard was not importable above
         pass
Example #18
def run(prod_list,
        topics=None,
        test_message=None,
        nameserver='localhost',
        addresses=None):
    """Spawn one or multiple subprocesses to run the jobs from the product list."""
    LOG.info("Launching trollflow2")
    tmessage = get_test_message(test_message)
    if tmessage:
        from threading import Thread as Process
        from six.moves.queue import Queue
        from posttroll.message import Message
    else:
        from multiprocessing import Process, Queue

    with open(prod_list) as fid:
        config = yaml.load(fid.read(), Loader=BaseLoader)
    topics = topics or config['product_list'].pop('subscribe_topics', None)

    if not tmessage:
        listener = ListenerContainer(topics=topics,
                                     nameserver=nameserver,
                                     addresses=addresses)

    while True:
        try:
            if tmessage:
                msg = Message(rawstr=tmessage)
            else:
                msg = listener.output_queue.get(True, 5)
        except KeyboardInterrupt:
            if not tmessage:
                listener.stop()
            return
        except Empty:
            continue
        produced_files = Queue()
        proc = Process(target=process, args=(msg, prod_list, produced_files))
        start_time = datetime.now()
        proc.start()
        proc.join()
        try:
            exitcode = proc.exitcode
        except AttributeError:
            # Thread (used when a test message is given) has no exitcode.
            exitcode = 0
        check_results(produced_files, start_time, exitcode)
        if tmessage:
            break
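
check_results and process are not shown; a hedged sketch of the result-queue-plus-exitcode pattern the loop relies on (process_stub and the output path are placeholders):

from multiprocessing import Process, Queue

def process_stub(msg, prod_list, produced_files):
    # Stand-in for trollflow2's process(): report outputs via the queue.
    produced_files.put('/tmp/placeholder_output.tif')

if __name__ == '__main__':
    produced_files = Queue()
    proc = Process(target=process_stub, args=('msg', 'pl.yaml', produced_files))
    proc.start()
    proc.join()
    ok = proc.exitcode == 0
    while not produced_files.empty():
        print('produced:', produced_files.get(), 'ok:', ok)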
Example #19
def StartListener( startTime=datetime.datetime.now(), comPort=1, HOST=None, PORT=None ): # HOST, PORT not used.
	global q
	global shutdownQ
	global listener
	
	StopListener()

	if Model.race:
		comPort = (comPort or getattr(Model.race, 'comPort', 1))
	
	q = Queue()
	shutdownQ = Queue()
	listener = Process( target = Server, args=(q, shutdownQ, comPort, startTime) )
	listener.name = 'RaceResultUSB Listener'
	listener.daemon = True
	listener.start()
Example #20
def StartListener( startTime = datetime.datetime.now(),
					HOST = DEFAULT_HOST, PORT = DEFAULT_PORT ):
	global q
	global shutdownQ
	global listener
	global dateToday
	dateToday = startTime.date()
	
	StopListener()
	
	q = Queue()
	shutdownQ = Queue()
	listener = Process( target = Server, args=(q, shutdownQ, HOST, PORT, startTime) )
	listener.name = 'JChip Listener'
	listener.daemon = True
	listener.start()
Example #21
def launch_server():
    ip = gethostbyname(getfqdn())
    port, with_threading = get_options()
    sock = socket(AF_INET, SOCK_STREAM)
    sock.bind(('', port))
    print('Server on: ' + ip + ':' + str(port) + ' with ' + ('threads' if with_threading else 'processes'))
    if with_threading:
        from threading import Thread as Process
    else:
        from multiprocessing import Process
    sock.listen(16)
    while True:
        client_sock, ip = sock.accept()
        print('Connected from ' + ip[0])
        process = Process(target=hash_text, args=(client_sock,))
        process.start()
Example #22
def StartListener( startTime=now(), HOST=None, PORT=None ):
	global q
	global shutdownQ
	global listener
	
	StopListener()
	
	if Model.race:
		HOST = (HOST or Model.race.chipReaderIpAddr)
		PORT = (PORT or Model.race.chipReaderPort)
	
	q = Queue()
	shutdownQ = Queue()
	listener = Process( target = Server, args=(q, shutdownQ, HOST, PORT, startTime) )
	listener.name = 'Ultra Listener'
	listener.daemon = True
	listener.start()
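
StopListener is called but not shown in Examples #19, #20 and #22; given the shutdownQ convention, a plausible counterpart might look like this sketch (not the actual CrossMgr implementation; assumes q, shutdownQ and listener are module globals initialised to None):

def StopListener():
    global q, shutdownQ, listener
    if listener:
        shutdownQ.put(True)  # ask Server to exit its loop
        listener.join(2)
        if listener.is_alive():
            listener.terminate()
    q = shutdownQ = listener = None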
Example #23
    def init_instruments(self):
        for name, instr in self._instruments.items():
            instr.configure_with_proxy(instr.proxy_obj)

        self.digitizers = [
            v for _, v in self._instruments.items()
            if "Digitizer" in v.instrument_type
        ]
        self.awgs = [
            v for _, v in self._instruments.items()
            if "AWG" in v.instrument_type
        ]
        # Swap the master AWG so it is last in the list
        try:
            master_awg_idx = next(ct for ct, awg in enumerate(self.awgs)
                                  if awg.master)
            self.awgs[-1], self.awgs[master_awg_idx] = self.awgs[
                master_awg_idx], self.awgs[-1]
        except StopIteration:
            logger.warning("No AWG is specified as the master.")

        for gen_proxy in self.generators:
            gen_proxy.instr.output = True

        # Start socket listening processes, store as keys in a dictionary with exit commands as values
        self.dig_listeners = {}
        ready = Value('i', 0)
        self.dig_run = Event()
        self.dig_exit = Event()
        for chan, dig in self.chan_to_dig.items():
            socket = dig.get_socket(chan)
            oc = self.chan_to_oc[chan]
            p = Process(target=dig.receive_data,
                        args=(chan, oc, self.dig_exit, ready, self.dig_run))
            self.dig_listeners[p] = self.dig_exit
        assert None not in self.dig_listeners.keys()
        for listener in self.dig_listeners.keys():
            listener.start()

        while ready.value < len(self.chan_to_dig):
            time.sleep(0.1)

        if self.cw_mode:
            for awg in self.awgs:
                awg.run()
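
The ready Value / Event handshake above is a small protocol of its own: each listener bumps a shared counter once it is set up, and the parent spins until the count matches. A minimal sketch; the increment inside the child is an assumption about what receive_data does:

import time
from multiprocessing import Event, Process, Value

def listener(ready, run_flag):
    with ready.get_lock():
        ready.value += 1  # signal "I'm set up"
    run_flag.wait()  # block until the parent says go

if __name__ == '__main__':
    ready = Value('i', 0)
    run_flag = Event()
    procs = [Process(target=listener, args=(ready, run_flag)) for _ in range(3)]
    for p in procs:
        p.start()
    while ready.value < len(procs):
        time.sleep(0.1)  # same spin-wait as in the example
    run_flag.set()
    for p in procs:
        p.join()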
Example #24
 def __init__(self, policy, child_box, left, status_updater):
     self.policy = policy
     self.box = child_box
     self.left = left
     self.expected_size = 0
     self.available_nodes = []
     self.available_edges = []
     self.status_updater = status_updater
     self.in_queue = Queue()
     self.out_queue = Queue()
     self.proc = Process(target=self.__push,
                         args=(self.in_queue, self.out_queue))
     self.proc.start()
     self.last_out = ([], [])
     self.last_in = (array([]), [], [])
     self.retry = False
     self.status_context = status_updater.get_context_id("666")
     self.last_status_message_id = None
Example #25
    def import_dataset(self, request):
        """Performs a dataset import session with all of the workflow - data acquisition, extraction, reading,
        preprocessing, and storing.

        :param request: Django request
        """
        self._validate_request(request)

        parameters = self.django_request_to_import_parameters(request.POST)
        parameters = self._preprocess_import(parameters, request.user, request.FILES)

        process = Process(target=_import_dataset, args=(parameters, self._n_processes, self._process_batch_size))
        process.start()
        # process = None
        # _import_dataset(parameters, n_processes=self._n_processes, process_batch_size=self._process_batch_size)

        self._active_import_jobs[parameters['import_id']] = {
            'process': process,
            'parameters': parameters
        }
Example #26
def queue_joinif(this_queue, kill_signal=None, timeout=None):
    assert (kill_signal is not None) or (
        timeout is not None
    ), "queue_joinif requires either a kill_signal or timout argument."
    t = Process(target=this_queue.join)
    t.start()
    start_time = time.time()
    while True:
        if kill_signal is not None:
            if kill_signal.is_set():
                return False
        if timeout is not None:
            if (time.time() - start_time > timeout):
                return False
        try:
            t.join(1)
            assert not t.is_alive()
            return True
        except AssertionError:
            pass
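
A usage sketch for queue_joinif (assuming the function above is in scope, and a multiprocessing.JoinableQueue whose items the worker marks with task_done()):

import time
from multiprocessing import JoinableQueue, Process

def consume(q):
    while True:
        q.get()
        time.sleep(0.1)  # pretend to work
        q.task_done()

if __name__ == '__main__':
    q = JoinableQueue()
    for i in range(5):
        q.put(i)
    w = Process(target=consume, args=(q,), daemon=True)
    w.start()
    # True once every item is task_done(); False after 10s otherwise.
    print(queue_joinif(q, timeout=10))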
Example #27
def main():
    host = 'heidiz.ddns.net'
    port = 38010  # Set the port number

    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    s.connect((host, port))

    recv_connection = s.recv(4096)
    print(recv_connection.decode())

    pre = Queue()
    post = Queue()

    pre_lock = Lock()
    post_lock = Lock()
    #"""
    thread = dict()

    thread['capture'] = Process(target=capture, args=(s, pre, pre_lock))

    #thread['send'] = Process(target = send, args = (s, pre, pre_lock))

    thread['recv'] = Process(target=recv, args=(s, post, post_lock))
    """
    thread = [None] * 4 
    
    thread[0] = gevent.spawn(capture, s, pre, pre_lock)
    
    thread[1] = gevent.spawn(send, s, pre, pre_lock)
    
    thread[2] = gevent.spawn(recv, s, post, post_lock)
    
    thread[3] = gevent.spawn(display, post, post_lock)

    gevent.joinall(thread)
    """
    #thread['display'] = threading.Thread(target = display, args = (post, post_lock))

    for t in thread.values():
        t.start()

    while True:
        if post.empty():
            time.sleep(1)
            continue

        post_lock.acquire()
        encode = post.get()
        post_lock.release()

        frame = decoder(encode)
        cv2.imshow('frame', frame)

        if cv2.waitKey(1) & 0xFF == ord('q'):
            break

    for t in thread.values():
        t.join()
Example #28
                bcast("Couldn't play " + current)
#######################################################
# Play loaded track
            pygame.mixer.music.play()
# Take user input for controlling player
            while pygame.mixer.music.get_busy():
                if not console:
                    #font = pygame.font.Font(None, 36)
                    #out = font.render(text, 1, (10, 10, 10))
                    #textpos = out.get_rect()
                    ##textpos.centerx = background.get_rect().centerx
                    screen.blit(background, (0, 0))
                    #pygame.display.flip()
                    control2()
                else:
                    t = Process(target=control)
                    t.daemon = False
                    t.start()
#######################################################
            if current not in played:
                played.append(current)
                i = i + 1
            sleep(0.2)
            songNum = songNum + 1
    bcast("All songs have been played!")
    log('All songs have been played')
    shutdown()
#######################################################
except:
    LogErr()
    shutdown()
Example #29
}

# Dict with the excel data loaded as pandas DataFrames
pd_csv = {}


def create_pd_csv_pd(excel, pd_csv):
    """
    Read the pickled excel data into pandas DataFrames
    """
    pd_csv[excel] = pd.read_pickle("src/" + excel)


processes = []
for val in os.listdir("src/"):
    processes.append(Process(target=create_pd_csv_pd, args=(val, pd_csv)))
for process in processes:
    process.start()
for process in processes:
    process.join()
# Note: with multiprocessing.Process each child mutates its own copy of
# pd_csv, so the parent's dict stays empty; a multiprocessing.Manager().dict()
# (or threads) is needed for the results to be visible here.


# print pd_csv.keys()


def compute_data_sizes(pd_csv):
    """
    Isolate the different Units available
    Compute the sum with unit conversion
    store output in MB
    """
Example #30
 def start(self):
     if self.alive():
         return
     self.join()
     self.process = Process(target=self.run)
     self.process.start()