Example #1
class PyAPNSNotification(threading.Thread, NotificationAbstract):
    def __init__(self, host, app_id, cert_file, dev_mode = False, reconnect_interval=10, chunk_size=10):
        super(PyAPNSNotification, self).__init__()
        self.keepRunning = True
        self.is_server_ready = False
        self.notifications = Queue()
        pyapns_client.configure({'HOST': host})
        self.reconnect_interval = reconnect_interval
        self.app_id = app_id
        self.cert_file = cert_file
        self.chunk_size = chunk_size
        if  dev_mode:
            self.mode = 'sandbox'
        else:
            self.mode = 'production'

    def run(self):
        while self.keepRunning or not self.notifications.empty():
            if not self.is_server_ready:
                try:
                    pyapns_client.provision(self.app_id, open(self.cert_file).read(), self.mode)
                    self.is_server_ready = True
                except Exception:
                    if self.keepRunning:
                        self.is_server_ready = False
                        time.sleep(self.reconnect_interval)
                        continue
                    else:
                        break

            tokens = []
            messages = []
            for i in xrange(self.chunk_size):
                if self.notifications.empty() and len(tokens):
                    break

                notification = self.notifications.get()

                if notification is None:
                    self.notifications.task_done()
                    break

                tokens.append(notification['token'])
                messages.append(notification['message'])
                self.notifications.task_done()

            try:
                if len(tokens):
                    pyapns_client.notify(self.app_id, tokens, messages)
            except Exception:
                self.is_server_ready = False
                for i in xrange(len(tokens)):
                    self.notifications.put({'token':tokens[i],'message':messages[i]})

    def stop(self):
        self.keepRunning = False
        self.notifications.put(None)

    def perform_notification(self, token, aps_message):
        self.notifications.put({'token':token,'message':aps_message})
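A rough usage sketch for the class above. The host URL, app id, certificate path, device token, and payload are placeholders, and a reachable pyapns server plus the module's own pyapns_client/NotificationAbstract imports are assumed:

notifier = PyAPNSNotification('http://localhost:7077/', 'com.example.app',
                              '/path/to/cert.pem', dev_mode=True)
notifier.start()                       # run() drains the internal queue in the background
notifier.perform_notification('device-token-hex', {'aps': {'alert': 'Hello'}})
notifier.stop()                        # stop accepting work and unblock the loop with the None sentinel
notifier.join()                        # wait for the worker thread to exit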
Example #2
    class EndPoint(object):
        '''
        Representation of a stream's endpoint.
        '''
        def __init__(self, channel, idx):
            self._channel = channel
            self._i = idx
            self._queue = Queue()
            
        def receive(self, block = True):
            try:
                if self._queue.empty():
#                    print 'Updating for end point: [%d]' % (self._i)
                    self._channel._update(self._i)
            except StreamClosedException:
                if self._queue.empty():
                    raise
                else:
                    pass
#            print 'Returning for end point: [%d]' % (self._i)
            return self._queue.get()

        def processed(self):
            self._queue.task_done()

        def send(self, r):
            self._queue.put(r)
Example #3
class QueuedWavePlayer(object):

    def __init__(self):
        self.queue = Queue();
        freq = 44100     # audio CD quality
        bitsize = -16    # unsigned 16 bit
        channels = 2     # 1 is mono, 2 is stereo
        buffer = 2048    # number of samples (experiment to get right sound)
        pygame.mixer.init(freq, bitsize, channels, buffer)
        self.channel = pygame.mixer.Channel(1)

        self.logger = logging.getLogger("qplayer")

        t=threading.Thread(target=self.playing_target);
        t.daemon = True;
        t.start()


    def playing_target(self):

        while True:
            file_to_play=self.queue.get()
            self.logger.info("Queuing %s in channel",file_to_play)
            self.channel.queue(pygame.mixer.Sound(file_to_play))
            self.queue.task_done()
            while self.channel.get_queue():
                time.sleep(1);


    def queue_for_playing(self,file_to_play):
        self.queue.put(file_to_play)
Example #4
class IWRCBot():
    def __init__(self, site, safe = True):
        self.other_ns = re.compile(u'14\[\[07(' + u'|'.join(site.namespaces()) + u')')
        interwiki.globalvar.autonomous = True
        self.site = site
        self.queue = Queue()
        self.processed = []
        self.safe = safe
        # Start 20 threads
        for i in range(20):
            t = threading.Thread(target=self.worker)
            t.setDaemon(True)
            t.start()

    def worker(self):
        bot = interwiki.InterwikiBot()
        while True:
            # Will wait until one page is available
            bot.add(self.queue.get())
            bot.queryStep()
            self.queue.task_done()

    def addQueue(self, name):
        if self.other_ns.match(name):
            return
        if self.safe:
            if name in self.processed:
                return
            self.processed.append(name)
        page = pywikibot.Page(self.site, name)
        # the Queue has for now an unlimited size,
        # it is a simple atomic append(), no need to acquire a semaphore
        self.queue.put_nowait(page)
Example #5
class CommandQueue(object):
    """Asynchronous command queue that can be used to communicate with blip.pl 
    in the background."""
    
    def __init__(self):
        self.queue = Queue()
        self.worker = threading.Thread(target=self.__worker)
        self.worker.setDaemon(True)
	
    def __del__(self):
        self.Finish()
        
    def __worker(self):
        while True:
            item = self.queue.get(True)
            item()
            self.queue.task_done()
	    
    def Finish(self):
        """Finishes all commands in the queue"""
	
        self.queue.join()
	
    def HasPendingCommands(self):
        """Returns True if the queue is busy"""
	
        return self.queue.qsize() > 0
	
    def Enqueue(self, command):
        """Enqueues a command in the queue. 
        Command must refer to a function without parameters."""

        self.queue.put(command)
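A minimal usage sketch, assuming it runs in the same module as the class. Note that __init__ creates the worker thread but never starts it, so the sketch starts it explicitly before enqueuing work:

done = []
q = CommandQueue()
q.worker.start()                   # the constructor above does not start the worker itself
q.Enqueue(lambda: done.append(1))  # any zero-argument callable
q.Finish()                         # queue.join(): blocks until every enqueued command ran task_done()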
Example #6
class Worker:
    def __init__(self):
        self.q = Queue()
        self.t = Thread(target=self._handle)
        self.t.setDaemon(True)
        self.t.start()

    def _handle(self):
        while True:
            reset_caches()

            fn = self.q.get()
            try:
                fn()
                self.q.task_done()
            except:
                import traceback
                print traceback.format_exc()

    def do(self, fn, *a, **kw):
        fn1 = lambda: fn(*a, **kw)
        self.q.put(fn1)

    def join(self):
        self.q.join()
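A minimal usage sketch, assuming it lives in the same module as the Worker class; a stub reset_caches() is defined because _handle() calls one before every task:

def reset_caches():
    pass                          # stub for the module-level hook _handle() expects

results = []
w = Worker()
w.do(results.append, 'done')      # schedules results.append('done') on the worker thread
w.join()                          # blocks until the queue's unfinished-task count reaches zero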
Example #7
    def test_request_retries_configurable(self):
        # We guess at some ports that will be unused by Riak or
        # anything else.
        client = self.create_client(http_port=DUMMY_HTTP_PORT,
                                    pb_port=DUMMY_PB_PORT)

        # Change the retry count
        client.retries = 10
        self.assertEqual(10, client.retries)

        # The retry count should be a thread local
        retries = Queue()

        def _target():
            retries.put(client.retries)
            retries.join()

        th = Thread(target=_target)
        th.start()
        self.assertEqual(3, retries.get(block=True))
        retries.task_done()
        th.join()

        # Modify the retries in a with statement
        with client.retry_count(5):
            self.assertEqual(5, client.retries)
            self.assertRaises(IOError, client.ping)
Example #8
    def test_create_cell_recalculator_should(self, mock_recalculate):
        unrecalculated_queue = Queue()
        unrecalculated_queue.put(1)
        unrecalculated_queue.put(1)
        unrecalculated_queue.task_done = Mock()

        leaf_queue = Queue()
        leaf_queue.put(sentinel.one)
        leaf_queue.put(sentinel.two)
        leaf_queue.task_done = Mock()

        target = create_cell_recalculator(leaf_queue, unrecalculated_queue, sentinel.graph, sentinel.context)
        target()

        self.assertTrue(unrecalculated_queue.empty())
        self.assertEquals(
            unrecalculated_queue.task_done.call_args_list,
            [ ((), {}), ((), {}), ]
        )

        self.assertTrue(leaf_queue.empty())

        self.assertEquals(
            mock_recalculate.call_args_list,
            [
                ((sentinel.one, leaf_queue, sentinel.graph, sentinel.context), {}),
                ((sentinel.two, leaf_queue, sentinel.graph, sentinel.context), {})
            ]
        )

        self.assertEquals(
            leaf_queue.task_done.call_args_list,
            [ ((), {}), ((), {}), ]
        )
Example #9
class FrameSaver( threading.Thread ):
	def __init__( self ):
		threading.Thread.__init__( self )
		self.daemon = True
		self.name = 'FrameSaver'
		self.reset()
	
	def reset( self ):
		self.queue = Queue()
	
	def run( self ):
		self.reset()
		while 1:
			message = self.queue.get()
			if   message[0] == 'Save':
				cmd, fileName, bib, t, frame = message
				#sys.stderr.write( 'save' )
				PhotoFinish.SavePhoto( fileName, bib, t, frame )
				self.queue.task_done()
			elif message[0] == 'Terminate':
				self.queue.task_done()
				self.reset()
				break
	
	def stop( self ):
		self.queue.put( ['Terminate'] )
		self.join()
	
	def save( self, fileName, bib, t, frame ):
		self.queue.put( ['Save', fileName, bib, t, frame] )
Example #10
class ThreadQueue(object):
    def __init__(self):
        self.q = Queue()

        t = Thread(target=self._thread_worker)
        t.setDaemon(True)
        t.start()

    def add_request(self, func, *args, **kwargs):
        """Add a request to the queue. Pass callback= and/or error= as
           keyword arguments to receive return from functions or exceptions.
        """

        self.q.put((func, args, kwargs))

    def _thread_worker(self):
        while True:
            request = self.q.get()
            self.do_request(request)
            self.q.task_done()

    def do_request(self, (func, args, kwargs)):
        callback = kwargs.pop('callback', None)
        error = kwargs.pop('error', None)

        try:
            r = func(*args, **kwargs)
            if not isinstance(r, tuple): r = (r,)
            if callback: self.do_callback(callback, *r)
        except Exception, e:
            if error: self.do_callback(error, e)
            else: print "Unhandled error:", e
Example #11
class FileGenerator(object):
    def __init__(self):
        self.q = Queue()

    def read_generator(self):
        running = True
        while(running):
            try:
                data = self.q.get(block=True, timeout=1)
                self.q.task_done()
                if data is None:
                    running = False
                else:
                    yield data
            except:
                running = False

    def write(self, s):
        self.q.put(s)

    def close(self):
        self.q.put(None)

    def tell(self):
        return 0

    def flush(self):
        return True
Example #12
class DownloadThreadPool(object):
    def __init__(self, size=3):
        self.queue = Queue()
        self.workers = [Thread(target=self._do_work) for _ in range(size)]
        self.initialized = False

    def init_threads(self):
        for worker in self.workers:
            worker.setDaemon(True)
            worker.start()
        self.initialized = True

    def _do_work(self):
        while True:
            url, target = self.queue.get()
            download(url, target, close_target=True, quiet=False)
            self.queue.task_done()

    def join(self):
        self.queue.join()

    def submit(self, url, target):
        if not self.initialized:
            self.init_threads()
        self.queue.put((url, target))
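A hedged usage sketch; download() is assumed to be provided by the surrounding module, and the URLs and target files are placeholders:

pool = DownloadThreadPool(size=2)
pool.submit('https://example.com/a.tar.gz', open('/tmp/a.tar.gz', 'wb'))
pool.submit('https://example.com/b.tar.gz', open('/tmp/b.tar.gz', 'wb'))
pool.join()                        # returns once every queued download has called task_done()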
Example #13
class WorkerThread(threading.Thread):
    def __init__(self, *args, **kwargs):
        threading.Thread.__init__(self, *args, **kwargs)
        self.input_queue = Queue()

    def send(self, item):
        self.input_queue.put(item)

    def close(self):
        self.input_queue.put(None)
        self.input_queue.join()

    def run(self):
        while True:
            item = self.input_queue.get()
            if item is None:
                break

            # Process the item: replace with useful work
            print item
            self.input_queue.task_done()

        # Done. Indicate that sentinel was received and return
        self.input_queue.task_done()
        return
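A short usage sketch for this worker; close() blocks on input_queue.join() until the None sentinel has been acknowledged with task_done():

w = WorkerThread()
w.start()
for item in ('a', 'b', 'c'):
    w.send(item)                   # each item is processed by run() in order
w.close()                          # put the sentinel, then join() the queue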
Example #14
class Events(threading.Thread):
    def __init__(self, callback):
        super(Events, self).__init__()
        self.queue = Queue()
        self.daemon = True
        self.callback = callback
        self.name = "EVENT-QUEUE"
        self.stop = threading.Event()

    def put(self, type):
        self.queue.put(type)

    def run(self):
        while (not self.stop.is_set()):
            try:
                # get event type
                type = self.queue.get(True, 1)

                # perform callback if we got a event type
                self.callback(type)

                # event completed
                self.queue.task_done()
            except Empty:
                type = None

        # exiting thread
        self.stop.clear()

    # System Events
    class SystemEvent(Event):
        RESTART = "RESTART"
        SHUTDOWN = "SHUTDOWN"
Example #15
class ThreadPool():

    def __init__(self):
        self.q = Queue()
        self.NUM = 3
        self.JOBS = 10

    def do_somthing_using(self,arguments):
        print arguments

    def working(self):
        while True:
            arguments = self.q.get()
            self.do_somthing_using(arguments+1)
            sleep(1)
            self.q.task_done()
    def PoolStart(self):
        for i in range(self.NUM):
            t = Thread(target=self.working)
            t.setDaemon(True)
            t.start()

        for i in range(self.JOBS):
            self.q.put(i)
        self.q.join()
Example #16
class Sceduler:

  def __init__(self, config):

    fs = config.get('scheduler', 'fs', 0)
    dest = config.get('store', 'path', 0)
    self.ioqueue = Queue()
    self.iothread = Thread(target=self.ioprocess)
    self.iothread.daemon = True
    self.observer = Observer()
    self.event_handler = IoTask(self.ioqueue, fs, dest)
    self.observer.schedule(self.event_handler, fs, recursive=True)

  def ioprocess(self):
    while True:
      t = self.ioqueue.get()
      try:
        t.process()
      finally:
        self.ioqueue.task_done()

  def start(self):
    self.observer.start()
    self.iothread.start()

  def stop(self):
    self.observer.stop()
    self.iothread.stop()

  def join(self):
     self.observer.join()
     self.iothread.join()
Example #17
class DecodingThread(threading.Thread):
    """Thread for concurrent simulation.

    A :class:`DecodingThread` is responsible for one specific decoder. As soon as an item is
    placed on the :attr:`jobQueue`, decoding starts. After finishing, the attributes
    :attr:`time`, :attr:`error`, :attr:`objVal` and :attr:`mlCertificate` contain information
    about the solution.

    :param decoder: The :class:`.Decoder` used for this process.
    :param revealSent: If decoding should reveal the sent codeword.

    .. attribute:: jobQueue

      On this queue, pairs (llr, sentCodeword) are put. The process will start decoding
      immediately, and signal :func:`JoinableQueue.task_done` when finished."""

    def __init__(self, decoder, revealSent):
        threading.Thread.__init__(self)
        self.decoder = decoder
        self.jobQueue = Queue()
        self.daemon = True
        self.revealSent = revealSent
        self.time = 0.0
        self.start()

    def run(self):
        while True:
            llr, sent = self.jobQueue.get()
            with Timer() as timer:
                if self.revealSent:
                    self.decoder.decode(llr, sent=sent)
                else:
                    self.decoder.decode(llr)
            self.time = timer.duration
            self.jobQueue.task_done()
Example #18
def test_get_batches_new_chromosome():
    """
    Test to get a batch
    """
    batch_queue = Queue()
    variants = []
    
    first_variant = get_variant_line()
    second_variant = get_variant_line(chrom="2")
    
    variants.append(first_variant)
    variants.append(second_variant)
    
    header = HeaderParser()
    header.parse_header_line("#{0}".format(HEADER))
    
    chromosomes = get_batches(variants=variants, batch_queue=batch_queue, 
    header=header)
    
    batch_1 = batch_queue.get()
    batch_queue.task_done()
    
    batch_2 = batch_queue.get()
    batch_queue.task_done()
    
    assert chromosomes == ['1', '2']
    assert len(batch_1) == 1
    assert len(batch_2) == 1
Example #19
class ThreadPool(object):
    """A thread pool manager."""

    def __init__(self, threads=8):
        """Construct a new thread pool with :ref:`threads` threads.

        :param threads: Number of threads to start in the thread pool.
        """
        self._queue = Queue()
        self._pool = [threading.Thread(target=self._worker)
                      for _ in range(threads)]

    def _worker(self):
        """Waits for and executes jobs from the queue."""
        while True:
            message = self._queue.get()
            if message is None:
                self._queue.task_done()
                return
            job, args, kwargs = message
            try:
                job(*args, **kwargs)
            except Exception, e:
                log.error('thread pool worker failed', exc_info=e)
            self._queue.task_done()
Example #20
def test_get_batches_two_regions():
    """
    Test to get a batch
    """
    batch_queue = Queue()
    variants = []
    first_variant = get_variant_line()
    second_variant = get_variant_line(pos="2", info="Annotation=DDD;Exonic")
    variants.append(first_variant)
    variants.append(second_variant)
    
    header = HeaderParser()
    header.parse_header_line("#{0}".format(HEADER))
    
    chromosomes = get_batches(variants=variants, batch_queue=batch_queue, 
    header=header)
    batch_1 = batch_queue.get()
    batch_queue.task_done()
    
    batch_2 = batch_queue.get()
    batch_queue.task_done()
    
    assert chromosomes == ['1']
    assert len(batch_1) == 1
    assert len(batch_2) == 1
Example #21
def multiget(client, keys, **options):
    """
    Executes a parallel-fetch across multiple threads. Returns a list
    containing :class:`~riak.riak_object.RiakObject` instances, or
    3-tuples of bucket, key, and the exception raised.

    :param client: the client to use
    :type client: :class:`~riak.client.RiakClient`
    :param keys: the bucket/key pairs to fetch in parallel
    :type keys: list of two-tuples -- bucket/key pairs
    :rtype: list
    """
    outq = Queue()

    RIAK_MULTIGET_POOL.start()
    for bucket, key in keys:
        task = Task(client, outq, bucket, key, options)
        RIAK_MULTIGET_POOL.enq(task)

    results = []
    for _ in range(len(keys)):
        if RIAK_MULTIGET_POOL.stopped():
            raise RuntimeError("Multi-get operation interrupted by pool "
                               "stopping!")
        results.append(outq.get())
        outq.task_done()

    return results
Example #22
    def test_interrupt_ctrl_c(self):
        """
        Interrupt "waagent -deprovision" by "ctrl -c"
        """
        self.log.info("Interrupt \"waagent -deprovision\" by \"ctrl -c\"")
        # Start 2 threads:
        # session1 is for running deprovision command and getting output
        # session2 is for getting pid and killing process,

        def session1(q):
            session = self.vm_test01.wait_for_login()
            session.cmd_output("echo {0} | sudo -S sh -c ''".format(self.vm_test01.password))
            session.cmd_output("sudo su -")
            q.put(session.cmd_output("waagent -deprovision").rstrip('\n'))

        def session2():
            time.sleep(5)
            pid = self.vm_test01.get_pid("deprovision")
            self.vm_test01.get_output("kill -2 {0}".format(pid))

        import threading
        from Queue import Queue
        q = Queue()
        thread1 = threading.Thread(target=session1, args=(q,))
        thread1.setDaemon(True)
        thread1.start()
        thread2 = threading.Thread(target=session2)
        thread2.setDaemon(True)
        thread2.start()
        thread1.join()
        output = q.get()
        q.task_done()
        self.log.info(output)
        self.assertNotIn("message=Traceback", output,
                         "Should not raise exception.")
Example #23
def test_get_batches_vep():
    """
    Test to get a batch
    """
    batch_queue = Queue()
    variants = []
    
    first_variant = get_variant_line(info="MQ;CSQ=G|ADK")
    
    second_variant = get_variant_line(pos="2", info="MQ;CSQ=G|ADK")
    
    variants.append(first_variant)
    variants.append(second_variant)

    header = HeaderParser()
    header.parse_header_line("#{0}".format(HEADER))
    header.vep_columns = ['Allele', 'SYMBOL']
    
    chromosomes = get_batches(variants=variants, batch_queue=batch_queue, 
                header=header)
    
    batch_1 = batch_queue.get()
    batch_queue.task_done()
    
    batch_2 = batch_queue.get()
    batch_queue.task_done()
    
    assert chromosomes == ['1']
    assert len(batch_1) == 1
    assert len(batch_2) == 1
Example #24
class ThreadPool(object):
    """Pool of threads consuming tasks from a queue"""
    def __init__(self, workers):
        self.tasks = Queue()
        self.workers = [Worker(self.tasks) for x in xrange(workers)]
        self.state = ThreadPoolState.IDLE

    def apply_async(self, func, args, **kargs):
        """Add a task to the queue"""
        if self.state != ThreadPoolState.IDLE:
            raise ThreadPoolError('ThreadPool cant accept any more tasks')
        self.tasks.put((func, args, kargs))

    def close(self):
        self.state = ThreadPoolState.CLOSED
        while not self.tasks.empty():
            self.tasks.get_nowait()
            self.tasks.task_done()
        for worker in self.workers:
            self.tasks.put((None, (), {}))

    def join(self):
        """Wait for completion of all the tasks in the queue"""
        self.state = ThreadPoolState.WAIT_JOIN
        self.tasks.join()
Example #25
class CQueueObject:
    def __init__(self,sQueueName):
        self.sQueueName = sQueueName
        self.queue = Queue() #(maxsize = 1000)      # processing queue; capacity is unlimited by default

    def GetQueueName(self):
        return self.sQueueName
    def GetQueueSize(self):
        return self.queue.qsize()

    def PutToQueue(self, oObject):
        dictObj = {}
        dictObj['object'] = oObject
        dictObj['ins_time'] = GetCurrentTime()
        self.queue.put(dictObj, block=True)
        return self.queue.qsize()

    def GetFmQueue(self, sThreadName):
        while True:
            try:
                dictObj = self.queue.get(timeout=0.1) #timeout 0.1s
                self.queue.task_done()
                return dictObj
            except Empty: # raised when the Queue is empty
                PrintAndSleep(0.1,'%s.GetFmQueue' % sThreadName,False) # avoid flooding the log
                return None
Example #26
class Listener(object):
    def __init__(self, etype, subject):
        self.type = etype
        self.queue = Queue()
        self.subject = subject
        
    def get(self, block=True, timeout=None):
        try:
            event = self.queue.get(block, timeout)
            self.queue.task_done()
            logger.debug("%s recv event: %s", self, event)
            return event
        except Empty:
            return None
    
    def __repr__(self):
        return "<UeventListener '%s'>" % self.type

    def __str__(self):
        return self.__repr__()
    
    def __enter__(self):
        logger.debug("%s ENTER", self)
        return self
        
    def __exit__(self, t, v, tb):
        logger.debug("%s EXIT", self)
        self.subject.detach(self)
Example #27
class Events(Thread):
    def __init__(self, callback):
        super(Events, self).__init__()
        self.queue = Queue()
        self.daemon = True
        self.alive = True
        self.callback = callback
        self.name = "EVENT-QUEUE"

    def put(self, type):
        self.queue.put(type)

    def run(self):
        while(self.alive):
            try:
                # get event type
                type = self.queue.get(True, 1)

                # perform callback if we got a event type
                self.callback(type)

                # event completed
                self.queue.task_done()
            except Empty:
                type = None

    # System Events
    class SystemEvent(Event):
        RESTART = "RESTART"
        SHUTDOWN = "SHUTDOWN"
Example #28
class NotifWorker(threading.Thread):
    '''
    NotifWorker is a simple worker thread for handling notifications.

    It can be subclassed or call out to external function tasks as appropriate.
    '''
    def __init__(self, is_daemon = True):
        threading.Thread.__init__(self)
        self.name = "NotifWorker"
        self.daemon = is_daemon
        self.tasks = Queue()
        self.start()
        
    def add_task(self, func, *args, **kargs):
        """Add a task to the queue"""
        self.tasks.put((func, args, kargs))
            
    def run(self):
        # wait for a task and process it
        while True:
            func, args, kargs = self.tasks.get()
            # TODO: need a sentinel task to indicate shutdown
            try:
                func(*args, **kargs)
            except Exception, e:
                # TODO: log this somewhere
                print "NotifWorker task raised Exception:"
                print e
            self.tasks.task_done()
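A minimal usage sketch; the task callable and its argument are placeholders for whatever notification work the worker should run:

seen = []
worker = NotifWorker()                            # the daemon thread starts in __init__
worker.add_task(seen.append, 'low-disk-space')    # any callable plus its arguments
worker.tasks.join()                               # block until every queued task ran task_done()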
Example #29
class Sender(threading.Thread):
	def __init__(self, server):
		threading.Thread.__init__(self,None, None, "Sender")
		self.daemon = True # this thread is a daemon; it will stop when all non-daemon threads have stopped
		self._server = server
		self._queue = Queue()
	
	def addMsg(self, mask_from, to, msg):
		self._queue.put((mask_from,to,msg))
	
	def run(self):
		self._server.write("Sender loop start", colorConsol.OKGREEN)
		while not self._server.e_shutdown.is_set():
			try:
				mask_from, to, msg = self._queue.get(True, 2)
			except Empty:
				pass
			else:
				self._send(mask_from, to, msg)
				self._queue.task_done()
		self._server.write("Sender loop stop", colorConsol.WARNING)
	
	def _send(self, mask_from, to, msg):
		self._server.write("send : '%s'"%msg)
		for c in self._server.clients:
			if to & (1 << c.id):
				#self._server.write("to : '%s'"%c.id)
				threading.Thread(None, c.send, "Sender send to %s"%c.id, (mask_from, msg)).start()
Example #30
def main(argv):
    ourhost = platform.node()
    queue = Queue()
    threads = [
        Thread(target=ssh_thread, kwargs={"queue": queue, "host": argv[1]}),
        Thread(target=local_thread, kwargs={"queue": queue})
    ]
    for thread in threads:
        thread.daemon = True
        thread.start()

    while True:
        try:
            while all(map(lambda t: t.is_alive(), threads)):
                try:
                    line = queue.get(timeout=0.1).strip("\n")
                    if ourhost+"/L3" in line and "Scheduling" in line:
                        print("Restarting our services")
                        Thread(target=restart_services).start()
                    queue.task_done()
                except KeyboardInterrupt:
                    raise
                except Empty:
                    continue
                except:
                    raise
        except KeyboardInterrupt:
            break
        except Exception as e:
            print("Exception caught: {}, reticulating splines...".format(e.msg))
        finally:
            return 0
Example #31
class SequentialScheduler (IScheduler):

    def __init__ (self):
        self.rq = None
        self.machine = None

    def reschedule (self, machine):

        self.machine = machine
        self.rq = Queue(-1)

        session = Session()
        programs = session.query(Program).order_by(desc(Program.priority)).filter(Program.finished == False).all()
        
        if not programs:
            return

        log.debug("rescheduling, found %d runnable programs" % len(list(programs)))

        for program in programs:
            self.rq.put(program)

        machine.wakeup()

    def next (self):
        if not self.rq.empty():
            return self.rq.get()

        return None

    def done (self, task, error=None):

        if error:
            log.debug("Error processing program %s." % str(task))
            log.exception(error)
        else:
            task.finished = True
        
        self.rq.task_done()
        self.machine.wakeup()
Example #32
class EnclosureWriter(Thread):
    """
    Writes data to Serial port.
        #. Enqueues all commands received from Mycroft enclosures
           implementation
        #. Process them on the received order by writing on the Serial port

    E.g. Displaying a text on Mycroft's Mouth
        #. ``EnclosureMouth`` sends a text command
        #. ``EnclosureWriter`` captures and enqueue the command
        #. ``EnclosureWriter`` removes the next command from the queue
        #. ``EnclosureWriter`` writes the command to Serial port

    Note: A command has to end with a line break
    """
    def __init__(self, serial, client, size=16):
        super(EnclosureWriter, self).__init__(target=self.flush)
        self.alive = True
        self.daemon = True
        self.serial = serial
        self.client = client
        self.commands = Queue(size)
        self.start()

    def flush(self):
        while self.alive:
            try:
                cmd = self.commands.get()
                self.serial.write(cmd + '\n')
                LOGGER.info("Writing: " + cmd)
                self.commands.task_done()
            except Exception as e:
                LOGGER.error("Writing error: {0}".format(e))

    def write(self, command):
        self.commands.put(str(command))

    def stop(self):
        self.alive = False
        self.join()
Example #33
class Events(threading.Thread):
    def __init__(self, callback):
        super(Events, self).__init__()
        self.queue = Queue()
        self.daemon = True
        self.callback = callback
        self.name = "EVENT-QUEUE"
        self.stop = threading.Event()

    def put(self, type):
        self.queue.put(type)

    def run(self):
        """
        Actually runs the thread to process events
        """
        try:
            while not self.stop.is_set():
                try:
                    # get event type
                    type = self.queue.get(True, 1)

                    # perform callback if we got a event type
                    self.callback(type)

                    # event completed
                    self.queue.task_done()
                except Empty:
                    type = None

            # exiting thread
            self.stop.clear()
        except Exception as e:
            logger.log(u"Exception generated in thread " + self.name + ": " + ex(e), logger.ERROR)
            logger.log(repr(traceback.format_exc()), logger.DEBUG)

    # System Events
    class SystemEvent(Event):
        RESTART = "RESTART"
        SHUTDOWN = "SHUTDOWN"
Example #34
def solution(A, B, M, X, Y):
    my_queue = Queue()

    count = 0
    stops = 0
    while count < len(A):
        free_cap = Y
        actual_floor = 0
        people_count = 0

        while True:
            weight = A[count]
            if weight <= free_cap:
                my_queue.put(count)
                free_cap -= weight
                print("get: {} w:{} free_w:{}".format(count, weight,free_cap))
                count += 1
                people_count +=1
            else:
                break

            if people_count == X or count == len(A):
                break

        while not my_queue.empty():
            val = my_queue.get()
            people_floor = B[val]
            print ("leave: {} floor:{}".format(val, people_floor))
            if actual_floor != people_floor:
                stops += 1
                actual_floor = people_floor
            else:
                print("same floor")

            my_queue.task_done()

        print("come back!")
        stops += 1

    print("stops: {}".format(stops))
Example #35
def getPath(Raw, startBus, endBus):
    # Function to generate the list of paths

    NeighbourDict = getNeighbours(
        Raw
    )  # key: any bus in the raw file, value: set of all neighbours (line and tf)

    # Use CAPENeighbourDict and BFS to find path from one bus to another. Use the concept given in getNeighboursAtCertainDepthFn
    PathDict = {}
    explored = set()
    #startBus = raw_input('Enter start bus: ')
    #endBus = raw_input('Enter end bus: ')

    frontier = Queue(maxsize=0)
    frontier.put(startBus)

    while not frontier.empty():
        currentBus = frontier.get()
        frontier.task_done()
        if currentBus == endBus:
            break

        NeighBourList = list(NeighbourDict[currentBus])

        explored.add(currentBus)

        for neighbour in NeighBourList:
            if neighbour in explored:
                continue

            if currentBus in PathDict.keys():
                PathDict[neighbour] = PathDict[currentBus] + '->' + neighbour
            else:  # if currentBus is the start bus
                PathDict[neighbour] = currentBus + '->' + neighbour

            frontier.put(neighbour)

    Path = PathDict[endBus]

    return Path
Example #36
def main():
    """main function"""
    context = zmq.Context()
    worker = context.socket(zmq.ROUTER)
    worker.bind('tcp://*:5570')
    print 'Worker started'

    receiver = context.socket(zmq.PULL)
    receiver.bind('tcp://*:5503')

    poll = zmq.Poller()
    poll.register(worker, zmq.POLLIN)
    poll.register(receiver, zmq.POLLIN)
    identifier = ""
    queue = Queue(10000)
    count = 0
    received_count = 0
    begin = time.time()
    while True:
        sockets = dict(poll.poll(10))
        if receiver in sockets:
            msg = receiver.recv()
            queue.put(msg)
            received_count = received_count + 1
            print 'message received %d time %d' % (received_count,
                                                   time.time() - begin)
        if worker in sockets:
            ident, req_msg = worker.recv_multipart()
            #print 'Worker received %s from %s' % (req_msg, ident)
            # replies = randint(0,4)
            # for i in range(replies):
            #     time.sleep(1. / (randint(1,10)))
            if not queue.empty():
                msg = queue.get()
                queue.task_done()
                worker.send_multipart([ident, msg])
                count = count + 1

    worker.close()
Example #37
class WorkQueue(object):
    _stopit = object()

    def __init__(self, maxsize=5):
        self._Q = Queue(maxsize=maxsize)

    def push(self, callable):
        self._Q.put_nowait(callable)  # throws Queue.Full

    def push_wait(self, callable):
        self._Q.put(callable)

    def interrupt(self):
        """Break one call to handle()

        eg. Call N times to break N threads.

        This call blocks if the queue is full.
        """
        self._Q.put(self._stopit)

    def handle(self):
        """Process queued work until interrupt() is called
        """
        while True:
            # TODO: Queue.get() (and anything using thread.allocate_lock
            #       ignores signals :(  so timeout periodically to allow delivery
            try:
                callable = self._Q.get(True, 1.0)
            except Empty:
                continue  # retry on timeout
            try:
                if callable is self._stopit:
                    break
                callable()
            except:
                _log.exception("Error from WorkQueue")
            finally:
                self._Q.task_done()
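A usage sketch, under the assumption that the module-level _log and Empty names used by the class are available in the surrounding module, as the example implies; the queued job is a placeholder no-op:

import threading

wq = WorkQueue(maxsize=5)
worker = threading.Thread(target=wq.handle)
worker.start()
wq.push(lambda: None)              # any zero-argument callable
wq.interrupt()                     # queue the stop sentinel so handle() returns after the job
worker.join()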
Example #38
class ThreadPool(object):
    """Pool of threads consuming tasks from a queue"""
    def __init__(self, num_threads):
        self.tasks = Queue()
        self.results = Queue()
        self.num_threads = num_threads

        for _ in range(self.num_threads):
            Worker(self.tasks, self.results)

    def workload(self, func, *args, **kargs):
        """ set current workload for threadpool """

        for _ in range(self.num_threads):
            self.tasks.put((func, args, kargs))

    def query(self):
        """ get return values from worker threads """

        while True:
            yield self.results.get()
            self.results.task_done()
Example #39
def test_queue():
    queue = Queue(1)

    def consumer():
        print 'Consumer waiting'
        queue.get()  # will blocked if queue is empty
        print 'Consumer done'

    def producer():
        print 'Producer putting'
        queue.put(object())  # will blocked if queue is full

        print 'Producer done'

    for _ in range(10):
        threading.Thread(target=producer).start()

    print 'Will consume the items'
    for _ in range(10):
        threading.Thread(target=consumer).start()

    queue.task_done()
Example #40
class ThreadPool():

    def __init__(self, pages, url):
        self.q = Queue()
        self.NUM = 2
        self.JOBS = pages
        self.url=url

    def Clist(self, arg):
        print self.url+str(arg)+".html"
        req = urllib2.Request(self.url+str(arg)+".html")
        html = urllib2.urlopen(req).read()
        #html = html.decode("utf8")
        print html
        Ahref = re.findall('<a class="more".*?href="(.*?)".*?', html, re.S)
        i = 0
        pc = PhoneCrawl()
        for item in Ahref:
            name = str(arg)+"-"+str(i)+".txt"
            pc.CStart(item,name)
            print item+"----------finish!"
            i=i+1

    def working(self):
        while True:
            arguments = self.q.get()
            self.Clist(arguments+1)
            sleep(1)
            self.q.task_done()

    def PoolStart(self):
        for i in range(self.NUM):
            t = Thread(target=self.working)
            t.setDaemon(True)
            t.start()

        for i in range(self.JOBS):
            self.q.put(i)
        self.q.join()
Example #41
    def test_iteration(self):
        """
        Iteration over the pool resources, even when some are claimed,
        should eventually touch all resources (excluding ones created
        during iteration).
        """

        for i in range(25):
            started = Queue()
            n = 1000
            threads = []
            touched = []
            pool = EmptyListPool()
            rand = SystemRandom()

            def _run():
                psleep = rand.uniform(0.05, 0.1)
                with pool.transaction() as a:
                    started.put(1)
                    started.join()
                    a.append(rand.uniform(0, 1))
                    sleep(psleep)

            for i in range(n):
                th = Thread(target=_run)
                threads.append(th)
                th.start()

            for i in range(n):
                started.get()
                started.task_done()

            for resource in pool:
                touched.append(resource)

            for thr in threads:
                thr.join()

            self.assertItemsEqual(pool.resources, touched)
Example #42
class Executor(object):
    def __init__(self):
        self.models = list()
        self.queue = Queue()
    
    def addModel(self,model):
        self.models.append(model)

    def doWork(self):
        mdl = self.queue.get()
        self.singlerun(mdl)
        self.queue.task_done()

    def singlerun(self,mdl):
        map(lambda x: x.start(),mdl.reader)
        mdl.writer.start()
        mdl.observations = list()
        while(self.until(mdl.cyclecount)):
            time.sleep(mdl.cycletime)
            backup = mdl.data.copy()
            mdl.debug('Start cycle in state: ' + mdl.state)
            trigger = mdl.getTrigger()
            rules = filter(lambda l: (l.prev == mdl.state) and mdl.matchTriggers(trigger,l.trig),mdl.behav)
            rules = filter(lambda l: mdl.applyCondition(l.cond),rules)
            trname = ''
            if (len(rules) > 0):
                rule = random.choice(rules)
                trname = rule.name
                mdl.state = rule.next
                mdl.applyActions(rule.acts)
                mdl.debug('Trigger received: ' + rule.trig)
                mdl.debug('Condition ok: ' + rule.cond)
                mdl.debug('Next state: ' + rule.next)
                mdl.debug('Action applied: ' + rule.acts)
            mdl.record(trname)
            mdl.recordStat()
            mdl.checkEvents(backup)
            mdl.cyclecount += 1
            mdl.cleanChannels()        
Example #43
def test_read_thread(session):
    '''
    Test a situation where threads are created outside of any active
    context (hence dry).
    '''
    cfg = {'db_uri': session, 'schema': SCHEMA}
    with connect(cfg):
        create_tables()
        countries = View('country').read().all()
    nb_cty = len(countries)
    assert nb_cty > 2
    read_threads = []
    out_q = Queue()
    in_queues = []

    for i in range(NB_THREADS):
        in_q = Queue(maxsize=1)
        in_queues.append(in_q)
        t = Thread(target=read, args=(in_q, out_q, cfg))
        t.start()
        read_threads.append(t)

    # Launch metronome to feed input lists
    metro_thread = Thread(target=metronome, args=(in_queues, nb_cty))
    metro_thread.start()
    # Loop on results
    is_full = lambda x : len(x) == nb_cty
    per_thread = defaultdict(list)
    while True:
        t_id, c = out_q.get()
        out_q.task_done()
        per_thread[t_id].append(c)
        if all(map(is_full, per_thread.values())):
            break

    # Join everything
    metro_thread.join()
    for t in read_threads:
        t.join()
Example #44
class ThreadQueue(object):

    def __init__(self):
        self.q = Queue()

        t = Thread(target=self._thread_worker)
        t.setDaemon(True)
        t.start()

    def add_request(self, func, *args, **kwargs):
        """Add a request to the queue. Pass callback= and/or error= as
           keyword arguments to receive return from functions or exceptions.

        """
        self.q.put((func, args, kwargs))

    def _thread_worker(self):
        while True:
            request = self.q.get()
            self.do_request(request)
            self.q.task_done()

    def do_request(self, (func, args, kwargs)):
        callback = kwargs.pop('callback', None)
        error = kwargs.pop('error', None)

        def run():
            try:
                r = func(*args, **kwargs)
                if not isinstance(r, tuple):
                    r = (r,)
                if callback:
                    self.do_callback(callback, *r)
            except Exception, e:
                if error:
                    tb = traceback.format_exception(type(e), e, sys.exc_traceback)
                    self.do_callback(error, *(e, tb))
                else:
                    print "Unhandled error:", e
Example #45
class MessageSender(Thread):
    def __init__(self, s):
        Thread.__init__(self)
        self.daemon = True
        self.s = s
        self.q = Queue()
        self.saved_exception = None

    def sendall(self, msg):
        if self.q == None:
            if self.saved_exception != None:
                raise self.saved_exception
            else:
                return
        self.q.put(msg)

    def clear_exception(self):
        self.saved_exception = None

    def exit(self):
        if self.q != None:
            self.q.put(SystemExit)

    def run(self):
        try:
            while True:
                msg = self.q.get()
                if msg == None:
                    self.s.close()
                    self.q = None
                    break
                if msg == SystemExit:
                    raise SystemExit
                self.s.sendall(msg)
                self.q.task_done()
        except (socket.error, SystemExit) as e:
            self.saved_exception = e
            self.q = None  # Release resources
            self.s.close()
Example #46
class Action(tornado.websocket.WebSocketHandler):
    def __init__(self, *args, **kwargs):
        self.q = Queue()
        self.t = None
        self.car = kwargs.pop('car')
        super(Action, self).__init__(*args, **kwargs)

    def check_origin(self, origin):
        return True

    def _execute_actions(self):
        logging.debug("Action()._execute_action()")
        global car  # TODO: Handle in better way
        while True:
            message = self.q.get()
            if message:
                logging.debug("Message => " + message)
                message = message.split(" ")
                method_name = message[1]
                if hasattr(self.car, method_name):
                    method = getattr(self.car, method_name)
                    method()
                    self.q.task_done()
                else:
                    logging.error("Invalid method " + method_name)
            else:
                car.stop()

    def on_message(self, message):
        if not self.t:
            logging.debug("Thread Create for execute action")
            self.t = Thread(target=self._execute_actions)
            self.t.start()
        try:
            logging.debug("Message recevied: " + message)
            self.q.put(message)
        except tornado.websocket.WebSocketClosedError:
            pass
Example #47
def test():
    started = Queue()
    n = 1000
    threads = []
    touched = []
    pool = EmptyListPool()
    rand = SystemRandom()

    def _run():
        psleep = rand.uniform(0.05, 0.1)
        with pool.take() as a:
            started.put(1)
            started.join()
            a.append(rand.uniform(0, 1))
            if psleep > 1:
                print psleep
            sleep(psleep)

    for i in range(n):
        th = Thread(target=_run)
        threads.append(th)
        th.start()

    for i in range(n):
        started.get()
        started.task_done()

    for element in pool:
        touched.append(element)

    for thr in threads:
        thr.join()

    if set(pool.elements) != set(touched):
        print set(pool.elements) - set(touched)
        return False
    else:
        return True
Example #48
class QueueProcessorThread(Thread):
    """Thread to get and process tasks from a queue until queue.done (custom attr) is True"""
    def __init__(self,
                 action,
                 input_queue=None,
                 problem_list=None,
                 output_action=None,
                 **kargs):
        super(QueueProcessorThread, self).__init__(**kargs)
        self.action = action
        self.input_queue = Queue() if input_queue is None else input_queue
        self.problem_list = [] if problem_list is None else problem_list
        self.output_action = output_action

    def run(self):
        while True:
            try:
                task = self.input_queue.get(block=False)
            except Empty:
                log.debug(
                    "[%s] No tasks found, done? %s" %
                    (self.name, getattr(self.input_queue, "done", False)))
                if getattr(self.input_queue, "done", False):
                    break
                time.sleep(.1)
                continue
            try:
                result = self.action(task)
            except:
                log.exception(
                    "Exception on executing task {task}".format(**locals()))
                self.problem_list.append(task)  # list append is thread safe
            else:
                if self.output_action is not None:
                    self.output_action(result)
            finally:
                self.input_queue.task_done()
        log.debug("[%s] Done!" % self.name)
Example #49
class ThreadPool(object):
	def __init__(self,threadNum):
		# work queue
		self.workQueue = Queue()
		# result queue
		self.resultQueue = Queue()
		# thread pool
		self.threadPool = []
		# number of threads
		self.threadNum = threadNum

	# start the worker threads
	def startThreads(self):
		for i in range(self.threadNum):
			self.threadPool.append(Worker(self))

	# wait for all queued work to finish
	def workJoin(self,*args,**kargs):
		self.workQueue.join()

	# add a work task
	def addJob(self,func,*args,**kargs):
		self.workQueue.put((func,args,kargs))

	# mark a work task as done
	def workDone(self,*args):
		self.workQueue.task_done()

	# fetch a result
	def getResult(self,*args,**kargs):
		return self.resultQueue.get(*args,**kargs)

	# stop the worker threads
	def stopThreads(self):
		for thread in self.threadPool:
			#thread.join()
			thread.stop()
		del self.threadPool[:]
Example #50
class Threader:
    """Threader class.

    Threader class is responsible for making multiple parallel requests
    """
    def __init__(self, num_threads=5):
        self.concurrent = num_threads
        self.queue = Queue(num_threads * 2)
        self.obj = None

    def attach(self, obj):
        self.obj = obj

    def job(self):
        while True:
            url = self.queue.get()
            if self.obj is not None:
                response = self.obj.worker(url)
                self.result(response)
            self.queue.task_done()

    def result(self, response):
        try:
            self.obj.result(response)
        except Exception:
            print "Exception occured"

    def start(self):
        for i in range(self.concurrent):
            t = Thread(target=self.job)
            t.daemon = True
            t.start()

    def submit(self):
        try:
            self.obj.prepare(self.queue)
        except KeyboardInterrupt:
            sys.exit(1)
Example #51
class Worker:
    def __init__(self):
        self.q = Queue()
        self.t = Thread(target=self._handle)
        self.t.setDaemon(True)
        self.t.start()

    def _handle(self):
        while True:
            fn = self.q.get()
            try:
                fn()
                self.q.task_done()
            except:
                import traceback
                print traceback.format_exc()

    def do(self, fn, *a, **kw):
        fn1 = lambda: fn(*a, **kw)
        self.q.put(fn1)

    def join(self):
        self.q.join()
Example #52
class WorkerThread(Thread):
    def __init__(self, *args, **kwargs):
        Thread.__init__(self, *args, **kwargs)
        self.input_queue = Queue()

    def send(self, item):
        self.input_queue.put(item)

    def close(self):
        self.input_queue.put(None)
        self.input_queue.join()
        print('close....')

    def run(self):
        while True:
            item = self.input_queue.get()
            print('.' * 50)
            if item is None:
                break
            print(item)
            self.input_queue.task_done()
        self.input_queue.task_done()
        return
Example #53
class Parallel:
    def __init__(self, ncpu, njobs):
        self.running = Queue(ncpu)
        self.returned = Queue()
        self.njobs = njobs

    def run(self, cmd, args):
        wrap = Wrap(self, (cmd, args), self.returned)
        thread = Thread(None, wrap)
        thread.start()

    def __call__(self, cmd, args):
        if type(cmd) == str:
            print cmd
            for a in args:
                cmd += " %s " % a
            args = (cmd, )
            cmd = commands.getstatusoutput
        self.running.put((cmd, args))
        ret = cmd(*args)
        self.running.get()
        self.running.task_done()
        return ret
Example #54
class on_thread_thread(BackgroundThread):
    'yet another consumer thread'

    def __init__(self, name, daemon=True):
        BackgroundThread.__init__(self, name=name)
        self.setDaemon(daemon)
        self.work = Queue()
        self.done = False

    def run(self):
        self.BeforeRun()
        try:
            SEHGuard(self._consumer_loop)
        finally:
            self.AfterRun()

    def _consumer_loop(self):
        while not self.done:
            setattr(self, 'loopcount', getattr(self, 'loopcount', 0) + 1)
            func, args, kwargs = self.work.get()
            try:
                func(*args, **kwargs)
            except Exception:
                print_exc()
            self.work.task_done()

        on_thread(self.name)._done()

    def queue(self, func, *a, **k):
        if __debug__:
            import traceback
            self.last_stack = traceback.format_stack()
        self.work.put((func, a, k))

    def join(self):
        self.done = True
        self.work.join()
Example #55
class csvWriter:
	def __init__(self, data, concurrentThreads):
		self.concurrent = concurrentThreads
		self.q = Queue(concurrentThreads)
		self.dataset = data
		self.lock = threading.Lock()

		print("Outputing csv files..")



	def createThreading(self,args,fileName,header):
		for i in range(self.concurrent):
			t = threading.Thread(target=self.writingcsv,args=(args,fileName,header))
			t.daemon = True
			t.start()

		for data in self.dataset:
			self.q.put(data)

		self.q.join()
		if(len(dataset)>0):

			return True
		return False

	def writingcsv(self,args,fileName,header):

		csvfile = open( fileName, 'wb')
		writer = csv.writer(csvfile, delimiter=',', lineterminator='\r\n', quoting=csv.QUOTE_NONE, escapechar=' ')
		writer.writerow(header)
		with self.lock:
			while not self.q.empty():
				data = self.q.get()
				writer.writerow(data)
				self.q.task_done()
		csvfile.close()
Example #56
class SequentialScheduler(IScheduler):
    def __init__(self):
        self.rq = None
        self.machine = None

    def reschedule(self, machine):

        self.machine = machine

        self.rq = Queue(-1)

        #programs = Program.query.all()

        #log.debug("rescheduling, found %d programs." % len(programs))

        #        for program in programs:
        #            for obs in program.observations:
        #                for exp in obs.exposures:
        #                    self.rq.put(exp)

        exps = Exposure.query.filter_by(finished=False).all()

        log.debug("rescheduling, found %d exposures." % len(exps))
        for exp in exps:
            self.rq.put(exp)

        machine.wakeup()

    def next(self):
        if not self.rq.empty():
            return self.rq.get()

    def done(self, task):
        task.finished = True
        task.flush()
        self.rq.task_done()
        self.machine.wakeup()
Example #57
    def test_thread_safety(self):
        """
        The pool should allocate n objects for n concurrent operations.
        """
        n = 10
        pool = EmptyListPool()
        readyq = Queue()
        finishq = Queue()
        threads = []

        def _run():
            with pool.transaction() as resource:
                readyq.put(1)
                resource.append(currentThread())
                finishq.get(True)
                finishq.task_done()

        for i in range(n):
            th = Thread(target=_run)
            threads.append(th)
            th.start()

        for i in range(n):
            readyq.get()
            readyq.task_done()

        for i in range(n):
            finishq.put(1)

        for thr in threads:
            thr.join()

        self.assertEqual(n, len(pool.resources))
        for resource in pool.resources:
            self.assertFalse(resource.claimed)
            self.assertEqual(1, len(resource.object))
            self.assertIn(resource.object[0], threads)
Example #58
class Threads(object):
	def thread(self,*args):
		thread_count = self.global_options['threads']
		self.stopped = threading.Event()
		self.q = Queue()
		self.q.put(args[0])
		threads = []
		for i in range(thread_count):
			t = threading.Thread(target=self.thread_wrapper,args=args[1:])
			threads.append(t)
			t.setDaemon(True)
			t.start()
		try:
			while not self.q.empty():
				time.sleep(2)
		except KeyboardInterrupt:
			warn2('Waiting for threads to exit...')
			self.stopped.set()
			for t in threads:
				t.join()
			raise
		self.q.join()
		self.stopped.set()

	def thread_wrapper(self,*args):
		thread_name = threading.current_thread().name
		while not self.stopped.is_set():
			try:
				obj = self.q.get_nowait()
			except Empty:
				continue
			try:
				self.module_thread(obj,*args)
			except Exception as e:
				warn(e.message)
			finally:
				self.q.task_done()
Example #59
class workerThread(threading.Thread):
    def __init__(self, *args, **kwargs):
        threading.Thread.__init__(self, *args, **kwargs)
        self.input_queue = Queue()

    def send(self, item):
        self.input_queue.put_nowait(item)

    def close(self):
        self.input_queue.put_nowait(None)
        self.input_queue.join()

    def run(self):
        while True:
            # time.sleep(3)
            item = self.input_queue.get(
            )  # error:if replaced with self.input_queue.get_nowait()
            if item is None:
                print 'thread is closed'
                break
            print item
            self.input_queue.task_done()
        self.input_queue.task_done()
        return
Example #60
class LocalVolatileTaskQueue(TaskQueueBase):

    implements(ITaskQueue)

    def __init__(self, **kwargs):
        self.queue = Queue()

    def __len__(self):
        return self.queue.qsize()

    def put(self, task):
        self.queue.put(task, block=True)

    def get(self, *args, **kwargs):
        try:
            return self.queue.get(block=False)
        except Empty:
            return None

    def task_done(self, *args, **kwargs):
        self.queue.task_done()

    def reset(self):
        self.queue = Queue()
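A usage sketch, assuming TaskQueueBase, ITaskQueue, and implements come from the surrounding package (the latter two presumably from zope.interface); the task object is a placeholder string:

tq = LocalVolatileTaskQueue()
tq.put('rebuild-index')            # placeholder task object
task = tq.get()                    # non-blocking: returns None when the queue is empty
if task is not None:
    tq.task_done()
tq.reset()                         # drop any remaining tasks by swapping in a fresh Queue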