Code example #1
# Assumed imports for this excerpt; PostgresConnector, PostgresNotificationListener,
# CommonErrorStrategy, send_notification, _POSTGRES_DSN and _NOTIF_CHANNEL come
# from the project under test.
from multiprocessing import Queue
from queue import Empty
from time import sleep
from unittest import TestCase
from unittest.mock import MagicMock


class RunningListenerTestCase(TestCase):
    def setUp(self):
        pg_connector = PostgresConnector(_POSTGRES_DSN)
        self.notif_queue = Queue(1)  # bounded: holds a single pending notification
        self.listener = PostgresNotificationListener(
            pg_connector, _NOTIF_CHANNEL, self.notif_queue,
            CommonErrorStrategy(), Queue(), fire_on_start=False
        )
        self.listener.log = MagicMock()  # silence logging during the tests

    def tearDown(self):
        self.listener.terminate()
        self.listener.join()

    def test_notification_listener(self):
        self.assertTrue(self.notif_queue.empty())
        # Start the listener process
        self.listener.start()
        sleep(1)

        send_notification()

        try:
            error = self.listener.error_queue.get(timeout=1)
            self.fail(error)  # any queued error fails the test with its message
        except Empty:
            pass

        notif = self.notif_queue.get(timeout=2)
        self.assertIsNotNone(notif)

    def test_notification_stops_listening_on_terminate(self):
        self.assertTrue(self.notif_queue.empty())

        self.listener.start()
        sleep(3)
        self.listener.terminate()

        # Send a notification and ensure the listener is NOT listening
        send_notification()
        exception_raised = False
        try:
            self.notif_queue.get(timeout=5)
        except Empty:
            exception_raised = True
        self.assertTrue(exception_raised)

    def test_notification_listener_terminates(self):
        self.assertTrue(self.notif_queue.empty())
        self.listener.start()
        sleep(1)
        self.listener.terminate()
        sleep(1)
        self.assertFalse(self.listener.is_alive())
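send_notification() is defined elsewhere in the test module. A minimal hypothetical sketch of such a helper, assuming the listener watches an ordinary Postgres NOTIFY channel and reusing the test's _POSTGRES_DSN and _NOTIF_CHANNEL constants (psycopg2 shown; the project may use a different driver):

import psycopg2

def send_notification():
    # Hypothetical helper: emit a NOTIFY on the channel the listener watches.
    conn = psycopg2.connect(_POSTGRES_DSN)
    conn.autocommit = True  # a NOTIFY reaches listeners only once committed
    with conn.cursor() as cur:
        # The channel name is an identifier, so it is interpolated directly.
        cur.execute("NOTIFY %s, 'test payload'" % _NOTIF_CHANNEL)
    conn.close()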
Code example #2
File: test.py Project: smihica/py-q4pg
# Assumed imports for this excerpt; `q` is the py-q4pg queue instance created
# elsewhere in test.py, and wait_until_convenient() is a project helper.
import os
import sys
from multiprocessing import Process, Queue
from random import uniform
from time import sleep
from timeit import default_timer as timer


def test_multiprocess_tasks():
    wait_until_convenient()
    TAG = "message_q"
    def fetch_task(queue):
        pid = os.getpid()
        count = 0
        for dq in q.listen(TAG, timeout=1):
            s = { 'pid': pid, 'data': dq }
            if dq:
                count += 1
                queue.put(s)
                sleep(uniform(0.1, 0.5)) # sleep 0.1~0.5 seconds randomly
            elif q.count(TAG) == 0:
                return count # the number of tasks done by this process


    test_items = range(0, 10000) # enqueue 10000 tasks
    for i in test_items:
        q.enqueue(TAG, i + 1)

    while q.count(TAG) != len(test_items): # wait until test data is ready
        wait_until_convenient()

    jobs = []
    wait_until_convenient()
    queue = Queue()
    start = timer()
    num_p = 30 # the number of processes to use
    for i in range(0, num_p):
        job = Process(target=fetch_task, args=(queue,))
        jobs.append(job)
        job.start() # start task process

    remaining = q.count(TAG)
    while remaining > 0: # wait until the queue is consumed completely
        remaining = q.count(TAG)
        sys.stdout.write('\rRunning test_multiprocess_tasks - remaining %5d/%5d' % (remaining, len(test_items),))
        sys.stdout.flush()
        wait_until_convenient()

    processed_data = set()
    qsize = 0
    while not queue.empty():
        item = queue.get()
        data = item.get('data')
        qsize += 1
        assert data not in processed_data, "failed test_multiprocess_tasks - data %s has been processed already" % (data, )
        processed_data.add(data)

    queue.close()
    queue.join_thread()
    for j in jobs:
        j.join()

    assert qsize == len(test_items), "failed test_multiprocess_tasks - tasks are not complete %d/%d" % (qsize, len(test_items), )
    end = timer()
    print("\rOK test_multiprocess_tasks - %d done in %5d seconds" % (qsize, end - start))
Code example #3
File: test_utils.py Project: MatthieuDartiailh/ecpy
# Assumed import; TaskDatabase and MeasureSpy are ecpy classes.
from multiprocessing import Queue


def test_spy():
    """Test the measure spy working."""
    q = Queue()
    data = TaskDatabase()
    spy = MeasureSpy(queue=q, observed_database=data,
                     observed_entries=('test',))

    data.notifier(('test', 1))
    assert q.get()

    data.notifier(('test2', 1))
    assert q.empty()

    spy.close()
    assert q.get() == ('', '')
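The assertions pin down the spy's queue protocol: a change to an observed entry ('test') is forwarded to the queue, a change to an unobserved entry ('test2') is not, and close() marks the end of observation by enqueuing the sentinel ('', '').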
Code example #4
    def empty(self):
        '''
        Returns True if the Queue is empty, False otherwise.
        Raises an exception if too many errors are encountered.
        '''
        dt = 1e-3
        while dt < 1:  # exponential backoff: ~10 retries, up to ~1 s apart
            try:
                return Queue.empty(self)
            except IOError:
                logger.warning('IOError encountered in SafeQueue empty()')
                try:
                    time.sleep(dt)
                except Exception:
                    pass
                dt *= 2

        raise IOError('Unrecoverable error')
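The unbound call Queue.empty(self) implies this method belongs to a subclass of multiprocessing.queues.Queue. A minimal sketch of the surrounding class, assuming Python 3 (where the queue class requires an explicit context) and module-level logger and time:

import logging
import time
from multiprocessing import get_context
from multiprocessing.queues import Queue

logger = logging.getLogger(__name__)


class SafeQueue(Queue):
    """Queue whose empty() retries transient IOErrors with backoff."""

    def __init__(self, maxsize=0):
        super().__init__(maxsize, ctx=get_context())

    # empty() as defined above goes here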
Code example #5
File: smp.py Project: jucabot/polymr
# Assumed imports for this excerpt; q_run_mapper, q_run_reducer and
# merge_kv_dict are polymr helpers defined elsewhere in the project.
import math
import traceback
from multiprocessing import Process, Queue


class MultiCoreEngine():

    _mapred = None

    _out_queue = None
    _in_queue = None
    _log_queue = None

    _processes = None

    def __init__(self, mapred):
        self._mapred = mapred
            
    def _start(self, name, cpu, module_name, class_name, params):
        fn = None

        self._processes = []
        self._in_queue = Queue()
        self._out_queue = Queue()
        self._log_queue = Queue()

        if name == "mapper":
            fn = q_run_mapper
        elif name == "reducer":
            fn = q_run_reducer

        for i in range(cpu):
            process = Process(target=fn, args=(module_name, class_name, params,
                                               self._in_queue, self._out_queue,
                                               self._log_queue))
            self._processes.append(process)
            process.start()
    
    def _stop(self):
        # One "STOP" sentinel per worker, then drain the log queue.
        for process in self._processes:
            self._in_queue.put("STOP")

        while not self._log_queue.empty():
            print(self._log_queue.get())
    
    def _get_data_chunks(self):
        chunks = []
        for process in self._processes:
            chunks.append(self._out_queue.get())
        
        return chunks
    
    def _set_data_chunks(self, chunks):
        # map() is lazy in Python 3, so feed the queue with an explicit loop.
        for chunk in chunks:
            self._in_queue.put(chunk)

    def _send_lines(self, lines, cpu, lines_len):
        # Floor division: slice bounds must be ints in Python 3.
        line_splits = [lines[i * lines_len // cpu: (i + 1) * lines_len // cpu]
                       for i in range(cpu)]

        for i in range(cpu):
            self._in_queue.put(line_splits[i])
    
    def _terminate(self):
        for process in self._processes:
            process.join()
            process.terminate()
                
        self._in_queue.close()
        self._out_queue.close()
        self._processes = None
        
    def _force_terminate(self):
        for process in self._processes:
            process.terminate()
            
    def _merge_data(self, data):
       
        self._mapred.data = merge_kv_dict(self._mapred.data,data)
                
    def _merge_reduced_data(self, data):
       
        self._mapred.data_reduced = merge_kv_dict(self._mapred.data_reduced,data)
                
    def _split_data(self, num_splits):
        splits = []
        index = 0
        
        len_data = len(self._mapred.data)
        
        chunk_len = int(math.ceil(len_data / float(num_splits)))
        
        if chunk_len == 0:
            splits.append(self._mapred.data)
        else:        
            for i in range(int(math.ceil(len_data/float(chunk_len)))):
                splits.append({})
                
            for (key, value) in self._mapred.data.items():
                
                i = int(math.floor(index / float(chunk_len)))
                       
                splits[i][key]=value
                
                index = index + 1
        
        return splits
    
    
    def _run_map(self, cpu, cache_line, input_reader):
        self._start("mapper", cpu, self._mapred.__class__.__module__,
                    self._mapred.__class__.__name__, self._mapred.params)

        try:
            map_len = 0
            lines = []
            lines_len = 0
            f = input_reader.read()

            for line in f:
                # Flush a batch of lines to the workers every cache_line lines.
                if lines_len > 0 and lines_len % cache_line == 0:
                    self._send_lines(lines, cpu, lines_len)
                    lines = []
                    lines_len = 0

                lines.append(line)
                lines_len += 1
                map_len += 1

            input_reader.close()

            self._send_lines(lines, cpu, lines_len)

            self._stop()

            for chunk in self._get_data_chunks():
                self._merge_data(chunk)

            self._terminate()

        except Exception as e:
            print("ERROR: Exception while mapping : %s\n%s" % (e, traceback.format_exc()))
            self._force_terminate()

        return map_len
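q_run_mapper and q_run_reducer are not shown in this excerpt. Given the "STOP" sentinel sent by _stop() and the one-result-per-process read in _get_data_chunks(), the worker plausibly looks like the following (a hypothetical sketch, not polymr's actual code; the constructor signature and map() call are assumptions):

import importlib


def q_run_mapper(module_name, class_name, params, in_queue, out_queue, log_queue):
    # Rebuild the user's MapReduce object by name inside the worker process.
    mapred = getattr(importlib.import_module(module_name), class_name)(params)
    # Consume batches of lines until the "STOP" sentinel arrives.
    for lines in iter(in_queue.get, "STOP"):
        for line in lines:
            mapred.map(line)
    # Emit this worker's partial result for _get_data_chunks() to collect.
    out_queue.put(mapred.data)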
Code example #6
# Assumed imports for this excerpt; Consumer and PHYML are defined elsewhere
# in the project.
import os
import sys
import time
from multiprocessing import Queue
from optparse import OptionParser, OptionGroup
from time import strftime


class MultiProcess(object):
    '''
    Class MultiProcess
    An object that can perform multiprocesses
    '''
    def __init__(self, ncpus=1):
        self.ncpus = int(ncpus)
        # Parallelization
        self._parallel = None
        self._paralleltasks = Queue()
        self._parallelresults = Queue()

    def initiateParallel(self):
        self._parallel = [Consumer(self._paralleltasks, self._parallelresults)
                          for x in range(self.ncpus)]
        for consumer in self._parallel:
            consumer.start()

    def addPoison(self):
        # One None "poison pill" per worker tells it to exit its loop.
        for consumer in self._parallel:
            self._paralleltasks.put(None)

    def isTerminated(self):
        for consumer in self._parallel:
            if consumer.is_alive():
                return False
        return True

    def killParallel(self):
        for consumer in self._parallel:
            consumer.terminate()

    def doPHYML(self, indir, tree):
        i = 0
        dres = {}
        redo = open('phymlfail.txt', 'w')
        self.initiateParallel()
        for f in os.listdir(indir):
            if not f.endswith('.phy'):
                continue
            align = f
            obj = PHYML(indir, align, tree)
            self._paralleltasks.put(obj)
        # Poison pill to stop the workers
        self.addPoison()
        while True:
            while not self._parallelresults.empty():
                result = self._parallelresults.get()
                if result[1] != 0:
                    msg(result[0], 'ERR')
                    redo.write('%s\n' % result[0])
                else:
                    msg('%s %d' % (result[0], i), 'IMP')
                    i += 1
            if self.isTerminated():
                break
            time.sleep(0.1)
        # Get the last messages
        while not self._parallelresults.empty():
            result = self._parallelresults.get()
            if result[1] != 0:
                msg(result[0], 'ERR')
                redo.write('%s\n' % result[0])
            else:
                msg('%s %d' % (result[0], i), 'IMP')
                i += 1
        self.killParallel()
        return dres


class Highlighter:
    def __init__(self):
        self._msgTypes = {'INF': '\033[0m',
                          'IMP': '\033[1;32m',
                          'DEV': '\033[1;34m',
                          'ERR': '\033[1;31m',
                          'WRN': '\033[1;33m'}
        self._reset = '\033[0m'
        self._default = 'INF'

    def ColorMsg(self, msg, msgLevel='INF'):
        try:
            s = self._msgTypes[msgLevel] + msg + self._reset
        except KeyError:
            s = self._msgTypes[self._default] + msg + self._reset
        return s


def msg(message, msgLevel='INF', sameline=False):
    o = Highlighter()
    if sameline:
        sys.stderr.write('\r')
    else:
        sys.stderr.write(strftime("%H:%M:%S") + ' ')
    sys.stderr.write(o.ColorMsg(message, msgLevel))
    if not sameline:
        sys.stderr.write('\n')


def creturn():
    sys.stderr.write('\n')


def getOptions():
    '''Retrieve the options passed from the command line'''
    usage = "usage: python parallelPHYML.py [options]"
    parser = OptionParser(usage)

    group1 = OptionGroup(parser, "Inputs")
    group1.add_option('-a', '--aligndir', action="store", dest='align',
                      default='OUT',
                      help='Alignment directory')
    group1.add_option('-t', '--tree', action="store", dest='tree',
                      default='TREE.nwk',
                      help='Tree file')
    group1.add_option('-r', '--threads', action="store", dest='threads',
                      default=1,
                      help='Threads [Default: 1]')
    parser.add_option_group(group1)
    # Parse the options
    return parser.parse_args()


(options, args) = getOptions()

dres = MultiProcess(options.threads).doPHYML(options.align, options.tree)
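Consumer is not defined in this excerpt. The poison-pill protocol in addPoison() and the liveness check in isTerminated() suggest a multiprocessing.Process subclass along these lines (a hypothetical sketch; the real class presumably runs PHYML and reports the (name, returncode) tuples that doPHYML reads back):

from multiprocessing import Process


class Consumer(Process):
    """Hypothetical worker: run queued jobs until the None poison pill."""

    def __init__(self, tasks, results):
        super().__init__()
        self.tasks = tasks
        self.results = results

    def run(self):
        while True:
            job = self.tasks.get()
            if job is None:  # poison pill from addPoison()
                break
            self.results.put(job())  # expected to yield (name, returncode)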