Example #1
 def test_error(self):
     """
     Exception raised running unit test is reported as an error
     """
     # Parent directory setup
     os.chdir(self.tmpdir)
     sub_tmpdir = tempfile.mkdtemp(dir=self.tmpdir)
     basename = os.path.basename(sub_tmpdir)
     # Child setup
     fh = open(os.path.join(basename, '__init__.py'), 'w')
     fh.write('\n')
     fh.close()
     fh = open(os.path.join(basename, 'test_pool_runner_dotted_fail.py'), 'w')
     fh.write(dedent(
         """
         import unittest
         class A(unittest.TestCase):
             def testError(self):
                 raise AttributeError
         """))
     fh.close()
     module_name = basename + '.test_pool_runner_dotted_fail.A.testError'
     result = Queue()
     poolRunner(module_name, result)
     result.get()  # discard the first queued item; the error result is the second
     self.assertEqual(len(result.get().errors), 1)
Example #2
  def test_sigpipe(self):
    r, w = os.pipe()
    outstream = os.fdopen(w, 'w')
    task = self.create_task(self.context(console_outstream=outstream))
    raised = Queue(maxsize=1)

    def execute():
      try:
        task.execute()
      except IOError as e:
        raised.put(e)

    execution = threading.Thread(target=execute, name='ConsoleTaskTestBase_sigpipe')
    execution.setDaemon(True)
    execution.start()
    try:
      data = os.read(r, 5)
      self.assertEqual('jake\n', data)
      os.close(r)
    finally:
      task.stop()
      execution.join()

    with self.assertRaises(Empty):
      e = raised.get_nowait()

      # Instead of taking the generic assertRaises raises message, provide a more detailed failure
      # message that shows exactly what untrapped error was on the queue.
      self.fail('task raised {0}'.format(e))
Example #3
class AntiFlapping(object):
    """
    AntiFlapping class to process events in a timely manner.
    """
    def __init__(self, window):
        self.window = window
        self.tasks = Queue(maxsize=1)
        self._window_ended = True
        self._thread = Thread(name="AntiFlapping", target=self._run)
        self._thread.start()

    def newEvent(self, func, kwargs=None):
        """
        Schedule a new event; it is dropped if the previous event's window is still open.
        """
        if kwargs is None:  # avoid the mutable-default-argument pitfall
            kwargs = {}
        if not self.tasks.full() and self._window_ended:
            self.tasks.put({'func': func, 'args': kwargs})

    def _run(self):
        """
        internal runloop that will fire tasks in order.
        """
        while True:
            task = self.tasks.get()
            self._window_ended = False
            sleep(self.window)
            self._window_ended = True
            if task['args']:
                task['func'](**task['args'])
            else:
                task['func']()
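A minimal usage sketch (assuming the same imports the class relies on: Thread, Queue and sleep). Events that arrive while a window is open, or while one task is already queued, are silently dropped:

def blink(color='red'):
    print('blinking %s' % color)

flap = AntiFlapping(window=5.0)          # 5-second quiet window
flap.newEvent(blink)                     # accepted: queue empty, window ended
flap.newEvent(blink, {'color': 'blue'})  # likely dropped while the first is pending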
Example #4
class DecodingThread(threading.Thread):
    """Thread for concurrent simulation.

    A :class:`DecodingThread` is responsible for one specific decoder. As soon as an item is
    placed on the :attr:`jobQueue`, decoding starts. After finishing, the attributes
    :attr:`time`, :attr:`error`, :attr:`objVal` and :attr:`mlCertificate` contain information
    about the solution.

    :param decoder: The :class:`.Decoder` used by this thread.
    :param revealSent: If decoding should reveal the sent codeword.

    .. attribute:: jobQueue

      On this queue, pairs (llr, sentCodeword) are put. The thread will start decoding
      immediately, and signal :func:`JoinableQueue.task_done` when finished."""

    def __init__(self, decoder, revealSent):
        threading.Thread.__init__(self)
        self.decoder = decoder
        self.jobQueue = Queue()
        self.daemon = True
        self.revealSent = revealSent
        self.time = 0.0
        self.start()

    def run(self):
        while True:
            llr, sent = self.jobQueue.get()
            with Timer() as timer:
                if self.revealSent:
                    self.decoder.decode(llr, sent=sent)
                else:
                    self.decoder.decode(llr)
            self.time = timer.duration
            self.jobQueue.task_done()
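A hedged usage sketch; decoder, llr and codeword stand in for whatever the surrounding simulation provides. The caller submits a job and blocks on join() until the thread calls task_done():

worker = DecodingThread(decoder, revealSent=True)  # starts itself in __init__
worker.jobQueue.put((llr, codeword))               # decoding begins immediately
worker.jobQueue.join()                             # wait for task_done()
print(worker.time)                                 # seconds spent in decode()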
Example #5
File: event.py Project: sijis/salt
    def __test_event_fire_ipc_mode_tcp(self):
        events = Queue()

        def get_event(events):
            me = event.MinionEvent(**self.sub_minion_opts)
            events.put_nowait(
                me.get_event(wait=10, tag='salttest', full=False)
            )

        threading.Thread(target=get_event, args=(events,)).start()
        time.sleep(1)   # give the listener thread time to start and subscribe

        ret = self.run_function(
            'event.fire', ['event.fire: just test it!!!!', 'salttest'],
            minion_tgt='sub_minion'
        )
        self.assertTrue(ret)

        eventfired = events.get(block=True, timeout=10)
        self.assertIsNotNone(eventfired)
        self.assertIn('event.fire: just test it!!!!', eventfired)

        ret = self.run_function(
            'event.fire', ['event.fire: just test it!!!!', 'salttest-miss'],
            minion_tgt='sub_minion'
        )
        self.assertTrue(ret)

        with self.assertRaises(Empty):
            eventfired = events.get(block=True, timeout=10)
Example #6
    def test_create_cell_recalculator_should(self, mock_recalculate):
        unrecalculated_queue = Queue()
        unrecalculated_queue.put(1)
        unrecalculated_queue.put(1)
        unrecalculated_queue.task_done = Mock()

        leaf_queue = Queue()
        leaf_queue.put(sentinel.one)
        leaf_queue.put(sentinel.two)
        leaf_queue.task_done = Mock()

        target = create_cell_recalculator(leaf_queue, unrecalculated_queue, sentinel.graph, sentinel.context)
        target()

        self.assertTrue(unrecalculated_queue.empty())
        self.assertEqual(
            unrecalculated_queue.task_done.call_args_list,
            [ ((), {}), ((), {}), ]
        )

        self.assertTrue(leaf_queue.empty())

        self.assertEqual(
            mock_recalculate.call_args_list,
            [
                ((sentinel.one, leaf_queue, sentinel.graph, sentinel.context), {}),
                ((sentinel.two, leaf_queue, sentinel.graph, sentinel.context), {})
            ]
        )

        self.assertEqual(
            leaf_queue.task_done.call_args_list,
            [ ((), {}), ((), {}), ]
        )
Example #7
class LinkQueue:
    """
    A Thread-safe queue of unique elements.
    """

    def __init__(self, maxsize=10000):
        self.items = []
        self.items_lock = threading.Lock()
        self.queue = Queue(maxsize=maxsize)

    def __str__(self):
        return self.items.__str__()

    def put_all(self, items):
        for i in items:
            # items_lock makes the membership check inside put() atomic;
            # note that put() can block while holding the lock if the queue is full
            with self.items_lock:
                self.put(i)

    def put(self, item):
        if item not in self.items:
            self.queue.put(item)
            self.items.append(item)

    def pop(self):
        item = self.queue.get(block=True)
        return item

    def size(self):
        return self.queue.qsize()
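A short sketch of the deduplication behaviour (URLs are made up):

links = LinkQueue(maxsize=100)
links.put_all(['http://a.example', 'http://b.example', 'http://a.example'])
print(links.size())  # 2 -- the duplicate was filtered out
print(links.pop())   # 'http://a.example', FIFO order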
Example #8
    def download_cover(self, log, result_queue, abort,  # {{{
            title=None, authors=None, identifiers={}, timeout=30, get_best_cover=False):
        cached_url = self.get_cached_cover_url(identifiers)
        if cached_url is None:
            log.info('No cached cover found, running identify')
            rq = Queue()
            self.identify(log, rq, abort, title=title, authors=authors,
                    identifiers=identifiers)
            if abort.is_set():
                return
            results = []
            while True:
                try:
                    results.append(rq.get_nowait())
                except Empty:
                    break
            results.sort(key=self.identify_results_keygen(
                title=title, authors=authors, identifiers=identifiers))
            for mi in results:
                cached_url = self.get_cached_cover_url(mi.identifiers)
                if cached_url is not None:
                    break
        if cached_url is None:
            log.info('No cover found')
            return

        if abort.is_set():
            return
        br = self.browser
        log('Downloading cover from:', cached_url)
        try:
            cdata = br.open_novisit(cached_url, timeout=timeout).read()
            result_queue.put((self, cdata))
        except:
            log.exception('Failed to download cover from:', cached_url)
Example #9
def main():
    print "[Facebook Album Downloader v1]"
    start = timeit.default_timer()

    # hide images
    prefs = {"profile.managed_default_content_settings.images": 2}
    extensions = webdriver.ChromeOptions()
    extensions.add_experimental_option("prefs", prefs)
    browser = webdriver.Chrome(executable_path="chromedriver", chrome_options=extensions)

    findAlbum(browser)
    createAlbumPath()

    queue = Queue()

    for x in range(max_workers):
        worker = DownloadWorker(queue)
        worker.daemon = True
        worker.start()

    print "[Getting Image Links]"
    linkImages = getImageLinks(browser)
    print "[Found: " + str(len(linkImages)) + "]"

    for fullRes in linkImages:
        queue.put(fullRes)

    print "[Downloading...]"
    queue.join()

    browser.quit()

    stop = timeit.default_timer()
    print "[Time taken: %ss]" % str(stop - start)
    raw_input("Press any key to continue...")
Example #10
  def read(self, timeout=None):
    read_queue = Queue()

    def enqueue_output():
      for block in iter(self._proc.stdout.read, b''):
        read_queue.put(block)

      read_queue.put('')

    thread = Thread(target=enqueue_output)
    thread.daemon = True
    thread.start()

    output = ''

    try:
      started = time()

      while timeout is None or timeout >= float_info.epsilon:
        s = read_queue.get(timeout=timeout)

        if s:
          output += s
        else:
          return output

        if timeout is not None:
          now = time()
          timeout -= (now - started)  # spend part of the budget on this iteration
          started = now               # reset so elapsed time is not double-counted
    except Empty:
      return output
Example #11
class SharedCounter(object):
    """Thread-safe counter.

    Note that the value read is not synchronized: never compute a new value
    from a previously read one. The only reliable operations are increment
    and decrement.

    Example

        >>> max_clients = SharedCounter(initial_value=10)

        # Thread one
        >>> max_clients += 1 # OK (safe)

        # Thread two
        >>> max_clients -= 3 # OK (safe)

        # Main thread
        >>> if client >= int(max_clients): # Max clients now at 8
        ...    wait()


        >>> max_client = max_clients + 10 # NOT OK (unsafe)

    """

    def __init__(self, initial_value):
        self._value = initial_value
        self._modify_queue = Queue()

    def increment(self, n=1):
        """Increment value."""
        self += n
        return int(self)

    def decrement(self, n=1):
        """Decrement value."""
        self -= n
        return int(self)

    def _update_value(self):
        self._value += sum(consume_queue(self._modify_queue))
        return self._value

    def __iadd__(self, y):
        """``self += y``"""
        self._modify_queue.put(y * +1)
        return self

    def __isub__(self, y):
        """``self -= y``"""
        self._modify_queue.put(y * -1)
        return self

    def __int__(self):
        """``int(self) -> int``"""
        return self._update_value()

    def __repr__(self):
        return "<SharedCounter: int(%s)>" % str(int(self))
Example #12
class IWRCBot():
    def __init__(self, site, safe = True):
        self.other_ns = re.compile(u'14\[\[07(' + u'|'.join(site.namespaces()) + u')')
        interwiki.globalvar.autonomous = True
        self.site = site
        self.queue = Queue()
        self.processed = []
        self.safe = safe
        # Start 20 threads
        for i in range(20):
            t = threading.Thread(target=self.worker)
            t.setDaemon(True)
            t.start()

    def worker(self):
        bot = interwiki.InterwikiBot()
        while True:
            # Will wait until one page is available
            bot.add(self.queue.get())
            bot.queryStep()
            self.queue.task_done()

    def addQueue(self, name):
        if self.other_ns.match(name):
            return
        if self.safe:
            if name in self.processed:
                return
            self.processed.append(name)
        page = pywikibot.Page(self.site, name)
        # the Queue has for now an unlimited size,
        # it is a simple atomic append(), no need to acquire a semaphore
        self.queue.put_nowait(page)
Example #13
class WorkerThread(Thread):

    def __init__(self):
        """Create a worker thread. Start it by calling the start() method."""
        self.queue = Queue()
        Thread.__init__(self)

    def stop(self):
        """Stop the thread a.s.a.p., meaning whenever the currently running
        job is finished."""
        self.working = 0
        self.queue.put(None)

    def scheduleWork(self, func, *args, **kwargs):
        """Schedule some work to be done in the worker thread."""
        self.queue.put((func, args, kwargs))

    def run(self):
        """Fetch work from a queue, block when there's nothing to do.
        This method is called by Thread, don't call it yourself."""
        self.working = 1
        while self.working:
            work = self.queue.get()
            if work is None or not self.working:
                break
            func, args, kwargs = work
            pool = NSAutoreleasePool.alloc().init()
            try:
                func(*args, **kwargs)
            finally:
                # delete all local references; if they are the last refs they
                # may invoke autoreleases, which should then end up in our pool
                del func, args, kwargs, work
                del pool
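The NSAutoreleasePool in run() marks this as PyObjC code, but the scheduling protocol is generic. A minimal sketch of the intended use:

import time

def job(name, greeting='hello'):
    print('%s, %s' % (greeting, name))

worker = WorkerThread()
worker.start()
worker.scheduleWork(job, 'queue', greeting='hi')  # runs on the worker thread
time.sleep(0.1)  # crude: stop() immediately after could beat the job to the queue
worker.stop()    # let the current job finish, then run() exits
worker.join()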
Example #14
class Source:
    def __init__(self, task_list, results, timeout, verbose = False):
        self.tasks = Queue()
        for task in task_list:
            self.tasks.put_nowait(task)

        self.results = results
        self.timeout = timeout
        self.verbose = verbose

    def start(self, worker_count):
        t0 = datetime.now()

        sink = Sink(self.results)
        self.workers = [ Worker(_+1, self.tasks, sink, self.timeout, self.verbose) for _ in range(worker_count) ]
        if self.verbose:
            print('[P] Starting workers.')
        for w in self.workers:
            w.t0 = t0
            w.start()
        ans = self.join_workers()
        if self.verbose:
            print('[P] Finished.')
        return ans

    def join_workers(self):
        try:
            for w in self.workers:
                w.join(20000)
            return True
        except KeyboardInterrupt:
            for w in self.workers:
                w.stop = True
            return False
Example #15
class HttpPool(object):
    def __init__(self, threads_count, fail_op, log):   
        self._tasks = Queue()   
        self._results = Queue()   
           
        for i in xrange(threads_count):
            thread.start_new_thread(get_remote_data,
                                    (self._tasks, self._results, fail_op, log))
               
    def add_task(self, tid, host, url, params, headers = {}, method = 'GET', timeout = None):   
        task = {   
            'id' : tid,   
            'conn_args' : {'host' : host} if timeout is None else {'host' : host, 'timeout' : timeout},   
            'headers' : headers,   
            'url' : url,   
            'params' : params,   
            'method' : method,   
            }   
        try:   
            self._tasks.put_nowait(task)   
        except Full:   
            return False  
        return True  
           
    def get_results(self):   
        results = []   
        while True:   
            try:   
                res = self._results.get_nowait()   
            except Empty:   
                break  
            results.append(res)   
        return results   
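A hedged usage sketch; get_remote_data (the worker function) is not shown, and fail_op and log are whatever it expects:

pool = HttpPool(threads_count=4, fail_op=None, log=None)
pool.add_task('task-1', 'www.example.com', '/index.html', params='', timeout=10)
# ...later, collect whatever the workers have finished so far:
for res in pool.get_results():
    print(res)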
Example #16
File: cron.py Project: jkol36/flock
def track_followers():
	lock = Lock()
	queue = Queue()
	threads = []
	for acc in SocialProfile.objects.filter(jobs__isnull=False).distinct():
		if acc.is_executing_jobs:
			continue
		jobs = acc.jobs.filter(action="TRACK_FOLLOWERS")
		if jobs:
			threads.append(JobExecuter(lock=lock,account=acc, queue=queue, jobs=jobs))
	for thread in threads:
		thread.account.is_executing_jobs = True
		thread.account.save()
		thread.start()

	while threads:
		try:
			executer = queue.get(timeout=1)
		except:
			executer = None
		if executer:
			threads.remove(executer)
			executer.account.is_executing_jobs = False
			executer.account.save()
		else:
			threads[:] = [t for t in threads if t.isAlive()]
Example #18
File: cron.py Project: jkol36/flock
def fetch_account_info():
	queue = Queue()
	threads = []
	lock = Lock()

	for acc in SocialProfile.objects.filter(jobs__isnull=False).distinct():
		jobs = acc.jobs.filter(Q(action="GET_ACCOUNT_INFO") | Q(action="LOOKUP_ID"))
		if jobs:
			threads.append(AccountFetch(account=acc, queue=queue, jobs=jobs))
				
	for thread in threads:
		thread.account.is_executing_jobs = True
		thread.account.save()
		thread.start()

	while threads:
		try:
			executer = queue.get(timeout=1)
		except:
			executer = None
		if executer:
			threads.remove(executer)
			executer.account.is_executing_jobs = False
			executer.account.save()
		else:
			threads[:] = [t for t in threads if t.isAlive()]
Example #19
class BlockingDirectoryIterator(object):
    """
    iterator that blocks and yields new files added to a directory

    use like this:
        for filename in BlockingDirectoryIterator('/tmp','A*.DAT').get_files():
            print filename
    """
    def __init__(self, directory, wildcard, interval=1):
        self._values = Queue()
        self._exception = None
        self._ready = Event()
        self._poller = DirectoryPoller(directory, wildcard, self._on_condition, self._on_exception, interval)
        self._poller.start()
    def __iter__(self):
        return self
    def get_files(self):
        while True:
            # could have exception or list of filenames
            out = self._values.get()
            if isinstance(out, Exception):
                raise out
            else:
                yield out
    def cancel(self):
        self._poller.shutdown()
    def _on_condition(self, filenames):
        for file in filenames:
            self._values.put(file)
    def _on_exception(self, exception):
        self._values.put(exception)
Example #20
    def __init__( self, date, warcs, viral, logs, identifiers ):
        self.warcs = []
        self.viral = []
        self.date = date
        self.wq = Queue()
        self.vq = Queue()

        for i in range(NUM_THREADS):
            worker = Thread(target=create_warcs, args=(self.wq, self.warcs))
            worker.setDaemon(True)
            worker.start()

        for warc in warcs:
            self.wq.put(warc)
        self.wq.join()

        for i in range(NUM_THREADS):
            worker = Thread(target=create_warcs, args=(self.vq, self.viral))
            worker.setDaemon(True)
            worker.start()

        for warc in viral:
            self.vq.put(warc)
        self.vq.join()

        self.logs = []
        for log in logs:
            self.logs.append( ZipContainer( path=log ) )
        self.identifiers = identifiers
        self.createDomainMets()
        self.createCrawlerMets()
Example #21
class PooledPg:
	"""A very simple PostgreSQL connection pool.

	After you have created the connection pool,
	you can get connections using connection().
	"""

	def __init__(self, maxconnections, *args, **kwargs):
		"""Set up the PostgreSQL connection pool.

		maxconnections: the number of connections cached in the pool
		args, kwargs: the parameters that shall be used to establish
			the PostgreSQL connections using pg.connect()
		"""
		# Since there is no connection level safety, we
		# build the pool using the synchronized queue class
		# that implements all the required locking semantics.
		from Queue import Queue
		self._queue = Queue(maxconnections)
		# Establish all database connections (it would be better to
		# only establish a part of them now, and the rest on demand).
		for i in range(maxconnections):
			self.cache(PgConnection(*args, **kwargs))

	def cache(self, con):
		""""Add or return a connection to the pool."""
		self._queue.put(con)

	def connection(self):
		""""Get a connection from the pool."""
		return PooledPgConnection(self, self._queue.get())
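A usage sketch under the snippet's own assumptions (PgConnection and PooledPgConnection wrap pg.connect(); the kwargs go straight through):

pool = PooledPg(5, dbname='mydb')  # kwargs are forwarded to pg.connect()
con = pool.connection()            # blocks when all five are checked out
con.query('SELECT 1')
con.close()  # presumably PooledPgConnection.close() re-caches it via pool.cache()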
Example #22
 def get(self):
     # empty() followed by get() is not atomic; this is safe only with a
     # single consumer, otherwise get() may still block here
     if Queue.empty(self):
         return None
     else:
         indexer = Queue.get(self)
         indexer.refreshEMSState()
         return indexer
Example #23
File: event.py Project: sijis/salt
    def __test_event_fire_master(self):
        events = Queue()

        def get_event(events):
            me = event.MasterEvent(self.master_opts['sock_dir'])
            events.put_nowait(
                me.get_event(wait=10, tag='salttest', full=False)
            )

        threading.Thread(target=get_event, args=(events,)).start()
        time.sleep(1)   # give the listener thread time to start and subscribe

        ret = self.run_function(
            'event.fire_master',
            ['event.fire_master: just test it!!!!', 'salttest']
        )
        self.assertTrue(ret)

        eventfired = events.get(block=True, timeout=10)
        self.assertIsNotNone(eventfired)
        self.assertIn(
            'event.fire_master: just test it!!!!', eventfired['data']
        )

        ret = self.run_function(
            'event.fire_master',
            ['event.fire_master: just test it!!!!', 'salttest-miss']
        )
        self.assertTrue(ret)

        with self.assertRaises(Empty):
            eventfired = events.get(block=True, timeout=10)
Example #24
def ThreadV():
	global queue
	global sina
	queue = Queue()
	sina = sina_data()
	# At startup, query how many remaining_ip_hits and reset_time_in_seconds the server still allows; see http://open.weibo.com/wiki/Account/rate_limit_status for details
	rateLimit = client.account.rate_limit_status.get()
	print 'remaining_ip_hits:%d reset_time_in_seconds:%d\n'%(rateLimit['remaining_ip_hits'],rateLimit['reset_time_in_seconds'])
	time.sleep(2)
	# bp_statuses_log is the checkpoint log file used to resume the crawl
	if not os.path.exists('/home/mii/weibo_crawler/lijun_thread/bp_statuses_log'):
		place = 0
		f = open('/home/mii/weibo_crawler/lijun_thread/bp_statuses_log','w')
		f.close()
	elif len(open('/home/mii/weibo_crawler/lijun_thread/bp_statuses_log').read()) == 0:
		place = 0
	else:
		place = int(open('bp_statuses_log').read().strip())
		Count.count = place
	# Resume from the checkpoint: read big-V (verified account) nicknames and put them on the queue
	keys = open('shanghai3','r').readlines()[place:]
	for key in keys:
		queue.put(key)
	# Start the worker threads
	n = 5
	for i in range(n):
		t = threadv()
		t.start()
Example #25
class TestStatsdLoggingDelegation(unittest.TestCase):
    def setUp(self):
        self.port = 9177
        self.sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        self.sock.bind(('localhost', self.port))
        self.queue = Queue()
        self.reader_thread = Thread(target=self.statsd_reader)
        self.reader_thread.setDaemon(1)
        self.reader_thread.start()

    def tearDown(self):
        # The "no-op when disabled" test doesn't set up a real logger, so
        # create one here so we can tell the reader thread to stop.
        if not getattr(self, 'logger', None):
            self.logger = utils.get_logger({
                'log_statsd_host': 'localhost',
                'log_statsd_port': str(self.port),
            }, 'some-name')
        self.logger.increment('STOP')
        self.reader_thread.join(timeout=4)
        self.sock.close()
        del self.logger
        time.sleep(0.15)  # avoid occasional "Address already in use"?

    def statsd_reader(self):
        while True:
            try:
                payload = self.sock.recv(4096)
                if payload and 'STOP' in payload:
                    return 42
                self.queue.put(payload)
            except Exception, e:
                sys.stderr.write('statsd_reader thread: %r' % (e,))
                break
Example #26
    def test_request_retries_configurable(self):
        # We guess at some ports that will be unused by Riak or
        # anything else.
        client = self.create_client(http_port=DUMMY_HTTP_PORT,
                                    pb_port=DUMMY_PB_PORT)

        # Change the retry count
        client.retries = 10
        self.assertEqual(10, client.retries)

        # The retry count should be a thread local
        retries = Queue()

        def _target():
            retries.put(client.retries)
            retries.join()

        th = Thread(target=_target)
        th.start()
        self.assertEqual(3, retries.get(block=True))
        retries.task_done()
        th.join()

        # Modify the retries in a with statement
        with client.retry_count(5):
            self.assertEqual(5, client.retries)
            self.assertRaises(IOError, client.ping)
Example #27
class EmitterThread(threading.Thread):

    def __init__(self, *args, **kwargs):
        self.__name = kwargs['name']
        self.__emitter = kwargs.pop('emitter')()
        self.__logger = kwargs.pop('logger')
        self.__config = kwargs.pop('config')
        self.__max_queue_size = kwargs.pop('max_queue_size', 100)
        self.__queue = Queue(self.__max_queue_size)
        threading.Thread.__init__(self, *args, **kwargs)
        self.daemon = True

    def run(self):
        while True:
            (data, headers) = self.__queue.get()
            try:
                self.__logger.debug('Emitter %r handling a packet', self.__name)
                self.__emitter(data, self.__logger, self.__config)
            except Exception:
                self.__logger.error('Failure during operation of emitter %r', self.__name, exc_info=True)

    def enqueue(self, data, headers):
        try:
            self.__queue.put((data, headers), block=False)
        except Full:
            self.__logger.warn('Dropping packet for %r due to backlog', self.__name)
Example #28
    def run(self):
        '''
        Does the job
        '''
        self.parser.add_option("-l", "--list", default=False, action="store_true", 
            help = "If present, list hosts configured in site.xml")
        self.parser.add_option("-a", "--artm", default=False, action="store_true", 
            help = "If present, include lo-art-1 to cycle/off")
        self.parser.add_option("-c", "--cob", default=False, action="store_true", 
            help = "If present, reboot only cob-* machines. cob-dmc is not rebooted because it belong to CONTROL subsystem.")
        self.parser.add_option("-o", "--off", default=False, action="store_true", 
            help = "If present, turn off the machines instead cycle them.")
        self.parser.add_option("-t", "--timeout", default=150, 
            help = "Set timeout to wait the recovered hosts. Default is 150 secs")
        self.parse()
        self.parse_args()
        self.get_hosts()
        if self.list is False:
            lastpdu = 'none'
            for host in self.hosts:
                currentpdu = str(self.get_pdu(host)[0])
                if currentpdu != lastpdu:
                    lastpdu = currentpdu
                    self.pstrip_cmd(self.get_pdu(host))
                    time.sleep(1)
                else:
                    time.sleep(2)
                    lastpdu = currentpdu
                    self.pstrip_cmd(self.get_pdu(host))
            if self.verbose:
                print self._get_time()+" Waiting for hosts ..."
            if self.off is False:
                queue = Queue()
                for host in self.hosts:
                    queue.put(host)
                    self.remaining_hosts.append(host)
                for host in self.hosts:
                    rh =  Thread(target=self.recover_host,args=(host, self.timeout,queue))
                    rh.setDaemon(True)
                    rh.start()
                queue.all_tasks_done.acquire()
                try:
                    endtime = time.time() + self.timeout
                    while queue.unfinished_tasks:
                        remaining = endtime -  time.time()
                        if remaining <= 0.0:
                            raise timeOut('Time Out Raise!!!')
                        queue.all_tasks_done.wait(remaining)
                except timeOut:
                    print "%s Probably %d hosts are still rebooting, please check ..." % (self._get_time(), int(queue.unfinished_tasks))
                    print "%s Please check these hosts:" % self._get_time()
                    for h in self.remaining_hosts:
                        print "%s ---> \033[31m%s\033[0m" % (self._get_time(), h)
                finally:
                    queue.all_tasks_done.release()

        else:
            print "Hosts configured in site.xml"
            for host in self.hosts:
                print host
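The all_tasks_done/unfinished_tasks dance above is the standard workaround for Queue.join() having no timeout parameter. Factored into a helper (a sketch; it relies on the same internal condition variable the example uses):

import time

def join_with_timeout(queue, timeout):
    """Like queue.join(), but give up after timeout seconds.
    Returns True if every task finished, False on timeout."""
    queue.all_tasks_done.acquire()
    try:
        endtime = time.time() + timeout
        while queue.unfinished_tasks:
            remaining = endtime - time.time()
            if remaining <= 0.0:
                return False
            queue.all_tasks_done.wait(remaining)
        return True
    finally:
        queue.all_tasks_done.release()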
Example #29
def main():
    """
    Main function of the proxy scanner.
    """
    global pl, output, q

    parser = ArgumentParser(description='Scans a list of proxies to determine which work for HTTPS.')
    parser.add_argument('--output', default='output/proxies.txt', type=str,
        help='The file in which to store the found proxies.')
    parser.add_argument('--threads', default=10, type=int,
        help='Number of threads to use.')

    args = parser.parse_args()
    output = args.output

    threads = args.threads
    q = Queue(threads * 3)

    print 'Starting threads.'
    for x in xrange(threads):
        t = Thread(target=check_proxies)
        t.daemon = True
        t.start()

    print 'Queueing proxies.'
    for proxy in proxies.proxies:
        q.put(proxy)
    q.join()

    save_proxies()
Example #30
 def test_normalRun(self):
     """
     Runs normally
     """
     saved_coverage = process.coverage
     process.coverage = MagicMock()
     self.addCleanup(setattr, process, 'coverage', saved_coverage)
     # Parent directory setup
     os.chdir(self.tmpdir)
     sub_tmpdir = tempfile.mkdtemp(dir=self.tmpdir)
     basename = os.path.basename(sub_tmpdir)
     # Child setup
     fh = open(os.path.join(basename, '__init__.py'), 'w')
     fh.write('\n')
     fh.close()
     fh = open(os.path.join(basename, 'test_pool_runner_dotted.py'), 'w')
     fh.write(dedent(
         """
         import unittest
         class A(unittest.TestCase):
             def testPass(self):
                 pass
         """))
     fh.close()
     module_name = basename + '.test_pool_runner_dotted.A.testPass'
     result = Queue()
     poolRunner(module_name, result, 1)
     result.get()  # discard the first queued item; the passing result is the second
     self.assertEqual(len(result.get().passing), 1)