def test_wings_push_switches(self):
    """Testing push switches"""
    # init wings
    settings = {"pins": {"wings": {"left_switch": 17, "right_switch": 4,
                                   "position": 25, "movement": 22}}}
    event_queue = Queue()
    logging.basicConfig()
    logger = logging.getLogger(name="TuxEatPi")
    logger.setLevel(logging.DEBUG)
    wings = FakeWings(settings, event_queue, logger)
    # Test calibrate
    self.assertEqual(wings.get_position(), "down")
    # test left switch event
    wings.push_wing('left')
    event = event_queue.get(timeout=5)
    self.assertEqual(event.component, 'FakeWings')
    self.assertEqual(event.pin_id, wings.pins.get('left_switch'))
    self.assertEqual(event.name, 'left_switch')
    # test right switch event
    wings.push_wing('right')
    event = event_queue.get(timeout=5)
    self.assertEqual(event.component, 'FakeWings')
    self.assertEqual(event.pin_id, wings.pins.get('right_switch'))
    self.assertEqual(event.name, 'right_switch')
def crawl(self, urls, follow_links=False):
    links, seen = set(), set()
    queue = Queue()
    converged = threading.Event()

    def execute():
        while not converged.is_set():
            try:
                url = queue.get(timeout=0.1)
            except Empty:
                continue
            if url not in seen:
                seen.add(url)
                hrefs, rel_hrefs = self.execute(url)
                links.update(hrefs)
                if follow_links:
                    for href in rel_hrefs:
                        if href not in seen:
                            queue.put(href)
            queue.task_done()

    for url in urls:
        queue.put(url)
    for _ in range(self._threads):
        worker = threading.Thread(target=execute)
        worker.daemon = True
        worker.start()
    queue.join()
    converged.set()
    return links
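# Design note on crawl() above (added commentary, not from the original
# source): queue.join() returns once every queued URL has been matched by a
# task_done() call; the converged Event then flips, and the daemon workers
# fall out of their 0.1 s get(timeout=...) polling loop rather than blocking
# forever on an empty queue. This is why the workers poll with a timeout
# instead of calling a bare queue.get().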
class PreviewDispatcherThread(QThread):
    """
    Thread used to dispatch elements to each preview worker thread.

    :param queue: The main queue containing the elements to process.
    :param code_signal: The signal to pass to the code preview worker, updates the code preview.
    :param kwargs: The signals to pass to the GUI preview worker (e.g. the MO and NMM
        preview signals), which update the MO and NMM previews.
    """

    def __init__(self, queue, code_signal, **kwargs):
        super().__init__()
        self.queue = queue
        self.gui_queue = Queue()
        self.code_queue = Queue()
        self.code_thread = PreviewCodeWorker(self.code_queue, code_signal)
        self.code_thread.start()
        self.gui_thread = PreviewGuiWorker(self.gui_queue, **kwargs)
        self.gui_thread.start()

    def run(self):
        while True:
            # wait for the next element
            element = self.queue.get()
            if element is not None:
                element.write_attribs()
                element.load_metadata()
                element.sort()
                # dispatch to every worker queue
                self.gui_queue.put(element)
                self.code_queue.put(element)
class BlockingInProcessChannel(InProcessChannel):

    def __init__(self, *args, **kwds):
        super(BlockingInProcessChannel, self).__init__(*args, **kwds)
        self._in_queue = Queue()

    def call_handlers(self, msg):
        self._in_queue.put(msg)

    def get_msg(self, block=True, timeout=None):
        """Gets a message if there is one that is ready."""
        if timeout is None:
            # Queue.get(timeout=None) has uninterruptible behavior,
            # so wait for a week instead.
            timeout = 604800
        return self._in_queue.get(block, timeout)

    def get_msgs(self):
        """Get all messages that are currently ready."""
        msgs = []
        while True:
            try:
                msgs.append(self.get_msg(block=False))
            except Empty:
                break
        return msgs

    def msg_ready(self):
        """Is there a message that has been received?"""
        return not self._in_queue.empty()
def ExtractVideoInfo(courseURL):
    """
    Extract video information.
    """
    queue = Queue()
    APIcaller = FlvcdAPICaller()
    parser = Open163Parser(courseURL)
    for i in range(10):
        worker = Worker(queue, parser, APIcaller)
        worker.daemon = True
        worker.start()
    parser.fillQ(queue)
    queue.join()

    videoList = parser.getResult()
    videoInfo = {
        "courseURL": courseURL,
        "videoList": videoList,
    }
    # dump complete video information.
    with open("videoList.json", "w") as f:
        json.dump(videoInfo, f)
    print("Complete video information written to videoList.json.")
    # dump video URLs.
    urls = []
    for video in videoList:
        urls.append(video['url'] + '\n')
    with open('urls.txt', 'w') as out:
        out.writelines(urls)
    print("Video URLs written to urls.txt.")
def track(self):
    queue = Queue()
    thread = Thread(target=self._update_status, args=(queue,))
    thread.start()

    widgets = ['Processing...', AnimatedMarker()]
    progress_indicator = ProgressBar(widgets=widgets, maxval=UnknownLength)
    progress_indicator.start()

    content = {}
    for indicator_count in itertools.count():
        if not queue.empty():
            content = queue.get()
            if isinstance(content, Exception):
                raise content
            widgets[0] = self._get_message(content)
        progress_indicator.update(indicator_count)
        if content.get('processed'):
            break
        sleep(0.1)
    progress_indicator.finish()

    self.__content = content
    return content
def _port_ping(self, hosts: Queue, interface: str, results: set):
    self.logger.debug("{}: Starting TCP SYN ping thread.".format(threading.current_thread().name))

    while True:
        ip = hosts.get()  # type: IPAddress
        ip_str = str(ip)

        # Send a SYN with a random source port for each destination port
        for dstPort in self.portstoscan:
            srcPort = random.randint(1025, 65534)
            resp = sr1(IP(dst=ip_str) / TCP(sport=srcPort, dport=dstPort, flags=ScapyTCPFlag.SYN),
                       timeout=1, verbose=False, iface=interface)
            if resp and resp.haslayer(TCP):
                if resp[TCP].flags == (TCPFlag.SYN | TCPFlag.ACK) or resp[TCP].flags == (TCPFlag.RST | TCPFlag.ACK):
                    # Send a reset packet (RST)
                    send(IP(dst=ip_str) / TCP(sport=srcPort, dport=dstPort, flags=ScapyTCPFlag.RST),
                         iface=interface, verbose=False)

                    # The port is either closed or open (we got a response),
                    # so we deduce that the host exists
                    node = NetworkNode()
                    node.ip = ip
                    node.mac = EUI(resp.src)
                    node.host = resolve_ip(resp[IP].src)
                    results.add(node)

                    self.logger.debug(
                        "Found a live host by pinging port {port_nbr}: {live_host}.".format(
                            port_nbr=dstPort, live_host=str(node)))

                    # No need to test the other ports: we know the host exists.
                    break
        hosts.task_done()
class JQueryChaliceRequestHandler(BaseHTTPRequestHandler):
    server_version = "Extremon/0.1"

    def do_GET(self):
        self.outq = Queue(maxsize=10)
        self.running = True
        self.server.add_consumer(self)
        self.send_response(200)
        self.send_header("Content-type", "text/plain")
        self.send_header("Access-Control-Allow-Origin", "*")
        self.end_headers()
        self.missed = 0
        try:
            while self.running:
                try:
                    message = self.outq.get() + bytes(
                        '%s.timestamp=%.2f\n%s.missed=%d\n\n' %
                        (self.server.prefix, time.time(), self.server.prefix, self.missed),
                        'UTF-8')
                    self.wfile.write(bytes(str(len(message)) + ";", 'UTF-8'))
                    self.wfile.write(message)
                    self.wfile.write(b';')
                    self.outq.task_done()
                except error:
                    self.running = False
        finally:
            self.server.remove_consumer(self)

    def write(self, data):
        try:
            self.outq.put(data, block=False)
        except Full:
            self.missed += 1
def wrapper(*args, **kargs):
    q = Queue()

    def callback(value):
        q.put(None)

    def errback(failure):
        # Retrieve and save full exception info
        try:
            failure.raiseException()
        except:
            q.put(sys.exc_info())

    def g():
        try:
            d = func(*args, **kargs)
            try:
                d.addCallbacks(callback, errback)
            # Check for a common mistake and display a nice error
            # message
            except AttributeError:
                raise TypeError("you must return a twisted Deferred "
                                "from your test case!")
        # Catch exceptions raised in the test body (from the
        # Twisted thread)
        except:
            q.put(sys.exc_info())

    reactor.callFromThread(g)
    try:
        error = q.get(timeout=timeout)
    except Empty:
        raise TimeExpired("timeout expired before end of test (%f s.)" % timeout)
    # Re-raise all exceptions
    if error is not None:
        exc_type, exc_value, tb = error
        raise exc_type(exc_value).with_traceback(tb)
def test_handle_failing_upload_xlog(self):
    sleeps = []

    def sleep(sleep_amount):
        sleeps.append(sleep_amount)
        time.sleep(0.001)

    callback_queue = Queue()
    storage = MockStorageRaising()
    self.transfer_agent.sleep = sleep
    self.transfer_agent.get_object_storage = storage
    assert os.path.exists(self.foo_path) is True
    self.transfer_queue.put({
        "callback_queue": callback_queue,
        "file_size": 3,
        "filetype": "xlog",
        "local_path": self.foo_path,
        "metadata": {"start-wal-segment": "00000001000000000000000C"},
        "site": self.test_site,
        "type": "UPLOAD",
    })
    with pytest.raises(Empty):
        callback_queue.get(timeout=0.1)
    alert_file_path = os.path.join(self.config["alert_file_dir"], "upload_retries_warning")
    assert os.path.exists(alert_file_path) is True
    os.unlink(alert_file_path)
    expected_sleeps = [0.5, 1, 2, 4, 8, 16, 20, 20]
    assert sleeps[:8] == expected_sleeps
def is_alive(ip_addr):
    lock = threading.Lock()
    probe_ports = [22, 3389]
    q = Queue()
    status = False
    for port in probe_ports:
        q.put(port)

    class Probe(threading.Thread):
        def __init__(self):
            threading.Thread.__init__(self)

        def run(self):
            nonlocal status
            try:
                self.port = q.get(block=False)
            except Empty:
                return
            if tcp_probe(ip_addr, self.port):
                with lock:
                    status = True
                # print("Success to connect to " + ip_addr + " " + str(self.port))
            # else:
            #     print("Failed to connect to " + ip_addr + " " + str(self.port))
            q.task_done()

    for x in range(5):
        p = Probe()
        p.daemon = True
        p.start()
    q.join()
    return status
def test_producer_consumer_with_queues(self):
    # we currently just stress yappi, no functionality test is done here.
    yappi.start()
    import time
    if utils.is_py3x():
        from queue import Queue
    else:
        from Queue import Queue
    from threading import Thread
    WORKER_THREAD_COUNT = 50
    WORK_ITEM_COUNT = 2000

    def worker():
        while True:
            item = q.get()
            # do the work with item
            q.task_done()

    q = Queue()
    for i in range(WORKER_THREAD_COUNT):
        t = Thread(target=worker)
        t.daemon = True
        t.start()
    for item in range(WORK_ITEM_COUNT):
        q.put(item)
    q.join()  # block until all tasks are done
    # yappi.get_func_stats().sort("callcount").print_all()
    yappi.stop()
def runexternal_out_and_err(cmd, check_memleak=True):
    # pylint: disable=unused-argument
    command = shlex.split(cmd)
    p = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE)

    if p.stdout is not None:
        q_stdout = Queue()
        t_stdout = Thread(target=read_in_thread, args=(p.stdout, q_stdout))
        t_stdout.start()
    else:
        q_stdout = None
        ret_stdout = ''

    if p.stderr is not None:
        q_stderr = Queue()
        t_stderr = Thread(target=read_in_thread, args=(p.stderr, q_stderr))
        t_stderr.start()
    else:
        q_stderr = None
        ret_stderr = ''

    if q_stdout is not None:
        ret_stdout = q_stdout.get().decode('ascii')
    if q_stderr is not None:
        ret_stderr = q_stderr.get().decode('ascii')

    waitcode = p.wait()
    if waitcode != 0:
        ret_stderr = ret_stderr + '\nERROR ret code = %d' % waitcode

    return (ret_stdout, ret_stderr)
def __init__(self):
    self.active_calls = []
    self.waiting_calls = Queue()
    self.respondents = []
    self.free_respondents = Queue()
    self.managers = []
    self.directors = []
def main():
    ts = time.time()
    # create a queue to communicate with the worker threads
    queue = Queue()
    # create 6 worker threads
    for x in range(6):
        worker = doExpbatWorker(queue)
        # setting daemon to True will let the main thread exit even though the workers are blocking
        worker.daemon = True
        worker.start()
    # for i in range(9):
    #     queue.put(('~/' + str(i) + '.bat', 'dfdf'))
    jb = []
    batpath = 'g:/migration/exp_script/'
    csvpath = 'g:/migration/mig_xw/'
    jb.append((batpath + 'cps_xw_studentcourse.bat',
               batpath + 'cps_xw_studentcourse.bat ' + ' AcademicAdministration ' +
               csvpath + 'cps_xw_studentcourse.csv 202.205.160.199 jwc wangbin'))
    jb.append((batpath + 'cps_xw_avgscore.bat',
               batpath + 'cps_xw_avgscore.bat ' + ' AcademicAdministration ' +
               csvpath + 'cps_xw_avgscore.csv 202.205.160.199 jwc wangbin'))
    # jb.append((batpath + 'exmm_composescore330.bat', batpath + 'exmm_composescore330.bat ' + ' zhejiang ' + csvpath + 'exmm_composescore330.csv 202.205.160.183 sa !!!WKSdatatest!!!'))
    # jb.append((batpath + 'cps_xw_avgscore.bat', batpath + 'cps_xw_avgscore.bat ' + ' AcademicAdministration ' + csvpath + 'cps_xw_avgscore.csv 202.205.160.199 jwc wangbin'))
    # jb.append((batpath + 'exmm_xkStandardplan330.bat', batpath + 'exmm_xkStandardplan330.bat ' + ' zhejiang ' + csvpath + 'exmm_xkstandsartplan330.csv 202.205.160.183 sa !!!WKSdatatest!!!'))
    # jb.append((batpath + 'exmm_xkStandard330.bat', batpath + 'exmm_xkStandard330.bat ' + ' zhejiang ' + csvpath + 'exmm_xkstandsart330.csv 202.205.160.183 sa !!!WKSdatatest!!!'))
    excl = []
    for item in jb:
        find = False
        for i in excl:
            if i in item[0]:
                find = True
                break
        if not find:
            # if 'exemptapply' in item[0]:
            queue.put(item)
    queue.join()
    print('took %s minutes ' % ((time.time() - ts) / 60,))
class HandlerThread(Thread):
    def __init__(self, bot, lock):
        self.bot = bot
        self.queue = Queue()
        self.lock = lock
        super().__init__()

    def run(self):
        while True:
            try:
                items = None
                args = self.queue.get()
                with self.lock:
                    items = self.bot.__irccallbacks__[args[0]]
                for item in items:
                    if not get_core(item):
                        if self.bot.verbose:
                            print("[command thread:%s] calling fn %s" %
                                  (datetime.datetime.utcnow(), item.__name__))
                        item(self.bot, *(args[1]))
            except BaseException as e:
                if not isinstance(e, SystemExit) and not isinstance(e, KeyboardInterrupt):
                    traceback.print_exc()

    def push(self, cname, *args):
        self.queue.put(tuple([cname] + list(args)))
def __init__(self, token_file, dev=False):
    """
    Not only does it represent a client connection to the discord server, but it
    also initializes the api tokens in use and a representation of the League
    client by generating a League object.

    :param token_file: location of the token file containing the api tokens
    :type token_file: str
    :param dev: allows the bot to start in a development environment with a separate discord bot token
    :type dev: bool
    :returns: GanjaClient -- the GanjaClient object acting as the discord client
    """
    super(GanjaClient, self).__init__()
    with open(token_file) as f:
        data = json.load(f)
        self.server_token = data['token']
        self.dev_token = data['dev_token']
        self.wolfram = data['wolfram_token']
        open_token = data['open_league_token']
        riot_token = data['league_token']
    self.database = '.databases/'
    self.http_header = {'User-Agent': 'Mozilla/5.0', 'Accept': 'text/html,application/json'}
    self.list_commands = {}
    self.voice = None
    self.player = None
    self.last_channel = None
    self.queue = Queue()
    self.queue_name = Queue()
    self.league = League(open_token, riot_token, self.http_header)
    for i in os.listdir('data'):
        with open('data/' + i) as f:
            lines = f.read().splitlines()
            self.list_commands[i] = lines
    if dev:
        self.token = self.dev_token
    else:
        self.token = self.server_token
def _put(self, item):
    # Only consider re-evaluation if we are still on the same eval
    # session.
    (eval_sess, is_reeval) = item
    if is_reeval and self._curr_eval_sess is not eval_sess:
        return
    replace = True
    if hasattr(eval_sess, "ctlr") and eval_sess.ctlr and eval_sess.ctlr.keep_existing:
        # Allow multiple eval sessions; currently used for variable
        # highlighting (bug 80095), may pick up additional uses. Note that
        # these sessions can still get wiped out by a single replace=False
        # caller.
        replace = False
    if replace:
        # We only allow *one* eval session at a time.
        # - Drop a possible accumulated eval session.
        if len(self.queue):
            self.queue.clear()
        # - Abort the current eval session.
        if not is_reeval and self._curr_eval_sess is not None:
            self._curr_eval_sess.ctlr.abort()
    # Lazily start the eval thread.
    if not self.isAlive():
        self.start()
    Queue._put(self, (eval_sess, is_reeval))
    if replace:
        assert len(self.queue) == 1
class Metric(object):
    """
    This class stores generic time-series data in a queue.
    Values are stored as (timestamp, value) tuples.
    """

    def __init__(self):
        self.metric = Queue()

    def push(self, value, timestamp=None):
        if timestamp is None:
            timestamp = int(time.time())
        elif not isinstance(timestamp, int):
            raise ValueError(
                "Timestamp should be an integer, but it is '%s'" % type(timestamp))
        self.metric.put((timestamp, value))

    def next(self):
        try:
            return self.metric.get_nowait()
        except Empty:
            raise StopIteration

    def get(self):
        # TODO: decide what we should return here
        return None

    def __iter__(self):
        return self
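# Usage sketch for the Metric class above (added illustration, not from the
# original source; assumes time is imported and Empty comes from the queue
# module, as the class itself requires). Note that Metric implements the
# Python 2 iterator protocol via next(); Python 3 looks for __next__, so this
# sketch drains it with explicit next() calls instead of a for loop.
def _demo_metric():
    m = Metric()
    m.push(1.5)                        # timestamp defaults to int(time.time())
    m.push(2.5, timestamp=1700000000)  # explicit integer timestamp
    while True:
        try:
            ts, value = m.next()       # raises StopIteration once empty
        except StopIteration:
            break
        print(ts, value)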
class EventListener(FileSystemEventHandler):
    """
    Listens for changes to files and re-runs tests after each change.
    """

    def __init__(self, extensions=[]):
        super(EventListener, self).__init__()
        self.event_queue = Queue()
        self.extensions = extensions or DEFAULT_EXTENSIONS

    def on_any_event(self, event):
        """
        Called when a file event occurs.
        Note that this gets called on a worker thread.
        """
        # Filter for allowed event types
        if not isinstance(event, WATCHED_EVENTS):
            return
        src_path = os.path.relpath(event.src_path)
        dest_path = None
        if isinstance(event, FileMovedEvent):
            dest_path = os.path.relpath(event.dest_path)
        # Filter files that don't match the allowed extensions
        if not event.is_directory and self.extensions != ALL_EXTENSIONS:
            src_ext = os.path.splitext(src_path)[1].lower()
            src_included = src_ext in self.extensions
            dest_included = False
            if dest_path:
                dest_ext = os.path.splitext(dest_path)[1].lower()
                dest_included = dest_ext in self.extensions
            if not src_included and not dest_included:
                return
        self.event_queue.put((type(event), src_path, dest_path))
def main():
    # These three parameters are user defined
    client_id = "*****"
    username = "******"
    password = "******"
    tokenDict = get_tokenDict(client_id, username, password)
    ts = time()
    download_dir = setup_download_dir()
    links = [l for l in get_links(client_id, tokenDict) if l.endswith(".jpg")]
    # Create a queue to communicate with the worker threads
    queue = Queue()
    # Create worker threads
    for x in range(int(argv[1])):
        worker = DownloadWorker(queue)
        # Setting daemon to True will let the main thread exit even though the workers are blocking
        worker.daemon = True
        worker.start()
    # Put the tasks into the queue as a tuple
    for link in links:
        logger.info("Queueing {}".format(link))
        queue.put((download_dir, link))
    # Causes the main thread to wait for the queue to finish processing all the tasks
    queue.join()
    print("Took {}".format(time() - ts))
def runexternal_out_and_err(cmd, check_memleak=True):
    command = shlex.split(cmd)
    p = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE)

    if p.stdout is not None:
        q_stdout = Queue()
        t_stdout = Thread(target=read_in_thread, args=(p.stdout, q_stdout))
        t_stdout.start()
    else:
        q_stdout = None
        ret_stdout = ''

    if p.stderr is not None:
        q_stderr = Queue()
        t_stderr = Thread(target=read_in_thread, args=(p.stderr, q_stderr))
        t_stderr.start()
    else:
        q_stderr = None
        ret_stderr = ''

    if q_stdout is not None:
        ret_stdout = q_stdout.get().decode('ascii')
    if q_stderr is not None:
        ret_stderr = q_stderr.get().decode('ascii')

    p.wait()
    return (ret_stdout, ret_stderr)
def bfsreduceall(sudokuObject):
    source = sudokuObject.solutiondriverNoGuess()
    if source == "Bad Response":
        return None
    elif type(source) is Sudoku:
        return source
    # Note: this Queue is a custom FIFO type (isempty/unqueue/enqueue),
    # constructed from an initial list -- not queue.Queue from the stdlib.
    Q = Queue([sudokuObject])
    loop = 1
    startminnodes = None
    while not Q.isempty():
        # print("loop no", loop)
        if loop > 2:
            return dfsreduceall(sudokuObject)
        # if startminnodes is not None:
        #     for node in startminnodes: print(node.allowedset, node.id)
        # print("Q.unqueue()", Q)
        v = Q.unqueue()
        unfnodes = v.getOrderedMinnodesUnfilled()  # unfinished nodes
        if loop == 1:
            startminnodes = unfnodes
        for minnode in unfnodes:
            for permutedvalue in minnode.allowedset:
                global numsudokuobjects
                numsudokuobjects += 1
                newsudokuObject = sudokuObject.__deepcopy__()
                newsudokuObject.nodes[minnode.id].setValue(permutedvalue)
                postsolveobject = newsudokuObject.solutiondriverNoGuess()
                if type(postsolveobject) is Sudoku:
                    return postsolveobject
                elif postsolveobject != "Bad Response":
                    Q.enqueue(newsudokuObject)
        loop += 1
    return None
def download_cover(self, log, result_queue, abort,  # {{{
                   title=None, authors=None, identifiers={}, timeout=30,
                   get_best_cover=False):
    cached_url = self.get_cached_cover_url(identifiers)
    if cached_url is None:
        log.info('No cached cover found, running identify')
        rq = Queue()
        self.identify(log, rq, abort, title=title, authors=authors,
                      identifiers=identifiers)
        if abort.is_set():
            return
        results = []
        while True:
            try:
                results.append(rq.get_nowait())
            except Empty:
                break
        results.sort(key=self.identify_results_keygen(
            title=title, authors=authors, identifiers=identifiers))
        for mi in results:
            cached_url = self.get_cached_cover_url(mi.identifiers)
            if cached_url is not None:
                break
    if cached_url is None:
        log.info('No cover found')
        return

    if abort.is_set():
        return
    br = self.browser
    log('Downloading cover from:', cached_url)
    try:
        cdata = br.open_novisit(cached_url, timeout=timeout).read()
        result_queue.put((self, cdata))
    except:
        log.exception('Failed to download cover from:', cached_url)
class ScraperThread(QThread):
    result_signal = pyqtSignal(dict)

    def __init__(self, parent=None):
        super(ScraperThread, self).__init__(parent)
        self._queue = Queue()
        self._stop = False

    def run(self):
        self._stop = False
        while not self._queue.empty() and not self._stop:
            processed_url = self._queue.get()
            result = requests.get(processed_url)
            self.result_signal.emit({'headers': result.headers})

    def clear_queue(self):
        self._queue = Queue()

    @property
    def queue(self):
        return self._queue

    @queue.setter
    def queue(self, urls):
        for url in urls:
            self._queue.put(url.strip())

    @property
    def stop(self):
        return self._stop

    @stop.setter
    def stop(self, stop):
        self._stop = stop
class BlockingShellSocketChannel(ShellSocketChannel):

    def __init__(self, context, session, address=None):
        super(BlockingShellSocketChannel, self).__init__(context, session, address)
        self._in_queue = Queue()

    def call_handlers(self, msg):
        # io.rprint('[[Shell]]', msg)  # dbg
        self._in_queue.put(msg)

    def msg_ready(self):
        """Is there a message that has been received?"""
        return self._in_queue.qsize() > 0

    def get_msg(self, block=True, timeout=None):
        """Get a message if there is one that is ready."""
        return self._in_queue.get(block, timeout)

    def get_msgs(self):
        """Get all messages that are currently ready."""
        msgs = []
        while True:
            try:
                msgs.append(self.get_msg(block=False))
            except Empty:
                break
        return msgs
class Uploader(threading.Thread):
    def __init__(self, backend):
        super().__init__()
        self._backend = backend
        self._upload_queue = Queue()
        self._keep_going = True
        self._retry = None

    def queue_image(self, path):
        """Queue an image for upload."""
        self._upload_queue.put(path)

    def run(self):
        while self._keep_going:
            if self._retry is None:
                path = self._upload_queue.get()
            else:
                path = self._retry
            if path is None:
                break
            with open(path, "rb") as img:
                success = self._backend.upload_image(img.read())
            if not success:
                # try again
                self._retry = path
            else:
                self._retry = None

    def stop(self):
        """Terminate the thread."""
        self._keep_going = False
        self._upload_queue.put(None)
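# Hypothetical usage sketch for Uploader (added illustration, not from the
# original source). FakeBackend stands in for the real backend object; its
# upload_image(bytes) -> bool contract is inferred from run() above, and the
# temp-file path is illustrative only.
import os
import tempfile
import time

class FakeBackend:
    def upload_image(self, data):
        print("would upload %d bytes" % len(data))
        return True

def _demo_uploader():
    with tempfile.NamedTemporaryFile(suffix=".png", delete=False) as tmp:
        tmp.write(b"fake image bytes")
        path = tmp.name
    uploader = Uploader(FakeBackend())
    uploader.start()
    uploader.queue_image(path)
    time.sleep(0.1)     # give the worker a moment; stop() takes effect immediately
    uploader.stop()     # enqueues the None sentinel so run() can exit
    uploader.join()
    os.unlink(path)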
def spin_server(queue: Queue, transport: AbstractTransport):
    while True:
        # do stuff
        b64image = transport.handle_client()
        queue.put(b64image, False)
class Actor:
    def __init__(self):
        self._mailbox = Queue()

    def send(self, msg):
        self._mailbox.put(msg)

    def recv(self):
        msg = self._mailbox.get()
        if msg is ActorExit:
            raise ActorExit()
        return msg

    def start(self):
        self._terminated = Event()
        t = Thread(target=self._bootstrap)
        t.daemon = True
        t.start()

    def _bootstrap(self):
        try:
            self.run()
        except ActorExit:
            pass
        finally:
            self._terminated.set()

    def join(self):
        self._terminated.wait()

    def run(self):
        while True:
            msg = self.recv()
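# Usage sketch for the Actor base class above (added illustration, not from
# the original source). It assumes ActorExit is an Exception subclass used as
# a shutdown sentinel, and that Queue, Thread and Event are imported as the
# class itself expects. Note that recv() compares against the ActorExit class
# object itself, so the sentinel is sent as the class, not an instance.
class ActorExit(Exception):
    """Sentinel raised by recv() to unwind the actor's run() loop."""
    pass

class PrintActor(Actor):
    def run(self):
        while True:
            msg = self.recv()   # raises ActorExit on the sentinel
            print('Got:', msg)

def _demo_actor():
    p = PrintActor()
    p.start()
    p.send('hello')
    p.send('world')
    p.send(ActorExit)           # the class object is the sentinel
    p.join()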
def __init__(self): """ """ Plugin.__init__(self) self.queue_in = Queue() self.queue_out = Queue() self.queue_saver = None self.quit_event = Event() self.pool = [] self.raw_saver = None self.ai = None # this is the azimuthal integrator to use self.npt = 2000 self.npt_azim = 256 self.input_files = [] self.method = "full_ocl_csr" self.unit = "q_nm^-1" self.output_file = None self.mask = None self.wavelength = None self.polarization_factor = None self.do_SA = False self.dummy = None self.delta_dummy = None self.norm = 1 self.error_model = None # "poisson" self.save_raw = None self.raw_nxs = None self.raw_ds = None self.raw_compression = None self.integration_method = "integrate1d" self.sigma_clip_thresold = 3 self.sigma_clip_max_iter = 5 self.medfilt1d_percentile = (10, 90)