def poll(self):
    """Poll every feed that is due, fanning the work out to worker threads.

    Yields:
        list -- each non-empty batch of items produced by a polled feed;
        feeds that return nothing are skipped.
    """
    now = int(time.time())
    jobs = queue.Queue()
    results = queue.Queue()
    # Only feeds that report themselves as due get polled this round.
    feeds = [feed for feed in self.feeds if feed.should_poll()]
    for feed in feeds:
        jobs.put(feed)
    count = len(feeds)
    logging.info('Starting worker threads')
    # Never spawn more workers than there are jobs.
    for i in range(min(count, settings.MAX_WORKER_THREADS)):
        util.start_thread(self.worker, now, jobs, results)
    # Collect exactly one result per queued feed.
    while count:
        items = results.get()
        count -= 1
        if items:
            yield items
    logging.info('Worker threads completed')
def start(self, workers=4, ready=None):
    """Starts dispatching the downloaded pastes to the list of analyzers

    Arguments:
        workers {int} -- reserved for a future worker pool; currently unused
        ready {threading.Event} -- optional event set once dispatching is running

    Returns:
        Queue -- the action queue, or None when no analyzers were registered
    """
    with self.__lock:
        if not self.running:
            if len(self.analyzers) == 0:
                self.logger.warning(
                    "No analyzers added! At least one analyzer must be added prior to use!"
                )
                return None
            self.running = True
            start_thread(self._start_analyzing, "PasteDispatcher",
                         exception_event=self.__exception_event)
            if ready is not None:
                ready.set()
        return self.action_queue
def start(self, ready=None):
    """Start the handler's worker thread if it is not already running.

    Arguments:
        ready {threading.Event} -- optional event set once the worker is started
    """
    with self.__lock:
        # Guard against starting the worker twice.
        if not self.running:
            self.running = True
            start_thread(self._start, "ActionHandler", self.__exception_event)
            if ready is not None:
                ready.set()
def init():
    """Single-instance bootstrap: deliver argv to a running instance, or
    become the instance by serving the named pipe ourselves."""
    callbacks = CallbackContainer()
    payload = '\n'.join(sys.argv[1:])
    # One pipe per user keeps concurrent desktop sessions separate.
    pipe_name = r'\\.\pipe\FeedNotifier_%s' % wx.GetUserId()
    if not client(pipe_name, payload):
        util.start_thread(server, pipe_name, callbacks)
        return callbacks, payload
    # Another instance accepted the message; no container needed here.
    return None, payload
def init():
    """Hand argv off to an already-running instance, else start the pipe server."""
    container = CallbackContainer()
    message = "\n".join(sys.argv[1:])
    name = r"\\.\pipe\FeedNotifier_%s" % wx.GetUserId()
    # A successful client() call means another instance took the message.
    if client(name, message):
        return None, message
    util.start_thread(server, name, container)
    return container, message
def server(host, port, callback_func):
    """Run a line-oriented TCP server on a background thread.

    Arguments:
        host -- interface to bind
        port -- TCP port to bind
        callback_func -- called with the first stripped line of each connection
    """
    class Handler(SocketServer.StreamRequestHandler):
        def __init__(self, callback_func, *args, **kwargs):
            # Bind the callback before the base class starts handling the request.
            self.callback_func = callback_func
            SocketServer.StreamRequestHandler.__init__(self, *args, **kwargs)
        def handle(self):
            data = self.rfile.readline().strip()
            self.callback_func(data)
    server = SocketServer.TCPServer((host, port), functools.partial(Handler, callback_func))
    # serve_forever blocks, so run it off the calling thread.
    util.start_thread(server.serve_forever)
def poll(self):
    """Kick off a background poll unless one is running or polling is blocked.

    Skips when already polling, when disabled, when the user is idle (if the
    idle setting is on), or when the manager says nothing is due.
    """
    # The or-chain preserves the original short-circuit evaluation order.
    if (self.polling
            or not self.enabled
            or (settings.DISABLE_WHEN_IDLE
                and idle.get_idle_duration() > settings.USER_IDLE_TIMEOUT)
            or not self.manager.should_poll()):
        return
    self.polling = True
    self.frame.icon.set_icon('icons/feed_go.png')
    util.start_thread(self._poll_thread)
def poll(self):
    """Start a background feed poll when all preconditions allow it."""
    # A poll is already in flight.
    if self.polling:
        return
    # Polling has been switched off.
    if not self.enabled:
        return
    # Optionally skip polling while the user has been idle too long.
    if settings.DISABLE_WHEN_IDLE:
        if idle.get_idle_duration() > settings.USER_IDLE_TIMEOUT:
            return
    # The manager can veto the poll (nothing due yet).
    if not self.manager.should_poll():
        return
    self.polling = True
    self.icon.set_icon('icons/feed_go.png')
    util.start_thread(self._poll_thread)
def server(host, port, callback_func):
    """Serve TCP connections, forwarding one stripped line each to callback_func."""

    class LineHandler(socketserver.StreamRequestHandler):
        def __init__(self, callback_func, *args, **kwargs):
            # Stash the callback before the base class processes the request.
            self.callback_func = callback_func
            super().__init__(*args, **kwargs)

        def handle(self):
            self.callback_func(self.rfile.readline().strip())

    handler_factory = functools.partial(LineHandler, callback_func)
    tcp_server = socketserver.TCPServer((host, port), handler_factory)
    # serve_forever blocks, so run the server on a background thread.
    util.start_thread(tcp_server.serve_forever)
def run(controller, force=False):
    """Launch a background check for application updates.

    Arguments:
        controller -- passed through to do_check (type not visible here)

    Keyword Arguments:
        force {bool} -- check even when CHECK_FOR_UPDATES is disabled
            (default: {False})
    """
    if force or settings.CHECK_FOR_UPDATES:
        util.start_thread(do_check, controller, force)
# EOF
def start(self, paste_queue):
    """Start the scraping process and download the paste metadata

    Polls for recent pastes roughly once per minute, pushing unseen ones onto
    an internal queue, until a stop or exception event is observed.

    Arguments:
        paste_queue {Queue} -- queue onto which downloaded pastes are delivered
    """
    self.paste_queue = paste_queue
    self.running = True
    # Body downloads run on their own thread so metadata scraping is not blocked.
    start_thread(self._body_downloader, "BodyDownloader", self._exception_event)

    while self.running:
        self._last_scrape_time = int(time.time())
        pastes = self._get_recent(limit=100)
        counter = 0

        if pastes is not None:
            for paste in pastes:
                # check if paste is in list of known pastes
                if paste.key in self._known_pastes:
                    # Do nothing, if it's already known
                    continue

                self.logger.debug("Paste is unknown - adding it to list {}".format(paste.key))
                self._tmp_paste_queue.put(paste)
                self._known_pastes.append(paste.key)
                counter += 1

                if self._stop_event.is_set() or self._exception_event.is_set():
                    self.running = False
                    break

            self.logger.debug("{0} new pastes fetched!".format(counter))

        # Delete some of the last pastes to not run into memory/performance issues
        if len(self._known_pastes) > 1000:
            self.logger.debug("known_pastes > 1000 - cleaning up!")
            start_index = len(self._known_pastes) - self._known_pastes_limit
            self._known_pastes = self._known_pastes[start_index:]

        if self._stop_event.is_set() or self._exception_event.is_set():
            self.logger.debug('stopping {0}'.format(self.name))
            self.running = False
            break

        # check if time since last
        current_time = int(time.time())
        diff = current_time - self._last_scrape_time
        # if the last scraping happened less than 60 seconds ago,
        # wait until the 60 seconds passed
        if diff < 60:
            sleep_time = 60 - diff
            time.sleep(sleep_time)
def start(self, direct=False):
    """Run the job, either synchronously or on background worker threads.

    Keyword Arguments:
        direct {bool} -- run in the calling thread and return the result dict
            instead of spawning workers (default: {False})

    Returns:
        the result dict when direct, otherwise the job id; None if an
        exception was logged in non-direct mode
    """
    try:
        self._prepare()
        if direct:
            self._run()
            return self.dict(True)
        # Clamp the worker count between 1 and the remaining global capacity.
        workers = max(min(MAX_WORKERS - workerthreads, MAX_WORKERS_PROCESS, len(self.tasks)), 1)
        for _ in range(workers):
            util.start_thread(self._worker)
        return self.id
    except Exception as exc:  # "as" form works on Python 2.6+ and 3
        fault.log(exc)
        if direct:
            raise
def start(self):
    """Starts scraping pastes from the provided sources

    Returns:
        Queue -- the paste queue the scrapers feed, or None when no scrapers
        were registered
    """
    with self.__lock:
        if not self.running:
            # There needs to be at least one scraper
            if len(self.scrapers) == 0:
                self.logger.warning("No scrapers added! At least one scraper must be added prior to use!")
                return None
            self.running = True
            # Start all scraper threads
            for scraper in self.scrapers:
                start_thread(scraper.start, scraper.name, paste_queue=self.paste_queue, exception_event=self.__exception_event)
            # Return the update queue so the main thread can insert updates
            return self.paste_queue
def poll(self):
    """Poll all due feeds on worker threads, yielding each non-empty batch."""
    timestamp = int(time.time())
    pending = Queue.Queue()
    completed = Queue.Queue()
    # Select only the feeds that are due for polling.
    due = [feed for feed in self.feeds if feed.should_poll()]
    for feed in due:
        pending.put(feed)
    remaining = len(due)
    logging.info('Starting worker threads')
    worker_count = min(remaining, settings.MAX_WORKER_THREADS)
    for _ in range(worker_count):
        util.start_thread(self.worker, timestamp, pending, completed)
    # Drain one result per feed; drop empty batches.
    while remaining:
        batch = completed.get()
        remaining -= 1
        if batch:
            yield batch
    logging.info('Worker threads completed')
def run(self, consumers_processed_event):
    """Launch every agent's initialize() on its own thread and wait for all."""
    workers = [
        start_thread(target=agent.initialize,
                     args=(consumers_processed_event, ),
                     name=f"{agent}")
        for agent in self.agents
    ]
    for worker in workers:
        worker.join()
def make_consumers_calls(consumers, router):
    """Start one calling thread per consumer, pacing launches, then wait for all."""
    call_threads = []
    for caller in consumers:
        call_threads.append(
            start_thread(target=caller.make_call, args=(router, ), name=f"{caller}"))
        # Stagger the launches so calls don't all hit the router at once.
        random_sleep_between_calls()
    for call_thread in call_threads:
        call_thread.join()
def check_credentials(parent, show_popup_if_no_action_needed=True):
    """Check if Gmail API credentials have been set up. Prompt user to set them up if not.

    Arguments:
        parent -- parent object; parent.error_msg is consulted after the auth flow

    Keyword Arguments:
        show_popup_if_no_action_needed {bool} -- show an informational dialog
            when credentials are already valid (default: {True})

    Returns:
        bool -- True when usable credentials exist or were just obtained
    """
    try:
        credentials = authenticate(connect_to_google=False)
    except google.auth.exceptions.RefreshError:
        # A stored token exists but could not be refreshed; treat as not set up.
        credentials = None
    if credentials is None:
        msg = 'In order to use the email feature, you must have a Gmail account. You need to' \
              ' log in to Google and authorize this app to send email on your' \
              ' behalf. Do you wish to do this now? (You will be redirected to your browser)'
        caption = 'Google Authentication Required'
        dlg = wx.MessageDialog(parent=None, message=msg, caption=caption,
                               style=wx.OK | wx.CANCEL)
        result = dlg.ShowModal()
        dlg.Destroy()
        if result == wx.ID_OK:
            wx.YieldIfNeeded(
            )  # Make sure dialog goes away before we open a new one
            wait_dialog = wx.ProgressDialog(
                title='Waiting for response from Google...',
                message='Respond to prompts in your browser. This\n'
                'dialog will close automatically after you\n'
                'approve or deny the authorization request.')
            wait_dialog.Pulse()
            dialog_done = threading.Event()
            # Run the OAuth flow off this thread; it signals dialog_done when finished.
            start_thread(authenticate_with_google, parent, dialog_done)
            check_for_done_loop(dialog_done)
            # The auth thread records failures on parent.error_msg.
            if parent.error_msg:
                return False
            return True
    elif show_popup_if_no_action_needed:
        dlg = wx.MessageDialog(parent=None,
                               message='Email is already properly set up.',
                               caption='No Action Needed',
                               style=wx.OK)
        dlg.ShowModal()
        dlg.Destroy()
    # Falls through here when the user cancelled (credentials is None -> False).
    return credentials is not None
def server(host, port, callback_func):
    """Run a threaded, line-oriented TCP server.

    Arguments:
        host -- interface to bind
        port -- TCP port to bind
        callback_func -- invoked with the first stripped line of each connection
    """
    class Handler(socketserver.StreamRequestHandler):
        """Request handler that forwards one line per connection to the callback.

        Arguments:
            socketserver.StreamRequestHandler -- base class providing rfile/wfile
        """
        def __init__(self, callback_func, *args, **kwargs):
            """Store the callback, then let the base class process the request.

            Arguments:
                callback_func -- callable receiving the request's first line
            """
            self.callback_func = callback_func
            socketserver.StreamRequestHandler.__init__(self, *args, **kwargs)
        def handle(self):
            """Read one line from the client, strip it, and hand it to the callback."""
            data = self.rfile.readline().strip()
            self.callback_func(data)
    server = socketserver.TCPServer((host, port), functools.partial(Handler, callback_func))
    # serve_forever blocks; run the server on a background thread.
    util.start_thread(server.serve_forever)
def _start_analyzing(self):
    """Dispatch loop: pull pastes off the queue, analyzing each on its own thread.

    Runs until self.running is cleared; the 1-second queue timeout gives the
    loop regular chances to notice stop/exception events.
    """
    while self.running:
        try:
            # Get paste from queue
            paste = self.paste_queue.get(True, 1)
            # TODO implement thread pool to limit number of parallel executed threads
            start_thread(self._process_paste, "process_paste", paste=paste,
                         exception_event=self.__exception_event)
        except Empty:
            # Queue was empty for a second - check whether we should shut down.
            if self.__stop_event.is_set():
                self.logger.debug("orderly stopping")
                self.running = False
                break
            elif self.__exception_event.is_set():
                self.logger.critical(
                    "stopping due to exception in another thread")
                self.running = False
                break
            continue
def init():
    """initialize the thread server

    Delivers argv to an already-running instance via a per-user named pipe,
    or starts the pipe server in this process.

    Returns:
        tuple -- (CallbackContainer or None, joined argv message); the
        container is None when another instance accepted the message
    """
    logging.debug('initializing')
    container = CallbackContainer()
    message = '\n'.join(sys.argv[1:])
    # One pipe per user keeps concurrent desktop sessions separate.
    name = r'\\.\pipe\FeedNotifier_%s' % wx.GetUserId()
    if client(name, message):
        logging.debug("Initialized: return message='%s' and name='%s'", message, name)
        return None, message
    else:
        logging.debug("Initialized: message='%s' and name='%s'", message, name)
        logging.debug('Jump to util::start_thread()')
        util.start_thread(server, name, container)
        logging.debug('return of util::start_thread()')
        return container, message
def start_download(self):
    """Run self.download on a background thread so the caller is not blocked."""
    util.start_thread(self.download)
[consumer for consumer in consumers if not consumer.processed]) set_all_consumers_processed(True) consumers_processed_event.set() if __name__ == "__main__": faker = Faker('en_US') start_date = datetime.now() consumers = [generate_consumer(faker) for i in range(CONSUMERS_COUNT)] agents = [generate_agent(faker) for i in range(AGENTS_COUNT)] router = Router(agents) consumers_processed_event = Event() router_thread = start_thread(target=router.run, args=(consumers_processed_event, )) consumers_processed_thread = start_thread( target=check_for_processed_consumers, args=( consumers, consumers_processed_event, )) consumers_thread = start_thread(target=make_consumers_calls, args=( consumers, router, )) consumers_thread.join() router_thread.join()
def run(controller, force=False):
    """Launch a background update check when forced or enabled in settings."""
    # `force` is tested first, matching the original short-circuit order.
    should_check = force or settings.CHECK_FOR_UPDATES
    if should_check:
        util.start_thread(do_check, controller, force)
def start_download(self):
    """Run self.download on a background thread via util.start_thread."""
    util.start_thread(self.download)