def verify_async_requests(self, requests_ids: list) -> dict:
        """
        :param requests_ids: list of request ids
        :return: dict of request ids and their result (boolean)
        """
        futures = set()
        future_to_id = {}  # map each future back to the request id it verifies
        all_completed_tasks = set()
        max_pending = self.threads_size

        with ThreadPoolExecutor(max_workers=self.threads_size) as pool_v2:

            for request in requests_ids:
                future = pool_v2.submit(self.verify_request,
                                        request['requestId'])
                futures.add(future)
                future_to_id[future] = request['requestId']

                # Throttle: block until a slot frees up once max_pending
                # futures are in flight.
                while len(futures) > max_pending:
                    completed_tasks, futures = wait(futures, None,
                                                    FIRST_COMPLETED)
                    all_completed_tasks.update(completed_tasks)

        completed_tasks, futures = wait(futures, None, ALL_COMPLETED)
        all_completed_tasks.update(completed_tasks)

        results = dict()

        # Completed futures come back in arbitrary order, so map each one to
        # its request id explicitly instead of relying on enumeration order.
        for task in all_completed_tasks:
            results[future_to_id[task]] = task.result()

        return results
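
The throttling idiom above, capping the number of in-flight futures and draining with FIRST_COMPLETED, works with any worker. A minimal self-contained sketch, with a hypothetical check function standing in for verify_request:

from concurrent.futures import ThreadPoolExecutor, wait, FIRST_COMPLETED

def check(request_id):
    # Placeholder worker; the real code calls a remote verification API.
    return request_id % 2 == 0

def run_bounded(ids, max_pending=4):
    futures, done = set(), set()
    with ThreadPoolExecutor(max_workers=max_pending) as pool:
        for request_id in ids:
            futures.add(pool.submit(check, request_id))
            # Block until a slot frees up once too many futures are in flight.
            while len(futures) > max_pending:
                finished, futures = wait(futures, return_when=FIRST_COMPLETED)
                done.update(finished)
    finished, futures = wait(futures)  # drain whatever is left
    done.update(finished)
    return sum(f.result() for f in done)

print(run_bounded(range(10)))  # 5 of the ten ids verify True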
Example #2
def main():
    start3 = time.time()
    seed = {"a": "a", "b": "a", "q": "a", "w": "a", "e": "a", "r": "a", "t": "a", "y": "a", "u": "a", "i": "a",
            "o": "a", "p": "a", "s": "a", "d": "f", "g": "a", "h": "l"}
    with ProcessPoolExecutor() as pool:
        futures = [pool.submit(sayhello, k, v) for k, v in seed.items()]
        wait(futures, return_when=ALL_COMPLETED)
        print('wait all')
        # map() submits eagerly; the results iterator is simply discarded here.
        pool.map(sayhello, seed.keys(), seed.values())
        pool.shutdown()  # redundant inside `with`, which shuts the pool down on exit
        print('shutdown pool')
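
One portability note on the example above: ProcessPoolExecutor launches worker processes, so on spawn-based platforms the submitting code must live under a main guard, roughly:

from concurrent.futures import ProcessPoolExecutor, wait, ALL_COMPLETED

def sayhello(k, v):
    # Stand-in for the worker above; must live at module level so child
    # processes can pickle a reference to it.
    print(k, v)

if __name__ == '__main__':  # required on spawn-based platforms (Windows, macOS)
    with ProcessPoolExecutor() as pool:
        futures = [pool.submit(sayhello, k, v) for k, v in {'a': 1, 'b': 2}.items()]
        wait(futures, return_when=ALL_COMPLETED)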
Example #3
def _run_test_task(self):
    """
    Run the test task.
    :return:
    """
    with ThreadPoolExecutor(max_workers=self.parallel) as executor:
        futures = list()
        stop_event = Event()
        for test_case in self.test_task.test_cases:
            if test_case.result != TestCaseResult.PASS:
                futures.append(executor.submit(self._run_test_case, test_case, stop_event))
        wait(futures, return_when=FIRST_EXCEPTION)  # return as soon as the first exception is raised
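
FIRST_EXCEPTION only makes wait return early; tasks already running keep going, which is why the example threads a stop_event through. A minimal sketch of that cooperative-stop pattern with placeholder test cases:

from concurrent.futures import ThreadPoolExecutor, wait, FIRST_EXCEPTION
from threading import Event
import time

def run_case(case_id, stop_event):
    if stop_event.is_set():  # queued cases bail out once a failure is seen
        return f'case {case_id} skipped'
    if case_id == 3:
        raise RuntimeError(f'case {case_id} failed')
    time.sleep(0.1)
    return f'case {case_id} passed'

stop_event = Event()
with ThreadPoolExecutor(max_workers=2) as executor:
    futures = [executor.submit(run_case, i, stop_event) for i in range(8)]
    done, not_done = wait(futures, return_when=FIRST_EXCEPTION)
    stop_event.set()  # ask the remaining queued cases to skip themselves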
Example #4
    def stream_data_extension(self, generator: Generator) -> dict:
        """
        :param generator: generator of data extension
        :return: result of dict of query range with the request id in format of : {'10-100': %RequestId}
        """
        start_time = time.time()
        fetched_rows = 0
        futures = set()
        all_completed_tasks = set()

        with ThreadPoolExecutor(max_workers=self.threads_size) as pool:
            for chunk in self.chunk_grouper(generator):
                try:
                    de_length = len(chunk['items']) if isinstance(
                        chunk, dict) else len(chunk)
                    iterator_range = f'{fetched_rows}-{fetched_rows + de_length}'
                    futures.add(
                        pool.submit(self._update_data_extension_rows,
                                    de=copy.deepcopy(chunk),
                                    iterator_range=iterator_range))

                    while len(futures) > self.threads_size:
                        completed_tasks, futures = wait(
                            futures, None, FIRST_COMPLETED)
                        all_completed_tasks.update(completed_tasks)

                    fetched_rows += de_length
                    de_logger.info(
                        f'{iterator_range} DE rows sent to Salesforce')
                    de_logger.info(
                        f'send rate {fetched_rows / (time.time() - start_time)} rows per sec'
                    )
                except Exception as e:
                    de_logger.warning(
                        f'Failed to read/submit data extension: {e}')

        de_logger.info(f'DE - Waiting for {len(futures)} jobs to complete...')
        completed_tasks, futures = wait(futures, None, ALL_COMPLETED)
        all_completed_tasks.update(completed_tasks)

        de_logger.info(
            f'Send rate {fetched_rows / (time.time() - start_time)} rows per sec')
        de_logger.info(
            f'DE - total send time: {time.time() - start_time} sec')

        results = dict()

        for task in all_completed_tasks:
            query_range, result = task.result()
            results[query_range] = result

        return results
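
chunk_grouper is a helper on the same class and is not shown here; a plausible standalone sketch, assuming it just yields fixed-size batches (the name chunk_size and the default of 2500 are assumptions):

from itertools import islice

def chunk_grouper(generator, chunk_size=2500):
    # Yield successive lists of up to chunk_size rows from the generator.
    iterator = iter(generator)
    while True:
        chunk = list(islice(iterator, chunk_size))
        if not chunk:
            return
        yield chunk

print([len(c) for c in chunk_grouper(range(6000))])  # [2500, 2500, 1000]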
Example #5
def get_books_from_db():
    sort = [('hot', 1)]
    # find = book.find({}, {"_id": 1}).sort(sort)
    find = book.find({"hot": {"$gt": 1}}, {"_id": 1, "hot": 1})
    # print(str(f['hot']) + "   " + str(f["_id"]))
    # updateBook(f["_id"], "https://www.biquge.com.cn/book/%s/" % f["_id"])
    # Iterate the cursor exactly once and let the pool fan out the updates;
    # a cursor can only be consumed a single time.
    with ThreadPoolExecutor() as t:
        all_task = [
            t.submit(updateBook, f["_id"],
                     "https://www.biquge.com.cn/book/%s/" % f["_id"])
            for f in find
        ]
        wait(all_task, return_when=FIRST_COMPLETED)
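
One subtlety in the example above: wait(..., return_when=FIRST_COMPLETED) returns after the first finished task, but the with block still joins every remaining task on exit, so nothing is cancelled. A small demonstration:

from concurrent.futures import ThreadPoolExecutor, wait, FIRST_COMPLETED
import time

with ThreadPoolExecutor() as t:
    tasks = [t.submit(time.sleep, s) for s in (0.1, 0.5, 1.0)]
    done, not_done = wait(tasks, return_when=FIRST_COMPLETED)
    print(len(done), len(not_done))  # typically "1 2": the quickest sleep finished
# leaving the `with` block joined the remaining tasks before this line runs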
Example #6
def get_size(path: str, ex: Optional[Executor] = None) -> int:
    size = 0
    futures = []
    for d, _, files in os.walk(path):
        if ex is None:
            size += sum(os.path.getsize(os.path.join(d, f)) for f in files)
        else:
            for f in files:
                futures.append(ex.submit(os.path.getsize, os.path.join(d, f)))

    if ex is not None:
        wait(futures)
        size = sum(f.result() for f in futures)

    return size
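
A usage sketch for both modes (the path is illustrative):

from concurrent.futures import ThreadPoolExecutor

print(get_size('/tmp'))          # serial os.path.getsize calls
with ThreadPoolExecutor(max_workers=8) as ex:
    print(get_size('/tmp', ex))  # sizes fetched concurrently, summed after wait()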
Example #7
def main():
    worker = 1
    print('qsize {}'.format(q.qsize()))
    with futures.ThreadPoolExecutor(max_workers=worker + 1) as executor:
        th_producer = executor.submit(producer)
        # SUCCESSFUL_FUTURE is presumably pre-completed; with the default
        # ALL_COMPLETED this blocks until the producer future finishes too.
        wait(fs=[SUCCESSFUL_FUTURE, th_producer])
        consumers = []
        for index in range(worker):
            consumers.append(executor.submit(consumer, index))
        wait(fs=[SUCCESSFUL_FUTURE, consumers[0]])
        q.join()
        print('Queue is Empty')
        QUEUE_BREAKER['IsStop'] = True
        executor.shutdown(True)  # redundant inside `with`, which shuts down on exit
        print('shutdown')

    print('main exit')
    return 0
Example #8
    def wait_for_emit(self, timeout: Optional[float]) -> bool:
        """
        Blocks until a new record is emitted.

        :param timeout: Maximum time to block before returning.
        :returns: ``True`` if there was a status change, ``False`` in case of a timeout.
        """
        done, not_done = wait([self._emit_future], timeout=timeout)
        self._emit_future = Future()  # reset future
        return len(done) == 1
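
Here wait doubles as a one-shot signal: done stays empty if the timeout expires before anything resolves. A standalone sketch of the same idea with hypothetical names:

from concurrent.futures import Future, wait

emit_future = Future()

def emit():
    emit_future.set_result(None)  # signal that a record was emitted

def wait_for_emit(timeout):
    # done is empty if the timeout expired before the future resolved; the
    # class above also swaps in a fresh Future here to consume the signal.
    done, not_done = wait([emit_future], timeout=timeout)
    return len(done) == 1

print(wait_for_emit(0.1))  # False: nothing emitted yet
emit()
print(wait_for_emit(0.1))  # True: the future resolved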
Example #9
    def getPullRequestForRepositoryUseConcurrent(helper, statistic, limit=-1, start=-1):
        if start == -1:
            # get the number of pull requests for the project
            # requestNumber = helper.getTotalPullRequestNumberForProject()
            requestNumber = helper.getMaxSolvedPullRequestNumberForProject()

            print("total pull request number:", requestNumber)

            resNumber = requestNumber
        else:
            resNumber = start

        if limit == -1:
            limit = resNumber

        executor = ThreadPoolExecutor(max_workers=20)
        future_tasks = [executor.submit(ProjectAllDataFetcher.getSinglePullRequestWithExceptionCatch,
                                        helper, statistic,
                                        pull_number) for pull_number in range(resNumber, max(0, resNumber - limit), -1)]
        wait(future_tasks, return_when=ALL_COMPLETED)
        executor.shutdown()  # the executor is not used as a context manager here, so release it explicitly
Example #10
def _recv_updates(self, timeout: Optional[float]):
     """ Await updates from a paired MPFuture """
     try:
         future = base.wait([run_in_background(self.connection.poll, timeout), self._shutdown_trigger],
                            return_when=base.FIRST_COMPLETED)[0].pop()
         if future is self._shutdown_trigger:
             raise BrokenPipeError()
         if not future.result():
             raise TimeoutError()
         self._state, result, exception = self.connection.recv()
         self._result = result if result is not None else self._result
         self._exception = exception if exception is not None else self._exception
         if self._state in self.TERMINAL_STATES:
             self.connection.close()
     except TimeoutError:
          raise  # propagate timeouts unchanged
     except (BrokenPipeError, OSError) as e:
         if self._state in (base.PENDING, base.RUNNING):
             self._state, self._exception = base.FINISHED, e
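
The base.wait call above races a background poll against a shutdown trigger and inspects which future won. A generic, self-contained sketch of that race (run_in_background is replaced by a plain executor, and the names are placeholders):

from concurrent.futures import Future, ThreadPoolExecutor, wait, FIRST_COMPLETED

shutdown_trigger = Future()  # resolved elsewhere to request shutdown

with ThreadPoolExecutor(max_workers=1) as pool:
    poll_future = pool.submit(lambda: True)  # stands in for connection.poll(timeout)
    # Whichever future resolves first decides whether to proceed or abort.
    done, _ = wait([poll_future, shutdown_trigger], return_when=FIRST_COMPLETED)
    if shutdown_trigger in done:
        raise BrokenPipeError()
    if not poll_future.result():
        raise TimeoutError()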
Example #11
import pandas as pd

from concurrent.futures import ThreadPoolExecutor, wait

class AsyncThreadPoolGetter:

  def __init__(self):
    self.loaded_data = pd.DataFrame()

  def request_from_url(self, url):
    print("Do your request here and save the results to a class variable using self.loaded_data")

  def run_urls(self, urls, max_threads=8):  # the default thread count is an assumption

    with ThreadPoolExecutor(max_workers=max_threads) as executor:

        futures_to_todo = []
        for url in urls:
            futures_to_todo.append(executor.submit(self.request_from_url, url))

        wait(futures_to_todo)
        print("Finished async job. Access self.loaded_data now")