Example #1
def process_tablelist(search_key):
    global status, job_id, table_list
    thread_list = []
    table_list = []
    # One worker per project so all lookups run concurrently.
    pool = ThreadPool(processes=len(cfg.bq_Projects))
    for project_name in cfg.bq_Projects:
        t_result = pool.apply_async(thread_table_list, (project_name, search_key))
        thread_list.append(t_result)
    # Collect the per-project results, skipping empty ones.
    for t_result in thread_list:
        result = t_result.get()
        if result:
            table_list = table_list + result
    return table_list
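This fan-out/gather shape (one apply_async per input, then get() on every handle) is the core pattern here; a minimal, self-contained sketch using only the standard library, since cfg and thread_table_list above are project-specific:

from multiprocessing.pool import ThreadPool

def fetch_all(keys, fetch_one):
    # Fan out: one task per key, keeping every AsyncResult handle.
    with ThreadPool(processes=len(keys)) as pool:
        pending = [pool.apply_async(fetch_one, (key,)) for key in keys]
        # Gather: get() blocks until that task finishes and re-raises
        # any worker exception in the calling thread.
        return [handle.get() for handle in pending]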
Example #2
    def test_concurrent_changes_dont_deadlock(self):
        self.set_delay(0.1)

        def try_and_write(i):
            result = requests.post(Server(0).item(i % 3), str(i), timeout=5)
            self.assertIn(result.status_code, [201, 503])

        threads = ThreadPool(processes=4).map_async(try_and_write, range(20))

        try:
            threads.get()
        except requests.exceptions.Timeout:
            self.fail("Request deadlocked and timed out")
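Worth noting: map_async().get() re-raises the first exception from any worker in the calling thread, so a failed assertIn inside try_and_write also fails the test, not just the Timeout branch.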
Example #4
    def avail_check(function_to_call: Callable) -> None:
        """Speaks an error message if any of the lights aren't reachable.

        Args:
            function_to_call: Takes the function/method that has to be called as an argument.
        """
        status = ThreadPool(processes=1).apply_async(func=thread_worker,
                                                     args=[function_to_call])
        speaker.speak(run=True)  # Flush queued speech while the check runs.
        if failed := status.get(timeout=5):
            plural_ = ("lights aren't available right now!" if failed > 1
                       else "light isn't available right now!")
            speaker.speak(
                text=f"I'm sorry sir! {support.number_to_words(input_=failed, capitalize=True)} {plural_}"
            )
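If the worker has not finished within the timeout, status.get() raises multiprocessing.TimeoutError rather than returning; a hedged sketch of guarding against that (worker and arg are stand-ins, not names from the snippet):

from multiprocessing import TimeoutError as PoolTimeoutError
from multiprocessing.pool import ThreadPool

def count_failures(worker, arg, deadline=5):
    status = ThreadPool(processes=1).apply_async(worker, (arg,))
    try:
        return status.get(timeout=deadline)   # Worker's return value.
    except PoolTimeoutError:
        return None                           # The check itself timed out.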
Example #5
def main():
    if len(sys.argv) < 2:
        print_usage()
        sys.exit(0)

    terms = read_terms(os.path.abspath(sys.argv[1]))
    output_queue = Queue.Queue()
    output_path = 'output_%s.csv' % time.strftime("%m-%d-%Y-%H%M%S")
    output_file = open(output_path, 'wb')
    try:
        # The writer drains the queue while both searches feed it concurrently.
        output_writer = ThreadPool(1).apply_async(write_output, (output_queue, output_file))
        yahoo_results = ThreadPool(1).apply_async(imap_queue, (yahoo, terms, output_queue))
        bing_results = ThreadPool(1).apply_async(imap_queue, (bing, terms, output_queue))
        yahoo_results.get()
        bing_results.get()
        output_queue.join()  # Wait until every queued row has been written.
    finally:
        output_file.close()
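The three single-worker pools above could be one shared pool; a sketch of that variant under the same assumptions (write_output, imap_queue, yahoo, and bing are the script's own helpers):

pool = ThreadPool(processes=3)
writer = pool.apply_async(write_output, (output_queue, output_file))
searches = [pool.apply_async(imap_queue, (engine, terms, output_queue))
            for engine in (yahoo, bing)]
for search in searches:
    search.get()       # Propagates any search-side exception.
output_queue.join()    # Every queued row has been written.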
Example #7
def gather_all_unsorted(
    threadpool: ThreadPool, func: Callable, params: Sequence, *args: Any, **kwargs: Any
) -> Iterator[Tuple[Any, Any]]:
    """Runs multiple tasks concurrently and yields each result as soon as it
    finishes, in completion order rather than submission order.
    """
    # Submit every task, tagged with its index into params.
    for i, param in enumerate(params):
        threadpool.add_task(i, func, param, *args, **kwargs)

    # Drain exactly one result per submitted task.
    for _ in range(len(params)):
        yield threadpool.get()
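The ThreadPool here appears to be a custom task-queue class (multiprocessing's pool has no add_task or bare get); the standard-library way to get the same completion-order behaviour is concurrent.futures.as_completed, sketched below:

from concurrent.futures import ThreadPoolExecutor, as_completed

def gather_unsorted(func, params):
    with ThreadPoolExecutor() as executor:
        futures = {executor.submit(func, p): i for i, p in enumerate(params)}
        for future in as_completed(futures):   # Yields as each task finishes.
            yield futures[future], future.result()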
Example #8
def meetings() -> None:
    """Controller for meetings."""
    with db.connection:
        cursor = db.connection.cursor()
        meeting_status = cursor.execute(
            "SELECT info, date FROM ics").fetchone()
    if meeting_status and meeting_status[1] == datetime.now().strftime('%Y_%m_%d'):
        speaker.speak(text=meeting_status[0])
    elif meeting_status:
        Process(target=meetings_writer).start()
        speaker.speak(
            text=f"Meetings table is outdated {env.title}. Please try again in a minute or two.")
    else:
        if shared.called_by_offline:
            Process(target=meetings_writer).start()
            speaker.speak(
                text=f"Meetings table is empty {env.title}. Please try again in a minute or two.")
            return
        meeting = ThreadPool(processes=1).apply_async(
            func=meetings_gatherer)  # Runs in parallel while the speech below plays.
        speaker.speak(
            text=f"Please give me a moment {env.title}! I'm working on it.",
            run=True)
        try:
            speaker.speak(text=meeting.get(timeout=60), run=True)
        except ThreadTimeoutError:
            logger.error("Unable to read the calendar schedule within 60 seconds.")
            speaker.speak(
                text=f"I wasn't able to read your calendar within the set time limit {env.title}!",
                run=True)
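ThreadTimeoutError is presumably an alias for multiprocessing.TimeoutError, which is what AsyncResult.get raises when its timeout elapses; a minimal reproduction:

import time
from multiprocessing import TimeoutError as ThreadTimeoutError
from multiprocessing.pool import ThreadPool

result = ThreadPool(processes=1).apply_async(time.sleep, (120,))
try:
    result.get(timeout=1)   # Worker needs 120 s; give up after 1 s.
except ThreadTimeoutError:
    print("gatherer did not finish in time")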
Example #9
def backup(tg_client, chat_id, back_up_folders):
    """
        This function starts the backup process.
    """

    async_result = ThreadPool(processes=1).apply_async(
        lambda: speedtest.Speedtest().upload() / 8, ())

    file_log = get_logger()

    print("\nGetting list of uploaded files")
    old_files = get_uploaded_files(tg_client, chat_id, back_up_folders)
    file_log.info("Found %s files already uploaded", len(old_files))

    new_files = []
    print("Getting list of files to upload")

    for folder in back_up_folders:
        new_files.extend(get_new_files(folder, old_files))

    file_log.info("Found %s new files to upload", len(new_files))

    if len(new_files) == 0:
        return show_results(0, 0, "")

    total_files = len(new_files)
    net_speed = async_result.get()
    (done, failed, errors) = (0, 0, "")

    file_log.info("Measured internet speed to be %s Bps", net_speed)

    print_progress_bar(0, total_files)
    tg_client.send_message(
        chat_id=chat_id,
        text=f"Backup started on {datetime.today().strftime('%Y-%m-%d %I:%M %p')}")
    tg_client.send_message(
        chat_id=chat_id,
        text=f"Backing up {total_files} files @ {net_speed / 1000000} MBps.")

    for (new_file, folder) in new_files:
        task = send_file(tg_client, chat_id, new_file, folder)
        if task.error_info is None:
            wait_for_upload(tg_client, task.update, net_speed)
            done += 1
        else:
            failed += 1
            errors += str(task.error_info) + "\n\n"
            file_log.error("Error uploading %s %s", new_file, task.error_info)

        print_progress_bar(done + failed,
                           total_files,
                           "",
                           suffix=f"{done + failed} of {total_files} done")

    tg_client.send_message(
        chat_id=chat_id,
        text=f"Backup ended on {datetime.today().strftime('%Y-%m-%d %I:%M %p')}"
    ).wait()

    return show_results(done, failed, errors)
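The detail worth copying from this example: the slow speed test is started first and async_result.get() is deferred until net_speed is actually needed, so the measurement runs concurrently with all the file-listing work in between.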
Example #10
    # init thread pool; note map_async returns an AsyncResult, not the pool
    sencore_pool = ThreadPool(
        processes=len(data_to_process)
    ).map_async(sencore_start, data_to_process)

    # wait until all threads done
    counter = 0
    while not sencore_pool.ready() and counter < TIMEOUT:
        time.sleep(0.1)
        counter += 0.1
    if counter >= TIMEOUT:
        exit(1)

    # get results from thread pool
    data = sencore_pool.get()

    # generate Zabbix metrics
    zabbix_metrics_all = [to_zabbix(d[0], d[1]) for d in data if d]

    # autodiscovery metrics
    zabbix_metrics_discovery = reduce(add, [zma["autodiscovery"] for zma in zabbix_metrics_all])
    zabbix_metrics_discovery = sorted(zabbix_metrics_discovery, key=lambda x: x[0])

    # static metrics
    zabbix_metrics_static = reduce(add, [zma["static"] for zma in zabbix_metrics_all])

    # dynamic metrics
    zabbix_metrics_dynamic = reduce(add, [zma["dynamic"] for zma in zabbix_metrics_all])

    send2zabbix_metrics = zabbix_metrics_static
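The busy-wait loop above can be replaced by the AsyncResult's own bounded wait; a sketch with the same TIMEOUT semantics (sys assumed imported at module top):

async_result = ThreadPool(processes=len(data_to_process)).map_async(
    sencore_start, data_to_process)
async_result.wait(timeout=TIMEOUT)   # Block at most TIMEOUT seconds.
if not async_result.ready():         # Still running after the deadline.
    sys.exit(1)
data = async_result.get()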