import concurrent.futures


def wait_futures(futures):
    """Collect results from a mapping of Future -> (source, to_find)."""
    results = []

    # Consume futures in completion order rather than submission order.
    for future in concurrent.futures.as_completed(futures.keys()):
        if future.exception() is None:
            source, to_find = futures[future]
            if future.result():
                results.append(
                    f"The string {to_find} is present in the file {source}")
            else:
                results.append(
                    f"The string {to_find} is not present in the file {source}")

    return results
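A minimal way to build the futures mapping that wait_futures() expects might look like the sketch below; search_in_file, find_strings, and the (path, needle) pairs are hypothetical stand-ins, not part of the original.

import concurrent.futures


def search_in_file(path, needle):
    # Hypothetical worker: report whether `needle` occurs in the file at `path`.
    with open(path, encoding='utf-8') as fh:
        return needle in fh.read()


def find_strings(pairs):
    # `pairs` is a list of (path, needle) tuples; each future is mapped back
    # to its (source, to_find) pair, which is the shape wait_futures() unpacks.
    with concurrent.futures.ThreadPoolExecutor(max_workers=4) as executor:
        futures = {
            executor.submit(search_in_file, path, needle): (path, needle)
            for path, needle in pairs
        }
        return wait_futures(futures)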
Example #2
    # Requires: import concurrent.futures, traceback
    # (the collectd module is provided by the collectd daemon at runtime)
    def read(self):
        futures = {}
        with concurrent.futures.ThreadPoolExecutor(max_workers=10) as executor:
            # Map each submitted future back to the disk it was queried for.
            for disk in self.disks:
                futures[executor.submit(self.get_temperature, disk)] = disk

            # Dispatch readings as they complete, skipping missing values.
            for fut in concurrent.futures.as_completed(futures.keys()):
                disk = futures.get(fut)
                if not disk:
                    continue
                try:
                    temp = fut.result()
                    if temp is None:
                        continue
                    self.dispatch_value(disk, 'temperature', temp, data_type='temperature')
                except Exception:
                    collectd.info(traceback.format_exc())
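The read() method above comes from a collectd plugin class; a minimal sketch of the context it assumes (hypothetical class name, and stand-ins for get_temperature and dispatch_value, which the original plugin defines elsewhere) might look like this:

import collectd  # available only when running inside the collectd daemon


class DiskTemperaturePlugin:
    # The read() method shown above would be part of a class like this.
    def __init__(self, disks):
        self.disks = disks          # e.g. ['sda', 'sdb']

    def get_temperature(self, disk):
        # Hypothetical stand-in: return the disk temperature in degrees
        # Celsius, or None if it cannot be read.
        raise NotImplementedError

    def dispatch_value(self, disk, name, value, data_type):
        # Hypothetical stand-in: forward one reading to collectd.
        val = collectd.Values(plugin='disk_temperature', plugin_instance=disk,
                              type=data_type, type_instance=name)
        val.dispatch(values=[value])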
Example #3
import asyncio
import concurrent.futures
import os


async def work(projects):
    # `check_repo` and the module-level `working` flag are defined elsewhere
    # in the original program.
    loop = asyncio.get_event_loop()
    futures = {}
    finished = 0
    total = len(projects)
    with concurrent.futures.ThreadPoolExecutor(max_workers=10) as pool:
        # Run the blocking check_repo() calls in the thread pool and map
        # each future back to its project name.
        for project in projects:
            future = loop.run_in_executor(pool, check_repo, project)
            futures[future] = os.path.basename(project)
        pending = list(futures.keys())
        # Report progress as soon as each check finishes, until the
        # `working` flag is cleared or everything is done.
        while pending and working:
            done, pending = await asyncio.wait(
                pending, return_when=asyncio.FIRST_COMPLETED)
            for future in done:
                result = await future
                if result is None:
                    result = ''
                finished += 1
                print(f'({finished}/{total}) {result}')
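A minimal driver for the coroutine above, assuming a hypothetical check_repo and a module-level working flag (both come from the surrounding program in the original), could be:

working = True   # flag checked by the loop in work()


def check_repo(project):
    # Hypothetical stand-in: return a status line for the repository at
    # `project`, or None if there is nothing to report.
    return f'{os.path.basename(project)}: clean'


if __name__ == '__main__':
    projects = ['/tmp/repo_a', '/tmp/repo_b']   # hypothetical paths
    asyncio.run(work(projects))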
## Requires: import concurrent.futures, logging, time
## initiate_container, resume_container, export_container, stop_container and
## get_time are helper functions defined elsewhere in the original module.
def process_urls_parallel(analysis_urls, script_file, container_timeout,
                          max_containers):
    futures = {}
    processed_url_ids = set()
    urls = analysis_urls.copy()
    with concurrent.futures.ProcessPoolExecutor(
            max_workers=max_containers) as executor:
        while len(urls) > 0:
            ## Submit jobs to containers ##
            for i in range(min(len(urls), max_containers)):
                # dict views are not indexable in Python 3; take the first key
                id = next(iter(urls))
                itm = urls.pop(id)
                url = itm['url']
                visit_count = itm['count']
                if i != 0 and i % 5 == 0:
                    time.sleep(200)
                if visit_count == 0:
                    ## initiates docker container for the first time
                    futures[executor.submit(initiate_container, url, str(id),
                                            script_file, visit_count,
                                            container_timeout)] = (str(id),
                                                                   visit_count)
                else:
                    ## Resumes docker container and waits for notifications
                    futures[executor.submit(resume_container, url, str(id),
                                            script_file, visit_count,
                                            container_timeout)] = (str(id),
                                                                   visit_count)

            try:
                ## Keep each docker container active for the allotted duration,
                ## then stop the container and export its data.
                for future in concurrent.futures.as_completed(
                        futures, timeout=container_timeout):
                    id, v_count = futures.pop(future)
                    try:
                        # Surface any exception raised inside the container job.
                        future.result()
                        logging.info(get_time() + 'Container_' + str(id) +
                                     ': Completed successfully!!')
                    except concurrent.futures.TimeoutError:
                        logging.info(get_time() + 'Container_' + str(id) +
                                     ': Timeout occurred!!')
                    except Exception as exc:
                        logging.info(get_time() + 'Container_' + str(id) +
                                     ': Exception ')
                        logging.info(exc)

                    res = export_container(id, v_count)
                    stop_container(id)
                    if res:
                        processed_url_ids.add(id)
            except Exception:
                ## Stop the containers that didn't complete before the timeout
                ## and export their data.
                for future in list(futures.keys()):
                    id, v_count = futures.pop(future)
                    logging.info(get_time() + 'Container_' + str(id) +
                                 ': Timeout occurred!!')

                    res = export_container(id, v_count)
                    stop_container(id)
                    if res:
                        processed_url_ids.add(id)
    return processed_url_ids
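process_urls_parallel() expects analysis_urls to map an id to a dict with 'url' and 'count' keys (that is what urls.pop(id) yields above). A hypothetical invocation, with made-up ids, URLs, and a stand-in script name, might look like this:

if __name__ == '__main__':
    # Hypothetical input: id -> {'url': ..., 'count': number of prior visits}
    analysis_urls = {
        1: {'url': 'https://example.com', 'count': 0},
        2: {'url': 'https://example.org', 'count': 0},
    }
    done_ids = process_urls_parallel(analysis_urls,
                                     script_file='visit.js',   # hypothetical script
                                     container_timeout=600,    # seconds, passed to as_completed()
                                     max_containers=4)
    print('Processed URL ids:', done_ids)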