Example No. 1
from easyLogger import EasyLogger


def thread_opt():
    """Time the threaded download for every thread count from 1 to 999."""
    time_list = []
    log = EasyLogger(name="io_threading", log_file_name="./log/io_threading.log")
    for thread_num in range(1, 1000):
        # get_time_download_sites is assumed to be defined elsewhere in the project;
        # it returns the elapsed time for downloading the sites with thread_num worker threads.
        time = get_time_download_sites(thread_num=thread_num)
        log.info("{} threads: finish in {} seconds".format(thread_num, time))
        time_list.append(time)
        print(time_list)
    return time_list
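
A minimal sketch of what get_time_download_sites might look like, assuming a requests-based downloader driven by concurrent.futures.ThreadPoolExecutor; the helper name comes from the call above, but the site list, the per-thread session handling, and everything else here are assumptions, not part of the original snippet:

import concurrent.futures
import threading
import time

import requests

thread_local = threading.local()

SITES = [
    "https://www.jython.org",
    "http://olympus.realpython.org/dice",
] * 80


def _get_session():
    # One requests.Session per thread, since Session is not guaranteed thread-safe.
    if not hasattr(thread_local, "session"):
        thread_local.session = requests.Session()
    return thread_local.session


def _download_site(url):
    with _get_session().get(url) as response:
        return len(response.content)


def get_time_download_sites(thread_num):
    # Download all sites with thread_num worker threads and return the elapsed time.
    start_time = time.time()
    with concurrent.futures.ThreadPoolExecutor(max_workers=thread_num) as executor:
        executor.map(_download_site, SITES)
    return time.time() - start_time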
Example No. 2
import asyncio
import time

import aiohttp
from easyLogger import EasyLogger


async def download_site(session, url):
    async with session.get(url) as response:
        print("Read {0} from {1}".format(response.content_length, url))


async def download_all_sites(sites):
    async with aiohttp.ClientSession() as session:
        tasks = []
        for url in sites:
            task = asyncio.ensure_future(download_site(session, url))
            tasks.append(task)
        await asyncio.gather(*tasks, return_exceptions=True)


if __name__ == "__main__":
    log = EasyLogger(name="io_asynchronous",
                     log_file_name="./log/io_asynchronous.log")
    sites = [
        "https://www.jython.org",
        "http://olympus.realpython.org/dice",
    ] * 80
    start_time = time.time()
    asyncio.get_event_loop().run_until_complete(download_all_sites(sites))
    duration = time.time() - start_time
    log.info("Downloaded {} in {} seconds".format(len(sites), duration))
    # print(f"Downloaded {len(sites)} sites in {duration} seconds")
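
On Python 3.7 and later, the event-loop boilerplate above can be replaced with the higher-level entry point asyncio.run, for example:

    asyncio.run(download_all_sites(sites))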
Example No. 3
import multiprocessing
import time
from easyLogger import EasyLogger


def cpu_bound(number):
    return sum(i * i for i in range(number))


def find_sums(numbers):
    # Spread the CPU-bound work across one worker process per CPU core.
    with multiprocessing.Pool() as pool:
        pool.map(cpu_bound, numbers)


if __name__ == "__main__":
    log = EasyLogger(name="cpu_synchronous",
                     log_file_name="./log/cpu_synchronous.log")
    numbers = [5_000_000 + x for x in range(20)]

    start_time = time.time()
    find_sums(numbers)
    duration = time.time() - start_time
    log.info("Duration {} seconds".format(duration))
    # print(f"Duration {duration} seconds")
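
For comparison, a plain single-process baseline (a hypothetical helper, not in the original snippet) makes the speed-up from the pool easy to measure; the pool helps here because each worker runs cpu_bound in its own interpreter, outside the GIL:

def find_sums_serial(numbers):
    # Same work as find_sums, but on a single core.
    for number in numbers:
        cpu_bound(number)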
Example No. 4
import multiprocessing
import time

import requests
from easyLogger import EasyLogger

session = None


def set_global_session():
    # Runs once in each worker process, giving every worker its own Session.
    global session
    if not session:
        session = requests.Session()


def download_site(url):
    with session.get(url) as response:
        name = multiprocessing.current_process().name
        print(f"{name}:Read {len(response.content)} from {url}")


def download_all_sites(sites):
    with multiprocessing.Pool(initializer=set_global_session) as pool:
        pool.map(download_site, sites)


if __name__ == "__main__":
    log = EasyLogger(name="io_multiprocessing",
                     log_file_name="./log/io_multiprocessing.log")
    sites = [
        "https://www.jython.org",
        "http://olympus.realpython.org/dice",
    ] * 80
    start_time = time.time()
    download_all_sites(sites)
    duration = time.time() - start_time
    log.info("Downloaded {} in {} seconds".format(len(sites), duration))
    # print(f"Downloaded {len(sites)} in {duration} seconds")
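
All four examples rely on the same small easyLogger module, whose implementation is not shown here. A minimal sketch consistent with the calls above (a constructor taking name and log_file_name, plus an info method) could wrap the standard logging package; this is an assumption about the module, not its actual code:

import logging
import os


class EasyLogger:
    """Minimal stand-in that logs to both the console and a file."""

    def __init__(self, name, log_file_name):
        # Make sure the ./log directory exists before attaching the file handler.
        os.makedirs(os.path.dirname(log_file_name), exist_ok=True)
        self._logger = logging.getLogger(name)
        self._logger.setLevel(logging.INFO)
        formatter = logging.Formatter("%(asctime)s %(name)s %(levelname)s %(message)s")
        for handler in (logging.StreamHandler(), logging.FileHandler(log_file_name)):
            handler.setFormatter(formatter)
            self._logger.addHandler(handler)

    def info(self, message):
        self._logger.info(message)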