def test_maxtasksperchild(shutdown_only):
    """With maxtasksperchild=1 every worker exits after a single task, so
    mapping 20 tasks must observe 20 distinct worker PIDs."""

    def worker_pid(args):
        return os.getpid()

    pool = Pool(5, maxtasksperchild=1)
    pids = pool.map(worker_pid, range(20))
    assert len(set(pids)) == 20
    pool.terminate()
    pool.join()
def launch_long_running_tasks(num_pool=5):
    """Run `task` over range(1, 500, 10) on an actor pool and collect results.

    Args:
        num_pool: number of pool workers (actor nodes) to create.

    Returns:
        List of `task` results, one per input value, in input order.
    """
    # Create an actor pool of num_pool worker nodes.
    pool = Pool(num_pool)
    try:
        # pool.map already returns a fully materialized list in input order;
        # the original element-by-element append loop was redundant.
        results = list(pool.map(task, range(1, 500, 10)))
    finally:
        # Tear the workers down even if a task raises.
        pool.terminate()
    return results
def fitness_function_3(x, np_ext, np_int, m_l, m_i, m_ext, m_int):
    """Evaluate each candidate in `x` in parallel with a worker pool.

    Splits the external mesh rows into their two columns, maps
    `iterateArrays` over the candidates, and returns the results
    as a NumPy array.
    """
    # External mesh: column 0 is the mesh coordinate value, column 1 the
    # second per-row attribute (translated from the original Spanish note:
    # "assign external mesh coordinate value").
    m_ext_tp = [row[0] for row in m_ext]
    m_ext_a_sh = [row[1] for row in m_ext]

    worker_pool = Pool()
    job_args = [[candidate, m_int, m_i, m_ext_tp, m_ext_a_sh] for candidate in x]
    totals = worker_pool.map(iterateArrays, job_args)
    worker_pool.close()
    worker_pool.join()
    return np.array(totals)
# Try multiprocessing across all cores.  Since this is a CPU-bound task we
# should get better performance than the serial and threaded versions.
# FIX: `start = time.time()` was commented out, so the first elapsed-time
# print reused a stale `start` from an earlier timing section.
start = time.time()
mp_pool = mp.Pool(get_cpu_count())
with mp_pool as p:
    prime_numbers = p.map(is_prime, list(range(num)))
end = time.time()
# The `with` block already terminated the pool on exit; the original explicit
# mp_pool.terminate() afterwards was redundant and has been dropped.
print(
    f"Multi Process access: Time elapsed: {end - start:4.2f} sec to compute all primes in {num} are {sum(list(prime_numbers))}"
)

# Repeat the same workload with Ray's multiprocessing-compatible Pool.
ray.init()
ray_pool = Pool(get_cpu_count())
lst = list(range(num))
results = []
start = time.time()
for result in ray_pool.map(is_prime, lst):
    results.append(result)
end = time.time()
ray_pool.terminate()
print(
    f"Ray Distributed Multi Process access: Time elapsed: {end - start:4.2f} sec to compute all primes in {num} are {sum(results)}"
)
ray.shutdown()
        # Tail of a paging loop whose function starts before this chunk:
        # advance the page cursor by the number of workers, then hand back
        # the accumulated buffer.  (Indentation reconstructed — TODO confirm
        # against the full function definition.)
        page += processes
    return curr_buf


if __name__ == '__main__':
    start = time.time()
    ray.init()
    processes = os.cpu_count()
    # CSV header row (Chinese): title, rating, price, release date, tags, review.
    csv_data = [['標題', '評分', '價格', '上市日期', '標籤', '評論']]
    result_ids = []
    reviews_buffer = []
    # Fan out one remote crawler task per CPU; each task receives its 1-based
    # worker index and the total worker count.
    for i in range(1, processes + 1):
        result_ids.append(crawler.remote(i, processes))
    results = ray.get(result_ids)
    # Flatten the per-worker review lists into a single buffer.
    for res in results:
        reviews_buffer += res
    pool = Pool(processes=processes)
    # Swaps each record's two fields before processing — presumably
    # (review, title) -> (title-last ordering expected by process_review);
    # NOTE(review): confirm against crawler's output format.
    data = pool.map(process_review,
                    [[reviews_buf[1], reviews_buf[0]] for reviews_buf in reviews_buffer])
    pool.close()
    # Each processed result contributes one or more CSV rows.
    for d in data:
        csv_data += d
    # utf-8-sig writes a BOM so spreadsheet apps detect the Chinese text encoding.
    with open('output.csv', 'w', newline='', encoding='utf-8-sig') as csvfile:
        writer = csv.writer(csvfile)
        writer.writerows(csv_data)
    end = time.time()
    print(f'執行時間 {end - start} 秒')
import time

from ray.util.multiprocessing import Pool


def f(index):
    """Simulate five seconds of work, then return the input unchanged."""
    time.sleep(5)
    return index


if __name__ == "__main__":
    # Map ten tasks over a five-worker pool and print each result.
    worker_pool = Pool(5)
    for value in worker_pool.map(f, range(10)):
        print(value)