def exe_query_Ntimes_pool(query, N):
    """Run *query* N times through a connection pool and plot the timings.

    Parameters:
        query (str): SQL text handed to ``qu.query_with_pool`` each iteration.
        N (int): number of repetitions.

    Side effects: opens a pool (1..5 connections) against the hard-coded
    ``postgis_test`` database and emits a 'chart_pool' chart via bd.plot_perf.
    """
    # START CONNECTION POOL (hard-coded credentials, 1..5 connections)
    pool = co.create_connection_pool(1, 5, "postgis_test", "postgres",
                                     "admin", "localhost", "5432")
    times = []
    for _ in range(N):
        # query_with_pool returns a sequence whose index 1 is the measured
        # time — presumed from usage here; confirm against qu.query_with_pool.
        results = qu.query_with_pool(pool, query)
        times.append(results[1])
    # BUG FIX: original read `pool.closeall` — a bare attribute access that
    # does nothing. The method must be *called* to close the pooled connections.
    pool.closeall()
    bd.plot_perf(times, 'chart_pool')
def query_table_overviews(max_o, table, nbthreads, nbpool):
    """Query every overview table of *table*, recording per-phase timings.

    Builds one ``ST_AsGDALRaster(ST_Union(...))`` query per overview level
    (levels come from ``qu.table_overviews_list``). With ``nbthreads == 0``
    the queries run sequentially and timings go to sync_overviews.txt plus a
    'sync_overviews_phases' chart; otherwise the query list is handed to
    start_multith_tasks for threaded execution.

    Parameters:
        max_o (int): maximum overview level to include.
        table (str): base raster table name.
        nbthreads (int): 0 for sequential execution, else worker thread count.
        nbpool (int): connection-pool upper bound.
    """
    names_list = qu.table_overviews_list(max_o, table)
    # One union-raster query per overview table. Names come from our own
    # helper, not user input, so plain interpolation is acceptable here.
    qlist = [
        "SELECT ST_AsGDALRaster(ST_Union({}.rast), 'GTiff') FROM {}".format(
            name, name)
        for name in names_list
    ]
    if nbthreads == 0:
        pool = co.create_connection_pool(1, nbpool, "postgis_test", "postgres",
                                         "admin", "localhost", "5432", 0)
        starts = []
        ends = []
        wends = []
        fends = []
        # Run from the highest overview level down, matching the original order.
        qlist.reverse()
        # `with` guarantees the results file is closed even if a query raises
        # (the original open()/close() pair could leak the handle on error).
        with open("sync_overviews.txt", "w") as f:
            for q in qlist:
                print("sync query")
                todo = QueryExecution(q, pool)
                todo.startSeqQuery()
                f.write("{}, {}, {}, {}, {}, {}, {}, {}\n".format(
                    todo.query, todo.query_time_start, todo.query_time_submit,
                    todo.query_time_end, todo.wait_time_start,
                    todo.wait_time_end, todo.fetch_time_start,
                    todo.fetch_time_end))
                starts.append(todo.query_time_start)
                ends.append(todo.query_time_submit)
                wends.append(todo.wait_time_end)
                fends.append(todo.fetch_time_end)
        bd.plot_start_end_phases(starts, ends, wends, fends,
                                 'sync_overviews_phases')
    else:
        start_multith_tasks(nbthreads, nbpool, qlist)
def start_multith_file(nbthreads, nbpool, file, chartname):
    """Execute every query listed in *file* on a thread pool and plot timings.

    Queries (and their metadata) come from ``get_queries(file)``; each is run
    by ``exe_wait_fetch_dict`` on a worker thread, which appends its timing
    samples into the shared lists below. Performance lines are written to
    results_multithreading.txt and a start/end chart named *chartname* is
    produced via bd.plot_start_end.

    Parameters:
        nbthreads (int): worker thread count.
        nbpool (int): connection-pool upper bound.
        file (str): path handed to get_queries.
        chartname (str): output chart name.
    """
    # https://www.tutorialspoint.com/concurrency_in_python/concurrency_in_python_pool_of_threads.htm
    pool = co.create_connection_pool(1, nbpool, "postgis_test", "postgres",
                                     "admin", "localhost", "5432", 1)
    times_exe = []
    times_wait = []
    times_fetch = []
    times_total = []
    starts = []
    ends = []
    dict_queries = get_queries(file)
    queries = list(dict_queries.keys())
    print(queries)
    futures = []
    perf = []
    with ThreadPoolExecutor(max_workers=nbthreads) as executor:
        # Pop from the end until empty — same submission order as the
        # original `range(len(queries))` + `pop()` loop, but self-describing.
        while queries:
            futures.append(
                executor.submit(exe_wait_fetch_dict, pool, queries.pop(),
                                times_exe, times_wait, times_fetch,
                                times_total, dict_queries, perf, starts, ends))
    wait(futures, return_when='ALL_COMPLETED')
    # `with` + writelines closes the report file even if a write fails
    # (the original open()/loop/close() could leak the handle).
    with open("results_multithreading.txt", "w") as f:
        f.writelines(perf)
    bd.plot_start_end(starts, ends, chartname)
def start_multith(N, nbthreads, nbpool, query):
    """Run *query* N times concurrently on *nbthreads* worker threads.

    Each submission calls ``exe_wait_fetch``, which appends its execute /
    wait / fetch / total timing samples into the shared lists. Prints the
    wall-clock total and mean per-query time, then plots all four series
    via bd.plot_fig.

    Parameters:
        N (int): total number of query executions.
        nbthreads (int): worker thread count.
        nbpool (int): connection-pool upper bound.
        query (str): SQL text executed by every task.
    """
    # https://www.tutorialspoint.com/concurrency_in_python/concurrency_in_python_pool_of_threads.htm
    pool = co.create_connection_pool(1, nbpool, "postgis_test", "postgres",
                                     "admin", "localhost", "5432", 1)
    # (Removed unused locals from the original: count, results, connections,
    # cursors, dispo — none were ever read.)
    times_exe = []
    times_wait = []
    times_fetch = []
    times_total = []
    start = time.perf_counter()
    futures = []
    with ThreadPoolExecutor(max_workers=nbthreads) as executor:
        for _ in range(N):
            futures.append(
                executor.submit(exe_wait_fetch, pool, query, times_exe,
                                times_wait, times_fetch, times_total))
    # Leaving the `with` block joins all workers, so every task is done here.
    end = time.perf_counter()
    total_prog = end - start
    print("total time for N = {} executions : {} s".format(N, total_prog))
    print("mean total time : {} s".format(np.mean(times_total)))
    bd.plot_fig(times_exe, times_wait, times_fetch, times_total,
                'callback_threads_test')
def query_async_pool_Ntimes(query, N, nbpool): pool = co.create_connection_pool(1, nbpool, "postgis_test", "postgres", "admin", "localhost", "5432", 1) #time.sleep(3) times_exe = [] times_fetch = [] times_wait = [] times_total = [] cursors = [] connections = [] counter = 0 dispo = nbpool while counter < N: print("Starting connections and queries") while dispo != 0: aconn = pool.getconn() dispo = dispo - 1 counter += 1 connections.append(aconn) if (aconn): print("get conn ok") #time.sleep(3) my_wait(aconn) #psycopg2.extras.wait_select(aconn) #print("wait aconn ok") acurs = aconn.cursor() start = time.perf_counter() acurs.execute(query) end = time.perf_counter() runtime_exe = end - start times_exe.append(runtime_exe) cursors.append(acurs) #pool.putconn(aconn) print("Gettting results") for cur in cursors: swait = time.perf_counter() my_wait(cur.connection) ewait = time.perf_counter() runtime_wait = ewait - swait times_wait.append(runtime_wait) result = cur.fetchall() qu.test_raster_results(result) end_fetch = time.perf_counter() runtime_fetchall = end_fetch - ewait times_fetch.append(runtime_fetchall) total = end_fetch - start times_total.append(total) print("query done") #acurs.close() pool.putconn(connections[cursors.index(cur)]) dispo += 1 cursors = [] connections = [] #results = execute_read_query(ps_connection, query) #bd.plot_perf(times_exe,'execution') #bd.plot_perf(times_wait,'wait') #bd.plot_perf(times_fetch,'fetch') bd.plot_fig(times_exe, times_wait, times_fetch, times_total, 'test_async_pool_perf')
def start_multith_tasks(nbthreads, nbpool, queries):
    """Execute *queries* cooperatively on *nbthreads* worker threads.

    All queries go into a shared TasksList; each worker repeatedly pulls and
    runs the next pending task until ``executeNext()`` reports none left.
    Per-query phase timestamps are written to async_overviews.txt, plotted
    as 'async_overviews_phases', and the wall-clock total is printed.

    Parameters:
        nbthreads (int): worker thread count.
        nbpool (int): connection-pool upper bound.
        queries (list[str]): SQL texts to execute.

    Raises: re-raises the first exception any worker thread produced.
    """
    pool = co.create_connection_pool(1, nbpool, "postgis_test", "postgres",
                                     "admin", "localhost", "5432", 1)
    N = len(queries)
    time.sleep(1)
    start = time.perf_counter()

    allTasks = TasksList()
    allTasks.initQueries(queries, pool)

    def workOnTask(tasksList):
        # Worker loop: run tasks until executeNext() says the list is drained.
        while tasksList.executeNext():
            pass

    futures = []
    with ThreadPoolExecutor(max_workers=nbthreads) as executor:
        for _ in range(nbthreads):
            futures.append(executor.submit(workOnTask, allTasks))
    wait(futures, return_when='ALL_COMPLETED')
    end = time.perf_counter()
    total_prog = end - start

    starts = []
    ends = []
    fends = []
    wends = []
    for future in futures:
        hasThrown = future.exception()
        if hasThrown:
            raise hasThrown
        # BUG FIX: the original printed `future.result` — the bound method
        # object itself. It must be *called* to show the worker's return value.
        print(future.result())

    # `with` guarantees the report file is closed even if a write raises
    # (the original open()/close() pair could leak the handle on error).
    with open("async_overviews.txt", "w") as f:
        for execQuery in allTasks.execQueries:
            print(execQuery.result)
            f.write("{}, {}, {}, {}, {}, {}, {}, {}\n".format(
                execQuery.query, execQuery.query_time_start,
                execQuery.query_time_submit, execQuery.query_time_end,
                execQuery.wait_time_start, execQuery.wait_time_end,
                execQuery.fetch_time_start, execQuery.fetch_time_end))
            starts.append(execQuery.query_time_start)
            ends.append(execQuery.query_time_submit)
            wends.append(execQuery.wait_time_end)
            fends.append(execQuery.fetch_time_end)
    bd.plot_start_end_phases(starts, ends, wends, fends,
                             'async_overviews_phases')
    print("total time for N = {} executions : {} s".format(N, total_prog))