def start(self):
    create_path('logfile')
    # run the manager greenlet to completion before spawning the workers
    gevent.spawn(self.manager).join()
    # spawn the worker greenlets and block until all of them finish
    tasks = [gevent.spawn(self.worker, i) for i in range(self.gevent_num)]
    gevent.joinall(tasks)
    # self.start_worker()
    print('finish, data stored in %s' % self.goal_path)
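# A minimal sketch of the class context that start() appears to assume: the
# call site at the bottom of the file constructs
# GeventQueue(csvdicts, goal_path, gevent_num=100), and start() reads
# self.gevent_num and self.goal_path and spawns self.manager and self.worker.
# Everything beyond those names is an illustrative assumption, not the
# original implementation.
class GeventQueue(object):
    def __init__(self, csvdicts, goal_path, gevent_num=100):
        self.csvdicts = csvdicts      # source directories to reorganize
        self.goal_path = goal_path    # directory the cleaned data is written to
        self.gevent_num = gevent_num  # number of worker greenlets to spawn

    # the start() method shown above would live on this class as well

    def manager(self):
        pass  # assumed: prepares the work items the workers will consume

    def worker(self, i):
        pass  # assumed: processes work items and writes results under goal_path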
import shelve


def save(name, model, year, interval):
    db_path = "data/%s.db" % name
    tools.create_path(db_path)
    # persist the fitted parameters under db[year][interval]
    db = shelve.open(db_path)
    db_year = db.get(year, {})
    db_year[interval] = model.params()
    db[year] = db_year
    print(sorted(db[year].keys()))
    db.sync()
    db.close()
    # save the model's figure under a name encoding the year and interval
    figure = "{name}/f{year}_{pt1}_{pt2}".format(
        name=name, year=year, pt1=interval[0], pt2=interval[1])
    tools.save_figure(figure, model.canvas)
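# Illustrative call shape for save(): it expects a database name, an object
# exposing .params() and .canvas, a year key, and a (start, end) interval tuple
# that is used both as the shelve sub-key and in the figure filename.
# DummyModel and the literal values below are hypothetical, shown only to
# document the expected interface.
class DummyModel:
    canvas = None                       # stand-in for the model's plot canvas

    def params(self):
        return {'alpha': 0.1, 'beta': 2.0}

save('example_series', DummyModel(), '2015', ('01', '06'))
# -> stores the params under data/example_series.db[year][interval] and saves a
#    figure named example_series/f2015_01_06 via tools.save_figure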
def ProcessPIV(args, bga, bgb, reflection, stg):
    # read images into numpy arrays
    file_a, file_b, counter = args
    frame_a = tools.imread(file_a)
    frame_b = tools.imread(file_b)

    # removing background and reflections
    if bga is not None:
        frame_a = frame_a - bga
        frame_b = frame_b - bgb
        frame_a[reflection == 255] = 0
        frame_b[reflection == 255] = 0

    # applying a static mask (taking out the regions where we have walls)
    pnts = draw.polygon(stg['YM'], stg['XM'], frame_a.shape)
    frame_a[pnts] = 0
    frame_b[pnts] = 0
    plt.imshow(frame_a, cmap='gray')
    plt.show()

    # main piv processing
    u, v = pyprocess.extended_search_area_piv(
        frame_a, frame_b,
        window_size=stg['WS'], overlap=stg['OL'], dt=stg['DT'],
        search_area_size=stg['SA'], sig2noise_method=None)
    x, y = pyprocess.get_coordinates(image_size=frame_a.shape,
                                     window_size=stg['WS'], overlap=stg['OL'])
    u, v, mask = validation.local_median_val(u, v, 2000, 2000, size=2)
    if stg['BVR'] == 'on':
        u, v = filters.replace_outliers(u, v, method='localmean',
                                        max_iter=10, kernel_size=2)
        u, *_ = smoothn(u, s=0.5)
        v, *_ = smoothn(v, s=0.5)
    x, y, u, v = scaling.uniform(x, y, u, v, stg['SC'])

    # saving the results
    save_file = tools.create_path(file_a, 'Analysis')
    tools.save(x, y, u, v, mask, save_file + '.dat')
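# The settings dictionary ProcessPIV() reads, listing every key the function
# actually uses. The values below are placeholders chosen for illustration;
# only the key names and their roles are taken from the function body above.
stg = {
    'WS': 32,                # interrogation window size in pixels
    'OL': 16,                # window overlap in pixels
    'DT': 0.001,             # time step between frame_a and frame_b
    'SA': 64,                # search area size for extended_search_area_piv
    'SC': 1.0,               # scaling factor passed to scaling.uniform
    'BVR': 'on',             # 'on' enables outlier replacement and smoothing
    'XM': [0, 50, 50, 0],    # x-coordinates of the static wall-mask polygon
    'YM': [0, 0, 100, 100],  # y-coordinates of the static wall-mask polygon
}
# hypothetical invocation for a single image pair:
# ProcessPIV(('frame_0001_a.tif', 'frame_0001_b.tif', 1), bga, bgb, reflection, stg)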
parser = argparse.ArgumentParser(description=param_list[0][0])
for i in range(1, len(param_list)):
    parser.add_argument(param_list[i][0], type=type(param_list[i][1]),
                        default=param_list[i][1])
args = parser.parse_args()
print("create argparse success")

current_time = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
save_info = [['time: ', current_time], ['param_info: ', param_list]]
save_list(save_info, 'create_info', main_save_path + 'create_info')

# create weights path
create_path(args.weights_path)
# create memory path
create_path(args.memory_path)
# create log path
create_path(args.log_path)
# create save_list path
create_path(args.list_path)
print("create save path success")

history_reward = []

# import env:
if args.task == 'l2r':
    from wrapper_env.l2renv import WrapperEnv
else:
    from wrapper_env.wrapperEnv import WrapperEnv
env = WrapperEnv(game=args.task)
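# Shape of param_list that the argparse loop above assumes: entry 0 carries the
# parser description, and every later entry is a [flag, default] pair whose
# default value also fixes the argument type. The description, flags and
# defaults below are illustrative; only --task, --weights_path, --memory_path,
# --log_path and --list_path mirror the attributes the code reads off args.
param_list = [
    ['training launcher'],
    ['--task', 'l2r'],
    ['--weights_path', 'save/weights/'],
    ['--memory_path', 'save/memory/'],
    ['--log_path', 'save/log/'],
    ['--list_path', 'save/list/'],
    ['--lr', 0.001],
]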
#----------class definition----------

#----------function definition----------
#----------function definition----------

#----------main function----------
if __name__ == '__main__':
    # test goal path
    # goal_path = '/home/xmucpp/Git_lib/cpp-data-cleaning/test-result'
    # formal goal path
    goal_path = '/home/xmucpp/cppdata/Organized'
    create_path(goal_path)

    # test set
    # csvdicts = ['/home/xmucpp/Git_lib/cpp-data-cleaning/TinySet-10']
    # formal set
    csvdicts = ['/home/xmucpp/cppdata/Sources']

    # run it
    # reganizating(csvdicts, goal_path, gevent_num=100)
    GeventQueue(csvdicts, goal_path, gevent_num=100).start()
#----------main function----------
from flask import request, jsonify


def submit_path():
    b_name = request.get_json().get('b_name')
    # strip path separators from the client-supplied name before building a path
    b_name = b_name.replace('/', '').replace('\\', '')
    b_path = tools.create_path(b_name)
    return jsonify({"b_path": b_path})
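# submit_path() reads a JSON body via flask.request and answers with jsonify,
# so it is presumably registered on a Flask app. The app object, the route URL
# and the test-client call below are illustrative assumptions, not the
# original wiring.
from flask import Flask

app = Flask(__name__)
app.add_url_rule('/submit_path', view_func=submit_path, methods=['POST'])

with app.test_client() as client:
    resp = client.post('/submit_path', json={'b_name': 'my_book'})
    print(resp.get_json())  # -> {'b_path': <path returned by tools.create_path>}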