def main(input_csv_path, work_base_folder, input_data_folder,
         date_format="%Y%m%d_%H%M%S", str_start=0, str_end=15):
    """Interpolate track from given csv file and pickle it to disk.

    Reads (timestamp, x, y) track points from the CSV, fits cubic splines
    through them, samples the splines at the timestamps encoded in the
    ".shp" file names found in ``input_data_folder``, and pickles the
    resulting ``{timestamp: projected (x, y)}`` dict next to the case data.

    :param input_csv_path: path to the track CSV (rows parsed by
        ``date_to_timestamp``; unparsable rows are dropped).
    :param work_base_folder: folder receiving ``<case_name>.pickle``.
    :param input_data_folder: folder scanned for ".shp" files whose names
        embed a timestamp.
    :param date_format: strptime pattern for the embedded timestamp.
    :param str_start: first index of the timestamp slice in a file name.
    :param str_end: one past the last index of the timestamp slice.
    """
    proj = pyproj.Proj(utils.projStr)
    # FIX: close the CSV deterministically instead of relying on manual close.
    with open(input_csv_path) as csvfile:
        reader = csv.DictReader(csvfile)
        # Drop rows the date parser rejected (returned None).
        track_list = [p for p in (date_to_timestamp(row) for row in reader)
                      if p is not None]
    track_array = numpy.array(track_list)
    print(track_array)
    T = track_array[:, 0]
    X = track_array[:, 1]
    Y = track_array[:, 2]
    fx = interp1d(T, X, kind='cubic')
    fy = interp1d(T, Y, kind='cubic')
    # List files: the timestamp is a fixed slice of each shapefile name.
    date_str = [p[str_start:str_end]
                for p in utils.list_folder_sorted_ext(input_data_folder, ".shp")]
    date_obj = [datetime.datetime.strptime(p, date_format) for p in date_str]
    timestamps = [time.mktime(p.timetuple()) for p in date_obj]
    interp_track_dict = {p: proj(fx(p), fy(p)) for p in timestamps}
    pp(interp_track_dict)
    # FIX: write the pickle in binary mode so it is not corrupted on
    # platforms that translate line endings in text mode.
    with open(os.path.join(work_base_folder, utils.case_name + ".pickle"),
              "wb") as track_dump:
        cPickle.dump(interp_track_dict, track_dump)
def main(workspace, date_format="%Y%m%d_%H%M%S", track_pickle=None):
    """Compute dispersiveness for every stage-1 shapefile.

    Runs ``execute`` on each ".shp" in the stage-1 folder, writing one
    ``name,dispersiveness`` row per success to ``Dispersiveness.csv`` in the
    work base folder and collecting the names of failed inputs.

    :param workspace: unused here; kept for interface compatibility.
    :param date_format: strptime pattern forwarded to ``execute``.
    :param track_pickle: path to the interpolated-track pickle; defaults to
        ``utils.track_pickle``.
    """
    stage1 = utils.stage1_folder
    stage2 = utils.stage2_folder
    utils.create_dirs([stage2])
    track_pickle = track_pickle or utils.track_pickle
    # FIX: close the pickle file instead of leaking the handle.
    with open(track_pickle) as pickle_file:
        track_dict = cPickle.load(pickle_file)
    error_list = []
    with open(os.path.join(utils.work_base_folder, "Dispersiveness.csv"),
              "w") as output_file:
        output_file.write("date_string,dispersiveness\n")
        for q in utils.list_folder_sorted_ext(folder=stage1, ext=".shp"):
            f = os.path.join(stage1, q)
            adv_feature = utils.relocate(q, stage2)
            r = execute(f, adv_feature, track_dict, date_format)
            if r is None:
                # BUG FIX: record the *failing file name*; the original
                # appended r, which is always None here.
                error_list.append(q)
            else:
                output_file.write("%s,%f\n" % (q, r))
    print("Done")
    print("Errors: %s" % error_list)
def main(input_csv_path, work_base_folder, input_data_folder,
         date_format="%Y%m%d_%H%M%S", str_start=0, str_end=15):
    """Interpolate track from given csv file and pickle it to disk.

    IBTrACS variant: fits cubic splines through (timestamp, x, y) points,
    then for every ".img" file in ``input_data_folder`` stores longitude /
    latitude, projected position and heading (degrees, from the spline
    derivative) keyed by timestamp, pickled to ``<case_name>.pickle``.

    :param input_csv_path: IBTrACS CSV, rows parsed by
        ``date_to_timestamp_ibtracs`` (None results are dropped).
    :param work_base_folder: folder receiving the output pickle.
    :param input_data_folder: folder scanned for ".img" files.
    :param date_format: pattern handed to ``utils.smart_lookup_date``.
    :param str_start: unused in this variant; kept for interface
        compatibility with the slicing-based version.
    :param str_end: unused; see ``str_start``.
    """
    proj = pyproj.Proj(utils.projStr)
    # FIX: deterministic close via context manager.
    with open(input_csv_path) as csvfile:
        reader = csv.DictReader(csvfile)
        track_list = [
            p for p in (date_to_timestamp_ibtracs(row) for row in reader)
            if p is not None
        ]
    track_array = numpy.array(track_list)
    print(track_array)
    T = track_array[:, 0]
    X = track_array[:, 1]
    Y = track_array[:, 2]
    fx = interp1d(T, X, kind='cubic')
    fy = interp1d(T, Y, kind='cubic')
    # List files
    date_str = utils.list_folder_sorted_ext(input_data_folder, ".img")
    date_obj = [utils.smart_lookup_date(p, date_format) for p in date_str]
    pp(date_obj)
    timestamps = [time.mktime(p.timetuple()) for p in date_obj]
    interp_track_dict = {}
    # FIX: iterate timestamps directly instead of range(len(...)).
    for ts in timestamps:
        try:
            # Build the entry locally and only publish it when complete, so
            # a failure (e.g. ts outside the spline's range raises
            # ValueError) cannot leave a partial record without 'dir'.
            entry = {}
            entry['lon_lat'] = (float(fx(ts)), float(fy(ts)))
            entry['pos'] = proj(fx(ts), fy(ts))
            vx = derivative(fx, ts)
            vy = derivative(fy, ts)
            entry['dir'] = int(math.degrees(math.atan2(vy, vx)))
            interp_track_dict[ts] = entry
        except Exception:
            # FIX: narrowed from a bare except (which also ate
            # KeyboardInterrupt); still best-effort per timestamp.
            pp("ERROR")
    pp(interp_track_dict)
    with open(os.path.join(work_base_folder, utils.case_name + ".pickle"),
              "wb") as track_dump:
        pickle.dump(interp_track_dict, track_dump)
def main():
    """Write a dispersiveness CSV for every stage-2 shapefile.

    Iterates the ".shp" files in the (module-level) ``stage2`` folder,
    computes dispersiveness via ``generate_dispersiveness`` and appends one
    ``name,value`` row per success; failures are collected in
    ``error_list`` and the offending exception is printed.
    """
    error_list = []
    # FIX: the original never closed output_file; a with-block guarantees it.
    with open(os.path.join(utils.work_base_folder, "dispersiveness.csv"),
              "w") as output_file:
        output_file.write("date_string,dispersiveness\n")
        # NOTE(review): `stage2` is not defined in this function — it is
        # presumably a module-level path; confirm against the module header.
        for q in utils.list_folder_sorted_ext(folder=stage2, ext=".shp"):
            try:
                f = os.path.join(stage2, q)
                # Get dispersiveness
                dispersive = generate_dispersiveness(f)
                output_file.write("%s,%f\n" % (q, dispersive))
            except Exception as ex:
                # FIX: `except E, ex` and `ex.message` are Python-2-only and
                # deprecated; `as ex` + str(ex) works on 2.6+ and 3.
                print(ex)
                error_list.append(q)
def main(workspace=None):
    """Run the geoprocessing step on every contour-polygon shapefile.

    Each ".shp" in the contour-polygon folder is processed by ``execute``
    into the stage-1 folder, using arcpy's in-memory workspace.

    :param workspace: accepted for interface compatibility; not used.
    """
    base = utils.work_base_folder
    polygon_dir = os.path.join(base, utils.cnt_polygon_folder)
    stage1_dir = os.path.join(base, utils.stage1_folder)

    arcpy.env.workspace = "in_memory"
    arcpy.env.overwriteOutput = True
    utils.create_dirs([stage1_dir, arcpy.env.workspace])

    # NOTE(review): polygon_dir is already rooted at `base`; the extra join
    # below is a no-op when `base` is absolute and is kept for parity.
    for name in utils.list_folder_sorted_ext(
            os.path.join(base, polygon_dir), ".shp"):
        # Full path is required — ArcGIS would otherwise resolve the name
        # relative to the current workspace.
        src = os.path.join(base, polygon_dir, name)
        dst = utils.relocate(src, stage1_dir)
        # Main geoprocessing routine.
        execute(src, dst, arcpy.env.workspace)
    print("OK")
# NOTE(review): this loop is the tail of a top-N selection routine whose
# `def` header (and the bindings of `values`, `output_dir`, `input_feature`,
# `sort_column`, `filter_column`, `top_count`) lies outside this view, so
# only comments are added here.
for value in values:
    # One output sub-folder per distinct filter value.
    sub_level_dir = os.path.join(output_dir, str(value))
    if not os.path.exists(sub_level_dir):
        os.makedirs(sub_level_dir)
    with arcpy.da.SearchCursor(
            input_feature, [sort_column],
            where_clause="%s=%s" % (filter_column, str(value))) as cur:
        cur.reset()
        # Sort-column values for this filter value, descending.
        sort_field = list(reversed(sorted([row[0] for row in cur])))
    if not sort_field:
        # No rows match this value; nothing to export.
        continue
    if len(sort_field) < top_count:
        # Fewer rows than requested: the threshold keeps all of them.
        threshold = sort_field[-1]
    else:
        # Value of the top_count-th row (1-based) is the cutoff.
        threshold = sort_field[top_count - 1]
    # Now output it
    output_name = arcpy.ValidateTableName(
        os.path.basename(input_feature) + "_" + filter_column)
    arcpy.Select_analysis(
        input_feature,
        os.path.join(sub_level_dir, output_name),
        where_clause='%s>=%s AND %s=%s' % (sort_column, str(threshold),
                                           filter_column, str(value)))
    print("Select top %d in %s -> %s" % (top_count, input_feature,
                                         output_name))


if __name__ == "__main__":
    # CLI entry: argv[1] is the base folder to work inside.
    base_folder = sys.argv[1]
    os.chdir(base_folder)
    source = 'adv_metric'
    output = 'top_1'
    utils.create_dirs([output])
    for q in utils.list_folder_sorted_ext(folder="adv_metric", ext='.shp'):
        select_top_n(os.path.join(source, q), output)
def start_mp(work_base_folder, file_list=None, masks=None,
             levels=(15, 20, 25, 30, 35, 40), working_mode="wrf",
             stage2_datetime_format="refl_3_5km_%Y_%m_%d_%H_%M",
             skip_list=(), discard=False):
    """Run the full contour -> polygon -> metrics pipeline for one case.

    Stages (each skippable via ``skip_list``): 'contour' extracts contours
    from the inputs, 'smooth' fills/polygonizes them, 'basic' adds basic
    shape metrics, 'adv' adds advanced metrics using the interpolated storm
    track, and finally a per-process dispersiveness CSV is written.

    :param work_base_folder: root folder for inputs and all stage outputs.
    :param file_list: pre-sorted input names; when None the base folder is
        scanned for ".nc" (wrf mode) or ".img" (otherwise).
    :param masks: accepted for interface compatibility; masking is currently
        disabled and every input receives a None mask.
    :param levels: contour levels used by every stage.
    :param working_mode: "wrf" selects wrfout_contour, else array_contour.
    :param stage2_datetime_format: strptime pattern for the advanced stage.
    :param skip_list: any of {'contour', 'smooth', 'basic', 'adv'}.
    :param discard: forwarded to utils.create_dirs (presumably discards
        existing folders -- confirm against utils).
    """
    arcpy.CheckOutExtension("Spatial")
    arcpy.env.workspace = "in_memory"
    arcpy.env.overwriteOutput = True

    # 1. Set up the per-stage folder layout.
    base = work_base_folder
    cnt_folder = os.path.join(base, utils.cnt_folder)
    cnt_polygon_folder = os.path.join(base, utils.cnt_polygon_folder)
    stage1_folder = os.path.join(base, utils.stage1_folder)
    stage2_folder = os.path.join(base, utils.stage2_folder)
    utils.create_dirs(
        [cnt_folder, cnt_polygon_folder, stage1_folder, stage2_folder],
        discard)

    # 2. Contour stage.
    if file_list is not None:
        # Caller supplies an already timestamp-sorted list; re-sorting here
        # could get the order wrong, so use it as-is.
        base_input = file_list
    else:
        base_input = utils.list_folder_sorted_ext(
            base, ".nc" if working_mode == "wrf" else ".img")

    # Full input paths, with falsy (empty) names filtered out.
    base_input_path = [os.path.join(base, p) for p in base_input if p]
    cnt_output_path = [
        utils.relocate(p, cnt_folder, ".shp") for p in base_input_path
    ]
    levels_arg = [levels] * len(base_input_path)
    # Masking is disabled: every input gets None regardless of `masks`.
    # NOTE(review): sized from base_input, not the filtered base_input_path;
    # lengths differ if base_input contains falsy names -- confirm
    # create_func_args_tuple tolerates that.
    mask_list = [None] * len(base_input)

    contour_func = (wrfout_contour.execute
                    if working_mode == "wrf" else array_contour.execute)
    if 'contour' not in skip_list:
        for func, args in create_func_args_tuple(
                contour_func, base_input_path, cnt_output_path, levels_arg,
                mask_list):
            func(*args)
    else:
        pprint("Contour skipped")
    arcpy.Delete_management("in_memory")

    # 3. Fill/smooth contours into polygons. Re-list cnt_folder because the
    # contour stage just (re)created its contents.
    cnt_input = utils.list_folder_sorted_ext(cnt_folder, ".shp")
    cnt_input_path = [os.path.join(cnt_folder, p) for p in cnt_input]
    # "-" is replaced because it is invalid in ArcGIS table names.
    cnt_output_polygon_path = [
        utils.relocate(p, cnt_polygon_folder, ".shp").replace("-", "_")
        for p in cnt_input
    ]
    levels_arg = [levels] * len(cnt_input_path)
    if 'smooth' not in skip_list:
        for func, args in create_func_args_tuple(
                contour_polygon_filling.execute, cnt_input_path,
                cnt_output_polygon_path, levels_arg):
            func(*args)
    else:
        pprint("Smoothing and polygonize skipped")
    arcpy.Delete_management("in_memory")

    # 4. Basic shape metrics (levels are not needed by this stage).
    bsm_input = utils.list_folder_sorted_ext(cnt_polygon_folder, ".shp")
    bsm_input_path = [os.path.join(cnt_polygon_folder, p) for p in bsm_input]
    bsm_output_path = [
        utils.relocate(p, stage1_folder, ".shp") for p in bsm_input
    ]
    if 'basic' not in skip_list:
        for func, args in create_func_args_tuple(
                add_basic_geometry.execute, bsm_input_path, bsm_output_path):
            func(*args)
    else:
        pprint("Basic shape metrics skipped")
    arcpy.Delete_management("in_memory")

    # 5. Advanced shape metrics; needs the interpolated storm track, which
    # an earlier step pickled as <case_name>.pickle.
    asm_input = utils.list_folder_sorted_ext(stage1_folder, ".shp")
    asm_input_path = [os.path.join(stage1_folder, p) for p in asm_input]
    asm_output_path = [
        utils.relocate(p, stage2_folder, ".shp") for p in asm_input
    ]
    track_pickle = os.path.join(work_base_folder,
                                "%s.pickle" % utils.case_name)
    # FIX: close the pickle file instead of leaking the handle.
    with open(track_pickle, "rb") as fh:
        track_dict = pickle.load(fh)

    n_items = len(asm_input)
    date_format = stage2_datetime_format
    levels_arg = [levels] * n_items
    func_stub = create_func_args_tuple(add_advanced_geometry.execute,
                                       asm_input_path, asm_output_path,
                                       [track_dict] * n_items,
                                       [date_format] * n_items, levels_arg)

    result_pickle = os.path.join(work_base_folder, "advanced.pickle")
    if 'adv' not in skip_list:
        # Must stay single-threaded: returning results from worker
        # processes may hang, so no multiprocessing here.
        dispersiveness = []
        variable_strs = []  # FIX: defined even when there is no input.
        for func, args in func_stub:
            result = func(*args)
            dispersiveness.append(result[1])
            # Every call returns the same metric-name list; keep the last.
            variable_strs = result[0]
            gc.collect()
        # FIX: close the dump file deterministically.
        with open(result_pickle, "wb") as fh:
            pickle.dump((variable_strs, dispersiveness), fh)
    else:
        print("Advanced calculation skipped, load computed data from pickle")
        with open(result_pickle, "rb") as fh:
            variable_strs, dispersiveness = pickle.load(fh)

    # 6. One CSV row per input; header is the metric x level cross product.
    level_strs = list(map(str, levels))
    header_list = [
        '-'.join(pair)
        for pair in itertools.product(variable_strs, level_strs)
    ]
    with open(
            os.path.join(work_base_folder,
                         "dispersiveness_%d.csv" % os.getpid()),
            "w") as dispersive_output:
        dispersive_output.write("time_string," + ",".join(header_list) +
                                ",comment\n")
        for name, row in zip(asm_input, dispersiveness):
            # NOTE(review): row is joined with "" while the header uses ","
            # -- presumably each element already carries its own delimiter;
            # confirm against add_advanced_geometry.execute.
            dispersive_output.write(name + "," + "".join(row) + "\n")
    # (Removed the original trailing close(): the with-block already
    # closed the file, so it was a redundant no-op.)