# Imports assumed by this section; the project helpers used below (read_bin,
# read_bin_double, read_ply, rasterize, mergecloud, write_points,
# write_points_double, read_cellname, coord, coord_fn_from_cell_index, load_mms,
# check_and_create, copytoDst, Hexa2Decimal, apply_gaussian) are defined
# elsewhere in the project.
import os
import shutil
from itertools import product

import numpy as np
from tqdm import tqdm


def calc_mms(path_mms, out_dir_mms, method):
    # Aggregate the MMS tiles of every cell onto a 0.1 m grid. `method` is the
    # per-grid-cell reduction (e.g. np.mean or np.median). Cells with several
    # duplicate tiles are merged before the reduction is applied.
    list_cells = os.listdir(out_dir_mms)
    for fn_cell in list_cells:
        list_ply = os.listdir(out_dir_mms + fn_cell)
        print fn_cell, len(list_ply)
        if len(list_ply) == 1:
            # Single tile: rasterize and reduce directly.
            fn_ply = list_ply[0]
            fn = out_dir_mms + fn_cell + '\\' + fn_ply
            data = read_bin(fn, 7)
            d = rasterize(data, 0.1, dim=2)
            list_keys = d.keys()
            new_points = []
            data = np.array(data)
            for key in list_keys:
                list_xyz = data[d[key]]
                new_points.append(list(method(list_xyz, axis=0)))
            write_points(new_points, path_mms + fn_cell + '.ply')
            del new_points
        else:
            # Several duplicate tiles: rasterize each, then reduce over the
            # union of their grid cells.
            list_ply = os.listdir(out_dir_mms + fn_cell)
            list_data = []
            list_d = []
            list_keys = []
            for fn_ply in list_ply:
                fn = out_dir_mms + fn_cell + '\\' + fn_ply
                data = read_bin(fn, 7)
                d = rasterize(data, 0.1, dim=2)
                list_data.append(np.array(data))
                list_d.append(d)
                list_keys.extend(d.keys())
                del data, d
            list_keys = list(set(list_keys))
            new_points = []
            for key in list_keys:
                list_xyz = np.array([0, 0, 0])
                for i in xrange(len(list_d)):
                    data = list_data[i]
                    d = list_d[i]
                    if d.has_key(key):
                        list_xyz = mergecloud(list_xyz, data[d[key]])
                list_xyz = list_xyz[1:]  # drop the [0, 0, 0] seed point
                new_points.append(list(method(list_xyz, axis=0)))
            write_points(new_points, path_mms + fn_cell + '.ply')
            del new_points
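
# Usage sketch (hypothetical paths): `method` must be a NumPy-style reduction
# that accepts an `axis` keyword, e.g. np.mean or np.median.
#
#   calc_mms('..\\mms_final\\', '..\\mms_cells\\', np.median)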


def final_dtm(path_final, path_mms, path_ref, in_dir_ref):
    # Merge the per-cell MMS and reference DTM tiles into the final DTM model.
    list_mms = os.listdir(path_mms)
    list_ref = os.listdir(path_ref)
    d_mms = dict()
    for mms in list_mms:
        d_mms[mms[:17]] = mms
    d_ref = dict()
    for ref in list_ref:
        d_ref[ref[:17]] = ref

    # Add merged, single MMS or single reference cells into the final DTM model
    list_all_cells = list(set(d_mms.keys() + d_ref.keys()))
    for cell in tqdm(list_all_cells):
        mm, nn = read_cellname(cell)
        data = np.array([0, 0, 0])
        if d_ref.has_key(cell):
            fn_ref = path_ref + d_ref[cell]
            data_ref = read_bin(fn_ref, 7)
            data = mergecloud(data, data_ref)
        if d_mms.has_key(cell):
            fn_mms = path_mms + d_mms[cell]
            data_mms = read_bin(fn_mms, 7)
            data = mergecloud(data, data_mms)
        write_points_double(data[1:] + [mm, nn, 0], path_final + cell + '.ply')
        print cell
    print 'mms+ref saved'

    # Add the reference DTM cells that were never processed into the final DTM model
    list_all_ref = os.listdir(in_dir_ref)  # reference DTM (DTM_2009, 0.5 m resolution)
    list_ref_rest = list(set(list_all_ref) - set(list_all_cells))  # difference with the processed cells
    for cell in tqdm(list_ref_rest):
        mm, nn = read_cellname(cell)
        fn = in_dir_ref + cell + '\\' + cell + '.ply'
        data = read_ply(fn, 7)
        write_points_double(data + [mm, nn, 0], path_final + cell + '.ply')
    print 'rest ref saved'


def local_to_UTM_rest_ref_core(fn, args):
    in_dir, out_dir, r, x_offset, y_offset = args
    m, n = fn[:17].split('_')
    [mm, nn] = coord(m, n, r, x_offset, y_offset)
    new_pointcloud = np.array(read_bin(in_dir + fn, 7))
    write_points_double(new_pointcloud + [mm, nn, 0], out_dir + fn)
    return True


def cal_ref_main(list_all_cells, path, out_fn):
    result_points = np.array([0, 0, 0])
    for cell in list_all_cells:
        x0, y0 = read_cellname(cell[:17])
        fn = path + cell
        data = read_bin(fn, 7)
        if len(data) > 0:
            data = np.array(data) + [x0, y0, 0]
            result_points = mergecloud(result_points, data)
            del data
    write_points_double(result_points[1:], out_fn)


def local_to_UTM_update_ref_core(fn, args):
    # Shift one updated reference tile to UTM. If the cell has several updated
    # versions, merge them and average the heights on a 0.5 m grid first.
    in_dir, out_dir, r, x_offset, y_offset = args
    m, n = fn[:17].split('_')
    [mm, nn] = coord(m, n, r, x_offset, y_offset)
    list_duplicate = os.listdir(in_dir + fn)
    if len(list_duplicate) == 1:
        new_pointcloud = np.array(read_bin(in_dir + fn + '\\' + list_duplicate[0], 7))
        write_points_double(new_pointcloud + [mm, nn, 0], out_dir + fn)
    else:
        pointcloud = np.array([0, 0, 0])
        for fn_under in list_duplicate:
            pointcloud = mergecloud(
                pointcloud, np.array(read_bin(in_dir + fn + '\\' + fn_under, 7)))
        pointcloud = pointcloud[1:]  # drop the [0, 0, 0] seed point
        d = rasterize(pointcloud, 0.5, dim=2)
        new_pointcloud = []
        for key in d.keys():
            new_pointcloud.append(np.mean(pointcloud[d[key]], axis=0))
##            if np.std(pointcloud[d[key]][:, 2]) > 0.1:
##                print fn, np.std(pointcloud[d[key]][:, 2]), np.mean(pointcloud[d[key]], axis=0)
        write_points_double(np.array(new_pointcloud) + [mm, nn, 0], out_dir + fn)


def local_to_UTM_core(fn, args):
    # Shift one MMS-derived tile to UTM (applying the z offset) and merge in the
    # matching reference update for that cell, if one exists.
    mms_dir, out_dir, update_dir, list_update, r, x_offset, y_offset, z_offset = args
    m, n = fn[:17].split('_')
    [mm, nn] = coord(m, n, r, x_offset, y_offset)
    args = mms_dir, 0, 0, r, x_offset, y_offset, 0, 0
    #data = read_bin(mms_dir + fn, 7)
    data = load_mms(fn, args)
    data_mms = np.array(data) + [mm, nn, -z_offset]
    if fn in list_update:
        data_update = np.array(read_bin(update_dir + fn, 7))
        if len(data_update) > 0:
            data_mms = mergecloud(data_mms, data_update + [mm, nn, 0])
    write_points_double(data_mms, out_dir + fn)
    return True
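
# Usage sketch (hypothetical values): `args` must match the unpacking at the top
# of local_to_UTM_core; the file names passed in are the per-cell tile names.
#
#   list_update = os.listdir(update_dir)
#   args = (mms_dir, out_dir, update_dir, list_update, r, x_offset, y_offset, z_offset)
#   for fn in list_cell_tiles:
#       local_to_UTM_core(fn, args)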


def split_ref_to_tiles(ref_path, ref_out_dir, r, x_offset, y_offset, res_ref):
    # Split the large reference DTM files into per-cell tiles of edge length r.
    # Tiles cut across two input files are merged back together, and tiles that
    # remain incomplete are moved into the 'reduced' folder.
    check_and_create(ref_out_dir)
    list_ref_ply = os.listdir(ref_path)
    print("Loading")
    num_point_per_kacheln = int(pow(r / res_ref, 2))  # points in a complete tile
    imcomplete = []
    for fn_ref in list_ref_ply:
        data = read_bin_double(ref_path + fn_ref, 9)
        data = np.array(data) - [x_offset, y_offset, 0]
        d = rasterize(data, r, dim=2)
        check_and_create(ref_out_dir + fn_ref)
        for cell_idx in d.keys():
            x, y = [int(idx) for idx in cell_idx.split('+')]
            ply_name, cell_name = coord_fn_from_cell_index(x, y, '')
            subdata = data[d[cell_idx]] - [x * r, y * r, 0]
            output_fn = ref_out_dir + fn_ref + '\\' + cell_name + '.ply'
            write_points(subdata, output_fn)
            if len(d[cell_idx]) < num_point_per_kacheln:
                imcomplete.append(cell_name + '.ply')
            del subdata
        del data, d
    print("Load finished")

    # Merge tiles that were split across neighbouring input files.
    list_ref_tiles = os.listdir(ref_out_dir)
    removed = []
    for i in xrange(len(list_ref_tiles) - 1):
        list_left = ref_out_dir + list_ref_tiles[i]
        list_right = ref_out_dir + list_ref_tiles[i + 1]
        left = os.listdir(list_left)
        right = os.listdir(list_right)
        intersect = set(left).intersection(right)
        for fn in intersect:
            data_left = read_bin(list_left + '\\' + fn, 7)
            data_right = read_bin(list_right + '\\' + fn, 7)
            data_new = mergecloud(data_left, data_right)
            if len(data_new) == num_point_per_kacheln:
                os.remove(list_left + '\\' + fn)
                os.remove(list_right + '\\' + fn)
                write_points(data_new, ref_out_dir + fn)
                removed.append(fn)

    # Move incomplete tiles to 'reduced' and flatten the per-file folders.
    intersection = list(set(imcomplete) - set(removed))
    check_and_create(ref_out_dir + 'reduced\\')
    for fn_list in list_ref_tiles:
        path = ref_out_dir + fn_list + '\\'
        list_path = os.listdir(path)
        for fn_file in list_path:
            if fn_file in intersection:
                shutil.move(path + fn_file, ref_out_dir + 'reduced\\' + fn_file)
            else:
                shutil.move(path + fn_file, ref_out_dir + fn_file)
    [os.rmdir(ref_out_dir + fn_list) for fn_list in list_ref_tiles]
    print 'Finished splitting'
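
# Usage sketch (hypothetical values): with r = 25 m tiles and res_ref = 0.5 m,
# a complete tile holds (25 / 0.5) ** 2 = 2500 points, matching the count used
# as the completeness check in cal_ref_duplicate below.
#
#   split_ref_to_tiles(ref_path, ref_out_dir, 25.0, x_offset, y_offset, 0.5)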


def cal_ref_duplicate(full_list, out_dir_update, path):
    # Resolve cells that received several reference updates: keep the single
    # valid tile where possible, otherwise take the per-pixel median height
    # over the incomplete tiles.
    list_cells = os.listdir(out_dir_update)
    for fn_cell in list_cells:
        list_ply = os.listdir(out_dir_update + fn_cell)
        if len(list_ply) == 1:
            copytoDst(out_dir_update + fn_cell, list_ply[0], path)
        else:
            list_count = []
            for fn_ply in list_ply:
                fn = out_dir_update + fn_cell + '\\' + fn_ply
                data = read_bin(fn, 7)
                list_count.append(len(data))
            if np.mean(list_count) == 2500:
                # All duplicate tiles are complete (2500 points each): any will do.
                copytoDst(out_dir_update + fn_cell, list_ply[0], path)
            else:
                list_count = np.array(list_count)
                new_count = list_count[list_count != 2500]
                new_list = np.array(list_ply)[list_count != 2500]
                if len(new_count) == 1:
                    copytoDst(out_dir_update + fn_cell, new_list[0], path)
                else:
                    # Several incomplete tiles: median over the grid cells they all share.
                    union = []
                    list_data = []
                    list_d = []
                    for fn_ply in new_list:
                        fn = out_dir_update + fn_cell + '\\' + fn_ply
                        data = read_bin(fn, 7)
                        d = rasterize(data, 0.5, dim=2)
                        list_data.append(data)
                        list_d.append(d)
                        del_list = set(full_list) - set(d.keys())
                        union.extend(del_list)
                    res = list(set(full_list) - set(union))
                    new_points = []
                    for key in res:
                        list_z = []
                        x = 0
                        y = 0
                        for i in xrange(len(list_d)):
                            data = list_data[i]
                            d = list_d[i]
                            list_z.append(data[d[key][0]][2])
                            x, y = data[d[key][0]][0:2]
                        new_points.append([x, y, np.median(list_z)])
                    if len(res) > 0:
                        write_points(new_points, path + fn_cell + '.ply')
                        if np.std(list_z) > 0.5:
                            print fn_cell, len(res), np.mean(list_z), np.median(list_z), np.std(list_z)


def update_dtm(list_shift_img, raster_size, radius, ref_cut_dir, ref_update_dir,
               shift, res_ref, list_pointcloud_ref, ref_out_dir):
    # Apply the estimated height shifts to the reference DTM. For every shift
    # image, a (2 * radius + 1) x (2 * radius + 1) neighbourhood mosaic is built,
    # smoothed with apply_gaussian, and the interpolated correction is written
    # for the centre cell and for neighbouring reference-only cells that receive
    # a non-zero correction.
    nonvalue = -999.0  # no-data marker passed to apply_gaussian
    check_and_create(ref_cut_dir)
    check_and_create(ref_update_dir)
    for fn in tqdm(list_shift_img.keys()):
        img = list_shift_img[fn] - shift
        single_len = img.shape[0]
        new_size = (2 * radius + 1) * img.shape[0]
        neighbour = np.zeros((new_size, new_size))
        m, n = fn[:17].split('_')
        int_m = Hexa2Decimal(m)
        int_n = Hexa2Decimal(n)
        combi = np.array(list(product(range(-radius, radius + 1),
                                      range(-radius, radius + 1))))
        combi_global = combi + [int_m, int_n]
        neigh_list = [coord_fn_from_cell_index(m, n, '')[1] + '.ply'
                      for m, n in combi_global]
        # Fill the mosaic with the shift images of the neighbouring cells;
        # remember reference cells that have no shift image of their own.
        not_in_list = []
        for neigh, loc in zip(neigh_list, combi):
            if neigh in list_shift_img.keys():
                a, b = (loc + radius) * single_len
                neighbour[a:a + single_len, b:b + single_len] = list_shift_img[neigh] - shift
            else:
                if neigh in list_pointcloud_ref:
                    a, b = (loc + radius) * single_len
                    not_in_list.append([neigh, (a, b)])
        print fn, not_in_list
        img = np.nan_to_num(neighbour)
        filtered, boundbuffer, mask = apply_gaussian(img, 0, 0, nonvalue, 'linear')
        boundbuffer = np.nan_to_num(boundbuffer)
        # Cut out the centre cell of the smoothed mosaic and update its heights.
        a, b = (np.array([0, 0]) + radius) * single_len
        update = boundbuffer[a:a + single_len, b:b + single_len]
        upmask = mask[a:a + single_len, b:b + single_len]
        data_ref = read_bin(ref_out_dir + fn, 7)
        d_ref = rasterize(data_ref, res_ref, dim=2)
        data_ref = np.array(data_ref)
        raster_size = single_len
        data_output = []
        for i in xrange(0, raster_size):
            for j in xrange(0, raster_size):
                string = str.join('+', [str(i), str(j)])
                index = d_ref[string]
                if upmask[i, j] == 0:
                    data_output.append(data_ref[index][0] + [0, 0, update[i, j]])
        write_points(data_output, ref_cut_dir + fn)
        # Propagate non-zero corrections into neighbouring reference-only cells.
        for fn_not, (a, b) in not_in_list:
            update = boundbuffer[a:a + single_len, b:b + single_len]
            print np.sum(update)
            if abs(np.sum(update)) > 0.01:
                data_ref = read_bin(ref_out_dir + fn_not, 7)
                d_ref = rasterize(data_ref, res_ref, dim=2)
                data_ref = np.array(data_ref)
                data_output = []
                for i in xrange(0, raster_size):
                    for j in xrange(0, raster_size):
                        string = str.join('+', [str(i), str(j)])
                        index = d_ref[string]
                        data_output.append(data_ref[index][0] + [0, 0, update[i, j]])
                check_and_create(ref_update_dir + fn_not)
                write_points(data_output,
                             ref_update_dir + fn_not + '\\' + fn_not + '_from_' + fn)
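
# Usage sketch (hypothetical values): `list_shift_img` is expected to map cell
# file names ('<m>_<n>... .ply') to square 2-D shift rasters; `shift` is the
# global offset subtracted from every raster, `radius` the neighbourhood
# half-width in cells, and `list_pointcloud_ref` the available reference tiles.
#
#   update_dtm(list_shift_img, raster_size, 1, ref_cut_dir, ref_update_dir,
#              shift, 0.5, os.listdir(ref_out_dir), ref_out_dir)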