def condense_lcm_folder(basis_folder, TOL=None):
    """
    Condenses the ``fort.13`` landuse classification mesh files in the
    ``landuse_*`` folders under ``basis_folder`` by removing values that
    are below ``TOL``.

    :param string basis_folder: the path to the directory containing the
        ``landuse_##`` folders
    :param double TOL: tolerance close to zero, default is 1e-7
    """
    folders = glob.glob(os.path.join(basis_folder, "landuse_*"))
    # Round-robin the folders over the MPI ranks: this process handles
    # folders[rank], folders[rank + size], folders[rank + 2*size], ...
    for folder in folders[rank::size]:
        condensed = condense_bv_dict(f13.read_nodal_attr_dict(folder), TOL)
        f13.update_mann(condensed, folder)
# NOTE(review): this is a byte-for-byte duplicate of the
# ``condense_lcm_folder`` defined immediately above; this second
# definition silently shadows the first. One of the two should be
# removed.
def condense_lcm_folder(basis_folder, TOL=None):
    """
    Condenses the ``fort.13`` landuse classification mesh files in the
    ``landuse_*`` folders under ``basis_folder`` by removing values that
    are below ``TOL``.

    :param string basis_folder: the path to the directory containing the
        ``landuse_##`` folders
    :param double TOL: tolerance close to zero, default is 1e-7
    """
    folders = glob.glob(os.path.join(basis_folder, "landuse_*"))
    # Stride over the folders by MPI rank so that each process condenses
    # a disjoint subset of the landuse folders.
    for i in range(0+rank, len(folders), size):
        mann_dict = f13.read_nodal_attr_dict(folders[i])
        mann_dict = condense_bv_dict(mann_dict, TOL)
        f13.update_mann(mann_dict, folders[i])
def get_basis_vectors(path=None):
    """
    Read the nodal-attribute basis vector for every landuse
    classification folder under ``path``.

    Each dict is structured as follows:

        keys -- node number
        values -- weighting for spatial averaging

    Currently assumes only 1 folder per landuse classification with the
    name ``landuse_*/``.

    :param string path: folder containing the landuse folders; defaults
        to the current working directory when ``None``
    :rtype: list
    :returns: list of dicts for each landuse classification, in sorted
        folder-name order
    """
    if path is None:
        # The previous implementation raised a TypeError here
        # (``None + '/landuse_*'``); default to the current directory,
        # consistent with ``compare``.
        path = os.getcwd()
    landuse_folders = sorted(glob.glob(os.path.join(path, "landuse_*")))
    return [f13.read_nodal_attr_dict(folder) for folder in landuse_folders]
# NOTE(review): duplicate definition of ``get_basis_vectors`` -- an
# identical version appears earlier in this module and is shadowed by
# this one. One of the two should be removed. Also note the default
# ``path=None`` raises a TypeError on ``path+'/landuse_*'``; callers
# must always pass a path.
def get_basis_vectors(path=None):
    """
    Each dict is structured as follows:

        keys -- node number
        values -- weighting for spatial averaging

    Currently assumes only 1 folder per landuse classification with the
    name ``landuse_*/``.

    :param string path: folder containing the landuse folders
    :rtype: list
    :returns: list of dicts for each landuse classification
    """
    landuse_folders = glob.glob(path+'/landuse_*')
    # Sort so the basis vectors line up with the landuse class numbering
    # encoded in the folder names.
    landuse_folders.sort()
    basis_vec = [f13.read_nodal_attr_dict(folder) for folder in landuse_folders]
    return basis_vec
def compare(basis_dir=None, default=0.012):
    """
    Create a set of diagnostic plots in ``basis_dir/figs`` comparing the
    original ``fort.13`` Manning's *n* field against its reconstruction
    from the landuse basis vectors.

    :param string basis_dir: directory containing the test folder and
        landuse folders; defaults to the current working directory
    :param float default: default Manning's *n*
    """
    if basis_dir is None:
        basis_dir = os.getcwd()
    tables = tm.read_tables(os.path.join(basis_dir, 'test'))
    domain = dom.domain(basis_dir)
    domain.read_spatial_grid()
    # Start with a clean figure directory: create it if missing and
    # delete any plots left over from a previous run.
    fm.mkdir(os.path.join(basis_dir, 'figs'))
    old_files = glob.glob(os.path.join(basis_dir, 'figs', '*.png'))
    for fid in old_files:
        os.remove(fid)
    domain.get_Triangulation(path=basis_dir)
    # Reference field: the original nodal attribute expanded to a dense
    # per-node array, with ``default`` filling unlisted nodes.
    original = f13.read_nodal_attr_dict(os.path.join(basis_dir, 'test'))
    original = tmm.dict_to_array(original, default, domain.node_num)
    # NOTE(review): assumes dict iteration order of ``land_classes``
    # matches the ordering of the basis vectors returned by
    # ``get_basis_vectors`` -- verify, since plain dicts here are
    # unordered.
    weights = np.array(tables[0].land_classes.values())
    # Shared color limits so the original and reconstruction plots are
    # directly comparable.
    lim = (np.min(weights), np.max(weights))
    bv_dict = tmm.get_basis_vectors(basis_dir)
    combo = tmm.combine_basis_vectors(weights, bv_dict, default, domain.node_num)
    bv_array = tmm.get_basis_vec_array(basis_dir, domain.node_num)
    plt.basis_functions(domain, bv_array, path=basis_dir)
    plt.field(domain, original, 'original', clim=lim, path=basis_dir)
    plt.field(domain, combo, 'reconstruction', clim=lim, path=basis_dir)
    plt.field(domain, original-combo, 'difference', path=basis_dir)
    # Same reconstruction computed via the array (rather than dict)
    # representation, plus pairwise difference plots as a consistency
    # check between the two code paths.
    combo_array = tmm.combine_bv_array(weights, bv_array)
    plt.field(domain, combo_array, 'combo_array', clim=lim, path=basis_dir)
    plt.field(domain, original-combo_array, 'diff_ori_array', path=basis_dir)
    plt.field(domain, combo-combo_array, 'diff_com_array', path=basis_dir)
    # All-ones weighting shows the summed coverage of the basis vectors.
    combo_bv = tmm.combine_basis_vectors(np.ones(weights.shape), bv_dict, default, domain.node_num)
    plt.field(domain, combo_bv, 'combo_bv', path=basis_dir)
def prep_all(self, removeBinaries=False, class_nums=None, condense=True, TOL=None): """ Assumes that all the necessary input files are in ``self.basis_dir``. This function generates a ``landuse_##`` folder in ``self.basis_dir`` for every land classification number containing a ``fort.13`` file specific to that land classification number. .. todo:: Update so that landuse folders can be prepped n at a time and so that this could be run on a HPC system Currently, the parallel option preps the first folder and then all the remaining folders at once. :param binary parallel: Flag whether or not to simultaneously prep landuse folders. :param binary removeBinarues: Flag whether or not to remove ``*.asc.binary`` files when completed. :param list class_nums: List of integers indicating which classes to prep. This assumes all the ``*.asc.binary`` files are already in existence. :param bool condense: Flag whether or not to condense ``fort.13`` to only non-zero values within a tolerance. :param double TOL: Tolerance below which to consider a Manning's n value to be zero if ``condense == True`` """ if class_nums is None: class_nums = range(len(self.__landclasses)) if rank > class_nums: print "There are more MPI TASKS than land classes." print "This code only scales to MPI_TASKS = len(land_classes)." print "Extra MPI TASKS will not be used." return # Are there any binary files? 
binaries = glob.glob(os.path.join(self.basis_dir, '*.asc.binary')) # If not create them if not(binaries) and rank == 0: # set up first landuse folder first_script = self.setup_landuse_folder(class_nums[0]) # set up remaining land-use classifications script_list = self.setup_landuse_folders(False) # run grid_all_data in this folder subprocess.call(['./'+first_script], cwd=self.basis_dir) class_nums.remove(0) landuse_folder = 'landuse_00' self.cleanup_landuse_folder(os.path.join(self.basis_dir, landuse_folder)) fm.rename13([landuse_folder], self.basis_dir) if condense: print "Removing values below TOL" landuse_folder_path = os.path.join(self.basis_dir, landuse_folder) # read fort.13 file mann_dict = f13.read_nodal_attr_dict(landuse_folder_path) # condense fort.13 file condensed_bv = tmm.condense_bv_dict(mann_dict, TOL) # write new file f13.update_mann(condensed_bv, landuse_folder_path) elif rank == 0: script_list = self.setup_landuse_folders() else: script_list = None class_nums = None class_nums = comm.bcast(class_nums, root=0) script_list = comm.bcast(script_list, root=0) if len(class_nums) != len(script_list): temp = [script_list[i] for i in class_nums] script_list = temp # run remaining bash scripts for i in range(0+rank, len(script_list), size): # run griddata subprocess.call(['./'+script_list[i]], cwd=self.basis_dir) # clean up folder match_string = r"grid_all_(.*)_"+self.file_name[:-3]+r"\.sh" landuse_folder = re.match(match_string, script_list[i]).groups()[0] self.cleanup_landuse_folder(os.path.join(self.basis_dir, landuse_folder)) # rename fort.13 file fm.rename13([landuse_folder], self.basis_dir) if condense: print "Removing values below TOL" landuse_folder_path = os.path.join(self.basis_dir, landuse_folder) # read fort.13 file mann_dict = f13.read_nodal_attr_dict(landuse_folder_path) # condense fort.13 file condensed_bv = tmm.condense_bv_dict(mann_dict, TOL) # write new file f13.update_mann(condensed_bv, landuse_folder_path) print "Done" # remove 
unnecessary files if removeBinaries and rank == 0: binaries = glob.glob(os.path.join(self.basis_dir, '*.asc.binary')) for f in binaries: os.remove(f)