def test_append_random_one_arg(self):
    """Suffix length passed positionally extends the base name."""
    base_name = "tmp_abc"
    size = 10
    full_name = utils.append_random(base_name, size)
    self.assertIn(base_name, full_name)
    self.assertGreater(len(full_name), len(base_name))
    self.assertGreaterEqual(len(full_name), len(base_name) + size)
    self.assertTrue(legal_name(full_name))
    full_name2 = utils.append_random(base_name, size)
    # There is a low chance of collision.
    self.assertNotEqual(full_name, full_name2)
def test_append_random_suffix(self):
    """Suffix length passed as a keyword argument extends the base name."""
    base_name = "tmp_abc"
    size = 10
    full_name = utils.append_random(base_name, suffix_length=size)
    self.assertIn(base_name, full_name)
    self.assertGreater(len(full_name), len(base_name))
    self.assertGreaterEqual(len(full_name), len(base_name) + size)
    self.assertTrue(legal_name(full_name))
    full_name2 = utils.append_random(base_name, suffix_length=size)
    # There is a low chance of collision.
    self.assertNotEqual(full_name, full_name2)
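# The tests above only assert on the shape of the generated names. As a
# hypothetical illustration of that contract (not the grass.script.utils
# implementation), a random-suffix helper could behave like this: the result
# contains the base name, is longer by the suffix length plus a separator,
# and two calls with the same arguments rarely collide.
import secrets
import string


def example_append_random(base_name, suffix_length):
    """Hypothetical stand-in for utils.append_random, for illustration only."""
    alphabet = string.ascii_lowercase + string.digits
    suffix = "".join(secrets.choice(alphabet) for _ in range(suffix_length))
    return f"{base_name}_{suffix}"


# Example: example_append_random("tmp_abc", 10) -> e.g. "tmp_abc_k3v9qz01ab"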
def main():
    options, flags = gs.parser()
    original = options["input"]
    output = options["output"]
    cats = options["cats"]
    rules = parse(original, cats)
    if flags["c"] and flags["s"]:
        gs.warning(
            _("The extent of the output reclassified raster cannot be changed")
        )
    if flags["s"]:
        # Reclassify directly into the output map.
        reclass(original, output, rules)
    else:
        # Reclassify into a temporary map first, so the region can optionally
        # be zoomed to the reclassified data before writing the output.
        output_tmp = append_random("tmp", 8)
        TMP.append(output_tmp)
        reclass(original, output_tmp, rules)
        if flags["c"]:
            gs.use_temp_region()
            atexit.register(gs.del_temp_region)
            gs.run_command("g.region", zoom=output_tmp)
        gs.mapcalc(output + " = " + output_tmp)
    gs.run_command("r.colors", map=output, raster=original, quiet=True)
    gs.raster_history(output)
def patch_analysis_per_subregion_parallel(
    development_diff, subregions, threshold, tmp_clump, tmp_name, nprocs
):
    # Identify contiguous patches in the development change raster.
    gcore.run_command(
        "r.clump",
        input=development_diff,
        output=tmp_clump,
        overwrite=True,
        quiet=True,
    )
    cats = (
        gcore.read_command("r.describe", flags="1n", map=subregions, quiet=True)
        .strip()
        .splitlines()
    )
    params = []
    toremove = []
    for cat in cats:
        # One temporary clump map per subregion category.
        tmp_clump_cat = append_random(tmp_name, suffix_length=8)
        toremove.append(tmp_clump_cat)
        params.append((tmp_clump_cat, subregions, cat, tmp_clump, threshold))
    with Pool(processes=nprocs) as pool:
        results = pool.map_async(analyse_subregion, params).get()
    subregions_data = dict(zip(cats, results))
    gcore.run_command(
        "g.remove", type="raster", flags="f", name=toremove, quiet=True
    )
    return subregions_data