def _convert_lccm_input(self, flt_directory_in, flt_directory_out): gc.collect() t1 = time() lc = LandCoverDataset(in_storage=StorageFactory().get_storage( 'flt_storage', storage_location=flt_directory_in), out_storage=StorageFactory().get_storage( 'flt_storage', storage_location=flt_directory_out)) # lc.get_header() # added 23 june 2009 by mm mask = lc.get_mask() idx = where(mask == 0)[0] lcsubset = DatasetSubset(lc, idx) print "Converting:" lcsubset.write_dataset(attributes=["relative_x"], out_table_name="land_covers") lc.delete_one_attribute("relative_x") lcsubset.write_dataset(attributes=["relative_y"], out_table_name="land_covers") lc.delete_one_attribute("relative_y") lc.flush_dataset() gc.collect() # lc_names = lc.get_primary_attribute_names() for attr in lc.get_primary_attribute_names(): print " ", attr lcsubset.write_dataset(attributes=[attr], out_table_name="land_covers") lc.delete_one_attribute(attr) logger.log_status("Data conversion done. " + str(time() - t1) + " s")
def _create_flt_file(self, current_year, flt_directory_in, flt_directory_out):
    """Export the land-cover grid for *current_year* as flt raster files.

    Reads the dataset from ``flt_directory_in`` and writes one
    ``.lf4``/``.hdr`` pair per exported attribute under
    ``flt_directory_out/land_covers``; computed ``probs*`` attributes go to a
    ``computed`` subdirectory.  Only the ``lct`` primary attribute is exported
    at present (see the hard-coded override below).
    """
    logger.log_status("Convert output data for ", str(current_year))
    flt_directory_out = os.path.join(flt_directory_out, 'land_covers')
    if not os.path.exists(flt_directory_out):
        os.makedirs(flt_directory_out)
    lc = LandCoverDataset(in_storage=StorageFactory().get_storage(
        'flt_storage', storage_location=flt_directory_in))
    relative_x = lc.get_attribute("relative_x")
    relative_y = lc.get_attribute("relative_y")
    # Flatten (x, y) grid coordinates into a single float index per cell.
    flat_indices = relative_x * self.ncols * 1.0 + relative_y
    # BUGFIX: the original condition also tested
    # ``flat_indices[5*self.ncols:] is None`` — slicing an array never yields
    # None, so that test was dead; the length check alone decides.
    if len(flat_indices[5 * self.ncols:]) == 0:
        offset = 0
    else:
        # skip the first 5 raster rows — presumably a header band; TODO confirm
        offset = 5 * self.ncols
    logger.start_block("Converting")
    try:
        for attr_name in lc.get_primary_attribute_names():
            if attr_name not in ["relative_x", "relative_y"]:
                attr_name = "lct"  #-------------- only output lct now
                logger.log_status("      ", attr_name)
                # Fill masked cells with the nodata sentinel before writing.
                attr = ma.filled(lc.get_attribute(attr_name),
                                 self.nodata_values).astype(float32)
                self._create_flt_file2(
                    os.path.join(flt_directory_out, attr_name + ".lf4"),
                    attr, flat_indices, offset)
                self._create_header(
                    os.path.join(flt_directory_out, attr_name + ".hdr"))  #<-- added 26 may 09 by mm
                del attr
                break  #-------------- only output lct now
        lc.load_dataset(attributes='*')
        if lc.get_computed_attribute_names() is not None:
            flt_directory_out = os.path.join(flt_directory_out, "computed")
            if not os.path.exists(flt_directory_out):
                os.makedirs(flt_directory_out)
            for attr_name in lc.get_computed_attribute_names():
                # Only probability rasters are exported; the internal id column is skipped.
                if attr_name not in ["_hidden_id_"]:
                    if attr_name[0:5] == "probs":
                        logger.log_status("      ", attr_name)
                        attr = ma.filled(lc.get_attribute(attr_name),
                                         self.nodata_values).astype(float32)
                        self._create_flt_file2(
                            os.path.join(flt_directory_out, attr_name + ".lf4"),
                            attr, flat_indices, offset)
                        self._create_header(
                            os.path.join(flt_directory_out, attr_name + ".hdr"))  #<-- added 26 may 09 by mm
                        del attr
    finally:
        # lc.flush_dataset() # added 23 jun 2009 - not tested...
        logger.end_block()
def _convert_lccm_input(self, flt_directory_in, flt_directory_out): gc.collect() t1 = time() lc = LandCoverDataset(in_storage = StorageFactory().get_storage('flt_storage', storage_location = flt_directory_in), out_storage = StorageFactory().get_storage('flt_storage', storage_location = flt_directory_out)) # lc.get_header() # added 23 june 2009 by mm mask = lc.get_mask() idx = where(mask==0)[0] lcsubset = DatasetSubset(lc, idx) print "Converting:" lcsubset.write_dataset(attributes=["relative_x"], out_table_name="land_covers") lc.delete_one_attribute("relative_x") lcsubset.write_dataset(attributes=["relative_y"], out_table_name="land_covers") lc.delete_one_attribute("relative_y") lc.flush_dataset() gc.collect() # lc_names = lc.get_primary_attribute_names() for attr in lc.get_primary_attribute_names(): print " ", attr lcsubset.write_dataset(attributes=[attr], out_table_name="land_covers") lc.delete_one_attribute(attr) logger.log_status("Data conversion done. " + str(time()-t1) + " s")
# 4. Years - date pair of input data; year is concatenated to flt_directory_in specified in #1 #years = [1991, 1995] #years = [1995, 1999] #years = [2002] #years = sys.argv[3] years = [2007, 2007] lc1 = LandCoverDataset(in_storage = StorageFactory().get_storage('flt_storage', storage_location = os.path.join(flt_directory_in, str(years[0]))), out_storage = StorageFactory().get_storage('flt_storage', storage_location = os.path.join(flt_directory_out, str(years[0])))) agents_index = where(lc1.get_attribute(index_attribute))[0] lc1subset = DatasetSubset(lc1, agents_index) print "Writing set 1:" for attr in lc1.get_primary_attribute_names(): print " ", attr lc1subset.write_dataset(attributes=[attr], out_table_name="land_covers") lc1.delete_one_attribute(attr) # leaving this line in causes the processing of every other input data file; commenting it causes memory error lc2 = LandCoverDataset(in_storage = StorageFactory().get_storage('flt_storage', storage_location = os.path.join(flt_directory_in, str(years[1]))), out_storage = StorageFactory().get_storage('flt_storage', storage_location = os.path.join(flt_directory_out, str(years[1])))) lc2subset = DatasetSubset(lc2, agents_index) print "Writing set 2:" for attr in lc2.get_primary_attribute_names(): print " ", attr lc2subset.write_dataset(attributes=[attr], out_table_name="land_covers") lc2.delete_one_attribute(attr) # leaving this line in causes the processing of every other input data file ; commenting it causes memory error
out_storage=StorageFactory().get_storage( 'flt_storage', storage_location=flt_directory_out)) lc.get_header() # added 23 june 2009 by mm mask = lc.get_mask() idx = where(mask == 0)[0] lcsubset = DatasetSubset(lc, idx) print "Converting:" lcsubset.write_dataset(attributes=["relative_x"], out_table_name="land_covers") #lcsubset.write_dataset(attributes=["relative_x"], out_table_name="land_covers", # valuetypes=valuetypes) lc.delete_one_attribute("relative_x") lcsubset.write_dataset(attributes=["relative_y"], out_table_name="land_covers") #lcsubset.write_dataset(attributes=["relative_y"], out_table_name="land_covers", # valuetypes=valuetypes) lc.delete_one_attribute("relative_y") # srcdir = os.path.join(flt_directory_out, "land_covers", "computed") # shutil.move(os.path.join(srcdir,"relative_x.li4"), os.path.join(flt_directory_out, "land_covers")) # shutil.move(os.path.join(srcdir,"relative_y.li4"), os.path.join(flt_directory_out, "land_covers")) # shutil.rmtree(srcdir) for attr in lc.get_primary_attribute_names(): print " ", attr lcsubset.write_dataset(attributes=[attr], out_table_name="land_covers") # lcsubset.write_dataset(attributes=[attr], out_table_name="land_covers", # valuetypes=valuetypes) lc.delete_one_attribute(attr) logger.log_status("Data conversion done. " + str(time() - t1) + " s")
def _create_flt_file(self, current_year, flt_directory_in, flt_directory_out):
    """Export the land-cover grid for *current_year* as flt raster files.

    Writes one ``.lf4``/``.hdr`` pair per exported attribute under
    ``flt_directory_out/land_covers``; computed ``probs*`` attributes go to a
    ``computed`` subdirectory.  Only ``lct`` is exported at present (see the
    hard-coded override and ``break`` below).
    """
    logger.log_status("Convert output data for ", str(current_year))
    flt_directory_out = os.path.join(flt_directory_out, 'land_covers')
    if not os.path.exists(flt_directory_out):
        os.makedirs(flt_directory_out)
    lc = LandCoverDataset(in_storage=StorageFactory().get_storage(
        'flt_storage', storage_location=flt_directory_in))
    relative_x = lc.get_attribute("relative_x")
    relative_y = lc.get_attribute("relative_y")
    # Flatten (x, y) grid coordinates into a single float index per cell.
    flat_indices = relative_x * self.ncols * 1.0 + relative_y
    # NOTE(review): ``flat_indices[5 * self.ncols:] is None`` is always False
    # for an array slice, so only the length test below ever decides.
    if flat_indices[5 * self.ncols:] is None or len(
            flat_indices[5 * self.ncols:]) == 0:
        offset = 0
    else:
        # Skip the first 5 raster rows — presumably a header band; TODO confirm.
        offset = 5 * self.ncols
    logger.start_block("Converting")
    try:
        for attr_name in lc.get_primary_attribute_names():
            if attr_name not in ["relative_x", "relative_y"]:
                attr_name = "lct"  #-------------- only output lct now
                logger.log_status("      ", attr_name)
                # Fill masked cells with the nodata sentinel before writing.
                attr = ma.filled(lc.get_attribute(attr_name),
                                 self.nodata_values).astype(float32)
                self._create_flt_file2(
                    os.path.join(flt_directory_out, attr_name + ".lf4"),
                    attr, flat_indices, offset)
                self._create_header(
                    os.path.join(flt_directory_out,
                                 attr_name + ".hdr"))  #<-- added 26 may 09 by mm
                del attr
                break  #-------------- only output lct now
        lc.load_dataset(attributes='*')
        if lc.get_computed_attribute_names() is not None:
            # Computed attributes land in a "computed" subdirectory.
            flt_directory_out = os.path.join(flt_directory_out, "computed")
            if not os.path.exists(flt_directory_out):
                os.makedirs(flt_directory_out)
            for attr_name in lc.get_computed_attribute_names():
                # Skip the internal id column; export only probability rasters.
                if attr_name not in ["_hidden_id_"]:
                    if attr_name[0:5] == "probs":
                        logger.log_status("      ", attr_name)
                        attr = ma.filled(
                            lc.get_attribute(attr_name),
                            self.nodata_values).astype(float32)
                        self._create_flt_file2(
                            os.path.join(flt_directory_out,
                                         attr_name + ".lf4"), attr,
                            flat_indices, offset)
                        self._create_header(
                            os.path.join(
                                flt_directory_out,
                                attr_name + ".hdr"))  #<-- added 26 may 09 by mm
                        del attr
    finally:
        # lc.flush_dataset() # added 23 jun 2009 - not tested...
        logger.end_block()
relative_x = lc.get_attribute("relative_x") relative_y = lc.get_attribute("relative_y") flat_indices = relative_x * ncols * 1.0 + relative_y if flat_indices[5*ncols:] is None or len(flat_indices[5*ncols:]) == 0: offset = 0 else: offset = 5*ncols #if os.path.exists("indices.lf4"): # os.remove("indices.lf4") #flat_indices.tofile("indices.lf4") logger.start_block("Converting") try: for attr_name in lc.get_primary_attribute_names(): if attr_name not in ["relative_x", "relative_y"]: #attr_name = "lct" #-------------- only output lct now logger.log_status(" ", attr_name) attr = ma.filled(lc.get_attribute(attr_name), nodata_values).astype(float32) # print attr.size _create_flt_file(os.path.join(flt_directory_out, attr_name+".lf4"), attr, flat_indices, offset) _create_header(os.path.join(flt_directory_out, attr_name+".hdr")) #<-- added 26 may 09 by mm del attr #break #-------------- only output lct now lc.load_dataset(attributes='*') if lc.get_computed_attribute_names() is not None: flt_directory_out = os.path.join(flt_directory_out, "computed") if not os.path.exists(flt_directory_out): os.makedirs(flt_directory_out)
#years = [2002] #years = sys.argv[3] years = [2007, 2007] lc1 = LandCoverDataset(in_storage=StorageFactory().get_storage( 'flt_storage', storage_location=os.path.join(flt_directory_in, str(years[0]))), out_storage=StorageFactory().get_storage( 'flt_storage', storage_location=os.path.join( flt_directory_out, str(years[0])))) agents_index = where(lc1.get_attribute(index_attribute))[0] lc1subset = DatasetSubset(lc1, agents_index) print "Writing set 1:" for attr in lc1.get_primary_attribute_names(): print " ", attr lc1subset.write_dataset(attributes=[attr], out_table_name="land_covers") lc1.delete_one_attribute( attr ) # leaving this line in causes the processing of every other input data file; commenting it causes memory error lc2 = LandCoverDataset(in_storage=StorageFactory().get_storage( 'flt_storage', storage_location=os.path.join(flt_directory_in, str(years[1]))), out_storage=StorageFactory().get_storage( 'flt_storage', storage_location=os.path.join( flt_directory_out, str(years[1])))) lc2subset = DatasetSubset(lc2, agents_index)