# Imports assumed by this snippet (it renders inline in a Jupyter notebook):
import numpy as np
import matplotlib.pyplot as plt
from matplotlib import colors, animation
from IPython.display import display, HTML
from progressbar import ProgressBar as PB


def run_simulation(X, n_frames, grow_func, ignite_func, burn_func):
    """
    Calls the functions "grow_func", "ignite_func" and "burn_func" on X,
    n_frames times. Overwrites the X matrix and plots the result as an
    animation. Returns a list of the wildfire burned area for each iteration.
    """
    # Colours for visualization: brown for EMPTY, dark green for TREE and
    # orange for FIRE. Note that for the colormap to work, this list and the
    # bounds list must be one larger than the number of different values in
    # the array.
    colors_list = [(0.2, 0, 0), (0, 0.5, 0), (1, 0, 0), 'orange']
    cmap = colors.ListedColormap(colors_list)
    bounds = [0, 1, 2, 3]
    norm = colors.BoundaryNorm(bounds, cmap.N)

    burned_list = []

    fig = plt.figure(figsize=(15, 8))
    ax1 = fig.add_subplot(121)
    # ax1.set_axis_off()
    im = ax1.imshow(X, cmap=cmap, norm=norm)  # , interpolation='nearest'

    ax2 = fig.add_subplot(122)
    n_trees = np.nan * np.ones((n_frames, ))
    n_trees[0] = np.sum(X == TREE)
    pl, = ax2.plot(np.arange(n_frames), n_trees)
    ax2.set_xlabel('iteration')
    ax2.set_ylabel('# of trees')

    bar = PB(min_value=0, max_value=n_frames)

    # The animation function: called to produce a frame for each generation.
    def animate(i):
        bar.update(i)
        im.set_data(animate.X)
        grow_func(animate.X)
        ignite_func(animate.X)
        burned = burn_func(animate.X)
        burned_list.append(burned)
        animate.n_trees[i] = np.sum(animate.X == TREE)
        ax2.plot(np.arange(n_frames), animate.n_trees, 'r')
        ax2.set_xlim(0, n_frames)
        ax2.set_ylim(0, np.multiply(*animate.X.shape))

    # Bind our grid to the identifier X in the animate function's namespace.
    animate.X = X
    animate.n_trees = n_trees

    anim = animation.FuncAnimation(fig, animate, frames=n_frames)
    display(HTML(anim.to_jshtml(fps=10)))
    return burned_list
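# A minimal usage sketch, not from the original source: the EMPTY/TREE/FIRE
# constants and the trivial in-place grow/ignite/burn rules below are
# assumptions, included only to show the expected call signature.
EMPTY, TREE, FIRE = 0, 1, 2

def grow(X):
    # hypothetical rule: empty cells sprout trees with probability 0.01
    X[(X == EMPTY) & (np.random.random(X.shape) < 0.01)] = TREE

def ignite(X):
    # hypothetical rule: trees catch fire with probability 1e-4
    X[(X == TREE) & (np.random.random(X.shape) < 1e-4)] = FIRE

def burn(X):
    # hypothetical rule: burning cells go empty; return the burned area
    burned = np.sum(X == FIRE)
    X[X == FIRE] = EMPTY
    return burned

# burned_areas = run_simulation(np.zeros((100, 100)), 200, grow, ignite, burn)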
# Imports assumed by this snippet:
import time
from colorama import Fore, Style
from progressbar import ProgressBar as PB


def timed(fun):
    # The enclosing decorator is not part of the original fragment; it is
    # reconstructed here so that `fun` is bound and the wrapper is usable.
    def wrapper(*args, **kwargs):
        pbar = PB(maxval=10)
        pbar.start()
        print(Fore.GREEN)
        start_time = int(time.time())
        fun(*args, **kwargs)
        end_time = int(time.time())
        # replay one bar tick per elapsed second (note: a run longer than
        # maxval=10 seconds would overflow the bar)
        for i in range(1, end_time - start_time):
            pbar.update(i)
            time.sleep(0.1)
        pbar.finish()
        print(Style.RESET_ALL)

    return wrapper
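# Hypothetical usage of the reconstructed decorator (the function name and
# sleep duration are made up for illustration):
@timed
def crunch():
    time.sleep(3)

# crunch()  # prints in green, then replays one bar tick per elapsed second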
time_span = 0
predict_span = 50
grid_circ = 7

data_dir = "../data/h5_predict"
if not os.path.exists(data_dir):
    os.makedirs(data_dir)

aq_count = 0
for aq_name in aq_location.keys():
    aggregate = 0
    ti.sleep(0.1)  # assumes `import time as ti` earlier in the script
    bar = PB(initial_value=0,
             maxval=delta_time + 1,
             widgets=[aq_name, ' ', Bar('=', '[', ']'), ' ', Percentage()])
    valid_count = 0

    # Validate the near grid matrix algorithm
    # plt.figure()
    # plt.title(aq_name)
    # plt.plot(aq_location[aq_name][0], aq_location[aq_name][1], '.')
    # plt.plot(grid_coor_array[:, 0], grid_coor_array[:, 1], '.')
    # plt.show()

    # Exporting data from start to end
    predict_matrix = []
    dt_int_array = []
tmpdict["time"] = [ n["time"].split("T")[1].split("+")[0] for n in self.result.items()[0][1] ] tmpdict["load"] = [ '%.2f' % (n["free"] / 1024 / 1024 / 1024) for n in self.result.items()[0][1] ] return tmpdict else: return "bad result" def changetime(t=8): now = int(time.time() - t * 3600) befor = int(now - 3600) strtime = time.strftime("%Y-%m-%dT%H:%M:%MZ", time.localtime(befor)) return strtime if __name__ == '__main__': tt = changetime() pbar = PB(maxval=10) pbar.start() print(Fore.GREEN) for i in range(1, 11): pbar.update(i) time.sleep(0.1) pbar.finish() print(Style.RESET_ALL)
print("slow algorithm found the right edges:", all(all_edges_slow)) print("fast algorithm found the right edges:", all(all_edges_fast)) from time import time from progressbar import ProgressBar as PB N_meas = 100 N = 200 e = [] print("======== delta method (slow) ========") print("N_meas =", N_meas) start = time() bar = PB() for meas in bar(range(N_meas)): for node in range(N - 1): for neigh in range(node + 1, N): e = edge_index(N, node, neigh) i, j = node_indices_slow(N, e) end = time() print("took", end - start, "seconds") print("======== bounds method (fast) ========") print("N_meas =", N_meas) start = time() bar = PB()
    def simulate(self, N_time_steps):
        """Simulate a swarm according to the rules.

        Parameters
        ----------
        N_time_steps : int
            Number of time steps to simulate.

        Returns
        -------
        positions : numpy.ndarray of shape ``(self.number_of_fish, N_time_steps+1, 3)``
            Keeping track of the fish's positions for each time step.
        directions : numpy.ndarray of shape ``(self.number_of_fish, N_time_steps+1, 3)``
            Keeping track of the fish's directions for each time step.
        """

        # create result arrays and fill in initial positions
        positions = np.empty((self.number_of_fish, N_time_steps + 1, 3))
        directions = np.empty((self.number_of_fish, N_time_steps + 1, 3))
        for i in range(self.number_of_fish):
            positions[i, 0, :] = self.fish[i].position
            directions[i, 0, :] = self.fish[i].direction

        bar = PB(max_value=N_time_steps)

        # for each time step
        for t in range(1, N_time_steps + 1):

            # iterate through fish pairs
            for i in range(self.number_of_fish - 1):
                F_i = self.fish[i]
                r_i = F_i.position
                v_i = F_i.direction

                for j in range(i + 1, self.number_of_fish):
                    F_j = self.fish[j]

                    relationship_counted = False
                    for X in self.box_copies[0]:
                        if relationship_counted:
                            break
                        for Y in self.box_copies[1]:
                            for Z in self.box_copies[2]:
                                r_j = F_j.position + np.array([X, Y, Z])
                                v_j = F_j.direction

                                # get their distance, and unit distance vector
                                r_ij = (r_j - r_i)
                                distance = np.linalg.norm(r_ij)
                                r_ij /= distance
                                r_ji = -r_ij

                                # if they are within the repulsion zone, just
                                # add each other to the repulsion events
                                if distance < self.repulsion_radius:
                                    F_i.zor_update(r_ij)
                                    F_j.zor_update(r_ji)
                                    relationship_counted = True
                                elif distance < self.repulsion_radius + self.orientation_width + self.attraction_width:
                                    # if they are within the hollow balls of the
                                    # orientation and attraction zones, decide
                                    # whether the fish can see each other
                                    angle_i = np.arccos(np.clip(np.dot(r_ij, v_i), -1.0, 1.0))
                                    angle_j = np.arccos(np.clip(np.dot(r_ji, v_j), -1.0, 1.0))

                                    if self.verbose:
                                        print("angle_i", angle_i, self.angle_of_perception)
                                        print("angle_j", angle_j, self.angle_of_perception)

                                    # if i can see j, add j's influence
                                    if angle_i < self.angle_of_perception:
                                        if distance < self.repulsion_radius + self.orientation_width:
                                            F_i.zoo_update(v_j)
                                        else:
                                            F_i.zoa_update(r_ij)

                                    # if j can see i, add i's influence
                                    if angle_j < self.angle_of_perception:
                                        if distance < self.repulsion_radius + self.orientation_width:
                                            F_j.zoo_update(v_i)
                                        else:
                                            F_j.zoa_update(r_ji)

                                    relationship_counted = True

            # for each fish
            for i in range(self.number_of_fish):
                F_i = self.fish[i]

                # evaluate the new demanded direction and reset the influence counters
                new_v = F_i.evaluate_direction(self.turning_rate * self.dt, self.noise_sigma)

                # evaluate the demanded positional change according to the direction
                dr = self.speed * new_v * self.dt

                # check for boundary conditions
                for dim in range(3):
                    # if the new position would be out of bounds
                    if dr[dim] + F_i.position[dim] > self.box_lengths[dim] or \
                       dr[dim] + F_i.position[dim] < 0.0:
                        if not self.reflect_at_boundary[dim]:
                            # this boundary is periodic: wrap around
                            if dr[dim] + F_i.position[dim] > self.box_lengths[dim]:
                                dr[dim] -= self.box_lengths[dim]
                            else:
                                dr[dim] += self.box_lengths[dim]
                        else:
                            # this boundary is reflective: bounce
                            dr[dim] *= -1
                            new_v[dim] *= -1

                # update the position and direction
                F_i.position += dr
                F_i.direction = new_v

                # save position and direction
                positions[i, t, :] = F_i.position
                directions[i, t, :] = F_i.direction

            bar.update(t)

        return positions, directions
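# A standalone sketch, outside the class, of the visual-field test used in
# the pair loop above: fish i sees fish j only if the angle between its
# heading v_i and the unit vector r_ij pointing at j is below the perception
# angle. The function name is hypothetical.
import numpy as np

def can_see(v_i, r_ij, angle_of_perception):
    """Both v_i and r_ij must be unit vectors."""
    angle = np.arccos(np.clip(np.dot(r_ij, v_i), -1.0, 1.0))
    return angle < angle_of_perception

# e.g. a fish heading along +x sees a neighbour 45 degrees off-axis when its
# perception half-angle exceeds pi/4:
# can_see(np.array([1., 0., 0.]), np.array([1., 1., 0.]) / np.sqrt(2), np.pi / 3)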
# (tail of the loop that builds the spatially varying feed rate f; r is the
# loop's normalized coordinate)
        f[i, j] = r * (0.045 - 0.017) + 0.016

f = f.flatten()
# f = 0.060
k = 0.062

# initialize the figures
A, B = get_initial_A_and_B(N)

# how many updates should be computed before a new frame is drawn
updates_per_frame = 30

# these are the arguments which have to be passed to the update function
animation_arguments = (updates_per_frame, A, B, DA, DB, f, k, delta_t, L)

from progressbar import ProgressBar as PB

bar = PB()
for step in bar(range(30000)):
    update(*animation_arguments[1:])
    if step in [400, 1000, 4000, 8000, 15000, 29999]:
        fig, imA = get_initial_artists(A, B)
        fig.savefig('img/n_1000_hires_{:d}.png'.format(step))

# save the animation
# ani.save('img/gray_scott_varying_feed_rate.mp4', writer=writer)
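# update() is defined elsewhere in the source; this is a minimal sketch of
# one standard Gray-Scott step matching the argument order (A, B, DA, DB, f,
# k, delta_t, L) used above, assuming 2D fields, periodic boundaries and
# forward Euler (f may be a scalar or an array broadcastable against A):
import numpy as np

def gray_scott_step(A, B, DA, DB, f, k, delta_t, L):
    dx = L / A.shape[0]

    def laplacian(Z):
        # five-point stencil with periodic boundaries
        return (np.roll(Z, 1, 0) + np.roll(Z, -1, 0) +
                np.roll(Z, 1, 1) + np.roll(Z, -1, 1) - 4.0 * Z) / dx**2

    reaction = A * B**2
    A += delta_t * (DA * laplacian(A) - reaction + f * (1.0 - A))
    B += delta_t * (DB * laplacian(B) + reaction - (k + f) * B)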
def downscale(sFileGARPoint, sFileMask, sBuildingType, sExposureType):
    # sBuildingType = "UFB"
    # sBuildingType = "UCB"
    bDisplayImages = False

    print(" ")
    log_print("Curve: " + sBuildingType)

    # sFileGARPoint = '/Users/lauro/Desktop/EU_ACP_UNISDR_Africa/DATA/UR_Tanzania/AFR_buiA_GAR_5km_20180313/africa_tza.shp'
    # sFileMask = '/Users/lauro/Desktop/EU_ACP_UNISDR_Africa/DATA/UR_Tanzania/AFR_buiA_GUF_100m_nations_20180315/UR_Tanzania.tif'
    # sFileGARPoint = '/Users/lauro/Desktop/EU_ACP_UNISDR_Africa/DATA/Swaziland/AFR_buiA_GAR_5km_20180313/africa_swz.shp'
    # sFileMask = '/Users/lauro/Desktop/EU_ACP_UNISDR_Africa/DATA/Swaziland/AFR_buiA_GUF_100m_nations_20180315/Swaziland.tif'

    # from points 10km to raster 10km
    sFileGARPointBase = os.path.basename(sFileGARPoint)
    dir_out = os.path.join(os.path.dirname(sFileGARPoint),
                           sFileGARPointBase.split('.')[0] + "_" + "outputs")
    if not os.path.exists(dir_out):
        os.mkdir(dir_out)
    sFileGARPointBuildingType = os.path.join(
        dir_out, sFileGARPointBase.split('.')[0] + '_' + sBuildingType + ".shp")
    # os.system('ogr2ogr -overwrite -f "ESRI Shapefile" -where "curve=\'' + sBuildingType + '\'" ' + sFileGARPointBuildingType + ' ' + sFileGARPoint)

    inDriver = ogr.GetDriverByName("ESRI Shapefile")
    inDataSource = inDriver.Open(os.path.join(sFileGARPoint), 0)
    inLayer = inDataSource.GetLayer()
    inLayerDefn = inLayer.GetLayerDefn()
    log_print("Reading GAR shape file...")
    log_print("Total features: " + str(inLayer.GetFeatureCount()))
    # inLayer.SetAttributeFilter("curve = '" + sBuildingType + "'")
    (fXmin, fXmax, fYmin, fYmax) = inLayer.GetExtent()

    iXLowRes = 0.04167
    iYLowRes = 0.04167
    fXmin = fXmin - iXLowRes / 2
    fYmin = fYmin - iYLowRes / 2
    fXmax = fXmax + iXLowRes / 2
    fYmax = fYmax + iYLowRes / 2

    outDriver = ogr.GetDriverByName("ESRI Shapefile")
    outDataSource = outDriver.CreateDataSource(sFileGARPointBuildingType)
    proj = inLayer.GetSpatialRef()
    # Creating the layer with its fields
    outLayer = outDataSource.CreateLayer(sFileGARPointBuildingType, proj, ogr.wkbPoint)
    field_exposure = ogr.FieldDefn(sExposureType, ogr.OFTReal)
    outLayer.CreateField(field_exposure)

    # accumulate exposure per unique point geometry
    pop = []
    geoms = []
    for feature in inLayer:
        local_curve = feature.GetField("curve")
        if local_curve is not None and '.' in local_curve:
            if local_curve.split('.')[0] == sBuildingType:
                if feature.GetGeometryRef().GetPoint() not in geoms:
                    geoms.append(feature.GetGeometryRef().GetPoint())
                    pop.append(feature.GetField(sExposureType))
                else:
                    idx = geoms.index(feature.GetGeometryRef().GetPoint())
                    pop[idx] = pop[idx] + feature.GetField(sExposureType)

    for i in range(len(geoms)):
        outFeature = ogr.Feature(outLayer.GetLayerDefn())
        outFeature.SetField(sExposureType, pop[i])
        point = ogr.Geometry(ogr.wkbPoint)
        point.AddPoint(*geoms[i])
        outFeature.SetGeometry(point)
        outLayer.CreateFeature(outFeature)

    total_exposure = sum(pop)
    log_print("Total unique features in outLayer: " + str(outLayer.GetFeatureCount()))
    log_print("Total original exposure value (for curve type=" + sBuildingType +
              ", exposure type=" + sExposureType + "): %.2f" % total_exposure)
    inLayer.ResetReading()
    outDriver = None
    outFeature = None
    outLayer = None
    inDataSource = None
    outDataSource = None

    log_print("Converting points to raster...")
    sFileGARRasterEXPLowRes = os.path.join(
        dir_out, sFileGARPointBase.split('.')[0] + "_" + sBuildingType + "_" +
        sExposureType + ".tif")
    sRasterizeCmd = ('gdal_rasterize -co compress=DEFLATE -a ' + sExposureType +
                     ' -l "' + os.path.basename(sFileGARPointBuildingType).split('.')[0] +
                     '" -tr ' + str(iXLowRes) + ' ' + str(iYLowRes) +
                     ' -te ' + str(fXmin) + ' ' + str(fYmin) +
                     ' ' + str(fXmax) + ' ' + str(fYmax) +
                     ' "' + sFileGARPointBuildingType + '" "' + sFileGARRasterEXPLowRes + '"')
    if bVerbose:
        print(sRasterizeCmd)
    os.system(sRasterizeCmd)
    # Alternative (previously commented out): rasterize the original shapefile
    # directly with -where "curve='<building type>'" instead of pre-filtering.

    [xsize_orig, ysize_orig, geotransform, geoproj, data_low] = readFile(sFileGARRasterEXPLowRes)
    log_print("Original GAR map size: (" + str(ysize_orig) + "," + str(xsize_orig) + ") (rows,cols)")
    uniq = non_unique(data_low.ravel().tolist())
    data_low = np.reshape(uniq, data_low.shape)
    # writeGeotiffSingleBand(sFileGARRasterEXPLowRes, geotransform, geoproj, data_low, nan_value=NAN_VALUE)
    writeGeotiffSingleBand(sFileGARRasterEXPLowRes, geotransform, geoproj, data_low)
    if bDisplayImages:
        plot_image(data_low)
    del data_low, uniq

    sFileGARRasterEXPHiRes = os.path.join(
        dir_out, sFileGARPointBase.split('.')[0] + "_" + sBuildingType + "_" +
        sExposureType + "_regrid.tif")
    match_geotrans, match_proj = rasterRegrid(sFileGARRasterEXPLowRes, sFileMask,
                                              sFileGARRasterEXPHiRes, "nearest")
    [xsize, ysize, geotransform_high, geoproj_high, data_high] = readFile(sFileGARRasterEXPHiRes)
    # [xsize, ysize, geotransform_high, geoproj_high, data_mask] = readFile(sFileMask, fix_nan_value=255)
    [xsize, ysize, geotransform_high, geoproj_high, data_mask] = readFile(sFileMask)
    # data_mask_zeroes = data_mask.copy()
    # data_mask[data_mask == 0] = np.nan
    data_high_masked = data_high * data_mask
    if bDisplayImages:
        plot_image(data_mask)
        plot_image(data_high)
        plot_image(data_high_masked)

    log_print("Starting counting...")
    # take out the nan values from the array
    values = data_high_masked.ravel()
    values = values[~np.isnan(values)]
    dictCounter = Counter()
    # show a progress bar if the array is "big";
    # count 1000 elements at a time
    if len(values) > 1000000:
        stride = 1000
        bar = PB()
        for v in bar(range(0, len(values), stride)):
            dictCounter.update(values[v:v + stride])
    else:
        dictCounter.update(values)
    log_print("Data counted")

    data_mask_gar = np.copy(data_high)
    # number of high-res pixels in one low-res pixel
    ratio = abs((geotransform[1] * geotransform[5]) /
                (geotransform_high[1] * geotransform_high[5]))
    data_gar_excluded = []
    unique = np.unique(data_high.ravel())
    for key in unique:
        if key not in dictCounter and key != 0:
            dictCounter[key] = ratio
            data_mask_gar[data_mask_gar == key] = -9999
            data_gar_excluded.append(key)
    excluded_exposure = sum(data_gar_excluded)
    log_print("GAR exposure value not overlapping mask: %.2f (%.1f %%)" %
              (excluded_exposure, excluded_exposure / total_exposure * 100))

    # building mask (union of the original mask + non-zero values from GAR)
    data_mask_gar[data_mask_gar != -9999] = 0
    data_mask_gar[data_mask_gar == -9999] = 1
    data_mask = data_mask_gar + data_mask
    data_mask[data_mask > 0] = 1
    data_high_masked = data_high * data_mask
    # plot_image(data_mask)
    del data_high, data_mask_gar

    if bVerbose:
        log_print("Counter length: " + str(len(dictCounter)))
        log_print("Unique length: " + str(len(np.unique(data_high_masked))))

    # spread each low-res exposure value evenly over its high-res pixels
    for key, value in dictCounter.items():
        data_high_masked[data_high_masked == key] = key / value
        if bVerbose:
            print('Amount of pixels for population ' + str(key) + ': ' + str(value))

    if bDisplayImages:
        fig = plt.figure()
        cax = plt.imshow(data_high_masked[:, :])
        cbar = fig.colorbar(cax, orientation='vertical')  # vertically oriented colorbar
        # cbar.ax.set_yticklabels(['< -1', '0', 1, 2, '> 10'])
        plt.show()

    sFileDownscaled = os.path.join(
        dir_out, sFileGARPointBase.split('.')[0] + "_" + sBuildingType + "_" +
        sExposureType + "_HighRes.tif")
    bandMetadata = {}
    bandMetadata[("unit", "1")] = "USD"
    if sExposureType == "VALHUM":
        description = 'VALHUM in ' + sBuildingType + ' (downscaling of GAR2015 data)'
    elif sExposureType == "VALFIS":
        description = 'VALFIS in ' + sBuildingType + ' (downscaling of GAR2015 data)'
    iNbands = 1
    data_high_masked_USD = data_high_masked * 1000000
    # plot_image(data_high_masked_USD)
    # writeFile(sFileDownscaled, match_geotrans, match_proj, data_high_masked_USD, bandMetadata, description, {'Building_type': sBuildingType}, iNbands, nan_value=NAN_VALUE)
    writeFile(sFileDownscaled, match_geotrans, match_proj, data_high_masked_USD,
              bandMetadata, description, {'Building_type': sBuildingType}, iNbands)
    log_print("Total downscaled exposure value (for curve type=" + sBuildingType +
              ", exposure type=" + sExposureType + "): %.2f" % np.sum(data_high_masked))
    log_print("Exposure downscaled. Final result saved in file: " + sFileDownscaled)

    sMaskFileDownscaled = os.path.join(
        dir_out, sFileGARPointBase.split('.')[0] + "_mask_HighRes.tif")
    # writeFile(sMaskFileDownscaled, match_geotrans, match_proj, data_mask, bandMetadata, description, {}, iNbands, nan_value=NAN_VALUE)
    writeFile(sMaskFileDownscaled, match_geotrans, match_proj, data_mask,
              bandMetadata, description, {}, iNbands)
    log_print("Mask saved in file: " + sMaskFileDownscaled)
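# The core of the downscaling above: each low-res GAR value `key` occupies
# `value` high-res pixels, so replacing every occurrence with key / value
# spreads the exposure evenly while preserving the total. A toy check with
# hypothetical numbers, not from the source data:
import numpy as np
from collections import Counter

grid = np.array([[6., 6., 0.],
                 [6., 9., 9.],
                 [0., 9., 9.]])
counts = Counter(grid.ravel())
out = grid.copy()
for key, value in counts.items():
    if key != 0:
        out[grid == key] = key / value
assert np.isclose(out.sum(), 6. + 9.)  # per-value totals are preserved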