def _print_array_stats(arr, file_output):
    """Print array stats."""
    Logger.info("Raster ASCII output file {} saved".format(file_output))
    Logger.info("\tArray stats: min={} max={} mean={}".format(
        np.min(arr), np.max(arr), np.mean(arr)))
def __init__(self):
    super(Stream, self).__init__()
    Logger.info('Stream: ON')

    self.streams = Gl.streams
    self.nReaches = len(self.streams[0])
    self.cell_stream = Gl.cell_stream

    self.reach = []
    for i in range(self.nReaches):
        self.reach.append(
            Reach(self.streams[0][i], self.streams[1][i],
                  self.streams[2][i], self.streams[3][i],
                  self.streams[4][i], self.streams[5][i],
                  self.streams[6][i], self.streams[7][i],
                  self.streams[8][i], self.streams[9][i],
                  self.streams[10][i], self.streams[11][i],
                  self.streams[12][i], self.streams[13][i],
                  self.streams[14][i]))

    self.streams_loc = Gl.streams_loc
    self.mat_stream_reach = Gl.mat_stream_reach

    for i in self.rr:
        for j in self.rc[i]:
            self.arr[i][j].state += self.mat_stream_reach[i][j]

    self.STREAM_RATIO = Gl.STREAM_RATIO
def __init__(self):
    Logger.info("Diffuse approach")
    if Globals.r is None or Globals.c is None:
        exit("Global variables are not assigned")
    r = Globals.r
    c = Globals.c
    self.H = np.zeros([r, c], float)
def __init__(self, L_sub, Ks, vg_n, vg_l):
    super(SubsurfacePass, self).__init__()
    # jj
    self.n = 0

    self.q_subsurface = None
    # self.arr = np.zeros([0], float)
    Logger.info("Subsurface: OFF")
def removeCellsWithSameHeightNeighborhood(mat_dem, mat_nan, rows, cols):
    """Determine whether a cell neighborhood has exactly the same height
    values and, if so, mark that cell as NoData.

    Returns the array of heights adjusted for the NoData cells.
    """
    bad_cells = []

    # find problem cells with a same-height neighborhood
    for i in range(rows):
        for j in range(cols):
            c = [i, j]
            count_nbrs = 0
            point_m = mat_dem[i][j]

            # non-edge cells only - edge cells are excluded thanks to slope trimming
            if i > 0 and i < (rows - 1) and j > 0 and j < (cols - 1):
                nbrs = [mat_dem[i - 1][j - 1],
                        mat_dem[i - 1][j],
                        mat_dem[i - 1][j + 1],
                        mat_dem[i][j - 1],
                        mat_dem[i][j + 1],
                        mat_dem[i + 1][j - 1],
                        mat_dem[i + 1][j],
                        mat_dem[i + 1][j + 1]]
                for k in range(8):
                    if point_m > 0 and point_m == nbrs[k]:
                        count_nbrs = count_nbrs + 1
                # compare the number of neighbours with the same height
                if count_nbrs >= 7:
                    bad_cells.append(c)
                    bc = 1
                    Logger.info(
                        "Possible water circulation! Check the input DTM raster "
                        "for flat areas with the same height neighborhood."
                    )

    # set all problem cells to NoData
    if len(bad_cells) > 0:
        for i in range(rows):
            for j in range(cols):
                if bc == 1:
                    bc_i = bad_cells[0][0]
                    bc_j = bad_cells[0][1]
                    if bc_i == i and bc_j == j:
                        mat_dem[i][j] = -3.40282346639e+038
                        mat_nan[i][j] = -3.40282346639e+038
                        bad_cells.pop(0)
                        if len(bad_cells) == 0:
                            bc = 0
                else:
                    break

    return mat_dem, mat_nan
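# A vectorized sketch of the same flat-neighborhood test using NumPy slicing
# (an assumption for illustration, not the project's implementation): a cell
# is flagged when at least 7 of its 8 neighbours share its positive height.
import numpy as np

def count_equal_neighbours(dem):
    counts = np.zeros_like(dem, dtype=int)
    centre = dem[1:-1, 1:-1]
    for di in (-1, 0, 1):
        for dj in (-1, 0, 1):
            if di == 0 and dj == 0:
                continue
            # neighbour values shifted by (di, dj) for all inner cells at once
            shifted = dem[1 + di:dem.shape[0] - 1 + di,
                          1 + dj:dem.shape[1] - 1 + dj]
            counts[1:-1, 1:-1] += (centre > 0) & (centre == shifted)
    return counts

# cells where count_equal_neighbours(mat_dem) >= 7 correspond to the
# bad_cells collected by the loop above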
def __init__(self):
    self.args = Args()

    self._print_fn = print
    self._print_logo_fn = print

    # default logging level (can be modified by provider)
    Logger.setLevel(logging.DEBUG)

    # storage writer must be defined
    self.storage = None
def _add_logging_handler(handler, formatter=None):
    """Register a new logging handler.

    :param handler: logging handler to be registered
    :param formatter: optional logging formatter
    """
    if not formatter:
        formatter = logging.Formatter(
            "%(asctime)s - %(name)s - %(levelname)s - %(message)s - [%(module)s:%(lineno)s]"
        )
    handler.setFormatter(formatter)
    Logger.addHandler(handler)
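# A short usage sketch (an assumption, not part of the provider): registering
# standard library handlers through _add_logging_handler. The log file name
# 'smoderp2d.log' is purely illustrative.
_add_logging_handler(logging.FileHandler('smoderp2d.log'))  # default formatter applied
_add_logging_handler(
    logging.StreamHandler(),
    formatter=logging.Formatter("%(levelname)s: %(message)s"))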
def _save_data(data, filename):
    """Save data into a pickle file."""
    if filename is None:
        raise ProviderError('Output file for saving data not defined')

    dirname = os.path.dirname(filename)
    if not os.path.exists(dirname):
        os.makedirs(dirname)

    with open(filename, 'wb') as fd:
        pickle.dump(data, fd, protocol=2)

    Logger.info('Pickle file created in <{}> ({} bytes)'.format(
        filename, sys.getsizeof(data)))
def __init__(self, id_, point_x, point_y, point_x_1, point_y_1, to_node,
             length, sklon, smoderp, number, shape, b, m, roughness, q365):

    self.id_ = id_
    self.pointsFrom = [point_x, point_y]
    self.pointsTo = [point_x_1, point_y_1]
    self.to_node = to_node
    self.length = length
    if sklon < 0:
        Logger.info(
            "Slope in reach part {} is negative; "
            "its absolute value is used".format(id_))
    self.slope = abs(sklon)
    self.smoderp = smoderp
    self.no = number
    self.shape = shape
    self.b = b
    self.m = m
    self.roughness = roughness
    self.q365 = q365
    self.V_in_from_field = 0.0
    self.V_in_from_field_cum = 0.0
    self.V_in_from_reach = 0.0
    self.V_out_cum = 0.0  # L^3
    self.vol_rest = 0.0
    self.h = 0.0  # jj maybe an initial condition? although it is probably q365 anyway...
    self.h_max = 0.0
    self.timeh_max = 0.0
    self.V_out = 0.0
    self.vs = 0.0
    self.Q_out = 0.0
    self.Q_max = 0.0
    self.timeQ_max = 0.0
    self.V_out_domain = 0.0

    if shape == 0:    # rectangle
        self.outflow_method = stream_f.rectangle
    elif shape == 1:  # trapezoid
        self.outflow_method = stream_f.trapezoid
    elif shape == 2:  # triangle
        self.outflow_method = stream_f.triangle
    elif shape == 3:  # parabola
        self.outflow_method = stream_f.parabola
    else:
        self.outflow_method = stream_f.rectangle
def __init__(self):
    super(CumulativeSubsurface, self).__init__()

    Logger.info('Subsurface')

    self.data.update({
        # cumulative exfiltration volume [m3]
        'exfiltration': CumulativeData('core', 'cExfiltr_m3'),   # 1
        # cumulative percolation volume [m3]
        'percolation': CumulativeData('core', 'cPercol_m3'),     # 2
        # maximum water level in the subsurface soil layer [m]
        'h_sub': CumulativeData('core', 'mWLevelSub_M'),         # 3
        # maximum subsurface flux [m3 s-1]
        'q_sub': CumulativeData('core', 'mQSub_m3_s'),           # 4
        # cumulative outflow volume in the subsurface soil layer [m3]
        'vol_sub': CumulativeData('core', 'cVOutSub_m3')         # 5
    })
def rillCalculations(sur, pixelArea, l, rillRatio, n, slope, delta_t, ratio,
                     ppp=False):

    h_rill = sur.h_rill
    b = sur.rillWidth

    V_to_rill = h_rill * pixelArea
    sur.V_to_rill = V_to_rill
    b_tmp = b

    # force at least one pass through the Courant-limited loop
    courant = courantMax + 1.0

    while courant > courantMax:
        b = b_tmp
        # if sur.state != 2:
        #     b = 0
        b, V_rill_runoff, V_rill_rest, q, v, courant = rill(
            V_to_rill, rillRatio, l, b, delta_t, ratio, n, slope,
            pixelArea, ppp)

        if courant > courantMax:
            Logger.debug('------ ratio += 1 -----')
            ratio += 1
            if ratio > 10:
                return b_tmp, V_to_rill, V_rill_runoff, V_rill_rest, \
                    0.0, 0.0, 11, courant

    qMax = max(q)
    vMax = max(v)

    return b, V_to_rill, V_rill_runoff, V_rill_rest, qMax, vMax, ratio, courant
def output_filepath(self, name, item='temp'):
    """Get ArcGIS data path.

    TODO: item needs to be set for each raster separately.
          Now everything goes to the temp dir.

    :param name: layer name

    :return: full path
    """
    #try:
    #    item = self._data[name]
    #except:
    #    item = 'temp'
    path = os.path.join(Globals.get_outdir(), item, 'data.gdb', name)
    Logger.debug('File path: {}'.format(path))

    return path
def _load_data(filename):
    """Load data from a pickle file.

    :param str filename: file to be loaded
    """
    if filename is None:
        raise ProviderError('Input file for loading data not defined')
    with open(filename, 'rb') as fd:
        if sys.version_info > (3, 0):
            data = pickle.load(fd, encoding='bytes')
            data = {
                key.decode(): val.decode() if isinstance(val, bytes) else val
                for key, val in data.items()
            }
        else:
            data = pickle.load(fd)
    Logger.debug('Size of loaded data is {} bytes'.format(
        sys.getsizeof(data)))

    return data
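# A minimal usage sketch (an assumption, not part of the provider API); the
# path 'output/dem.save' is purely illustrative. _save_data writes the pickle
# with protocol 2 so it stays readable from Python 2; the decode step in
# _load_data exists to turn the byte strings of such Python 2 pickles back
# into text when they are loaded under Python 3.
_save_data({'spix': 5.0, 'rows': 3}, 'output/dem.save')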
def compute_h(A, m, b, err=0.0001, max_iter=20):
    def feval(h):
        return b * h + m * h * h - A

    def dfdheval(h):
        return b + 2.0 * m * h

    # first estimate of the water level
    h_pre = A / b
    h = h_pre
    iter_ = 1
    while feval(h_pre) > err:
        h = h_pre - feval(h_pre) / dfdheval(h_pre)
        h_pre = h
        if iter_ >= max_iter:
            Logger.error(
                "in file %s near line %s\n\t"
                "Newton solver did not converge after %s iterations "
                "(max_iter=%s)",
                frameinfo.filename, frameinfo.lineno, max_iter, max_iter)
            break
        iter_ += 1
    # print 'check', A, b*h+m*h*h
    return h
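# A worked sanity check for compute_h (an assumption, not part of the module):
# for a trapezoidal cross-section with bottom width b = 2 m and bank slope
# m = 1, the wetted area A = b*h + m*h**2 equals 8 m2 exactly at h = 2 m,
# so the Newton iteration should return a value close to 2.0.
h = compute_h(A=8.0, m=1.0, b=2.0)
print(round(h, 3))  # -> ~2.0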
def __init__(self, mi, t):
    Logger.fatal(
        'Maximum number of iterations (max_iter = {}) was exceeded '
        'at time [s]: {}'.format(mi, t))
def __init__(self):
    Logger.info("Multiflow direction algorithm")
    self.inflows, fd_rill = mfd.new_mfda(mat_dem, mat_nan, mat_fd, vpix,
                                         spix, rows, cols)
    self.inflowsRill = D8_.new_inflows(fd_rill)
def courant(self, rainfall, delta_t, ratio):
    # the ratio can decrease and max_delta_t_mult can increase
    # if the Courant number in the rills is <= 0.2
    #
    # if the Courant number is > 0.5 the opposite happens,
    # but that is handled locally in ./src/processes/rill.py
    #
    # if (self.cour_most_rill < 0.1):
    #     ratio = max(1, ratio - 1)  # ratio cannot be smaller than 1
    #     if ratio == 1:
    #         self.max_delta_t_mult = 1.0
    #     else:
    #         self.max_delta_t_mult = min(1.0, self.max_delta_t_mult * 1 / 0.9)
    #         # max_delta_t_mult cannot be larger than 1.0

    # the ratio is capped at 10, it cannot go higher;
    # therefore max_delta_t_mult, which multiplies the resulting delta,
    # is decreased instead
    #
    # if (ratio > self.maxratio) or (self.cour_most_rill > 1.0):
    #     ratio = self.maxratio
    #     ratio = 1
    #     self.max_delta_t_mult *= 0.9

    # if the maximum Courant number is outside the allowed criteria
    # (smaller than cour_least or larger than cour_crit),
    # dt is recomputed explicitly to the largest possible value

    # xor
    if ((self.cour_most < self.cour_least) !=
            (self.cour_crit <= self.cour_most)):

        # if nothing happens on the surface there is nothing to drive
        # the change of dt, so it is set to the maximum multiplied by
        # max_delta_t_mult; max_delta_t_mult changes according to the
        # rills, earlier in this method
        if self.cour_speed == 0.0:
            return self.max_delta_t * self.max_delta_t_mult, ratio

        dt = round((Gl.mat_efect_cont[self.i, self.j] * self.cour_crit *
                    self.cour_coef) / self.cour_speed, 4)

        # the new dt must not be larger than max_delta_t * max_delta_t_mult;
        # max_delta_t_mult changes according to the rills, earlier in
        # this method
        # return dt * self.max_delta_t_mult, ratio
        # return min(dt, self.max_delta_t * self.max_delta_t_mult), ratio
        return min(dt * self.max_delta_t_mult,
                   self.max_delta_t * self.max_delta_t_mult), ratio

    # if the Courant number is within the allowed range, it is only
    # checked that dt is not larger than max_delta_t * max_delta_t_mult;
    # max_delta_t_mult changes according to the rills, earlier in this method
    else:
        # return delta_t, ratio
        if (ratio <= self.maxratio) and (self.cour_most_rill < 0.5):
            return delta_t, ratio
        else:
            return delta_t * self.max_delta_t_mult, ratio

    Logger.critical(
        'courant.cour() missed all its time step conditions; '
        'no rule to preserve or change the time step!')
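# An illustrative back-of-the-envelope check of the Courant-limited step
# (all numbers below are assumptions, not model defaults): with an effective
# cell contour length of 5 m, cour_crit = 0.95, cour_coef = 0.56 and a flow
# celerity cour_speed = 0.1 m/s, the candidate time step is
# dt = 5 * 0.95 * 0.56 / 0.1 = 26.6 s, which is then capped at
# max_delta_t * max_delta_t_mult (assumed here as 30 s * 1.0).
dt = round(5.0 * 0.95 * 0.56 / 0.1, 4)
print(min(dt, 30.0 * 1.0))  # -> 26.6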
def __init__(self):
    Logger.info("D8 flow algorithm")
    self.inflows = D8_.new_inflows(Globals.get_mat_fd())
def __del__(self):
    for fd in self.files:
        Logger.debug('Hydrographs file "{}" closed'.format(fd.name))
        fd.close()
def __init__(self):
    points = Globals.get_array_points()
    ipi = points.shape[0]
    jpj = 5
    point_int = [[0] * jpj for i in range(ipi)]
    rr, rc = GridGlobals.get_region_dim()
    pixel_area = GridGlobals.get_pixel_area()

    self.inSurface = []
    self.inStream = []

    for ip in range(ipi):
        for jp in [0, 1, 2]:
            point_int[ip][jp] = int(points[ip][jp])

    for ip in range(ipi):
        for jp in [3, 4]:
            point_int[ip][jp] = points[ip][jp]

    # this loop removes the points which lie, in the i, j loop,
    # one cell outside rrows and rcols
    outsideDomain = False
    del_ = []
    for ip in range(ipi):
        l = point_int[ip][1]
        m = point_int[ip][2]
        for ipp in rr:
            if l == ipp:
                for jpp in rc[ipp]:
                    if m == jpp:
                        outsideDomain = True
        if not outsideDomain:
            del_.append(ip)
        outsideDomain = False

    point_int = [i for j, i in enumerate(point_int) if j not in del_]
    ipi -= len(del_)

    counter = 0

    # mat_stream_seg is always present if stream == True
    # if (mat_stream_seg != None) and (stream == True):
    if Globals.isStream:
        for ip in range(ipi):
            l = point_int[ip][1]
            m = point_int[ip][2]
            if Globals.get_mat_stream_reach(l, m) >= 1000:
                self.inStream.append(counter)
                counter += 1
            else:
                self.inSurface.append(counter)
                counter += 1
    else:
        self.inSurface = [i for i in range(ipi)]

    self.inStream.append(-99)
    self.inSurface.append(-99)

    self.n = ipi
    self.point_int = point_int
    self.subflow = Globals.subflow
    self.rill = Globals.isRill
    self.stream = Globals.isStream
    self.pixel_area = pixel_area

    iStream = 0
    iSurface = 0

    self.header = []

    for i in range(self.n):
        if i == self.inStream[iStream]:
            header = '# Hydrograph at the point with coordinates: {} {}{}'.format(
                self.point_int[i][3], self.point_int[i][4], os.linesep)
            header += '# A pixel size is [m2]:{}'.format(os.linesep)
            header += '# {}{}'.format(self.pixel_area, os.linesep)

            if not Globals.extraOut:
                header += '# time[s];deltaTime[s];rainfall[m];reachWaterLevel[m];reachFlow[m3/s];reachVolRunoff[m3]'
            else:
                header += '# time[s];deltaTime[s];Rainfall[m];Waterlevel[m];V_runoff[m3];Q[m3/s];V_from_field[m3];V_rests_in_stream[m3]'

            header += os.linesep
            iStream += 1
            self.header.append(header)

        elif i == self.inSurface[iSurface]:
            header = '# Hydrograph at the point with coordinates: {} {}{}'.format(
                self.point_int[i][3], self.point_int[i][4], os.linesep)
            header += '# A pixel size is [m2]:{}'.format(os.linesep)
            header += '# {}{}'.format(self.pixel_area, os.linesep)

            if not Globals.extraOut:
                header += '# time[s];deltaTime[s];rainfall[m];totalWaterLevel[m];surfaceFlow[m3/s];surfaceVolRunoff[m3]'
            else:
                header += '# time[s];deltaTime[s];Rainfall[m];Water_level_[m];Sheet_Flow[m3/s];Sheet_V_runoff[m3];Sheet_V_rest[m3];Infiltration[m];Surface_retention[m];State;V_inflow[m3];WlevelTotal[m]{}'
                if Globals.isRill:
                    header += ';WlevelRill[m];Rill_width[m];Rill_flow[m3/s];Rill_V_runoff[m3];Rill_V_rest;Surface_Flow[m3/s];Surface_V_runoff[m3]'
                header += ';SurfaceBil[m3]'
                if Globals.subflow:
                    header += ';Sub_Water_level_[m];Sub_Flow_[m3/s];Sub_V_runoff[m3];Sub_V_rest[m3];Percolation[];exfiltration[]'
                if Globals.extraOut:
                    header += ';V_to_rill.m3.;ratio;courant;courantrill;iter'

            header += os.linesep
            iSurface += 1
            self.header.append(header)

    self.files = []
    for i in range(self.n):
        filename = 'point{}.dat'.format(str(self.point_int[i][0]).zfill(3))
        fd = open(os.path.join(Globals.get_outdir(), filename), 'w')
        fd.writelines(self.header[i])
        self.files.append(fd)

    del self.inStream[-1]
    del self.inSurface[-1]

    Logger.info("Hydrograph files have been created...")
def __init__(self):
    super(StreamPass, self).__init__()
    self.reach = None
    Logger.info('Stream: OFF')
def __init__(self):
    Logger.info("Kinematic approach")
    super(Kinematic, self).__init__()
def __init__(self, msg):
    Logger.fatal(msg)
def __init__(self):
    Logger.fatal('Water level reached a negative value')
def __init__(self):
    Logger.fatal('Global variable is still None')
def __init__(self, L_sub=0.010, Ks=0.001, vg_n=1.5, vg_l=0.5):
    Logger.info("Subsurface:")
    super(Subsurface, self).__init__(L_sub=L_sub, Ks=Ks, vg_n=vg_n, vg_l=vg_l)
def load_precipitation(fh):
    y2 = 0
    try:
        fh = open(fh, "r")
        x = []
        for line in fh.readlines():
            z = line.split()
            if len(z) == 0:
                continue
            elif z[0].find('#') >= 0:
                continue
            else:
                if len(z) == 0:
                    # skip empty rows in the text file
                    continue
                elif (float(z[0]) == 0) & (float(z[1]) > 0):
                    # if the record starts at zero minutes with a non-zero
                    # rainfall, the line has to be corrected
                    raise ErrorInRainfallRecord()
                elif (float(z[0]) == 0) & (float(z[1]) == 0):
                    # if the record starts at zero minutes with zero
                    # rainfall, the line is ignored
                    continue
                else:
                    y0 = float(z[0]) * 60.0      # conversion to seconds
                    y1 = float(z[1]) / 1000.0    # conversion to metres
                    if y1 < y2:
                        raise NonCumulativeRainData()
                    y2 = y1
                    mv = y0, y1
                    x.append(mv)
        fh.close()

        # values ordered by time, ascending
        dtype = [('cas', float), ('value', float)]
        val = np.array(x, dtype=dtype)
        x = np.sort(val, order='cas')

        # test whether the same time appears more than once
        state = 0
        k = 1
        itera = len(x)  # itera is needed in the main loop
        for k in range(itera):
            if x[k][0] == x[k - 1][0] and itera != 1:
                state = 1
                y = np.delete(x, k, 0)

        if state == 0:
            x = x
        else:
            x = y

        # amount of rainfall in the individual intervals
        if len(x) == 0:
            sr = 0
        else:
            sr = np.zeros([itera, 2], float)
            for i in range(itera):
                if i == 0:
                    sr_int = x[i][1] / x[i][0]
                    sr[i][0] = x[i][0]
                    sr[i][1] = sr_int
                else:
                    sr_int = (x[i][1] - x[i - 1][1]) / (x[i][0] - x[i - 1][0])
                    sr[i][0] = x[i][0]
                    sr[i][1] = sr_int

        # for i, item in enumerate(sr):
        #     print item[0], '\t', item[1]
        return sr, itera

    except IOError:
        Logger.critical("The rainfall file does not exist!")
    except Exception:
        Logger.critical("Unexpected error: {}".format(sys.exc_info()[0]))
        raise
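# An illustrative rainfall record and the values load_precipitation would
# derive from it (file name and numbers are assumptions). The input file has
# two columns, time [min] and cumulative rainfall [mm], e.g.:
#
#   10    5.0
#   20   12.0
#   30   12.0
#
# sr, itera = load_precipitation('rainfall.txt')
#
# The first interval intensity follows from the unit conversions used above:
t0_s = 10 * 60.0       # 600 s
h0_m = 5.0 / 1000.0    # 0.005 m
print(h0_m / t0_s)     # -> ~8.33e-06 m/s, i.e. sr[0][1]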