def calc_ti_geometries(self):
    """
    Builds point geometries for the topographic-index raster and stores
    them on ``self.ti_geoms``.

    Parses the 6-line ESRI ASCII grid header of ``self.topo_input``
    (ncols, nrows, xllcorner, yllcorner, cellsize, nodata), lays out the
    cell coordinate grids, drops nodata cells, and converts the
    remaining coordinates into point geometry objects.
    """
    elog.info("TOPMODEL: Building Geometry Objects")

    # parse the ASCII grid header; each header line is "KEY   VALUE"
    with open(self.topo_input, "r") as sr:
        lines = sr.readlines()
        ncols = int(lines[0].split(" ")[-1].strip())
        nrows = int(lines[1].split(" ")[-1].strip())
        lowerx = float(lines[2].split(" ")[-1].strip())
        lowery = float(lines[3].split(" ")[-1].strip())
        cellsize = float(lines[4].split(" ")[-1].strip())
        nodata = float(lines[5].split(" ")[-1].strip())

    # read ti data, skipping the header parsed above
    data = np.genfromtxt(self.topo_input, delimiter=" ", skip_header=6)

    # build X and Y coordinate arrays; yi descends so rows match the
    # top-down row order of the ASCII grid
    xi = np.linspace(lowerx, lowerx + ncols * cellsize, ncols)
    yi = np.linspace(lowery + nrows * cellsize, lowery, nrows)
    x, y = np.meshgrid(xi, yi)  # generate 2d arrays from xi, yi

    x = x.ravel()  # convert to 1-d
    y = y.ravel()  # convert to 1-d
    data = data.ravel()  # convert to 1-d

    # remove all nodata points from x, y arrays
    nonzero = np.where(data != nodata)
    x = x[nonzero]
    y = y[nonzero]

    # NOTE: removed unused locals (tigeoms/satgeoms were pre-initialized
    # but satgeoms was never used)
    self.ti_geoms = geometry.build_point_geometries(x, y)
def connect_to_ODM2_db(title, desc, engine, address, db, user, pwd):
    """
    Establishes an ODM2 database session.

    Args:
        title: display name for the connection
        desc: human-readable description
        engine, address, db, user, pwd: connection parameters forwarded
            to dbconnection2.createConnection

    Returns:
        dict mapping a short random id to the connection details, or
        None when the connection could not be established.
    """
    # create a db session
    session = dbconnection2.createConnection(engine, address, db, user, pwd)

    db_connections = {}
    if session:
        # get the connection string
        connection_string = session.engine.url

        # save this session in the db_connections object under a short id
        db_id = uuid.uuid4().hex[:5]

        # BUGFIX: the original did "d['id'] = db_id" but no dict named
        # 'd' exists in this scope (NameError at runtime); the id is now
        # stored inside the args dict like the legacy code path does
        db_connections[db_id] = {'name': title,
                                 'session': session,
                                 'connection_string': connection_string,
                                 'description': desc,
                                 'args': {'name': title, 'desc': desc,
                                          'engine': engine, 'address': address,
                                          'db': db, 'user': user, 'pwd': pwd,
                                          'id': db_id}}

        elog.info('Connected to : %s [%s]' % (connection_string.__repr__(), db_id))
        sPrint('Connected to : %s [%s]' % (connection_string.__repr__(), db_id))
    else:
        elog.error('Could not establish a connection with the database')
        sPrint('Could not establish a connection with the database', MessageType.ERROR)
        return None

    return db_connections
def createBox(self, xCoord, yCoord, id=None, name=None, type=datatypes.ModelTypes.TimeStep):
    """
    Adds a model box to the canvas at (xCoord, yCoord) and wires up its
    mouse handlers.

    Args:
        xCoord, yCoord: canvas coordinates for the box
        id: model id associated with the box
        name: display name; the call is a no-op when None/empty
        type: model type; Data-type names get their trailing "-<id>"
            suffix stripped and underscores replaced with spaces
    """
    if name:
        x, y = xCoord, yCoord

        if type == datatypes.ModelTypes.Data:
            # Strip out last bit of the name (normally includes an id),
            # e.g. "rainfall-5" -> "rainfall"
            dash = name.rfind('-')
            # BUGFIX: the original sliced name[:rfind('-') - len(name)],
            # which reduces the name to '' whenever no '-' is present
            if dash != -1:
                name = name[:dash]
            name = name.replace("_", " ")

        model_box = ModelBox(type, (x, y), name, id)
        self.FloatCanvas.AddObject(model_box)
        self.models[model_box] = id

        model_box.Bind(FC.EVT_FC_LEFT_DOWN, self.on_model_left_clicked)
        model_box.Bind(FC.EVT_FC_RIGHT_DOWN, self.on_launch_context)
        model_box.Bind(FC.EVT_FC_LEFT_DCLICK, self.on_model_double_click)

        self.FloatCanvas.Draw()

        msg = 'model [%s] has been added to the canvas.' % name
        elog.info(msg)
        sPrint(msg, MessageType.INFO)
def connect_to_db(title, desc, engine, address, db=None, user=None, pwd=None):
    """Open a database session and return it keyed by a short random id.

    Returns:
        {db_id: info-dict} on success, None when the connection fails.
    """
    session = dbconnection2.createConnection(engine, address, db, user, pwd)
    if not session:
        elog.error('Could not establish a connection with the database')
        sPrint('Could not establish a connection with the database', MessageType.ERROR)
        return

    # adjusting timeout
    session.engine.pool._timeout = 30

    connection_string = session.engine.url
    db_id = uuid.uuid4().hex[:5]

    # NOTE(review): 'args' stores the connection string as 'address' and
    # blanks out user/pwd/db — presumably to avoid persisting credentials;
    # confirm before changing
    args = dict(address=connection_string, desc=desc, engine=engine,
                id=db_id, name=db, user=None, pwd=None, default=False, db=None)

    info = {'name': title,
            'session': session,
            'connection_string': connection_string,
            'description': desc,
            'args': args}

    banner = 'Connected to : %s [%s]' % (connection_string.__repr__(), db_id)
    elog.info(banner)
    sPrint(banner)

    return {db_id: info}
def calc_ti_geometries(self):
    """Construct point geometries for every cell of the ti raster.

    Parses the ASCII-grid header of ``self.topo_input``, builds the cell
    coordinate grids, discards nodata cells, and stores the resulting
    point geometries on ``self.ti_geoms``.
    """
    elog.info('TOPMODEL: Building Geometry Objects')

    tigeoms = []
    satgeoms = []

    # header order: ncols, nrows, xllcorner, yllcorner, cellsize, nodata
    with open(self.topo_input, 'r') as sr:
        header = [sr.readline().split(' ')[-1].strip() for _ in range(6)]
    ncols, nrows = int(header[0]), int(header[1])
    lowerx, lowery = float(header[2]), float(header[3])
    cellsize, nodata = float(header[4]), float(header[5])

    # ti values (header rows skipped)
    grid = np.genfromtxt(self.topo_input, delimiter=' ', skip_header=6)

    # coordinate axes; the y axis descends to match raster row order
    col_coords = np.linspace(lowerx, lowerx + ncols * cellsize, ncols)
    row_coords = np.linspace(lowery + nrows * cellsize, lowery, nrows)
    xx, yy = np.meshgrid(col_coords, row_coords)

    # flatten everything and keep only cells holding real data
    flat = grid.ravel()
    valid = np.where(flat != nodata)
    xs = xx.ravel()[valid]
    ys = yy.ravel()[valid]

    tigeoms = geometry.build_point_geometries(xs, ys)
    self.ti_geoms = tigeoms
def create_database_connections_from_args(title, desc, engine, address, db, user, pwd):
    """Build a database connection from explicit arguments.

    sqlite engines are routed through the newer connect_to_db() helper;
    everything else goes through the legacy connection API.

    Returns:
        {db_id: info-dict} on success, None on failure.
    """
    # fixme: all database connections should use the updated ODM library
    if engine == 'sqlite':
        return connect_to_db(title, desc, engine, address, db, user, pwd)

    # old database connection api: capture the raw arguments
    args = {'name': title, 'desc': desc, 'engine': engine,
            'address': address, 'db': db, 'user': user, 'pwd': pwd}

    connections = {}

    # build database connection
    session = dbconnection.createConnection(engine, address, db, user, pwd)

    if not session:
        elog.error('Could not establish a connection with the database')
        sPrint('Could not establish a connection with the database', MessageType.ERROR)
        return None

    # get the connection string
    connection_string = session.engine.url

    # register the session under a short random id
    key = uuid.uuid4().hex[:5]
    args['id'] = key
    connections[key] = {'name': args['name'],
                        'session': session,
                        'connection_string': connection_string,
                        'description': args['desc'],
                        'args': args}

    elog.info('Connected to : %s [%s]' % (connection_string.__repr__(), key))
    sPrint('Connected to : %s [%s]' % (connection_string.__repr__(), key))

    return connections
def create_database_connections_from_file(ini):
    """Create a database session for every section of an ini file.

    Each section must supply engine, address, database, username,
    password, and description options.

    Returns:
        dict mapping short random ids to connection details; sections
        that fail to connect are reported and skipped.
    """
    connections = {}

    # parse the database connections file
    cparser = ConfigParser.ConfigParser(None, multidict)
    cparser.read(ini)

    for section in cparser.sections():
        # collect the section's options into a plain dict
        opts = {'name': section}
        for option in cparser.options(section):
            opts[option] = cparser.get(section, option)

        # build database connection
        session = dbconnection2.createConnection(opts['engine'],
                                                 opts['address'],
                                                 opts['database'],
                                                 opts['username'],
                                                 opts['password'])
        if not session:
            msg = 'Could not establish a connection with the following database: ***@%s/%s' % (
                opts['address'], opts['database'])
            elog.error(msg)
            sPrint(msg, MessageType.ERROR)
            continue

        # adjusting timeout
        session.engine.pool._timeout = 30

        connection_string = session.engine.url
        opts['connection_string'] = connection_string  # for backup/debugging

        key = uuid.uuid4().hex[:5]
        opts['id'] = key
        connections[key] = {'name': opts['name'],
                            'session': session,
                            'connection_string': connection_string,
                            'description': opts['description'],
                            'args': opts}

        elog.info('Connected to : %s [%s]' % (connection_string.__repr__(), key))
        sPrint('Connected to : %s [%s]' % (connection_string.__repr__(), key))

    return connections
def create_database_connections_from_args(title, desc, engine, address, db, user, pwd):
    """Legacy-path constructor for a database connection dictionary.

    sqlite is delegated to the updated connect_to_db() helper; other
    engines use the old connection API.  Returns {db_id: info} or None.
    """
    # fixme: all database connections should use the updated ODM library
    if engine == 'sqlite':
        return connect_to_db(title, desc, engine, address, db, user, pwd)

    # old database connection api
    conn_args = dict(name=title, desc=desc, engine=engine, address=address,
                     db=db, user=user, pwd=pwd)

    result = {}

    session = dbconnection.createConnection(engine, address, db, user, pwd)

    if session:
        url = session.engine.url
        short_id = uuid.uuid4().hex[:5]
        conn_args['id'] = short_id
        result[short_id] = {'name': conn_args['name'],
                            'session': session,
                            'connection_string': url,
                            'description': conn_args['desc'],
                            'args': conn_args}
        message = 'Connected to : %s [%s]' % (url.__repr__(), short_id)
        elog.info(message)
        sPrint(message)
    else:
        elog.error('Could not establish a connection with the database')
        sPrint('Could not establish a connection with the database', MessageType.ERROR)
        return None

    return result
def create_database_connections_from_file(ini):
    """Read an ini file and open one database session per section.

    Returns:
        dict of {short_id: connection-info}; failed sections are logged
        and skipped.
    """
    db_connections = {}

    # parse the database connections file
    parser = ConfigParser.ConfigParser(None, multidict)
    parser.read(ini)

    for section_name in parser.sections():
        # flatten the ini section into an argument dictionary
        section = {'name': section_name}
        for opt in parser.options(section_name):
            section[opt] = parser.get(section_name, opt)

        session = dbconnection2.createConnection(section['engine'],
                                                 section['address'],
                                                 section['database'],
                                                 section['username'],
                                                 section['password'])
        if session:
            session.engine.pool._timeout = 30  # adjusting timeout

            url = session.engine.url
            section['connection_string'] = url  # for backup/debugging

            uid = uuid.uuid4().hex[:5]
            section['id'] = uid
            db_connections[uid] = {'name': section['name'],
                                   'session': session,
                                   'connection_string': url,
                                   'description': section['description'],
                                   'args': section}

            note = 'Connected to : %s [%s]' % (url.__repr__(), uid)
            elog.info(note)
            sPrint(note)
        else:
            warning = 'Could not establish a connection with the following database: ***@%s/%s' % (
                section['address'], section['database'])
            elog.error(warning)
            sPrint(warning, MessageType.ERROR)

    return db_connections
def on_delete(self, event):
    """Queue the currently selected link for deletion.

    Links are placed in a queue that will be deleted permanently when
    clicking on save and close; here the link id is remembered and the
    entry is removed from the list box.
    """
    selection = self.link_name_list_box.GetSelection()
    if selection < 0:
        elog.info("Please select a link to delete")
        return

    # remember the link id so it can be purged on save
    self.links_to_delete.append(self.get_selected_link_id())
    self.link_name_list_box.Delete(selection)
def connect_to_db(title, desc, engine, address, db=None, user=None, pwd=None):
    """Create a database session and register it under a short uuid key.

    Returns:
        dict mapping the generated id to the session details, or None
        when the connection could not be established.
    """
    connections = {}

    session = dbconnection2.createConnection(engine, address, db, user, pwd)
    if not session:
        err = 'Could not establish a connection with the database'
        elog.error(err)
        sPrint(err, MessageType.ERROR)
        return

    session.engine.pool._timeout = 30  # adjusting timeout

    conn_str = session.engine.url
    key = uuid.uuid4().hex[:5]

    connections[key] = {
        'name': title,
        'session': session,
        'connection_string': conn_str,
        'description': desc,
        # NOTE(review): user/pwd/db deliberately omitted from args,
        # and the connection string doubles as the address — confirm
        'args': dict(address=conn_str, desc=desc, engine=engine, id=key,
                     name=db, user=None, pwd=None, default=False, db=None),
    }

    notice = 'Connected to : %s [%s]' % (conn_str.__repr__(), key)
    elog.info(notice)
    sPrint(notice)

    return connections
def connect_to_ODM2_db(title, desc, engine, address, db, user, pwd):
    """
    Establishes an ODM2 database session.

    Args:
        title: display name for the connection
        desc: human-readable description
        engine, address, db, user, pwd: parameters forwarded to
            dbconnection2.createConnection

    Returns:
        dict mapping a short random id to the connection details, or
        None when the connection could not be established.
    """
    # create a db session
    session = dbconnection2.createConnection(engine, address, db, user, pwd)

    db_connections = {}
    if session:
        # get the connection string
        connection_string = session.engine.url

        # save this session in the db_connections object
        db_id = uuid.uuid4().hex[:5]

        # BUGFIX: "d['id'] = db_id" referenced an undefined dict 'd'
        # (NameError at runtime); the id now lives inside the args dict,
        # matching the legacy code path
        db_connections[db_id] = {
            'name': title,
            'session': session,
            'connection_string': connection_string,
            'description': desc,
            'args': {
                'name': title,
                'desc': desc,
                'engine': engine,
                'address': address,
                'db': db,
                'user': user,
                'pwd': pwd,
                'id': db_id,
            },
        }
        elog.info('Connected to : %s [%s]' % (connection_string.__repr__(), db_id))
        sPrint('Connected to : %s [%s]' % (connection_string.__repr__(), db_id))
    else:
        elog.error('Could not establish a connection with the database')
        sPrint('Could not establish a connection with the database', MessageType.ERROR)
        return None

    return db_connections
def populate_variable_list(self):
    """
    Populates the output-variable table from the currently linked models.

    Collects the unique model ids/names appearing on either end of any
    link, sorts them, and inserts them into the table.  A message is
    logged and nothing happens when no links exist.
    """
    # PERF/BUGFIX: the original called engineAccessors.getAllLinks()
    # twice (once for the emptiness check, once for iteration); fetch once
    links = engineAccessors.getAllLinks()
    if len(links) < 1:
        elog.info("No links have been added")
        return

    # compile a list of model ids and names that exist in the configuration
    models = {}
    for link in links:
        s_id = link['source_component_id']
        t_id = link['target_component_id']
        if s_id not in models:
            models[s_id] = link['source_component_name']
        if t_id not in models:
            models[t_id] = link['target_component_name']

    # sort models, then render
    self._data = self.sort_output_model(models)
    self.insert_data(self._data)
    self.table.auto_size_table()
    self.table.alternate_row_color()
def updatePlotArea(self, series_keys):
    """
    Updates the WOF plot with the selected data

    Args:
        series_keys: list of series keys of the data that will be plot

    Returns:
        None
    """
    self.plot.clear_plot()
    for key in series_keys:
        series_info = self.wofSeries.series_info[key]
        data = self.wofSeries.getData(key)
        plotData = []
        noData = None
        # make sure data is found
        if data is not None:
            # get the first data element only
            # NOTE(review): len(...) > 1 appears to guard against an
            # empty/placeholder values container — confirm against the
            # WOF response schema
            if len(data[0].values[0]) > 1:
                values = data[0].values[0].value
            else:
                # abort the whole refresh, not just this series
                elog.info(
                    "There are no values. Try selecting a larger date range"
                )
                return
            # collect (timestamp, value) pairs for plotting
            for value in values:
                plotData.append((value._dateTime, value.value))
            noData = data[0].variable.noDataValue
            ylabel = data[0].variable.unit.unitName
            self.plot.plot_dates(plotData, series_info.var_name, noData, ylabel)
    self.plot.display_legend(0)
def createBox(self, xCoord, yCoord, id=None, name=None, type=datatypes.ModelTypes.TimeStep):
    """Place a named model box on the canvas and register its mouse handlers."""
    if not name:
        return

    position = (xCoord, yCoord)

    if type == datatypes.ModelTypes.Data:
        # Drop the trailing id portion, e.g. "rainfall-5" -> "rainfall"
        cut = name.rfind('-') - len(name)
        name = name[:cut].replace("_", " ")

    box = ModelBox(type, position, name, id)
    self.FloatCanvas.AddObject(box)
    self.models[box] = id

    # hook up mouse interaction
    for evt, handler in ((FC.EVT_FC_LEFT_DOWN, self.on_model_left_clicked),
                         (FC.EVT_FC_RIGHT_DOWN, self.on_launch_context),
                         (FC.EVT_FC_LEFT_DCLICK, self.on_model_double_click)):
        box.Bind(evt, handler)

    self.FloatCanvas.Draw()

    msg = 'model [%s] has been added to the canvas.' % name
    elog.info(msg)
    sPrint(msg, MessageType.INFO)
def updatePlotArea(self, series_keys):
    """Redraw the WOF plot area for every selected series.

    Args:
        series_keys: list of series keys of the data that will be plot

    Returns:
        None
    """
    self.plot.clear_plot()

    for key in series_keys:
        info = self.wofSeries.series_info[key]
        data = self.wofSeries.getData(key)

        # skip series for which nothing was retrieved
        if data is None:
            continue

        first = data[0]

        # bail out entirely when the series carries no usable values
        if len(first.values[0]) <= 1:
            elog.info("There are no values. Try selecting a larger date range")
            return

        pairs = [(v._dateTime, v.value) for v in first.values[0].value]
        self.plot.plot_dates(pairs,
                             info.var_name,
                             first.variable.noDataValue,
                             first.variable.unit.unitName)

    self.plot.display_legend(0)
def run(self, inputs):
    """
    Runs the UEB point snow model over every active grid cell.

    For each cell: assembles the 32-element site-state vector, copies
    the temperature/precipitation forcing into the C time-series array,
    invokes the compiled RUNUEB routine, and writes the SWE and SWIT
    results back into the output exchange-item wrappers.

    Returns False when the underlying run raises an exception.
    """
    # todo: Do ts variable all have the same shape (i.e. same timestep?)

    # NOTES:
    # C_tsvarArray (or RegArray in RunUEB C code) stores the time series values for all inputs
    # C_tsvarArray[0]  --> Ta (air temp)
    # C_tsvarArray[1]  --> Precipitation (mm/day)
    # C_tsvarArray[2]  --> V (wind speed)
    # C_tsvarArray[3]  --> RH (relative humidity)
    # C_tsvarArray[4]  --> atmospheric pressure ?
    # C_tsvarArray[5]  --> Qsiobs
    # C_tsvarArray[6]  --> Qli
    # C_tsvarArray[7]  --> Qnetob
    # C_tsvarArray[8]  --> Qg
    # C_tsvarArray[9]  --> Snowalb
    # C_tsvarArray[10] --> Tmin
    # C_tsvarArray[11] --> Tmax
    # C_tsvarArray[12] --> Vapor Pressure of air

    # get output exchange items
    # self.swe = self.outputs()['Snow Water Equivalent']
    # swit = self.outputs()['Surface Water Input Total']

    try:
        # Initialize SiteState (32 site-state variables expected by UEB)
        SiteState = numpy.zeros((32,))

        # loop over all activeCells
        for i in xrange(len(self.activeCells)):

            # todo: remove, this is for debugging
            # if i > 10:
            #     break

            # track grid cell: (row, col) of the current active cell
            self.C_uebCellY = self.activeCells[i][0]
            self.C_uebCellX = self.activeCells[i][1]

            # svType == 1 means the variable is spatially distributed (take
            # the grid value at this cell); otherwise use the scalar default
            for s in xrange(32):
                if self.C_strsvArray.contents[s].svType == 1:
                    SiteState[s] = self.C_strsvArray.contents[s].svArrayValues[self.C_uebCellY][self.C_uebCellX]
                else:
                    SiteState[s] = self.C_strsvArray.contents[s].svdefValue

            # convert SiteState into a ctype
            C_SiteState = (c_float * len(SiteState))(*SiteState)

            # todo: get all data at beginning of run, then slice as necessary here
            # get the input data for the current geometry
            prcp = self.inputs()['Precipitation'].getValues2(geom_idx_start=i, geom_idx_end=i)
            temp = self.inputs()['Temperature'].getValues2(geom_idx_start=i, geom_idx_end=i)

            # skip calculation if no temperatures are provided for the current point (i.e completely missing data)
            if max(temp[:-1]) > 0:

                # set the input data for this geometry
                for idx in range(len(prcp)):
                    # set air temperature and precipitation values in tsvarArray (index 0 and 1)
                    self.C_tsvarArray.contents[0][idx] = temp[idx][0]
                    self.C_tsvarArray.contents[1][idx] = prcp[idx][0]

                # RUN THE UEB CALCS
                self.__uebLib.RUNUEB(self.C_tsvarArray, C_SiteState, self.C_parvalArray,
                                     byref(pointer(self.C_outvarArray)),
                                     self.C_ModelStartDate, self.C_ModelStartHour,
                                     self.C_ModelEndDate, self.C_ModelEndHour,
                                     self.C_ModelDt, self.C_ModelUTCOffset)
                # outvararray 70var * numtimesteps

                # set output data
                numtimesteps = len(self.__swe.getDates2())

                # output row 17 is routed to the SWE wrapper
                values = numpy.array(self.C_outvarArray[17][0:numtimesteps])  # convert C_type into numpy array
                values[numpy.isnan(values)] = self.__swe.noData()  # set nan values to noData
                self.__swe.setValuesBySlice(values, geometry_index_slice=(i, i+1, 1))  # set data in wrapper

                # output row 25 is routed to the SWIT wrapper
                values = numpy.array(self.C_outvarArray[25][0:numtimesteps])  # convert C_type into numpy array
                values[numpy.isnan(values)] = self.__swit.noData()  # set nan values to noData
                self.__swit.setValuesBySlice(values, geometry_index_slice=(i, i+1, 1))  # set data in wrapper

            # if i % round((len(self.activeCells)) / 10) == 0:
            #     print "%d of %d elements complete " % ((i+1), len(self.activeCells))
            #     sys.stdout.flush()
            elog.info("... %d of %d elements complete " % ((i+1), len(self.activeCells)), overwrite=True)

    except Exception, e:
        elog.critical('UEB run failed.')
        elog.critical(e)
        return False
def run_feed_forward(obj, ds=None):
    """
    executes a feed forward coupled model simulation

    Args:
        obj: engine coordinator object
        ds: dataSaveInfo object

    Returns:
        None (False when a model fails to reach READY after preparation)
    """
    # set engine status
    obj.status.set(stdlib.Status.NOTREADY)

    # todo: determine unresolved exchange items (utilities)

    sim_st = time.time()

    # determine execution order
    elog.info('Determining execution order... ')
    sPrint('Determining execution order... ', MessageType.INFO)
    exec_order = obj.determine_execution_order()
    for i in range(0, len(exec_order)):
        elog.info('%d.) %s' % (i + 1, obj.get_model(model_id=exec_order[i]).name()))

    links = {}
    spatial_maps = {}

    # todo: move this into function
    elog.info('Generating spatial maps... ')
    sPrint('Generating spatial maps... ')
    for modelid in exec_order:
        # get links
        l = obj.get_from_links_by_model(modelid)
        links[modelid] = l

        # build spatial maps
        for linkid, link in l.iteritems():
            source = link.source_exchange_item()
            target = link.target_exchange_item()
            key = generate_link_key(link)

            # set default spatial interpolation to ExactMatch
            if link.spatial_interpolation() is None:
                sPrint('Spatial interpolation not provided, using the default mapping approach: \'Spatial Index\'', MessageType.WARNING)
                link.spatial_interpolation(spatial_index())

            spatial_interp = link.spatial_interpolation()
            source_geoms = source.getGeometries2()
            target_geoms = target.getGeometries2()

            # BUGFIX: the original passed only one value to a two-%s format
            # string (the second value landed in sPrint's second positional
            # argument), raising TypeError whenever geometries were missing
            if len(source_geoms) == 0:
                sPrint('Cannot continue simulation, %s -- %s contains 0 geometries' %
                       (link.source_component().name(), source.name()))
            if len(target_geoms) == 0:
                sPrint('Cannot continue simulation, %s -- %s contains 0 geometries' %
                       (link.target_component().name(), target.name()))

            # save the spatial mapping based on link key
            spatial_maps[key] = spatial_interp.transform(source_geoms, target_geoms)

    # prepare all models
    for modelid in exec_order:
        model_inst = obj.get_model_object(modelid).instance()
        model_inst.prepare()

        if model_inst.status() != stdlib.Status.READY:
            msg = 'Cannot continue with simulation because model "%s" has a status of "%s" after the preparation' \
                  ' phase. Status must be "%s" in order to proceed with simulation ' \
                  % (model_inst.name(), model_inst.status(), stdlib.Status.READY)
            elog.critical(msg)
            sPrint(msg, MessageType.ERROR)
            return False

    # update engine status
    obj.status.set(stdlib.Status.READY)

    # loop through models and execute run
    for modelid in exec_order:

        # update engine status
        obj.status.set(stdlib.Status.RUNNING)

        st = time.time()

        # get the current model instance
        model_inst = obj.get_model_object(modelid).instance()
        sPrint('Executing module: %s \n' % model_inst.name(), MessageType.INFO)

        try:
            # retrieve inputs from database
            sPrint("[1 of 3] Retrieving input data... ")
            input_data = model_inst.inputs()

            # pass these inputs ts to the models' run function
            sPrint("[2 of 3] Performing calculation... ")
            model_inst.run(input_data)

            # update links
            sPrint("[3 of 3] Updating links... ")
            oei = model_inst.outputs()
            update.update_links_feed_forward(links[modelid], oei, spatial_maps)

            model_inst.finish()
            elog.info('..module simulation completed in %3.2f seconds' % (time.time() - st))

        except Exception as e:
            # ROBUSTNESS: log the failure instead of swallowing it silently
            elog.critical('Error during model execution: %s' % e)
            # set the model status to failed
            model_inst.status(stdlib.Status.FAILED)
            return

        # set the model status to successful
        model_inst.status(stdlib.Status.SUCCESS)

    sPrint('------------------------------------------\n' +
           '         Simulation Summary \n' +
           '------------------------------------------\n' +
           'Completed without error :)\n' +
           'Simulation duration: %3.2f seconds\n' % (time.time() - sim_st) +
           '------------------------------------------')

    # update engine status
    obj.status.set(stdlib.Status.SUCCESS)

    # save simulation results
    model_ids = exec_order
    if ds is None:
        msg = 'Simulation results will not be stored in database because database connection was not provided prior to simulation.'
        elog.info(msg)
        sPrint(msg, messageType=MessageType.INFO)
    else:
        database.save(obj, ds, model_ids)
def run(self, inputs):
    """
    Runs the UEB point snow model for every active grid cell.

    Per cell: builds the 32-element site-state vector, loads the
    temperature/precipitation forcing into the C time-series array,
    calls the compiled RUNUEB routine, and copies the SWE and SWIT
    outputs back into the exchange-item wrappers.

    Returns False when the underlying run raises an exception.
    """
    # todo: Do ts variable all have the same shape (i.e. same timestep?)

    # NOTES:
    # C_tsvarArray (or RegArray in RunUEB C code) stores the time series values for all inputs
    # C_tsvarArray[0]  --> Ta (air temp)
    # C_tsvarArray[1]  --> Precipitation (mm/day)
    # C_tsvarArray[2]  --> V (wind speed)
    # C_tsvarArray[3]  --> RH (relative humidity)
    # C_tsvarArray[4]  --> atmospheric pressure ?
    # C_tsvarArray[5]  --> Qsiobs
    # C_tsvarArray[6]  --> Qli
    # C_tsvarArray[7]  --> Qnetob
    # C_tsvarArray[8]  --> Qg
    # C_tsvarArray[9]  --> Snowalb
    # C_tsvarArray[10] --> Tmin
    # C_tsvarArray[11] --> Tmax
    # C_tsvarArray[12] --> Vapor Pressure of air

    # get output exchange items
    # self.swe = self.outputs()['Snow Water Equivalent']
    # swit = self.outputs()['Surface Water Input Total']

    try:
        # Initialize SiteState: the 32 site variables UEB expects
        SiteState = numpy.zeros((32, ))

        # loop over all activeCells
        for i in xrange(len(self.activeCells)):

            # todo: remove, this is for debugging
            # if i > 10:
            #     break

            # track grid cell: (row, col) of the active cell
            self.C_uebCellY = self.activeCells[i][0]
            self.C_uebCellX = self.activeCells[i][1]

            # svType == 1 -> spatially distributed variable: read the grid
            # value at this cell; otherwise use the scalar default
            for s in xrange(32):
                if self.C_strsvArray.contents[s].svType == 1:
                    SiteState[s] = self.C_strsvArray.contents[
                        s].svArrayValues[self.C_uebCellY][self.C_uebCellX]
                else:
                    SiteState[s] = self.C_strsvArray.contents[s].svdefValue

            # convert SiteState into a ctype
            C_SiteState = (c_float * len(SiteState))(*SiteState)

            # todo: get all data at beginning of run, then slice as necessary here
            # get the input data for the current geometry
            prcp = self.inputs()['Precipitation'].getValues2(
                geom_idx_start=i, geom_idx_end=i)
            temp = self.inputs()['Temperature'].getValues2(
                geom_idx_start=i, geom_idx_end=i)

            # skip calculation if no temperatures are provided for the current point (i.e completely missing data)
            if max(temp[:-1]) > 0:

                # set the input data for this geometry
                for idx in range(len(prcp)):
                    # set air temperature and precipitation values in tsvarArray (index 0 and 1)
                    self.C_tsvarArray.contents[0][idx] = temp[idx][0]
                    self.C_tsvarArray.contents[1][idx] = prcp[idx][0]

                # RUN THE UEB CALCS
                self.__uebLib.RUNUEB(
                    self.C_tsvarArray, C_SiteState, self.C_parvalArray,
                    byref(pointer(self.C_outvarArray)), self.C_ModelStartDate,
                    self.C_ModelStartHour, self.C_ModelEndDate,
                    self.C_ModelEndHour, self.C_ModelDt, self.C_ModelUTCOffset)
                # outvararray 70var * numtimesteps

                # set output data
                numtimesteps = len(self.__swe.getDates2())

                # output row 17 feeds the SWE wrapper
                values = numpy.array(
                    self.C_outvarArray[17]
                    [0:numtimesteps])  # convert C_type into numpy array
                values[numpy.isnan(
                    values)] = self.__swe.noData()  # set nan values to noData
                self.__swe.setValuesBySlice(
                    values, geometry_index_slice=(i, i + 1, 1))  # set data in wrapper

                # output row 25 feeds the SWIT wrapper
                values = numpy.array(
                    self.C_outvarArray[25]
                    [0:numtimesteps])  # convert C_type into numpy array
                values[numpy.isnan(
                    values)] = self.__swit.noData()  # set nan values to noData
                self.__swit.setValuesBySlice(
                    values, geometry_index_slice=(i, i + 1, 1))  # set data in wrapper

            # if i % round((len(self.activeCells)) / 10) == 0:
            #     print "%d of %d elements complete " % ((i+1), len(self.activeCells))
            #     sys.stdout.flush()
            elog.info("... %d of %d elements complete " %
                      ((i + 1), len(self.activeCells)),
                      overwrite=True)

    except Exception, e:
        elog.critical('UEB run failed.')
        elog.critical(e)
        return False
def parse_args(coordinator, arg): if ''.join(arg).strip() != '': if arg[0] == 'help': if len(arg) == 1: elog.info(h.help()) else: elog.info(h.help_function(arg[1])) elif arg[0] == 'add' : if len(arg) == 1: elog.info(h.help_function('add')) else: coordinator.add_model(arg[1]) elif arg[0] == 'remove': if len(arg) == 1: elog.info(h.help_function('remove')) else: coordinator.remove_model_by_id(arg[1]) elif arg[0] == 'link': if len(arg) != 5: elog.info(h.help_function('link')) else: coordinator.add_link(arg[1],arg[2],arg[3],arg[4]) elif arg[0] == 'showme': if len(arg) == 1: elog.info(h.help_function('showme')) else: coordinator.get_configuration_details(coordinator, arg[1]) elif arg[0] == 'connect_db': if len(arg) == 1: elog.info(h.help_function('connect_db')) else: coordinator.connect_to_db(arg[1:]) elif arg[0] == 'default_db': if len(arg) == 1: elog.info(h.help_function('default_db')) else: coordinator.set_default_db(arg[1:]) elif arg[0] == 'run': elog.info('Running Simulation in Feed Forward Mode') coordinator.run_simulation() elif arg[0] == 'load': if len(arg) == 1: elog.info(h.help_function('load')) else: coordinator.load_simulation(arg[1:]) elif arg[0] == 'db': if len(arg) == 1: elog.info(h.help_function('db')) else: coordinator.show_db_results(arg[1:]) #todo: show database time series that are available elif arg[0] == 'info': print h.info() else: print 'Command not recognized. Type "help" for a complete list of commands.'
def write_simulation_json(models, canvas_shapes, links, path):
    """
    Serializes the current simulation (models + links) to a JSON file.

    Args:
        models: list of model dicts; each must carry a 'type' key
        canvas_shapes: canvas shape objects parallel to ``models``;
            each shape's BoundingBox provides the model's saved position
        links: list of link dicts describing model-to-model connections
        path: output file path for the JSON document

    Returns:
        None (returns early when any model lacks its 'type' key)
    """
    json_models = []
    json_links = []

    for i in range(len(models)):
        model = models[i]
        if "type" not in model:
            sPrint("model does not have type as key. Model should have 'type' as key. Incorrect format",
                   MessageType.DEBUG)
            return

        if model["type"] == "NETCDF":
            json_models.append(save_netcdf_json(model, canvas_shapes, links, path))
        else:
            # parse the original parameters to identify model inputs
            params = parse_json(model['params']['path'])

            # model input properties
            model_inputs = dict()
            if 'model_inputs' in params:
                for input in params['model_inputs']:
                    # get the variable name
                    var = input['variable']
                    # get the variable value from the model
                    model_inputs[var] = model['params'][var]

            # set the model type to mdl if mdl path is present
            if 'path' in model['params']:
                model_type = 'MDL'
            else:
                model_type = model['type']

            # canvas object properties: persist the box center point
            bbox = canvas_shapes[i].BoundingBox
            model_properties = dict(xcoordinate=str((bbox[0][0] + bbox[1][0]) / 2),
                                    ycoordinate=str((bbox[0][1] + bbox[1][1]) / 2),
                                    name=model['name'],
                                    id=model['id'],
                                    model_inputs=model_inputs,
                                    path=model['params']['path'],
                                    model_type=model_type)
            json_models.append(model_properties)

    for link in links:
        link = dict(from_name=link['source_component_name'],
                    from_id=link['source_component_id'],
                    from_item=link['output_name'],
                    from_item_id=link['output_id'],
                    to_name=link['target_component_name'],
                    to_id=link['target_component_id'],
                    to_item=link['input_name'],
                    # BUGFIX: was link['input_name'] (copy-paste error) —
                    # stored the item *name* where its id belongs, breaking
                    # the symmetry with from_item_id
                    to_item_id=link['input_id'],
                    temporal_transformation=link['temporal_interpolation'],
                    spatial_transformation=link['spatial_interpolation'])
        json_links.append(link)

    # add models and links to obj that will be serialized
    sim = dict(models=json_models, links=json_links)

    with open(path, 'w') as f:
        sim_json = json.dumps(sim, sort_keys=True, indent=4, separators=(',', ': '))
        f.write(sim_json)

    # BUGFIX: elog.info was given two positional arguments; join them into
    # a single message like the sPrint call below
    elog.info('Configuration saved: ' + str(path))
    sPrint('Configuration was saved successfully: ' + str(path))
def get_configuration_details(coordinator, arg):
    """
    Prints a boxed ASCII summary of the current simulation configuration.

    Args:
        coordinator: the engine coordinator holding models/links/dbs
        arg: 'models', 'links', 'db', or 'summary' (all three)

    NOTE(review): this accesses ``coordinator.__models`` / ``__links``;
    if this def sits inside a class, Python name-mangling rewrites those
    to ``_ThatClass__models`` — confirm the attribute lookup is intended.
    """
    if len(coordinator.__models.keys()) == 0:
        elog.warning('No models found in configuration.')

    if arg.strip() == 'summary':
        elog.info('Here is everything I know about the current simulation...\n')

    # print model info
    if arg.strip() == 'models' or arg.strip() == 'summary':
        # loop through all known models
        for name, model in coordinator.__models.iteritems():
            model_output = []
            model_output.append('Model: ' + name)
            model_output.append('desc: ' + model.description())
            model_output.append('id: ' + model.id())
            # one block of lines per input/output exchange item
            for item in model.get_input_exchange_items() + model.get_output_exchange_items():
                model_output.append(str(item.id()))
                model_output.append('name: ' + item.name())
                model_output.append('description: ' + item.description())
                model_output.append('unit: ' + item.unit().UnitName())
                model_output.append('variable: ' + item.variable().VariableNameCV())
                model_output.append(' ')

            # get formatted width
            w = get_format_width(model_output)

            # print model info as a boxed table: title row, then detail rows
            elog.info(' |' + (w) * '-' + '|')
            elog.info(' *' + format_text(model_output[0], w, 'center') + '*')
            elog.info(' |' + (w) * '=' + '|')
            elog.info(' |' + format_text(model_output[1], w, 'left') + '|')
            elog.info(' |' + format_text(model_output[2], w, 'left') + '|')
            elog.info(' |' + (w) * '-' + '|')
            for l in model_output[3:]:
                elog.info(' |' + format_text(l, w, 'left') + '|')
            elog.info(' |' + (w) * '-' + '|')
            elog.info(' ')

    # print link info
    if arg.strip() == 'links' or arg.strip() == 'summary':
        # string to store link output
        link_output = []
        for linkid, link in coordinator.__links.iteritems():
            # get the link info
            From, To = link.get_link()
            link_output.append('LINK ID : ' + linkid)
            link_output.append('from: ' + From[0].name() + ' -- output --> ' + From[1].name())
            link_output.append('to: ' + To[0].name() + ' -- input --> ' + To[1].name())

        # get the formatted width
        w = get_format_width(link_output)

        # print the output
        elog.info(' |' + (w) * '-' + '|')
        elog.info(' *' + format_text(link_output[0], w, 'center') + '*')
        elog.info(' |' + (w) * '=' + '|')
        for l in link_output[1:]:
            elog.info(' |' + format_text(l, w, 'left') + '|')
        elog.info(' |' + w * '-' + '|')

    # print database info
    if arg.strip() == 'db' or arg.strip() == 'summary':
        for id, db_dict in coordinator._db.iteritems():
            # string to store db output
            db_output = []

            # get the session args
            name = db_dict['name']
            desc = db_dict['description']
            engine = db_dict['args']['engine']
            address = db_dict['args']['address']
            user = db_dict['args']['user']
            pwd = db_dict['args']['pwd']
            db = db_dict['args']['db']

            db_output.append('DATABASE : ' + id)
            db_output.append('name: ' + name)
            db_output.append('description: ' + desc)
            db_output.append('engine: ' + engine)
            db_output.append('address: ' + address)
            db_output.append('database: ' + db)
            # NOTE(review): the next line is corrupted in the source — the
            # '******' run looks like a credential-redaction artifact that
            # replaced the user value and the intervening append call.
            # Presumably it appended 'user: ' + user and then the
            # connection string; restore the original from version control
            # before shipping (as written this is a syntax error).
            db_output.append('user: '******'connection string: '+db_dict['args']['connection_string'])

            # get the formatted width
            w = get_format_width(db_output)

            # print the output
            elog.info(' |' + (w) * '-' + '|')
            elog.info(' *' + format_text(db_output[0], w, 'center') + '*')
            elog.info(' |' + (w) * '=' + '|')
            for l in db_output[1:]:
                elog.info(' |' + format_text(l, w, 'left') + '|')
            elog.info(' |' + (w) * '-' + '|')
def run_feed_forward(obj, ds=None):
    """
    executes a feed forward coupled model simulation
    Args:
        obj: engine coordinator object
        ds: dataSaveInfo object

    Returns: False if the simulation could not be completed, otherwise None
    """
    # set engine status
    obj.status.set(stdlib.Status.NOTREADY)

    # todo: determine unresolved exchange items (utilities)

    sim_st = time.time()

    # determine execution order
    elog.info("Determining execution order... ")
    sPrint("Determining execution order... ", MessageType.INFO)
    exec_order = obj.determine_execution_order()
    for i in range(0, len(exec_order)):
        elog.info("%d.) %s" % (i + 1, obj.get_model(model_id=exec_order[i]).name()))

    links = {}
    spatial_maps = {}

    # todo: move this into function
    elog.info("Generating spatial maps... ")
    sPrint("Generating spatial maps... ")
    for modelid in exec_order:
        # get links
        l = obj.get_from_links_by_model(modelid)
        links[modelid] = l

        # build spatial maps
        for linkid, link in l.iteritems():
            source = link.source_exchange_item()
            target = link.target_exchange_item()
            key = generate_link_key(link)

            # set default spatial interpolation to ExactMatch
            if link.spatial_interpolation() is None:
                sPrint(
                    "Spatial interpolation not provided, using the default mapping approach: 'Spatial Index'",
                    MessageType.WARNING,
                )
                link.spatial_interpolation(spatial_index())

            spatial_interp = link.spatial_interpolation()
            source_geoms = source.getGeometries2()
            target_geoms = target.getGeometries2()

            if len(source_geoms) == 0:
                # FIX: the '%' arguments were not parenthesized, so the format
                # string received a single value for two '%s' placeholders and
                # raised TypeError instead of reporting the problem.  Also
                # abort, as the message promises (consistent with the
                # preparation-failure path below).
                sPrint(
                    "Cannot continue simulation, %s -- %s contains 0 geometries"
                    % (link.source_component().name(), source.name()),
                    MessageType.ERROR,
                )
                return False
            if len(target_geoms) == 0:
                sPrint(
                    "Cannot continue simulation, %s -- %s contains 0 geometries"
                    % (link.target_component().name(), target.name()),
                    MessageType.ERROR,
                )
                return False

            # save the spatial mapping based on link key
            spatial_maps[key] = spatial_interp.transform(source_geoms, target_geoms)

    # prepare all models
    for modelid in exec_order:
        model_inst = obj.get_model_object(modelid).instance()
        model_inst.prepare()

        if model_inst.status() != stdlib.Status.READY:
            msg = (
                'Cannot continue with simulation because model "%s" has a status of "%s" after the preparation'
                ' phase. Status must be "%s" in order to proceed with simulation '
                % (model_inst.name(), model_inst.status(), stdlib.Status.READY)
            )
            elog.critical(msg)
            sPrint(msg, MessageType.ERROR)
            return False

    # update engine status
    obj.status.set(stdlib.Status.READY)

    # loop through models and execute run
    for modelid in exec_order:
        # update engine status
        obj.status.set(stdlib.Status.RUNNING)

        st = time.time()

        # get the current model instance
        model_inst = obj.get_model_object(modelid).instance()
        sPrint("Executing module: %s \n" % model_inst.name(), MessageType.INFO)

        try:
            # retrieve inputs from database
            sPrint("[1 of 3] Retrieving input data... ")
            input_data = model_inst.inputs()

            # pass these inputs ts to the models' run function
            sPrint("[2 of 3] Performing calculation... ")
            model_inst.run(input_data)

            # update links
            sPrint("[3 of 3] Updating links... ")
            oei = model_inst.outputs()
            update.update_links_feed_forward(links[modelid], oei, spatial_maps)

            model_inst.finish()
            elog.info("..module simulation completed in %3.2f seconds" % (time.time() - st))
        except Exception as e:
            # FIX: failures were previously swallowed silently (bare return
            # with no logging) and the engine was left in RUNNING state.
            msg = 'Error during execution of model "%s": %s' % (model_inst.name(), e)
            elog.critical(msg)
            sPrint(msg, MessageType.ERROR)

            # set the model status to failed
            model_inst.status(stdlib.Status.FAILED)
            obj.status.set(stdlib.Status.FAILED)
            return False

        # set the model status to successful
        model_inst.status(stdlib.Status.SUCCESS)

    sPrint(
        "------------------------------------------\n"
        + " Simulation Summary \n"
        + "------------------------------------------\n"
        + "Completed without error :)\n"
        + "Simulation duration: %3.2f seconds\n" % (time.time() - sim_st)
        + "------------------------------------------"
    )

    # update engine status
    obj.status.set(stdlib.Status.SUCCESS)

    # save simulation results
    model_ids = exec_order
    if ds is None:
        msg = "Simulation results will not be stored in database because database connection was not provided prior to simulation."
        elog.info(msg)
        sPrint(msg, messageType=MessageType.INFO)
    else:
        database.save(obj, ds, model_ids)
def log(self, fmt, *args): elog.info((fmt % args))
def __init__(self, config_params): """ initialization that will occur when loaded into a configuration """ super(topmodel, self).__init__(config_params) if LooseVersion(np.__version__) < LooseVersion("1.9.0"): elog.error("Could not load TOPMODEL, NumPY version 1.9.0 or greater required") raise Exception("Could not load TOPMODEL, NumPY version 1.9.0 or greater required") elog.info("Begin Component Initialization") # build inputs and outputs elog.info("Building exchange items") io = mdl.build_exchange_items_from_config(config_params) # set inputs and outputs self.inputs(value=io[stdlib.ExchangeItemType.INPUT]) self.outputs(value=io[stdlib.ExchangeItemType.OUTPUT]) # model_inputs inputs = config_params["model inputs"][0] # read input parameters elog.info("Reading input parameters") self.topo_input = inputs["ti"] self.fac_input = inputs["fac"] # read model input parameters self.c = float(inputs["m"]) self.Tmax = float(inputs["tmax"]) self.R = float(inputs["r"]) self.interception = float(inputs["interception"]) self.ti = [] self.freq = [] # read topographic input file elog.info("Reading topographic input data") self.read_topo_input() elog.info("Building input/output geometries") self.ti_geoms = None self.output_soil_moisture_geoms = None self.calc_ti_geometries() # set precipitation geometries elog.info("Setting excess precipitation geometries") self.outputs()["excess"].addGeometries2(self.ti_geoms) # set saturation geometries # elog.info('Setting soil saturation geometries') # self.outputs()['soil moisture'].addGeometries2(self.ti_geoms) # ---- calculate saturation deficit elog.info("Calculating initial saturation deficit") TI_freq = [x * y for x, y in zip(self.ti, self.freq)] self.lamda_average = sum(TI_freq) / sum(self.freq) # catchment average saturation deficit(S_bar) self.s_average = (-1.0) * self.c * ((math.log10(self.R / self.Tmax)) + self.lamda_average) elog.info("Component Initialization Completed Successfully")
def __init__(self, config_params): """ initialization that will occur when loaded into a configuration """ super(topmodel, self).__init__(config_params) if LooseVersion(np.__version__) < LooseVersion('1.9.0'): elog.error( 'Could not load TOPMODEL, NumPY version 1.9.0 or greater required' ) raise Exception( 'Could not load TOPMODEL, NumPY version 1.9.0 or greater required' ) elog.info('Begin Component Initialization') # build inputs and outputs elog.info('Building exchange items') io = mdl.build_exchange_items_from_config(config_params) # set inputs and outputs self.inputs(value=io[stdlib.ExchangeItemType.INPUT]) self.outputs(value=io[stdlib.ExchangeItemType.OUTPUT]) # model_inputs inputs = config_params['model inputs'][0] # read input parameters elog.info('Reading input parameters') self.topo_input = inputs["ti"] self.fac_input = inputs["fac"] #read model input parameters self.c = float(inputs['m']) self.Tmax = float(inputs["tmax"]) self.R = float(inputs["r"]) self.interception = float(inputs["interception"]) self.ti = [] self.freq = [] # read topographic input file elog.info('Reading topographic input data') self.read_topo_input() elog.info('Building input/output geometries') self.ti_geoms = None self.output_soil_moisture_geoms = None self.calc_ti_geometries() # set precipitation geometries elog.info('Setting excess precipitation geometries') self.outputs()['excess'].addGeometries2(self.ti_geoms) # set saturation geometries # elog.info('Setting soil saturation geometries') # self.outputs()['soil moisture'].addGeometries2(self.ti_geoms) # ---- calculate saturation deficit elog.info('Calculating initial saturation deficit') TI_freq = [x * y for x, y in zip(self.ti, self.freq)] self.lamda_average = sum(TI_freq) / sum(self.freq) # catchment average saturation deficit(S_bar) self.s_average = (-1.) * self.c * ( (math.log10(self.R / self.Tmax)) + self.lamda_average) elog.info('Component Initialization Completed Successfully')