def addToCanvas(self, event):
    """Add the currently selected WOF site/variable to the canvas as a model."""
    # Gather the current selection context from the dialog.
    end, siteobject, start, variable_code = self._preparationToGetValues()

    selected_codes = self.get_all_selected_variable_site_codes()
    if len(selected_codes) > 1:
        elog.warning(
            "We do not support adding more then one item to the canvas at this point. We added "
            + selected_codes[0])

    # no table row selected
    if variable_code is None:
        return

    engine.addModel(model_type='wof',
                    wsdl=self.parent.api.wsdl,
                    site=siteobject.site_code,
                    variable=variable_code,
                    start=start,
                    end=end,
                    network=siteobject.network)
    self.Close()
def on_close(self, event):
    """Confirm quit, then shut down engine processes, worker threads, and the window.

    :param event: the wx close event; when None the confirmation dialog is
                  skipped and shutdown proceeds unconditionally.
    """
    dial = wx.MessageDialog(None, 'Are you sure to quit?', 'Question',
                            wx.YES_NO | wx.NO_DEFAULT | wx.ICON_QUESTION)
    dial.SetYesNoLabels(yes="Quit", no="Cancel")

    if event == None or dial.ShowModal() == wx.ID_YES:
        # kill multiprocessing
        e = Engine()
        msg = e.close()
        elog.debug('Closing Engine Processes: %s' % msg)

        # kill all threads
        threads = {t.name:t for t in threading.enumerate()}
        # MainThread is removed so it is never joined/stopped below
        mainthread = threads.pop('MainThread')
        # NOTE(review): 'msg' here is still the Engine.close() result, not
        # thread info — presumably intentional reuse; confirm.
        elog.debug('Closing EMIT Threads: %s' % msg)

        non_daemon = []
        for t in threads.itervalues():
            # check if the thread is a daemon, if so, it should not cause any problems
            if t.isDaemon():
                elog.debug('%s daemon=%s' %(t.name, t.isDaemon()))
            else:
                # add this thread to the non-daemon list
                non_daemon.append(t)

        for t in non_daemon:
            elog.warning('%s is not a daemon thread and may cause problems while shutting down' % t.name)
            # bounded join (1s) so shutdown cannot hang on a stuck thread
            t.join(1)

        # determine if there are any non-daemon threads that are still alive
        non_daemon_and_alive = []
        for t in threads.itervalues():
            if not t.isDaemon() and t.isAlive():
                non_daemon_and_alive.append(t)

        # attempt to stop non-daemon threads
        # NOTE(review): _Thread__stop is a CPython 2 internal and only marks
        # the thread stopped; rebinding 'e' also shadows the Engine instance.
        try:
            for t in non_daemon_and_alive:
                t._Thread__stop()
        except Exception, e:
            elog.error('Error encountered closing thread %s: %s' % (t.name, e))

        # close the main thread
        self.Destroy()
        # NOTE(review): these two lines are bare attribute lookups, not calls —
        # they have no effect as written. Presumably meant to be
        # wx.GetApp().ExitMainLoop() and wx.WakeUpMainThread(); confirm intent.
        wx.App.ExitMainLoop
        wx.WakeUpMainThread
def transform(self, ingeoms, outgeoms):
    """Pair input and output geometries index-by-index.

    Returns a list of [in_geom, out_geom] pairs truncated to the shorter of
    the two sequences; a warning is emitted when the lengths differ.
    """
    if len(ingeoms) != len(outgeoms):
        msg = 'input and output geometries have different lengths. This may lead to inconsistencies during %s spatial mapping.' % self.name()
        sPrint(msg, MessageType.WARNING)
        elog.warning(msg)

    # zip stops at the shorter sequence, matching min(len(in), len(out)) indexing
    return [[ingeom, outgeom] for ingeom, outgeom in zip(ingeoms, outgeoms)]
def draw_link(self, event):
    """Draw a canvas line between the two models referenced by a link event."""
    source_rect = None
    target_rect = None

    # resolve the event's source/target ids back to their canvas rectangles
    for rect, model_id in self.models.iteritems():
        if model_id == event.source_id:
            source_rect = rect
        elif model_id == event.target_id:
            target_rect = rect

    if source_rect is None or target_rect is None:
        elog.warning("Could not find Model identifier in loaded models")
        sPrint("Could not find Model identifier in loaded models", MessageType.WARNING)
        raise Exception('Could not find Model identifier in loaded models')

    # This is what actually draws the line
    self.createLine(source_rect, target_rect)
def __init__(self, args):
    '''
    Initializes the base wrapper and basic model parameters
    :param args: dictionary of arguments. Should contain timestep name,
        timestep value, model code, model description, general
        simulation_start, general simulation_end
    '''
    super(Wrapper, self).__init__()

    try:
        if 'time_step' in args:
            # build a timedelta kwarg, e.g. {'minutes': 30.0}
            dt = {args['time_step'][0]['name']: float(args['time_step'][0]['value'])}
            t = datetime.timedelta(**dt)
            self.time_step(t.total_seconds())
        else:
            elog.warning('Missing "time_step" parameters in *.mdl. You may encounter errors if you continue')

        if 'model' in args:
            self.name(args['model'][0]['code'])
            self.description(args['model'][0]['description'])
        else:
            elog.warning('Missing "model" parameters in *.mdl. You may encounter errors if you continue')

        if 'general' in args:
            self.simulation_start(parser.parse(args['general'][0]['simulation_start']))
            self.simulation_end(parser.parse(args['general'][0]['simulation_end']))
        else:
            elog.warning('Missing "general" parameters in *.mdl. You may encounter errors if you continue')
    except Exception as e:
        # BUGFIX: was a bare 'except:' that swallowed every error (including
        # KeyboardInterrupt/SystemExit) and discarded the cause; narrow the
        # handler and surface the underlying exception for debugging.
        elog.error('Malformed parameters found in *.mdl: %s' % e)
def addToCanvas(self, event):
    """Build a WOF model definition from the dialog selection and add it to the canvas."""
    end, siteobject, start, variable_code = self._preparationToGetValues()

    codes = self.get_all_selected_variable_site_codes()
    if len(codes) > 1:
        elog.warning("We do not support adding more then one item to the canvas at this point. We added " + codes[0])

    if variable_code is None:
        # nothing selected in the table; nothing to add
        return

    model_args = {
        'model_type': 'wof',
        'wsdl': self.parent.api.wsdl,
        'site': siteobject.site_code,
        'variable': variable_code,
        'start': start,
        'end': end,
        'network': siteobject.network,
    }
    engine.addModel(**model_args)
    self.Close()
def __init__(self, args):
    '''
    Initializes the base wrapper and basic model parameters
    :param args: dictionary of arguments. Should contain timestep name,
        timestep value, model code, model description, general
        simulation_start, general simulation_end
    '''
    super(Wrapper, self).__init__()

    try:
        # timestep, e.g. {'minutes': 30.0} -> seconds
        if 'time_step' in args:
            dt = {args['time_step'][0]['name']: float(args['time_step'][0]['value'])}
            t = datetime.timedelta(**dt)
            self.time_step(t.total_seconds())
        else:
            elog.warning('Missing "time_step" parameters in *.mdl. You may encounter errors if you continue')

        # model identity
        if 'model' in args:
            self.name(args['model'][0]['code'])
            self.description(args['model'][0]['description'])
        else:
            elog.warning('Missing "model" parameters in *.mdl. You may encounter errors if you continue')

        # simulation time span
        if 'general' in args:
            self.simulation_start(parser.parse(args['general'][0]['simulation_start']))
            self.simulation_end(parser.parse(args['general'][0]['simulation_end']))
        else:
            elog.warning('Missing "general" parameters in *.mdl. You may encounter errors if you continue')
    except Exception as e:
        # BUGFIX: replaced bare 'except:' (which hid the failure cause and
        # trapped interrupts) with a narrowed handler that reports the error.
        elog.error('Malformed parameters found in *.mdl: %s' % e)
def initialize_timeseries_variable_array(self, forcing_data, number_of_timesteps):
    """Allocate and populate the ctypes time-series arrays for each UEB forcing variable.

    For each of the 13 known forcing variables, inspects its input type
    (infType) and either reads values from a DAT file, reserves space for
    runtime data, or stores the parameter default.

    :param forcing_data: NOTE(review): appears unused in this body — confirm.
    :param number_of_timesteps: length to allocate for per-timestep arrays.
    """
    # number_of_timesteps += 10
    variables = {'Ta':0, 'Prec':1, 'v':2, 'RH':3, 'AP':4, 'Qsi':5, 'Qli':6, 'Qnet':7, 'Qg':8, 'Snowalb':9, 'Tmin':10, 'Tmax':11, 'Vp':12}

    # loop over all of the variables
    for i in range(13):
        # get the variable name
        var_name = self.C_strinpforcArray.contents[i].infName

        # get the index corresponding to this variable
        # NOTE(review): idx is computed but never used — the code indexes by
        # loop position i instead; confirm the input array ordering matches
        # the 'variables' mapping.
        idx = variables[var_name]

        # get the type of input forcing (0: dat, 1: netcdf, 2: parameter)
        infType = self.C_strinpforcArray.contents[i].infType

        # dat file
        if infType == 0:
            # initialize the array
            self.C_tsvarArray.contents[i] = (c_float * number_of_timesteps)()

            # read the DAT file and set data
            with open(os.path.join(self.base_dir, self.C_strinpforcArray.contents[i].infFile), 'r') as f:
                lines = f.readlines()
                row = 0
                # skip the header line (index 0)
                for l in range(1, len(lines)):
                    # DAT format: Year Month Day Hour value
                    data = lines[l].strip().split()
                    # save the value column
                    self.C_tsvarArray.contents[i][row] = float(data[-1])
                    # increment the row
                    row += 1

        # netcdf file
        elif infType == 1:
            # # initialize the array
            self.C_tsvarArray.contents[i] = (c_float * number_of_timesteps)()
            message = 'NetCDF input files are not supported at this time. Pass NetCDF values at runtime via data components instead.'
            elog.warning(message)
            # # initialize the array
            # self.C_tsvarArray.contents[i] = (c_float * number_of_timesteps)()
            # # get netcdf variables
            # tvar = self.C_strinpforcArray.contents[i].inftimeVar
            # var = self.C_strinpforcArray.contents[i].infvarName
            # numfiles = self.C_strinpforcArray.contents[i].numNcfiles
            # # todo add support for multiple netcdf files
            # if numfiles > 1: raise Exception('Cannot process multiple netcdf files for a single variable. Support for this feature is coming soon.')
            # f = self.C_strinpforcArray.contents[i].infFile
            # f = f + '0.nc' if f[-2:] != '.nc' else f
            #
            # # read netcdf data and set values
            # handle = nc.Dataset(os.path.join(self.base_dir, f), 'r')
            # values = [v.flatten() for v in handle.variables[var][:]]
            # for row in range(len(values)):
            #     self.C_tsvarArray.contents[i][row] = values[row]

        # parameter value
        elif infType == 2:
            # initialize space for parameters and set values
            self.C_tsvarArray.contents[i] = (c_float * 2)()
            self.C_tsvarArray.contents[i][0] = self.C_strinpforcArray.contents[i].infType
            self.C_tsvarArray.contents[i][1] = self.C_strinpforcArray.contents[i].infdefValue

        # input component
        elif infType == 3:
            # initialize the array; values are supplied at runtime by a data component
            self.C_tsvarArray.contents[i] = (c_float * number_of_timesteps)()
            pass

        # variable not used (computed internally)
        else:
            self.C_tsvarArray.contents[i] = (c_float * 2)()
            self.C_tsvarArray.contents[i][0] = self.C_strinpforcArray.contents[i].infType
            self.C_tsvarArray.contents[i][1] = self.C_strinpforcArray.contents[i].infdefValue
def build_exchange_items_from_config(params):
    """Create input/output exchange items from parsed *.mdl parameters.

    :param params: dict parsed from a *.mdl file; may contain 'input' and
                   'output' lists, plus 'basedir' used to resolve relative
                   elementset file paths.
    :return: dict keyed by stdlib.ExchangeItemType.INPUT / OUTPUT, each
             holding a list of stdlib.ExchangeItem objects.
    :raises Exception: when a referenced elementset file cannot be found.
    """
    # get all inputs and outputs
    iitems = params['input'] if 'input' in params else []
    oitems = params['output'] if 'output' in params else []
    eitems = iitems + oitems

    items = {stdlib.ExchangeItemType.INPUT: [], stdlib.ExchangeItemType.OUTPUT: []}

    # loop through each input/output and create an exchange item
    for io in eitems:
        variable = None
        unit = None
        srs = None
        geoms = []

        # get all input and output exchange items as a list
        iotype = stdlib.ExchangeItemType.OUTPUT if io['type'].upper() == stdlib.ExchangeItemType.OUTPUT else stdlib.ExchangeItemType.INPUT

        for key, value in io.iteritems():
            sPrint(key, MessageType.DEBUG)

            if key == 'variable_name_cv':
                sPrint('Creating Variable', MessageType.DEBUG)
                variable = create_variable(value)
                sPrint('Done Creating Variable', MessageType.DEBUG)
                if 'variable_definition' in io.keys():
                    variable.VariableDefinition(io['variable_definition'])

            elif key == 'unit_type_cv':
                unit = create_unit(value)

            elif key == 'elementset':
                # check if the value is a path
                if os.path.dirname(value) != '':
                    # get filepath relative to *.mdl
                    gen_path = os.path.abspath(os.path.join(params['basedir'], value))
                    if not os.path.isfile(gen_path):
                        elog.critical('Could not find file at path %s, generated from relative path %s' % (gen_path, value))
                        raise Exception('Could not find file at path %s, generated from relative path %s' % (gen_path, value))

                    # parse the geometry from the shapefile
                    geoms, srs = utilities.spatial.read_shapefile(gen_path)
                    # BUGFIX: AutoIdentifyEPSG() returns an OGRERR status code,
                    # not a spatial reference, so its result must not clobber srs
                    srs.AutoIdentifyEPSG()
                # otherwise it must be a wkt
                else:
                    try:
                        # get the wkt text string
                        value = value.strip('\'').strip('"')
                        # parse the wkt string into a stdlib.Geometry object
                        geom = utilities.geometry.fromWKT(value)
                        for g in geom:
                            geoms.append(g)
                    except Exception:
                        # this is OK. Just set the geoms to [] and assume that
                        # they will be populated during initialize.
                        # BUGFIX: previously reset the temp var 'geom' instead
                        # of 'geoms', leaving partially-parsed geometries behind
                        elog.warning('Could not load component geometry from *.mdl file')
                        geoms = []

        # BUGFIX: key test was misspelled 'espg_code' while the read used
        # 'epsg_code', so this branch could never run correctly
        if 'epsg_code' in io:
            srs = utilities.spatial.get_srs_from_epsg(io['epsg_code'])

        # generate a unique uuid for this exchange item
        id = uuid.uuid4().hex

        # create exchange item
        ei = stdlib.ExchangeItem(id,
                                 name=variable.VariableNameCV(),
                                 desc=variable.VariableDefinition(),
                                 unit=unit,
                                 variable=variable,
                                 # srs_epsg=srs,  # todo: this is causing problems
                                 type=iotype)

        # add geometry to exchange item (NEW)
        ei.addGeometries2(geoms)

        # save exchange items based on type
        items[ei.type()].append(ei)

    return items
def initialize_timeseries_variable_array(self, forcing_data, number_of_timesteps):
    """Allocate and fill the ctypes time-series buffers for each UEB forcing variable.

    Dispatches on each variable's input type (infType): 0 reads a DAT file,
    1 is unsupported NetCDF input, 2 stores a parameter default, 3 reserves
    space for runtime component data, anything else stores the default.

    :param forcing_data: NOTE(review): not referenced in this body — confirm
        whether it is still needed.
    :param number_of_timesteps: allocation length for per-timestep arrays.
    """
    # number_of_timesteps += 10
    variables = {
        'Ta': 0,
        'Prec': 1,
        'v': 2,
        'RH': 3,
        'AP': 4,
        'Qsi': 5,
        'Qli': 6,
        'Qnet': 7,
        'Qg': 8,
        'Snowalb': 9,
        'Tmin': 10,
        'Tmax': 11,
        'Vp': 12
    }

    # loop over all of the variables
    for i in range(13):
        # get the variable name
        var_name = self.C_strinpforcArray.contents[i].infName

        # get the index corresponding to this variable
        # NOTE(review): idx is never used below — indexing is by loop counter
        # i; verify the forcing array order matches the 'variables' mapping.
        idx = variables[var_name]

        # get the type of input forcing (0: dat, 1: netcdf, 2: parameter)
        infType = self.C_strinpforcArray.contents[i].infType

        # dat file
        if infType == 0:
            # initialize the array
            self.C_tsvarArray.contents[i] = (c_float * number_of_timesteps)()

            # read the DAT file and set data
            with open(
                    os.path.join(
                        self.base_dir,
                        self.C_strinpforcArray.contents[i].infFile), 'r') as f:
                lines = f.readlines()
                row = 0
                # first line is a header and is skipped
                for l in range(1, len(lines)):
                    # DAT format: Year Month Day Hour value
                    data = lines[l].strip().split()
                    # save the value column
                    self.C_tsvarArray.contents[i][row] = float(data[-1])
                    # increment the row
                    row += 1

        # netcdf file
        elif infType == 1:
            # # initialize the array
            self.C_tsvarArray.contents[i] = (c_float * number_of_timesteps)()
            message = 'NetCDF input files are not supported at this time. Pass NetCDF values at runtime via data components instead.'
            elog.warning(message)
            # # initialize the array
            # self.C_tsvarArray.contents[i] = (c_float * number_of_timesteps)()
            # # get netcdf variables
            # tvar = self.C_strinpforcArray.contents[i].inftimeVar
            # var = self.C_strinpforcArray.contents[i].infvarName
            # numfiles = self.C_strinpforcArray.contents[i].numNcfiles
            # # todo add support for multiple netcdf files
            # if numfiles > 1: raise Exception('Cannot process multiple netcdf files for a single variable. Support for this feature is coming soon.')
            # f = self.C_strinpforcArray.contents[i].infFile
            # f = f + '0.nc' if f[-2:] != '.nc' else f
            #
            # # read netcdf data and set values
            # handle = nc.Dataset(os.path.join(self.base_dir, f), 'r')
            # values = [v.flatten() for v in handle.variables[var][:]]
            # for row in range(len(values)):
            #     self.C_tsvarArray.contents[i][row] = values[row]

        # parameter value
        elif infType == 2:
            # initialize space for parameters and set values
            self.C_tsvarArray.contents[i] = (c_float * 2)()
            self.C_tsvarArray.contents[i][
                0] = self.C_strinpforcArray.contents[i].infType
            self.C_tsvarArray.contents[i][
                1] = self.C_strinpforcArray.contents[i].infdefValue

        # input component
        elif infType == 3:
            # initialize the array; filled at runtime by a data component
            self.C_tsvarArray.contents[i] = (c_float * number_of_timesteps)()
            pass

        # variable not used (computed internally)
        else:
            self.C_tsvarArray.contents[i] = (c_float * 2)()
            self.C_tsvarArray.contents[i][
                0] = self.C_strinpforcArray.contents[i].infType
            self.C_tsvarArray.contents[i][
                1] = self.C_strinpforcArray.contents[i].infdefValue
def get_configuration_details(coordinator, arg):
    """Print a boxed, formatted summary of the current simulation configuration.

    :param coordinator: the simulation coordinator holding models, links, and
        database sessions.
    :param arg: selects the section to show: 'models', 'links', 'db', or
        'summary' (everything).

    NOTE(review): this function accesses coordinator.__models/__links from
    outside the class, where Python name mangling does not apply — confirm
    these attribute names resolve (the db section uses the single-underscore
    coordinator._db, which is inconsistent with the other two).
    """
    if len(coordinator.__models.keys()) == 0:
        elog.warning('No models found in configuration.')

    if arg.strip() == 'summary':
        elog.info('Here is everything I know about the current simulation...\n')

    # print model info
    if arg.strip() == 'models' or arg.strip() == 'summary':
        # loop through all known models
        for name, model in coordinator.__models.iteritems():
            model_output = []
            model_output.append('Model: ' + name)
            model_output.append('desc: ' + model.description())
            model_output.append('id: ' + model.id())

            for item in model.get_input_exchange_items() + model.get_output_exchange_items():
                model_output.append(str(item.id()))
                model_output.append('name: ' + item.name())
                model_output.append('description: ' + item.description())
                model_output.append('unit: ' + item.unit().UnitName())
                model_output.append('variable: ' + item.variable().VariableNameCV())
                model_output.append(' ')

            # get formatted width
            w = get_format_width(model_output)

            # print model info
            elog.info(' |' + (w) * '-' + '|')
            elog.info(' *' + format_text(model_output[0], w, 'center') + '*')
            elog.info(' |' + (w) * '=' + '|')
            elog.info(' |' + format_text(model_output[1], w, 'left') + '|')
            elog.info(' |' + format_text(model_output[2], w, 'left') + '|')
            elog.info(' |' + (w) * '-' + '|')
            for l in model_output[3:]:
                elog.info(' |' + format_text(l, w, 'left') + '|')
            elog.info(' |' + (w) * '-' + '|')
            elog.info(' ')

    # print link info
    if arg.strip() == 'links' or arg.strip() == 'summary':
        # string to store link output
        link_output = []
        for linkid, link in coordinator.__links.iteritems():
            # get the link info
            From, To = link.get_link()
            link_output.append('LINK ID : ' + linkid)
            link_output.append('from: ' + From[0].name() + ' -- output --> ' + From[1].name())
            link_output.append('to: ' + To[0].name() + ' -- input --> ' + To[1].name())

            # get the formatted width
            w = get_format_width(link_output)

            # print the output
            elog.info(' |' + (w) * '-' + '|')
            elog.info(' *' + format_text(link_output[0], w, 'center') + '*')
            elog.info(' |' + (w) * '=' + '|')
            for l in link_output[1:]:
                elog.info(' |' + format_text(l, w, 'left') + '|')
            elog.info(' |' + w * '-' + '|')

    # print database info
    if arg.strip() == 'db' or arg.strip() == 'summary':
        for id, db_dict in coordinator._db.iteritems():
            # string to store db output
            db_output = []

            # get the session args
            name = db_dict['name']
            desc = db_dict['description']
            engine = db_dict['args']['engine']
            address = db_dict['args']['address']
            user = db_dict['args']['user']
            pwd = db_dict['args']['pwd']
            db = db_dict['args']['db']

            db_output.append('DATABASE : ' + id)
            db_output.append('name: ' + name)
            db_output.append('description: ' + desc)
            db_output.append('engine: ' + engine)
            db_output.append('address: ' + address)
            db_output.append('database: ' + db)
            # BUGFIX: this segment was syntactically broken in the source
            # (credential-scrubber residue '******'); reconstructed as the two
            # appends the surrounding pattern implies. The password ('pwd') is
            # deliberately not printed.
            db_output.append('user: ' + user)
            db_output.append('connection string: ' + db_dict['args']['connection_string'])

            # get the formatted width
            w = get_format_width(db_output)

            # print the output
            elog.info(' |' + (w) * '-' + '|')
            elog.info(' *' + format_text(db_output[0], w, 'center') + '*')
            elog.info(' |' + (w) * '=' + '|')
            for l in db_output[1:]:
                elog.info(' |' + format_text(l, w, 'left') + '|')
            elog.info(' |' + (w) * '-' + '|')