def createLine(self, R1, R2, image_name="questionMark.png"):
    if R1 == R2:
        elog.error('Cannot link a model to itself')
        sPrint('Cannot link a model to itself', MessageType.ERROR)
        return
    else:
        # get the center of the objects on the canvas
        x1, y1 = R1.XY
        x2, y2 = R2.XY
        points = [(float(x1), float(y1)), (float(x2), float(y2))]

        # no link exists between the two models, so add the first one
        line = SmoothLineWithArrow(points, image_name=image_name)
        self.links[line] = [R1, R2]
        self.arrows[line.Arrow] = [R1, R2]
        line.type = LogicCanvasObjects.ShapeType.Link

        # calculate the length of the line; used to show/hide the arrow
        line.Arrow.type = LogicCanvasObjects.ShapeType.ArrowHead
        self.FloatCanvas.AddObject(line)

        # line.Arrow must also be added to the canvas in order to bind to it
        self.FloatCanvas.AddObject(line.Arrow)
        line.Arrow.PutInBackground()

        line.Arrow.Bind(FC.EVT_FC_LEFT_DOWN, self.on_arrow_clicked)
        line.Arrow.Bind(FC.EVT_FC_RIGHT_DOWN, self.on_launch_context)
        self.FloatCanvas.Draw()
def insert_feature_actions_bulk(self, SamplingFeatureIDs=[], ActionIDs=[]):
    """
    Performs a bulk insert of feature actions
    :return: list of feature action ids

    FeatureActionID (integer)
    SamplingFeatureID (integer)
    ActionID (integer)
    """
    if len(SamplingFeatureIDs) != len(ActionIDs):
        elog.error('Length of SamplingFeatureIDs and ActionIDs must be equal')
        return False

    valCount = len(SamplingFeatureIDs)

    # get the last record index
    res = self.cursor.execute('SELECT last_insert_rowid() FROM FeatureActions')
    self.conn.commit()
    startID = res.fetchone() or (-1,)
    startID = startID[0] + 1  # increment the last id

    FeatureActionIDs = range(startID, startID + valCount, 1)
    vals = zip(FeatureActionIDs, SamplingFeatureIDs, ActionIDs)

    self.cursor.executemany('INSERT OR IGNORE INTO FeatureActions VALUES (?, ?, ?)', vals)
    self.conn.commit()

    # return the feature action ids
    return FeatureActionIDs
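# Hypothetical usage sketch for insert_feature_actions_bulk, assuming `db` is
# an instance of the sqlite api class that defines it, with an open
# connection. The id values below are illustrative.
sampling_feature_ids = [101, 102, 103]
action_ids = [7, 7, 7]  # the same action applied to three sampling features
feature_action_ids = db.insert_feature_actions_bulk(sampling_feature_ids, action_ids)
if feature_action_ids is False:
    print('bulk insert aborted: the id lists differ in length')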
def addGeometries2(self, geometries=None):
    """
    Adds geometries to the exchange item
    :param geometries: list (or numpy array) of geometries
    :return: 1 on success, otherwise 0
    """
    # make sure the input geometries are iterable
    if not isinstance(geometries, list) and not isinstance(geometries, numpy.ndarray):
        elog.error('Encountered an error while adding geometries: Unsupported argument type: %s' % type(geometries))
        sPrint('Encountered an error while adding geometries: Unsupported argument type: %s' % type(geometries), MessageType.ERROR)
        return 0

    # keep only the supported geometry types
    geoms = []
    for g in geometries:
        if isinstance(g, Geometry2):
            geoms.append(g)

    # save geometries
    self.__geoms2.extend(geoms)

    # notify that not all geometries were saved
    if len(geoms) != len(geometries):
        elog.error('Encountered unsupported geometry types while adding geometries to exchange item. Not all items may have been saved.')
        sPrint('Encountered unsupported geometry types while adding geometries to exchange item. Not all items may have been saved.', MessageType.WARNING)
        return 0

    return 1
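# Hypothetical usage sketch for addGeometries2, assuming `item` is an
# ExchangeItem and that Geometry2 wraps an OGR geometry type; the constructor
# call shown here is illustrative.
points = [Geometry2(ogr.wkbPoint) for _ in range(3)]
if not item.addGeometries2(points):
    elog.error('some geometries could not be added to the exchange item')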
def connect_to_ODM2_db(title, desc, engine, address, db, user, pwd):
    # create a db session
    session = dbconnection2.createConnection(engine, address, db, user, pwd)

    db_connections = {}
    if session:
        # get the connection string
        connection_string = session.engine.url

        # save this session in the db_connections object
        db_id = uuid.uuid4().hex[:5]
        db_connections[db_id] = {'name': title,
                                 'session': session,
                                 'connection_string': connection_string,
                                 'description': desc,
                                 'args': {'name': title, 'desc': desc, 'engine': engine,
                                          'address': address, 'db': db, 'user': user,
                                          'pwd': pwd}}

        elog.info('Connected to: %s [%s]' % (connection_string.__repr__(), db_id))
        sPrint('Connected to: %s [%s]' % (connection_string.__repr__(), db_id))
    else:
        elog.error('Could not establish a connection with the database')
        sPrint('Could not establish a connection with the database', MessageType.ERROR)
        return None

    return db_connections
def connect_to_db(title, desc, engine, address, db=None, user=None, pwd=None):
    d = {}
    session = dbconnection2.createConnection(engine, address, db, user, pwd)
    if not session:
        elog.error('Could not establish a connection with the database')
        sPrint('Could not establish a connection with the database', MessageType.ERROR)
        return

    # adjust the connection pool timeout
    session.engine.pool._timeout = 30

    connection_string = session.engine.url

    # save this session in the db_connections object
    db_id = uuid.uuid4().hex[:5]
    d[db_id] = {'name': title,
                'session': session,
                'connection_string': connection_string,
                'description': desc,
                'args': dict(address=connection_string, desc=desc, engine=engine,
                             id=db_id, name=db, user=None, pwd=None, default=False,
                             db=None)}

    elog.info('Connected to: %s [%s]' % (connection_string.__repr__(), db_id))
    sPrint('Connected to: %s [%s]' % (connection_string.__repr__(), db_id))

    return d
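# Hypothetical usage sketch for connect_to_db with a sqlite file; the path and
# titles are illustrative. The returned dict maps a short random id to the
# session metadata.
connections = connect_to_db('my db', 'a test connection', 'sqlite', '/tmp/odm2.sqlite')
if connections:
    db_id = list(connections.keys())[0]
    session = connections[db_id]['session']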
def setValuesByTime(self, values, timevalue, geometry_index_slice=(None, None, None)):
    try:
        # get time index
        idx, date = self.getDates2(timevalue)

        # get index ranges
        gb, ge, gstep = geometry_index_slice

        # convert the values into a numpy array if they aren't already
        if not isinstance(values, numpy.ndarray):
            values = numpy.array(values)

        # set the values[times][geoms]
        self.__values2[idx, gb:ge:gstep] = values
    except Exception as e:
        elog.error('Error ExchangeItem.setValuesByTime: %s' % e)
        sPrint('Error setting values for time %s, geometries %s' % (str(timevalue), str(geometry_index_slice)), MessageType.ERROR)
        return False
    return True
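# Hypothetical usage sketch for setValuesByTime, assuming `item` is an
# ExchangeItem whose dates/values arrays were initialized (e.g. via
# initializeDatesValues) and that `when` matches a date in the time series.
import datetime
when = datetime.datetime(2015, 6, 1)
item.setValuesByTime([0.5, 0.7, 0.9], when)                        # one value per geometry
item.setValuesByTime([0.5], when, geometry_index_slice=(0, 1, 1))  # first geometry only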
def __init__(self, args):
    '''
    Initializes the base wrapper and basic model parameters
    :param args: dictionary of arguments. Should contain the time_step name,
                 time_step value, model code, model description, general
                 simulation_start, and general simulation_end
    '''
    super(Wrapper, self).__init__()

    try:
        if 'time_step' in args.keys():
            dt = {args['time_step'][0]['name']: float(args['time_step'][0]['value'])}
            t = datetime.timedelta(**dt)
            self.time_step(t.total_seconds())
        else:
            elog.warning('Missing "time_step" parameters in *.mdl. You may encounter errors if you continue')

        if 'model' in args.keys():
            self.name(args['model'][0]['code'])
            self.description(args['model'][0]['description'])
        else:
            elog.warning('Missing "model" parameters in *.mdl. You may encounter errors if you continue')

        if 'general' in args:
            self.simulation_start(parser.parse(args['general'][0]['simulation_start']))
            self.simulation_end(parser.parse(args['general'][0]['simulation_end']))
        else:
            elog.warning('Missing "general" parameters in *.mdl. You may encounter errors if you continue')
    except Exception:
        elog.error('Malformed parameters found in *.mdl')
def create_database_connections_from_args(title, desc, engine, address, db, user, pwd):
    # fixme: all database connections should use the updated ODM library
    if engine == 'sqlite':
        return connect_to_db(title, desc, engine, address, db, user, pwd)

    # old database connection api
    d = {'name': title,
         'desc': desc,
         'engine': engine,
         'address': address,
         'db': db,
         'user': user,
         'pwd': pwd}

    # database connections dictionary
    db_connections = {}

    # build database connection
    # dbconn = odm2.api.dbconnection()
    session = dbconnection.createConnection(engine, address, db, user, pwd)

    # add connection string to dictionary (for backup/debugging)
    # d['connection_string'] = connection_string

    # create a session
    if session:
        # get the connection string
        connection_string = session.engine.url

        # save this session in the db_connections object
        db_id = uuid.uuid4().hex[:5]
        d['id'] = db_id
        db_connections[db_id] = {'name': d['name'],
                                 'session': session,
                                 'connection_string': connection_string,
                                 'description': d['desc'],
                                 'args': d}

        elog.info('Connected to: %s [%s]' % (connection_string.__repr__(), db_id))
        sPrint('Connected to: %s [%s]' % (connection_string.__repr__(), db_id))
    else:
        elog.error('Could not establish a connection with the database')
        sPrint('Could not establish a connection with the database', MessageType.ERROR)
        return None

    return db_connections
def create_database_connections_from_file(ini):
    # database connections dictionary
    db_connections = {}

    # parse the database connections file
    cparser = ConfigParser.ConfigParser(None, multidict)
    cparser.read(ini)
    sections = cparser.sections()

    # create a session for each database connection in the ini file
    for s in sections:
        # put ini args into a dictionary
        d = {}
        options = cparser.options(s)
        d['name'] = s
        for option in options:
            d[option] = cparser.get(s, option)

        # build database connection
        session = dbconnection2.createConnection(d['engine'], d['address'],
                                                 d['database'], d['username'],
                                                 d['password'])
        if session:
            # adjust the connection pool timeout
            session.engine.pool._timeout = 30

            connection_string = session.engine.url

            # add connection string to dictionary (for backup/debugging)
            d['connection_string'] = connection_string

            # save this session in the db_connections object
            db_id = uuid.uuid4().hex[:5]
            d['id'] = db_id
            db_connections[db_id] = {'name': d['name'],
                                     'session': session,
                                     'connection_string': connection_string,
                                     'description': d['description'],
                                     'args': d}

            elog.info('Connected to: %s [%s]' % (connection_string.__repr__(), db_id))
            sPrint('Connected to: %s [%s]' % (connection_string.__repr__(), db_id))
        else:
            msg = 'Could not establish a connection with the following database: ***@%s/%s' % (d['address'], d['database'])
            elog.error(msg)
            sPrint(msg, MessageType.ERROR)

    return db_connections
def validate_json_model(data):
    """
    Checks that the json model's values are valid
    :param data: dictionary where the values are lists of dictionaries. Type(dict: [dict])
    :return:
    """
    try:
        # if no sections are found, then the file format must be incorrect
        if len(data) == 0:
            raise Exception('> [Exception] Invalid model configuration file')

        # load the controlled vocabulary lookup tables
        var_cv = os.path.join(io.getAppDataDir(), 'dat/var_cv.dat')
        unit_cv = os.path.join(io.getAppDataDir(), 'dat/units_cv.dat')
        var = pickle.load(open(var_cv, 'rb'))
        unit = pickle.load(open(unit_cv, 'rb'))

        ignorecv = int(data["options"][0]["ignorecv"])

        for key, value in data.iteritems():
            if isinstance(value, list):
                for item in value:
                    for k, v in item.iteritems():
                        # validate the date format of the simulation bounds
                        if k == "simulation_start" or k == "simulation_end":
                            try:
                                datetime.datetime.strptime(v, getattr(json_types, k))
                            except ValueError:
                                raise ValueError("Incorrect data format, should be " + getattr(json_types, k))
                        else:
                            if not ignorecv:
                                if k == "variable_name_cv":
                                    if v not in var:
                                        raise Exception(v + ' is not a valid controlled vocabulary term')
                                if k == "unit_type_cv":
                                    if v not in unit:
                                        raise Exception(v + ' is not a valid controlled vocabulary term')

        software = data["software"][0]
        relpath = software["filepath"]  # change name to filepath
        basedir = data["basedir"]
        abspath = os.path.abspath(os.path.join(basedir, relpath))
        sys.path.append(basedir)

        if not os.path.isfile(abspath):
            raise Exception(abspath + " is not a valid file")

        # check that the software class name exists
        classname = software["classname"]
        filename = os.path.basename(abspath)
        module = imp.load_source(filename.split(".")[0], abspath)
        m = getattr(module, classname)
    except Exception as e:
        elog.error('Configuration Parsing Error: ' + str(e))
        sPrint('Configuration Parsing Error: ' + str(e), MessageType.ERROR)
        return 0
def srs(self, srs_epsg=None):
    if srs_epsg is not None:
        self.__srs = osr.SpatialReference()
        if self.__srs.ImportFromEPSG(srs_epsg) != 0:
            from osgeo import gdal
            msg = gdal.GetLastErrorMsg()
            elog.error('Could not set srs: %s' % msg)
            sPrint('Unable to load spatial reference %s: %s' % (str(srs_epsg), msg), MessageType.ERROR)
            sPrint('This may cause errors with future operations. It is recommended that the GDAL_DATA path is fixed in the EMIT environment settings before continuing.', MessageType.CRITICAL)
    return self.__srs
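# Hypothetical usage sketch for the srs accessor, assuming `item` exposes it
# and GDAL_DATA is configured so EPSG lookups succeed; 4326 is WGS84.
spatial_ref = item.srs(srs_epsg=4326)
if spatial_ref is not None:
    print(spatial_ref.GetAttrValue('GEOGCS', 0))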
def parseValues(self, sitecode, variable, start=None, end=None):
    data = self.getValuesObject(sitecode, variable,
                                start.strftime("%Y-%m-%d"),
                                end.strftime("%Y-%m-%d"))
    valuesList = []
    if data is not None:
        for values in data[0].values[0].value:
            # values_list = [[date1, value1], [date2, value2]]
            valuesList.append([values._dateTime, values.value])
    else:
        elog.debug("data is None")
        elog.error("Failed to retrieve data")
    return valuesList
def setValuesBySlice(self, values, time_index_slice=(None, None, None), geometry_index_slice=(None, None, None)):
    """
    Sets data values for the component.
    :param values: values to set
    :param time_index_slice: tuple representing the start, stop, and step range of the date times
    :param geometry_index_slice: tuple representing the start, stop, and step range of the geometries
    :return: True if successful, otherwise False
    """
    if len(self.__values2) == 0 or len(self.__times2) == 0:
        elog.critical('Error ExchangeItem.setValuesBySlice: Exchange item values and/or times arrays were not set properly')
        sPrint('Exchange item values and/or times arrays were not set properly. Make sure "initializeDatesValues" is being called during/after component initialization.', MessageType.CRITICAL)
        return False

    try:
        # get index ranges
        tb, te, tstep = time_index_slice
        gb, ge, gstep = geometry_index_slice

        # set initial values for missing slice indices
        tb = 0 if tb is None else tb
        gb = 0 if gb is None else gb
        te = len(self.__times2) if te is None else te
        ge = len(self.__geoms2) if ge is None else ge
        tstep = 1 if tstep is None else tstep
        gstep = 1 if gstep is None else gstep

        # convert the values into a numpy array if they aren't already
        if not isinstance(values, numpy.ndarray):
            values = numpy.array(values)

        # reshape the input array to fit the desired slicing
        target_shape = ((te - tb) / tstep, (ge - gb) / gstep)
        values = values.reshape(target_shape)

        # set the values[times][geoms]
        self.__values2[tb:te:tstep, gb:ge:gstep] = values
    except Exception as e:
        elog.error('Error ExchangeItem.setValuesBySlice: %s' % e)
        sPrint('Error setting values for times %s, geometries %s: %s' % (str(time_index_slice), str(geometry_index_slice), e), MessageType.ERROR)
        return False
    return True
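# Self-contained sketch of the reshape-then-assign logic used by
# setValuesBySlice; array sizes are illustrative. The flat input is reshaped
# to the (times, geometries) shape implied by the two slice tuples.
import numpy
values2 = numpy.zeros((4, 6))                # times x geometries
tb, te, tstep = (0, 4, 2)                    # time steps 0 and 2
gb, ge, gstep = (0, 6, 3)                    # geometries 0 and 3
new_vals = numpy.array([1.0, 2.0, 3.0, 4.0])
new_vals = new_vals.reshape(((te - tb) // tstep, (ge - gb) // gstep))
values2[tb:te:tstep, gb:ge:gstep] = new_vals  # writes a 2x2 block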
def addGeometry(self, geometry):
    try:
        if isinstance(geometry, Geometry2):
            # save the geometry on the exchange item
            self.__geoms2.append(geometry)
            return 1
        else:
            elog.error('Attempt to add unsupported geometry type: %s' % type(geometry))
            sPrint('Attempt to add unsupported geometry type: %s' % type(geometry), MessageType.ERROR)
            return 0
    except Exception as e:
        elog.error('Encountered an error while adding geometry: %s' % e)
        sPrint('Encountered an error while adding geometry: %s' % e, MessageType.ERROR)
        return 0
def on_close(self, event):
    dial = wx.MessageDialog(None, 'Are you sure you want to quit?', 'Question',
                            wx.YES_NO | wx.NO_DEFAULT | wx.ICON_QUESTION)
    dial.SetYesNoLabels(yes="Quit", no="Cancel")

    if event is None or dial.ShowModal() == wx.ID_YES:
        # kill multiprocessing
        e = Engine()
        msg = e.close()
        elog.debug('Closing Engine Processes: %s' % msg)

        # kill all threads
        threads = {t.name: t for t in threading.enumerate()}
        mainthread = threads.pop('MainThread')
        elog.debug('Closing EMIT Threads: %s' % msg)

        non_daemon = []
        for t in threads.itervalues():
            # daemon threads will not block shutdown, so they can be skipped
            if t.isDaemon():
                elog.debug('%s daemon=%s' % (t.name, t.isDaemon()))
            else:
                # add this thread to the non-daemon list
                non_daemon.append(t)

        for t in non_daemon:
            elog.warning('%s is not a daemon thread and may cause problems while shutting down' % t.name)
            t.join(1)

        # determine if there are any non-daemon threads that are still alive
        non_daemon_and_alive = []
        for t in threads.itervalues():
            if not t.isDaemon() and t.isAlive():
                non_daemon_and_alive.append(t)

        # attempt to stop non-daemon threads
        try:
            for t in non_daemon_and_alive:
                t._Thread__stop()
        except Exception as e:
            elog.error('Error encountered closing thread %s: %s' % (t.name, e))

        # close the main thread, exit the wx main loop, and wake the main thread
        self.Destroy()
        wx.GetApp().ExitMainLoop()
        wx.WakeUpMainThread()
def transform(temporal_map, source_values):
    """
    Transforms source data into the target data array using a temporal map
    :param temporal_map: temporal mapping list (this can be a numpy array or a list)
    :param source_values: numpy array of values
    :return: numpy array of target values
    """
    if type(source_values) == list:
        source_values = numpy.array(source_values)

    if type(source_values) != numpy.ndarray:
        raise ValueError('Invalid source_values type for time mapping')

    try:
        # transform the datavalues from source to target using the temporal map
        return source_values[temporal_map]
    except IndexError as e:
        elog.error('IndexError encountered when performing temporal mapping: %s' % e)
        sPrint('IndexError encountered when performing temporal mapping: %s. Cannot continue with simulation.' % e, MessageType.ERROR)
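# Self-contained sketch of the temporal mapping that transform() performs:
# the map holds one source index per target time step, and numpy fancy
# indexing gathers source values onto the target timeline. Data values are
# illustrative.
import numpy
source_values = numpy.array([10.0, 20.0, 30.0])  # values at source time steps
temporal_map = numpy.array([0, 0, 1, 1, 2, 2])   # target step -> source index
target_values = source_values[temporal_map]      # [10. 10. 20. 20. 30. 30.]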
def connect(sessionFactory):
    driver = sessionFactory.engine.url.drivername

    if 'sqlite' in driver:
        return sqlite(sessionFactory)

    # todo: implement MsSQL in dbapi_v2
    elif 'mssql' in driver:
        elog.error('MsSQL not supported yet')
        return None

    # todo: implement postgres in dbapi_v2
    elif 'postgresql' in driver:
        return postgres(sessionFactory)
        # elog.error('PostgreSQL not supported yet')
        # return None

    # todo: implement mysql in dbapi_v2
    elif 'mysql' in driver:
        elog.error('MySQL not supported yet')
        return None
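# Hypothetical usage sketch for connect(), assuming `session_factory` was
# produced by dbconnection.createConnection. Only the sqlite and postgresql
# drivers currently return an api instance; others log an error and yield None.
db = connect(session_factory)
if db is None:
    elog.error('unsupported driver: %s' % session_factory.engine.url.drivername)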
def remove_link(self, link_obj):
    dlg = wx.MessageDialog(None,
                           'You are about to remove all data mappings that are associated with this link. Are you sure you want to perform this action?',
                           'Question',
                           wx.YES_NO | wx.YES_DEFAULT | wx.ICON_WARNING)

    if dlg.ShowModal() != wx.ID_NO:
        # remove the link entry in self.links
        link = self.links.pop(link_obj)

        # remove the link from the cmd
        from_id = link[0].ID
        to_id = link[1].ID

        # get the link ids and remove all links
        links = engine.getLinksBtwnModels(from_id, to_id)
        for link in links:
            success = engine.removeLinkById(link['id'])
            if not success:
                elog.error('ERROR|Could not remove link: %s' % link['id'])
                sPrint('ERROR|Could not remove link: %s' % link['id'], MessageType.ERROR)

        # do this a second time to remove bidirectional links
        links = engine.getLinksBtwnModels(to_id, from_id)
        for link in links:
            success = engine.removeLinkById(link['id'])
            if not success:
                elog.error('ERROR|Could not remove link: %s' % link['id'])
                sPrint('ERROR|Could not remove link: %s' % link['id'], MessageType.ERROR)

        self.remove_link_image(link_object=link_obj)
        self.remove_link_object_by_id(from_id, to_id)
def update_ei_table(self, type=stdlib.ExchangeItemType.INPUT):
    # get the selected item (either input or output)
    if type == stdlib.ExchangeItemType.INPUT:
        item = self.get_selected_input_exchange_item()
        data = self.raw_input_data
    else:
        item = self.get_selected_output_exchange_item()
        data = self.raw_output_data

    # get the metadata for this exchange item
    raw_data = self.get_item_in_raw_data(data, item.keys()[0])

    # populate the exchange item table
    if raw_data:
        self.edit_grid(type.lower(), 1, 1, item.keys()[0])
        self.edit_grid(type.lower(), 2, 1, item.values()[0].GetGeometryName())
        self.edit_grid(type.lower(), 3, 1, raw_data["geometry"]["count"])
        self.edit_grid(type.lower(), 4, 1, item.values()[0].GetCoordinateDimension())
        self.edit_grid(type.lower(), 5, 1, raw_data["geometry"]["extent"])
    else:
        msg = 'Failed to populate exchange item metadata'
        elog.error(msg)
        sPrint(msg, MessageType.ERROR)
def __init__(self, config_params):
    """
    Initialization that will occur when loaded into a configuration
    """
    super(weap, self).__init__(config_params)

    sPrint('WEAP Model - Begin Component Initialization')

    # open WEAP via COM
    try:
        self.weap_model = com.Dispatch("WEAP.WEAPApplication")
        self.weap_model.Visible = 0
    except Exception:
        msg = 'Failed to load the WEAP application. Make sure that WEAP is installed and running correctly on your machine before proceeding'
        elog.error(msg)
        raise Exception(msg)

    sPrint('..parsing model inputs')
    model_inputs = self.get_model_inputs(config_params)

    # move the input weap dir into the weap AreasDirectory path
    area_path = model_inputs['area_path']
    active_area = model_inputs['active_area']
    destPath = os.path.join(self.weap_model.AreasDirectory, active_area)

    sPrint('..moving model files into the weap working directory')
    if os.path.exists(destPath):
        shutil.rmtree(destPath)
    os.mkdir(destPath)

    def copytree(src, dst):
        for item in os.listdir(src):
            s = os.path.join(src, item)
            d = os.path.join(dst, item)
            if os.path.isdir(s):
                shutil.copytree(s, d)
            else:
                shutil.copy2(s, d)

    copytree(area_path, destPath)

    # close and restart the weap model for the changes to take effect
    del self.weap_model
    self.weap_model = com.Dispatch("WEAP.WEAPApplication")

    # set the active area for the model
    self.weap_model.ActiveArea = active_area

    # get start, end, and timestep info from the weap instance
    self.start_year = self.weap_model.BaseYear
    self.end_year = self.weap_model.EndYear
    self.first_step = self.weap_model.FirstTimestep
    self.num_steps = self.weap_model.NumTimeSteps

    sPrint('..simulation timespan %d - %d' % (self.start_year, self.end_year))
    sPrint('..first timestep = %d, total timesteps per year %d' % (self.first_step, self.num_steps))

    # collect all the branch names
    sPrint('..collecting variable names')
    self.variables = {}
    for b in self.weap_model.Branches:
        for v in b.Variables:
            if v.IsResultVariable:
                v_name = v.Name
                if v_name in self.variables:
                    self.variables[v_name].append(b.FullName)
                else:
                    self.variables[v_name] = [b.FullName]

    # build inputs and outputs
    # sPrint('..building exchange items')
    # io = mdl.build_exchange_items_from_config(config_params)

    # set inputs and outputs
    # self.inputs(value=io[stdlib.ExchangeItemType.INPUT])
    # self.outputs(value=io[stdlib.ExchangeItemType.OUTPUT])

    # read input parameters
    # inputs = config_params['model inputs'][0]
    # sPrint('..reading input parameters')
    # sPrint('..building input/output geometries')

    sPrint('..component initialization completed successfully')
def __init__(self, config_params):
    """
    Initialization that will occur when loaded into a configuration
    """
    super(topmodel, self).__init__(config_params)

    if LooseVersion(np.__version__) < LooseVersion('1.9.0'):
        elog.error('Could not load TOPMODEL, NumPy version 1.9.0 or greater required')
        sPrint('Could not load TOPMODEL, NumPy version 1.9.0 or greater required', MessageType.ERROR)
        raise Exception('Could not load TOPMODEL, NumPy version 1.9.0 or greater required')

    sPrint('Begin Component Initialization')

    # build inputs and outputs
    sPrint('Building exchange items')
    io = mdl.build_exchange_items_from_config(config_params)

    # set inputs and outputs
    self.inputs(value=io[stdlib.ExchangeItemType.INPUT])
    self.outputs(value=io[stdlib.ExchangeItemType.OUTPUT])

    # read model input parameters
    sPrint('Reading input parameters')
    self.topo_input = config_params['ti']
    self.fac_input = config_params['fac']
    self.c = config_params['m']
    self.Tmax = config_params["tmax"]
    self.R = config_params["r"]
    self.interception = config_params["interception"]
    self.ti = []
    self.freq = []

    # read topographic input file
    sPrint('Reading topographic input data')
    self.read_topo_input()

    sPrint('Building input/output geometries')
    self.ti_geoms = None
    self.output_soil_moisture_geoms = None
    self.calc_ti_geometries()

    # set precipitation geometries
    sPrint('Setting excess precipitation geometries')
    self.outputs()['excess'].addGeometries2(self.ti_geoms)

    # set saturation geometries
    # elog.info('Setting soil saturation geometries')
    # self.outputs()['soil moisture'].addGeometries2(self.ti_geoms)

    # calculate saturation deficit
    sPrint('Calculating initial saturation deficit')
    TI_freq = [x * y for x, y in zip(self.ti, self.freq)]
    self.lamda_average = sum(TI_freq) / sum(self.freq)

    # catchment average saturation deficit (S_bar)
    self.s_average = (-1.) * self.c * ((math.log10(self.R / self.Tmax)) + self.lamda_average)

    sPrint('Component Initialization Completed Successfully')
def insert_timeseries_result_values_bulk(self, ResultIDs=1, DataValues=[],
                                         ValueDateTimes=[], QualityCodeCV='unknown',
                                         TimeAggregationIntervalUnitsID=1,
                                         TimeAggregationInterval=1, CensorCodeCV='nc',
                                         ValueDateTimeUTCOffset=-6):
    """
    Performs a bulk insert of time series result values

    Args:
        ResultIDs: int
        DataValues: [float]
        ValueDateTimes: [datetime]
        QualityCodeCV: str
        TimeAggregationIntervalUnitsID: int
        TimeAggregationInterval: int
        CensorCodeCV: str
        ValueDateTimeUTCOffset: int

    Returns:
        1 if successful, otherwise 0
    """
    if ResultIDs is None:
        elog.error('Result ID cannot be None')
        return False

    if len(DataValues) != len(ValueDateTimes):
        elog.error('Length of Values and Dates must be equal')
        return False

    # convert datetimes into an apsw-accepted format
    value_date_times = numpy.array([str(d) for d in ValueDateTimes])

    # isolate all of the finite values (i.e. nan would violate the not-null constraint)
    data_exists = numpy.isfinite(DataValues)
    value_date_times = value_date_times[data_exists]
    data = numpy.array(DataValues)[data_exists]

    # convert parameter values into arrays
    valCount = len(data)
    censor_codes = [CensorCodeCV] * valCount
    quality_codes = [QualityCodeCV] * valCount
    time_unit_ids = [TimeAggregationIntervalUnitsID] * valCount
    time_intervals = [TimeAggregationInterval] * valCount
    time_offsets = [ValueDateTimeUTCOffset] * valCount
    result_ids = [ResultIDs] * valCount

    # get the last record index
    nextID = self.get_next_insert_id(models.TimeSeriesResultValues)

    # build array of value ids
    valueIDs = range(nextID, nextID + valCount, 1)

    # prepare all data for inserting
    vals = zip(valueIDs, result_ids, data, value_date_times, time_offsets,
               censor_codes, quality_codes, time_intervals, time_unit_ids)

    sPrint('Begin inserting %d values' % len(vals), MessageType.DEBUG)

    # insert values in chunks of 10,000
    chunk_size = 10000  # number of records to insert at a time
    data_inserted = False
    for i in range(0, len(vals), chunk_size):
        # get the start and end indices for the bulk insert
        sidx = i
        eidx = i + chunk_size if (i + chunk_size) < len(vals) else len(vals)

        # perform the executemany
        self.cursor.executemany('INSERT INTO TimeSeriesResultValues VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)',
                                vals[sidx:eidx])

        # print the percent complete
        percent_complete = float(eidx) / float(len(vals)) * 100
        sPrint('.. inserted %d records, %3.1f %% complete' % ((eidx - sidx), percent_complete), MessageType.DEBUG)

        # set data_inserted to True so the transaction is committed to the db
        data_inserted = True

    # only attempt a commit if data was inserted
    if data_inserted:
        self.conn.commit()

    return 1
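# Hypothetical usage sketch for insert_timeseries_result_values_bulk,
# assuming `db` is the sqlite api instance and result id 1 already exists.
# NaN values are dropped before insert, so dates and values shrink together.
import datetime
dates = [datetime.datetime(2015, 1, 1) + datetime.timedelta(days=i) for i in range(3)]
ok = db.insert_timeseries_result_values_bulk(ResultIDs=1,
                                             DataValues=[1.0, float('nan'), 3.0],
                                             ValueDateTimes=dates)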
def create_simulation(self, coupledSimulationName, user_obj, action_date,
                      action_utc_offset, ei, simulation_start, simulation_end,
                      timestep_value, timestep_unit, description, name):
    """
    Inserts a simulation record into the database

    Args:
        coupledSimulationName: The name of the coupled model simulation
        user_obj: object containing the user
        ei: output exchange item (or list of them) to save
        simulation_start:
        simulation_end:
        timestep_value:
        timestep_unit:
        description:
        name:

    Returns:
    """
    bench_insert_fa = 0

    # todo: handle multiple affiliations
    sPrint('inserting person', MessageType.DEBUG)
    person = self.createPerson(user_obj)

    sPrint('inserting organization', MessageType.DEBUG)
    organization = self.createOrganization(user_obj)

    sPrint('inserting affiliation', MessageType.DEBUG)
    affiliation = self.createAffiliation(organization.OrganizationID, person.PersonID, user_obj)

    # get the timestep unit id
    sPrint('inserting time step unit', MessageType.DEBUG)
    timestepunit = self.createTimeStepUnit(timestep_unit, timestep_unit)

    # insert method
    sPrint('inserting method', MessageType.DEBUG)
    method = self.createMethod(organization)

    sPrint('inserting action', MessageType.DEBUG)
    actions = self.read.getActions(type='Simulation')
    actionid = None
    for action in actions:
        if abs(action.BeginDateTime - action_date) < datetime.timedelta(seconds=1) and \
                action.BeginDateTimeUTCOffset == action_utc_offset:
            actionid = action.ActionID
            break

    if actionid is None:
        actions = self.read.getActions()
        actionid = int(actions[-1].ActionID) + 1 if len(actions) > 0 else 1
        self.cursor.execute('INSERT INTO Actions VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)',
                            [actionid, 'Simulation', method.MethodID,
                             action_date.strftime('%Y-%m-%d %H:%M:%S'),
                             action_utc_offset, None, None, None, None])
        self.conn.commit()

    sPrint('inserting actionby', MessageType.DEBUG)
    ab = self.cursor.execute('SELECT * FROM ActionBy').fetchall()
    bridgeid = int(ab[-1][0]) + 1 if len(ab) > 0 else 1
    self.cursor.execute('INSERT INTO ActionBy VALUES (?,?,?,?,?)',
                        [bridgeid, actionid, affiliation[0].AffiliationID, True, None])
    self.conn.commit()

    # create processing level
    sPrint('inserting processing levels', MessageType.DEBUG)
    processinglevels = self.read.getProcessingLevels(codes=[2])
    if not processinglevels:
        pl = models.ProcessingLevels()
        pl.ProcessingLevelCode = 2
        pl.Definition = 'Derived Product'
        pl.Explanation = 'Derived products require scientific and technical ' \
                         'interpretation and include multiple-sensor data. An ' \
                         'example might be basin average precipitation derived ' \
                         'from rain gages using an interpolation procedure.'
        self.write.createProcessingLevel(pl)
        processinglevels = self.read.getProcessingLevels(codes=[2])
    processinglevel = processinglevels[0]

    # create dataset
    sPrint('inserting dataset', MessageType.DEBUG)
    ds = models.DataSets()
    ds.DataSetAbstract = description
    ds.DataSetTitle = 'Input for Simulation : %s' % name
    ds.DataSetTypeCV = 'Simulation Input'
    ds.DataSetUUID = uuid.uuid4().hex  # this must be unique, so using uuid
    ds.DataSetCode = 'Input_%s_%s' % (name, ds.DataSetUUID)
    self.write.createDataset(ds)
    datasets = self.read.getDataSets(codes=[ds.DataSetCode])
    dataset = datasets[0]

    # make sure the exchange item is represented as a list
    if not hasattr(ei, '__len__'):
        ei = [ei]

    # loop over output exchange items
    for e in ei:
        geometries = numpy.array(e.getGeometries2())
        dates = numpy.array(e.getDates2())

        # create variable
        sPrint('inserting variables', MessageType.DEBUG)
        variables = self.read.getVariables(codes=[e.variable().VariableNameCV()])
        if not variables:
            v = models.Variables()
            v.VariableCode = e.variable().VariableNameCV()
            v.VariableNameCV = e.variable().VariableDefinition()
            v.VariableTypeCV = 'unknown'
            v.NoDataValue = -999
            self.write.createVariable(v)
            variables = self.read.getVariables(codes=[e.variable().VariableNameCV()])
        variable = variables[0]

        # create units
        sPrint('inserting units', MessageType.DEBUG)
        units = self.read.getUnits(name=e.unit().UnitName())
        if not units:
            u = models.Units()
            u.UnitsAbbreviation = e.unit().UnitAbbreviation()
            u.UnitsName = e.unit().UnitName()
            u.UnitsTypeCV = e.unit().UnitTypeCV()
            self.write.createUnit(u)
            units = self.read.getUnits(name=e.unit().UnitName())
        unit = units[0]

        # create spatial reference
        sPrint('inserting srs', MessageType.DEBUG)
        srs = e.srs()
        refcode = "%s:%s" % (srs.GetAttrValue("AUTHORITY", 0), srs.GetAttrValue("AUTHORITY", 1))
        spatialref = self.read.getSpatialReferences(srsCodes=[refcode])
        if not spatialref:
            if srs == "":
                sr = models.SpatialReferences()
                sr.SRSCode = refcode
                sr.SRSName = srs.GetAttrValue("GEOGCS", 0)
                sr.SRSDescription = "%s|%s|%s" % (srs.GetAttrValue("PROJCS", 0),
                                                  srs.GetAttrValue("GEOGCS", 0),
                                                  srs.GetAttrValue("DATUM", 0))
                self.write.createSpatialReference(sr)
                spatialref = self.read.getSpatialReferences(srsCodes=[refcode])
            else:
                sPrint("Could not set spatial reference. Make sure GDAL_DATA path is set in system", MessageType.WARNING)

        # todo: insert sampling features bulk
        st = time.time()
        samplingfeaturesids = []
        for i in range(0, len(geometries)):
            # create sampling features
            geom_wkt = geometries[i].ExportToWkt()
            geom_type = geometries[i].type
            samplingFeature = self.insert_sampling_feature(type='site',
                                                           geometryType=geom_type,
                                                           WKTgeometry=geom_wkt)
            samplingfeaturesids.append(samplingFeature.SamplingFeatureID)
        bench_insert_sf = (time.time() - st)
        sPrint('inserting sampling features...%3.5f sec' % bench_insert_sf, MessageType.DEBUG)

        st = time.time()
        action_ids = [actionid] * len(samplingfeaturesids)
        featureactionids = self.insert_feature_actions_bulk(samplingfeaturesids, action_ids)
        bench_insert_fa += (time.time() - st)
        sPrint('inserting feature actions...%3.5f sec' % bench_insert_fa, MessageType.DEBUG)

        st = time.time()
        resultids = self.insert_results_bulk(FeatureActionIDs=featureactionids,
                                             ResultTypeCV='time series',
                                             VariableID=variable.VariableID,
                                             UnitsID=unit.UnitsID,
                                             ValueCount=len(dates),
                                             ProcessingLevelID=processinglevel.ProcessingLevelID,
                                             SampledMediumCV='unknown')
        sPrint('inserting results...%3.5f sec' % (time.time() - st), MessageType.DEBUG)

        geom_index = 0
        for resultid in resultids:
            # create time series result
            st = time.time()
            vals = [None] * 11
            vals[0] = resultid    # insert result id
            vals[-1] = 'Unknown'  # insert aggregation statistic
            self.cursor.execute('INSERT INTO TimeSeriesResults VALUES (?,?,?,?,?,?,?,?,?,?,?)', vals)
            self.conn.commit()
            sPrint('inserting time series results...%3.5f sec' % (time.time() - st), MessageType.DEBUG)

            # get datavalues corresponding to this resultid (i.e. geometry)
            datavalues = e.getValues2(geom_index, geom_index)

            # increment to the next geometry
            geom_index += 1

            # flatten row-wise, [t1g, t2g, ..., tng]
            values = datavalues.flatten(order='C')

            # get datetime column in the array [t1, ..., tn]
            valuedates = dates[:, 1]

            st = time.time()
            try:
                self.insert_timeseries_result_values_bulk(ResultIDs=resultid,
                                                          TimeAggregationInterval=timestep_value,
                                                          TimeAggregationIntervalUnitsID=timestepunit.UnitsID,
                                                          DataValues=values,
                                                          ValueDateTimes=valuedates,
                                                          ValueDateTimeUTCOffset=-6,
                                                          CensorCodeCV='nc',
                                                          QualityCodeCV='unknown')
                bulk = time.time() - st
                sPrint('insert time series result values (%d records)...%3.5f sec' % (len(values), bulk), MessageType.DEBUG)
            except Exception as e:
                msg = 'Encountered an error while inserting timeseries result values: %s' % e
                elog.error(msg)
                sPrint(msg, MessageType.ERROR)
                return None
def insert_results_bulk(self, FeatureActionIDs=[], ResultTypeCV=None, VariableID=None,
                        UnitsID=None, TaxonomicClassfierID=None, ProcessingLevelID=None,
                        ResultDateTime=None, ResultDateTimeUTCOffset=None,
                        ValidDateTime=None, ValidDateTimeUTCOffset=None, StatusCV=None,
                        SampledMediumCV=None, ValueCount=None):
    """
    Performs a bulk insert of results
    :return: list of result ids

    ResultID
    ResultUUID
    FeatureActionID
    ResultTypeCV
    VariableID
    UnitsID
    TaxonomicClassfierID
    ProcessingLevelID
    ResultDateTime
    ResultDateTimeUTCOffset
    ValidDateTime
    ValidDateTimeUTCOffset
    StatusCV
    SampledMediumCV
    ValueCount
    """
    if VariableID is None or UnitsID is None or ProcessingLevelID is None or ValueCount is None \
            or SampledMediumCV is None or FeatureActionIDs == []:
        elog.error('Failed to bulk insert Results. VariableID, UnitsID, ProcessingLevelID, ValueCount, SampledMediumCV, and FeatureActionIDs are required fields.')
        return False

    valCount = len(FeatureActionIDs)

    # get the last record index
    res = self.cursor.execute('SELECT ResultID FROM Results ORDER BY ResultID DESC')
    startID = res.fetchone() or (-1,)
    startID = startID[0] + 1  # increment the last id

    # generate UUIDs for each Result record
    uuids = [uuid.uuid4().hex for i in range(valCount)]
    ResultIDs = range(startID, startID + valCount, 1)

    # convert parameter values into lists
    ResultTypeCV = [ResultTypeCV] * valCount
    VariableID = [VariableID] * valCount
    UnitsID = [UnitsID] * valCount
    TaxonomicClassfierID = [TaxonomicClassfierID] * valCount
    ProcessingLevelID = [ProcessingLevelID] * valCount
    ResultDateTime = [ResultDateTime] * valCount
    ResultDateTimeUTCOffset = [ResultDateTimeUTCOffset] * valCount
    ValidDateTime = [ValidDateTime] * valCount
    ValidDateTimeUTCOffset = [ValidDateTimeUTCOffset] * valCount
    StatusCV = [StatusCV] * valCount
    SampledMediumCV = [SampledMediumCV] * valCount
    ValueCount = [ValueCount] * valCount

    # zip the values up
    vals = zip(ResultIDs, uuids, FeatureActionIDs, ResultTypeCV, VariableID, UnitsID,
               TaxonomicClassfierID, ProcessingLevelID, ResultDateTime,
               ResultDateTimeUTCOffset, ValidDateTime, ValidDateTimeUTCOffset,
               StatusCV, SampledMediumCV, ValueCount)

    self.cursor.executemany('INSERT INTO Results VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)', vals)
    self.conn.commit()

    # return the result ids
    return ResultIDs
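# Hypothetical usage sketch for insert_results_bulk, assuming `db` is the
# sqlite api instance and that the referenced variable, unit, and processing
# level records already exist; all ids are illustrative.
result_ids = db.insert_results_bulk(FeatureActionIDs=[1, 2, 3],
                                    ResultTypeCV='time series',
                                    VariableID=1,
                                    UnitsID=1,
                                    ProcessingLevelID=1,
                                    ValueCount=365,
                                    SampledMediumCV='unknown')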
def on_save(self, event):
    """
    Saves all link objects to the engine and then closes
    the link creation window
    """
    # delete the links that are in the delete queue
    for link_id in self.links_to_delete:
        engine.removeLinkById(link_id)
        if link_id in self.__links:
            self.__links.pop(link_id)

    warnings = []
    errors = []
    for l in self.__links.values():
        if l.iei == '---' or l.oei == '---':
            warnings.append(l)
        else:
            try:
                kwargs = dict(source_id=l.source_id,
                              source_item=l.oei,
                              target_id=l.target_id,
                              target_item=l.iei,
                              spatial_interpolation=l.spatial_interpolation,
                              temporal_interpolation=l.temporal_interpolation,
                              uid=l.uid)

                # remove the existing link, if there is one
                engine.removeLinkById(l.uid)

                # add a new link inside the engine
                link_id = engine.addLink(**kwargs)

                if link_id:
                    l.saved = True
                    wx.PostEvent(self, LinkUpdatedEvent())
            except Exception:
                elog.error('ERROR|Could not save link: %s' % l.name())
                errors.append(l)

    if len(warnings) > 0:
        warning_links = '\n'.join(l.name() for l in warnings)
        warning = wx.MessageDialog(self,
                                   'Could not save the following links because they lack either input or output items: \n\n'
                                   + warning_links +
                                   '\n\nWould you like to discard these partial link objects?',
                                   'Question',
                                   wx.YES_NO | wx.NO_DEFAULT | wx.ICON_WARNING)

        if warning.ShowModal() == wx.ID_YES:
            self.remove_warning_links(warnings=warnings)
            self.Destroy()
        else:
            return

    result = self.find_link_direction()
    if result:
        # todo: these images need to come from config instead of being hardcoded
        self.replace_canvas_image(image="rightArrowBlue60.png", one_way=True)
    elif result is False:
        self.replace_canvas_image(image="multiArrow.png")
    else:
        self.replace_canvas_image(image="questionMark.png")
    self.Destroy()
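# --- event sketch (illustrative only) ----------------------------------------
# LinkUpdatedEvent posted above is a custom wx event defined elsewhere in the
# project. One conventional way to declare such an event in wxPython (names
# prefixed here to avoid clashing with the real class):
import wx.lib.newevent

_ExampleLinkUpdatedEvent, _EVT_EXAMPLE_LINK_UPDATED = wx.lib.newevent.NewEvent()

# a listener window could then bind and receive it:
#   self.Bind(_EVT_EXAMPLE_LINK_UPDATED, self.on_link_updated)
#   wx.PostEvent(self, _ExampleLinkUpdatedEvent())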
def save(obj, datasave, modelids):
    """
    Saves simulation calculations to an ODM2 database

    Args:
        obj: Engine object
        datasave: datasave object that contains database and user info
        modelids: list of model ids to save data for

    Returns:
        None on success, or 0 if a database session is not available
    """
    if datasave.session is not None:
        session = datasave.session
    else:
        msg = "Could not connect to database for results saving: %s" % datasave.database_args["address"]
        elog.error(msg)
        sPrint(msg, MessageType.ERROR)
        return 0

    db = dbv2.connect(sessionFactory=session)

    sPrint("Saving Simulation Results...")
    st = time.time()

    action_date = datetime.datetime.now()
    action_utc_offset = int((action_date - datetime.datetime.utcnow()).total_seconds() / 3600)

    # insert data for each model
    for modelid in modelids:

        # get the current model instance
        model_obj = obj.get_model_by_id(modelid)
        model_inst = obj.get_model_object(modelid).instance()
        model_name = model_inst.name()

        # get the output exchange items to save for this model
        oeis = datasave.datasets[model_name]
        items = []
        for oei in oeis:
            items.append(model_inst.outputs()[oei])

        sPrint("..found %d items to save for model %s" % (len(items), model_name), MessageType.INFO)

        if len(items) > 0:
            # create a simulation in the database
            id = db.create_simulation(
                coupledSimulationName=datasave.simulationName,
                user_obj=datasave.user,
                action_date=action_date,
                action_utc_offset=action_utc_offset,
                ei=items,
                simulation_start=model_inst.simulation_start(),
                simulation_end=model_inst.simulation_end(),
                timestep_value=model_inst.time_step(),
                timestep_unit="seconds",
                description=model_inst.description(),
                name=model_inst.name(),
            )

            if id is None:
                sPrint("Failed to save results for: %s " % (model_name), MessageType.ERROR)

    sPrint("Saving Complete, elapsed time = %3.5f seconds" % (time.time() - st), MessageType.INFO)
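# --- datasave sketch (illustrative only) --------------------------------------
# save() reads a handful of attributes off the `datasave` argument; this
# hypothetical stand-in shows the expected shape. Attribute names are taken
# from the accesses in save() above; values are placeholders.
class _ExampleDataSave(object):
    def __init__(self, session, user):
        self.session = session                   # open database session factory
        self.simulationName = 'example coupled run'
        self.user = user                         # user/affiliation object
        self.database_args = {'address': 'localhost'}
        # maps model name -> output exchange item names to persist
        self.datasets = {'topmodel': ['excess']}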
def validate_config_ini(ini_path):
    # Deprecated. Use utilities.models.validate_json_model
    try:
        cparser = ConfigParser.ConfigParser(None, multidict)

        # parse the ini
        cparser.read(ini_path)

        # get the ini sections from the parser
        parsed_sections = cparser.sections()

        # if no sections are found, then the file format must be incorrect
        if len(parsed_sections) == 0:
            raise Exception('> [Exception] Invalid model configuration file')

        # load lookup tables
        var_cv = os.path.join(io.getAppDataDir(), 'dat/var_cv.dat')
        unit_cv = os.path.join(io.getAppDataDir(), 'dat/units_cv.dat')
        var = pickle.load(open(var_cv, 'rb'))
        unit = pickle.load(open(unit_cv, 'rb'))

        # check to see if the 'ignorecv' option has been provided
        ignorecv = False
        for p in parsed_sections:
            if p.split('^')[0] == 'options':
                if cparser.has_option(p, 'ignorecv'):
                    ignorecv = int(cparser.get(p, 'ignorecv'))
                break

        # validate
        for section in parsed_sections:

            # get ini options
            options = cparser.options(section)

            if not ignorecv:
                # validate units and variables parameters
                if section.split('_')[0] == 'output' or \
                        section.split('_')[0] == 'input':
                    # check that variable and unit exist
                    if 'variable_name_cv' not in options or \
                            'unit_type_cv' not in options:
                        raise Exception('Inputs and Outputs must contain '
                                        '"variable_name_cv" and '
                                        '"unit_type_cv" parameters')

            # check each option individually
            for option in options:
                val = cparser.get(section, option)

                # validate date format
                if option == 'simulation_start' or option == 'simulation_end':
                    try:
                        datetime.datetime.strptime(val, getattr(ini_types, option))
                    except ValueError:
                        raise ValueError('Incorrect data format, should be '
                                         + getattr(ini_types, option))
                else:
                    if not ignorecv:
                        # check variable cv (i.e. lookup table)
                        if option == 'variable_name_cv':
                            if val not in var:
                                raise Exception(val + ' is not a valid controlled vocabulary term')
                        # check unit type cv (i.e. lookup table)
                        if option == 'unit_type_cv':
                            if val not in unit:
                                raise Exception(val + ' is not a valid controlled vocabulary term')

            if section.split('^')[0] == 'software':
                # check that the software filepath is valid
                relpath = cparser.get(section, 'filepath')
                basedir = os.path.realpath(os.path.dirname(ini_path))
                abspath = os.path.abspath(os.path.join(basedir, relpath))

                # add the base path to the sys.path
                sys.path.append(basedir)

                if not os.path.isfile(abspath):
                    raise Exception(abspath + ' is not a valid file')

                # check that the software class name exists
                try:
                    classname = cparser.get(section, 'classname')
                    filename = os.path.basename(abspath)
                    module = imp.load_source(filename.split('.')[0], abspath)
                    m = getattr(module, classname)
                except Exception, e:
                    elog.error('Configuration Parsing Error: ' + str(e))
                    sPrint('Configuration Parsing Error: ' + str(e),
                           MessageType.ERROR)

    except Exception, e:
        elog.error('Configuration Parsing Error: ' + str(e))
        sPrint('Configuration Parsing Error: ' + str(e), MessageType.ERROR)
        return 0
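# --- example configuration (illustrative only) --------------------------------
# A minimal model configuration of the general shape validate_config_ini()
# checks; section and option names come from the validation logic above,
# while the values (and the expected date format) are placeholders.
_example_model_ini = """
[general]
simulation_start = 2014-01-01 00:00:00
simulation_end = 2014-01-10 00:00:00

[input]
variable_name_cv = Precipitation
unit_type_cv = Millimeters per Hour

[output]
variable_name_cv = Streamflow
unit_type_cv = Cubic Meters per Second

[software]
filepath = ./mymodel.py
classname = mymodel

[options]
ignorecv = 1
"""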
def __init__(self, config_params): """ initialization that will occur when loaded into a configuration """ super(topmodel, self).__init__(config_params) if LooseVersion(np.__version__) < LooseVersion("1.9.0"): elog.error("Could not load TOPMODEL, NumPY version 1.9.0 or greater required") raise Exception("Could not load TOPMODEL, NumPY version 1.9.0 or greater required") elog.info("Begin Component Initialization") # build inputs and outputs elog.info("Building exchange items") io = mdl.build_exchange_items_from_config(config_params) # set inputs and outputs self.inputs(value=io[stdlib.ExchangeItemType.INPUT]) self.outputs(value=io[stdlib.ExchangeItemType.OUTPUT]) # model_inputs inputs = config_params["model inputs"][0] # read input parameters elog.info("Reading input parameters") self.topo_input = inputs["ti"] self.fac_input = inputs["fac"] # read model input parameters self.c = float(inputs["m"]) self.Tmax = float(inputs["tmax"]) self.R = float(inputs["r"]) self.interception = float(inputs["interception"]) self.ti = [] self.freq = [] # read topographic input file elog.info("Reading topographic input data") self.read_topo_input() elog.info("Building input/output geometries") self.ti_geoms = None self.output_soil_moisture_geoms = None self.calc_ti_geometries() # set precipitation geometries elog.info("Setting excess precipitation geometries") self.outputs()["excess"].addGeometries2(self.ti_geoms) # set saturation geometries # elog.info('Setting soil saturation geometries') # self.outputs()['soil moisture'].addGeometries2(self.ti_geoms) # ---- calculate saturation deficit elog.info("Calculating initial saturation deficit") TI_freq = [x * y for x, y in zip(self.ti, self.freq)] self.lamda_average = sum(TI_freq) / sum(self.freq) # catchment average saturation deficit(S_bar) self.s_average = (-1.0) * self.c * ((math.log10(self.R / self.Tmax)) + self.lamda_average) elog.info("Component Initialization Completed Successfully")
def create_simulation(self, coupledSimulationName, user_obj, action_date,
                      action_utc_offset, ei, simulation_start,
                      simulation_end, timestep_value, timestep_unit,
                      description, name):
    """
    Inserts a simulation record into the database

    Args:
        coupledSimulationName: The name of the coupled model simulation
        user_obj: object containing the user
        action_date: date of the simulation action
        action_utc_offset: utc offset of the action date, in hours
        ei: output exchange item(s) to store
        simulation_start: simulation start datetime
        simulation_end: simulation end datetime
        timestep_value: timestep value
        timestep_unit: timestep unit name
        description: simulation description
        name: simulation name

    Returns:

    """
    bench_insert_fa = 0

    # todo: handle multiple affiliations
    sPrint('inserting person', MessageType.DEBUG)
    person = self.createPerson(user_obj)

    sPrint('inserting organization', MessageType.DEBUG)
    organization = self.createOrganization(user_obj)

    sPrint('inserting affiliation', MessageType.DEBUG)
    affiliation = self.createAffiliation(organization.OrganizationID,
                                         person.PersonID,
                                         user_obj)

    # get the timestep unit id
    sPrint('inserting time step unit', MessageType.DEBUG)
    timestepunit = self.createTimeStepUnit(timestep_unit, timestep_unit)

    # insert method
    sPrint('inserting method', MessageType.DEBUG)
    method = self.createMethod(organization)

    sPrint('inserting action', MessageType.DEBUG)
    actions = self.read.getActions(type='Simulation')
    actionid = None
    for action in actions:
        if abs(action.BeginDateTime - action_date) < \
                datetime.timedelta(seconds=1) and \
                action.BeginDateTimeUTCOffset == action_utc_offset:
            actionid = action.ActionID
            break
    if actionid is None:
        actions = self.read.getActions()
        actionid = int(actions[-1].ActionID) + 1 if len(actions) > 0 else 1
        self.cursor.execute('INSERT INTO Actions VALUES '
                            '(?, ?, ?, ?, ?, ?, ?, ?, ?)',
                            [actionid, 'Simulation', method.MethodID,
                             action_date.strftime('%Y-%m-%d %H:%M:%S'),
                             action_utc_offset, None, None, None, None])
        self.conn.commit()

    sPrint('inserting actionby', MessageType.DEBUG)
    ab = self.cursor.execute('SELECT * FROM ActionBy').fetchall()
    bridgeid = int(ab[-1][0]) + 1 if len(ab) > 0 else 1
    self.cursor.execute('INSERT INTO ActionBy VALUES (?,?,?,?,?)',
                        [bridgeid, actionid,
                         affiliation[0].AffiliationID, True, None])
    self.conn.commit()

    # create processing level
    sPrint('inserting processing levels', MessageType.DEBUG)
    processinglevels = self.read.getProcessingLevels(codes=[2])
    if not processinglevels:
        pl = models.ProcessingLevels()
        pl.ProcessingLevelCode = 2
        pl.Definition = 'Derived Product'
        pl.Explanation = 'Derived products require scientific and ' \
                         'technical interpretation and include ' \
                         'multiple-sensor data. An example might be basin' \
                         ' average precipitation derived from rain gages ' \
                         'using an interpolation procedure.'
        self.write.createProcessingLevel(pl)
        processinglevels = self.read.getProcessingLevels(codes=[2])
    processinglevel = processinglevels[0]

    # create dataset
    sPrint('inserting dataset', MessageType.DEBUG)
    ds = models.DataSets()
    ds.DataSetAbstract = description
    ds.DataSetTitle = 'Input for Simulation : %s' % name
    ds.DataSetTypeCV = 'Simulation Input'
    ds.DataSetUUID = uuid.uuid4().hex  # this must be unique, so using uuid
    ds.DataSetCode = 'Input_%s_%s' % (name, ds.DataSetUUID)
    self.write.createDataset(ds)
    datasets = self.read.getDataSets(codes=[ds.DataSetCode])
    dataset = datasets[0]

    # make sure the exchange item is represented as a list
    if not hasattr(ei, '__len__'):
        ei = [ei]

    # loop over output exchange items
    for e in ei:
        geometries = numpy.array(e.getGeometries2())
        dates = numpy.array(e.getDates2())

        # create variable
        sPrint('inserting variables', MessageType.DEBUG)
        variables = self.read.getVariables(
            codes=[e.variable().VariableNameCV()])
        if not variables:
            v = models.Variables()
            v.VariableCode = e.variable().VariableNameCV()
            v.VariableNameCV = e.variable().VariableDefinition()
            v.VariableTypeCV = 'unknown'
            v.NoDataValue = -999
            self.write.createVariable(v)
            variables = self.read.getVariables(
                codes=[e.variable().VariableNameCV()])
        variable = variables[0]

        sPrint('inserting units', MessageType.DEBUG)
        units = self.read.getUnits(name=e.unit().UnitName())
        if not units:
            u = models.Units()
            u.UnitsAbbreviation = e.unit().UnitAbbreviation()
            u.UnitsName = e.unit().UnitName()
            u.UnitsTypeCV = e.unit().UnitTypeCV()
            self.write.createUnit(u)
            units = self.read.getUnits(name=e.unit().UnitName())
        unit = units[0]

        # create spatial reference
        sPrint('inserting srs', MessageType.DEBUG)
        srs = e.srs()
        refcode = '%s:%s' % (srs.GetAttrValue('AUTHORITY', 0),
                             srs.GetAttrValue('AUTHORITY', 1))
        spatialref = self.read.getSpatialReferences(srsCodes=[refcode])
        if not spatialref:
            if srs != '':
                sr = models.SpatialReferences()
                sr.SRSCode = refcode
                sr.SRSName = srs.GetAttrValue('GEOGCS', 0)
                sr.SRSDescription = '%s|%s|%s' % \
                                    (srs.GetAttrValue('PROJCS', 0),
                                     srs.GetAttrValue('GEOGCS', 0),
                                     srs.GetAttrValue('DATUM', 0))
                self.write.createSpatialReference(sr)
                spatialref = self.read.getSpatialReferences(srsCodes=[refcode])
            else:
                sPrint('Could not set spatial reference. Make sure the '
                       'GDAL_DATA path is set on the system',
                       MessageType.WARNING)

        # todo: insert sampling features in bulk
        st = time.time()
        samplingfeaturesids = []
        for i in range(0, len(geometries)):
            # create sampling features
            geom_wkt = geometries[i].ExportToWkt()
            geom_type = geometries[i].type
            samplingFeature = self.insert_sampling_feature(
                type='site', geometryType=geom_type, WKTgeometry=geom_wkt)
            samplingfeaturesids.append(samplingFeature.SamplingFeatureID)
        bench_insert_sf = (time.time() - st)
        sPrint('inserting sampling features...%3.5f sec' % bench_insert_sf,
               MessageType.DEBUG)

        st = time.time()
        action_ids = [actionid] * len(samplingfeaturesids)
        featureactionids = self.insert_feature_actions_bulk(
            samplingfeaturesids, action_ids)
        bench_insert_fa += (time.time() - st)
        sPrint('inserting feature actions...%3.5f sec' % bench_insert_fa,
               MessageType.DEBUG)

        st = time.time()
        resultids = self.insert_results_bulk(
            FeatureActionIDs=featureactionids, ResultTypeCV='time series',
            VariableID=variable.VariableID, UnitsID=unit.UnitsID,
            ValueCount=len(dates),
            ProcessingLevelID=processinglevel.ProcessingLevelID,
            SampledMediumCV='unknown')
        sPrint('inserting results...%3.5f sec' % (time.time() - st),
               MessageType.DEBUG)

        geom_index = 0
        for resultid in resultids:
            # create time series result
            st = time.time()
            vals = [None] * 11
            vals[0] = resultid    # insert result id
            vals[-1] = 'Unknown'  # insert aggregation statistic
            self.cursor.execute('INSERT INTO TimeSeriesResults VALUES '
                                '(?,?,?,?,?,?,?,?,?,?,?)', vals)
            self.conn.commit()
            sPrint('inserting time series results...%3.5f sec' %
                   (time.time() - st), MessageType.DEBUG)

            # get datavalues corresponding to this resultid (i.e. geometry)
            datavalues = e.getValues2(geom_index, geom_index)

            # increment to the next geometry
            geom_index += 1

            # flatten row-wise, [t1g, t2g, ..., tng]
            values = datavalues.flatten(order='C')

            # get datetime column in the array [t1, ..., tn]
            valuedates = dates[:, 1]

            st = time.time()
            try:
                self.insert_timeseries_result_values_bulk(
                    ResultIDs=resultid,
                    TimeAggregationInterval=timestep_value,
                    TimeAggregationIntervalUnitsID=timestepunit.UnitsID,
                    DataValues=values,
                    ValueDateTimes=valuedates,
                    ValueDateTimeUTCOffset=-6,
                    CensorCodeCV='nc',
                    QualityCodeCV='unknown')
                bulk = time.time() - st
                sPrint('insert time series result values (%d records)...'
                       '%3.5f sec' % (len(values), bulk), MessageType.DEBUG)
            except Exception, ex:
                msg = 'Encountered an error while inserting timeseries ' \
                      'result values: %s' % ex
                elog.error(msg)
                sPrint(msg, MessageType.ERROR)
                return None
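# --- usage sketch (illustrative only) -----------------------------------------
# A minimal sketch of a create_simulation() call, assuming `db` is the database
# wrapper that owns the method, `user` matches what createPerson and
# createAffiliation expect, and `output_items` is a list of output exchange
# items. All literal values below are placeholders.
import datetime

def _example_create_simulation(db, user, output_items):
    now = datetime.datetime.now()
    utc_offset = int((now - datetime.datetime.utcnow()).total_seconds() / 3600)
    return db.create_simulation(
        coupledSimulationName='example coupled run',
        user_obj=user,
        action_date=now,
        action_utc_offset=utc_offset,
        ei=output_items,
        simulation_start=datetime.datetime(2014, 1, 1),
        simulation_end=datetime.datetime(2014, 1, 10),
        timestep_value=3600,
        timestep_unit='seconds',
        description='example simulation',
        name='example model')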