def getSOSProcedureObservations(self, name, begin, end, qi=False):
    """Execute a GetObservation request for a single procedure.

    @param name: procedure name
    @param begin: period start (tz-aware datetime or ISO 8601 string)
    @param end: period end (tz-aware datetime or ISO 8601 string)
    @param qi: if True the quality-index column is also requested
    @return: the raw values array found at
        ObservationCollection > member[0] > result > DataArray > values
    @raise Exception: when a datetime/string is not time-zone aware
    """
    def _to_iso(value):
        # Normalize a tz-aware datetime or ISO string to an ISO string.
        # Naive times are rejected: the SOS service needs an unambiguous
        # instant. Anything else falls through to "" (as before).
        if isinstance(value, datetime):
            if value.tzinfo is None:
                raise Exception(
                    "Time Zone (tzinfo) is mandatory in datetime objects")
            return value.isoformat()
        elif isinstance(value, str):
            parsed = iso.parse_datetime(value)
            if parsed.tzinfo is None:
                raise Exception(
                    "Time Zone (tzinfo) is mandatory in datetime objects")
            return parsed.isoformat()
        return ""

    begin1 = _to_iso(begin)
    end1 = _to_iso(end)
    # Executing request
    res = req.get(
        "%s/%s" % (self.host, self.service),
        params={
            'service': 'SOS',
            'version': '1.0.0',
            'request': 'GetObservation',
            'offering': 'temporary',
            'responseFormat': 'application/json',
            'procedure': name,
            'qualityIndex': qi,
            'eventTime': "%s/%s" % (begin1, end1),
            'observedProperty': ":"
        },
        auth=self.auth)
    # `payload` instead of `json` avoids shadowing the stdlib module name.
    payload = res.json()
    return payload['ObservationCollection']['member'][0]['result'][
        'DataArray']['values']
def extractSamplingFromGOJson(self, jsonRes):
    """Pull the sampling period out of a GetObservation JSON response.

    @param jsonRes: decoded GetObservation response (dict)
    @return: [begin, end] as parsed datetimes when a beginPosition is
        present, otherwise [None, None]
    """
    sampling = jsonRes["samplingTime"]
    if "beginPosition" not in sampling:
        return [None, None]
    return [
        iso.parse_datetime(sampling["beginPosition"]),
        iso.parse_datetime(sampling["endPosition"]),
    ]
def getObservation(self, name, begin=None, end=None, qi=False):
    """Execute a GetObservation request and return the decoded JSON.

    @param name: procedure name
    @param begin: optional period start (tz-aware datetime or ISO string);
        when omitted no eventTime filter is sent
    @param end: optional period end (tz-aware datetime or ISO string)
    @param qi: if True the quality-index column is also requested
    @return: the decoded JSON response (dict)
    @raise Exception: when a datetime/string is not time-zone aware
    """
    def _to_iso(value):
        # Normalize a tz-aware datetime or ISO string to an ISO string;
        # naive times are rejected, other values fall through to "".
        if isinstance(value, datetime):
            if value.tzinfo is None:
                raise Exception(
                    "Time Zone (tzinfo) is mandatory in datetime objects")
            return value.isoformat()
        elif isinstance(value, str):
            parsed = iso.parse_datetime(value)
            if parsed.tzinfo is None:
                raise Exception(
                    "Time Zone (tzinfo) is mandatory in datetime objects")
            return parsed.isoformat()
        return ""

    begin1 = _to_iso(begin)
    end1 = _to_iso(end)
    params = {
        'service': 'SOS',
        'version': '1.0.0',
        'request': 'GetObservation',
        'offering': 'temporary',
        'responseFormat': 'application/json',
        'procedure': name,
        'qualityIndex': qi,
        'observedProperty': ":"
    }
    # The eventTime filter is only sent when a period start was given.
    if begin:
        params['eventTime'] = "%s/%s" % (begin1, end1)
    # Executing request
    res = req.get("%s/%s" % (self.host, self.service),
                  params=params, auth=self.auth)
    jsonRes = res.json()
    return jsonRes
def getSOSProcedureObservations(self, name, begin, end, qi = False):
    """ Execute a getObservation > Return an array observations.

    @param name: procedure name
    @param begin: period start (tz-aware datetime or ISO 8601 string)
    @param end: period end (tz-aware datetime or ISO 8601 string)
    @param qi: if True the quality-index column is also requested
    @return: the values array from the JSON ObservationCollection
    @raise Exception: when a datetime/string is not time-zone aware

    NOTE(review): this appears to duplicate another
    getSOSProcedureObservations definition in this file -- confirm which
    one is actually bound at runtime.
    """
    begin1 = ""
    end1 = ""
    # Checking dates format
    if isinstance(begin, datetime):
        # Check tz: naive datetimes are rejected so the request period
        # is an unambiguous instant.
        if begin.tzinfo is None:
            raise Exception("Time Zone (tzinfo) is mandatory in datetime objects")
        begin1 = begin.isoformat()
    elif isinstance(begin, str):
        tmp = iso.parse_datetime(begin)
        if tmp.tzinfo is None:
            raise Exception("Time Zone (tzinfo) is mandatory in datetime objects")
        begin1 = tmp.isoformat()
    if isinstance(end, datetime):
        # Check tz
        if end.tzinfo is None:
            raise Exception("Time Zone (tzinfo) is mandatory in datetime objects")
        end1 = end.isoformat()
    elif isinstance(end, str):
        tmp = iso.parse_datetime(end)
        if tmp.tzinfo is None:
            raise Exception("Time Zone (tzinfo) is mandatory in datetime objects")
        end1 = tmp.isoformat()
    # Executing request
    res = req.get("%s/%s" % (self.host, self.service), params={
        'service': 'SOS',
        'version': '1.0.0',
        'request': 'GetObservation',
        'offering': 'temporary',
        'responseFormat': 'application/json',
        'procedure': name,
        'qualityIndex': qi,
        'eventTime': "%s/%s" % (begin1,end1),
        'observedProperty': ":"
    }, auth=self.auth)
    # NOTE: local name shadows the stdlib json module inside this method.
    json = res.json()
    return json['ObservationCollection']['member'][0]['result']['DataArray']['values']
def extractSamplingFromGOJson(self, json):
    """Pull the sampling period out of a GetObservation JSON response.

    @param json: decoded GetObservation response (dict); the parameter
        name shadows the stdlib json module but is kept for
        backward compatibility with existing callers.
    @return: [begin, end] as parsed datetimes, or [None, None] when the
        response carries no beginPosition
    """
    if "beginPosition" in json["samplingTime"]:
        begin = json["samplingTime"]["beginPosition"]
        end = json["samplingTime"]["endPosition"]
        return [iso.parse_datetime(begin), iso.parse_datetime(end)]
    else:
        # BUG FIX: a leftover debug print here referenced an undefined
        # variable `name`, raising NameError whenever this branch ran.
        return [None, None]
def executePut(self):
    """
    Method for executing a PUT request that updates an offering of a
    SOS service instance.

    The request must be in JSON format with the offering attributes:

    >>> {
        "name" : "meteorology",
        "description" : "meteo information",
        "expiration" : "2012-12-30T12:00",
        "active" : "on"
    }

    Side effects: updates the matching row in <service>.offerings and
    sets the response message/exception on self.

    NOTE(review): the original docstring described a service-rename
    handler; the code below updates an offering row -- confirmed from
    the UPDATE statement.
    """
    # The shared 'default' instance must never be modified.
    if self.service == "default":
        raise Exception("offerings PUT operation can not be done for 'default' service instance.")
    try:
        # Offering name is the path element right after "offerings".
        self.offering = self.pathinfo[(self.pathinfo.index("offerings")+1)]
        # The built-in 'temporary' offering may be edited but not renamed.
        if self.offering == 'temporary' and self.offering != self.json["name"]:
            raise Exception("'temporary' offering name cannot be updated")
        servicedb = databaseManager.PgDB(
            self.serviceconf.connection["user"],
            self.serviceconf.connection["password"],
            self.serviceconf.connection["dbname"],
            self.serviceconf.connection["host"],
            self.serviceconf.connection["port"]
        )
        # Schema name comes from the service configuration; the offering
        # values themselves are passed as bound parameters.
        sql = "UPDATE %s.offerings" % self.service
        sql += " SET name_off = %s, desc_off = %s, expiration_off = %s , active_off = %s "
        sql += " WHERE name_off = %s"
        name = self.json["name"]
        desc = self.json["description"]
        # Optional attributes fall back to safe defaults instead of failing.
        try:
            exp = isodate.parse_datetime(self.json["expiration"])
        except:
            exp = None
        try:
            # "active" is a checkbox-style flag ("on" when checked).
            act = True if (self.json.has_key("active") and self.json["active"]=='on') else False
        except:
            act = False
        pars = (name,desc,exp,act,self.offering)
        servicedb.execute(sql,pars)
        self.setMessage("offering successfully updated")
    except Exception as e:
        self.setException("Error in updating an offering: %s" % e)
def executePut(self):
    """
    Method for executing a PUT request that updates an offering of a
    SOS service instance.

    The request must be in JSON format with the offering attributes:

    >>> {
        "name" : "meteorology",
        "description" : "meteo information",
        "expiration" : "2012-12-30T12:00",
        "active" : "on"
    }

    Side effects: updates the matching row in <service>.offerings and
    sets the response message/exception on self.
    @raise Exception: for the 'default' service instance or when trying
        to rename the 'temporary' offering.
    """
    # The shared 'default' instance must never be modified.
    if self.service == "default":
        raise Exception("offerings PUT operation can not be done for 'default' service instance.")
    try:
        # Offering name is the path element right after "offerings".
        self.offering = self.pathinfo[self.pathinfo.index("offerings") + 1]
        # The built-in 'temporary' offering may be edited but not renamed.
        if self.offering == "temporary" and self.offering != self.json["name"]:
            raise Exception("'temporary' offering name cannot be updated")
        servicedb = databaseManager.PgDB(
            self.serviceconf.connection["user"],
            self.serviceconf.connection["password"],
            self.serviceconf.connection["dbname"],
            self.serviceconf.connection["host"],
            self.serviceconf.connection["port"],
        )
        # Schema name comes from the service configuration; the offering
        # values themselves are passed as bound parameters.
        sql = "UPDATE %s.offerings" % self.service
        sql += " SET name_off = %s, desc_off = %s, expiration_off = %s , active_off = %s "
        sql += " WHERE name_off = %s"
        name = self.json["name"]
        desc = self.json["description"]
        # Optional attributes fall back to safe defaults instead of failing;
        # bare `except:` narrowed so SystemExit/KeyboardInterrupt propagate.
        try:
            exp = isodate.parse_datetime(self.json["expiration"])
        except Exception:
            exp = None
        try:
            # "active" is a checkbox-style flag ("on" when checked).
            # `in` replaces dict.has_key (removed in Python 3).
            act = "active" in self.json and self.json["active"] == "on"
        except Exception:
            act = False
        pars = (name, desc, exp, act, self.offering)
        servicedb.execute(sql, pars)
        self.setMessage("offering successfully updated")
    except Exception as e:
        self.setException("Error in updating an offering: %s" % e)
def executePost(self, db=True):
    """
    Method for executing a POST request that creates a new istSOS
    offering.

    The request must be in JSON format with the mandatory "name" key:

    >>> {
        "name" : "meteorology",
        "description" : "meteo information",
        "expiration" : "2012-12-30T12:00",
        "active" : "on"
    }

    @param db: kept for interface compatibility (unused here)
    Side effects: inserts a row into <service>.offerings and sets the
    response message/exception on self.
    """
    servicedb = databaseManager.PgDB(
        self.serviceconf.connection["user"],
        self.serviceconf.connection["password"],
        self.serviceconf.connection["dbname"],
        self.serviceconf.connection["host"],
        self.serviceconf.connection["port"],
    )
    # insert new offering in db
    try:
        # Schema name comes from the service configuration; the offering
        # values themselves are passed as bound parameters.
        sql = "INSERT INTO %s.offerings" % self.service
        sql += " (name_off,desc_off,expiration_off,active_off)"
        sql += " VALUES (%s, %s, %s, %s)"
        name = self.json["name"]
        # Optional attributes fall back to safe defaults instead of failing;
        # bare `except:` narrowed so SystemExit/KeyboardInterrupt propagate.
        try:
            desc = self.json["description"]
        except Exception:
            desc = None
        try:
            exp = isodate.parse_datetime(self.json["expiration"])
        except Exception:
            exp = None
        try:
            act = self.json["active"]
        except Exception:
            act = False
        pars = (name, desc, exp, act)
        servicedb.execute(sql, pars)
        self.setMessage("new offering successfully added")
    except Exception as e:
        self.setException("Error in adding new offering: %s" % e)
def executePost(self,db=True):
    """
    Method for executing a POST request that creates a new istSOS
    offering.

    The request must be in JSON format with the mandatory "name" key:

    >>> {
        "name" : "meteorology",
        "description" : "meteo information",
        "expiration" : "2012-12-30T12:00",
        "active" : "on"
    }

    @param db: kept for interface compatibility (unused here)
    Side effects: inserts a row into <service>.offerings and sets the
    response message/exception on self.
    """
    servicedb = databaseManager.PgDB(
        self.serviceconf.connection["user"],
        self.serviceconf.connection["password"],
        self.serviceconf.connection["dbname"],
        self.serviceconf.connection["host"],
        self.serviceconf.connection["port"]
    )
    #insert new offering in db
    try:
        # Schema name comes from the service configuration; the offering
        # values themselves are passed as bound parameters.
        sql = "INSERT INTO %s.offerings" % self.service
        sql += " (name_off,desc_off,expiration_off,active_off)"
        sql += " VALUES (%s, %s, %s, %s)"
        name = self.json["name"]
        # Optional attributes fall back to defaults instead of failing.
        try:
            desc = self.json["description"]
        except:
            desc = None
        try:
            exp = isodate.parse_datetime(self.json["expiration"])
        except:
            exp = None
        try:
            act = self.json["active"]
        except:
            act = False
        pars = (name,desc,exp,act)
        servicedb.execute(sql,pars)
        self.setMessage("new offering successfully added")
    except Exception as e:
        self.setException("Error in adding new offering: %s" % e)
def makeFile(res, procedure, op, path):
    """Dump a GetObservation CSV response to <path>/<procedure>_<ts>.dat.

    @param res: HTTP response whose .text holds CSV observation rows
    @param procedure: procedure name (its column prefix is stripped)
    @param op: observed property definition used to rebuild the header
    @param path: destination directory
    The file is named after the timestamp of the last data row; nothing
    is written when the response holds only the header line.
    """
    text = res.text
    # Strip the leading "<procedure>," token from every data row.
    text = text.replace("%s," % procedure, "")
    lines = text.split('\n')
    # Drop the trailing empty element produced by a final newline.
    if lines[-1] == '':
        del lines[-1]
    tmpOp = op.replace("x-ist::", "x-istsos:1.0:")
    # NOTE(review): `isoop` is not defined in this function; it is
    # presumably a module-level constant holding the time-column URN --
    # confirm it exists at import time.
    lines[0] = "%s,%s,%s:qualityIndex" % (isoop, tmpOp, tmpOp)
    if len(lines) > 1:
        # Timestamp of the last observation row names the output file.
        datenumber = iso.parse_datetime(lines[-1].split(",")[0])
        stamp = datetime.datetime.strftime(datenumber, "%Y%m%d%H%M%S%f")
        filename = "%s/%s_%s.dat" % (path, procedure, stamp)
        print("File: %s" % filename)
        # Context manager closes the file even if the write fails
        # (the original leaked the handle on error).
        with open(filename, "w") as out_file:
            out_file.write("\n".join(lines))
def makeFile(res, procedure, op, path):
    """Dump a GetObservation CSV response to <path>/<procedure>_<ts>.dat.

    The file is named after the timestamp of the last data row; nothing
    is written when the response holds only the header line.

    NOTE(review): `isoop` is not defined in this function; presumably a
    module-level constant with the time-column URN -- confirm it exists
    at import time.
    """
    text = res.text
    # Strip the leading "<procedure>," token from every data row.
    text = text.replace("%s," % procedure, "")
    lines = text.split('\n')
    # Drop the trailing empty element produced by a final newline.
    if lines[-1] == '':
        del lines[-1]
    tmpOp = op.replace("x-ist::", "x-istsos:1.0:")
    lines[0] = "%s,%s,%s:qualityIndex" % (isoop, tmpOp, tmpOp)
    if len(lines) > 1:
        # Timestamp of the last observation row names the output file.
        datenumber = iso.parse_datetime(lines[-1].split(",")[0])
        print "File: %s/%s_%s.dat" % (path, procedure, datetime.datetime.strftime(
            datenumber, "%Y%m%d%H%M%S%f"))
        out_file = open(
            "%s/%s_%s.dat" % (path, procedure,
                              datetime.datetime.strftime(datenumber, "%Y%m%d%H%M%S%f")),
            "w")
        out_file.write("\n".join(lines))
        out_file.close()
def setData(self,pgdb,o,filter):
    """get data according to request filters

    Builds and runs the SQL that materializes the observation data for
    one procedure, filling self.featureOfInterest, self.observedProperty
    (and friends) and appending rows to self.data.

    @param pgdb: database wrapper exposing select(sql)
    @param o: procedure record; o["id_prc"] is the procedure id
    @param filter: parsed GetObservation request filters

    NOTE(review): filter/config values are interpolated directly into
    the SQL strings rather than passed as bound parameters -- confirm
    upstream validation before exposing to untrusted input.
    """
    # @todo mettere da qualche altra parte
    #SET FOI OF PROCEDURE
    #=========================================
    sqlFoi = "SELECT name_fty, name_foi, ST_AsGml(ST_Transform(geom_foi,%s)) as gml, st_x(geom_foi) as x, st_y(geom_foi) as y " %(filter.srsName)
    sqlFoi += " FROM %s.procedures, %s.foi, %s.feature_type" %(filter.sosConfig.schema,filter.sosConfig.schema,filter.sosConfig.schema)
    sqlFoi += " WHERE id_foi_fk=id_foi AND id_fty_fk=id_fty AND id_prc=%s" %(o["id_prc"])
    try:
        resFoi = pgdb.select(sqlFoi)
    except:
        raise Exception("SQL: %s"%(sqlFoi))
    self.featureOfInterest = resFoi[0]["name_foi"]
    self.foi_urn = filter.sosConfig.urn["feature"] + resFoi[0]["name_fty"] + ":" + resFoi[0]["name_foi"]
    srs = filter.srsName or filter.sosConfig.istsosepsg
    # Inject an srsName attribute into the GML only when it is missing.
    if resFoi[0]["gml"].find("srsName")<0:
        self.foiGml = resFoi[0]["gml"][:resFoi[0]["gml"].find(">")] + " srsName=\"EPSG:%s\"" % srs + resFoi[0]["gml"][resFoi[0]["gml"].find(">"):]
    else:
        self.foiGml = resFoi[0]["gml"]
    self.srs = srs
    self.x = resFoi[0]["x"]
    self.y = resFoi[0]["y"]

    #SET INFORMATION ABOUT OBSERVED_PROPERTIES
    #=========================================
    sqlObsPro = "SELECT id_pro, id_opr, name_opr, def_opr, name_uom FROM %s.observed_properties, %s.proc_obs, %s.uoms" %(filter.sosConfig.schema,filter.sosConfig.schema,filter.sosConfig.schema)
    sqlObsPro += " WHERE id_opr_fk=id_opr AND id_uom_fk=id_uom AND id_prc_fk=%s" %(o["id_prc"])
    sqlObsPro += " AND ("
    #sqlObsPro += " OR ".join(["def_opr='" + str(i) + "'" for i in filter.observedProperty])
    # SIMILAR TO allows asking e.g. "water" to match "water:discharge".
    sqlObsPro += " OR ".join(["def_opr SIMILAR TO '%(:|)" + str(i) + "(:|)%'" for i in filter.observedProperty])
    sqlObsPro += " ) ORDER BY def_opr ASC"
    try:
        obspr_res = pgdb.select(sqlObsPro)
    except:
        raise Exception("SQL: %s"%(sqlObsPro))
    self.observedProperty = []
    self.observedPropertyName = []
    self.opr_urn = []
    self.uom = []
    self.qualityIndex = filter.qualityIndex
    for row in obspr_res:
        self.observedProperty += [str(row["def_opr"])]
        self.observedPropertyName +=[str(row["name_opr"])]
        self.opr_urn += [str(row["def_opr"])]
        try:
            #self.uom += [str(row["name_uom"]).encode('utf-8')]
            self.uom += [row["name_uom"]]
        except:
            self.uom += ["n/a"]
        # When the quality index is requested, every property gains a
        # companion ":qualityIndex" column.
        if self.qualityIndex==True:
            self.observedProperty += [str(row["def_opr"])+":qualityIndex"]
            self.observedPropertyName += [str(row["name_opr"])+":qualityIndex"]
            self.opr_urn += [str(row["def_opr"] +":qualityIndex")]
            self.uom += ["-"]

    #SET DATA
    #=========================================getSampligTime
    #CASE "insitu-fixed-point" or "insitu-mobile-point"
    #-----------------------------------------
    if self.procedureType in ["insitu-fixed-point","insitu-mobile-point"]:
        sqlSel = "SELECT et.time_eti as t,"
        joinar=[]
        cols=[]
        aggrCols=[]
        aggrNotNull=[]  # NOTE(review): populated below but never read
        valeFieldName = []
        # One LEFT JOIN (alias C<idx>) per observed property.
        for idx, obspr_row in enumerate(obspr_res):
            if self.qualityIndex==True:
                #cols.append("C%s.val_msr as c%s_v, C%s.id_qi_fk as c%s_qi" %(idx,idx,idx,idx))
                cols.append("C%s.val_msr as c%s_v, COALESCE(C%s.id_qi_fk,%s) as c%s_qi" %(idx,idx,idx,filter.aggregate_nodata_qi,idx))
                valeFieldName.append("c%s_v" %(idx))
                valeFieldName.append("c%s_qi" %(idx))
            else:
                cols.append("C%s.val_msr as c%s_v" %(idx,idx))
                valeFieldName.append("c%s_v" %(idx))
            # If Aggregatation funtion is set
            #---------------------------------
            if filter.aggregate_interval != None:
                # This can be usefull with string values
                '''aggrCols.append("CASE WHEN %s(dt.c%s_v) is NULL THEN '%s' ELSE '' || %s(dt.c%s_v) END as c%s_v\n" % ( filter.aggregate_function, idx, filter.aggregate_nodata, filter.aggregate_function, idx, idx) )'''
                # This accept only numeric results
                aggrCols.append("COALESCE(%s(dt.c%s_v),'%s') as c%s_v\n" %(filter.aggregate_function,idx,filter.aggregate_nodata,idx))
                if self.qualityIndex==True:
                    #raise sosException.SOSException(3,"QI: %s"%(self.qualityIndex))
                    aggrCols.append("COALESCE(MIN(dt.c%s_qi),%s) as c%s_qi\n" %( idx, filter.aggregate_nodata_qi, idx ))
                aggrNotNull.append(" c%s_v > -900 " %(idx))
            # Set SQL JOINS
            #---------------
            join_txt = " left join (\n"
            join_txt += " SELECT distinct A%s.id_msr, A%s.val_msr, A%s.id_eti_fk\n" %(idx,idx,idx)
            if self.qualityIndex==True:
                join_txt += ",A%s.id_qi_fk\n" %(idx)
            join_txt += " FROM %s.measures A%s, %s.event_time B%s\n" %(filter.sosConfig.schema,idx,filter.sosConfig.schema,idx)
            join_txt += " WHERE A%s.id_eti_fk = B%s.id_eti\n" %(idx,idx)
            join_txt += " AND A%s.id_pro_fk=%s\n" %(idx,obspr_row["id_pro"])
            join_txt += " AND B%s.id_prc_fk=%s\n" %(idx,o["id_prc"])
            # if qualityIndex has filter
            #------------------------------
            #if filter.qualityIndex and filter.qualityIndex.__class__.__name__=='str':
            #    join_txt += " AND %s\n" %(filter.qualityIndex)
            # ATTENTION: HERE -999 VALUES ARE EXCLUDED WHEN ASKING AN AGGREAGATE FUNCTION
            if filter.aggregate_interval != None:
                # >> Should be removed because measures data is not inserted if there is a nodata value
                join_txt += " AND A%s.val_msr > -900 " % idx
            # If eventTime is set add to JOIN part
            #--------------------------------------
            if filter.eventTime:
                join_txt += " AND ("
                etf=[]
                for ft in filter.eventTime:
                    # Two elements = period (begin, end]; one = instant.
                    if len(ft)==2:
                        etf.append("B%s.time_eti > timestamptz '%s' AND B%s.time_eti <= timestamptz '%s' \n" %(idx,ft[0],idx,ft[1]))
                    elif len(ft)==1:
                        etf.append("B%s.time_eti = timestamptz '%s' \n" %(idx,ft[0]))
                    else:
                        raise Exception("error in time filter")
                join_txt += " OR ".join(etf)
                join_txt += ")\n"
            else:
                # No eventTime filter: only the latest event is returned.
                join_txt += " AND B%s.time_eti = (SELECT max(time_eti) FROM %s.event_time WHERE id_prc_fk=%s) \n" %(idx,filter.sosConfig.schema,o["id_prc"])
            # close SQL JOINS
            #-----------------
            join_txt += " ) as C%s\n" %(idx)
            join_txt += " on C%s.id_eti_fk = et.id_eti" %(idx)
            joinar.append(join_txt)

        #If MOBILE PROCEDURE
        #--------------------
        # Mobile procedures also join the positions table (alias Cx) to
        # attach x/y/z (and optionally a position quality index).
        if self.procedureType=="insitu-mobile-point":
            join_txt = " left join (\n"
            join_txt += " SELECT distinct Ax.id_pos, X(ST_Transform(Ax.geom_pos,%s)) as x,Y(ST_Transform(Ax.geom_pos,%s)) as y,Z(ST_Transform(Ax.geom_pos,%s)) as z, Ax.id_eti_fk\n" %(filter.srsName,filter.srsName,filter.srsName)
            if self.qualityIndex==True:
                join_txt += ", Ax.id_qi_fk as posqi\n"
            join_txt += " FROM %s.positions Ax, %s.event_time Bx\n" %(filter.sosConfig.schema,filter.sosConfig.schema)
            join_txt += " WHERE Ax.id_eti_fk = Bx.id_eti"
            join_txt += " AND Bx.id_prc_fk=%s" %(o["id_prc"])
            if filter.eventTime:
                join_txt += " AND ("
                etf=[]
                for ft in filter.eventTime:
                    if len(ft)==2:
                        etf.append("Bx.time_eti > timestamptz '%s' AND Bx.time_eti <= timestamptz '%s' " %(ft[0],ft[1]))
                    elif len(ft)==1:
                        etf.append("Bx.time_eti = timestamptz '%s' " %(ft[0]))
                    else:
                        raise Exception("error in time filter")
                join_txt += " OR ".join(etf)
                join_txt += ")\n"
            else:
                join_txt += " AND Bx.time_eti = (SELECT max(time_eti) FROM %s.event_time WHERE id_prc_fk=%s) " %(filter.sosConfig.schema,o["id_prc"])
            join_txt += " ) as Cx on Cx.id_eti_fk = et.id_eti\n"
            sqlSel += " Cx.x as x, Cx.y as y, Cx.z as z, "
            if self.qualityIndex==True:
                #sqlSel += "COALESCE(Cx.posqi,%s) as posqi, " % filter.aggregate_nodata_qi
                sqlSel += "Cx.posqi, "
            joinar.append(join_txt)

        # Set FROM CLAUSE
        #-----------------
        sqlSel += ", ".join(cols)
        sqlSel += " FROM %s.event_time et\n" %(filter.sosConfig.schema)

        #====================
        # Set WHERE CLAUSES
        #====================
        sqlData = " ".join(joinar)
        sqlData += " WHERE et.id_prc_fk=%s\n" %(o["id_prc"])

        # Set FILTER ON RESULT (OGC:COMPARISON) -
        #----------------------------------------
        if filter.result:
            for ind, ov in enumerate(self.observedProperty):
                if ov.find(filter.result[0])>0:
                    sqlData += " AND C%s.val_msr %s" %(ind,filter.result[1])
            #sqlData += " AND C%s.val_msr %s" %(self.observedProperty.index(filter.result[0]),filter.result[1])

        # Set FILTER ON EVENT-TIME -
        #---------------------------
        if filter.eventTime:
            sqlData += " AND ("
            etf=[]
            for ft in filter.eventTime:
                if len(ft)==2:
                    etf.append("et.time_eti > timestamptz '%s' AND et.time_eti <= timestamptz '%s' " %(ft[0],ft[1]))
                elif len(ft)==1:
                    etf.append("et.time_eti = timestamptz '%s' " %(ft[0]))
                else:
                    raise Exception("error in time filter")
            sqlData += " OR ".join(etf)
            sqlData += ")"
        else:
            sqlData += " AND et.time_eti = (SELECT max(time_eti) FROM %s.event_time WHERE id_prc_fk=%s) " %(filter.sosConfig.schema,o["id_prc"])

        sqlData += " ORDER by et.time_eti"
        sql = sqlSel+sqlData
        #
        # When an aggregate interval is requested the raw query is
        # wrapped in a generate_series-based regular time grid.
        if filter.aggregate_interval != None:
            self.aggregate_function = filter.aggregate_function.upper()
            ''' for i in range(0,len(self.observedProperty)): self.observedProperty[i] = "%s:%s" % (self.observedProperty[i], filter.aggregate_function) for ob in self.observedProperty: ob = "%s:%s" % (ob, filter.aggregate_function)'''

            # Interval preparation
            # Converting ISO 8601 duration
            isoInt = iso.parse_duration(filter.aggregate_interval)
            sqlInt = ""
            if isinstance(isoInt, timedelta):
                if isoInt.days>0:
                    sqlInt += "%s days " % isoInt.days
                if isoInt.seconds>0:
                    sqlInt += "%s seconds " % isoInt.seconds
            elif isinstance(isoInt, iso.Duration):
                if isoInt.years>0:
                    sqlInt += "%s years " % isoInt.years
                if isoInt.months>0:
                    isoInt.months = int(isoInt.months)
                    sqlInt += "%s months " % isoInt.months
                if isoInt.days>0:
                    sqlInt += "%s days " % isoInt.days
                if isoInt.seconds>0:
                    sqlInt += "%s seconds " % isoInt.seconds

            # @todo improve this part
            # calculate how many step are included in the asked interval.
            hopBefore = 1
            hop = 0
            tmpStart = iso.parse_datetime(filter.eventTime[0][0])
            tmpEnd = self.samplingTime[1]
            while (tmpStart+isoInt)<=tmpEnd and (tmpStart+isoInt)<=iso.parse_datetime(filter.eventTime[0][1]):
                if tmpStart < self.samplingTime[0]:
                    hopBefore+=1
                    hop+=1
                elif (tmpStart >= self.samplingTime[0]) and ((tmpStart+isoInt)<=self.samplingTime[1]):
                    hop+=1
                tmpStart=tmpStart+isoInt

            aggregationSQL = "SELECT ts.sint as t, %s\n"
            aggregationSQL += "FROM\n"
            aggregationSQL += " (\n"
            # Generating time series here
            aggregationSQL += " select\n"
            aggregationSQL += " (('%s'::TIMESTAMP WITH TIME ZONE) \n"
            aggregationSQL += " + s.a * '%s'::interval)::TIMESTAMP WITH TIME ZONE as sint\n"
            aggregationSQL += " from generate_series(%s, %s) as s(a)\n"
            aggregationSQL += " ) as ts LEFT JOIN ( \n\n"
            aggregationSQL += " %s \n\n"
            aggregationSQL += " ) as dt\n"
            aggregationSQL += " ON (\n"
            aggregationSQL += " dt.t > (ts.sint-'%s'::interval)\n"
            aggregationSQL += " AND\n"
            aggregationSQL += " dt.t <= (ts.sint) \n"
            aggregationSQL += " )\n"
            aggregationSQL += " GROUP BY ts.sint\n"
            aggregationSQL += " ORDER BY ts.sint"
            sql = aggregationSQL % (", ".join(aggrCols), filter.eventTime[0][0], sqlInt, hopBefore, hop, sql, sqlInt)
        else:
            self.aggregate_function = None
        #print sql.replace('\n','')
        try:
            data_res = pgdb.select(sql)
        except:
            raise Exception("SQL: %s"%(sql))

        #------------------------------------
        #--------- APPEND DATA IN ARRAY -----
        #------------------------------------
        #append data
        for line in data_res:
            # Row layout: timestamp (+ x/y/z[/posqi] for mobile), then
            # one value (and optional QI) per observed property.
            if self.procedureType=="insitu-fixed-point":
                data_array = [line["t"]]
            elif self.procedureType=="insitu-mobile-point":
                if self.qualityIndex==True:
                    data_array = [line["t"],line["x"],line["y"],line["z"],line["posqi"]]
                else:
                    data_array = [line["t"],line["x"],line["y"],line["z"]]
            data_array.extend([line[field] for field in valeFieldName])
            self.data.append(data_array)

    #-----------------------------------------
    #CASE "virtual"
    #-----------------------------------------
    elif self.procedureType in ["virtual"]:
        self.aggregate_function = filter.aggregate_function
        self.aggregate_interval = filter.aggregate_interval
        self.aggregate_nodata = filter.aggregate_nodata
        self.aggregate_nodata_qi = filter.aggregate_nodata_qi

        vpFolder = os.path.join(os.path.join(filter.sosConfig.virtual_processes_folder,self.name))
        if not os.path.isfile("%s/%s.py" % (vpFolder,self.name)):
            raise Exception("Virtual procedure folder does not contain any Virtual Procedure code for %s" % self.name)
        #----- VIRTUAL PROCESS LOADING -----
        try:
            sys.path.append(vpFolder)
        except:
            raise Exception("error in loading virtual procedure path")
        #import procedure process
        # NOTE(review): Python 2 exec statement importing trusted,
        # locally deployed virtual-procedure code by name.
        exec "import %s as vproc" %(self.name)

        # Initialization of virtual procedure will load the source data
        vp = vproc.istvp()
        vp._configure(filter, pgdb)

        # Calculate virtual procedure data
        vp.calculateObservations(self)
def applyFunction(ob, filter):
    """Apply the requested aggregate function to a virtual procedure's data.

    Buckets ob.data rows into intervals of filter.aggregate_interval
    between filter.eventTime[0][0] and [0][1], then replaces ob.data
    with one record per interval end: [timestamp, value...] where each
    value is the aggregate (SUM/MAX/MIN/AVG/COUNT) of the bucket, the
    nodata placeholder for empty buckets, and min() for qualityIndex
    columns.

    @param ob: observation object with .data rows of
        [timestamp, value, ...] and .observedProperty column names
    @param filter: request filters (eventTime, aggregate_interval,
        aggregate_function, aggregate_nodata, aggregate_nodata_qi)
    @raise Exception: wrapping any error raised while aggregating
    """
    import copy
    try:
        # Create array container
        begin = iso.parse_datetime(filter.eventTime[0][0])
        end = iso.parse_datetime(filter.eventTime[0][1])
        duration = iso.parse_duration(filter.aggregate_interval)
        result = {}
        dt = begin
        fields = len(ob.observedProperty)# + 1 # +1 timestamp field not mentioned in the observedProperty array
        while dt < end:
            # Bucket (dt, dt2]: keyed by its end timestamp.
            dt2 = dt + duration
            result[dt2]=[]
            for c in range(fields):
                result[dt2].append([])
            d = 0
            # Scan a shallow copy; consumed rows are removed from
            # ob.data so later buckets restart at the first leftover.
            data = copy.copy(ob.data)
            while len(data) > 0:
                tmp = data.pop(d)
                if dt < tmp[0] and tmp[0] <= dt2:
                    # Row belongs to this bucket: consume it.
                    ob.data.pop(d)
                    for c in range(fields):
                        result[dt2][c].append(float(tmp[c+1]))
                elif dt > tmp[0]:
                    # Row precedes the requested period: discard it.
                    ob.data.pop(d)
                elif dt2 < tmp[0]:
                    # Rows are time-ordered: past the bucket, stop.
                    break
            dt = dt2
        data = []
        for r in sorted(result):
            record = [r]
            for v in range(len(result[r])):
                if ob.observedProperty[v].split(":")[-1]=="qualityIndex":
                    # Quality index aggregates as the worst (minimum) QI.
                    if len(result[r][v])==0:
                        record.append(filter.aggregate_nodata_qi)
                    else:
                        record.append(int(min(result[r][v])))
                else:
                    val = None
                    if len(result[r][v])==0:
                        val = filter.aggregate_nodata
                    elif filter.aggregate_function.upper() == 'SUM':
                        val = sum(result[r][v])
                    elif filter.aggregate_function.upper() == 'MAX':
                        val = max(result[r][v])
                    elif filter.aggregate_function.upper() == 'MIN':
                        val = min(result[r][v])
                    elif filter.aggregate_function.upper() == 'AVG':
                        val = round(sum(result[r][v])/len(result[r][v]),4)
                    elif filter.aggregate_function.upper() == 'COUNT':
                        val = len(result[r][v])
                    record.append(val)
            data.append(record)
        ob.data = data
    except Exception as e:
        raise Exception("Error while applying aggregate function on virtual procedures: %s" % (e))
def setDischargeCurves(self):
    "method for setting h-q tranformation tables/curves"
    # Loads the rating-curve (.rcv) file of the requested procedure and
    # stores the curve parameters valid for the requested period in
    # self.hqCurves (dict of parallel lists keyed 'from','to','low',
    # 'up','A','B','C','K').
    #set requested period
    #================================================
    hqFile = os.path.join(
        self.filter.sosConfig.virtual_processes_folder,
        self.filter.procedure[0],
        self.filter.procedure[0]+".rcv"
    )
    tp=[]
    if self.filter.eventTime == None:
        tp = [None,None]
    else:
        for t in self.filter.eventTime:
            if len(t) == 2:
                # Timestamps without an explicit offset are assumed UTC.
                if t[0].find('+')==-1:
                    t[0] += "+00:00"
                if t[1].find('+')==-1:
                    t[1] += "+00:00"
                tp.append(iso.parse_datetime(t[0]))
                tp.append(iso.parse_datetime(t[1]))
            if len(t)==1:
                if t[0].find('+')==-1:
                    t[0] += "+00:00"
                tp.append(iso.parse_datetime(t[0]))
    period = (min(tp),max(tp))

    #get required parameters
    #==================================================
    try:
        hq_fh = open(hqFile,'r')
    except Exception as e:
        raise Exception("Unable to open hq rating curve file at: %s" % hqFile)
    lines = hq_fh.readlines()
    #read header
    hqs = {'from':[],'to':[],'low':[],'up': [],'A':[],'B':[],'C':[],'K':[]}
    head = lines[0].strip().split("|")
    try:
        fromt = head.index('from') #from time
        tot = head.index('to') #to time
        low = head.index('low_val') #if value is bigger than
        up = head.index('up_val') #and is lower than
        A = head.index('A') #use this A
        B = head.index('B') #use this B
        C = head.index('C') #use this C
        K = head.index('K') #use this K
    except Exception as e:
        raise Exception("setDischargeCurves: FILE %s ,%s error in header.\n %s" %(hqFile,head,e))
    #get equations
    if not period[0] == None:
        # Keep only curves overlapping the requested period.
        # NOTE(review): the overlap test reads line[1]/line[0] instead of
        # line[tot]/line[fromt]; correct only if 'from' and 'to' are the
        # first two columns -- confirm against the .rcv format.
        for l in range(1,len(lines)):
            line = lines[l].split("|")
            if iso.parse_datetime(line[1]) > period[0] or iso.parse_datetime(line[0]) <= period[1]:
                hqs['from'].append(iso.parse_datetime(line[fromt]))
                hqs['to'].append(iso.parse_datetime(line[tot]))
                hqs['low'].append(float(line[low]))
                hqs['up'].append(float(line[up]))
                hqs['A'].append(float(line[A]))
                hqs['B'].append(float(line[B]))
                hqs['C'].append(float(line[C]))
                hqs['K'].append(float(line[K]))
    else:
        # No period requested: best-effort load of the last two curves.
        for l in [-1,-2]:
            try:
                line = lines[l].split("|")
                hqs['from'].append(iso.parse_datetime(line[fromt]))
                hqs['to'].append(iso.parse_datetime(line[tot]))
                hqs['low'].append(float(line[low]))
                hqs['up'].append(float(line[up]))
                hqs['A'].append(float(line[A]))
                hqs['B'].append(float(line[B]))
                hqs['C'].append(float(line[C]))
                hqs['K'].append(float(line[K]))
            except:
                pass
    #raise sosException.SOSException(3,"%s" %(hqs))
    self.hqCurves = hqs
def getDSEndPosition(self):
    """Return the end of the first output's constraint interval.

    Reads describeSensor output 0; when it carries no 'constraint'
    entry the procedure has no declared end position and None is
    returned.
    """
    first_output = self.describe['outputs'][0]
    if u'constraint' not in first_output:
        return None
    return iso.parse_datetime(first_output['constraint']['interval'][1])
def __init__(self,sosRequest,method,requestObject,sosConfig):
    """Parse and validate a SOS GetObservation request.

    Populates the filter attributes (offering, observedProperty,
    responseFormat, srsName, eventTime, procedure, featureOfInterest /
    featureOfInterestSpatial, result, resultModel, responseMode, the
    non-standard aggregate_* parameters and qualityIndex) from either
    the KVP dictionary (method == "GET") or the XML DOM (method ==
    "POST") held in requestObject, raising sosException.SOSException
    on any missing or invalid parameter.
    """
    f.sosFilter.__init__(self,sosRequest,method,requestObject,sosConfig)
    # @TODO Declare attribute first!
    # self.offering = None
    # etc..
    #**************************
    if method == "GET":
        #---------- THE OFFERING
        if requestObject.has_key("offering"):
            self.offering = get_name_from_urn(requestObject["offering"],"offering",sosConfig)
        else:
            raise sosException.SOSException(1,"Parameter \"offering\" is mandatory with multiplicity 1")
        #---------- THE OBSERVED PROPERTY
        if requestObject.has_key("observedProperty"):
            self.observedProperty = []
            oprs = requestObject["observedProperty"].split(",")
            for opr in oprs:
                # get_name_from_urn limit the ability to ask for an observedProperty with LIKE:
                # eg: ask "water" to get all the water related data, "water:discharge", "water:temperature" ...
                #oprName = get_name_from_urn(opr,"property")
                oprName = opr
                self.observedProperty.append(oprName) # one-many ID
        else:
            raise sosException.SOSException(1,"Parameter \"observedProperty\" is mandatory with multiplicity N")
        #---------- RESPONSE FORMAT
        if requestObject.has_key("responseFormat"):
            if not requestObject["responseFormat"] in sosConfig.parameters["GO_responseFormat"]:
                raise sosException.SOSException(2,"Parameter \"responseFormat\" sent with invalid value : use one of %s" % "; ".join(sosConfig.parameters["GO_responseFormat"]))
            else:
                self.responseFormat = requestObject["responseFormat"]
        else:
            raise sosException.SOSException(1,"Parameter \"responseFormat\" is mandatory with multiplicity 1") #one
        #OPTIONAL request parameters
        #---------- SRS FILTER
        if requestObject.has_key("srsName"):
            self.srsName = get_name_from_urn(requestObject["srsName"],"refsystem",sosConfig)
            if not self.srsName in sosConfig.parameters["GO_srs"]:
                raise sosException.SOSException(2,"srsName \"%s\" not supported, use one of: %s" %(self.srsName,",".join(sosConfig.parameters["GO_srs"])))
        else:
            self.srsName = sosConfig.parameters["GO_srs"][0]
        #---------- TIME FILTER
        if requestObject.has_key('eventTime'):
            self.eventTime = []
            # " " is turned back into "+" (URL decoding replaced "+" with a space)
            for i in requestObject["eventTime"].replace(" ","+").split(","):
                if len(i.split("/")) < 3:
                    self.eventTime.append(i.split("/"))
                else:
                    raise sosException.SOSException(2,"Parameter \"eventTime\" bad formatted")
            tp=[]
            for t in self.eventTime:
                if len(t) == 2:
                    tp.append(iso.parse_datetime(t[0]))
                    tp.append(iso.parse_datetime(t[1]))
                if len(t)==1:
                    tp.append(iso.parse_datetime(t[0]))
            # Checking if some event limitation is reached
            #if sosConfig["maxGoPeriod"]:
            if int(sosConfig.maxGoPeriod) > 0:
                from datetime import timedelta
                d = timedelta(hours=int(sosConfig.maxGoPeriod))
                userPeriod = max(tp)-min(tp)
                if d < userPeriod:
                    raise sosException.SOSException(2,"You are requesting data for a period of [%s hours], but you are not permitted to ask for a period longer than: %s hours" % (userPeriod,d))
        else:
            self.eventTime = None
        #---------- PROCEDURES FILTER
        if requestObject.has_key("procedure"):
            self.procedure = []
            prcs = requestObject["procedure"].split(",")
            for prc in prcs:
                prcName = get_name_from_urn(prc,"procedure",sosConfig)
                self.procedure.append(prcName)
        else:
            self.procedure = None
        #---------- FEATURES OF INTEREST FILTER
        self.featureOfInterest = None
        self.featureOfInterestSpatial = None
        if requestObject.has_key("featureOfInterest"):
            foi = requestObject["featureOfInterest"]
            # an embedded OGC/GML fragment means a spatial constraint,
            # otherwise a plain feature-of-interest name
            if foi.find("<ogc:")>=0 and foi.find("<gml:")>=0:
                #raise sosException.SOSException(3,"FOI SPATIAL: %s" %(foi))
                self.featureOfInterestSpatial = sosUtils.ogcSpatCons2PostgisSql(foi,'geom_foi',sosConfig.istsosepsg)
            else:
                self.featureOfInterest = get_name_from_urn(foi,"feature",sosConfig)
                #fois = requestObject["featureOfInterest"].split(",")
                #for foi in fois:
                #    foiName = get_name_from_urn(foi,"feature")
                #    self.featureOfInterest.append(foiName)
        #---------- FILTERS FOR QUERY NOT SUPPORTED YET
        if requestObject.has_key("result"):
            #raise sosException.SOSException(3,"Parameter \"result\" not yet supported")
            self.result = sosUtils.ogcCompCons2PostgisSql(requestObject["result"])
        else:
            self.result = None #zero-one optional
        #---------- RESULT MODEL
        if requestObject.has_key("resultModel"):
            if requestObject["resultModel"] in sosConfig.parameters["GO_resultModel"]:
                self.resultModel = requestObject["resultModel"]
            else:
                raise sosException.SOSException(2,"Parameter \"resultModel\" sent with invalid value: supported values are: %s" %",".join(sosConfig.parameters["GO_resultModel"]))
        else:
            self.resultModel = sosConfig.parameters["GO_resultModel"][0]
        #---------- RESPONSE MODE
        if requestObject.has_key("responseMode"):
            if requestObject["responseMode"] in sosConfig.parameters["GO_responseMode"]:
                self.responseMode = requestObject["responseMode"]
            else:
                raise sosException.SOSException(2,"Parameter \"responseMode\" sent with invalid value, supported values are: %s" %(",".join(sosConfig.parameters["GO_responseMode"])))
        else:
            self.responseMode = sosConfig.parameters["GO_responseMode"][0]
        ###########################
        # NON STANDARD PARAMETERS #
        ###########################
        #---------- AGGREGATE INTERVAL
        # In ISO 8601 duration format
        if requestObject.has_key("aggregateInterval"):
            # Check on the eventTime parameter: it must be only one interval: 2010-01-01T00:00:00+00/2011-01-01T00:00:01+00
            exeMsg = "Using aggregate functions, the event time must exist with an interval composed by a begin and an end date (ISO8601)"
            if self.eventTime == None or len(self.eventTime)!=1 or len(self.eventTime[0])!=2:
                raise sosException.SOSException(2,exeMsg)
            self.aggregate_interval = requestObject["aggregateInterval"]
            try:
                iso.parse_duration(self.aggregate_interval)
            except Exception as ex:
                raise sosException.SOSException(2,"Parameter \"aggregate_interval\" sent with invalid format (check ISO8601 duration spec): %s" % ex)
        else:
            self.aggregate_interval = None
        #---------- AGGREGATE FUNCTION
        # sum,avg,max,min
        if requestObject.has_key("aggregateFunction"):
            if self.aggregate_interval==None:
                raise sosException.SOSException(2,"Using aggregate functions parameters \"aggregateInterval\" and \"aggregateFunction\" are both mandatory")
            self.aggregate_function = requestObject["aggregateFunction"]
            if not (self.aggregate_function.upper() in ["AVG","COUNT","MAX","MIN","SUM"]):
                raise sosException.SOSException(2,"Available aggregation functions: avg, count, max, min, sum.")
        else:
            self.aggregate_function = None
        #---------- AGGREGATE NODATA
        if requestObject.has_key("aggregateNodata"):
            if self.aggregate_interval==None or self.aggregate_function==None:
                raise sosException.SOSException(2,"Using aggregateNodata parameter requires both \"aggregateInterval\" and \"aggregateFunction\"")
            self.aggregate_nodata = requestObject["aggregateNodata"]
        else:
            self.aggregate_nodata = sosConfig.aggregate_nodata
        #---------- AGGREGATE NODATA QUALITY INDEX
        if requestObject.has_key("aggregateNodataQi"):
            if self.aggregate_interval==None or self.aggregate_function==None:
                raise sosException.SOSException(2,"Using aggregateNodataQi parameter requires both \"aggregateInterval\" and \"aggregateFunction\"")
            self.aggregate_nodata_qi = requestObject["aggregateNodataQi"]
        else:
            self.aggregate_nodata_qi = sosConfig.aggregate_nodata_qi
        #------------ QUALITY INDEX
        self.qualityIndex=False
        if requestObject.has_key("qualityIndex"):
            if requestObject["qualityIndex"].upper() == "TRUE":
                self.qualityIndex = True
            elif requestObject["qualityIndex"].upper() == "FALSE":
                self.qualityIndex = False
            else:
                raise sosException.SOSException(2,"qualityIndex can only be True or False!")
                # self.qualityIndex = sosUtils.CQLvalueFilter2PostgisSql("id_qi_fk",requestObject["qualityIndex"])
    #**********************
    if method == "POST":
        from xml.dom import minidom
        #---------- THE OFFERING
        offs = requestObject.getElementsByTagName('offering')
        if len(offs) == 1:
            val = offs[0].firstChild
            if val.nodeType == val.TEXT_NODE:
                self.offering = get_name_from_urn(str(val.data),"offering",sosConfig)
            else:
                err_txt = "XML parsing error (get value: offering)"
                raise sosException.SOSException(1,err_txt)
        else:
            err_txt = "Parameter \"offering\" is mandatory with multiplicity 1"
            raise sosException.SOSException(1,err_txt)
        #---------- THE OBSERVED PROPERTY
        obsProps = requestObject.getElementsByTagName('observedProperty')
        self.observedProperty = []
        if len(obsProps) > 0:
            for obsProp in obsProps:
                val = obsProp.firstChild
                if val.nodeType == val.TEXT_NODE:
                    # get_name_from_urn limit the ability to ask for an observedProperty with LIKE:
                    # eg: ask "water" to get all the water related data, "water:discharge", "water:temperature" ...
                    #self.observedProperty.append(get_name_from_urn(str(val.data),"property"))
                    self.observedProperty.append(str(val.data))
                else:
                    err_txt = "XML parsing error (get value: observedProperty)"
                    raise sosException.SOSException(1,err_txt)
        else:
            err_txt = "Parameter \"observedProperty\" is mandatory with multiplicity N"
            raise sosException.SOSException(1,err_txt)
        #---------- RESPONSE FORMAT
        respF = requestObject.getElementsByTagName('responseFormat')
        if len(respF) == 1:
            val = respF[0].firstChild
            if val.nodeType == val.TEXT_NODE:
                self.responseFormat = str(val.data)
                if self.responseFormat not in sosConfig.parameters["GO_responseFormat"]:
                    raise sosException.SOSException(2,"Parameter \"responseFormat\" sent with invalid value: use one of %s" % "; ".join(sosConfig.parameters["GO_responseFormat"]))
            else:
                err_txt = "XML parsing error (get value: responseFormat)"
                raise sosException.SOSException(1,err_txt)
        else:
            err_txt = "Parameter \"responseFormat\" is mandatory with multiplicity 1"
            raise sosException.SOSException(1,err_txt)
        #OPTIONAL request parameters
        #---------- SRS OF RETURNED GML FEATURES
        srss = requestObject.getElementsByTagName('srsName')
        if len(srss) > 0:
            if len(srss) < 2:
                val = srss[0].firstChild
                if val.nodeType == val.TEXT_NODE:
                    self.srsName = get_name_from_urn(str(val.data),"refsystem",sosConfig)
                else:
                    err_txt = "XML parsing error (get value: srsName)"
                    raise sosException.SOSException(1,err_txt)
            else:
                err_txt = "Allowed only ONE parameter \"srsName\""
                raise sosException.SOSException(1,err_txt)
        else:
            self.srsName = sosConfig.parameters["GO_srs"][0]
        #---------- TIME FILTER
        evtms = requestObject.getElementsByTagName('eventTime')
        self.eventTime = []
        if len(evtms) > 0:
            for evtm in evtms:
                tps = evtm.getElementsByTagName('gml:TimePeriod')
                for tp in tps:
                    begin = tp.getElementsByTagName('gml:beginPosition')
                    end = tp.getElementsByTagName('gml:endPosition')
                    if len(begin)==1 and len(end)==1:
                        Bval = begin[0].firstChild
                        Eval = end[0].firstChild
                        #raise sosException.SOSException(1,end[0].toprettyxml())
                        if Bval.nodeType == Bval.TEXT_NODE and Eval.nodeType == Eval.TEXT_NODE:
                            self.eventTime.append([str(Bval.data).replace(" ","+"),str(Eval.data).replace(" ","+")])
                            #raise sosException.SOSException(1,str(self.eventTime))
                        else:
                            err_txt = "XML parsing error (get value: TimePeriod)"
                            raise sosException.SOSException(1,err_txt)
                tis = evtm.getElementsByTagName('gml:TimeInstant')
                for ti in tis:
                    instant = ti.getElementsByTagName('gml:timePosition')
                    if len(instant)>0 and len(instant)<2:
                        Ival = instant[0].firstChild
                        if Ival.nodeType == Ival.TEXT_NODE:
                            self.eventTime.append([str(Ival.data).replace(" ","+")])
                        else:
                            err_txt = "XML parsing error (get value: Timeinstant)"
                            raise sosException.SOSException(1,err_txt)
        else:
            self.eventTime = None
        #---------- PROCEDURES FILTER
        procs = requestObject.getElementsByTagName('procedure')
        if len(procs) > 0:
            self.procedure=[]
            for proc in procs:
                if "xlink:href" in proc.attributes.keys():
                    self.procedure.append(str(proc.getAttribute("xlink:href")))
                elif proc.hasChildNodes():
                    val = proc.firstChild
                    if val.nodeType == val.TEXT_NODE:
                        self.procedure.append(get_name_from_urn(str(val.data),"procedure",sosConfig))
                else:
                    err_txt = "XML parsing error (get value: procedure)"
                    raise sosException.SOSException(1,err_txt)
        else:
            self.procedure = None
        #---------- FEATURES OF INTEREST FILTER
        fets = requestObject.getElementsByTagName('featureOfInterest')
        self.featureOfInterest = None
        self.featureOfInterestSpatial = None
        if len(fets)>0:
            if len(fets)<2:
                # a single child element means an inline spatial constraint
                elements = [e for e in fets[0].childNodes if e.nodeType == e.ELEMENT_NODE]
                if len(elements)==1:
                    self.featureOfInterestSpatial = sosUtils.ogcSpatCons2PostgisSql(elements[0],'geom_foi',sosConfig.istsosepsg)
                else:
                    if "xlink:href" in fets[0].attributes.keys():
                        self.featureOfInterest = str(fets[0].getAttribute("xlink:href"))
                    elif fets[0].hasChildNodes():
                        val = fets[0].firstChild
                        if val.nodeType == val.TEXT_NODE:
                            self.featureOfInterest = get_name_from_urn(str(val.data),"feature",sosConfig)
                        else:
                            err_txt = "XML parsing error (get value: featureOfInterest)"
                            raise sosException.SOSException(1,err_txt)
            else:
                err_txt = "Allowed only ONE parameter \"featureOfInterest\""
                raise sosException.SOSException(1,err_txt)
        #---------- FILTERS FOR QUERY NOT SUPPORTED YET
        ress = requestObject.getElementsByTagName('result')
        if len(ress)>0:
            raise sosException.SOSException(3,"Parameter \"result\" not yet supported")
        else:
            self.result = None #zero-one optional
        #---------- RESULT MODEL
        mods = requestObject.getElementsByTagName('resultModel')
        if len(mods)>0:
            if len(mods)<2:
                val = mods[0].firstChild
                if val.nodeType == val.TEXT_NODE:
                    self.resultModel = str(val.data)
                    if self.resultModel not in sosConfig.parameters["GO_resultModel"]:
                        raise sosException.SOSException(2,"Parameter \"resultModel\" sent with invalid value")
                else:
                    err_txt = "XML parsing error (get value: resultModel)"
                    raise sosException.SOSException(1,err_txt)
            else:
                err_txt = "Allowed only ONE parameter \"resultModel\""
                raise sosException.SOSException(1,err_txt)
        else:
            self.resultModel = None
        #---------- RESPONSE MODE
        rsmods = requestObject.getElementsByTagName('responseMode')
        if len(rsmods)>0:
            if len(rsmods)<2:
                val = rsmods[0].firstChild
                if val.nodeType == val.TEXT_NODE:
                    self.responseMode = str(val.data)
                    if self.responseMode not in sosConfig.parameters["GO_responseMode"]:
                        raise sosException.SOSException(2,"Parameter \"responseMode\" sent with invalid value")
                else:
                    err_txt = "XML parsing error (get value: responseMode)"
                    raise sosException.SOSException(1,err_txt)
            else:
                err_txt = "Allowed only ONE parameter \"responseMode\""
                raise sosException.SOSException(1,err_txt)
        else:
            self.responseMode = sosConfig.parameters["GO_responseMode"][0]
        #-------------- AGGREGATE INTERVAL & FUNCTION
        self.aggregate_interval = None
        self.aggregate_function = None
        aggint = requestObject.getElementsByTagName('aggregateInterval')
        aggfun = requestObject.getElementsByTagName('aggregateFunction')
        aggnodata = requestObject.getElementsByTagName('aggregateNodata')
        if len(aggint)==1 and len(aggfun)==1:
            #-----------------------
            # -- aggregate_interval
            #-----------------------
            # Check on the eventTime parameter: it must be only one interval: 2010-01-01T00:00:00+00/2011-01-01T00:00:01+00
            exeMsg = "Using aggregate functions, the event time must exist with an interval composed by a begin and an end date (ISO8601)"
            if self.eventTime == None or len(self.eventTime)!=1 or len(self.eventTime[0])!=2:
                raise sosException.SOSException(2,exeMsg)
            val = aggint[0].firstChild
            if val.nodeType == val.TEXT_NODE:
                self.aggregate_interval = str(val.data)
                try:
                    iso.parse_duration(self.aggregate_interval)
                except Exception as ex:
                    raise sosException.SOSException(2,"Parameter \"aggregate_interval\" sent with invalid format (check ISO8601 duration spec): %s" % ex)
            else:
                err_txt = "cannot get ISO8601 duration value in \"aggregateInterval\""
                raise sosException.SOSException(1,err_txt)
            #-----------------------
            # -- aggregate_function
            #-----------------------
            val = aggfun[0].firstChild
            if val.nodeType == val.TEXT_NODE:
                self.aggregate_function = str(val.data)
                if not (self.aggregate_function.upper() in ["AVG","COUNT","MAX","MIN","SUM"]):
                    raise sosException.SOSException(2,"Available aggregation functions: avg, count, max, min, sum.")
            #-----------------------------------
            # -- aggregate_no_data default value
            #-----------------------------------
            if len(aggnodata)==1:
                val = aggnodata[0].firstChild
                self.aggregate_nodata = str(val.data)
            else:
                self.aggregate_nodata = sosConfig.aggregate_nodata
            #================================
            #MISSING AGGREGATE QUALITY INDEX
            #================================
        elif len(aggint)==0 and len(aggfun)==0:
            pass
        else:
            err_txt = "\"aggregateInterval\" and \"aggregate_function\" are both required with multiplicity 1"
            raise sosException.SOSException(1,err_txt)
        #------------ QUALITY INDEX
        self.qualityIndex=False
        qidx = requestObject.getElementsByTagName('qualityIndex')
        if len(qidx)>0:
            if len(qidx)<2:
                val = qidx[0].firstChild
                if val.nodeType == val.TEXT_NODE:
                    self.qualityIndex = str(val.data)
                    if self.qualityIndex.upper() == "TRUE":
                        self.qualityIndex=True
                    elif self.qualityIndex.upper() == "FALSE":
                        pass
                    else:
                        raise sosException.SOSException(2,"qualityIndex can only be \'True\' or \'False\'")
            elif len(qidx)==0:
                pass
            else:
                err_txt = "\"qualityIndex\" is allowed with multiplicity 1 only"
                raise sosException.SOSException(1,err_txt)
def executePost(self, db=True):
    """Fast-insert observations posted in the lightweight text format.

    Body format (semicolon separated):
      regular series:   <assignedid>;<start ISO8601>;<step ISO8601 duration>;<v1,v2@v1,v2@...>
      irregular series: <assignedid>;<t1,v1,v2@t2,v1,v2@...>

    Inserts one event time per record plus one measure per observed
    property (quality index hard-coded to 100), widens the procedure's
    begin/end positions when needed and commits the transaction.

    Raises Exception on malformed body, unknown procedure or DB errors.
    """
    if self.procedurename is None:
        raise Exception(
            "POST action without procedure name not allowed")

    # Create data array
    data = self.waEnviron['wsgi_input'].split(";")

    # Assigned id always in the first position
    assignedid = data[0]

    if len(data) == 4:
        # regular time series: timestamps derived as start + step*idx
        mode = self.MODE_REGULAR
        start = iso.parse_datetime(data[1])
        step = iso.parse_duration(data[2])
        tmp_data = []
        data = data[3].split("@")
        for idx in range(0, len(data)):
            # prepend the computed ISO timestamp to each record
            tmp_data.append([
                (start + (step * idx)).isoformat()
            ] + data[idx].split(","))
        data = tmp_data
    elif len(data) == 2:
        # irregular time series: each record carries its own timestamp
        mode = self.MODE_IRREGULAR
        data = [i.split(",") for i in data[1].split("@")]
    else:
        raise Exception(
            "Body content wrongly formatted. Please read the docs.")

    # BUGFIX: initialize so the except clause can test it safely; the
    # original referenced 'conn' in the handler even when PgDB() itself
    # failed, masking the real error with a NameError
    conn = None
    try:
        conn = databaseManager.PgDB(
            self.serviceconf.connection['user'],
            self.serviceconf.connection['password'],
            self.serviceconf.connection['dbname'],
            self.serviceconf.connection['host'],
            self.serviceconf.connection['port']
        )
        sql = """
            SELECT procedures.id_prc, proc_obs.id_pro,
                proc_obs.constr_pro, procedures.stime_prc,
                procedures.etime_prc
            FROM %s.procedures, %s.proc_obs
            WHERE proc_obs.id_prc_fk = procedures.id_prc
        """ % (self.servicename, self.servicename)
        sql += """
            AND assignedid_prc = %s
            ORDER BY proc_obs.id_pro ASC;
        """
        rows = conn.select(sql, (assignedid,))
        if len(rows) == 0:
            raise Exception(
                "Procedure with aid %s not found." % assignedid)

        # check if procedure observations length is ok
        if len(rows) != (len(data[0])-1):
            raise Exception(
                "Array length missmatch with procedures "
                "observation number")

        insertEventTime = """
            INSERT INTO %s.event_time (id_prc_fk, time_eti)
        """ % (self.servicename)
        insertEventTime += """
            VALUES (%s, %s::TIMESTAMPTZ) RETURNING id_eti;
        """

        deleteEventTime = """
            DELETE FROM %s.event_time
        """ % (self.servicename)
        deleteEventTime += """
            WHERE id_prc_fk = %s
            AND time_eti = %s::TIMESTAMPTZ
        """

        insertMeasure = """
            INSERT INTO %s.measures(
                id_eti_fk, id_qi_fk, id_pro_fk, val_msr
            )
        """ % (self.servicename)
        insertMeasure += """
            VALUES (%s, 100, %s, %s);
        """

        updateBeginPosition = """
            UPDATE %s.procedures""" % (self.servicename)
        updateBeginPosition += """
            SET stime_prc=%s::TIMESTAMPTZ
            WHERE id_prc=%s
        """

        updateEndPosition = """
            UPDATE %s.procedures""" % (self.servicename)
        updateEndPosition += """
            SET etime_prc=%s::TIMESTAMPTZ
            WHERE id_prc=%s
        """

        id_prc = rows[0][0]
        bp = rows[0][3]   # current begin position (may be None/empty)
        bpu = False       # begin position needs update
        ep = rows[0][4]   # current end position (may be None/empty)
        epu = False       # end position needs update

        for observation in data:
            id_eti = conn.executeInTransaction(
                insertEventTime, (
                    id_prc, observation[0]))
            for idx in range(0, len(rows)):
                conn.executeInTransaction(
                    insertMeasure, (
                        int(id_eti[0][0]),   # id_eti
                        int(rows[idx][1]),   # id_pro
                        float(observation[(idx+1)])))
            # widen the procedure period to cover this observation
            if (bp is None) or (bp == '') or (
                    iso.parse_datetime(observation[0]) < bp):
                bp = iso.parse_datetime(observation[0])
                bpu = True
            if (ep is None) or (ep == '') or (
                    iso.parse_datetime(observation[0]) > ep):
                ep = iso.parse_datetime(observation[0])
                epu = True

        if bpu:
            conn.executeInTransaction(
                updateBeginPosition, (bp.isoformat(), id_prc))
        if epu:
            conn.executeInTransaction(
                updateEndPosition, (ep.isoformat(), id_prc))

        conn.commitTransaction()

        # self.setData(ret)
        self.setMessage("Thanks for data")

    except Exception as e:
        traceback.print_exc(file=sys.stderr)
        if conn is not None:
            conn.rollbackTransaction()
        raise Exception(
            "Error in fast insert (%s): %s" % (type(e), e))
def __init__(self, filter, pgdb):
    """Execute a SOS InsertObservation transactional request.

    Validates the procedure / featureOfInterest / samplingTime against
    the registered procedure, checks the provided data definitions and
    units of measure, applies quality-index constraints, inserts event
    times and measures (plus positions for mobile sensors), updates the
    procedure begin/end positions, optionally logs the transaction and
    notifies an MQTT broker.
    """
    # BUGFIX: 'com' was only assigned inside conditional branches, so the
    # final "if com is True" could raise NameError when no SQL statement
    # had been executed in the transaction
    com = False
    # get procedure information
    sql = """
        SELECT id_prc, name_prc, name_oty, name_foi,
            stime_prc, etime_prc
        FROM %s.procedures, %s.obs_type, %s.foi""" % (
        filter.sosConfig.schema,
        filter.sosConfig.schema,
        filter.sosConfig.schema)
    sql += """
        WHERE id_oty=id_oty_fk
        AND id_foi=id_foi_fk
        AND assignedid_prc=%s"""
    params = (filter.assignedSensorId, )
    try:
        prc = pgdb.select(sql, params)[0]
    except:
        raise sosException.SOSException(
            "InvalidParameterValue",
            "assignedSensorId",
            "assignedSensorId '%s' is not valid!" % (
                filter.assignedSensorId))

    # check requested procedure name exists
    if not prc["name_prc"] == filter.procedure:
        raise sosException.SOSException(
            "NoApplicableCode", None,
            "procedure '%s' not associated with provided "
            "assignedSensorId!" % (filter.procedure))

    # check requested foi name exists
    if not filter.foiName == prc["name_foi"]:
        raise sosException.SOSException(
            "NoApplicableCode", None,
            "featureOfInterest '%s' not associated with "
            "provided assignedSensorId" % (filter.foiName))

    # check provided samplingTime and upadate
    # begin/end time procedure if necessary
    # (samplingTime=period or istant of provided
    # observations defined by samplingTime filter)
    #=============================================
    # NOTE(review): stime/start/end are only defined when
    # filter.samplingTime is set, but they are referenced further down
    # unconditionally — confirm callers always provide samplingTime
    if filter.samplingTime:
        stime = filter.samplingTime.split("/")
        if len(stime) == 2:  # is a TimePeriod
            start = iso.parse_datetime(stime[0])
            end = iso.parse_datetime(stime[1])
        elif len(stime) == 1:  # is a TimeInstant
            start = end = iso.parse_datetime(stime[0])
        else:
            raise Exception(
                "filter samplingTime error! given '%s'" % (
                    filter.samplingTime))
        if start > end:
            raise Exception(
                "endPosition (%s) must be after beginPosition (%s)"
                % (end, start))

        # check samplingTime
        # > verify procedure begin/end exist
        if not (prc["stime_prc"].__class__.__name__ == "NoneType" and (
                prc["etime_prc"].__class__.__name__ == "NoneType")):
            # check eventTime interval and update begin/end position when
            # force flag is active
            if filter.forceInsert:
                # update begin time of procedure
                if start < prc["stime_prc"]:
                    sql = """
                        UPDATE %s.procedures""" % (filter.sosConfig.schema)
                    sql += """
                        SET stime_prc=%s::TIMESTAMPTZ
                        WHERE id_prc=%s"""
                    params = (stime[0], prc["id_prc"])
                    try:
                        a = pgdb.executeInTransaction(sql, params)
                        com = True
                    except:
                        raise Exception("SQL: %s" % (
                            pgdb.mogrify(sql, params)))
                # update end time of procedure
                if end > prc["etime_prc"]:
                    sql = """
                        UPDATE %s.procedures""" % (filter.sosConfig.schema)
                    sql += """
                        SET etime_prc=%s::TIMESTAMPTZ
                        WHERE id_prc=%s"""
                    params = (stime[1], prc["id_prc"])
                    try:
                        b = pgdb.executeInTransaction(sql, params)
                        com = True
                    except Exception as err:
                        raise Exception(
                            "SQL: %s - %s" % (
                                pgdb.mogrify(sql, params), err.pgerror))
            # check eventTime interval and update begin/end position when
            # force flag is off
            else:
                if filter.sosConfig.sequential:
                    sql = """
                        SELECT max(time_eti) as max_time_eti
                        FROM %s.event_time""" % (filter.sosConfig.schema)
                    sql += """
                        WHERE id_prc_fk = %s
                        GROUP BY id_prc_fk"""
                    params = (prc["id_prc"], )
                    try:
                        lastMsr = pgdb.select(
                            sql, params)[0]["max_time_eti"]
                    except:
                        lastMsr = None
                    if lastMsr is not None:
                        # verify begin observation is minor/equal then end
                        # time procedure and later then last observation
                        if not (end >= prc["etime_prc"] and (
                                start <= prc["etime_prc"]) and (
                                start >= lastMsr)):
                            raise Exception(
                                "begin observation (%s) must be between last "
                                "observation (%s) and end procedure (%s); end"
                                " observation (%s) must be after end "
                                "procedure (%s)" % (
                                    start, lastMsr, prc["etime_prc"],
                                    end, prc["etime_prc"]))
                    else:
                        # verify begin observation is minor/equal then end
                        # time procedure and later then first observation
                        if not (end >= prc["etime_prc"] and (
                                start <= prc["etime_prc"]) and (
                                start >= prc["stime_prc"])):
                            raise Exception(
                                "begin observation (%s) must be between start "
                                "procedure (%s) and end procedure (%s); end "
                                "observation (%s) must be after end procedure "
                                "(%s)" % (
                                    start, prc["stime_prc"],
                                    prc["etime_prc"], end,
                                    prc["etime_prc"]))
                #-- update end time of procedure
                sql = """
                    UPDATE %s.procedures""" % (filter.sosConfig.schema)
                sql += """
                    SET etime_prc=%s::TIMESTAMPTZ
                    WHERE id_prc=%s"""
                params = (str(stime[1]), int(prc["id_prc"]))
                try:
                    b = pgdb.executeInTransaction(sql, params)
                    com = True
                except Exception as err:
                    raise Exception(
                        "SQL: %s - %s" % (
                            pgdb.mogrify(sql, params), err.pgerror))
        else:
            # procedure has no begin/end position yet: initialize both
            sql = """
                UPDATE %s.procedures""" % (filter.sosConfig.schema)
            sql += """
                SET stime_prc=%s::TIMESTAMPTZ,
                etime_prc=%s::TIMESTAMPTZ
                WHERE id_prc=%s"""
            params = (str(stime[0]), str(stime[1]), int(prc["id_prc"]))
            try:
                b = pgdb.executeInTransaction(sql, params)
                com = True
            except:
                raise Exception("SQL: %s" % (pgdb.mogrify(sql, params)))

    # check data definition and uom (compare registered
    # observed properties with provided observations)
    #==================================================
    # get values for provided data: UOM, NAME, URN, ID
    #--------------------------------------------------
    sql = """
        SELECT id_pro, id_opr, def_opr, name_uom,
            constr_opr, constr_pro
        FROM %s.observed_properties, %s.proc_obs, %s.uoms""" % (
        filter.sosConfig.schema,
        filter.sosConfig.schema,
        filter.sosConfig.schema)
    sql += """
        WHERE id_uom_fk = id_uom
        AND id_opr_fk = id_opr
        AND id_prc_fk = %s"""
    params = (prc["id_prc"], )
    try:
        opr = pgdb.select(sql, params)
    except Exception as err:
        raise Exception("SQL2: %s -%s" % (
            pgdb.mogrify(sql, params), err.pgerror))

    # get list of available ObservedProperty, unit of measure, property
    # id for this procedure
    oprNames = []
    oprUoms = []
    oprIds = []  # to be removed ????
    proIds = []
    obsPropConstr = []
    procConstr = []
    '''
    Building a matrix:
        oprNames = ["urn:ogc:def:parameter:x-istsos:1.0:" +
            "meteo:air:temperature" , ...]
        oprUoms = ["mm" , ...]
        oprIds = [id_opr , ...]
        proIds = [id_pro , ...]
        obsPropConstr = [{
            "interval": ["-40", "50"],
            "role": "urn:ogc:def:classifiers:x-istsos:1.0:" +
                "qualityIndex:check:acceptable"} , ...]
        procConstr = [{
            "max": "100",
            "role": "urn:ogc:def:classifiers:x-istsos:1.0:" +
                "qualityIndex:check:reasonable"} , ...]
    '''
    for row in opr:
        oprNames.append(row["def_opr"])
        oprUoms.append(row["name_uom"])
        oprIds.append(row["id_opr"])
        proIds.append(row["id_pro"])
        if not row["constr_opr"] in [None, '']:
            obsPropConstr.append(json.loads(row["constr_opr"]))
        else:
            obsPropConstr.append(None)
        if not row["constr_pro"] in [None, '']:
            procConstr.append(json.loads(row["constr_pro"]))
        else:
            procConstr.append(None)

    # get ordered list of observed properties in data----
    dataKeys = [key for key in filter.data.keys()]

    # get ordered list of unit of measures provided with data-------
    dataUoms = []
    for key in filter.data.keys():
        if "uom" in filter.data[key].keys():
            dataUoms.append(filter.data[key]["uom"])
        else:
            dataUoms.append('None')

    # verify that all the properties observed by this procedure
    # are provided with the correct data definition and uom
    for i, opr in enumerate(oprNames):
        try:
            k = dataKeys.index(opr)
        except:
            raise sosException.SOSException(
                "NoApplicableCode", None,
                "parameter '%s' not observed by RegisteredSensor "
                "%s - %s" % (opr, oprNames, dataKeys))
        if not dataUoms[k] == oprUoms[i]:
            raise sosException.SOSException(
                "NoApplicableCode", None,
                "parameter '%s' not observed with provided unit of "
                "measure" % (opr))

    # verify if time and coordinates are passed as data parameters
    # and create the parameters list and parameters ID
    xobs = yobs = zobs = tpar = None
    pars = []  # Observed parameters
    parsId = []
    parsConsObs = []
    parsConsPro = []
    # urn of different parameters
    for i, dn in enumerate(dataKeys):
        if dn.split(":")[-1] in filter.sosConfig.parGeom["x"]:
            xobs = dataKeys[i]
        elif dn.split(":")[-1] in filter.sosConfig.parGeom["y"]:
            yobs = dataKeys[i]
        elif dn.split(":")[-1] in filter.sosConfig.parGeom["z"]:
            zobs = dataKeys[i]
        elif dn.find("iso8601") >= 0:
            tpar = dataKeys[i]
        else:
            if dn.split(":")[-1] != "qualityIndex":
                pars.append(dn)
                try:
                    parsId.append(proIds[oprNames.index(dn)])
                    parsConsObs.append(obsPropConstr[oprNames.index(dn)])
                    parsConsPro.append(procConstr[oprNames.index(dn)])
                except:
                    raise Exception(
                        "parameter %s not observed by this sensor "
                        "%s - %s" % (dn, pars, oprNames))

    # set default quality index if not provided
    for par in pars:
        try:
            dataKeys.index(par + ":qualityIndex")
        except:
            filter.data[par + ":qualityIndex"] = {
                "vals": [filter.sosConfig.default_qi] * len(
                    filter.data[par]["vals"])
            }

    # verify that mobile sensors provide coordinates as X,Y,Z
    # BUGFIX: the original tested "xobs is False" but the variables are
    # initialized to None, so the check could never fire; any missing
    # coordinate would crash later at filter.data[xobs]
    if (xobs is None or yobs is None or zobs is None) and (
            prc["name_oty"] == "insitu-mobile-point"):
        raise Exception("Mobile sensors require x, y, z parameters")

    # verify that time parameter is provided
    if not tpar:
        raise Exception(
            "parameter 'time:iso8601' is required for InsertObservation")

    # verify that eventime are in provided samplingTime
    if len(filter.data[tpar]["vals"]) > 0:
        maxDate = iso.parse_datetime(max(filter.data[tpar]["vals"]))
        minDate = iso.parse_datetime(min(filter.data[tpar]["vals"]))
        # BUGFIX: parenthesized — the original "not maxDate <= end and
        # minDate >= start" parsed as "(not maxDate <= end) and ..." and
        # never rejected data falling before the samplingTime period
        if not (maxDate <= end and minDate >= start):
            raise Exception(
                "provided data (min: %s, max:%s) are not included in "
                "provided <samplingTime> period (%s / %s) for "
                "procedure %s" % (
                    minDate.isoformat(), maxDate.isoformat(),
                    start.isoformat(), end.isoformat(),
                    prc["name_prc"]))

    # insert observation
    # delete existing observations if force flag is active
    if filter.forceInsert:
        sql = """
            DELETE FROM %s.event_time""" % (filter.sosConfig.schema)
        sql += """
            WHERE id_prc_fk = %s
            AND time_eti >= %s::TIMESTAMPTZ
            AND time_eti <= %s::TIMESTAMPTZ"""
        params = (prc["id_prc"], stime[0], stime[1])
        try:
            b = pgdb.executeInTransaction(sql, params)
            com = True
        except:
            raise Exception("SQL: %s" % (pgdb.mogrify(sql, params)))

    # CASE I: observations list is void
    if len(filter.data[tpar]["vals"]) == 0:
        self.assignedId = ""
        ids_eti = []
    # CASE II: observations list contains data
    elif len(filter.data[tpar]["vals"]) > 0:
        # insert event times
        ids_eti = []
        params = []
        sql = """
            INSERT INTO %s.event_time (id_prc_fk,time_eti)""" % (
            filter.sosConfig.schema)
        sql += """
            VALUES ( %s, %s::TIMESTAMPTZ) RETURNING id_eti"""
        for val in filter.data[tpar]["vals"]:
            try:
                ids_eti.append(
                    pgdb.executeInTransaction(
                        sql, (prc["id_prc"], val))[0]['id_eti'])
                com = True
            except Exception as e:
                raise Exception("Error inserting event times for %s: %s"
                                % (prc["name_prc"], e))
        for i, par in enumerate(pars):
            params = []
            ids_msr = []
            sql = """
                INSERT INTO %s.measures (
                    id_pro_fk, id_eti_fk, id_qi_fk, val_msr)""" % (
                filter.sosConfig.schema)
            sql += """
                VALUES (%s, %s, %s, %s) RETURNING id_msr"""
            pco = parsConsObs[i]
            pcp = parsConsPro[i]
            for ii, id_et in enumerate(ids_eti):
                if not filter.data[par]["vals"][ii] in [
                        'NULL', u'NULL', None, 'None', u'None',
                        filter.sosConfig.aggregate_nodata]:
                    # TODO: add an else statement to add the
                    # aggregate_nodata value OR delete the event time
                    pqi = int(float(
                        filter.data[par + ":qualityIndex"]["vals"][ii]))
                    # Constraint quality is done only if the quality index
                    # is equal to the default qi (RAW DATA)
                    if int(filter.sosConfig.default_qi) == pqi:
                        # quality check level I (gross error)
                        val = float(filter.data[par]["vals"][ii])
                        if filter.sosConfig.correct_qi is not None and (
                                pco is not None):
                            if 'max' in pco:
                                if val <= (float(pco['max'])):
                                    pqi = int(filter.sosConfig.correct_qi)
                            elif 'min' in pco:
                                if val >= (float(pco['min'])):
                                    pqi = int(filter.sosConfig.correct_qi)
                            elif 'interval' in pco:
                                if (float(pco['interval'][0])
                                        <= val <=
                                        float(pco['interval'][1])):
                                    pqi = int(filter.sosConfig.correct_qi)
                            elif 'valueList' in pco:
                                if val in [
                                        float(p) for p in (pco['valueList'])]:
                                    pqi = int(filter.sosConfig.correct_qi)
                        # quality check level II (statistical range)
                        if filter.sosConfig.stat_qi is not None and (
                                pcp is not None):
                            if 'max' in pcp:
                                if val <= float(pcp['max']):
                                    pqi = int(filter.sosConfig.stat_qi)
                            elif 'min' in pcp:
                                if val >= float(pcp['min']):
                                    pqi = int(filter.sosConfig.stat_qi)
                            elif 'interval' in pcp:
                                if (float(pcp['interval'][0])
                                        <= val <=
                                        float(pcp['interval'][1])):
                                    pqi = int(filter.sosConfig.stat_qi)
                            elif 'valueList' in pcp:
                                if val in [
                                        float(p) for p in pcp['valueList']]:
                                    pqi = int(filter.sosConfig.stat_qi)
                    params = (int(parsId[i]), int(id_et), pqi,
                              float(filter.data[par]["vals"][ii]))
                    try:
                        nid_msr = pgdb.executeInTransaction(sql, params)
                        ids_msr.append(str(nid_msr[0]['id_msr']))
                    except Exception as e:
                        com = False
                        raise e

        #--insert position values if required
        if prc["name_oty"] == "insitu-mobile-point":
            xparspl = xobs.split(":")
            epsg = xparspl[xparspl.index("EPSG") + 1]
            params = []
            sql = """
                INSERT INTO %s.positions (
                    id_qi_fk, id_eti_fk, geom_pos)""" % (
                filter.sosConfig.schema)
            sql += """
                VALUES (
                    %s, %s,
                    ST_Transform(
                        ST_SetSRID(ST_MakePoint(%s, %s, %s), %s), %s))"""
            for i, id_et in enumerate(ids_eti):
                params = (filter.sosConfig.default_qi, id_et,
                          filter.data[xobs]["vals"][i],
                          filter.data[yobs]["vals"][i],
                          filter.data[zobs]["vals"][i],
                          epsg, filter.sosConfig.istsosepsg)
                try:
                    ids_pos = pgdb.executeInTransaction(sql, params)
                    com = True
                except Exception as a:
                    com = False
                    raise Exception("%s\nSQL: %s" % (
                        a, pgdb.mogrify(sql, params)))

    # register assigned IDs of measures
    self.assignedId = "@".join([str(p) for p in ids_eti])

    # commit executed operations
    #Register the transactional operation in Log table
    if filter.sosConfig.transactional_log in ['True', 'true', 1]:
        sqlLog = """
            INSERT INTO %s.tran_log (
                operation_trl, procedure_trl,
                begin_trl, end_trl, count,
                stime_prc, etime_prc)""" % (filter.sosConfig.schema)
        sqlLog += """
            VALUES (
                'InsertObservation', %s,
                %s::TIMESTAMPTZ, %s::TIMESTAMPTZ, %s,
                %s::TIMESTAMPTZ, %s::TIMESTAMPTZ)"""
        params = (str(filter.procedure), start, end, len(ids_eti),
                  prc["stime_prc"], prc["etime_prc"])
        try:
            pgdb.executeInTransaction(sqlLog, params)
            com = True
        except:
            raise Exception("SQL: %s" % (pgdb.mogrify(sqlLog, params)))

    if com is True:
        pgdb.commitTransaction()

    # broadcasting to mqtt broker if configured
    if filter.sosConfig.mqtt["broker_url"] != '' and (
            filter.sosConfig.mqtt["broker_port"] != ''):
        from istmqttlib import PahoPublisher
        PahoPublisher({
            "broker_url": filter.sosConfig.mqtt["broker_url"],
            "broker_port": filter.sosConfig.mqtt["broker_port"],
            "broker_topic": "%s%s" % (
                filter.sosConfig.mqtt["broker_topic"], filter.procedure),
            "data": filter.dataArray
        }).start()
op = go1['observedProperty']['component'][1] print "Gathering info for %s from destination" % procedure go2 = sos2.getSOSProcedure(procedure) samp2 = sos2.extractSamplingFromGOJson(go2) """interval = timedelta(days=int(30)) begin = yearsago(1, samp1[1]) #samp1[0] if begin < samp1[0]: begin = samp1[0]""" begin = samp1[0] end = samp1[1] # Comment this if you want a full migration begin = isodate.parse_datetime('2015-04-29T07:30:00+00:00') end = isodate.parse_datetime('2015-04-29T08:10:00+00:00') istsos2csv.execute({ 'begin': begin.isoformat(), 'end': end.isoformat(), 'procedure': procedure, 'op': op, 'url': '%s/%s' % (url1, srv1), 'd': dirpath, 'user': auth[0], 'password': auth[1], 'noqi': True }) csv2istsos.execute({
def execute(args, logger=None):
    """Copy (and optionally aggregate) observations from a source istSOS
    instance to a destination istSOS instance for one procedure.

    Workflow (Python 2 script):
      1. Load the DescribeSensor of the procedure from source and destination.
      2. Fetch a GetObservation template from the destination.
      3. Determine the processing interval [start, stop] from the ``begin``/
         ``end`` args or from the procedures' sampling-time constraints.
      4. Loop in steps of ``interval`` (15 days max), GetObservation from the
         source and InsertObservation (ForceInsert) into the destination.

    :param args: dict of options. Required: 'procedure', 'surl', 'ssrv',
        'dsrv'. Optional: 'begin'/'end' (ISO strings or "*"), 'durl',
        'user'/'pwd' and per-service overrides 'suser'/'spwd'/'duser'/'dpwd'/
        'auser'/'apwd', 'aurl'/'asrv' (QI-extrapolation service), 'cpqi'
        (copy quality index), 'resolution'/'function'/'nodataValue'/'nodataQI'
        (aggregation), 'retro' (minutes of retroactive aggregation),
        'lm' (use first/last measure timestamps as samplingTime), 'v' (verbose).
    :param logger: optional object with a ``log(message)`` method; falls back
        to ``print`` when absent.
    :raises Exception: on any failed istSOS request, invalid constraint
        interval, invalid aggregation resolution, or failed insert.
    """

    def log(message):
        # Only emit when verbose ('v') was requested.
        if debug:
            if logger:
                logger.log(message)
            else:
                print message

    # SCRIPT CONFIGURATION
    # =========================================================================
    # Activate and print verbose information
    debug = args['v'] if args.has_key('v') else False

    # Procedure name
    procedure = args['procedure']

    # Begin date ("*" means: derive from destination/source constraints below)
    begin = args['begin'] if args.has_key('begin') else "*"

    # End date ("*" means: use the source procedure's end position)
    end = args['end'] if args.has_key('end') else "*"

    # Global User and password valid for all connections
    suser = duser = auser = args['user'] if args.has_key('user') else None
    spwd = dpwd = apwd = args['pwd'] if args.has_key('pwd') else None

    # Activate this will copy also the quality index from source to destination
    cpqi = args['cpqi'] if args.has_key('cpqi') else False

    # Aggregating function configuration
    resolution = args['resolution'] if 'resolution' in args else None
    function = args['function'] if 'function' in args else None
    nodataValue = args['nodataValue'] if 'nodataValue' in args else None
    nodataQI = args['nodataQI'] if 'nodataQI' in args else None

    # Retroactive aggregation (minutes to re-process before the last insert)
    retro = args['retro'] if 'retro' in args else 0

    # Force using last position as end position during insert sensor operation
    lm = args['lm'] if 'lm' in args else False

    # SOURCE istSOS CONFIG ==================================
    # Location
    surl = args['surl']
    # Service instance name
    ssrv = args['ssrv']
    # User and password if given this will be used for source istSOS
    if args.has_key('suser'):
        suser = args['suser']
    if args.has_key('spwd'):
        spwd = args['spwd']

    # DESTINATION istSOS CONFIG =============================
    # Location (if not given, same as source will be used)
    durl = args['durl'] if (args.has_key('durl')
                            and args['durl'] is not None) else surl
    # Service instance name
    dsrv = args['dsrv']
    # User and password if given this will be used for destination istSOS
    if args.has_key('duser'):
        duser = args['duser']
    if args.has_key('dpwd'):
        dpwd = args['dpwd']

    # ALTERNATIVE istSOS SERVICE FOR QI EXTRAPOLATION =======
    # Location (if not given, same as source will be used)
    aurl = args['aurl'] if (args.has_key('aurl')
                            and args['aurl'] is not None) else None
    # Service instance name
    asrv = args['asrv'] if (args.has_key('asrv')
                            and args['asrv'] is not None) else None
    # User and password if given this will be used for extrapolation QI istSOS
    if args.has_key('auser'):
        auser = args['auser']
    if args.has_key('apwd'):
        apwd = args['apwd']

    # PROCESSING STARTS HERE ==================================================
    log("\nistSOS > 2 > istSOS STARTED:")
    log("==============================\n")

    #req = requests.session()
    req = requests

    # Load procedure description
    log("1. Loading procedure description: %s" % procedure)

    # Loading describe sensor from source =====================================
    res = req.get("%s/wa/istsos/services/%s/procedures/%s" % (surl, ssrv,
                                                              procedure),
                  auth=(suser, spwd), verify=False)
    sdata = res.json()
    if sdata['success'] == False:
        raise Exception(
            "Description of procedure %s can not be loaded from source service: %s"
            % (procedure, sdata['message']))
    else:
        log(" > DS Source Ok.")

    # Loading describe sensor from destination ================================
    res = req.get("%s/wa/istsos/services/%s/procedures/%s" % (durl, dsrv,
                                                              procedure),
                  auth=(duser, dpwd), verify=False)
    ddata = res.json()
    if ddata['success'] == False:
        raise Exception(
            "Description of procedure %s can not be loaded from destination service: %s"
            % (procedure, ddata['message']))
    else:
        log(" > DS Destination Ok.")

    # Load of a getobservation template from destination ======================
    # NOTE(review): qualityIndex appears both in the URL (hard-coded False)
    # and in params (cpqi) — requests will append the params value, so the
    # query string carries both; confirm which one the server honours.
    res = req.get(
        "%s/wa/istsos/services/%s/operations/getobservation/offerings/%s/procedures/%s/observedproperties/:/eventtime/last?qualityIndex=False"
        % (durl, dsrv, 'temporary', procedure),
        params={"qualityIndex": cpqi},
        auth=(duser, dpwd),
        verify=False)
    dtemplate = res.json()
    if dtemplate['success'] == False:
        raise Exception(
            "Observation template of procedure %s can not be loaded: %s"
            % (procedure, dtemplate['message']))
    else:
        # Keep the template structure but empty its values; it is re-filled
        # with each source batch inside the insertion loop below.
        dtemplate = dtemplate['data'][0]
        dtemplate['AssignedSensorId'] = ddata['data']['assignedSensorId']
        dtemplate['result']['DataArray']['values'] = []
        log(" > GO Template Ok.")

    # Loading describe sensor from QI EXTRAPOLATION service ===================
    if aurl and asrv:
        res = req.get("%s/wa/istsos/services/%s/procedures/%s" % (aurl, asrv,
                                                                  procedure),
                      auth=(auser, apwd), verify=False)
        adata = res.json()
        if adata['success'] == False:
            raise Exception(
                "Description of procedure %s can not be loaded from destination service: %s"
                % (procedure, adata['message']))
        else:
            log(" > DS QI Extrapolation Ok.")

    log("\n2. Identifying processing interval:")

    # Check if mesaures are present in source procedure, by identifying the
    # sampling time constraint located always in the first position of the
    # outputs, if it is empty an exception is thrown
    if (not 'constraint' in sdata['data']['outputs'][0]
            or not 'interval' in sdata['data']['outputs'][0]['constraint']):
        raise Exception(
            "There is no data in the source procedure to be copied to the destination procedure."
        )
    else:
        # Check if the contraint interval contains a valid ISO date begin position
        try:
            iso.parse_datetime(
                sdata['data']['outputs'][0]['constraint']['interval'][0])
        except Exception:
            raise Exception(
                "The date in the source procedure constraint interval (%s) is not valid."
                % sdata['data']['outputs'][0]['constraint']['interval'][0])
        # Check if the contraint interval contains a valid ISO date end position
        try:
            iso.parse_datetime(
                sdata['data']['outputs'][0]['constraint']['interval'][1])
        except Exception:
            raise Exception(
                "The date in the source procedure constraint interval (%s) is not valid."
                % sdata['data']['outputs'][0]['constraint']['interval'][1])
        log(" > Source interval is valid")

    # Looking for start (IO beginPOsition) instant processing
    # If the default value (*) is used, then the endPosition of
    # the "destination" service procedure will be used. But if the destination
    # procedure is empty , then the begin position of the source will be used
    start = None
    stop = None
    if begin == "*":
        if ('constraint' in ddata['data']['outputs'][0]
                and 'interval' in ddata['data']['outputs'][0]['constraint']):
            try:
                if function and resolution:
                    # getting last inserted observations of "destination" service
                    log("Aggregation requested: getting last inserted observations of \"destination\" service"
                        )
                    params = {
                        "request": "GetObservation",
                        "service": "SOS",
                        "version": "1.0.0",
                        "observedProperty": ':',
                        "procedure": procedure,
                        "responseFormat": "application/json",
                        "offering": 'temporary'
                    }
                    res = req.get("%s/%s" % (durl, dsrv),
                                  params=params,
                                  auth=(duser, dpwd),
                                  verify=False)
                    obs = res.json()
                    # Timestamp of the last observation already in destination
                    start = iso.parse_datetime(
                        obs['ObservationCollection']['member'][0]['result']
                        ['DataArray']['values'][0][0])
                else:
                    # The endPosition of the destination will be used as Start/IO BeginPosition
                    start = iso.parse_datetime(ddata['data']['outputs'][0]
                                               ['constraint']['interval'][1])
                if retro > 0:  # Retroactive aggregation
                    log("Retroactive aggregation active.")
                    # Move start back by `retro` minutes, but never before the
                    # destination's begin position.
                    if start - timedelta(minutes=retro) > iso.parse_datetime(
                            ddata['data']['outputs'][0]['constraint']
                            ['interval'][0]):
                        start = start - timedelta(minutes=retro)
                    else:
                        start = iso.parse_datetime(
                            ddata['data']['outputs'][0]['constraint']
                            ['interval'][0])
                    log("Start: %s" % start)
            except Exception as ee:
                print "Error setting start date for proc %s: %s" % (procedure, ee)
                raise Exception(
                    "The date in the destination procedure %s constraint interval (%s) is not valid."
                    % (procedure,
                       ddata['data']['outputs'][0]['constraint']['interval'][0]))
        else:
            # The beginPosition of the source will be used as Start/IO BeginPosition
            start = iso.parse_datetime(
                sdata['data']['outputs'][0]['constraint']['interval'][0])
    else:
        start = iso.parse_datetime(begin)

    if end == "*":
        # The endPosition of the source will be used as Stop/IO EndPosition
        stop = iso.parse_datetime(
            sdata['data']['outputs'][0]['constraint']['interval'][1])
    else:
        stop = iso.parse_datetime(end)

    log(" > Destination interval is valid")
    log(" > Start processing: %s" % start)
    log(" > Stop processing: %s" % stop)
    if retro > 0:
        log(" > Retro aggregation: %s minutes" % retro)

    # Insertion loop step timedelta (shrunk when the remaining span is smaller)
    interval = timedelta(days=15)
    if start < stop and start + interval > stop:
        interval = stop - start

    log(" > Insertion loop step: %s" % interval)

    if function and resolution:
        # Validate the aggregation resolution as an ISO 8601 duration.
        try:
            iso.duration_isoformat(resolution)
        except:
            raise Exception(
                "The resolution (%s) to apply in the aggregating function is not valid."
                % resolution)
        log(" > Function(Resolution) : %s(%s)" % (function, resolution))

    # Main copy loop: one GetObservation + one InsertObservation per step.
    while start + interval <= stop:
        nextStart = start + interval
        params = {
            "request": "GetObservation",
            "service": "SOS",
            "version": "1.0.0",
            "observedProperty": ':',
            "procedure": procedure,
            "qualityIndex": str(cpqi),
            "responseFormat": "application/json",
            "offering": 'temporary',
            "eventTime": "%s/%s" % (start.isoformat(), nextStart.isoformat())
        }
        if function and resolution:
            params['aggregateFunction'] = function
            params['aggregateInterval'] = resolution
            if nodataValue != None:
                params['aggregateNodata'] = nodataValue
            if nodataQI != None:
                params['aggregateNodataQi'] = nodataQI
        res = req.get("%s/%s" % (surl, ssrv),
                      params=params,
                      auth=(suser, spwd),
                      verify=False)

        # Check if an Exception occured
        if 'ExceptionReport' in res.content:
            raise Exception(res.content)

        smeasures = res.json()['ObservationCollection']['member'][0]
        #pp.pprint(smeasures)
        log(" > %s measures from: %s to: %s" %
            (len(smeasures['result']['DataArray']['values']),
             start.isoformat(), nextStart.isoformat()))

        dtemplate["samplingTime"] = {}
        if lm and len(smeasures['result']['DataArray']['values']) > 0:
            # Use first/last measure timestamps as the sampling period.
            dtemplate["samplingTime"]["beginPosition"] = smeasures['result'][
                'DataArray']['values'][0][0]
            dtemplate["samplingTime"]["endPosition"] = smeasures['result'][
                'DataArray']['values'][-1][0]
        else:
            dtemplate["samplingTime"]["beginPosition"] = start.isoformat()
            dtemplate["samplingTime"]["endPosition"] = nextStart.isoformat()

        dtemplate['result']['DataArray']['values'] = smeasures['result'][
            'DataArray']['values']
        dtemplate['result']['DataArray']['field'] = smeasures['result'][
            'DataArray']['field']

        # POST data to WA
        res = req.post(
            "%s/wa/istsos/services/%s/operations/insertobservation" % (durl,
                                                                       dsrv),
            auth=(duser, dpwd),
            verify=False,
            data=json.dumps({
                "ForceInsert": "true",
                "AssignedSensorId": ddata['data']['assignedSensorId'],
                "Observation": dtemplate
            }))

        # read response
        log(" > Insert observation success: %s" % res.json()['success'])
        #print res.json()
        if not res.json()['success']:
            raise Exception('Error inserting observation: %s'
                            % res.json()['message'])

        start = nextStart
        # Shrink the step so the final iteration lands exactly on `stop`.
        if start < stop and start + interval > stop:
            interval = stop - start
def __init__(self,filter,pgdb):
    """InsertObservation operation: validate the request against the
    registered procedure and persist the provided observations.

    Steps (all SQL runs in one transaction, committed at the end when
    ``com`` is True):
      1. Resolve the procedure from ``filter.assignedSensorId`` and check
         procedure name / featureOfInterest consistency.
      2. Parse ``filter.samplingTime`` (period or instant) and widen the
         procedure's stime_prc/etime_prc as needed (rules differ when
         ``filter.forceInsert`` is active).
      3. Check that every observed property registered for the procedure is
         provided with the matching unit of measure.
      4. Classify data columns into coordinates (x/y/z), time (iso8601) and
         observed parameters; default quality indexes are filled in.
      5. Insert event times, measures (with level I/II quality checks based
         on property/procedure constraints) and, for mobile procedures,
         positions.

    :param filter: parsed InsertObservation request (procedure, foiName,
        samplingTime, data dict, forceInsert flag, sosConfig).
    :param pgdb: PostgreSQL service-database wrapper (select,
        executeInTransaction, mogrify, commitTransaction).
    :raises sosException.SOSException: on invalid assignedSensorId,
        procedure/foi mismatch, or unknown/mismatched observed property.
    :raises Exception: on time-consistency violations or SQL failures.

    NOTE(review): ``com`` is only assigned inside the branches that execute
    SQL; if none of them runs, the final ``if com==True`` would raise
    NameError — presumably at least one branch always runs; confirm.
    """
    #--get procedure information
    #============================================
    sql = "SELECT id_prc, name_prc, name_oty, name_foi, stime_prc, etime_prc from %s.procedures,%s.obs_type,%s.foi" %(filter.sosConfig.schema,filter.sosConfig.schema,filter.sosConfig.schema)
    sql += " WHERE id_oty=id_oty_fk AND id_foi=id_foi_fk AND assignedid_prc=%s"
    params = (filter.assignedSensorId,)
    try:
        prc = pgdb.select(sql,params)[0]
    except:
        raise sosException.SOSException("InvalidParameterValue","assignedSensorId","assignedSensorId '%s' is not valid!" %(filter.assignedSensorId))

    #--check requested procedure name exists
    #=============================================
    if not prc["name_prc"]==filter.procedure:
        raise sosException.SOSException("NoApplicableCode",None,"procedure '%s' not associated with provided assignedSensorId!" %(filter.procedure))

    #--check requested foi name exists
    #=============================================
    if not filter.foiName == prc["name_foi"]:
        raise sosException.SOSException("NoApplicableCode",None,"featureOfInterest '%s' not associated with provided assignedSensorId" %(filter.foiName))

    #--check provided samplingTime and upadate
    # begin/end time procedure if necessary
    # (samplingTime=period or istant of provided
    # observations defined by samplingTime filter)
    #=============================================
    if filter.samplingTime:
        stime = filter.samplingTime.split("/")
        #
        if len(stime)==2: # is a TimePeriod
            start = iso.parse_datetime(stime[0])
            end = iso.parse_datetime(stime[1])
        elif len(stime)==1: # is a TimeInstant
            start = end = iso.parse_datetime(stime[0])
        else:
            raise Exception(" filter samplingTime error! given '%s'" %(filter.samplingTime))
        if start>end:
            raise Exception(" endPosition (%s) must be after beginPosition (%s)" %(end,start))

        #-- check samplingTime
        #==========================================
        # verify procedure begin/end exist
        #-----------------------------------
        if not (prc["stime_prc"].__class__.__name__ == "NoneType" and prc["etime_prc"].__class__.__name__ == "NoneType"):
            # check eventTime interval and update begin/end position when force flas is active
            #----------------------------------------------------------------------------------
            if filter.forceInsert:
                #-- verify interval limits
                '''if not (end>=prc["stime_prc"] and start<=prc["etime_prc"]):
                    raise sosException.SOSException(3,"observation eventTime (%s-%s) must overlap procedure samplingTime (%s-%s)" %(start,end,prc["stime_prc"],prc["etime_prc"]))
                else:'''
                #-- update begin time of procedure
                if start<prc["stime_prc"]:
                    sql = "UPDATE %s.procedures" %(filter.sosConfig.schema)
                    sql += " SET stime_prc=%s::TIMESTAMPTZ WHERE id_prc=%s"
                    params = (stime[0],prc["id_prc"])
                    try:
                        a = pgdb.executeInTransaction(sql,params)
                        com=True
                    except:
                        raise Exception("SQL: %s" %(pgdb.mogrify(sql,params)))
                #-- update end time of procedure
                if end>prc["etime_prc"]:
                    sql = "UPDATE %s.procedures" %(filter.sosConfig.schema)
                    sql += " SET etime_prc=%s::TIMESTAMPTZ WHERE id_prc=%s"
                    params = (stime[1],prc["id_prc"])
                    try:
                        b = pgdb.executeInTransaction(sql,params)
                        com=True
                    except Exception as err:
                        raise Exception("SQL: %s - %s" %(pgdb.mogrify(sql,params), err.pgerror))
            # check eventTime interval and update begin/end position when force flag is off
            #----------------------------------------------------------------------------------
            else:
                # Last registered event time for this procedure (None if empty)
                sql = "SELECT max(time_eti) as max_time_eti from %s.event_time" %(filter.sosConfig.schema)
                sql += " WHERE id_prc_fk = %s group by id_prc_fk"
                params = (prc["id_prc"],)
                try:
                    lastMsr = pgdb.select(sql,params)[0]["max_time_eti"]
                except:
                    lastMsr = None
                if lastMsr!=None:
                    #-- verify begin observation is minor/equal then end time procedure and later then last observation
                    if not (end>=prc["etime_prc"] and start<=prc["etime_prc"] and start>=lastMsr):
                        raise Exception("begin observation (%s) must be between last observation (%s) and end procedure (%s); end observation (%s) must be after end procedure (%s)" %(start,lastMsr,prc["etime_prc"],end,prc["etime_prc"]))
                else:
                    #-- verify begin observation is minor/equal then end time procedure and later then first observation
                    if not (end>=prc["etime_prc"] and start<=prc["etime_prc"] and start>=prc["stime_prc"]) :
                        raise Exception("begin observation (%s) must be between start procedure (%s) and end procedure (%s); end observation (%s) must be after end procedure (%s)" %(start,prc["stime_prc"],prc["etime_prc"],end,prc["etime_prc"]))
                #-- update end time of procedure
                sql = "UPDATE %s.procedures" %(filter.sosConfig.schema)
                sql += " SET etime_prc=%s::TIMESTAMPTZ WHERE id_prc=%s"
                params = (str(stime[1]),int(prc["id_prc"]))
                try:
                    b = pgdb.executeInTransaction(sql,params)
                    com=True
                except Exception as err:
                    raise Exception("SQL: %s - %s" %(pgdb.mogrify(sql,params), err.pgerror))
        else:
            # Procedure has no sampling period yet: initialize both bounds.
            sql = "UPDATE %s.procedures" %(filter.sosConfig.schema)
            sql += " SET stime_prc=%s::TIMESTAMPTZ, etime_prc=%s::TIMESTAMPTZ WHERE id_prc=%s"
            params = (str(stime[0]),str(stime[1]),int(prc["id_prc"]))
            try:
                b = pgdb.executeInTransaction(sql,params)
                com=True
            except:
                raise Exception("SQL: %s" %(pgdb.mogrify(sql,params)))

    # check data definition and uom (compare registered
    # observed properties with provided observations)
    #==================================================
    # get values for provided data: UOM, NAME, URN, ID
    #--------------------------------------------------
    sql = "SELECT id_pro, id_opr, def_opr, name_uom, constr_opr, constr_pro FROM %s.observed_properties, %s.proc_obs, %s.uoms" %(filter.sosConfig.schema,filter.sosConfig.schema,filter.sosConfig.schema)
    sql += " WHERE id_uom_fk=id_uom AND id_opr_fk=id_opr AND id_prc_fk=%s"
    params = (prc["id_prc"],)
    try:
        opr = pgdb.select(sql,params)
    except Exception as err:
        raise Exception("SQL2: %s -%s" %(pgdb.mogrify(sql,params), err.pgerror))

    #---- get list of available ObservedProperty, unit of measure, property id for this procedure -----
    oprNames=[]
    oprUoms=[]
    oprIds=[] #to be removed ????
    proIds=[]
    obsPropConstr=[]
    procConstr=[]
    # Building a matrix
    '''
    oprNames= ["urn:ogc:def:parameter:x-istsos:1.0:meteo:air:temperature" , ...]
    oprUoms= ["mm" , ...]
    oprIds= [id_opr , ...]
    proIds= [id_pro , ...]
    obsPropConstr= [{"interval": ["-40", "50"], "role": "urn:ogc:def:classifiers:x-istsos:1.0:qualityIndex:check:acceptable"} , ...]
    procConstr= [{"max": "100", "role": "urn:ogc:def:classifiers:x-istsos:1.0:qualityIndex:check:reasonable"} , ...]
    '''
    for row in opr:
        oprNames.append(row["def_opr"])
        oprUoms.append(row["name_uom"])
        oprIds.append(row["id_opr"])
        proIds.append(row["id_pro"])
        if not row["constr_opr"] in [None,'']:
            obsPropConstr.append(json.loads(row["constr_opr"]))
        else:
            obsPropConstr.append(None)
        if not row["constr_pro"] in [None,'']:
            procConstr.append(json.loads(row["constr_pro"]))
        else:
            procConstr.append(None)

    #---- get ordered list of observed properties in data----
    dataKeys = [ key for key in filter.data.keys() ]

    #----- get ordered list of unit of measures provided with data-------
    dataUoms = []
    for key in filter.data.keys():
        if "uom" in filter.data[key].keys():
            dataUoms.append(filter.data[key]["uom"])
        else:
            dataUoms.append('None')

    #------------------------------------------------------------------
    # verify that all the properties observed by this procedure
    # are provided with the correct data definition and uom
    #------------------------------------------------------------------
    for i,opr in enumerate(oprNames):
        try:
            k = dataKeys.index(opr)
        except:
            raise sosException.SOSException("NoApplicableCode",None,"parameter '%s' not observed by RegisteredSensor %s - %s" %(opr,oprNames,dataKeys))
        #if not str(dataUoms[k])==str(oprUoms[i]):
        if not dataUoms[k]==oprUoms[i]:
            raise sosException.SOSException("NoApplicableCode",None,"parameter '%s' not observed with provided unit of measure" %(opr))

    #---------------------------------------------------------------
    # verify if time and coordinates are passed as data parameters
    # and create the parameters list and parameters ID
    #--------------------------------------------------------------
    xobs=None
    yobs=None
    zobs=None
    tpar=None
    pars=[] # Observed parameters
    parsId=[]
    parsConsObs=[]
    parsConsPro=[]
    # urn of different parameters
    for i, dn in enumerate(dataKeys):
        # Classify each column by the last token of its URN definition.
        if dn.split(":")[-1] in filter.sosConfig.parGeom["x"]:
            xobs = dataKeys[i]
        elif dn.split(":")[-1] in filter.sosConfig.parGeom["y"]:
            yobs = dataKeys[i]
        elif dn.split(":")[-1] in filter.sosConfig.parGeom["z"]:
            zobs = dataKeys[i]
        elif dn.find("iso8601")>=0:
            tpar = dataKeys[i]
        else:
            if dn.split(":")[-1] != "qualityIndex":
                pars.append(dn)
                try:
                    parsId.append(proIds[oprNames.index(dn)])
                    parsConsObs.append(obsPropConstr[oprNames.index(dn)])
                    parsConsPro.append(procConstr[oprNames.index(dn)])
                except:
                    raise Exception("parameter %s not observed by this sensor %s - %s" %(dn,pars,oprNames))

    #----------------------------------------------------------------------------------
    # set default quality index if not provided
    #----------------------------------------------------------------------------------
    for par in pars:
        try:
            dataKeys.index(par+":qualityIndex")
        except:
            filter.data[par+":qualityIndex"]={"vals":[filter.sosConfig.default_qi]*len(filter.data[par]["vals"])}

    #---------------------------------------------------------------
    # verify that mobile sensors provide coordinates as X,Y,Z
    #---------------------------------------------------------------
    # NOTE(review): xobs/yobs/zobs are initialized to None, never False,
    # so this condition can never be True as written — confirm intent.
    if (xobs==False and yobs==False and zobs==False) and prc["name_oty"] == "insitu-mobile-point":
        raise Exception("Mobile sensors require x,y,z parameters")

    #---------------------------------------------------------------
    # verify that time parameter is provided
    #---------------------------------------------------------------
    if not tpar:
        raise Exception("parameter 'time:iso8601' is required for InsertObservation")

    #---------------------------------------------------------------
    # verify that eventime are in provided samplingTime
    #---------------------------------------------------------------
    if len(filter.data[tpar]["vals"])>0:
        maxDate = iso.parse_datetime(max(filter.data[tpar]["vals"]))
        minDate = iso.parse_datetime(min(filter.data[tpar]["vals"]))
        # NOTE(review): operator precedence makes this
        # (not maxDate <= end) and (minDate >= start) — probably meant
        # not (maxDate <= end and minDate >= start); confirm before changing.
        if not maxDate <= end and minDate >= start:
            raise Exception("provided data (min: %s, max:%s) are not included in provided <samplingTime> period (%s / %s) for procedure %s" % (minDate.isoformat(), maxDate.isoformat(), start.isoformat(), end.isoformat(), prc["name_prc"]))

    #======================
    #-- insert observation
    #======================

    # delete existing observations if force flag is active
    #------------------------------------------------------
    if filter.forceInsert:
        sql = "DELETE FROM %s.event_time" %(filter.sosConfig.schema)
        sql += " WHERE id_prc_fk=%s AND time_eti>=%s::TIMESTAMPTZ AND time_eti<=%s::TIMESTAMPTZ"
        params = (prc["id_prc"],stime[0],stime[1])
        try:
            b = pgdb.executeInTransaction(sql,params)
            com=True
        except:
            raise Exception("SQL: %s" %(pgdb.mogrify(sql,params)))

    #----------------------------------------
    # CASE I: observations list is void
    #----------------------------------------
    if len(filter.data[tpar]["vals"])==0:
        self.assignedId = ""
        ids_eti = []

    #----------------------------------------
    # CASE I: observations list contains data
    #----------------------------------------
    elif len(filter.data[tpar]["vals"])>0:
        #--------------------
        # insert event times
        #--------------------
        ids_eti = []
        params = []
        sql = "INSERT INTO %s.event_time (id_prc_fk,time_eti)" %(filter.sosConfig.schema)
        sql += " VALUES (%s,%s::TIMESTAMPTZ) RETURNING id_eti"
        for val in filter.data[tpar]["vals"]:
            try:
                ids_eti.append(pgdb.executeInTransaction(sql,(prc["id_prc"],val))[0]['id_eti'])
                com = True
            except Exception as e:
                raise Exception("Error inserting event times for %s: %s" % (prc["name_prc"], str(e)) )

        # One INSERT per (parameter, event time) pair, skipping no-data values.
        for i, par in enumerate(pars):
            params = []
            ids_msr = []
            sql = "INSERT INTO %s.measures (id_pro_fk, id_eti_fk,id_qi_fk,val_msr) VALUES" % (filter.sosConfig.schema)
            sql += " (%s,%s,%s,%s) RETURNING id_msr"
            #hasvalues = False
            for ii,id_et in enumerate(ids_eti):
                if not filter.data[par]["vals"][ii] in ['NULL',u'NULL',None,-999,"-999",u"-999",filter.sosConfig.aggregate_nodata]:
                    #TODO: add a else statement to add the aggregate_nodata value OR delete the event time
                    #if not filter.data[par]["vals"][ii] in ['NULL',u'NULL',None]:
                    pqi = int(filter.data[par+":qualityIndex"]["vals"][ii])
                    # Constraint quality is done only if the quality index is equal to the default qi (RAW DATA)
                    if int(filter.sosConfig.default_qi) == pqi:
                        # quality check level I (gross error)
                        #------------------------------------
                        if filter.sosConfig.correct_qi != None and parsConsObs[i] != None:
                            if 'max' in parsConsObs[i]:
                                if float(filter.data[par]["vals"][ii]) <= float(parsConsObs[i]['max']):
                                    pqi = int(filter.sosConfig.correct_qi)
                            elif 'min' in parsConsObs[i]:
                                if float(filter.data[par]["vals"][ii]) >= float(parsConsObs[i]['min']):
                                    pqi = int(filter.sosConfig.correct_qi)
                            elif 'interval' in parsConsObs[i]:
                                if float(parsConsObs[i]['interval'][0]) <= float(filter.data[par]["vals"][ii]) <= float(parsConsObs[i]['interval'][1]):
                                    pqi = int(filter.sosConfig.correct_qi)
                            elif 'valueList' in parsConsObs[i]:
                                if float(filter.data[par]["vals"][ii]) in [float(p) for p in parsConsObs[i]['valueList']]:
                                    pqi = int(filter.sosConfig.correct_qi)
                        # quality check level II (statistical range)
                        #-------------------------------------------
                        if filter.sosConfig.stat_qi != None and parsConsPro[i] != None:
                            if 'max' in parsConsPro[i]:
                                if float(filter.data[par]["vals"][ii]) <= float(parsConsPro[i]['max']):
                                    pqi = int(filter.sosConfig.stat_qi)
                            elif 'min' in parsConsPro[i]:
                                if float(filter.data[par]["vals"][ii]) >= float(parsConsPro[i]['min']):
                                    pqi = int(filter.sosConfig.stat_qi)
                            elif 'interval' in parsConsPro[i]:
                                if float(parsConsPro[i]['interval'][0]) <= float(filter.data[par]["vals"][ii]) <= float(parsConsPro[i]['interval'][1]):
                                    pqi = int(filter.sosConfig.stat_qi)
                            elif 'valueList' in parsConsPro[i]:
                                if float(filter.data[par]["vals"][ii]) in [float(p) for p in parsConsPro[i]['valueList']]:
                                    pqi = int(filter.sosConfig.stat_qi)
                    params = (int(parsId[i]),int(id_et),pqi,float(filter.data[par]["vals"][ii]))
                    try:
                        nid_msr = pgdb.executeInTransaction(sql,params)
                        ids_msr.append(str(nid_msr[0]['id_msr']))
                    except Exception as e:
                        com=False
                        raise e
                        # NOTE(review): the statement below is unreachable
                        # (follows `raise e`) and is also broken — the `%`
                        # is applied to the result of the raise statement,
                        # not to the string. Leftover debug code; remove it.
                        raise Exception("L: %s - %s - %s - %s") %(int(parsId[i]),int(id_et),pqi,float(filter.data[par]["vals"][ii]))

        #-------------------------------------
        #--insert position values if required
        #-------------------------------------
        if prc["name_oty"] == "insitu-mobile-point":
            # EPSG code is embedded in the x-coordinate URN after "EPSG".
            xparspl = xobs.split(":")
            epsg = xparspl[xparspl.index("EPSG")+1]
            params = []
            sql = "INSERT INTO %s.positions (id_qi_fk, id_eti_fk,geom_pos) VALUES" %(filter.sosConfig.schema)
            sql += "(%s,%s,ST_Transform(ST_SetSRID(ST_MakePoint(%s, %s, %s), %s), %s))"
            for i,id_et in enumerate(ids_eti):
                params = (filter.sosConfig.default_qi,id_et,filter.data[xobs]["vals"][i],filter.data[yobs]["vals"][i],filter.data[zobs]["vals"][i],epsg,filter.sosConfig.istsosepsg)
                try:
                    ids_pos = pgdb.executeInTransaction(sql,params)
                    com=True
                except Exception as a:
                    com=False
                    raise Exception("%s\nSQL: %s" %(a,pgdb.mogrify(sql,params)))

    # register assigned IDs of measures
    self.assignedId = "@".join([str(p) for p in ids_eti])

    # commit executed operations
    #Register the transactional operation in Log table
    if filter.sosConfig.transactional_log in ['True','true',1]:
        sqlLog = "INSERT INTO %s.tran_log" %(filter.sosConfig.schema)
        sqlLog += " (operation_trl,procedure_trl,begin_trl,end_trl,count,stime_prc,etime_prc)"
        sqlLog += " VALUES ('InsertObservation', %s, %s::TIMESTAMPTZ, %s::TIMESTAMPTZ, %s, %s::TIMESTAMPTZ , %s::TIMESTAMPTZ)"
        params = (str(filter.procedure),start,end,len(ids_eti),prc["stime_prc"],prc["etime_prc"])
        try:
            pgdb.executeInTransaction(sqlLog,params)
            com=True
        except:
            raise Exception("SQL: %s" %(pgdb.mogrify(sqlLog,params)))
    if com==True:
        pgdb.commitTransaction()
go1 = sos1.getSOSProcedure(procedure) if ' component' in go1['observedProperty']: op = go1['observedProperty'][' component'][1] else: op = go1['observedProperty']['component'][1] '''print "Gathering info for %s from destination" % procedure go2 = sos2.getSOSProcedure(procedure)''' for line in lines: if procedure in ['Q_BOL_PTC', 'Q_CFER_ARO', 'Q_LAV_MEN', 'Q_MAG_VIS', 'Q_MOR_GIU', 'Q_RMUL_COM', 'Q_ROV_CVM2', 'Q_SBIB_PON', 'Q_TICTO_CHIR', 'Q_TRA_ARB_FFS1', 'Q_VED_ISO', 'Q_VVED_AGN', 'P_PON', 'Q_BON_QUA', 'Q_CUC_POR', 'Q_LAV_RSV', 'Q_MAR_MAR', 'Q_MUZ_MUZ', 'Q_RMUL_MAR', 'Q_ROV_CVM3', 'Q_SCA_LUG', 'Q_TRA_ARB', 'Q_TRA_ARB_FFS2', 'Q_VED_MUZ', 'V_TEST', 'Q_AETCAN_AIR', 'Q_CAL_AIR', 'Q_GNO_GNO', 'Q_MAG_LOD', 'Q_MAR_MAR2', 'Q_RMUL_ARB', 'Q_ROV_CVM1', 'Q_SAL_MAG', 'Q_TIC_BED', 'Q_TRA_ARB_FFS', 'Q_TRA_ARB_FFS3', 'Q_VER_BGU']: continue begin = isodate.parse_datetime(line.replace("\"","").split(",")[0]) end = isodate.parse_datetime(line.replace("\"","").split(",")[0]) # Creating temporary directory where CSV files will be stored dirpath = tempfile.mkdtemp() try: istsos2csv.execute({ 'begin': begin.isoformat(), 'end': end.isoformat(), 'procedure': procedure, 'op': op, 'url': '%s/%s' % (url1, srv1), 'd': dirpath })
def execute (args, logger=None): print "istsos2csv start.." try: url = args['url'] procedure = args['procedure'] observedProperty = args['op'] begin = iso.parse_datetime(args['begin']) end = iso.parse_datetime(args['end']) d = args['d'] auth = None if 'user' in args: user = args['user'] password = None if 'password' in args: password = args['password'] if auth and password: auth = HTTPBasicAuth(user, password) qi = 'True' if 'noqi' in args: if args['noqi'] == True: qi = 'False' params = { "request": "GetObservation", "offering": "temporary", "procedure": procedure, "eventTime": None, "observedProperty": observedProperty, "responseFormat": "text/plain", "service": "SOS", "version":"1.0.0", "qualityIndex": qi } tmpBegin = begin tmpEnd = end if (end-begin)>step: tmpEnd = tmpBegin + step print params while tmpEnd <= end: print ("%s - %s") % (tmpBegin,tmpEnd) if tmpBegin == tmpEnd: params["eventTime"] = iso.datetime_isoformat(tmpBegin) else: params["eventTime"] = "%s/%s" % (iso.datetime_isoformat(tmpBegin), iso.datetime_isoformat(tmpEnd)) res = req.get("%s?%s" % (url, urllib.urlencode(params)), auth=auth) makeFile(res, procedure, observedProperty, d) tmpBegin = tmpEnd tmpEnd = tmpBegin + step print " %s ************************** " % iso.datetime_isoformat(tmpEnd) if tmpBegin < end: tmpEnd = end if tmpBegin == tmpEnd: params["eventTime"] = iso.datetime_isoformat(tmpBegin) else: params["eventTime"] = "%s/%s" % (iso.datetime_isoformat(tmpBegin), iso.datetime_isoformat(tmpEnd)) res = req.get("%s?%s" % (url, urllib.urlencode(params)), auth=auth) makeFile(res, procedure, observedProperty, d) print " %s ************************** " % iso.datetime_isoformat(end) print "Finish." except Exception as e: print "ERROR: %s\n\n" % e traceback.print_exc()
def __init__(self,filter,pgdb):
    """GetObservation response builder: validate the request filters,
    determine the time period and timezone, select the matching procedures
    and build one ``Observation`` object per procedure into ``self.obs``.

    :param filter: parsed GetObservation request (offering, procedure list,
        featureOfInterest, observedProperty list, eventTime, srsName,
        sosConfig).
    :param pgdb: PostgreSQL service-database wrapper (select, setTimeTZ).
    :raises sosException.SOSException: on invalid procedure,
        featureOfInterest or observedProperty values.
    :raises Exception: on SQL failures.

    NOTE(review): SQL below is assembled by string interpolation of
    request-supplied values (offering, procedure names, foi names, spatial
    filter) — SQL-injection prone; should be parameterized.
    """
    self.offInfo = offInfo(filter.offering,pgdb,filter.sosConfig)
    self.refsys = filter.sosConfig.urn["refsystem"] + filter.srsName
    self.filter = filter

    #CHECK FILTER VALIDITY
    #=========================================
    """
    off_list = BuildOfferingList(pgdb,filter.sosConfig)
    if not filter.offering in off_list:
        raise sosException.SOSException("InvalidParameterValue","offering","Parameter \"offering\" sent with invalid value: %s - available options for offering are %s" %(filter.offering,off_list))
    """
    if filter.procedure:
        pl = BuildProcedureList(pgdb,filter.offering,filter.sosConfig)
        for p in filter.procedure:
            if not p in pl:
                raise sosException.SOSException("InvalidParameterValue","procedure","Parameter \"procedure\" sent with invalid value: %s - available options for offering \"%s\": %s"%(p,filter.offering,pl))

    if filter.featureOfInterest:
        fl = BuildfeatureOfInterestList(pgdb,filter.offering,filter.sosConfig)
        if not filter.featureOfInterest in fl:
            raise sosException.SOSException("InvalidParameterValue","featureOfInterest","Parameter \"featureOfInterest\" sent with invalid value: %s - available options: %s"%(filter.featureOfInterest,fl))

    if filter.observedProperty:
        opl = BuildobservedPropertyList(pgdb, filter.offering,filter.sosConfig)
        # Match each requested property as a ':'-delimited token of the
        # registered URN definitions (SIMILAR TO pattern).
        opr_sel = "SELECT def_opr FROM %s.observed_properties WHERE " %(filter.sosConfig.schema,)
        opr_sel_w = []
        for op in filter.observedProperty:
            opr_sel_w += ["def_opr SIMILAR TO '%%(:|)%s(:|)%%'" %(op)]
        opr_sel = opr_sel + " OR ".join(opr_sel_w)
        try:
            opr_filtered = pgdb.select(opr_sel)
        except:
            raise Exception("SQL: %s"%(opr_sel))
        if not len(opr_filtered)>0:
            raise sosException.SOSException("InvalidParameterValue","observedProperty","Parameter \"observedProperty\" sent with invalid value: %s - available options: %s"%(filter.observedProperty,opl))

    #SET TIME PERIOD
    #=========================================
    tp=[]
    if filter.eventTime == None:
        tp = [None,None]
    else:
        for t in filter.eventTime:
            if len(t) == 2:
                tp.append(iso.parse_datetime(t[0]))
                tp.append(iso.parse_datetime(t[1]))
            if len(t)==1:
                tp.append(iso.parse_datetime(t[0]))
            #else: rise error ???
    # Overall requested period: earliest/latest instant over all filters
    self.period = [min(tp),max(tp)]

    self.obs=[]

    # SET REQUEST TIMEZONE
    #===================================
    # Use the timezone of the first eventTime bound if aware, else UTC.
    if filter.eventTime:
        if iso.parse_datetime(filter.eventTime[0][0]).tzinfo:
            self.reqTZ = iso.parse_datetime(filter.eventTime[0][0]).tzinfo
            pgdb.setTimeTZ(iso.parse_datetime(filter.eventTime[0][0]))
        else:
            self.reqTZ = pytz.utc
            pgdb.setTimeTZ("UTC")
    else:
        self.reqTZ = pytz.utc
        pgdb.setTimeTZ("UTC")

    #BUILD PROCEDURES LIST
    #=========================================
    #---select part of query
    sqlSel = "SELECT DISTINCT"
    sqlSel += " id_prc, name_prc, name_oty, stime_prc, etime_prc, time_res_prc"
    #---from part of query
    #################################
    # time_res_unit code removed
    #################################
    sqlFrom = "FROM %s.procedures, %s.proc_obs p, %s.observed_properties, %s.uoms," %(filter.sosConfig.schema,filter.sosConfig.schema,filter.sosConfig.schema,filter.sosConfig.schema)
    sqlFrom += " %s.off_proc o, %s.offerings, %s.obs_type" %(filter.sosConfig.schema,filter.sosConfig.schema,filter.sosConfig.schema)
    if filter.featureOfInterest or filter.featureOfInterestSpatial:
        sqlFrom += " ,%s.foi, %s.feature_type" %(filter.sosConfig.schema,filter.sosConfig.schema)

    sqlWhere = "WHERE id_prc=p.id_prc_fk AND id_opr_fk=id_opr AND o.id_prc_fk=id_prc AND id_off_fk=id_off AND id_uom=id_uom_fk AND id_oty=id_oty_fk"
    sqlWhere += " AND name_off='%s'" %(filter.offering)

    #---where condition based on featureOfInterest
    if filter.featureOfInterest:
        #sqlWhere += " AND id_foi=id_foi_fk AND id_fty=id_fty_fk AND (name_foi='%s')" %(filter.featureOfInterest)
        sqlWhere += " AND id_foi=id_foi_fk AND id_fty=id_fty_fk AND (name_foi IN (%s))" %(",".join( [ "'"+f+"'" for f in filter.featureOfInterest.split(",")]))
    if filter.featureOfInterestSpatial:
        sqlWhere += " AND id_foi_fk=id_foi AND %s" %(filter.featureOfInterestSpatial)

    #---where condition based on procedures
    if filter.procedure:
        sqlWhere += " AND ("
        procWhere = []
        for proc in filter.procedure:
            procWhere.append("name_prc='%s'" %(proc))
        sqlWhere += " OR ".join(procWhere)
        sqlWhere += ")"

    #---where condition based on observed properties
    # NOTE(review): opr_filtered is only bound inside the
    # `if filter.observedProperty:` branch above; when no observedProperty
    # filter is provided this raises NameError — presumably observedProperty
    # is mandatory upstream; confirm.
    sqlWhere += " AND ("
    obsprWhere = []
    for obs in opr_filtered:
        obsprWhere.append("def_opr='%s'" %(obs["def_opr"]))
    sqlWhere += " OR ".join(obsprWhere)
    sqlWhere += ")"

    try:
        res = pgdb.select(sqlSel + " " + sqlFrom + " " + sqlWhere)
    except:
        raise Exception("SQL: %s"%(sqlSel + " " + sqlFrom + " " + sqlWhere))

    #FOR EACH PROCEDURE
    #=========================================
    for o in res:
        #id_prc, name_prc, name_oty, stime_prc, etime_prc, time_res_prc, name_tru
        #CRETE OBSERVATION OBJECT
        #=================================================
        ob = Observation()
        #BUILD BASE INFOS FOR EACH PROCEDURE (Pi)
        #=================================================
        ob.baseInfo(pgdb,o,filter.sosConfig)
        #GET DATA FROM PROCEDURE ACCORDING TO THE FILTERS
        #=================================================
        ob.setData(pgdb,o,filter)
        #ADD OBSERVATIONS
        #=================================================
        self.obs.append(ob)
def execute (args, logger=None):
    """
    Import CSV files into istSOS procedures through the REST WA API.

    For every procedure name in ``args['p']`` this routine:
      1. loads the procedure description (assignedSensorId and the
         observed-property definitions),
      2. fetches a GetObservation "eventtime/last" response to use as an
         InsertObservation template,
      3. reads every ``<proc>_*<ext>`` CSV file found in ``args['wd']``
         and appends its rows to the template (filling a default quality
         index where the CSV has none),
      4. posts the data with ForceInsert, chunking the request when it
         holds more than ``maxobs`` observations.

    :param args: dict of command-line options; mandatory keys: u (base
                 url), s (service), p (procedure list), wd (working dir);
                 optional: q, e, v, t, user, password, noqi, m
    :param logger: optional object exposing ``log(message)``; when not
                   given, messages are printed to stdout
    """
    def log(message):
        # Route messages to the provided logger, or fall back to stdout.
        if logger:
            logger.log(message)
        else:
            print message
    pp = pprint.PrettyPrinter(indent=2)
    try:
        # Initializing URLs
        url = args['u']
        # Service instance name
        service = args['s']
        # Quality index
        quality = '100'
        if 'q' in args:
            quality = args['q']
        # Procedures
        procs = args['p']
        # Working directory, where the CSV files are located
        wd = args['wd']
        # File extension
        ext = '.dat'
        if 'e' in args:
            ext = args['e']
        debug = False
        if 'v' in args:
            debug = args['v']
        test = False
        if 't' in args:
            test = args['t']
        user = None
        if 'user' in args:
            user = args['user']
        passw = None
        # NOTE(review): the initialised name is 'passw' but the value is
        # stored into 'password'; if 'password' is missing from args the
        # check below hits an undefined name -- confirm and align.
        if 'password' in args:
            password = args['password']
        auth = None
        if user and password:
            auth = HTTPBasicAuth(user, password)
        noqi = False # False meas that quality index is also migrated
        if 'noqi' in args:
            if args['noqi'] == True:
                noqi = True
        #print "noqi: %s" % noqi
        maxobs = 5000
        if 'm' in args:
            maxobs = int(args['m'])
        #req = requests.session()
        req = requests
        for proc in procs:
            log("\nProcedure: %s" % proc)
            # Load procedure description
            res = req.get("%s/wa/istsos/services/%s/procedures/%s" % (
                url, service, proc
            ), auth=auth, verify=False)
            data = res.json()
            if data['success']==False:
                raise Exception ("Description of procedure %s can not be loaded: %s" % (proc, data['message']))
            else:
                print " > %s" % data['message']
            data = data['data']
            aid = data['assignedSensorId']
            # Getting observed properties from describeSensor response
            op = []
            for out in data['outputs']:
                # Quality-index outputs are skipped when noqi is enabled.
                if not noqi or not ':qualityIndex' in out['definition']:
                    op.append(out['definition'])
            # Load of a getobservation request
            res = req.get("%s/wa/istsos/services/%s/operations/getobservation/offerings/%s/procedures/%s/observedproperties/%s/eventtime/last" % (
                url, service, 'temporary', proc, ','.join(op)
            ), auth=auth, verify=False)
            data = res.json()
            if data['success']==False:
                raise Exception ("Last observation of procedure %s can not be loaded: %s" % (proc, data['message']))
            else:
                print " > %s" % data['message']
            data = data['data'][0]
            data['AssignedSensorId'] = aid
            # Set values array empty (can contain 1 value if procedure not empty)
            lastMeasure = data['result']['DataArray']['values'][0] if len(data['result']['DataArray']['values'])==1 else None
            data['result']['DataArray']['values'] = []
            # discover json observed property disposition
            jsonindex = {}
            for pos in range(0, len(data['result']['DataArray']['field'])):
                field = data['result']['DataArray']['field'][pos]
                if not noqi:
                    jsonindex[field['definition']] = pos
                elif not ':qualityIndex' in field['definition']:
                    jsonindex[field['definition']] = pos
                elif ':qualityIndex' in field['definition'] and noqi:
                    # NOTE(review): popping from the list while iterating a
                    # pre-computed range can shift indices past the end --
                    # verify with procedures having several QI fields.
                    data['result']['DataArray']['field'].pop(pos)
            log ("Searching: %s" % os.path.join(wd, "%s_[0-9]*%s" % (proc,ext)))
            files = glob.glob(os.path.join(wd, "%s_*%s" % (proc,ext)))
            files.sort()
            if debug:
                print " > %s %s found" % (len(files), "Files" if len(files)>1 else "File")
            if len(files)>0:
                for f in files:
                    # open file
                    file = open(f, 'rU')
                    # loop lines
                    lines = file.readlines()
                    # First CSV line is the header listing the definitions.
                    obsindex = lines[0].strip(' \t\n\r').split(",")
                    # Check if all the observedProperties of the procedure are included in the CSV file (quality index is optional)
                    for k, v in jsonindex.iteritems():
                        if k in obsindex:
                            continue
                        elif ':qualityIndex' in k:
                            continue
                        else:
                            raise Exception ("Mandatory observed property %s is not present in the CSV." % k)
                    # loop lines (skipping header)
                    for i in range(1, len(lines)):
                        try:
                            line = lines[i]
                            lineArray = line.strip(' \t\n\r').split(",")
                            # Creating an empty array where the values will be inserted
                            observation = ['']*len(jsonindex)
                            for k, v in jsonindex.iteritems():
                                val = None
                                if k in obsindex:
                                    val = lineArray[obsindex.index(k)]
                                elif ':qualityIndex' in k:
                                    # Quality index is not present in the CSV so the default value will be set
                                    val = quality
                                observation[v] = val
                            # attach to object
                            data['result']['DataArray']['values'].append(observation)
                        except Exception as e:
                            print "Errore alla riga: %s - %s)" % (i, lines[i])
                            traceback.print_exc()
                            raise e
                log ("Before insert ST:")
                if 'beginPosition' in data["samplingTime"]:
                    log (" > Begin: %s" % data["samplingTime"]["beginPosition"])
                if 'endPosition' in data["samplingTime"]:
                    log (" + End: %s" % data["samplingTime"]["endPosition"])
                # End position is parsed from the last file name, assumed as
                # <proc>_YYYYMMDDHHMMSSffffff<ext> in UTC -- TODO confirm.
                ep = datetime.strptime(
                    os.path.split(f)[1].replace("%s_" % proc, "").replace(ext, ""),"%Y%m%d%H%M%S%f"
                ).replace(tzinfo=timezone('UTC'))
                # Kick in the brain code snippet
                # If there is at least one measure:
                if len(data['result']['DataArray']['values'])>0:
                    # taking first observation as begin position
                    bp = iso.parse_datetime(
                        data['result']['DataArray']['values'][0][jsonindex['urn:ogc:def:parameter:x-istsos:1.0:time:iso8601']]
                    )
                else:
                    # otherwise this can be an irrebular procedure where just the end position is moved forward
                    if ep > iso.parse_datetime(data["samplingTime"]["endPosition"]):
                        bp = ep
                    else:
                        raise Exception("Something is wrong with begin position..")
                data["samplingTime"] = {
                    "beginPosition": bp.isoformat(),
                    "endPosition": ep.isoformat()
                }
                #data["result"]["DataArray"]["elementCount"] = str(len(data['result']['DataArray']['values']))
                log ("Insert ST:")
                log (" > Begin: %s" % bp.isoformat())
                log (" + End: %s" % ep.isoformat())
                log (" > Values: %s" % len( data['result']['DataArray']['values']))
            if not test and len(files)>0:
                # send to wa
                if len(data['result']['DataArray']['values']) > maxobs:
                    import copy
                    total = len(data['result']['DataArray']['values'])
                    inserted = last = maxobs
                    # Chunked insert: move at most 'last' rows per request.
                    while len(data['result']['DataArray']['values'])>0:
                        tmpData = copy.deepcopy(data)
                        tmpData['result']['DataArray']['values'] = data['result']['DataArray']['values'][:last]
                        data['result']['DataArray']['values'] = data['result']['DataArray']['values'][last:]
                        if len(data['result']['DataArray']['values'])>0:
                            # The chunk spans from its first row to the first
                            # row of the remaining data.
                            tmpData["samplingTime"] = {
                                "beginPosition": tmpData['result']['DataArray']['values'][0][jsonindex['urn:ogc:def:parameter:x-istsos:1.0:time:iso8601']],
                                "endPosition": data['result']['DataArray']['values'][0][jsonindex['urn:ogc:def:parameter:x-istsos:1.0:time:iso8601']]
                            }
                        else:
                            # Last chunk: close the period at the file-name
                            # derived end position.
                            tmpData["samplingTime"] = {
                                "beginPosition": tmpData['result']['DataArray']['values'][0][jsonindex['urn:ogc:def:parameter:x-istsos:1.0:time:iso8601']],
                                "endPosition": ep.isoformat()
                            }
                        res = req.post("%s/wa/istsos/services/%s/operations/insertobservation" % (
                            url, service),
                            auth=auth,
                            verify=False,
                            data=json.dumps({
                                "ForceInsert": "true",
                                "AssignedSensorId": aid,
                                "Observation": tmpData
                            })
                        )
                        # read response
                        res.raise_for_status()
                        log (" > Insert observation success of %s/%s (%s / %s) observations: %s" % (inserted,total,tmpData["samplingTime"]["beginPosition"],tmpData["samplingTime"]["endPosition"],res.json()['success']))
                        if not res.json()['success']:
                            log (res.json()['message'])
                        if len(data['result']['DataArray']['values'])<maxobs:
                            last = len(data['result']['DataArray']['values'])
                        inserted += last
                else:
                    # Small enough for a single InsertObservation request.
                    res = req.post("%s/wa/istsos/services/%s/operations/insertobservation" % (
                        url, service),
                        auth=auth,
                        verify=False,
                        data=json.dumps({
                            "ForceInsert": "true",
                            "AssignedSensorId": aid,
                            "Observation": data
                        })
                    )
                    # read response
                    res.raise_for_status()
                    log (" > Insert observation success: %s" % res.json()['success'])
                    if not res.json()['success']:
                        log (res.json()['message'])
            print "~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~"
            pass
    except requests.exceptions.HTTPError as eh:
        print "ERROR: %s\n\n" % eh
        traceback.print_exc()
        pass
    except Exception as e:
        print "ERROR: %s\n\n" % e
        traceback.print_exc()
        pass
    pass
def insert_observation(self, broker_url, port, topic, data):
    """
    Fast-insert observations received from an MQTT topic straight into
    the istSOS database of the service the topic is mapped to.

    ``data`` is either a list of rows or a single CSV string/bytes in
    the form ``time,val1,val2,...`` (one value per registered observed
    property, preceded by the ISO event time). Rows are written inside a
    transaction; an event time that already exists is deleted and
    re-inserted. The procedure begin/end positions are extended when the
    new data falls outside the stored period, and the data is finally
    re-published on the configured MQTT broker.

    :param broker_url: broker host the message came from
    :param port: broker port (combined with broker_url as the map key)
    :param topic: MQTT topic, used to resolve procedure and service
    :param data: observation row(s) as described above
    :raises Exception: wrapping any DB error, after rolling back
    """
    print("url: %s:%s, topic: %s, data: %s" % (broker_url, port, topic, data))
    # Serialise inserts across MQTT callback threads.
    with self.lock:
        broker = "%s:%s" % (broker_url, port)
        if (broker in self.broker) and (topic in self.broker[broker]):
            # Resolve procedure id/name and target service connection.
            id_prc = self.broker[broker][topic]['id']
            name_prc = self.broker[broker][topic]['name']
            instance = self.broker[broker][topic]['instance']
            conn = self.services[instance]['conn']
            try:
                # Load the procedure's observed-property rows together
                # with the stored sampling period (stime/etime).
                sql = """
                    SELECT procedures.id_prc, proc_obs.id_pro,
                        proc_obs.constr_pro, procedures.stime_prc,
                        procedures.etime_prc
                    FROM %s.procedures, %s.proc_obs
                    WHERE proc_obs.id_prc_fk = procedures.id_prc
                """ % (instance, instance)
                sql += """
                    AND id_prc = %s
                    ORDER BY proc_obs.id_pro ASC;
                """
                rows = conn.select(sql, (int(id_prc), ))
                if not isinstance(data, list):
                    #print (type(data))
                    # Normalise a single CSV payload (str or bytes) into
                    # a one-row list of value lists.
                    if isinstance(data, str):
                        data = [data.split(",")]
                    else:
                        data = [data.decode('utf-8').split(",")]
                # check if procedure observations length is ok
                if len(rows) != (len(data[0]) - 1):
                    raise Exception(
                        "Array length missmatch with procedures "
                        "observation number")
                else:
                    # Parameterised statements reused for every row.
                    insertEventTime = """
                        INSERT INTO %s.event_time (id_prc_fk, time_eti)
                    """ % (instance)
                    insertEventTime += """
                        VALUES (%s, %s::TIMESTAMPTZ) RETURNING id_eti;
                    """
                    deleteEventTime = """
                        DELETE FROM %s.event_time
                    """ % (instance)
                    deleteEventTime += """
                        WHERE id_prc_fk = %s AND time_eti = %s::TIMESTAMPTZ
                    """
                    insertMeasure = """
                        INSERT INTO %s.measures(id_eti_fk, id_qi_fk, id_pro_fk,val_msr)
                    """ % (instance)
                    insertMeasure += """
                        VALUES (%s, 100, %s, %s);
                    """
                    updateBeginPosition = """
                        UPDATE %s.procedures""" % (instance)
                    updateBeginPosition += """
                        SET stime_prc=%s::TIMESTAMPTZ WHERE id_prc=%s
                    """
                    updateEndPosition = """
                        UPDATE %s.procedures""" % (instance)
                    updateEndPosition += """
                        SET etime_prc=%s::TIMESTAMPTZ WHERE id_prc=%s
                    """
                    # Track whether the stored period must be extended.
                    bp = rows[0][3]
                    bpu = False
                    ep = rows[0][4]
                    epu = False
                    for observation in data:
                        try:
                            id_eti = conn.executeInTransaction(
                                insertEventTime, (rows[0][0], observation[0]))
                        except psycopg2.IntegrityError as ie:
                            # Duplicate event time: replace the existing
                            # record (delete + reinsert).
                            conn.rollbackTransaction()
                            conn.executeInTransaction(
                                deleteEventTime, (rows[0][0], observation[0]))
                            id_eti = conn.executeInTransaction(
                                insertEventTime, (rows[0][0], observation[0]))
                        # One measure per observed property; values follow
                        # the event time in the incoming row.
                        for idx in range(0, len(rows)):
                            conn.executeInTransaction(
                                insertMeasure,
                                (int(id_eti[0][0]), int(rows[idx][1]),
                                 float(observation[(idx + 1)])))
                        if (bp is None) or (bp == '') or (
                                iso.parse_datetime(observation[0]) < bp):
                            bp = iso.parse_datetime(observation[0])
                            bpu = True
                        if (ep is None) or (ep == '') or (
                                iso.parse_datetime(observation[0]) > ep):
                            ep = iso.parse_datetime(observation[0])
                            epu = True
                        # NOTE(review): commit placement reconstructed as
                        # per-observation -- confirm against history.
                        conn.commitTransaction()
                    if bpu:
                        conn.executeInTransaction(updateBeginPosition,
                                                  (bp.isoformat(), id_prc))
                    if epu:
                        conn.executeInTransaction(updateEndPosition,
                                                  (ep.isoformat(), id_prc))
                    conn.commitTransaction()
                    # Publish / broadcast new data
                    mqttConf = self.services[instance]['config'].mqtt
                    print("mqttConf: ")
                    print(mqttConf)
                    if mqttConf["broker_url"] != '' and (
                            mqttConf["broker_port"] != ''):
                        print("Broadcasting new data!!")
                        istmqttlib.PahoPublisher({
                            "broker_url": mqttConf["broker_url"],
                            "broker_port": mqttConf["broker_port"],
                            "broker_topic": "%s%s" % (mqttConf["broker_topic"], name_prc),
                            "data": data
                        }).start()
            except Exception as e:
                # Any failure voids the whole batch.
                traceback.print_exc(file=sys.stderr)
                conn.rollbackTransaction()
                raise Exception("Error in fast insert (%s): %s" % (type(e), e))
        else:
            #print("Sensor unknown")
            pass
errors = False #print "**********************************************" #print "Stop: %s, %s" % (stop1,stop2) #print "**********************************************" for i in range(0,stop): row = [] iso1 = None iso2 = None #toprint = i, ": " if (cnt1)<stop1: iso1 = isodate.parse_datetime(observations1[cnt1][0]) #toprint = toprint , cnt1, observations1[cnt1], ' ' if (cnt2)<stop2: iso2 = isodate.parse_datetime(observations2[cnt2][0]) #toprint = toprint , cnt2, observations2[cnt2] #print toprint if iso1 == None: row = [None,None,None,iso2.isoformat(),observations2[cnt2][1],observations2[cnt2][2]] #errors = True #mergedErrors.append([None,None,None,iso2.isoformat(),observations2[cnt2][1],observations2[cnt2][2]]) cnt1 += 1 if cnt2<stop2 and cnt1>stop1: cnt2 += 1
def execute(args):
    """
    Migrate procedures and their observations from a source SOS 1.0.0
    service into a destination istSOS instance.

    Workflow per procedure of the 'temporary' offering of the source:
      1. GetCapabilities to discover offerings/procedures,
      2. DescribeSensor to collect the observed properties,
      3. GetObservation to read period, feature of interest and UOMs,
      4. register the procedure on the destination (if missing),
      5. unless ``args['r']`` (register only), walk the sampling period
         in day-sized chunks -- the chunk size adapts to the source
         response time -- copying data with InsertObservation.

    :param args: dict of command-line options: s (source SOS url),
                 d (destination url), n (service name), p (procedures),
                 o (omit list), i (interval days), r (register only),
                 v, t, and optional du/dp (destination credentials),
                 a (append), from/to (period bounds), istsos (version)
    """
    pp = pprint.PrettyPrinter(indent=2)
    try:
        istsos_version = args['istsos'] if 'istsos' in args else None
        debug = args['v']
        test = args['t']
        procs = args['p']
        omit = args['o']
        procedures = {}
        src = args['s']
        dst = args['d']
        srv = args['n']
        duser = None
        if 'du' in args:
            duser = args['du']
        dpassw = None
        if 'dp' in args:
            dpassw = args['dp']
        #print "%s:%s" % (duser,'*'*len(dpassw))
        auth = None
        if duser and dpassw:
            print "User and password!"
            auth = HTTPBasicAuth(duser, dpassw)
        appendData = False
        if 'a' in args and args['a']:
            print "Append data: enabled."
            appendData = True
        dfrom = None
        dto = None
        # Explicit period bounds disable append mode.
        if 'from' in args and type('') == type(args['from']):
            print "From: %s" % args['from']
            dfrom = iso.parse_datetime(args['from'])
            appendData = None
        if 'to' in args and type('') == type(args['to']):
            print "To: %s" % args['to']
            dto = iso.parse_datetime(args['to'])
            appendData = None
        registerOnly = args['r']
        virtual = False
        hq = False
        # Executing request
        res = req.get("%s" % (src), params={
            'service': 'SOS',
            'version': '1.0.0',
            'request': 'GetCapabilities',
            'section': 'contents'
        }, verify=False)
        # Parsing response
        gc, gcNs = parse_and_get_ns(StringIO(res.content))
        # Extract all offerings
        elOfferings = gc.findall(
            "{%s}Contents/{%s}ObservationOfferingList/{%s}ObservationOffering"
            % (gcNs['sos'], gcNs['sos'], gcNs['sos']))
        for offering in elOfferings:
            offeringName = offering.find("{%s}name" % (gcNs['gml'])).text.split(":")[-1]
            # Only the istSOS 'temporary' offering is migrated.
            if offeringName != 'temporary':
                continue
            # For each offering get the procedures
            elProcs = offering.findall("{%s}procedure" % (gcNs['sos']))
            for p in elProcs:
                pname = p.get('{%s}href' % gcNs['xlink'])
                # Honour the explicit include (-p) and exclude (-o) lists.
                if (type(procs) == type([]) and pname not in procs) or (type(omit) == type([]) and pname in omit):
                    continue
                print "\n%s" % pname
                print "================================"
                procedures[pname] = Procedure(pname, offeringName, dst, srv, auth)
                if virtual:
                    procedures[pname].setSystemType('virtual')
                res = req.get("%s" % (src), params={
                    'service': 'SOS',
                    'version': '1.0.0',
                    'request': 'DescribeSensor',
                    'outputFormat': 'text/xml;subtype=\'sensorML/1.0.0\'',
                    'procedure': pname
                }, verify=False)
                #print res.content
                ds, dsNs = parse_and_get_ns(StringIO(res.content))
                #print res.content
                #print "Root: %s" % ds.getroot().tag
                if ds.getroot().tag == 'ExceptionReport':
                    print "Error on DS for %s" % pname
                    continue
                #print "Outputs found: %s" % len(elDescribe)
                observedProperties = []
                print "istsos_version: ", istsos_version
                # URN prefix to strip differs between istSOS versions.
                uniqidurn = 'urn:ogc:def:parameter:x-ist::'
                if istsos_version != None and istsos_version == '2':
                    uniqidurn = 'urn:ogc:def:parameter:x-ist:1.0:'
                    elFields = ds.findall(
                        "{%s}member/{%s}System/{%s}outputs/{%s}OutputList/{%s}output/{%s}DataRecord/{%s}field"
                        % (dsNs['sml'], dsNs['sml'], dsNs['sml'], dsNs['sml'],
                           dsNs['sml'], dsNs['swe'], dsNs['swe']))
                    print "Observed properties (v2): %s " % len(elFields)
                    for fs in elFields:
                        print fs.get('name')
                        if fs.get('name') != 'Time':
                            observedProperties.append(
                                fs.find(
                                    "{%s}Quantity" % (dsNs['swe'])).get('definition').replace(
                                        uniqidurn, ''))
                else:
                    elDescribe = ds.findall(
                        "member/{%s}System/{%s}outputs/{%s}OutputList/{%s}output"
                        % (dsNs['sml'], dsNs['sml'], dsNs['sml'], dsNs['sml']))
                    print "Observed properties: %s " % len(elDescribe)
                    for ds in elDescribe:
                        definition = ds.find(
                            "{%s}ObservableProperty" % (dsNs['swe'])).get('definition').replace(
                                uniqidurn, '')
                        #print definition
                        # Skip the time field itself.
                        if definition.find('time:iso8601') < 0:
                            observedProperties.append(definition)
                #print {
                #    'service': 'SOS',
                #    'version': '1.0.0',
                #    'request': 'GetObservation',
                #    'offering': offeringName,
                #    'responseFormat': 'text/xml;subtype=\'sensorML/1.0.0\'',
                #    'procedure': pname,
                #    'observedProperty': ",".join(observedProperties)
                #}
                res = req.get("%s" % (src), params={
                    'service': 'SOS',
                    'version': '1.0.0',
                    'request': 'GetObservation',
                    'offering': offeringName,
                    'responseFormat': 'text/xml;subtype=\'sensorML/1.0.0\'',
                    'procedure': pname,
                    'observedProperty': ",".join(observedProperties)
                }, verify=False)
                go, goNs = parse_and_get_ns(StringIO(res.content))
                if go.getroot().tag == 'ExceptionReport':
                    print "Error on GO for %s:\nparams:%s\n%s" % (
                        pname, {
                            'service': 'SOS',
                            'version': '1.0.0',
                            'request': 'GetObservation',
                            'offering': offeringName,
                            'responseFormat': 'text/xml;subtype=\'sensorML/1.0.0\'',
                            'procedure': pname,
                            'observedProperty': ",".join(observedProperties)
                        }, res.content)
                    continue
                # Extracting begin and end position
                begin = go.find(
                    "{%s}member/{%s}Observation/{%s}samplingTime/{%s}TimePeriod/{%s}beginPosition"
                    % (goNs['om'], goNs['om'], goNs['om'], goNs['gml'], goNs['gml']))
                end = go.find(
                    "{%s}member/{%s}Observation/{%s}samplingTime/{%s}TimePeriod/{%s}endPosition"
                    % (goNs['om'], goNs['om'], goNs['om'], goNs['gml'], goNs['gml']))
                procedures[pname].begin = begin.text
                procedures[pname].end = end.text
                # Extracting Feature of Interest and coordinates
                foi = go.find(
                    "{%s}member/{%s}Observation/{%s}featureOfInterest"
                    % (goNs['om'], goNs['om'], goNs['om']))
                point = foi.find("{%s}Point" % (goNs['gml']))
                if point == None:
                    # Some services nest the point inside a collection.
                    point = foi.find(
                        "{%s}FeatureCollection/{%s}location/{%s}Point"
                        % (goNs['gml'], goNs['gml'], goNs['gml']))
                coord = point.find("{%s}coordinates" % (goNs['gml'])).text.split(",")
                # istSOS wants x,y,z: default the elevation to 0.
                if len(coord) == 2:
                    coord.append('0')
                procedures[pname].setFoi(
                    foi.get('{%s}href' % gcNs['xlink']).split(":")[-1],
                    point.get('srsName'), coord)
                # Extracting UOM
                fields = go.findall(
                    "{%s}member/{%s}Observation/{%s}result/{%s}DataArray/{%s}elementType/{%s}DataRecord/{%s}field"
                    % (goNs['om'], goNs['om'], goNs['om'], goNs['swe'],
                       goNs['swe'], goNs['swe'], goNs['swe']))
                for field in fields:
                    if field.get('name') != 'Time':
                        qty = field.find("{%s}Quantity" % (goNs['swe']))
                        uom = field.find("{%s}Quantity/{%s}uom" % (goNs['swe'], goNs['swe']))
                        procedures[pname].addObservedProperty(
                            field.get('name'),
                            qty.get('definition').replace(uniqidurn, ''),
                            uom.get('code'))
                # _begin stays fixed for progress computation while
                # 'begin' advances through the migration loop.
                if dfrom:
                    begin = dfrom
                    _begin = dfrom
                else:
                    begin = iso.parse_datetime(procedures[pname].begin)
                    _begin = iso.parse_datetime(procedures[pname].begin)
                if dto:
                    end = dto
                else:
                    end = iso.parse_datetime(procedures[pname].end)
                # ~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~
                # REGISTRATION PROCESS
                # ~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~
                # Check if procedure already exist
                res = req.get("%s/wa/istsos/services/%s/procedures/%s"
                              % (dst, srv, pname), auth=auth, verify=False)
                if not res.json()["success"]:
                    #print procedures[pname].data
                    # Registering procedure to istSOS
                    res = req.post("%s/wa/istsos/services/%s/procedures" % (dst, srv),
                                   data=json.dumps(procedures[pname].data),
                                   auth=auth)
                    if not res.json()["success"]:
                        #print json.dumps(procedures[pname].data)
                        raise Exception(
                            "Registering procedure %s failed: \n%s"
                            % (pname, res.json()["message"]))
                    # Getting details (describe sensor) to get the assignedSensorId
                    res = req.get("%s/wa/istsos/services/%s/procedures/%s"
                                  % (dst, srv, pname), auth=auth)
                    # Getting an InsertObservation template
                    template = procedures[pname].getIoTemplate()
                else:
                    # Getting an InsertObservation template
                    template = procedures[pname].getIoTemplate()
                    try:
                        # Append mode: resume from the destination's end.
                        if appendData and ('endPosition' in template['samplingTime']):
                            procedures[pname].begin = template['samplingTime'][
                                'endPosition']
                            begin = iso.parse_datetime(
                                template['samplingTime']['endPosition'])
                    except Exception as exproc:
                        print res.text
                        raise exproc
                procedures[pname].oid = res.json()["data"]["assignedSensorId"]
                days = int(args['i'])
                interval = timedelta(days=int(days))
                if not registerOnly:
                    if virtual and hq:
                        # ~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~
                        # VIRTUAL PROCEDURE CODE INITIALIZATION
                        # ~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~
                        pass
                    else:
                        print "Starting migration.."
                        # ~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~
                        # PROCEDURE OBSERVATIONS MIGRATION
                        # ~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~
                        oOrder = []
                        passedLoops = 0
                        lastPrint = ""
                        startTime = time.time()
                        print "%s: %s - %s" % (pname, procedures[pname].begin,
                                               procedures[pname].end)
                        # Clamp the first chunk to the remaining period.
                        if begin < end and begin + interval > end:
                            interval = end - begin
                        while (begin + interval) <= end:
                            loopTotalTime = time.time()
                            nextPosition = begin + interval
                            passedLoops = passedLoops + 1
                            # Progress: elapsed vs total period in seconds.
                            t = float(
                                calendar.timegm(end.utctimetuple())
                                - calendar.timegm(_begin.utctimetuple()))
                            t1 = float(
                                calendar.timegm(nextPosition.utctimetuple())
                                - calendar.timegm(_begin.utctimetuple()))
                            try:
                                percentage = round((t1 / t) * 100, 2)
                            except:
                                percentage = 0
                            if percentage > 100:
                                percentage = 100
                            lastPrint = "%s > %s%% (%s / %s %s days)" % (
                                "\b" * len(lastPrint), percentage,
                                begin.strftime(fmtshort),
                                nextPosition.strftime(fmtshort), days)
                            looptime = time.time()
                            # GetObservation from source SOS
                            params = {
                                'service': 'SOS',
                                'version': '1.0.0',
                                'request': 'GetObservation',
                                'eventTime': '%s/%s' % (begin.strftime(fmt),
                                                        nextPosition.strftime(fmt)),
                                'qualityIndex': 'True',
                                'offering': offeringName,
                                'responseFormat': 'text/xml;subtype=\'sensorML/1.0.0\'',
                                'procedure': pname,
                                'observedProperty': ",".join(observedProperties)
                            }
                            try:
                                res = req.get("%s" % (src), params=params,
                                              verify=False)
                            except Exception:
                                # One blind retry on any transport error.
                                res = req.get("%s" % (src), params=params,
                                              verify=False)
                            # Adapt the chunk size to the source response
                            # time: halve above 10s, grow below 5s.
                            gotime = timedelta(seconds=int(time.time() - looptime))
                            if gotime > timedelta(seconds=int(10)):
                                if days > 1:
                                    days = int(days / 2)
                                if days <= 1:
                                    days = 1
                                interval = timedelta(days=days)
                            elif gotime < timedelta(seconds=int(5)):
                                days = days + 1
                                interval = timedelta(days=days)
                            lastPrint = "%s - GO: '%s'" % (lastPrint, gotime)
                            go, goNs = parse_and_get_ns(StringIO(res.content))
                            if len(oOrder) == 0:
                                fields = go.findall(
                                    "{%s}member/{%s}Observation/{%s}result/{%s}DataArray/{%s}elementType/{%s}DataRecord/{%s}field"
                                    % (goNs['om'], goNs['om'], goNs['om'],
                                       goNs['swe'], goNs['swe'], goNs['swe'],
                                       goNs['swe']))
                                for field in fields:
                                    # NOTE(review): 'qty' here is the stale
                                    # value from the UOM loop above, not
                                    # 'field' -- looks wrong, confirm.
                                    oOrder.append(
                                        qty.get('definition').replace(
                                            'urn:ogc:def:parameter:x-ist::', ''))
                            values = go.find(
                                "{%s}member/{%s}Observation/{%s}result/{%s}DataArray/{%s}values"
                                % (goNs['om'], goNs['om'], goNs['om'],
                                   goNs['swe'], goNs['swe']))
                            if values.text:
                                # '@' separates records, ',' separates fields.
                                rows = values.text.strip().split("@")
                                lastPrint = "%s " % (lastPrint)
                                copy = []
                                for row in rows:
                                    copy.append(row.split(","))
                                # InsertObservation to istSOS
                                template['result']['DataArray'][
                                    'values'] = copy
                                template['samplingTime'] = {
                                    "beginPosition": copy[0][0],
                                    "endPosition": nextPosition.strftime(fmt)
                                }
                                '''template['samplingTime'] = {
                                    "beginPosition": begin.strftime(fmt),
                                    "endPosition": nextPosition.strftime(fmt)
                                }'''
                                template[u"AssignedSensorId"] = procedures[
                                    pname].oid
                                looptime = time.time()
                                res = req.post(
                                    "%s/wa/istsos/services/%s/operations/insertobservation"
                                    % (dst, srv),
                                    auth=auth,
                                    data=json.dumps({
                                        u"AssignedSensorId": procedures[pname].oid,
                                        u"ForceInsert": u"true",
                                        u"Observation": template
                                    }))
                                iotime = timedelta(seconds=int(time.time() - looptime))
                                lastPrint = "%s - IO: '%s'" % (lastPrint, iotime)
                            begin = nextPosition
                            # Clamp the next chunk to the remaining period.
                            if begin < end and begin + interval > end:
                                interval = end - begin
                            if percentage < 100:
                                lastPrint = "%s - Step time: '%s' - Elapsed: %s " % (
                                    lastPrint,
                                    timedelta(seconds=int(time.time() - loopTotalTime)),
                                    timedelta(seconds=int(time.time() - startTime)))
                            else:
                                lastPrint = "%s - Step time: '%s' " % (
                                    lastPrint,
                                    timedelta(seconds=int(time.time() - loopTotalTime)))
                            sys.stdout.write(lastPrint)
                            sys.stdout.flush()
                        print " > Completed in %s" % timedelta(
                            seconds=int(time.time() - startTime))
                        # NOTE(review): this break stops after the first
                        # migrated procedure of the offering -- confirm
                        # whether intentional.
                        break
    except Exception as e:
        print "ERROR: %s\n\n" % e
        traceback.print_exc()
def getDSEndPosition(self):
    """
    Return the end position stored in the describeSensor document.

    Looks at the first output's ``constraint`` interval and parses its
    upper bound as a datetime; returns ``None`` when the output carries
    no constraint.
    """
    first_output = self.describe['outputs'][0]
    if u'constraint' not in first_output:
        return None
    return iso.parse_datetime(first_output['constraint']['interval'][1])
def execute(args, logger=None):
    """
    Export observations from an istSOS service to CSV files.

    Splits the requested period [begin, end] into module-level ``step``
    sized slices, issues one GetObservation (text/plain) per slice, and
    hands each response to ``makeFile`` for writing. A final partial
    slice covers whatever remains after the whole steps.

    :param args: dict of command-line options: url, procedure, op
                 (observed properties), begin, end, d (destination dir),
                 and optional filename, user, password, noqi
    :param logger: accepted for interface parity with the other scripts;
                   not used in this implementation
    """
    print "istsos2csv start.."
    try:
        url = args['url']
        procedure = args['procedure']
        if 'filename' in args and args['filename'] is not None:
            filename = args['filename']
        else:
            # Default the output file name to the procedure name.
            filename = procedure
        observedProperty = args['op']
        begin = iso.parse_datetime(args['begin'])
        end = iso.parse_datetime(args['end'])
        d = args['d']
        auth = None
        if 'user' in args:
            user = args['user']
            password = None
            if 'password' in args:
                password = args['password']
            if user and password:
                auth = HTTPBasicAuth(user, password)
        # Quality index flag is passed to the service as a string.
        qi = 'True'
        if 'noqi' in args:
            if args['noqi'] is True:
                qi = 'False'
        params = {
            "request": "GetObservation",
            "offering": "temporary",
            "procedure": procedure,
            "eventTime": None,
            "observedProperty": observedProperty,
            "responseFormat": "text/plain",
            "service": "SOS",
            "version": "1.0.0",
            "qualityIndex": qi
        }
        tmpBegin = begin
        tmpEnd = end
        # Slice the period only when it exceeds one step.
        if (end - begin) > step:
            tmpEnd = tmpBegin + step
        while tmpEnd <= end:
            # Python 2 print statement: the %-formatting applies to the
            # parenthesised string, then the result is printed.
            print("%s - %s") % (tmpBegin, tmpEnd)
            if tmpBegin == tmpEnd:
                # Degenerate slice: request a single instant.
                params["eventTime"] = iso.datetime_isoformat(tmpBegin)
            else:
                params["eventTime"] = "%s/%s" % (iso.datetime_isoformat(
                    tmpBegin), iso.datetime_isoformat(tmpEnd))
            res = req.get("%s?%s" % (url, urllib.urlencode(params)),
                          auth=auth)
            makeFile(res, procedure, observedProperty, d, qi, filename)
            # Advance to the next slice.
            tmpBegin = tmpEnd
            tmpEnd = tmpBegin + step
            print " %s ************************** " % iso.datetime_isoformat(
                tmpEnd)
        if tmpBegin < end:
            # Final partial slice covering the remainder of the period.
            tmpEnd = end
            if tmpBegin == tmpEnd:
                params["eventTime"] = iso.datetime_isoformat(tmpBegin)
            else:
                params["eventTime"] = "%s/%s" % (iso.datetime_isoformat(
                    tmpBegin), iso.datetime_isoformat(tmpEnd))
            res = req.get("%s?%s" % (url, urllib.urlencode(params)),
                          auth=auth)
            makeFile(res, procedure, observedProperty, d, qi, filename)
        print " %s ************************** " % iso.datetime_isoformat(
            end)
        print "Finish."
    except Exception as e:
        print "ERROR: %s\n\n" % e
        traceback.print_exc()
def __init__(self, sosRequest, method, requestObject, sosConfig): f.sosFilter.__init__(self, sosRequest, method, requestObject, sosConfig) # @TODO Declare attribute first! # self.offering = None # etc.. #************************** if method == "GET": #---------- THE OFFERING if requestObject.has_key("offering"): self.offering = get_name_from_urn(requestObject["offering"], "offering", sosConfig) else: raise sosException.SOSException( 1, "Parameter \"offering\" is mandatory with multiplicity 1") #---------- THE OBSERVED PROPERTY if requestObject.has_key("observedProperty"): self.observedProperty = [] oprs = requestObject["observedProperty"].split(",") for opr in oprs: # get_name_from_urn limit the ability to ask for an observedProperty with LIKE: # eg: ask "water" to get all the water related data, "water:discharge", "water:temperature" ... #oprName = get_name_from_urn(opr,"property") oprName = opr self.observedProperty.append(oprName) # one-many ID else: raise sosException.SOSException( 1, "Parameter \"observedProperty\" is mandatory with multiplicity N" ) #---------- RESPONSE FORMAT if requestObject.has_key("responseFormat"): if not requestObject["responseFormat"] in sosConfig.parameters[ "GO_responseFormat"]: raise sosException.SOSException( 2, "Parameter \"responseFormat\" sent with invalid value : use one of %s" % "; ".join(sosConfig.parameters["GO_responseFormat"])) else: self.responseFormat = requestObject["responseFormat"] else: raise sosException.SOSException( 1, "Parameter \"responseFormat\" is mandatory with multiplicity 1" ) #one #OPTIONAL request parameters #---------- SRS FILTER if requestObject.has_key("srsName"): self.srsName = get_name_from_urn(requestObject["srsName"], "refsystem", sosConfig) if not self.srsName in sosConfig.parameters["GO_srs"]: raise sosException.SOSException( 2, "srsName \"%s\" not supported, use one of: %s" % (self.srsName, ",".join( sosConfig.parameters["GO_srs"]))) else: self.srsName = sosConfig.parameters["GO_srs"][0] 
#---------- TIME FILTER if requestObject.has_key('eventTime'): self.eventTime = [] for i in requestObject["eventTime"].replace(" ", "+").split(","): if len(i.split("/")) < 3: self.eventTime.append(i.split("/")) else: raise sosException.SOSException( 2, "Parameter \"eventTime\" bad formatted") tp = [] for t in self.eventTime: if len(t) == 2: tp.append(iso.parse_datetime(t[0])) tp.append(iso.parse_datetime(t[1])) if len(t) == 1: tp.append(iso.parse_datetime(t[0])) # Checking if some event limitation is reached #if sosConfig["maxGoPeriod"]: if int(sosConfig.maxGoPeriod) > 0: from datetime import timedelta d = timedelta(hours=int(sosConfig.maxGoPeriod)) userPeriod = max(tp) - min(tp) if d < userPeriod: raise sosException.SOSException( 2, "You are requesting data for a period of [%s hours], but you are not permitted to ask for a period longer than: %s hours" % (userPeriod, d)) else: self.eventTime = None #---------- PROCEDURES FILTER if requestObject.has_key("procedure"): self.procedure = [] prcs = requestObject["procedure"].split(",") for prc in prcs: prcName = get_name_from_urn(prc, "procedure", sosConfig) self.procedure.append(prcName) else: self.procedure = None #---------- FEATURES OF INTEREST FILTER self.featureOfInterest = None self.featureOfInterestSpatial = None if requestObject.has_key("featureOfInterest"): foi = requestObject["featureOfInterest"] if foi.find("<ogc:") >= 0 and foi.find("<gml:") >= 0: #raise sosException.SOSException(3,"FOI SPATIAL: %s" %(foi)) self.featureOfInterestSpatial = sosUtils.ogcSpatCons2PostgisSql( foi, 'geom_foi', sosConfig.istsosepsg) else: self.featureOfInterest = get_name_from_urn( foi, "feature", sosConfig) #fois = requestObject["featureOfInterest"].split(",") #for foi in fois: # foiName = get_name_from_urn(foi,"feature") # self.featureOfInterest.append(foiName) #---------- FILTERS FOR QUERY NOT SUPPORTED YET if requestObject.has_key("result"): #raise sosException.SOSException(3,"Parameter \"result\" not yet supported") 
self.result = sosUtils.ogcCompCons2PostgisSql( requestObject["result"]) else: self.result = None #zero-one optional #---------- RESULT MODEL if requestObject.has_key("resultModel"): if requestObject["resultModel"] in sosConfig.parameters[ "GO_resultModel"]: self.resultModel = requestObject["resultModel"] else: raise sosException.SOSException( 2, "Parameter \"resultModel\" sent with invalid value: supported values are: %s" % ",".join(sosConfig.parameters["GO_resultModel"])) else: self.resultModel = sosConfig.parameters["GO_resultModel"][0] #---------- RESPONSE MODE if requestObject.has_key("responseMode"): if requestObject["responseMode"] in sosConfig.parameters[ "GO_responseMode"]: self.responseMode = requestObject["responseMode"] else: raise sosException.SOSException( 2, "Parameter \"responseMode\" sent with invalid value, supported values are: %s" % (",".join(sosConfig.parameters["GO_responseMode"]))) else: self.responseMode = sosConfig.parameters["GO_responseMode"][0] ########################### # NON STANDARD PARAMETERS # ########################### #---------- AGGREGATE INTERVAL # In ISO 8601 duration format if requestObject.has_key("aggregateInterval"): # Check on the eventTime parameter: it must be only one interval: 2010-01-01T00:00:00+00/2011-01-01T00:00:01+00 exeMsg = "Using aggregate functions, the event time must exist with an interval composed by a begin and an end date (ISO8601)" if self.eventTime == None or len(self.eventTime) != 1 or len( self.eventTime[0]) != 2: raise sosException.SOSException(2, exeMsg) self.aggregate_interval = requestObject["aggregateInterval"] try: iso.parse_duration(self.aggregate_interval) except Exception as ex: raise sosException.SOSException( 2, "Parameter \"aggregate_interval\" sent with invalid format (check ISO8601 duration spec): %s" % ex) else: self.aggregate_interval = None #---------- AGGREGATE FUNCTION # sum,avg,max,min if requestObject.has_key("aggregateFunction"): if self.aggregate_interval == None: raise 
sosException.SOSException( 2, "Using aggregate functions parameters \"aggregateInterval\" and \"aggregateFunction\" are both mandatory" ) self.aggregate_function = requestObject["aggregateFunction"] if not (self.aggregate_function.upper() in ["AVG", "COUNT", "MAX", "MIN", "SUM"]): raise sosException.SOSException( 2, "Available aggregation functions: avg, count, max, min, sum." ) else: self.aggregate_function = None #---------- AGGREGATE NODATA if requestObject.has_key("aggregateNodata"): if self.aggregate_interval == None or self.aggregate_function == None: raise sosException.SOSException( 2, "Using aggregateNodata parameter requires both \"aggregateInterval\" and \"aggregateFunction\"" ) self.aggregate_nodata = requestObject["aggregateNodata"] else: self.aggregate_nodata = sosConfig.aggregate_nodata #---------- AGGREGATE NODATA QUALITY INDEX if requestObject.has_key("aggregateNodataQi"): if self.aggregate_interval == None or self.aggregate_function == None: raise sosException.SOSException( 2, "Using aggregateNodataQi parameter requires both \"aggregateInterval\" and \"aggregateFunction\"" ) self.aggregate_nodata_qi = requestObject["aggregateNodataQi"] else: self.aggregate_nodata_qi = sosConfig.aggregate_nodata_qi #------------ QUALITY INDEX self.qualityIndex = False if requestObject.has_key("qualityIndex"): if requestObject["qualityIndex"].upper() == "TRUE": self.qualityIndex = True elif requestObject["qualityIndex"].upper() == "FALSE": self.qualityIndex = False else: raise sosException.SOSException( 2, "qualityIndex can only be True or False!") # self.qualityIndex = sosUtils.CQLvalueFilter2PostgisSql("id_qi_fk",requestObject["qualityIndex"]) #********************** if method == "POST": from xml.dom import minidom #---------- THE OFFERING offs = requestObject.getElementsByTagName('offering') if len(offs) == 1: val = offs[0].firstChild if val.nodeType == val.TEXT_NODE: self.offering = get_name_from_urn(str(val.data), "offering", sosConfig) else: err_txt = "XML 
parsing error (get value: offering)" raise sosException.SOSException(1, err_txt) else: err_txt = "Parameter \"offering\" is mandatory with multiplicity 1" raise sosException.SOSException(1, err_txt) #---------- THE OBSERVED PROPERTY obsProps = requestObject.getElementsByTagName('observedProperty') self.observedProperty = [] if len(obsProps) > 0: for obsProp in obsProps: val = obsProp.firstChild if val.nodeType == val.TEXT_NODE: # get_name_from_urn limit the ability to ask for an observedProperty with LIKE: # eg: ask "water" to get all the water related data, "water:discharge", "water:temperature" ... #self.observedProperty.append(get_name_from_urn(str(val.data),"property")) self.observedProperty.append(str(val.data)) else: err_txt = "XML parsing error (get value: observedProperty)" raise sosException.SOSException(1, err_txt) else: err_txt = "Parameter \"observedProperty\" is mandatory with multiplicity N" raise sosException.SOSException(1, err_txt) #---------- RESPONSE FORMAT respF = requestObject.getElementsByTagName('responseFormat') if len(respF) == 1: val = respF[0].firstChild if val.nodeType == val.TEXT_NODE: self.responseFormat = str(val.data) if self.responseFormat not in sosConfig.parameters[ "GO_responseFormat"]: raise sosException.SOSException( 2, "Parameter \"responseFormat\" sent with invalid value: use one of %s" % "; ".join( sosConfig.parameters["GO_responseFormat"])) else: err_txt = "XML parsing error (get value: responseFormat)" raise sosException.SOSException(1, err_txt) else: err_txt = "Parameter \"responseFormat\" is mandatory with multiplicity 1" raise sosException.SOSException(1, err_txt) #OPTIONAL request parameters #---------- SRS OF RETURNED GML FEATURES srss = requestObject.getElementsByTagName('srsName') if len(srss) > 0: if len(srss) < 2: val = srss[0].firstChild if val.nodeType == val.TEXT_NODE: self.srsName = get_name_from_urn( str(val.data), "refsystem", sosConfig) else: err_txt = "XML parsing error (get value: srsName)" raise 
sosException.SOSException(1, err_txt) else: err_txt = "Allowed only ONE parameter \"srsName\"" raise sosException.SOSException(1, err_txt) else: self.srsName = sosConfig.parameters["GO_srs"][0] #---------- TIME FILTER evtms = requestObject.getElementsByTagName('eventTime') self.eventTime = [] if len(evtms) > 0: for evtm in evtms: tps = evtm.getElementsByTagName('gml:TimePeriod') for tp in tps: begin = tp.getElementsByTagName('gml:beginPosition') end = tp.getElementsByTagName('gml:endPosition') if len(begin) == 1 and len(end) == 1: Bval = begin[0].firstChild Eval = end[0].firstChild #raise sosException.SOSException(1,end[0].toprettyxml()) if Bval.nodeType == Bval.TEXT_NODE and Eval.nodeType == Eval.TEXT_NODE: self.eventTime.append([ str(Bval.data).replace(" ", "+"), str(Eval.data).replace(" ", "+") ]) #raise sosException.SOSException(1,str(self.eventTime)) else: err_txt = "XML parsing error (get value: TimePeriod)" raise sosException.SOSException(1, err_txt) tis = evtm.getElementsByTagName('gml:TimeInstant') for ti in tis: instant = ti.getElementsByTagName('gml:timePosition') if len(instant) > 0 and len(instant) < 2: Ival = instant[0].firstChild if Ival.nodeType == Ival.TEXT_NODE: self.eventTime.append( [str(Ival.data).replace(" ", "+")]) else: err_txt = "XML parsing error (get value: Timeinstant)" raise sosException.SOSException(1, err_txt) else: self.eventTime = None #---------- PROCEDURES FILTER procs = requestObject.getElementsByTagName('procedure') if len(procs) > 0: self.procedure = [] for proc in procs: if "xlink:href" in proc.attributes.keys(): self.procedure.append( str(proc.getAttribute("xlink:href"))) elif proc.hasChildNodes(): val = proc.firstChild if val.nodeType == val.TEXT_NODE: self.procedure.append( get_name_from_urn(str(val.data), "procedure", sosConfig)) else: err_txt = "XML parsing error (get value: procedure)" raise sosException.SOSException(1, err_txt) else: self.procedure = None #---------- FEATURES OF INTEREST FILTER fets = 
requestObject.getElementsByTagName('featureOfInterest') self.featureOfInterest = None self.featureOfInterestSpatial = None if len(fets) > 0: if len(fets) < 2: elements = [ e for e in fets[0].childNodes if e.nodeType == e.ELEMENT_NODE ] if len(elements) == 1: self.featureOfInterestSpatial = sosUtils.ogcSpatCons2PostgisSql( elements[0], 'geom_foi', sosConfig.istsosepsg) else: if "xlink:href" in fets[0].attributes.keys(): self.featureOfInterest = str( fets[0].getAttribute("xlink:href")) elif fets[0].hasChildNodes(): val = fets[0].firstChild if val.nodeType == val.TEXT_NODE: self.featureOfInterest = get_name_from_urn( str(val.data), "feature", sosConfig) else: err_txt = "XML parsing error (get value: featureOfInterest)" raise sosException.SOSException(1, err_txt) else: err_txt = "Allowed only ONE parameter \"featureOfInterest\"" raise sosException.SOSException(1, err_txt) #---------- FILTERS FOR QUERY NOT SUPPORTED YET ress = requestObject.getElementsByTagName('result') if len(ress) > 0: raise sosException.SOSException( 3, "Parameter \"result\" not yet supported") else: self.result = None #zero-one optional #---------- RESULT MODEL mods = requestObject.getElementsByTagName('resultModel') if len(mods) > 0: if len(mods) < 2: val = mods[0].firstChild if val.nodeType == val.TEXT_NODE: self.resultModel = str(val.data) if self.resultModel not in sosConfig.parameters[ "GO_resultModel"]: raise sosException.SOSException( 2, "Parameter \"resultModel\" sent with invalid value" ) else: err_txt = "XML parsing error (get value: resultModel)" raise sosException.SOSException(1, err_txt) else: err_txt = "Allowed only ONE parameter \"resultModel\"" raise sosException.SOSException(1, err_txt) else: self.resultModel = None #---------- RESPONSE MODE rsmods = requestObject.getElementsByTagName('responseMode') if len(rsmods) > 0: if len(rsmods) < 2: val = rsmods[0].firstChild if val.nodeType == val.TEXT_NODE: self.responseMode = str(val.data) if self.responseMode not in sosConfig.parameters[ 
"GO_responseMode"]: raise sosException.SOSException( 2, "Parameter \"responseMode\" sent with invalid value" ) else: err_txt = "XML parsing error (get value: responseMode)" raise sosException.SOSException(1, err_txt) else: err_txt = "Allowed only ONE parameter \"responseMode\"" raise sosException.SOSException(1, err_txt) else: self.responseMode = sosConfig.parameters["GO_responseMode"][0] #-------------- AGGREGATE INTERVAL & FUNCTION self.aggregate_interval = None self.aggregate_function = None aggint = requestObject.getElementsByTagName('aggregateInterval') aggfun = requestObject.getElementsByTagName('aggregateFunction') aggnodata = requestObject.getElementsByTagName('aggregateNodata') if len(aggint) == 1 and len(aggfun) == 1: #----------------------- # -- aggregate_interval #----------------------- # Check on the eventTime parameter: it must be only one interval: 2010-01-01T00:00:00+00/2011-01-01T00:00:01+00 exeMsg = "Using aggregate functions, the event time must exist with an interval composed by a begin and an end date (ISO8601)" if self.eventTime == None or len(self.eventTime) != 1 or len( self.eventTime[0]) != 2: raise sosException.SOSException(2, exeMsg) val = aggint[0].firstChild if val.nodeType == val.TEXT_NODE: self.aggregate_interval = str(val.data) try: iso.parse_duration(self.aggregate_interval) except Exception as ex: raise sosException.SOSException( 2, "Parameter \"aggregate_interval\" sent with invalid format (check ISO8601 duration spec): %s" % ex) else: err_txt = "cannot get ISO8601 duration value in \"aggregateInterval\"" raise sosException.SOSException(1, err_txt) #----------------------- # -- aggregate_function #----------------------- val = aggfun[0].firstChild if val.nodeType == val.TEXT_NODE: self.aggregate_function = str(val.data) if not (self.aggregate_function.upper() in ["AVG", "COUNT", "MAX", "MIN", "SUM"]): raise sosException.SOSException( 2, "Available aggregation functions: avg, count, max, min, sum." 
) #----------------------------------- # -- aggregate_no_data default value #----------------------------------- if len(aggnodata) == 1: val = aggnodata[0].firstChild self.aggregate_nodata = str(val.data) else: self.aggregate_nodata = sosConfig.aggregate_nodata #================================ #MISSING AGGREGATE QUALITY INDEX #================================ elif len(aggint) == 0 and len(aggfun) == 0: pass else: err_txt = "\"aggregateInterval\" and \"aggregate_function\" are both required with multiplicity 1" raise sosException.SOSException(1, err_txt) #------------ QUALITY INDEX self.qualityIndex = False qidx = requestObject.getElementsByTagName('qualityIndex') if len(qidx) > 0: if len(qidx) < 2: val = qidx[0].firstChild if val.nodeType == val.TEXT_NODE: self.qualityIndex = str(val.data) if self.qualityIndex.upper() == "TRUE": self.qualityIndex = True elif self.qualityIndex.upper() == "FALSE": pass else: raise sosException.SOSException( 2, "qualityIndex can only be \'True\' or \'False\'" ) elif len(qidx) == 0: pass else: err_txt = "\"qualityIndex\" is allowed with multiplicity 1 only" raise sosException.SOSException(1, err_txt)
def __init__(self, sosRequest, method, requestObject, sosConfig):
    """Parse a GetObservation request into filter attributes.

    Handles both KVP ("GET", lowercase parameter keys) and XML ("POST",
    DOM element lookups) encodings, and both SOS 1.0.0 and 2.0.0
    (``self.version`` is presumably set by ``f.sosFilter.__init__`` —
    TODO confirm).  Populates, among others: ``observedProperty``,
    ``procedure``, ``offering``, ``responseFormat``, ``srsName``,
    ``eventTime``, ``featureOfInterest``/``featureOfInterestSpatial``,
    ``resultModel``, ``responseMode``, the non-standard aggregate_*
    attributes, ``qualityIndex`` and ``qualityFilter``.

    Raises sosException.SOSException(code, locator, message) on any
    missing/invalid parameter.
    """
    f.sosFilter.__init__(self, sosRequest, method, requestObject, sosConfig)
    if method == "GET":
        # Defaults for optional filters.
        self.eventTime = None
        self.featureOfInterest = None
        self.featureOfInterestSpatial = None
        self.result = None  # zero-one optional
        self.observedProperty = [':']
        # OBSERVED PROPERTY
        # get_name_from_urn limit the ability to ask for an observedProperty with LIKE:
        # eg: ask "water" to get all the water related data, "water:discharge", "water:temperature" ...
        if requestObject.has_key("observedproperty"):
            self.observedProperty = []
            oprs = requestObject["observedproperty"].split(",")
            for opr in oprs:
                if opr == '':
                    raise sosException.SOSException(
                        "MissingParameterValue", "observedProperty",
                        "Missing 'observedProperty' parameter")
                oprName = opr
                self.observedProperty.append(oprName)  # one-many ID
        # PROCEDURES FILTER (comma-separated list of procedure names/URNs)
        if requestObject.has_key("procedure"):
            self.procedure = []
            prcs = requestObject["procedure"].split(",")
            for prc in prcs:
                if prc == '':
                    raise sosException.SOSException(
                        "MissingParameterValue", "procedure",
                        "Missing 'procedure' parameter")
                try:
                    prcName = get_name_from_urn(prc, "procedure", sosConfig)
                except Exception as e:
                    raise sosException.SOSException(
                        "InvalidParameterValue", "procedure", str(e))
                self.procedure.append(prcName)
        else:
            self.procedure = None
        if self.version == '2.0.0':
            # THE OFFERING
            # > in istSOS offerings are equals to procedures
            # > so offerings are inserted into the procedure array filter
            if requestObject.has_key("offering"):
                prcs = requestObject["offering"].split(",")
                if self.procedure == None:
                    self.procedure = []
                for prc in prcs:
                    if prc == '':
                        raise sosException.SOSException(
                            "MissingParameterValue", "offering",
                            "Missing 'offering' parameter")
                    try:
                        prcName = get_name_from_urn(
                            prc, "offering", sosConfig)
                    except Exception as e:
                        raise sosException.SOSException(
                            "InvalidParameterValue", "offering", str(e))
                    # Check for name redundancy
                    if prcName not in self.procedure:
                        self.procedure.append(prcName)
            # RESPONSE FORMAT (defaults to O&M 2.0 XML)
            self.responseFormat = 'text/xml;subtype="om/2.0"'
            if requestObject.has_key("responseformat"):
                if requestObject["responseformat"] == '':
                    raise sosException.SOSException(
                        "MissingParameterValue", "responseformat",
                        "Missing 'responseformat' parameter")
                if not requestObject[
                        "responseformat"] in sosConfig.parameters[
                            "GO_responseFormat_2_0_0"]:
                    raise sosException.SOSException(
                        "InvalidParameterValue", "responseFormat",
                        "Parameter \"responseFormat\" sent with invalid value : use one of %s"
                        % "; ".join(sosConfig.
                                    parameters["GO_responseFormat_2_0_0"]))
                elif requestObject[
                        "responseformat"] == sosConfig.parameters[
                            "GO_responseFormat_2_0_0"][0]:
                    # First configured format maps to the canonical mime type.
                    self.responseFormat = 'text/xml;subtype="om/2.0"'
                else:
                    self.responseFormat = requestObject["responseformat"]
            # OPTIONAL SRS FILTER (2.0.0 uses "crs"; only the last token
            # after ':' is kept, e.g. "EPSG:4326" -> "4326")
            if requestObject.has_key("crs"):
                try:
                    self.srsName = requestObject["crs"].split(':')[-1]
                except Exception as e:
                    raise sosException.SOSException(
                        "InvalidParameterValue", "crs", "%s" % e)
                if not self.srsName in sosConfig.parameters["GO_srs"]:
                    raise sosException.SOSException(
                        "InvalidParameterValue", "crs",
                        "crs \"%s\" not supported, use one of: %s" %
                        (self.srsName, ",".join(
                            sosConfig.parameters["GO_srs"])))
            else:
                self.srsName = sosConfig.parameters["GO_srs"][0]
            # TIME FILTER
            # istSOS supports
            # kvp examples:
            # - during: temporalFilter=om:phenomenonTime,2012-11-19T14:00:00+01:00/2012-11-19T14:15:00+01:00
            # - equals: temporalFilter=om:phenomenonTime,2012-11-19T14:00:00.000+01:00
            # - combination: temporalFilter=om:phenomenonTime,2012-11-19T14:00:00+01:00/2012-11-19T14:15:00+01:00,2012-11-19T14:00:00.000+01:00
            # NOTE: '+' in timezone offsets arrives URL-decoded as a space,
            # hence the replace(" ", "+") below.
            if 'temporalfilter' in requestObject:
                self.eventTime = []
                temporalfilter = requestObject["temporalfilter"].replace(
                    " ", "+").split(",")
                # > in istSOS om:phenomenonTime is equals to om:resultTime
                if temporalfilter.pop(0) not in [
                        'om:phenomenonTime', 'phenomenonTime',
                        'om:resultTime', 'resultTime'
                ]:
                    raise sosException.SOSException(
                        "InvalidParameterValue", "temporalfilter",
                        "Parameter \"temporalFilter\" bad formatted")
                for i in temporalfilter:
                    if '/' in i:
                        # begin/end interval
                        interval = i.split("/")
                        if len(interval) != 2:
                            raise sosException.SOSException(
                                "InvalidParameterValue", "temporalfilter",
                                "Parameter \"temporalfilter\" bad formatted"
                            )
                        try:
                            iso.parse_date(interval[0])
                            iso.parse_date(interval[1])
                        except iso.ISO8601Error as isoerr:
                            raise sosException.SOSException(
                                "InvalidParameterValue", "temporalfilter",
                                "Parameter \"temporalfilter\" bad formatted, %s"
                                % isoerr)
                        self.eventTime.append(interval)
                    else:
                        # single instant
                        try:
                            iso.parse_date(i)
                        except iso.ISO8601Error as isoerr:
                            raise sosException.SOSException(
                                "InvalidParameterValue", "temporalfilter",
                                "Parameter \"temporalfilter\" bad formatted, %s"
                                % isoerr)
                        self.eventTime.append([i])
            # FEATURES OF INTEREST FILTER
            if requestObject.has_key("featureofinterest"):
                if requestObject["featureofinterest"] == '':
                    raise sosException.SOSException(
                        "MissingParameterValue", "featureOfInterest",
                        "Missing 'featureOfInterest' parameter")
                if sosConfig.urn["feature"] in requestObject[
                        "featureofinterest"]:
                    self.featureOfInterest = get_name_from_urn(
                        requestObject["featureofinterest"], "feature",
                        sosConfig)
                else:
                    self.featureOfInterest = requestObject[
                        "featureofinterest"]
            # SPATIAL FILTER
            # example1: spatialFilter=om:featureOfInterest/*/sams:shape,0.0,0.0,60.0,60.0,http://www.opengis.net/def/crs/EPSG/0/4326
            # example2: spatialFilter=om:featureOfInterest/*/sams:shape,0.0,0.0,60.0,60.0,urn:ogc:def:crs:EPSG::4326
            if requestObject.has_key("spatialfilter"):
                sfs = requestObject["spatialfilter"].split(",")
                # Expect: value-reference, minx, miny, maxx, maxy, crs
                if len(sfs) != 6:
                    raise sosException.SOSException(
                        "InvalidParameterValue", "spatialfilter",
                        "Invalid spatial filter '%s'" %
                        requestObject["spatialfilter"])
                if sfs[0] != 'om:featureOfInterest/*/sams:shape':
                    raise sosException.SOSException(
                        "InvalidParameterValue", "spatialfilter",
                        "Invalid spatial filter '%s'" %
                        requestObject["spatialfilter"])
                srsName = None
                # NOTE(review): str.index raises ValueError when the
                # separator is absent (it never returns -1); str.find was
                # probably intended here — verify with malformed crs input.
                if sfs[5].index(':') > -1:
                    srsName = sfs[5].split(':')[-1]
                if sfs[5].index('/') > -1:
                    srsName = sfs[5].split('/')[-1]
                # Build an OGC BBOX filter and translate it to PostGIS SQL.
                ogcfilter = (
                    "<ogc:BBOX>" +
                    "<ogc:PropertyName>the_geom</ogc:PropertyName>" +
                    ("<gml:Box srsName='EPSG:%s'>" % (srsName)) +
                    ("<gml:coordinates>%s,%s %s,%s</gml:coordinates>" %
                     (sfs[1], sfs[2], sfs[3], sfs[4])) + "</gml:Box>" +
                    "</ogc:BBOX>")
                self.featureOfInterestSpatial = sosUtils.ogcSpatCons2PostgisSql(
                    ogcfilter, 'geom_foi', sosConfig.istsosepsg)
        else:
            # ---- SOS 1.0.0 KVP branch ----
            # THE OFFERING
            if requestObject.has_key("offering"):
                try:
                    self.offering = get_name_from_urn(
                        requestObject["offering"], "offering", sosConfig)
                except Exception as e:
                    raise sosException.SOSException(
                        "InvalidParameterValue", "offering", str(e))
            # RESPONSE FORMAT
            if requestObject.has_key("responseformat"):
                if requestObject["responseformat"] == '':
                    raise sosException.SOSException(
                        "MissingParameterValue", "responseFormat",
                        "Missing 'responseFormat' parameter")
                if not requestObject[
                        "responseformat"] in sosConfig.parameters[
                            "GO_responseFormat"]:
                    raise sosException.SOSException(
                        "InvalidParameterValue", "responseFormat",
                        "Parameter \"responseFormat\" sent with invalid value : use one of %s"
                        % "; ".join(
                            sosConfig.parameters["GO_responseFormat"]))
                else:
                    self.responseFormat = requestObject["responseformat"]
            # Mandatory-parameter checks (1.0.0): offering, observedProperty,
            # responseFormat must all be present.
            if not requestObject.has_key("offering"):
                raise sosException.SOSException(
                    "MissingParameterValue", "offering",
                    "Parameter \"offering\" is mandatory with multiplicity 1"
                )
            if not requestObject.has_key("observedproperty"):
                raise sosException.SOSException(
                    "MissingParameterValue", "observedProperty",
                    "Parameter \"observedProperty\" is mandatory with multiplicity N"
                )
            if not requestObject.has_key("responseformat"):
                raise sosException.SOSException(
                    "MissingParameterValue", "responseFormat",
                    "Parameter \"responseFormat\" is mandatory with multiplicity 1"
                )  #one
            # OPTIONAL SRS FILTER
            if requestObject.has_key("srsname"):
                try:
                    self.srsName = get_name_from_urn(
                        requestObject["srsname"], "refsystem", sosConfig)
                except Exception as e:
                    raise sosException.SOSException(
                        "InvalidParameterValue", "srsname", "%s" % e)
                if not self.srsName in sosConfig.parameters["GO_srs"]:
                    raise sosException.SOSException(
                        "InvalidParameterValue", "srsName",
                        "srsName \"%s\" not supported, use one of: %s" %
                        (self.srsName, ",".join(
                            sosConfig.parameters["GO_srs"])))
            else:
                self.srsName = sosConfig.parameters["GO_srs"][0]
            # TIME FILTER: comma list of instants or begin/end intervals;
            # '+' in offsets arrives as space after URL decoding.
            if requestObject.has_key('eventtime'):
                self.eventTime = []
                for i in requestObject["eventtime"].replace(
                        " ", "+").split(","):
                    if len(i.split("/")) < 3:
                        self.eventTime.append(i.split("/"))
                    else:
                        raise sosException.SOSException(
                            "InvalidParameterValue", "eventTime",
                            "Parameter \"eventTime\" bad formatted")
            # FEATURES OF INTEREST FILTER: either inline OGC/GML spatial
            # XML or a feature name/URN.
            if requestObject.has_key("featureofinterest"):
                foi = requestObject["featureofinterest"]
                if foi.find("<ogc:") >= 0 and foi.find("<gml:") >= 0:
                    self.featureOfInterestSpatial = sosUtils.ogcSpatCons2PostgisSql(
                        foi, 'geom_foi', sosConfig.istsosepsg)
                else:
                    try:
                        self.featureOfInterest = get_name_from_urn(
                            foi, "feature", sosConfig)
                    except Exception as e:
                        raise sosException.SOSException(
                            "InvalidParameterValue", "featureofinterest",
                            str(e))
            # FILTERS FOR QUERY NOT SUPPORTED YET
            if requestObject.has_key("result"):
                self.result = sosUtils.ogcCompCons2PostgisSql(
                    requestObject["result"])
            # RESULT MODEL
            if requestObject.has_key("resultmodel"):
                if requestObject["resultmodel"] in sosConfig.parameters[
                        "GO_resultModel"]:
                    self.resultModel = requestObject["resultmodel"]
                else:
                    raise sosException.SOSException(
                        "InvalidParameterValue", "resultModel",
                        "Parameter \"resultModel\" sent with invalid value: supported values are: %s"
                        % ",".join(sosConfig.parameters["GO_resultModel"]))
            else:
                self.resultModel = sosConfig.parameters["GO_resultModel"][
                    0]
            # RESPONSE MODE
            if requestObject.has_key("responsemode"):
                if requestObject["responsemode"] in sosConfig.parameters[
                        "GO_responseMode"]:
                    self.responseMode = requestObject["responsemode"]
                else:
                    raise sosException.SOSException(
                        "InvalidParameterValue", "responseMode",
                        "Parameter \"responseMode\" sent with invalid value, supported values are: %s"
                        % (",".join(
                            sosConfig.parameters["GO_responseMode"])))
            else:
                self.responseMode = sosConfig.parameters[
                    "GO_responseMode"][0]
        # Checking if some event limitation is reached
        # (maxGoPeriod caps the overall requested time span, in hours).
        if self.eventTime != None:
            tp = []
            for t in self.eventTime:
                if len(t) == 2:
                    tp.append(iso.parse_datetime(t[0]))
                    tp.append(iso.parse_datetime(t[1]))
                if len(t) == 1:
                    tp.append(iso.parse_datetime(t[0]))
            if int(sosConfig.maxGoPeriod) > 0:
                maxhours = timedelta(hours=int(sosConfig.maxGoPeriod))
                userPeriod = max(tp) - min(tp)
                if maxhours < userPeriod:
                    if self.version == '2.0.0':
                        # REQ39 - http://www.opengis.net/spec/SOS/2.0/req/core/go-too-many-obs-exception
                        # The service determined that the requested result set exceeds the response
                        # size limit of the service and thus cannot be delivered.
                        raise sosException.SOSException(
                            "ResponseExceedsSizeLimit", "",
                            "You are requesting data for a period of [%s hours], but you are not permitted to ask for a period longer than: %s hours"
                            % (userPeriod, maxhours))
                    else:
                        raise sosException.SOSException(
                            "InvalidParameterValue", "eventTime",
                            "You are requesting data for a period of [%s hours], but you are not permitted to ask for a period longer than: %s hours"
                            % (userPeriod, maxhours))
        elif (sosConfig.strictogc in ['True', 'true', 1]
              and self.version == '2.0.0' and self.eventTime == None
              and self.featureOfInterest == None
              and self.featureOfInterestSpatial == None
              and self.procedure == None):
            # ResponseExceedsSizeLimit fake exception: in strict-OGC mode an
            # entirely unconstrained 2.0.0 request is refused up front.
            raise sosException.SOSException(
                "ResponseExceedsSizeLimit", "",
                "Sorry but, You are requesting too many data")
        #####################################
        # NON STANDARD PARAMETERS by istSOS #
        #####################################
        # AGGREGATE INTERVAL
        # In ISO 8601 duration format
        if requestObject.has_key("aggregateinterval"):
            # Check on the eventTime parameter: it must be only one interval: 2010-01-01T00:00:00+00/2011-01-01T00:00:01+00
            exeMsg = "Using aggregate functions, the event time must exist with an interval composed by a begin and an end date (ISO8601)"
            if self.eventTime == None or len(self.eventTime) != 1 or len(
                    self.eventTime[0]) != 2:
                raise sosException.SOSException("InvalidParameterValue",
                                                "aggregateInterval", exeMsg)
            self.aggregate_interval = requestObject["aggregateinterval"]
            try:
                iso.parse_duration(self.aggregate_interval)
            except Exception as ex:
                raise sosException.SOSException(
                    "InvalidParameterValue", "aggregateInterval",
                    "Parameter \"aggregate_interval\" sent with invalid format (check ISO8601 duration spec): %s"
                    % ex)
        else:
            self.aggregate_interval = None
        # AGGREGATE FUNCTION
        # sum,avg,max,min
        if requestObject.has_key("aggregatefunction"):
            if self.aggregate_interval == None:
                raise sosException.SOSException(
                    "InvalidParameterValue", "aggregateFunction",
                    "Using aggregate functions parameters \"aggregateInterval\" and \"aggregateFunction\" are both mandatory"
                )
            self.aggregate_function = requestObject["aggregatefunction"]
            if not (self.aggregate_function.upper()
                    in ["AVG", "COUNT", "MAX", "MIN", "SUM"]):
                raise sosException.SOSException(
                    "InvalidParameterValue", "aggregateFunction",
                    "Available aggregation functions: avg, count, max, min, sum."
                )
        else:
            self.aggregate_function = None
        # AGGREGATE NODATA (value substituted where an aggregation window
        # has no observations)
        if requestObject.has_key("aggregatenodata"):
            if self.aggregate_interval == None or self.aggregate_function == None:
                raise sosException.SOSException(
                    "InvalidParameterValue", "aggregateNodata",
                    "Using aggregateNodata parameter requires both \"aggregateInterval\" and \"aggregateFunction\""
                )
            self.aggregate_nodata = requestObject["aggregatenodata"]
        else:
            self.aggregate_nodata = sosConfig.aggregate_nodata
        # AGGREGATE NODATA QUALITY INDEX
        if requestObject.has_key("aggregatenodataqi"):
            if self.aggregate_interval == None or self.aggregate_function == None:
                raise sosException.SOSException(
                    "InvalidParameterValue", "aggregateNodataQi",
                    "Using aggregateNodataQi parameter requires both \"aggregateInterval\" and \"aggregateFunction\""
                )
            self.aggregate_nodata_qi = requestObject["aggregatenodataqi"]
        else:
            self.aggregate_nodata_qi = sosConfig.aggregate_nodata_qi
        # QUALITY INDEX (boolean flag; string "true"/"false", case-insensitive)
        self.qualityIndex = False
        if requestObject.has_key("qualityindex"):
            if requestObject["qualityindex"].upper() == "TRUE":
                self.qualityIndex = True
            elif requestObject["qualityindex"].upper() == "FALSE":
                self.qualityIndex = False
            else:
                raise sosException.SOSException(
                    "InvalidParameterValue", "qualityIndex",
                    "qualityIndex can only be True or False!")
        # QUALITY INDEX FILTERING: operator + numeric threshold, e.g.
        # ">=100"; stored as a (operator, float) tuple.
        self.qualityFilter = False
        if requestObject.has_key("qualityfilter"):
            if len(requestObject["qualityfilter"]) >= 2:
                try:
                    if requestObject["qualityfilter"][
                            0:2] == '<=' or requestObject["qualityfilter"][
                                0:2] == '>=':
                        self.qualityFilter = (
                            requestObject["qualityfilter"][0:2],
                            float(requestObject["qualityfilter"][2:]))
                    elif (requestObject["qualityfilter"][0] == '>'
                          or requestObject["qualityfilter"][0] == '='
                          or requestObject["qualityfilter"][0] == '<'):
                        self.qualityFilter = (
                            requestObject["qualityfilter"][0],
                            float(requestObject["qualityfilter"][1:]))
                    # If qualityFilter is defined qualityIndex are automatically returned
                    self.qualityIndex = True
                except ValueError as ve:
                    raise sosException.SOSException(
                        "InvalidParameterValue", "qualityFilter",
                        "invalid quality index value in qualityFilter")
            else:
                raise sosException.SOSException(
                    "InvalidParameterValue", "qualityFilter",
                    "qualityFilter operator can only be in ['<','>','<=','>=','=']"
                )
    if method == "POST":
        # ---- XML (DOM) request branch ----
        from xml.dom import minidom
        # THE OFFERING
        offs = requestObject.getElementsByTagName('offering')
        if len(offs) == 1:
            val = offs[0].firstChild
            if val.nodeType == val.TEXT_NODE:
                try:
                    self.offering = get_name_from_urn(
                        str(val.data), "offering", sosConfig)
                except Exception as e:
                    raise sosException.SOSException(
                        "InvalidParameterValue", "offering", str(e))
            else:
                err_txt = "XML parsing error (get value: offering)"
                raise sosException.SOSException("NoApplicableCode", None,
                                                err_txt)
        else:
            err_txt = "Parameter \"offering\" is mandatory with multiplicity 1"
            raise sosException.SOSException("MissingParameterValue",
                                            "offering", err_txt)
        # THE OBSERVED PROPERTY
        obsProps = requestObject.getElementsByTagName('observedProperty')
        self.observedProperty = []
        if len(obsProps) > 0:
            for obsProp in obsProps:
                val = obsProp.firstChild
                if val.nodeType == val.TEXT_NODE:
                    # get_name_from_urn limit the ability to ask for an observedProperty with LIKE:
                    # eg: ask "water" to get all the water related data, "water:discharge", "water:temperature" ...
                    #self.observedProperty.append(get_name_from_urn(str(val.data),"property"))
                    self.observedProperty.append(str(val.data))
                else:
                    err_txt = "XML parsing error (get value: observedProperty)"
                    raise sosException.SOSException(
                        "NoApplicableCode", None, err_txt)
        else:
            err_txt = "Parameter \"observedProperty\" is mandatory with multiplicity N"
            raise sosException.SOSException("MissingParameterValue",
                                            "observedProperty", err_txt)
        # RESPONSE FORMAT
        respF = requestObject.getElementsByTagName('responseFormat')
        if len(respF) == 1:
            val = respF[0].firstChild
            if val.nodeType == val.TEXT_NODE:
                self.responseFormat = str(val.data)
                if self.responseFormat not in sosConfig.parameters[
                        "GO_responseFormat"]:
                    raise sosException.SOSException(
                        "InvalidParameterValue", "responseFormat",
                        "Parameter \"responseFormat\" sent with invalid value: use one of %s"
                        % "; ".join(
                            sosConfig.parameters["GO_responseFormat"]))
            else:
                err_txt = "XML parsing error (get value: responseFormat)"
                raise sosException.SOSException("NoApplicableCode", None,
                                                err_txt)
        else:
            err_txt = "Parameter \"responseFormat\" is mandatory with multiplicity 1"
            raise sosException.SOSException("MissingParameterValue",
                                            "responseFormat", err_txt)
        # OPTIONAL request parameters
        # SRS OF RETURNED GML FEATURES (srsName attribute on the root)
        srs = requestObject.getAttributeNode('srsName')
        if srs:
            self.srsName = srs.nodeValue
            if not self.srsName in sosConfig.parameters["GO_srs"]:
                raise sosException.SOSException(
                    "InvalidParameterValue", "srsName",
                    "srsName \"%s\" not supported, use one of: %s" %
                    (self.srsName, ",".join(
                        sosConfig.parameters["GO_srs"])))
        else:
            self.srsName = sosConfig.parameters["GO_srs"][0]
        # TIME FILTER: gml:TimePeriod -> [begin, end], gml:TimeInstant ->
        # [instant]; spaces restored to '+' (URL-decoded tz offsets).
        evtms = requestObject.getElementsByTagName('eventTime')
        self.eventTime = []
        if len(evtms) > 0:
            for evtm in evtms:
                tps = evtm.getElementsByTagName('gml:TimePeriod')
                for tp in tps:
                    begin = tp.getElementsByTagName('gml:beginPosition')
                    end = tp.getElementsByTagName('gml:endPosition')
                    if len(begin) == 1 and len(end) == 1:
                        Bval = begin[0].firstChild
                        Eval = end[0].firstChild
                        if Bval.nodeType == Bval.TEXT_NODE and Eval.nodeType == Eval.TEXT_NODE:
                            self.eventTime.append([
                                str(Bval.data).replace(" ", "+"),
                                str(Eval.data).replace(" ", "+")
                            ])
                        else:
                            err_txt = "XML parsing error (get value: TimePeriod)"
                            raise sosException.SOSException(
                                "NoApplicableCode", None, err_txt)
                tis = evtm.getElementsByTagName('gml:TimeInstant')
                for ti in tis:
                    instant = ti.getElementsByTagName('gml:timePosition')
                    if len(instant) > 0 and len(instant) < 2:
                        Ival = instant[0].firstChild
                        if Ival.nodeType == Ival.TEXT_NODE:
                            self.eventTime.append(
                                [str(Ival.data).replace(" ", "+")])
                        else:
                            err_txt = "XML parsing error (get value: Timeinstant)"
                            raise sosException.SOSException(
                                "NoApplicableCode", None, err_txt)
        else:
            self.eventTime = None
        # PROCEDURES FILTER: either an xlink:href attribute or a text child.
        procs = requestObject.getElementsByTagName('procedure')
        if len(procs) > 0:
            self.procedure = []
            for proc in procs:
                if "xlink:href" in proc.attributes.keys():
                    self.procedure.append(
                        str(proc.getAttribute("xlink:href")))
                elif proc.hasChildNodes():
                    val = proc.firstChild
                    if val.nodeType == val.TEXT_NODE:
                        try:
                            self.procedure.append(
                                get_name_from_urn(str(val.data),
                                                  "procedure", sosConfig))
                        except Exception as e:
                            raise sosException.SOSException(
                                "InvalidParameterValue", "procedure",
                                str(e))
                    else:
                        err_txt = "XML parsing error (get value: procedure)"
                        raise sosException.SOSException(
                            "NoApplicableCode", None, err_txt)
        else:
            self.procedure = None
        # FEATURES OF INTEREST FILTER
        fets = requestObject.getElementsByTagName('featureOfInterest')
        self.featureOfInterest = None
        self.featureOfInterestSpatial = None
        # get sub-elements of FOI
        if fets:
            elements = [
                e for e in fets[0].childNodes
                if e.nodeType == e.ELEMENT_NODE
            ]
            if len(elements) == 0:
                err_txt = "ObjectID or ogc:spatialOps elements in parameter \"featureOfInterest\" are mandatory"
                raise sosException.SOSException("NoApplicableCode", None,
                                                err_txt)
            # only one sub element: anything other than ObjectID is treated
            # as an ogc spatial operator and translated to PostGIS SQL
            elif len(elements) == 1 and elements[0].tagName != "ObjectID":
                self.featureOfInterestSpatial = sosUtils.ogcSpatCons2PostgisSql(
                    elements[0], 'geom_foi', sosConfig.istsosepsg)
            else:
                tempfois = []
                # NOTE(review): the loop variable 'e' is shadowed by the
                # 'except ... as e' clauses below — confirm intended.
                for e in elements:
                    if not e.tagName == "ObjectID":
                        err_txt = "Allowed only ObjectID or ogc:spatialOps elements in parameter \"featureOfInterest\""
                        raise sosException.SOSException(
                            "NoApplicableCode", None, err_txt)
                    try:
                        val = e.firstChild
                        if val.nodeType == val.TEXT_NODE:
                            try:
                                tempfois.append(
                                    get_name_from_urn(
                                        str(val.data), "feature",
                                        sosConfig))
                            except Exception as e:
                                raise sosException.SOSException(
                                    "InvalidParameterValue",
                                    "featureOfInterest", str(e))
                    except Exception as e:
                        raise e
                self.featureOfInterest = ",".join(tempfois)
        # FILTERS FOR QUERY NOT SUPPORTED YET
        ress = requestObject.getElementsByTagName('result')
        if len(ress) > 0:
            raise sosException.SOSException(
                "NoApplicableCode", None,
                "Parameter \"result\" not yet supported")
        else:
            self.result = None  # zero-one optional
        # RESULT MODEL
        mods = requestObject.getElementsByTagName('resultModel')
        if len(mods) > 0:
            if len(mods) < 2:
                val = mods[0].firstChild
                if val.nodeType == val.TEXT_NODE:
                    self.resultModel = str(val.data)
                    if self.resultModel not in sosConfig.parameters[
                            "GO_resultModel"]:
                        raise sosException.SOSException(
                            "InvalidParameterValue", "resultModel",
                            "Parameter \"resultModel\" sent with invalid value"
                        )
                else:
                    err_txt = "XML parsing error (get value: resultModel)"
                    raise sosException.SOSException(
                        "NoApplicableCode", None, err_txt)
            else:
                err_txt = "Allowed only ONE parameter \"resultModel\""
                raise sosException.SOSException("NoApplicableCode", None,
                                                err_txt)
        else:
            self.resultModel = None
        # RESPONSE MODE
        rsmods = requestObject.getElementsByTagName('responseMode')
        if len(rsmods) > 0:
            if len(rsmods) < 2:
                val = rsmods[0].firstChild
                if val.nodeType == val.TEXT_NODE:
                    self.responseMode = str(val.data)
                    if self.responseMode not in sosConfig.parameters[
                            "GO_responseMode"]:
                        raise sosException.SOSException(
                            "InvalidParameterValue", "responseMode",
                            "Parameter \"responseMode\" sent with invalid value"
                        )
                else:
                    err_txt = "XML parsing error (get value: responseMode)"
                    raise sosException.SOSException(
                        "NoApplicableCode", None, err_txt)
            else:
                err_txt = "Allowed only ONE parameter \"responseMode\""
                raise sosException.SOSException("NoApplicableCode", None,
                                                err_txt)
        else:
            self.responseMode = sosConfig.parameters["GO_responseMode"][0]
        # AGGREGATE INTERVAL & FUNCTION (non-standard istSOS extension;
        # both elements must appear together, exactly once each)
        self.aggregate_interval = None
        self.aggregate_function = None
        self.aggregate_nodata = None
        self.aggregate_nodata_qi = None
        aggint = requestObject.getElementsByTagName('aggregateInterval')
        aggfun = requestObject.getElementsByTagName('aggregateFunction')
        aggnodata = requestObject.getElementsByTagName('aggregateNodata')
        if len(aggint) == 1 and len(aggfun) == 1:
            # aggregate_interval
            # Check on the eventTime parameter: it must be only one interval: 2010-01-01T00:00:00+00/2011-01-01T00:00:01+00
            exeMsg = "Using aggregate functions, the event time must exist with an interval composed by a begin and an end date (ISO8601)"
            if self.eventTime == None or len(self.eventTime) != 1 or len(
                    self.eventTime[0]) != 2:
                raise sosException.SOSException("NoApplicableCode", None,
                                                exeMsg)
            val = aggint[0].firstChild
            if val.nodeType == val.TEXT_NODE:
                self.aggregate_interval = str(val.data)
                try:
                    iso.parse_duration(self.aggregate_interval)
                except Exception as ex:
                    raise sosException.SOSException(
                        "InvalidParameterValue", "aggregateInterval",
                        "Parameter \"aggregate_interval\" sent with invalid format (check ISO8601 duration spec): %s"
                        % ex)
            else:
                err_txt = "cannot get ISO8601 duration value in \"aggregateInterval\""
                raise sosException.SOSException("InvalidParameterValue",
                                                "aggregateInterval",
                                                err_txt)
            # aggregate_function
            val = aggfun[0].firstChild
            if val.nodeType == val.TEXT_NODE:
                self.aggregate_function = str(val.data)
                if not (self.aggregate_function.upper()
                        in ["AVG", "COUNT", "MAX", "MIN", "SUM"]):
                    raise sosException.SOSException(
                        "InvalidParameterValue", "aggregateFunction",
                        "Available aggregation functions: avg, count, max, min, sum."
                    )
            # aggregate_no_data default value
            if len(aggnodata) == 1:
                val = aggnodata[0].firstChild
                self.aggregate_nodata = str(val.data)
            else:
                self.aggregate_nodata = sosConfig.aggregate_nodata
            #================================
            # MISSING AGGREGATE QUALITY INDEX
            #================================
        elif len(aggint) == 0 and len(aggfun) == 0:
            pass
        else:
            err_txt = "\"aggregateInterval\" and \"aggregate_function\" are both required with multiplicity 1"
            raise sosException.SOSException("NoApplicableCode", None,
                                            err_txt)
        # QUALITY INDEX
        self.qualityIndex = False
        qidx = requestObject.getElementsByTagName('qualityIndex')
        if len(qidx) > 0:
            if len(qidx) < 2:
                val = qidx[0].firstChild
                if val.nodeType == val.TEXT_NODE:
                    self.qualityIndex = str(val.data)
                    if self.qualityIndex.upper() == "TRUE":
                        self.qualityIndex = True
                    elif self.qualityIndex.upper() == "FALSE":
                        # leave as the string; presumably treated as falsy
                        # downstream — TODO confirm against consumers
                        pass
                    else:
                        raise sosException.SOSException(
                            "InvalidParameterValue", "qualityIndex",
                            "qualityIndex can only be \'True\' or \'False\'"
                        )
            elif len(qidx) == 0:
                pass
            else:
                err_txt = "\"qualityIndex\" is allowed with multiplicity 1 only"
                raise sosException.SOSException("NoApplicableCode", None,
                                                err_txt)
        # Quality filtering is not supported via the XML encoding.
        self.qualityFilter = False
def executePost(self, db=True):
    """
    Fast-insert observations posted in the request body into this service's
    schema (event_time + measures tables), then update the procedure's
    begin/end sampling positions.

    Body format (semicolon separated), parsed from ``wsgi_input``:
      - 4 tokens = regular time series:
        assignedId;beginISO8601;durationISO8601;v1,v2@v1,v2@...
      - 2 tokens = irregular time series:
        assignedId;isodate,v1,v2@isodate,v1,v2@...

    Per-sample problems (sample before current end position, sample in the
    future, bad timestamp, value-count mismatch) are collected in
    ``non_blocking_exceptions`` and skipped; only structural errors abort
    the whole request and roll back the transaction.

    NOTE(review): the ``db`` parameter is never read in this method — TODO
    confirm it is required by the caller's interface.
    """
    if self.procedurename is None:
        raise Exception("POST action without procedure name not allowed")
    # Reference instant used to reject samples lying in the future
    now = datetime.now(iso.UTC)
    non_blocking_exceptions = []
    # Create data array
    data = self.waEnviron['wsgi_input'].split(";")
    # Assigned id always in the first position
    assignedid = data[0]
    if len(data) == 4:
        # regular time series
        mode = self.MODE_REGULAR
    elif len(data) == 2:
        # irregular time series
        mode = self.MODE_IRREGULAR
    else:
        raise Exception(
            "Body content wrongly formatted. Please read the docs.")
    try:
        # NOTE(review): if PgDB() itself raises, `conn` is unbound and the
        # rollback call in the except handler below will raise NameError.
        conn = databaseManager.PgDB(
            self.serviceconf.connection['user'],
            self.serviceconf.connection['password'],
            self.serviceconf.connection['dbname'],
            self.serviceconf.connection['host'],
            self.serviceconf.connection['port'])
        # One row per observed property of the procedure identified by the
        # assigned id (rows share the procedure columns).
        rows = conn.select(("""
            SELECT procedures.id_prc,
                   proc_obs.id_pro,
                   proc_obs.constr_pro,
                   procedures.stime_prc,
                   procedures.etime_prc,
                   procedures.name_prc
              FROM %s.procedures, %s.proc_obs
             WHERE proc_obs.id_prc_fk = procedures.id_prc
        """ % (self.servicename, self.servicename)) + """
               AND assignedid_prc = %s
             ORDER BY proc_obs.id_pro ASC;
        """, (assignedid, ))
        if len(rows) == 0:
            raise Exception("Procedure with aid %s not found." % assignedid)
        id_prc = rows[0][0]
        name_prc = rows[0][5]
        # bp/ep: current begin/end positions; bpu/epu flag whether they
        # must be written back at the end.
        bp = rows[0][3]
        bpu = False
        ep = rows[0][4]
        epu = False

        def check_sampling(sampling):
            # NOTE(review): this closure ignores its `sampling` argument and
            # reads `sampling_time` from the enclosing scope; every caller
            # assigns `sampling_time` just before calling — confirm intended.
            # If the end position exists the new measures must be after
            if ep is not None and sampling_time < ep:
                non_blocking_exceptions.append(
                    "Procedure %s, Sampling time (%s) "
                    "is before the end position (%s)" % (
                        name_prc, sampling_time.isoformat(),
                        ep.isoformat()))
                return False
            # Check that the sampling time is before now
            if sampling_time > now:
                non_blocking_exceptions.append(
                    "Procedure %s, Sampling time (%s) "
                    "is in the future (%s)" % (
                        name_prc, sampling_time.isoformat(),
                        now.isoformat()))
                return False
            return True

        # Normalize both body formats into [[isodate, v1, v2, ...], ...]
        tmp_data = []
        if mode == self.MODE_REGULAR:
            try:
                start = iso.parse_datetime(data[1])
            except Exception:
                # NOTE(review): the %-operator binds only to the second
                # string fragment and data[1] becomes a second Exception
                # argument — the message is malformed; confirm and fix.
                raise Exception(
                    "Procedure %s, Sampling time (%s) "
                    "wrong format" % name_prc, data[1])
            try:
                step = iso.parse_duration(data[2])
            except Exception:
                raise Exception("Procedure %s, duration (%s) "
                                "wrong format" % (name_prc, data[2]))
            data = data[3].split("@")
            for idx in range(0, len(data)):
                # Timestamps are derived: begin + idx * step
                sampling_time = start + (step * idx)
                if not check_sampling(sampling_time):
                    continue
                tmp_data.append([sampling_time.isoformat()] +
                                data[idx].split(","))
        elif mode == self.MODE_IRREGULAR:
            data = data[1].split("@")
            for i in range(0, len(data)):
                data[i] = data[i].split(",")
                try:
                    try:
                        sampling_time = iso.parse_datetime(data[i][0])
                        if not check_sampling(sampling_time):
                            continue
                    except Exception:
                        raise Exception(
                            "Procedure %s, Sampling time (%s) "
                            "wrong format" % (name_prc, data[i][0]))
                    tmp_data.append(data[i])
                except Exception:
                    non_blocking_exceptions.append(
                        "Procedure %s, Sampling time (%s) "
                        "wrong format" % (name_prc, data[1]))
                    continue
        data = tmp_data
        # Number of observed properties expected per observation row
        op_cnt = len(rows)
        for observation in data:
            # Insert the event time and get back its id for the measures
            id_eti = conn.executeInTransaction(("""
                INSERT INTO %s.event_time (id_prc_fk, time_eti)
            """ % self.servicename) + """
                VALUES (%s, %s::TIMESTAMPTZ) RETURNING id_eti;
            """, (id_prc, observation[0]))
            # Track whether this sample extends the procedure's interval
            if (bp is None) or (bp == '') or (iso.parse_datetime(
                    observation[0]) < bp):
                bp = iso.parse_datetime(observation[0])
                bpu = True
            if (ep is None) or (ep == '') or (iso.parse_datetime(
                    observation[0]) > ep):
                ep = iso.parse_datetime(observation[0])
                epu = True
            # check if procedure observations length is ok
            # (-1 remove datetime from lenght of observations array)
            if op_cnt != (len(observation) - 1):
                non_blocking_exceptions.append(
                    "Procedure %s, Array length missmatch with procedures "
                    "observation number: %s" % (name_prc, observation))
                continue
            # One measure row per observed property; quality index fixed
            # to 100 for this fast-insert path.
            for idx in range(0, op_cnt):
                try:
                    conn.executeInTransaction(
                        ("""
                        INSERT INTO %s.measures(
                            id_eti_fk, id_qi_fk, id_pro_fk, val_msr
                        )
                        """ % self.servicename) + """
                        VALUES (%s, 100, %s, %s);
                        """, (
                            int(id_eti[0][0]),  # id_eti
                            int(rows[idx][1]),  # id_pro
                            float(observation[(idx + 1)])))
                except Exception as ie:
                    non_blocking_exceptions.append(
                        "Procedure %s, %s" % (name_prc, ie))
        # Persist extended begin/end positions only when they changed
        if bpu:
            conn.executeInTransaction(("""
                UPDATE %s.procedures
            """ % self.servicename) + """
                SET stime_prc=%s::TIMESTAMPTZ
                WHERE id_prc=%s
            """, (bp.isoformat(), id_prc))
        if epu:
            conn.executeInTransaction(("""
                UPDATE %s.procedures
            """ % self.servicename) + """
                SET etime_prc=%s::TIMESTAMPTZ
                WHERE id_prc=%s
            """, (ep.isoformat(), id_prc))
        conn.commitTransaction()
        # self.setData(ret)
        self.setMessage("Thanks for data")
        # Skipped samples are reported on stderr but do not fail the request
        if len(non_blocking_exceptions) > 0:
            print >> sys.stderr, str(non_blocking_exceptions)
    except Exception as e:
        # NOTE(review): traceback.print_exc() returns None, so this prints
        # "None" to stderr after the traceback itself.
        print >> sys.stderr, traceback.print_exc()
        # traceback.print_exc(file=sys.stderr)
        conn.rollbackTransaction()
        raise Exception("Error in fast insert (%s): %s" % (type(e), e))
def execute(args, conf=None):
    """
    Import CSV observation files from a working directory into an istSOS
    service via its Web-Admin (wa) REST API.

    For each procedure in ``args['p']`` this loads the procedure description
    and a last-observation template, scans ``args['wd']`` for files named
    ``<proc>_<timestamp><ext>``, maps CSV columns onto the procedure's
    observed properties (filling a default quality index when missing), and
    posts the assembled observations with insertobservation — split into
    chunks of at most ``args['m']`` (default 5000) values.

    args keys (all optional unless noted): u* (base url), s* (service),
    p* (procedure list), wd* (working dir), q (default QI), e (file ext),
    v (verbose), t (test/dry-run), m (max obs per request), user/password,
    noqi (drop quality-index columns).
    conf: optional dict providing a 'logger' and/or a pre-loaded procedure
    'description'.
    """

    def log(message):
        # Route messages to the configured logger, else stdout
        if conf is not None and 'logger' in conf:
            conf['logger'].log(message)
        else:
            print message

    # NOTE(review): the three helpers below declare an unused `self`
    # parameter although they are plain nested functions, and none of them
    # is called in this function body — confirm they are needed.
    def addMessage(self, message):
        if 'logger' in conf:
            conf['logger'].addMessage(message)
        else:
            print message

    def addWarning(self, message):
        if 'logger' in conf:
            conf['logger'].addWarning(message)
        else:
            print message

    def addException(self, message):
        if 'logger' in conf:
            conf['logger'].addException(message)
        else:
            print message

    pp = pprint.PrettyPrinter(indent=2)
    try:
        # Initializing URLs
        url = args['u']
        # Service instance name
        service = args['s']
        # Quality index
        quality = '100'
        if 'q' in args:
            quality = args['q']
        # Procedures
        procs = args['p']
        # Working directory, where the CSV files are located
        wd = args['wd']
        # File extension
        ext = '.dat'
        if 'e' in args:
            ext = args['e']
        debug = False
        if 'v' in args:
            debug = args['v']
        test = False
        if 't' in args:
            test = args['t']
        user = None
        if 'user' in args and args['user'] is not None:
            user = args['user']
        password = None
        if 'password' in args and args['password'] is not None:
            password = args['password']
        auth = None
        if user and password:
            auth = HTTPBasicAuth(user, password)
        # False meas that quality index is also migrated
        noqi = False
        if 'noqi' in args:
            if args['noqi'] is True:
                noqi = True
        # Maximum number of observations sent per insertobservation request
        maxobs = 5000
        if 'm' in args:
            maxobs = int(args['m'])

        #req = requests.session()
        req = requests

        for proc in procs:
            log("\nProcedure: %s" % proc)
            if conf is not None and 'description' in conf:
                data = conf['description']
            else:
                # Load procedure description
                res = req.get("%s/wa/istsos/services/%s/procedures/%s" % (
                    url, service, proc), auth=auth, verify=False)
                data = res.json()
                if data['success'] is False:
                    raise Exception(
                        "Description of procedure %s can not be loaded: %s"
                        % (proc, data['message']))
                else:
                    log("%s > %s" % (proc, data['message']))
                data = data['data']
            aid = data['assignedSensorId']
            # Getting observed properties from describeSensor response
            op = []
            for out in data['outputs']:
                # Quality-index outputs are skipped when noqi is set
                if not noqi or not ':qualityIndex' in out['definition']:
                    op.append(out['definition'])
            # Load of a getobservation request
            res = req.get(
                "%s/wa/istsos/services/%s/operations/getobservation/"
                "offerings/%s/procedures/%s/observedproperties/%s/ev"
                "enttime/last" % (
                    url, service, 'temporary', proc, ','.join(op)),
                auth=auth, verify=False)
            data = res.json()
            if data['success'] is False:
                raise Exception("Last observation of procedure %s can not be "
                                "loaded: %s" % (proc, data['message']))
            else:
                log("%s > %s" % (proc, data['message']))
            # The GO response becomes the insertobservation template
            data = data['data'][0]
            data['AssignedSensorId'] = aid
            # Set values array empty, can contain 1 value if
            # procedure not empty
            # NOTE(review): `lastMeasure` is never read afterwards in this
            # function — confirm it can be removed.
            if len(data['result']['DataArray']['values']) == 1:
                lastMeasure = data['result']['DataArray']['values'][0]
            else:
                lastMeasure = None
            data['result']['DataArray']['values'] = []
            # discover json observed property disposition
            # jsonindex maps observed-property definition -> column position
            jsonindex = {}
            for pos in range(0, len(data['result']['DataArray']['field'])):
                # NOTE(review): popping from the field list while iterating
                # a range computed from its original length can raise
                # IndexError / skip entries — confirm noqi path is exercised.
                field = data['result']['DataArray']['field'][pos]
                if not noqi:
                    jsonindex[field['definition']] = pos
                elif not ':qualityIndex' in field['definition']:
                    jsonindex[field['definition']] = pos
                elif ':qualityIndex' in field['definition'] and noqi:
                    data['result']['DataArray']['field'].pop(pos)
            log("Searching: %s" % os.path.join(
                wd, "%s_[0-9]*%s" % (proc, ext)))
            files = glob.glob(os.path.join(wd, "%s_*%s" % (proc, ext)))
            files.sort()
            if debug:
                log("%s > %s %s found" % (proc, len(files),
                                          "Files" if len(files) > 1
                                          else "File"))
            if len(files) > 0:
                for f in files:
                    # open file
                    # NOTE(review): file handle is never closed (leak) and
                    # `file` shadows the builtin — confirm before refactor.
                    file = open(f, 'rU')
                    # loop lines
                    lines = file.readlines()
                    # First CSV row is the header of observed properties
                    obsindex = lines[0].strip(' \t\n\r').split(",")
                    # Check if all the observedProperties of the procedure are
                    # included in the CSV file (quality index is optional)
                    for k, v in jsonindex.iteritems():
                        if k in obsindex:
                            continue
                        elif ':qualityIndex' in k:
                            continue
                        else:
                            raise Exception(
                                "Mandatory observed property %s is not present"
                                " in the CSV." % k)
                    # loop lines (skipping header)
                    for i in range(1, len(lines)):
                        try:
                            line = lines[i]
                            lineArray = line.strip(' \t\n\r').split(",")
                            # Creating an empty array where the values will
                            # be inserted
                            observation = [''] * len(jsonindex)
                            for k, v in jsonindex.iteritems():
                                val = None
                                if k in obsindex:
                                    val = lineArray[obsindex.index(k)]
                                elif ':qualityIndex' in k:
                                    # Quality index is not present in the CSV
                                    # so the default value will be set
                                    val = quality
                                observation[v] = val
                            # attach to object
                            data['result']['DataArray']['values'].append(
                                observation)
                        except Exception as e:
                            raise Exception("Errore alla riga: %s - %s\n%s"
                                            % (i, lines[i], str(e)))
                log("Before insert ST: %s" % proc)
                if 'beginPosition' in data["samplingTime"]:
                    log(" > Begin: %s" %
                        data["samplingTime"]["beginPosition"])
                if 'endPosition' in data["samplingTime"]:
                    log(" + End: %s" % data["samplingTime"]["endPosition"])
                # End position is parsed from the LAST file's name:
                # <proc>_<YYYYmmddHHMMSSffffff>[+-HHMM]<ext>
                dtstr = os.path.split(f)[1].replace(
                    "%s_" % proc, "").replace(ext, "")
                offset = False
                if '+' in dtstr:
                    offset = dtstr[dtstr.index('+'):]
                    offset = [offset[0:3], offset[3:5]]
                    dtstr = dtstr[:dtstr.index('+')]
                elif '-' in dtstr:
                    offset = dtstr[dtstr.index('-'):]
                    offset = [offset[0:3], offset[3:5]]
                    dtstr = dtstr[:dtstr.index('-')]
                # NOTE(review): `timezone('UTC')` looks like pytz — confirm
                # against this module's imports (not visible here).
                ep = datetime.strptime(
                    dtstr, "%Y%m%d%H%M%S%f").replace(tzinfo=timezone('UTC'))
                if offset:
                    # Normalize the filename's local offset back to UTC
                    ep = ep - timedelta(hours=int(offset[0]),
                                        minutes=int(offset[1]))
                # A kick in the brain code snippet
                # If there is at least one measure:
                if len(data['result']['DataArray']['values']) > 0:
                    # taking first observation as begin position
                    # NOTE(review): `isourn` is not defined in this function;
                    # presumably a module-level constant holding the ISO8601
                    # time urn — confirm it exists at import time.
                    bp = iso.parse_datetime(
                        data['result']['DataArray']
                        ['values'][0][jsonindex[isourn]])
                else:
                    # otherwise this can be an irrebular procedure where just
                    # the end position is moved forward
                    if ep > iso.parse_datetime(
                            data["samplingTime"]["endPosition"]):
                        bp = ep
                    else:
                        raise Exception(
                            "Something is wrong with begin position..")
                data["samplingTime"] = {
                    "beginPosition": bp.isoformat(),
                    "endPosition": ep.isoformat()
                }
                log("Insert ST: %s" % proc)
                log(" > Begin: %s" % bp.isoformat())
                log(" + End: %s" % ep.isoformat())
                log(" > Values: %s" %
                    len(data['result']['DataArray']['values']))
            if not test and len(files) > 0:
                # send to wa
                if len(data['result']['DataArray']['values']) > maxobs:
                    # Too many values for one request: send in chunks of
                    # `maxobs`, adjusting the sampling time of each chunk.
                    total = len(data['result']['DataArray']['values'])
                    inserted = last = maxobs
                    while len(data['result']['DataArray']['values']) > 0:
                        tmpData = copy.deepcopy(data)
                        tmpData['result']['DataArray']['values'] = (
                            data['result']['DataArray']['values'][:last])
                        data['result']['DataArray']['values'] = (
                            data['result']['DataArray']['values'][last:])
                        if len(data['result']['DataArray']['values']) > 0:
                            # Chunk end = first timestamp of the next chunk
                            tmpData["samplingTime"] = {
                                "beginPosition":
                                    tmpData['result']['DataArray']
                                    ['values'][0][jsonindex[isourn]],
                                "endPosition":
                                    data['result']['DataArray']['values']
                                    [0][jsonindex[isourn]]
                            }
                        else:
                            # Last chunk: end position from the file name
                            tmpData["samplingTime"] = {
                                "beginPosition":
                                    tmpData['result']['DataArray']
                                    ['values'][0][jsonindex[isourn]],
                                "endPosition": ep.isoformat()
                            }
                        res = req.post(
                            "%s/wa/istsos/services/%s/"
                            "operations/insertobservation" % (url, service),
                            auth=auth,
                            verify=False,
                            data=json.dumps({
                                "ForceInsert": "true",
                                "AssignedSensorId": aid,
                                "Observation": tmpData
                            }))
                        # read response
                        res.raise_for_status()
                        log(" > Insert observation success of %s/%s (%s / "
                            "%s) observations: %s" % (
                                inserted, total,
                                tmpData["samplingTime"]["beginPosition"],
                                tmpData["samplingTime"]["endPosition"],
                                res.json()['success']))
                        if not res.json()['success']:
                            log(res.json()['message'])
                        # Final partial chunk can be shorter than maxobs
                        if len(data['result']['DataArray']
                               ['values']) < maxobs:
                            last = len(
                                data['result']['DataArray']['values'])
                        inserted += last
                else:
                    # Few enough values: single insertobservation request
                    res = req.post(
                        "%s/wa/istsos/services/%s/operations/"
                        "insertobservation" % (url, service),
                        auth=auth,
                        verify=False,
                        data=json.dumps({
                            "ForceInsert": "true",
                            "AssignedSensorId": aid,
                            "Observation": data
                        }))
                    # read response
                    res.raise_for_status()
                    log(" > Insert observation success: %s" %
                        (res.json()['success']))
                    if not res.json()['success']:
                        log(res.json()['message'])
                    pass
    except requests.exceptions.HTTPError as eh:
        #addException(str(eh))
        traceback.print_exc()
    except Exception as e:
        #addException(str(e))
        traceback.print_exc()
def execute (args):
    """
    Migrate procedures and their observations from a standard SOS service
    (``args['s']``) into an istSOS instance (``args['d']``, service name
    ``args['n']``).

    Workflow per procedure discovered through GetCapabilities:
    DescribeSensor -> collect observed properties -> GetObservation to
    extract sampling period, feature of interest, coordinates and units ->
    register the procedure on the destination (if missing) -> copy the
    measures in windows of ``args['i']`` days, adapting the window size to
    the source's response time.

    args keys: s* (source SOS url), d* (destination istSOS url),
    n* (service), i* (loop step days), p (procedure whitelist),
    o (procedure blacklist), r (register only), v (verbose), t (test),
    a (append from destination end position), from/to (ISO dates,
    override append), istsos (source istSOS major version, '2' changes
    the observed-property urn prefix).
    """
    pp = pprint.PrettyPrinter(indent=2)
    try:
        istsos_version = args['istsos'] if 'istsos' in args else None
        debug = args['v']
        test = args['t']
        procs = args['p']
        omit = args['o']
        procedures = {}
        src = args['s']
        dst = args['d']
        srv = args['n']
        appendData = False
        if 'a' in args:
            appendData = True
        dfrom = None
        dto = None
        # Explicit from/to dates disable append mode (set to None)
        if 'from' in args and type('') == type(args['from']):
            print "From: %s" % args['from']
            dfrom = iso.parse_datetime(args['from'])
            appendData = None
        if 'to' in args and type('') == type(args['to']):
            print "To: %s" % args['to']
            dto = iso.parse_datetime(args['to'])
            appendData = None
        registerOnly = args['r']
        # NOTE(review): virtual/hq are hard-coded False here, so the
        # virtual-procedure branch below never runs — confirm intended.
        virtual = False
        hq = False
        # Executing request
        res = req.get("%s" % (src), params={
            'service': 'SOS',
            'version': '1.0.0',
            'request': 'GetCapabilities',
            'section': 'contents'
        }, verify=False)
        # Parsing response
        gc, gcNs = parse_and_get_ns(StringIO(res.content))
        # Extract all offerings
        elOfferings = gc.findall(
            "{%s}Contents/{%s}ObservationOfferingList/{%s}ObservationOffering"
            % (gcNs['sos'], gcNs['sos'], gcNs['sos']))
        for offering in elOfferings:
            offeringName = offering.find(
                "{%s}name" % (gcNs['gml'])).text.split(":")[-1]
            # For each offering get the procedures
            elProcs = offering.findall("{%s}procedure" % (gcNs['sos']))
            for p in elProcs:
                pname = p.get('{%s}href' % gcNs['xlink'])
                # Apply whitelist (procs) / blacklist (omit) filters
                if (type(procs) == type([]) and pname not in procs) or (
                        type(omit) == type([]) and pname in omit):
                    continue
                #print pname
                procedures[pname] = Procedure(pname, offeringName, dst, srv)
                if virtual:
                    procedures[pname].setSystemType('virtual')
                res = req.get("%s" % (src), params={
                    'service': 'SOS',
                    'version': '1.0.0',
                    'request': 'DescribeSensor',
                    'outputFormat': 'text/xml;subtype=\'sensorML/1.0.0\'',
                    'procedure': pname
                }, verify=False)
                ds, dsNs = parse_and_get_ns(StringIO(res.content))
                #print res.content
                #print "Root: %s" % ds.getroot().tag
                if ds.getroot().tag == 'ExceptionReport':
                    print "Error on DS for %s" % pname
                    continue
                #print "Outputs found: %s" % len(elDescribe)
                observedProperties = []
                print "istsos_version: ", istsos_version
                # The urn prefix stripped from definitions differs between
                # istSOS major versions
                uniqidurn = 'urn:ogc:def:parameter:x-ist::'
                if istsos_version != None and istsos_version == '2':
                    uniqidurn = 'urn:ogc:def:parameter:x-ist:1.0:'
                    # v2: observed properties read from swe:DataRecord fields
                    elFields = ds.findall(
                        "{%s}member/{%s}System/{%s}outputs/{%s}OutputList/"
                        "{%s}output/{%s}DataRecord/{%s}field" % (
                            dsNs['sml'], dsNs['sml'], dsNs['sml'],
                            dsNs['sml'], dsNs['sml'], dsNs['swe'],
                            dsNs['swe']))
                    print "Observed properties (v2): %s " % len(elFields)
                    for fs in elFields:
                        print fs.get('name')
                        if fs.get('name') != 'Time':
                            observedProperties.append(
                                fs.find("{%s}Quantity" % (dsNs['swe'])).get(
                                    'definition').replace(uniqidurn, ''))
                else:
                    # v1: observed properties read from sml outputs
                    elDescribe = ds.findall(
                        "member/{%s}System/{%s}outputs/{%s}OutputList/"
                        "{%s}output" % (dsNs['sml'], dsNs['sml'],
                                        dsNs['sml'], dsNs['sml']))
                    print "Observed properties: %s " % len(elDescribe)
                    # NOTE(review): loop variable shadows the `ds` document
                    # parsed above — harmless only because `ds` is not used
                    # afterwards; confirm.
                    for ds in elDescribe:
                        definition = ds.find(
                            "{%s}ObservableProperty" % (dsNs['swe'])).get(
                                'definition').replace(uniqidurn, '')
                        #print definition
                        # Skip the time column
                        if definition.find('time:iso8601') < 0:
                            observedProperties.append(definition)
                print {
                    'service': 'SOS',
                    'version': '1.0.0',
                    'request': 'GetObservation',
                    'offering': offeringName,
                    'responseFormat': 'text/xml;subtype=\'sensorML/1.0.0\'',
                    'procedure': pname,
                    'observedProperty': ",".join(observedProperties)
                }
                # First GO: only used to extract period / foi / uom metadata
                res = req.get("%s" % (src), params={
                    'service': 'SOS',
                    'version': '1.0.0',
                    'request': 'GetObservation',
                    'offering': offeringName,
                    'responseFormat': 'text/xml;subtype=\'sensorML/1.0.0\'',
                    'procedure': pname,
                    'observedProperty': ",".join(observedProperties)
                }, verify=False)
                go, goNs = parse_and_get_ns(StringIO(res.content))
                if go.getroot().tag == 'ExceptionReport':
                    print "Error on GO for %s:\nparams:%s\n%s" % (pname, {
                        'service': 'SOS',
                        'version': '1.0.0',
                        'request': 'GetObservation',
                        'offering': offeringName,
                        'responseFormat':
                            'text/xml;subtype=\'sensorML/1.0.0\'',
                        'procedure': pname,
                        'observedProperty': ",".join(observedProperties)
                    }, res.content)
                    continue
                # Extracting begin and end position
                begin = go.find(
                    "{%s}member/{%s}Observation/{%s}samplingTime/"
                    "{%s}TimePeriod/{%s}beginPosition" % (
                        goNs['om'], goNs['om'], goNs['om'],
                        goNs['gml'], goNs['gml']))
                end = go.find(
                    "{%s}member/{%s}Observation/{%s}samplingTime/"
                    "{%s}TimePeriod/{%s}endPosition" % (
                        goNs['om'], goNs['om'], goNs['om'],
                        goNs['gml'], goNs['gml']))
                procedures[pname].begin = begin.text
                procedures[pname].end = end.text
                # Extracting Feature of Interest and coordinates
                foi = go.find(
                    "{%s}member/{%s}Observation/{%s}featureOfInterest" % (
                        goNs['om'], goNs['om'], goNs['om']))
                point = foi.find("{%s}Point" % (goNs['gml']))
                if point == None:
                    # Some services nest the point in a FeatureCollection
                    point = foi.find(
                        "{%s}FeatureCollection/{%s}location/{%s}Point" % (
                            goNs['gml'], goNs['gml'], goNs['gml']))
                coord = point.find(
                    "{%s}coordinates" % (goNs['gml'])).text.split(",")
                # Force 3D coordinates: default Z to 0
                if len(coord) == 2:
                    coord.append('0')
                procedures[pname].setFoi(
                    foi.get('{%s}href' % gcNs['xlink']).split(":")[-1],
                    point.get('srsName'), coord)
                # Extracting UOM
                fields = go.findall(
                    "{%s}member/{%s}Observation/{%s}result/{%s}DataArray/"
                    "{%s}elementType/{%s}DataRecord/{%s}field" % (
                        goNs['om'], goNs['om'], goNs['om'], goNs['swe'],
                        goNs['swe'], goNs['swe'], goNs['swe']))
                for field in fields:
                    if field.get('name') != 'Time':
                        qty = field.find("{%s}Quantity" % (goNs['swe']))
                        uom = field.find(
                            "{%s}Quantity/{%s}uom" % (goNs['swe'],
                                                      goNs['swe']))
                        procedures[pname].addObservedProperty(
                            field.get('name'),
                            qty.get('definition').replace(uniqidurn, ''),
                            uom.get('code'))
                # _begin is kept fixed for the percentage computation below
                if dfrom:
                    begin = dfrom
                    _begin = dfrom
                else:
                    begin = iso.parse_datetime(procedures[pname].begin)
                    _begin = iso.parse_datetime(procedures[pname].begin)
                if dto:
                    end = dto
                else:
                    end = iso.parse_datetime(procedures[pname].end)
                # ~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~
                # REGISTRATION PROCESS
                # ~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~
                # Check if procedure already exist
                res = req.get(
                    "%s/wa/istsos/services/%s/procedures/%s" % (
                        dst, srv, pname), verify=False)
                if not res.json()["success"]:
                    # Registering procedure to istSOS
                    res = req.post(
                        "%s/wa/istsos/services/%s/procedures" % (dst, srv),
                        data=json.dumps(procedures[pname].data))
                    if not res.json()["success"]:
                        #print json.dumps(procedures[pname].data)
                        raise Exception(
                            "Registering procedure %s failed: \n%s" % (
                                pname, res.json()["message"]))
                    # Getting details (describe sensor) to get the
                    # assignedSensorId
                    res = req.get(
                        "%s/wa/istsos/services/%s/procedures/%s" % (
                            dst, srv, pname))
                    # Getting an InsertObservation template
                    template = procedures[pname].getIoTemplate()
                else:
                    # Getting an InsertObservation template
                    template = procedures[pname].getIoTemplate()
                    try:
                        # Append mode: restart copying from the destination's
                        # current end position
                        if appendData and (
                                'endPosition' in template['samplingTime']):
                            procedures[pname].begin = template[
                                'samplingTime']['endPosition']
                            begin = iso.parse_datetime(
                                template['samplingTime']['endPosition'])
                    except Exception as exproc:
                        print res.text
                        raise exproc
                procedures[pname].oid = res.json(
                )["data"]["assignedSensorId"]
                # Initial copy window in days
                days = int(args['i'])
                interval = timedelta(days=int(days))
                if not registerOnly:
                    if virtual and hq:
                        # ~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~
                        # VIRTUAL PROCEDURE CODE INITIALIZATION
                        # ~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~
                        pass
                    else:
                        # ~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~
                        # PROCEDURE OBSERVATIONS MIGRATION
                        # ~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~
                        oOrder = []
                        passedLoops = 0
                        lastPrint = ""
                        startTime = time.time()
                        print "%s: %s - %s" % (pname,
                                               procedures[pname].begin,
                                               procedures[pname].end)
                        # Clamp the window when less than one step remains
                        if begin < end and begin + interval > end:
                            interval = end - begin
                        while (begin + interval) <= end:
                            loopTotalTime = time.time()
                            nextPosition = begin + interval
                            passedLoops = passedLoops + 1
                            # Progress: elapsed seconds vs total seconds
                            t = float(
                                calendar.timegm(end.utctimetuple()) -
                                calendar.timegm(_begin.utctimetuple()))
                            t1 = float(
                                calendar.timegm(
                                    nextPosition.utctimetuple()) -
                                calendar.timegm(_begin.utctimetuple()))
                            try:
                                percentage = round((t1 / t) * 100, 2)
                            except:
                                percentage = 0
                            if percentage > 100:
                                percentage = 100
                            # "\b" backspaces overwrite the previous
                            # progress line on the terminal
                            lastPrint = "%s > %s%% (%s / %s %s days)" % (
                                "\b" * len(lastPrint), percentage,
                                begin.strftime(fmtshort),
                                nextPosition.strftime(fmtshort), days)
                            looptime = time.time()
                            # GetObservation from source SOS
                            params = {
                                'service': 'SOS',
                                'version': '1.0.0',
                                'request': 'GetObservation',
                                'eventTime': '%s/%s' % (
                                    begin.strftime(fmt),
                                    nextPosition.strftime(fmt)),
                                'qualityIndex': 'True',
                                'offering': offeringName,
                                'responseFormat':
                                    'text/xml;subtype=\'sensorML/1.0.0\'',
                                'procedure': pname,
                                'observedProperty':
                                    ",".join(observedProperties)
                            }
                            try:
                                res = req.get("%s" % (src), params=params,
                                              verify=False)
                            except Exception:
                                # One blind retry on network failure
                                res = req.get("%s" % (src), params=params,
                                              verify=False)
                            # Adapt the window size to the GO response time:
                            # slow (>10s) halves the days, fast (<5s) adds one
                            gotime = timedelta(
                                seconds=int(time.time() - looptime))
                            if gotime > timedelta(seconds=int(10)):
                                if days > 1:
                                    days = int(days / 2)
                                if days <= 1:
                                    days = 1
                                interval = timedelta(days=days)
                            elif gotime < timedelta(seconds=int(5)):
                                days = days + 1
                                interval = timedelta(days=days)
                            lastPrint = "%s - GO: '%s'" % (lastPrint, gotime)
                            go, goNs = parse_and_get_ns(
                                StringIO(res.content))
                            if len(oOrder) == 0:
                                fields = go.findall(
                                    "{%s}member/{%s}Observation/{%s}result/"
                                    "{%s}DataArray/{%s}elementType/"
                                    "{%s}DataRecord/{%s}field" % (
                                        goNs['om'], goNs['om'], goNs['om'],
                                        goNs['swe'], goNs['swe'],
                                        goNs['swe'], goNs['swe']))
                                # NOTE(review): appends `qty` (leftover from
                                # the UOM loop above) instead of reading each
                                # `field` — every entry gets the same
                                # definition; looks like a bug, confirm.
                                for field in fields:
                                    oOrder.append(qty.get(
                                        'definition').replace(
                                        'urn:ogc:def:parameter:x-ist::',
                                        ''))
                            values = go.find(
                                "{%s}member/{%s}Observation/{%s}result/"
                                "{%s}DataArray/{%s}values" % (
                                    goNs['om'], goNs['om'], goNs['om'],
                                    goNs['swe'], goNs['swe']))
                            if values.text:
                                rows = values.text.strip().split("@")
                                lastPrint = "%s " % (lastPrint)
                                # NOTE(review): `copy` shadows the stdlib
                                # copy module within this scope.
                                copy = []
                                for row in rows:
                                    copy.append(row.split(","))
                                # InsertObservation to istSOS
                                template['result']['DataArray'][
                                    'values'] = copy
                                template['samplingTime'] = {
                                    "beginPosition": copy[0][0],
                                    "endPosition":
                                        nextPosition.strftime(fmt)
                                }
                                '''template['samplingTime'] = {
                                    "beginPosition": begin.strftime(fmt),
                                    "endPosition": nextPosition.strftime(fmt)
                                }'''
                                template[u"AssignedSensorId"] = procedures[
                                    pname].oid
                                looptime = time.time()
                                res = req.post(
                                    "%s/wa/istsos/services/%s/operations/"
                                    "insertobservation" % (dst, srv),
                                    data=json.dumps({
                                        u"AssignedSensorId":
                                            procedures[pname].oid,
                                        u"ForceInsert": u"true",
                                        u"Observation": template
                                    }))
                                iotime = timedelta(
                                    seconds=int(time.time() - looptime))
                                lastPrint = "%s - IO: '%s'" % (lastPrint,
                                                               iotime)
                            # Advance the window
                            begin = nextPosition
                            if begin < end and begin + interval > end:
                                interval = end - begin
                            if percentage < 100:
                                lastPrint = (
                                    "%s - Step time: '%s' - Elapsed: %s " % (
                                        lastPrint,
                                        timedelta(seconds=int(
                                            time.time() - loopTotalTime)),
                                        timedelta(seconds=int(
                                            time.time() - startTime))))
                            else:
                                lastPrint = "%s - Step time: '%s' " % (
                                    lastPrint,
                                    timedelta(seconds=int(
                                        time.time() - loopTotalTime)))
                            sys.stdout.write(lastPrint)
                            sys.stdout.flush()
                        print " > Completed in %s" % timedelta(
                            seconds=int(time.time() - startTime))
                        # NOTE(review): this break exits the procedure loop
                        # after the first migrated procedure — exact original
                        # indentation is ambiguous in the source; confirm.
                        break
    except Exception as e:
        print "ERROR: %s\n\n" % e
        traceback.print_exc()
def execute (args, logger=None):
    """
    Copy (and optionally aggregate) observations of one procedure from a
    source istSOS service to a destination istSOS service.

    The processing interval is derived from the procedures' sampling-time
    constraints: begin defaults to the destination's end position (append
    behaviour, optionally moved back by ``retro`` minutes), stop defaults
    to the source's end position. Data is then copied in 15-day windows via
    GetObservation on the source and insertobservation on the destination,
    optionally applying an aggregate function/resolution on the fly.

    args keys: procedure*, surl*/ssrv* (source), dsrv* (destination service;
    durl defaults to surl), begin/end (ISO dates or "*"), user/pwd or
    per-service suser/spwd, duser/dpwd, auser/apwd, cpqi (copy quality
    index), resolution/function/nodataValue/nodataQI (aggregation),
    retro (minutes of retroactive re-aggregation), lm (use first/last
    measures as sampling period), v (verbose), aurl/asrv (alternative QI
    extrapolation service).
    """
    # SCRIPT CONFIGURATION
    # =========================================================================
    # Activate and print verbose information
    debug = args['v'] if args.has_key('v') else False
    # Procedure name
    procedure = args['procedure']
    # Begin date
    begin = args['begin'] if args.has_key('begin') else "*"
    # End date
    end = args['end'] if args.has_key('end') else "*"
    # Global User and password valid for all connections
    suser = duser = auser = args['user'] if args.has_key('user') else None
    spwd = dpwd = apwd = args['pwd'] if args.has_key('pwd') else None
    # Activate this will copy also the quality index from source to
    # destination
    cpqi = args['cpqi'] if args.has_key('cpqi') else False
    # Aggregating function configuration
    resolution = args['resolution'] if 'resolution' in args else None
    function = args['function'] if 'function' in args else None
    nodataValue = args['nodataValue'] if 'nodataValue' in args else None
    nodataQI = args['nodataQI'] if 'nodataQI' in args else None
    # Retroactive aggregation
    retro = args['retro'] if 'retro' in args else 0
    # Force using last position as end position during insert sensor
    # operation
    lm = args['lm'] if 'lm' in args else False

    # SOURCE istSOS CONFIG ==================================
    # Location
    surl = args['surl']
    # Service instance name
    ssrv = args['ssrv']
    # User and password if given this will be used for source istSOS
    if args.has_key('suser'):
        suser = args['suser']
    if args.has_key('spwd'):
        spwd = args['spwd']

    # DESTINATION istSOS CONFIG =============================
    # Location (if not given, same as source will be used)
    durl = args['durl'] if (
        args.has_key('durl') and args['durl'] is not None) else surl
    # Service instance name
    dsrv = args['dsrv']
    # User and password if given this will be used for destination istSOS
    if args.has_key('duser'):
        duser = args['duser']
    if args.has_key('dpwd'):
        dpwd = args['dpwd']

    # ALTERNATIVE istSOS SERVICE FOR QI EXTRAPOLATION =======
    # Location (if not given, same as source will be used)
    aurl = args['aurl'] if (
        args.has_key('aurl') and args['aurl'] is not None) else None
    # Service instance name
    asrv = args['asrv'] if (
        args.has_key('asrv') and args['asrv'] is not None) else None
    # User and password if given this will be used for extrapolation QI
    # istSOS
    if args.has_key('auser'):
        auser = args['auser']
    if args.has_key('apwd'):
        apwd = args['apwd']

    def log(message):
        # Verbose-only output, routed through the logger when provided
        if debug:
            if logger:
                logger.log(message)
            else:
                print message

    # PROCESSING STARTS HERE ==================================================
    log("\nistSOS > 2 > istSOS STARTED:")
    log("==============================\n")

    #req = requests.session()
    req = requests

    # Load procedure description
    log("1. Loading procedure description: %s" % procedure)

    # Loading describe sensor from source =====================================
    res = req.get("%s/wa/istsos/services/%s/procedures/%s" % (
        surl, ssrv, procedure
    ), auth=(suser, spwd), verify=False)
    sdata = res.json()
    if sdata['success'] == False:
        raise Exception (
            "Description of procedure %s can not be loaded from source "
            "service: %s" % (procedure, sdata['message']))
    else:
        log(" > DS Source Ok.")

    # Loading describe sensor from destination ================================
    res = req.get("%s/wa/istsos/services/%s/procedures/%s" % (
        durl, dsrv, procedure
    ), auth=(duser, dpwd), verify=False)
    ddata = res.json()
    if ddata['success'] == False:
        raise Exception (
            "Description of procedure %s can not be loaded from destination "
            "service: %s" % (procedure, ddata['message']))
    else:
        log(" > DS Destination Ok.")

    # Load of a getobservation template from destination ======================
    # NOTE(review): the URL hard-codes "?qualityIndex=False" while params
    # also sends qualityIndex=cpqi — requests will append both to the query
    # string; confirm which one the server honours.
    res = req.get(
        "%s/wa/istsos/services/%s/operations/getobservation/offerings/%s/"
        "procedures/%s/observedproperties/:/eventtime/last"
        "?qualityIndex=False" % (
            durl, dsrv, 'temporary', procedure
        ), params={
            "qualityIndex": cpqi
        }, auth=(duser, dpwd), verify=False)
    dtemplate = res.json()
    if dtemplate['success'] == False:
        raise Exception (
            "Observation template of procedure %s can not be loaded: %s" % (
                procedure, dtemplate['message']))
    else:
        # Template is reused for every insertobservation below
        dtemplate = dtemplate['data'][0]
        dtemplate['AssignedSensorId'] = ddata['data']['assignedSensorId']
        dtemplate['result']['DataArray']['values'] = []
        log(" > GO Template Ok.")

    # Loading describe sensor from QI EXTRAPOLATION service ===================
    if aurl and asrv:
        res = req.get("%s/wa/istsos/services/%s/procedures/%s" % (
            aurl, asrv, procedure
        ), auth=(auser, apwd), verify=False)
        adata = res.json()
        if adata['success'] == False:
            raise Exception (
                "Description of procedure %s can not be loaded from "
                "destination service: %s" % (procedure, adata['message']))
        else:
            log(" > DS QI Extrapolation Ok.")

    log("\n2. Identifying processing interval:")

    # Check if mesaures are present in source procedure, by identifying the
    # sampling time constraint located always in the first position of the
    # outputs, if it is empty an exception is thrown
    if (not 'constraint' in sdata['data']['outputs'][0]
            or not 'interval' in sdata['data']['outputs'][0]['constraint']):
        raise Exception (
            "There is no data in the source procedure to be copied to the "
            "destination procedure.")
    else:
        # Check if the contraint interval contains a valid ISO date begin
        # position
        try:
            iso.parse_datetime(
                sdata['data']['outputs'][0]['constraint']['interval'][0])
        except Exception:
            raise Exception (
                "The date in the source procedure constraint interval (%s) "
                "is not valid." % sdata['data']['outputs'][0][
                    'constraint']['interval'][0])
        # Check if the contraint interval contains a valid ISO date end
        # position
        try:
            iso.parse_datetime(
                sdata['data']['outputs'][0]['constraint']['interval'][1])
        except Exception:
            raise Exception (
                "The date in the source procedure constraint interval (%s) "
                "is not valid." % sdata['data']['outputs'][0][
                    'constraint']['interval'][1])
        log(" > Source interval is valid")

    # Looking for start (IO beginPOsition) instant processing
    # If the default value (*) is used, then the endPosition of
    # the "destination" service procedure will be used. But if the
    # destination procedure is empty, then the begin position of the source
    # will be used
    start = None
    stop = None
    if begin == "*":
        if ('constraint' in ddata['data']['outputs'][0]
                and 'interval' in ddata['data']['outputs'][0]['constraint']):
            try:
                if function and resolution:
                    # getting last inserted observations of "destination"
                    # service
                    log("Aggregation requested: getting last inserted "
                        "observations of \"destination\" service")
                    params = {
                        "request": "GetObservation",
                        "service": "SOS",
                        "version": "1.0.0",
                        "observedProperty": ':',
                        "procedure": procedure,
                        "responseFormat": "application/json",
                        "offering": 'temporary'
                    }
                    res = req.get("%s/%s" % (durl, dsrv), params=params,
                                  auth=(duser, dpwd), verify=False)
                    obs = res.json()
                    # Restart from the timestamp of the last aggregated value
                    start = iso.parse_datetime(
                        obs['ObservationCollection']['member'][0]['result']
                        ['DataArray']['values'][0][0])
                else:
                    # The endPosition of the destination will be used as
                    # Start/IO BeginPosition
                    start = iso.parse_datetime(
                        ddata['data']['outputs'][0]['constraint']
                        ['interval'][1])
                if retro > 0:
                    # Retroactive aggregation
                    log("Retroactive aggregation active.")
                    # Move start back by `retro` minutes, but never before
                    # the destination's begin position
                    if start - timedelta(minutes=retro) > iso.parse_datetime(
                            ddata['data']['outputs'][0]['constraint']
                            ['interval'][0]):
                        start = start - timedelta(minutes=retro)
                    else:
                        start = iso.parse_datetime(
                            ddata['data']['outputs'][0]['constraint']
                            ['interval'][0])
                    log("Start: %s" % start)
            except Exception as ee:
                print "Error setting start date for proc %s: %s" % (
                    procedure, ee)
                raise Exception (
                    "The date in the destination procedure %s constraint "
                    "interval (%s) is not valid." % (
                        procedure, ddata['data']['outputs'][0]
                        ['constraint']['interval'][0]))
        else:
            # The beginPosition of the source will be used as Start/IO
            # BeginPosition
            start = iso.parse_datetime(
                sdata['data']['outputs'][0]['constraint']['interval'][0])
    else:
        start = iso.parse_datetime(begin)

    if end == "*":
        # The endPosition of the source will be used as Stop/IO EndPosition
        stop = iso.parse_datetime(
            sdata['data']['outputs'][0]['constraint']['interval'][1])
    else:
        stop = iso.parse_datetime(end)

    log(" > Destination interval is valid")
    log(" > Start processing: %s" % start)
    log(" > Stop processing: %s" % stop)
    if retro > 0:
        log(" > Retro aggregation: %s minutes" % retro)

    # Insertion loop step timedelta
    interval = timedelta(days=15)
    # Clamp the step when less than one full window remains
    if start < stop and start + interval > stop:
        interval = stop - start
    log(" > Insertion loop step: %s" % interval)

    if function and resolution:
        # Validate the aggregation resolution as an ISO8601 duration
        try:
            iso.duration_isoformat(resolution)
        except:
            raise Exception (
                "The resolution (%s) to apply in the aggregating function "
                "is not valid." % resolution)
        log(" > Function(Resolution) : %s(%s)" % (function, resolution))

    # Copy loop: one GetObservation + insertobservation per window
    while start + interval <= stop:
        nextStart = start + interval
        params = {
            "request": "GetObservation",
            "service": "SOS",
            "version": "1.0.0",
            "observedProperty": ':',
            "procedure": procedure,
            "qualityIndex": str(cpqi),
            "responseFormat": "application/json",
            "offering": 'temporary',
            "eventTime": "%s/%s" % (start.isoformat(),
                                    nextStart.isoformat())
        }
        if function and resolution:
            params['aggregateFunction'] = function
            params['aggregateInterval'] = resolution
            if nodataValue != None:
                params['aggregateNodata'] = nodataValue
            if nodataQI != None:
                params['aggregateNodataQi'] = nodataQI
        res = req.get("%s/%s" % (surl, ssrv), params=params,
                      auth=(suser, spwd), verify=False)
        # Check if an Exception occured
        if 'ExceptionReport' in res.content:
            raise Exception (res.content)
        smeasures = res.json()['ObservationCollection']['member'][0]
        #pp.pprint(smeasures)
        log(" > %s measures from: %s to: %s" % (
            len(smeasures['result']['DataArray']['values']),
            start.isoformat(), nextStart.isoformat()))

        dtemplate["samplingTime"] = {}
        if lm and len(smeasures['result']['DataArray']['values']) > 0:
            # lm: use the actual first/last measure timestamps as the
            # observation's sampling period
            dtemplate["samplingTime"]["beginPosition"] = smeasures[
                'result']['DataArray']['values'][0][0]
            dtemplate["samplingTime"]["endPosition"] = smeasures[
                'result']['DataArray']['values'][-1][0]
        else:
            # Otherwise use the window boundaries
            dtemplate["samplingTime"]["beginPosition"] = start.isoformat()
            dtemplate["samplingTime"]["endPosition"] = nextStart.isoformat()
        dtemplate['result']['DataArray']['values'] = smeasures[
            'result']['DataArray']['values']
        dtemplate['result']['DataArray']['field'] = smeasures[
            'result']['DataArray']['field']

        # POST data to WA
        res = req.post(
            "%s/wa/istsos/services/%s/operations/insertobservation" % (
                durl, dsrv),
            auth=(duser, dpwd),
            verify=False,
            data=json.dumps({
                "ForceInsert": "true",
                "AssignedSensorId": ddata['data']['assignedSensorId'],
                "Observation": dtemplate
            })
        )
        # read response
        log(" > Insert observation success: %s" % res.json()['success'])
        #print res.json()
        if not res.json()['success']:
            raise Exception (
                'Error inserting observation: %s' % res.json()['message'])
        # Advance to the next window, clamping the final step
        start = nextStart
        if start < stop and start + interval > stop:
            interval = stop - start