def writeAtiMelt(selectedList, dList, aList, mList):
    # print '\n#----------------------------------------------------#'
    # print '#Begin Creating ATI and Melt-CUM DSS paths'
    # print '#----------------------------------------------------#'
    for ab, d, a, m in zip(selectedList, dList, aList, mList):
        aPart = ab.split("/")[1]
        b = ab.split("/")[2]
        # print 'aPart: ', aPart

        # ATI DSS path created
        tsATI = TimeSeriesContainer()
        tsATI.watershed = aPart
        tsATI.location = b
        tsATI.parameter = 'ATI'
        # F-part will be 'CALC'
        tsATI.version = 'CALC'
        # Interval is hard coded as 1Day = 1440 minutes.
        tsATI.interval = 1440
        tsATI.fullName = '/%s/%s/%s//1DAY/%s/' % \
            (aPart, b, 'ATI', 'CALC')
        tsATI.values = a
        times = []
        hecTime = HecTime()
        for i, v in enumerate(d):
            hecTime.set(d[i])
            times.append(hecTime.value())
        tsATI.times = times
        tsATI.startTime = times[0]
        tsATI.endTime = times[-1]
        tsATI.numberValues = len(a)
        tsATI.units = 'DEGF-DAY'
        tsATI.type = 'INST-VAL'
        dssFile.put(tsATI)

        # MELT-CUM DSS path created
        tsMelt = TimeSeriesContainer()
        tsMelt.watershed = aPart
        tsMelt.location = b
        tsMelt.parameter = 'MELT-CUM'
        tsMelt.version = 'CALC'
        # Interval and fullName are hard coded as 1Day = 1440 minutes.
        tsMelt.interval = 1440
        tsMelt.fullName = '/%s/%s/%s//1DAY/%s/' % \
            (aPart, b, 'MELT-CUM', 'CALC')
        tsMelt.values = m
        tsMelt.times = times
        tsMelt.startTime = times[0]
        tsMelt.endTime = times[-1]
        tsMelt.numberValues = len(m)
        tsMelt.units = 'IN'
        tsMelt.type = 'INST-VAL'
        dssFile.put(tsMelt)
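# --- usage sketch for writeAtiMelt (assumption, not from the original source) ---
# writeAtiMelt() relies on a module-level DSS handle named `dssFile` and parallel
# lists of pathnames, date strings, ATI values, and cumulative melt values.
# Every literal below (file name, pathname, dates, numbers) is a placeholder.
from hec.heclib.dss import HecDss

dssFile = HecDss.open('C:/temp/ati_melt_example.dss')    # assumed file name
selectedList = ['/BASIN/GAGE1/TEMP-AIR//1DAY/OBS/']      # A and B parts are reused
dList = [['01Jan2020 2400', '02Jan2020 2400']]           # one date list per path
aList = [[10.5, 12.0]]                                   # ATI values per path
mList = [[0.0, 0.25]]                                    # cumulative melt per path
writeAtiMelt(selectedList, dList, aList, mList)
dssFile.done()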
def reformatForecast(fcstTSC):
    # Converts dailies to a forecast set; converts irregular data with
    # monthly 0s to standard CRT format.
    outTimes = []
    outVals = []
    firstPoint = True
    prevTime, prevVal = 0, Constants.UNDEFINED
    isIrregular = (fcstTSC.interval <= 0)
    # iterate through values
    for t, v in zip(fcstTSC.times, fcstTSC.values):
        # add end point from previous when this changes; then start with new value
        ht = HecTime()
        ht.set(t)
        prevHt = HecTime()
        prevHt.set(prevTime)
        # insert new point whenever it changes, month changes, or on the first point
        if firstPoint or v != prevVal or (ht.month() != prevHt.month() and v != 0):
            # output end timestamp for previous value
            if not firstPoint:
                outTimes.append(prevTime)
                outVals.append(prevVal)
            if firstPoint:
                firstPoint = False
            # output start timestamp for this value
            offset = -(24 * 60) + 1  # default to one minute past 0000 this day.
            if isIrregular and ht.minute() != 0:
                offset = 0
            outTimes.append(t + offset)
            outVals.append(v)
        prevTime = t
        prevVal = v
    # add last value to finish out series.
    # outTimes.append(t)
    # outVals.append(v)

    # create output TSC
    fcstOutTSC = TimeSeriesContainer()
    fcstOutTSC.interval = -1
    newPathName = fcstTSC.fullName.split("/")
    if not isIrregular:
        newPathName[5] = "IR-CENTURY"
    fcstOutTSC.fullName = "/".join(newPathName)
    fcstOutTSC.times = outTimes
    fcstOutTSC.values = outVals
    print fcstOutTSC.fullName
    print outVals
    fcstOutTSC.numberValues = len(outVals)
    fcstOutTSC.startTime = outTimes[0]
    fcstOutTSC.units = fcstTSC.units
    fcstOutTSC.type = "INST-VAL"
    return fcstOutTSC
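# --- usage sketch for reformatForecast (assumption, not from the original source) ---
# reformatForecast() takes any TimeSeriesContainer read from DSS and returns an
# irregular (IR-CENTURY) container ready to be written back. The file name and
# pathname below are hypothetical placeholders.
from hec.heclib.dss import HecDss

dss = HecDss.open('C:/temp/forecast_example.dss')        # assumed file name
fcstTSC = dss.get('/BASIN/LOC/FLOW//1DAY/FCST/', True)   # assumed pathname
fcstOutTSC = reformatForecast(fcstTSC)
dss.put(fcstOutTSC)
dss.done()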
def _write_dss(input_):
    # Create time series container
    tsc = TimeSeriesContainer()

    # Get data from input file
    try:
        tsc.fullName = input_['fullname']
        tsc.interval = input_['interval']
        tsc.units = input_['units']
        tsc.type = input_['dsstype']
        data = input_['data']
        filepath = input_['filepath']
    except KeyError:
        _logger.exception('Incomplete data on the dss handler input file!')
        _logger.error('Exiting.')
        exit(1)
    _logger.debug('filepath: %s', filepath)

    # Get list of times and respective values
    times = []
    values = []
    for k, v in sorted(data.viewitems()):
        # t = datetime.strptime(k, '%Y-%m-%d %H:%M:%S')
        t = HecTime(k.strftime('%d%b%Y'), k.strftime('%H%M'))
        times.append(t.value())
        values.append(v)

    # Set list of times, values, and size of list
    tsc.times = times
    tsc.values = values
    tsc.numberValues = len(values)
    _logger.debug('tsc.times: %s', tsc.times)
    _logger.debug('tsc.values: %s', tsc.values)

    # Check if dss file already exists
    if op.isfile(filepath):
        _logger.warning('Deleting old file!')
        # Delete existing dss file
        try:
            os.remove(filepath)
        except OSError:
            _logger.warning('Warning! Deletion of old file failed.')
    # else:
    #     _logger.warning("File doesn't exist!")

    # Write new dss file
    dss_file = HecDss.open(filepath)
    dss_file.put(tsc)
    dss_file.done()
def createTemplateTSC(rawDataList):
    # Derives a TimeSeriesContainer object from the raw ESP data list
    # where all that needs to be done is update the pathname and values -
    # timestamps should be uniform across each ESP trace.

    # initializing HEC java objects
    tsc = TimeSeriesContainer()  # new TSC object
    hecStartTime = HecTime()
    hecEndTime = HecTime()

    # computing HEC times and interval (minutes) of timestep
    times = []
    for i in range(len(rawDataList)):
        times.append(rawDataList[i][0])
    hecStartTime.set(times[0])
    hecEndTime.set(times[-1])

    # The formatting of these times might need to be adjusted at a later point
    Dates = []
    for x in range(len(times)):
        IndividualDate = times[x]
        T = HecTime()
        T.set(IndividualDate)
        Dates.append(T.value())

    DiffBetweenInterval = [a - Dates[i - 1] for i, a in enumerate(Dates)][1:]
    UniqueList_Minutes = []
    for x in DiffBetweenInterval:
        # Collect each distinct timestep found in the trace
        if x not in UniqueList_Minutes:
            UniqueList_Minutes.append(x)
    interval = UniqueList_Minutes[0]

    hecTimes = list(range(Dates[0], Dates[-1], int(interval)))
    hecTimes.append(Dates[-1])
    interval_hours = int(interval) / 60

    tsc.times = hecTimes
    tsc.values = [Constants.UNDEFINED] * len(times)  # add null data number here.
    tsc.interval = interval_hours
    tsc.startTime = (int(hecStartTime.julian()) * 1440) + 1080
    tsc.endTime = (int(hecEndTime.julian()) * 1440) + 1080
    tsc.numberValues = len(times)
    tsc.units = "CFS"
    tsc.type = "PER-AVER"
    tsc.parameter = "FLOW"  # Assuming always want this to be flow
    return tsc
def addData(dss, path, data):
    tsc = TimeSeriesContainer()
    tsc.fullName = path
    start = HecTime("01Jan2100", "1200")
    tsc.interval = 5
    rain = data
    times = []
    for value in rain:
        times.append(start.value())
        start.add(tsc.interval)
    tsc.times = times
    tsc.values = rain
    tsc.numberValues = len(rain)
    tsc.units = "MM"
    tsc.type = "PER-CUM"
    dss.put(tsc)
def createTSrecord(dss_filename, pathname, start_time, values, comp_step, data_units):
    start = HecTime()
    tsc = TimeSeriesContainer()
    tsc.fullName = pathname
    tsc.interval = comp_step
    start.set(start_time)
    times = []
    for value in values:
        times.append(start.value())
        start.add(tsc.interval)
    tsc.values = values
    tsc.times = times
    tsc.startTime = times[0]
    tsc.endTime = times[-1]
    tsc.numberValues = len(values)
    tsc.units = data_units
    tsc.type = "INST-VAL"
    dss_file = HecDss.open(dss_filename)
    dss_file.put(tsc)
    dss_file.done()
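# --- usage sketch for createTSrecord (assumption, not from the original source) ---
# comp_step is the interval in minutes and start_time is any date/time string
# that HecTime.set() understands. The file name, pathname, and values below are
# hypothetical placeholders.
createTSrecord('C:/temp/ts_example.dss',
               '/BASIN/LOC/STAGE//1HOUR/OBS/',
               '12Oct2003 0100',
               [12.36, 12.37, 12.42, 12.55],
               60,
               'FEET')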
def makeTimeSeriesContainer(station, interval, tz, records, decodeInfo):
    global timezones
    sdf = SimpleDateFormat("ddMMMyyyy, HH:mm")
    if dssTimezone:
        if not timezones["DSS"]:
            timezones["DSS"] = TimeZone.getTimeZone(tzInfo[dssTimezone]["JAVA"])
        sdf.setTimeZone(timezones["DSS"])
    else:
        sdf.setTimeZone(timezones["USGS"])
    dd, decodeInfo = decodeInfo
    cal = Calendar.getInstance()
    t = HecTime()
    tsc = TimeSeriesContainer()
    tsc.interval = interval
    times = []
    values = []
    tsc.quality = None
    factor = decodeInfo["DSS_FACTOR"]
    for j in range(len(records)):
        millis, value = records[j]
        cal.setTimeInMillis(millis)
        t.set(sdf.format(cal.getTime()))
        times.append(t.value())
        try:
            values.append(float(value) * factor)
        except:
            values.append(Constants.UNDEFINED)
    tsc.times = times
    tsc.values = values
    tsc.startTime = times[0]
    tsc.endTime = times[-1]
    tsc.numberValues = len(values)
    tsc.timeZoneID = sdf.getTimeZone().getID()
    tsc.timeZoneRawOffset = sdf.getTimeZone().getRawOffset()
    return tsc
num_locations = len(csv_list[0]) - 1
num_values = len(csv_list) - NUM_METADATA_LINES  # Ignore metadata
location_ids = csv_list[1][1:]

try:
    for i in range(0, num_locations):
        precipitations = []
        for j in range(NUM_METADATA_LINES, num_values + NUM_METADATA_LINES):
            p = float(csv_list[j][i + 1])
            precipitations.append(p)

        tsc = TimeSeriesContainer()
        # tsc.fullName = "/BASIN/LOC/FLOW//1HOUR/OBS/"
        # tsc.fullName = '//' + locationIds[i].upper() + '/PRECIP-INC//1DAY/GAGE/'
        tsc.fullName = '//' + location_ids[i].upper() + '/PRECIP-INC//1HOUR/GAGE/'
        start = HecTime(csv_list[NUM_METADATA_LINES][0])
        tsc.interval = 60  # in minutes
        times = []
        for value in precipitations:
            times.append(start.value())
            start.add(tsc.interval)
        tsc.times = times
        tsc.values = precipitations
        tsc.numberValues = len(precipitations)
        tsc.units = "MM"
        tsc.type = "PER-CUM"
        converted_dss.put(tsc)
finally:
    converted_dss.done()
def makeTimeSeriesContainer(tsData, timeZone, pathname=None):
    '''
    Construct a TimeSeriesContainer object from a python dictionary that was
    created from a single "time-series" returned from the CWMS RADAR web
    service
    '''
    #---------------#
    # initial setup #
    #---------------#
    tsc = None
    try:
        tz = TimeZone.getTimeZone(timeZone)
        sdf8601 = SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ssXXX")
        sdfHecTime = SimpleDateFormat("ddMMMyyyy, HH:mm")
        cal = Calendar.getInstance()
        for obj in sdf8601, sdfHecTime, cal:
            obj.setTimeZone(tz)
        ht = HecTime()
        times, values, qualities = [], [], []
        #------------------#
        # process the data #
        #------------------#
        if tsData.has_key("regular-interval-values"):
            #----------------------------------------#
            # regular time series (a lot to process) #
            #----------------------------------------#
            rts = tsData["regular-interval-values"]
            intvlStr = rts["interval"]
            unit = rts["unit"].split()[0]
            if intvlStr.startswith("PT"):
                intvlNum, intvlUnit = int(intvlStr[2:-1]), intvlStr[-1]
                try:
                    factor, field = {
                        "M": (1, Calendar.MINUTE),
                        "H": (60, Calendar.HOUR_OF_DAY)
                    }[intvlUnit]
                except KeyError:
                    raise Exception("Unexpected interval: %s" % intvlStr)
            else:
                intvlNum, intvlUnit = int(intvlStr[1:-1]), intvlStr[-1]
                try:
                    factor, field = {
                        "Y": (1440 * 365, Calendar.YEAR),
                        "M": (1440 * 30, Calendar.MONTH),
                        "D": (1440, Calendar.DATE)
                    }[intvlUnit]
                except KeyError:
                    raise Exception("Unexpected interval: %s" % intvlStr)
            intvl = intvlNum * factor
            segmentCount = rts["segment-count"]
            cal.setTimeInMillis(
                sdf8601.parse(rts["segments"][0]["first-time"]).getTime())
            for i in range(segmentCount):
                for j in range(rts["segments"][i]["value-count"]):
                    ht.set(sdfHecTime.format(cal.getTimeInMillis()))
                    v, q = rts["segments"][i]["values"][j]
                    times.append(ht.value())
                    values.append(v)
                    qualities.append(q)
                    cal.add(field, intvlNum)
                if i < segmentCount - 1:
                    nextBegin = sdf8601.parse(
                        rts["segments"][i + 1]["first-time"]).getTime()
                    time = cal.getTimeInMillis()
                    while time < nextBegin:
                        ht.set(sdfHecTime.format(time))
                        times.append(ht.value())
                        values.append(Constants.UNDEFINED)
                        qualities.append(0)
                        cal.add(field, intvlNum)
                        time = cal.getTimeInMillis()
        elif tsData.has_key("irregular-interval-values"):
            #------------------------------#
            # irregular time series (easy) #
            #------------------------------#
            its = tsData["irregular-interval-values"]
            unit = its["unit"].split()[0]
            intvl = 0
            for t, v, q in its["values"]:
                ht.set(sdfHecTime.format(sdf8601.parse(t)))
                times.append(ht.value())
                values.append(v)
                qualities.append(q)
        else:
            raise Exception("Time series has no values")
        #--------------------------------------------------#
        # code common to regular and irregular time series #
        #--------------------------------------------------#
        tsc = TimeSeriesContainer()
        tsc.times = times
        tsc.values = values
        tsc.quality = qualities
        tsc.numberValues = len(times)
        tsc.startTime = times[0]
        tsc.endTime = times[-1]
        tsc.interval = intvl
        tsc.units = unit
        tsc.timeZoneID = timeZone
        tsc.timeZoneRawOffset = tz.getRawOffset()
        name = tsData["name"]
        loc, param, paramType, intv, dur, ver = name.split(".")
        if pathname:
            #---------------------------#
            # use pathname if specified #
            #---------------------------#
            A, B, C, D, E, F = 1, 2, 3, 4, 5, 6
            parts = pathname.split("/")
            parts[D] = ''
            tsc.fullName = "/".join(parts)
            tsc.watershed = parts[A]
            try:
                tsc.location, tsc.subLocation = parts[B].split("-", 1)
            except:
                tsc.location = parts[B]
            try:
                tsc.parameter, tsc.subParameter = parts[C].split("-", 1)
            except:
                tsc.parameter = parts[C]
            try:
                tsc.version, tsc.subVersion = parts[F].split("-", 1)
            except:
                tsc.version = parts[F]
        else:
            #--------------------------------------#
            # no pathname, use CWMS time series id #
            #--------------------------------------#
            try:
                tsc.location, tsc.subLocation = loc.split("-", 1)
            except:
                tsc.location = loc
            try:
                tsc.parameter, tsc.subParameter = param.split("-", 1)
            except:
                tsc.parameter = param
            try:
                tsc.version, tsc.subVersion = ver.split("-", 1)
            except:
                tsc.version = ver
        tsc.type = {
            "Total": "PER-CUM",
            "Max": "PER-MAX",
            "Min": "PER-MIN",
            "Const": "INST-VAL",
            "Ave": "PER-AVER",
            "Inst": ("INST-VAL", "INST-CUM")[param.startswith("Precip")]
        }[paramType]
    except:
        output(traceback.format_exc())
    return tsc
def put_to_dss(site, dss):
    """Save timeseries to DSS File

    Parameters
    ----------
    site: json
        JSON object containing meta data about the site/parameter combination,
        time array and value array
    dss: HecDss DSS file object
        The open DSS file records are written to

    Returns
    -------
    None

    Raises
    ------
    Put to DSS exception handled with a message output saying site not saved,
    but continues on trying additional site/parameter combinations
    """
    Site = namedtuple('Site', site.keys())(**site)
    parameter, unit, data_type, version = usgs_code[Site.code]
    times = [
        HecTime(t, HecTime.MINUTE_GRANULARITY).value() for t in Site.times
    ]

    # Find the smallest timestep (in minutes) present in the record
    timestep_min = None
    for t in range(len(times) - 1):
        ts = abs(times[t + 1] - times[t])
        if timestep_min is None or ts < timestep_min:
            timestep_min = ts
    epart = TimeStep().getEPartFromIntervalMinutes(timestep_min)

    # Set the pathname
    pathname = '/{0}/{1}/{2}//{3}/{4}/'.format(
        ws_name, Site.site_number, parameter, epart, version).upper()
    apart, bpart, cpart, _, _, fpart = pathname.split('/')[1:-1]

    container = TimeSeriesContainer()
    container.fullName = pathname
    container.location = apart
    container.parameter = parameter
    container.type = data_type
    container.version = version
    container.interval = timestep_min
    container.units = unit
    container.times = times
    container.values = Site.values
    container.numberValues = len(Site.times)
    container.startTime = times[0]
    container.endTime = times[-1]
    container.timeZoneID = tz
    # container.makeAscending()

    if not TimeSeriesMath.checkTimeSeries(container):
        return 'Site: "{}" not saved to DSS'.format(Site.site_number)
    tsc = TimeSeriesFunctions.snapToRegularInterval(container, epart,
                                                    "0MIN", "0MIN", "0MIN")

    # Put the data to DSS
    try:
        dss.put(tsc)
    except Exception as ex:
        print(ex)
        return 'Site: "{}" not saved to DSS'.format(Site.site_number)
ver = "OBS" startTime = "12Oct2003 0100" values = [12.36, 12.37, 12.42, 12.55, 12.51, 12.47, 12.43, 12.39] hecTime = HecTime() tsc = TimeSeriesContainer() tsc.watershed = watershed tsc.location = loc tsc.parameter = param tsc.version = ver tsc.fullName = "/%s/%s/%s//1HOUR/%s/" % (watershed, loc, param, ver) tsc.interval = 60 hecTime.set(startTime) times=[] for value in values: times.append(hecTime.value()) hecTime.add(tsc.interval) tsc.values = values tsc.times = times tsc.startTime = times[0] tsc.endTime = times[-1] tsc.numberValues = len(values) tsc.units = "FEET" tsc.type = "INST-VAL" dssFile = HecDss.open("myFile.dss") dssFile.put(tsc) dssFile.done()
fin.close()
try:
    try:
        myDss = HecDss.open(
            "C:/Users/CIVIL/Desktop/HEC-HMS_EXAMPLE/HEC-HMS_EXAMPLE/SandyUpdate28th/SandyUpdate28th.dss"
        )
        tsc = TimeSeriesContainer()
        tsc.fullName = "//%s/PRECIP-INC/%s /1HOUR/OBS/" % (f[:-4], times[0])
        start = HecTime(dates[0], times[0])
        tsc.interval = int(times[1]) - int(times[0])
        hec_times = []
        for t in times:
            hec_times.append(start.value())
            start.add(tsc.interval)
        tsc.times = hec_times
        tsc.values = precip
        tsc.numberValues = len(precip)
        tsc.units = "INCHES"
        tsc.type = "PER-CUM"
        myDss.put(tsc)
    except Exception, e:
        MessageBox.showError(' '.join(e.args), "Python Error")
    except java.lang.Exception, e:
        MessageBox.showError(e.getMessage(), "Error")
finally:
    print "DSS created for %s!" % (fin.name)
    HecDss.done(myDss)
from hec.script import Plot, MessageBox
from hec.io import TimeSeriesContainer
from hec.heclib.dss import HecDss, DSSPathname
from hec.heclib.util import HecTime
import java
import sys

try:
    myDss = HecDss.open("c:/temp/test.dss")
    tsc = TimeSeriesContainer()
    tsc.fullName = "/BASIN/LOC/FLOW//1HOUR/OBS/"
    start = HecTime("04Sep1996", "1330")
    tsc.interval = 60
    flows = [0.0, 2.0, 1.0, 4.0, 3.0, 6.0, 5.0, 8.0, 7.0, 9.0]
    times = []
    for value in flows:
        times.append(start.value())
        start.add(tsc.interval)
    tsc.times = times
    tsc.values = flows
    tsc.numberValues = len(flows)
    tsc.units = "CFS"
    tsc.type = "PER-AVER"
    myDss.put(tsc)
    print "Done"
except Exception, e:
    print(e)
finally:
    print "Closing DSS File"
    myDss.close()
aPart = locDict[b]
# print aPart
# print a

# F-part will be 'CALC'
# ATI DSS path created
tsATI = TimeSeriesContainer()
tsATI.watershed = aPart
tsATI.location = b
tsATI.parameter = 'ATI'
tsATI.version = 'CALC'
tsATI.interval = 1440
tsATI.fullName = '/%s/%s/%s//1DAY/%s/' % \
    (aPart, b, 'ATI', 'CALC')
tsATI.values = a
times = []
hecTime = HecTime()
for i, v in enumerate(d):
    hecTime.set(d[i])
    times.append(hecTime.value())
tsATI.times = times
tsATI.startTime = times[0]
tsATI.endTime = times[-1]
tsATI.numberValues = len(a)
tsATI.units = 'DEGF-DAY'
tsATI.type = 'INST-VAL'
dssFile.put(tsATI)

# MELT-CUM DSS path created
tsMelt = TimeSeriesContainer()
from hec.heclib.util import HecTime
from hec.heclib.dss import HecDss, DSSPathname
from hec.io import TimeSeriesContainer
import java
import sys
import os

fileName = "c:/temp/day_granularity.dss"
if os.path.isfile(fileName):
    os.remove(fileName)
dss = HecDss.open(fileName)

tsc = TimeSeriesContainer()
tsc.fullName = "/test/day_granularity/FLOW/04Sep3000/1YEAR/MODEL/"
tsc.values = range(1, 3000, 1)
start = HecTime("04Sep3000", "1330")
LastYear = 3000
AnnualTimes = []
for x in range(len(tsc.values)):
    LastYear += 1
    hecTime = HecTime('31Dec%04d 2400' % LastYear, HecTime.DAY_GRANULARITY)
    AnnualTimes.append(hecTime.value())
tsc.times = AnnualTimes
tsc.numberValues = len(tsc.values)
tsc.units = "CFS"
tsc.type = "PER-AVER"
tsc.setTimeGranularitySeconds(86400)
dss.put(tsc)