def writeAtiMelt(selectedList, dList, aList, mList):
    """Write ATI and cumulative melt series to the open DSS file.

    For each selected pathname, builds two daily (1440-minute) INST-VAL
    TimeSeriesContainers -- parameter 'ATI' in DEGF-DAY and 'MELT-CUM'
    in IN, both with F-part 'CALC' -- and puts them into the
    module-level dssFile.
    """
    for ab, dates, atiVals, meltVals in zip(selectedList, dList, aList, mList):
        pathParts = ab.split("/")
        aPart = pathParts[1]
        bPart = pathParts[2]

        # Convert the date strings to HecTime integer values once;
        # both containers share the same time array.
        hecTime = HecTime()
        timeVals = []
        for dateStr in dates:
            hecTime.set(dateStr)
            timeVals.append(hecTime.value())

        def _makeDaily(param, vals, units):
            # Build one 1DAY INST-VAL container for this A/B pair.
            tsc = TimeSeriesContainer()
            tsc.watershed = aPart
            tsc.location = bPart
            tsc.parameter = param
            tsc.version = 'CALC'
            tsc.interval = 1440  # 1 day expressed in minutes
            tsc.fullName = '/%s/%s/%s//1DAY/%s/' % (aPart, bPart, param, 'CALC')
            tsc.values = vals
            tsc.times = timeVals
            tsc.startTime = timeVals[0]
            tsc.endTime = timeVals[-1]
            tsc.numberValues = len(vals)
            tsc.units = units
            tsc.type = 'INST-VAL'
            return tsc

        # ATI first, then cumulative melt -- same order as before.
        dssFile.put(_makeDaily('ATI', atiVals, 'DEGF-DAY'))
        dssFile.put(_makeDaily('MELT-CUM', meltVals, 'IN'))
def _write_dss(input_):
    """Write one time series described by the handler input dict to a DSS file.

    Required keys: fullname, interval, units, dsstype, data (a mapping of
    datetime-like keys to values), filepath. Exits the process when any
    key is missing. Any pre-existing DSS file at filepath is deleted first.
    """
    tsc = TimeSeriesContainer()
    try:
        tsc.fullName = input_['fullname']
        tsc.interval = input_['interval']
        tsc.units = input_['units']
        tsc.type = input_['dsstype']
        data = input_['data']
        filepath = input_['filepath']
    except KeyError:
        _logger.exception('Incomplete data on the dss handler input file!')
        _logger.error('Exiting.')
        exit(1)
    _logger.debug('filepath: %s', filepath)

    # Build parallel time/value arrays in chronological key order.
    times = []
    values = []
    for timestamp, value in sorted(data.viewitems()):
        # Keys are datetime-like; split into HecTime's date and time parts.
        hecTime = HecTime(timestamp.strftime('%d%b%Y'),
                          timestamp.strftime('%H%M'))
        times.append(hecTime.value())
        values.append(value)

    tsc.times = times
    tsc.values = values
    tsc.numberValues = len(values)
    _logger.debug('tsc.times: %s', tsc.times)
    _logger.debug('tsc.values: %s', tsc.values)

    # Start from a clean file: remove any existing DSS at the target path.
    if op.isfile(filepath):
        _logger.warning('Deleting old file!')
        try:
            os.remove(filepath)
        except OSError:
            _logger.warning('Warning! Deletion of old file failed.')

    # Write the new dss file.
    dss_file = HecDss.open(filepath)
    dss_file.put(tsc)
    dss_file.done()
def addData(dss, path, data):
    """Put *data* into *dss* at *path* as a 5-minute PER-CUM series (MM).

    Times are synthesized at a fixed 5-minute step starting from
    01Jan2100 12:00.
    """
    clock = HecTime("01Jan2100", "1200")
    tsc = TimeSeriesContainer()
    tsc.fullName = path
    tsc.interval = 5  # minutes
    times = []
    for _ in data:
        times.append(clock.value())
        clock.add(tsc.interval)
    tsc.times = times
    tsc.values = data
    tsc.numberValues = len(data)
    tsc.units = "MM"
    tsc.type = "PER-CUM"
    dss.put(tsc)
def processPathsDatesList(sP):
    """Return, for each DSS path in *sP*, the list of its record dates.

    Reads each path from the module-level dssFile and converts the raw
    HecTime integer times into formatted date strings (style 4).
    """
    hecTime = HecTime()
    dList = []
    for path in sP:
        record = dssFile.get(path, 1)
        dates = []
        for rawTime in record.times:
            hecTime.set(rawTime)
            # deepcopy matches the original's defensive copy of the
            # formatted date string.
            dates.append(deepcopy(hecTime.date(4)))
        dList.append(dates)
    return dList
def makeTimeSeriesContainer(station, interval, tz, records, decodeInfo):
    # Build a TimeSeriesContainer from (epoch-millis, value) records,
    # applying the decode factor and rendering times in the configured
    # DSS (or USGS) time zone.
    # NOTE(review): `station` and `tz` are unused in this body; `dssTimezone`,
    # `timezones` and `tzInfo` are module-level -- confirm against the caller.
    global timezones
    sdf = SimpleDateFormat("ddMMMyyyy, HH:mm")
    if dssTimezone:
        # Lazily resolve and cache the DSS output time zone.
        if not timezones["DSS"]:
            timezones["DSS"] = TimeZone.getTimeZone(
                tzInfo[dssTimezone]["JAVA"])
        sdf.setTimeZone(timezones["DSS"])
    else:
        sdf.setTimeZone(timezones["USGS"])
    # decodeInfo arrives as a 2-tuple; keep only the dict portion.
    dd, decodeInfo = decodeInfo
    cal = Calendar.getInstance()
    t = HecTime()
    tsc = TimeSeriesContainer()
    tsc.interval = interval
    times = []
    values = []
    tsc.quality = None
    factor = decodeInfo["DSS_FACTOR"]
    for j in range(len(records)):
        millis, value = records[j]
        cal.setTimeInMillis(millis)
        # Format the instant in the chosen zone, then parse into HecTime.
        t.set(sdf.format(cal.getTime()))
        times.append(t.value())
        try:
            values.append(float(value) * factor)
        except:
            # Non-numeric value (e.g. a missing-data marker) -> UNDEFINED.
            values.append(Constants.UNDEFINED)
    tsc.times = times
    tsc.values = values
    tsc.startTime = times[0]
    tsc.endTime = times[-1]
    tsc.numberValues = len(values)
    tsc.timeZoneID = sdf.getTimeZone().getID()
    tsc.timeZoneRawOffset = sdf.getTimeZone().getRawOffset()
    return tsc
def createTSrecord(dss_filename, pathname, start_time, values, comp_step, data_units):
    """Write *values* as an INST-VAL record at *pathname* in *dss_filename*.

    start_time : anything accepted by HecTime.set()
    comp_step  : time step in minutes
    data_units : DSS units string for the record
    """
    # Synthesize the time array from the start time and interval.
    clock = HecTime()
    clock.set(start_time)
    times = []
    for _ in values:
        times.append(clock.value())
        clock.add(comp_step)

    tsc = TimeSeriesContainer()
    tsc.fullName = pathname
    tsc.interval = comp_step
    tsc.values = values
    tsc.times = times
    tsc.startTime = times[0]
    tsc.endTime = times[-1]
    tsc.numberValues = len(values)
    tsc.units = data_units
    tsc.type = "INST-VAL"

    dss_file = HecDss.open(dss_filename)
    dss_file.put(tsc)
    dss_file.done()
def reformatForecast(fcstTSC):
    # converts dailies to forecast set;
    # converts irregular data with monthly 0s to standard CRT format.
    # Emits start/end point pairs whenever the value changes (or the month
    # rolls over on non-zero data), producing an irregular output series.
    outTimes = []
    outVals = []
    firstPoint = True
    prevTime, prevVal = 0, Constants.UNDEFINED
    # interval <= 0 marks an irregular input series
    isIrregular = (fcstTSC.interval <= 0)
    # iterate through values
    for t, v in zip(fcstTSC.times, fcstTSC.values):
        # add end point from previous when this changes; then start with new value
        ht = HecTime()
        ht.set(t)
        prevHt = HecTime()
        prevHt.set(prevTime)
        # insert new point whenever it changes, month changes, or on the first point
        if firstPoint or v != prevVal or (ht.month() != prevHt.month() and v != 0):
            # output endtimestamp for previous value
            if not firstPoint:
                outTimes.append(prevTime)
                outVals.append(prevVal)
            if firstPoint:
                firstPoint = False
            # output start timestamp for this value
            offset = -(24 * 60) + 1  # default to one minute past 0000 this day.
            if isIrregular and ht.minute() != 0:
                offset = 0
            outTimes.append(t + offset)
            outVals.append(v)
        prevTime = t
        prevVal = v
    # add last value to finish out series.
    # NOTE(review): intentionally disabled below -- confirm the series
    # should end at the last change rather than the last sample.
    #outTimes.append(t)
    #outVals.append(v)
    # create output TSC; interval -1 marks the output as irregular
    fcstOutTSC = TimeSeriesContainer()
    fcstOutTSC.interval = -1
    newPathName = fcstTSC.fullName.split("/")
    if not isIrregular:
        # regular input: rewrite the E part to match the irregular output
        newPathName[5] = "IR-CENTURY"
    fcstOutTSC.fullName = "/".join(newPathName)
    fcstOutTSC.times = outTimes
    fcstOutTSC.values = outVals
    print fcstOutTSC.fullName
    print(outVals)
    fcstOutTSC.numberValues = len(outVals)
    fcstOutTSC.startTime = outTimes[0]
    fcstOutTSC.units = fcstTSC.units
    fcstOutTSC.type = "INST-VAL"
    return fcstOutTSC
from hec.heclib.dss import HecDss, HecTimeSeries
from hec.io import TimeSeriesContainer
from hec.heclib.util import HecTime

# Build an hourly OBS stage series for OAKVILLE on the Green River.
watershed = "Green River"
loc = "OAKVILLE"
param = "STAGE"
ver = "OBS"
startTime = "12Oct2003 0100"
values = [12.36, 12.37, 12.42, 12.55, 12.51, 12.47, 12.43, 12.39]

tsc = TimeSeriesContainer()
tsc.watershed = watershed
tsc.location = loc
tsc.parameter = param
tsc.version = ver
tsc.fullName = "/%s/%s/%s//1HOUR/%s/" % (watershed, loc, param, ver)
tsc.interval = 60  # minutes

# Generate one timestamp per value, one interval apart.
hecTime = HecTime()
hecTime.set(startTime)
times = []
for value in values:
    times.append(hecTime.value())
    hecTime.add(tsc.interval)

tsc.values = values
tsc.times = times
tsc.startTime = times[0]
tsc.endTime = times[-1]
def onePerParam(config, dssFilePath): plotted = 0 # Number of plots exported messages = [] outputFolder = tbu.relativeFolder(config['output_folder'], dssFilePath) dssFile = HecDss.open(dssFilePath) minDate = HecTime(config['period']['start']) maxDate = HecTime(config['period']['end']) colours = _coloursByLocation(config) for param, paramConfig in config['params'].iteritems(): thePlot = Plot.newPlot() dataPaths = [ "/%s/%s/%s//%s/%s/" % (config['site'].upper(), location.upper(), param.upper(), config['interval'].upper(), config['version'].upper()) for location in config['locations'] ] datasets = [dssFile.get(p, 1) for p in dataPaths] datasets = [d for d in datasets if d.numberValues > 0] if not datasets: messages.append("No data for parameter '%s'." % param) continue map(thePlot.addData, datasets) thePlot.showPlot() thePlot.setPlotTitleText(param) thePlot.setPlotTitleVisible(1) thePlot.setSize(int(config['width']), int(config['height'])) # We can only access labels and curves at this point map(lambda d: thePlot.getLegendLabel(d).setText(d.location), datasets) for dataset in datasets: curve = thePlot.getCurve(dataset) curve.setLineColor("%s, %s, %s" % tuple(colours[dataset.location])) curve.setLineWidth(config['line']['width']) units = set(ds.units for ds in datasets) for vp_index, unit in enumerate(units): # 1 viewport per distinct unit viewport = thePlot.getViewport(vp_index) viewport.getAxis("X1").setScaleLimits(minDate.value(), maxDate.value()) viewport.getAxis("Y1").setLabel(unit) viewport.setMinorGridXVisible(1) viewport.setMinorGridYVisible(1) if paramConfig: if paramConfig['scale'].lower() == 'log': viewport.setLogarithmic('Y1') # This throws a warning message if y-values <= 0. We can't catch this as an exception. thePlot.saveToJpeg(os.path.join(outputFolder, config['version'] + "_" + param), 95) thePlot.close() plotted += 1 dssFile.done() return plotted, messages
# NOTE(review): fragment -- the opening `try:`, plus the definitions of
# csv_list, NUM_METADATA_LINES and converted_dss, live outside this chunk;
# the trailing `finally:` belongs to that unseen `try:`.
num_locations = len(csv_list[0]) - 1
num_values = len(csv_list) - NUM_METADATA_LINES  # Ignore Metadata
# Row 1 holds the location identifiers, one per data column.
location_ids = csv_list[1][1:]
for i in range(0, num_locations):
    # Column i+1 holds this location's values.
    precipitations = []
    for j in range(NUM_METADATA_LINES, num_values + NUM_METADATA_LINES):
        p = float(csv_list[j][i + 1])
        precipitations.append(p)
    tsc = TimeSeriesContainer()
    tsc.fullName = '//' + location_ids[i].upper() + '/PRECIP-INC//1HOUR/GAGE/'
    # First data row supplies the series start time.
    start = HecTime(csv_list[NUM_METADATA_LINES][0])
    tsc.interval = 60  # in minutes
    times = []
    for value in precipitations:
        times.append(start.value())
        start.add(tsc.interval)
    tsc.times = times
    tsc.values = precipitations
    tsc.numberValues = len(precipitations)
    tsc.units = "MM"
    tsc.type = "PER-CUM"
    converted_dss.put(tsc)
finally:
    converted_dss.done()
def onePerParam(config, dssFilePath):
    """Export one JPEG plot per configured parameter, all locations overlaid.

    Adds horizontal threshold markers and optional symbol markers.
    Returns (number_of_plots_exported, list_of_message_strings).
    """
    plotted = 0  # Number of plots exported
    messages = []
    outputFolder = tbu.relativeFolder(config['output_folder'], config['config_file'])
    minDate = HecTime(config['period']['start'])
    maxDate = HecTime(config['period']['end'])
    dssFile = HecDss.open(dssFilePath, str(minDate), str(maxDate))
    colours = _coloursByLocation(config)
    for param, paramConfig in config['params'].iteritems():
        plot = Plot.newPlot()
        # One DSS path per configured location for this parameter.
        dataPaths = [
            "/%s/%s/%s//%s/%s/" % (config['site'].upper(),
                                   location.upper(),
                                   param.upper(),
                                   config['interval'].upper(),
                                   config['version'].upper())
            for location in config['locations']
        ]
        datasets = [dssFile.get(p) for p in dataPaths]
        # Drop empty records so only locations with data are plotted.
        datasets = [d for d in datasets if d.numberValues > 0]
        if not datasets:
            messages.append("No data for parameter '%s'." % param)
            continue
        map(plot.addData, datasets)
        plot.showPlot()
        plot.setPlotTitleText(param)
        plot.setPlotTitleVisible(1)
        plot.setSize(int(config['width']), int(config['height']))
        # We can only access labels and curves at this point
        map(lambda d: plot.getLegendLabel(d).setText(d.location), datasets)
        # Style curves
        for dataset in datasets:
            curve = plot.getCurve(dataset)
            curve.setLineColor('{}, {}, {}'.format(*colours[dataset.location]))
            curve.setLineWidth(config['line']['width'])
            if config['line']['markers']:
                curve.setSymbolsVisible(1)
                curve.setSymbolType('Circle')
                curve.setSymbolLineColor('{}, {}, {}'
                                         .format(*colours[dataset.location]))
                curve.setSymbolFillColor('{}, {}, {}'
                                         .format(*colours[dataset.location]))
        # Axes scales
        units = set(ds.units for ds in datasets)
        for vp_index, unit in enumerate(units):  # 1 viewport per distinct unit
            viewport = plot.getViewport(vp_index)
            viewport.getAxis("X1").setScaleLimits(minDate.value(), maxDate.value())
            viewport.getAxis("Y1").setLabel(unit)
            viewport.setMinorGridXVisible(1)
            viewport.setMinorGridYVisible(1)
            if paramConfig:
                if paramConfig['scale'].lower() == 'log':
                    # This throws a warning message if y-values <= 0.
                    # We can't catch this as an exception.
                    viewport.setLogarithmic('Y1')
            # Horizontal threshold lines
            thresholds = _get_thresholds(datasets[0], dssFilePath, config)
            for marker in _thresholdMarkers(thresholds):
                viewport.addAxisMarker(marker)
        # Export plot
        plot.saveToJpeg(os.path.join(outputFolder,
                                     param + "-" + config['version']),
                        95)
        plot.close()
        plotted += 1
    dssFile.done()
    return plotted, messages
def put_to_dss(site, dss):
    """Save timeseries to DSS File

    Parameters
    ----------
    site: json
        JSON object containing meta data about the site/parameter
        combination, time array and value array
    dss: HecDss DSS file object
        The open DSS file records are written to

    Returns
    -------
    None, or a message string when the site could not be saved

    Raises
    ------
    Put to DSS exception handled with a message output saying site not
    saved, but continues on trying additional site/parameter combinations
    """
    Site = namedtuple('Site', site.keys())(**site)
    parameter, unit, data_type, version = usgs_code[Site.code]
    times = [
        HecTime(t, HecTime.MINUTE_GRANULARITY).value()
        for t in Site.times
    ]

    # Smallest gap between consecutive samples is the series interval.
    # (Fixed: the original tested `ts < timestep_min` BEFORE the None
    # check -- only safe under Python 2's None ordering -- and iterated
    # with a redundant enumerate() whose index was unused.)
    timestep_min = None
    for t in range(len(times) - 1):
        ts = abs(times[t + 1] - times[t])
        if timestep_min is None or ts < timestep_min:
            timestep_min = ts
    epart = TimeStep().getEPartFromIntervalMinutes(timestep_min)

    # Set the pathname
    pathname = '/{0}/{1}/{2}//{3}/{4}/'.format(
        ws_name, Site.site_number, parameter, epart, version).upper()
    apart, bpart, cpart, _, _, fpart = pathname.split('/')[1:-1]

    container = TimeSeriesContainer()
    container.fullName = pathname
    container.location = apart
    container.parameter = parameter
    container.type = data_type
    container.version = version
    container.interval = timestep_min
    container.units = unit
    container.times = times
    container.values = Site.values
    container.numberValues = len(Site.times)
    container.startTime = times[0]
    container.endTime = times[-1]
    container.timeZoneID = tz
    # container.makeAscending()
    if not TimeSeriesMath.checkTimeSeries(container):
        return 'Site: "{}" not saved to DSS'.format(Site.site_number)
    # Snap to a regular interval before writing.
    tsc = TimeSeriesFunctions.snapToRegularInterval(
        container, epart, "0MIN", "0MIN", "0MIN")

    # Put the data to DSS
    try:
        dss.put(tsc)
    except Exception as ex:
        print(ex)
        return 'Site: "{}" not saved to DSS'.format(Site.site_number)
def makeTimeSeriesContainer(tsData, timeZone, pathname=None):
    '''
    Construct a TimeSeriesContainer object from a python dictionary that was
    created from a single "time-series" returned from the CWMS RADAR web
    service
    '''
    #---------------#
    # initial setup #
    #---------------#
    tsc = None
    try:
        tz = TimeZone.getTimeZone(timeZone)
        sdf8601 = SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ssXXX")
        sdfHecTime = SimpleDateFormat("ddMMMyyyy, HH:mm")
        cal = Calendar.getInstance()
        # Put parser, formatter and calendar all in the requested zone.
        for obj in sdf8601, sdfHecTime, cal:
            obj.setTimeZone(tz)
        ht = HecTime()
        times, values, qualities = [], [], []
        #------------------#
        # process the data #
        #------------------#
        if tsData.has_key("regular-interval-values"):
            #----------------------------------------#
            # regular time series (a lot to process) #
            #----------------------------------------#
            rts = tsData["regular-interval-values"]
            intvlStr = rts["interval"]
            unit = rts["unit"].split()[0]
            if intvlStr.startswith("PT"):
                # ISO-8601 time duration (minutes / hours)
                intvlNum, intvlUnit = int(intvlStr[2:-1]), intvlStr[-1]
                try:
                    factor, field = {
                        "M": (1, Calendar.MINUTE),
                        "H": (60, Calendar.HOUR_OF_DAY)
                    }[intvlUnit]
                except KeyError:
                    raise Exception("Unexpected interval: %s" % intvlStr)
            else:
                # ISO-8601 date duration (years / months / days)
                intvlNum, intvlUnit = int(intvlStr[1:-1]), intvlStr[-1]
                try:
                    factor, field = {
                        "Y": (1440 * 365, Calendar.YEAR),
                        "M": (1440 * 30, Calendar.MONTH),
                        "D": (1440, Calendar.DATE)
                    }[intvlUnit]
                except KeyError:
                    raise Exception("Unexpected interval: %s" % intvlStr)
            # Interval expressed in minutes for the container.
            intvl = intvlNum * factor
            segmentCount = rts["segment-count"]
            cal.setTimeInMillis(
                sdf8601.parse(rts["segments"][0]["first-time"]).getTime())
            for i in range(segmentCount):
                for j in range(rts["segments"][i]["value-count"]):
                    ht.set(sdfHecTime.format(cal.getTimeInMillis()))
                    v, q = rts["segments"][i]["values"][j]
                    times.append(ht.value())
                    values.append(v)
                    qualities.append(q)
                    cal.add(field, intvlNum)
                if i < segmentCount - 1:
                    # Pad the gap to the next segment with UNDEFINED values
                    # so the series stays regular.
                    nextBegin = sdf8601.parse(
                        rts["segments"][i + 1]["first-time"]).getTime()
                    time = cal.getTimeInMillis()
                    while time < nextBegin:
                        ht.set(sdfHecTime.format(time))
                        times.append(ht.value())
                        values.append(Constants.UNDEFINED)
                        qualities.append(0)
                        cal.add(field, intvlNum)
                        time = cal.getTimeInMillis()
        elif tsData.has_key("irregular-interval-values"):
            #------------------------------#
            # irregular time series (easy) #
            #------------------------------#
            its = tsData["irregular-interval-values"]
            unit = its["unit"].split()[0]
            intvl = 0
            for t, v, q in its["values"]:
                ht.set(sdfHecTime.format(sdf8601.parse(t)))
                times.append(ht.value())
                values.append(v)
                qualities.append(q)
        else:
            raise Exception("Time series has no values")
        #--------------------------------------------------#
        # code common to regular and irregular time series #
        #--------------------------------------------------#
        tsc = TimeSeriesContainer()
        tsc.times = times
        tsc.values = values
        tsc.quality = qualities
        tsc.numberValues = len(times)
        tsc.startTime = times[0]
        tsc.endTime = times[-1]
        tsc.interval = intvl
        tsc.units = unit
        tsc.timeZoneID = timeZone
        tsc.timeZoneRawOffset = tz.getRawOffset()
        # CWMS time series identifier: loc.param.paramType.intv.dur.ver
        name = tsData["name"]
        loc, param, paramType, intv, dur, ver = name.split(".")
        if pathname:
            #---------------------------#
            # use pathname if specified #
            #---------------------------#
            A, B, C, D, E, F = 1, 2, 3, 4, 5, 6
            parts = pathname.split("/")
            # Blank the D (date) part.
            parts[D] = ''
            tsc.fullName = "/".join(parts)
            tsc.watershed = parts[A]
            # Sub-location/parameter/version follow the first hyphen, if any.
            try:
                tsc.location, tsc.subLocation = parts[B].split("-", 1)
            except:
                tsc.location = parts[B]
            try:
                tsc.parameter, tsc.subParameter = parts[C].split("-", 1)
            except:
                tsc.parameter = parts[C]
            try:
                tsc.version, tsc.subVersion = parts[F].split("-", 1)
            except:
                tsc.version = parts[F]
        else:
            #--------------------------------------#
            # no pathname, use CWMS time series id #
            #--------------------------------------#
            try:
                tsc.location, tsc.subLocation = loc.split("-", 1)
            except:
                tsc.location = loc
            try:
                tsc.parameter, tsc.subParameter = param.split("-", 1)
            except:
                tsc.parameter = param
            try:
                tsc.version, tsc.subVersion = ver.split("-", 1)
            except:
                tsc.version = ver
        # Map the CWMS parameter type onto the DSS data type.
        tsc.type = {
            "Total": "PER-CUM",
            "Max": "PER-MAX",
            "Min": "PER-MIN",
            "Const": "INST-VAL",
            "Ave": "PER-AVER",
            "Inst": ("INST-VAL", "INST-CUM")[param.startswith("Precip")]
        }[paramType]
    except:
        # Log the traceback; fall through and return whatever tsc holds
        # (None if construction failed early).
        output(traceback.format_exc())
    return tsc
from hec.heclib.util import HecTime
from hec.heclib.dss import HecDss, DSSPathname
from hec.io import TimeSeriesContainer
from hec.heclib.util import HecTime
import java
import sys
import os

# Write an annual PER-AVER series far in the future (years 3001+) to
# exercise day-granularity times, which can represent dates beyond the
# default minute-granularity range.
fileName = "c:/temp/day_granularity.dss"
if os.path.isfile(fileName):
    os.remove(fileName)  # start from a fresh file
dss = HecDss.open(fileName)

tsc = TimeSeriesContainer()
tsc.fullName = "/test/day_granularity/FLOW/04Sep3000/1YEAR/MODEL/"
tsc.values = range(1, 3000, 1)
# (Fixed: removed an unused `start = HecTime("04Sep3000", "1330")` local
# left over from another example.)
LastYear = 3000
AnnualTimes = []
for x in range(len(tsc.values)):
    LastYear += 1
    # End-of-year timestamp, day granularity.
    hecTime = HecTime('31Dec%04d 2400' % LastYear, HecTime.DAY_GRANULARITY)
    AnnualTimes.append(hecTime.value())
tsc.times = AnnualTimes
tsc.numberValues = len(tsc.values)
tsc.units = "CFS"
tsc.type = "PER-AVER"
tsc.setTimeGranularitySeconds(86400)
dss.put(tsc)
# NOTE(review): fragment -- the enclosing `try:` and the definitions of
# numLocations, locationIds, csvList, num_metadata_lines, numValues and
# my_dss are outside this chunk; the trailing `except` clauses belong to
# that unseen `try:`.
for i in range(0, numLocations):
    print '\n>>>>>>> Start processing ', locationIds[i], '<<<<<<<<<<<<'
    # Column i+1 holds this location's values; rows before
    # num_metadata_lines are header metadata.
    precipitations = []
    for j in range(num_metadata_lines, numValues + num_metadata_lines):
        p = float(csvList[j][i + 1])
        precipitations.append(p)
    print 'Precipitation of ', locationIds[i], precipitations[:10]
    tsc = TimeSeriesContainer()
    tsc.fullName = '//' + locationIds[i].upper() + '/PRECIP-INC//1HOUR/GAGE/'
    print 'Start time : ', csvList[num_metadata_lines][0]
    # First data row supplies the series start time.
    start = HecTime(csvList[num_metadata_lines][0])
    tsc.interval = 60  # in minutes
    times = []
    for value in precipitations:
        times.append(start.value())
        start.add(tsc.interval)
    tsc.times = times
    tsc.values = precipitations
    tsc.numberValues = len(precipitations)
    tsc.units = "MM"
    tsc.type = "PER-CUM"
    my_dss.put(tsc)
except Exception, e:
    MessageBox.showError(' '.join(e.args), "Python Error")
except java.lang.Exception, e:
# Script section: convert the times of each DSS record in sP into formatted
# date strings (dList). NOTE(review): fragment -- sP, dssFile and deepcopy
# are defined earlier in the file.
print '\n#----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------#'
print '#End Process all Precip Data to pList'
print '#----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------#'
print '\n#----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------#'
print '#Begin Process Dates to tList'
print '#----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------#'
print '\nsP: ', sP
# Collect the raw times array of each record.
sT = []
for p in sP:
    s = dssFile.get(p, 1)
    sT.append(s.times)
timesListHec = []
t = HecTime()
dList = []
for p in sT:
    iList = []
    for i in p:
        t.set(i)
        # Formatted date (style 4) for each raw HecTime value.
        h = t.date(4)
        iList.append(deepcopy(h))
    dList.append(iList)
print '\ndList: ', dList
print '\n#----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------#'
print '#End Process Dates to tList'
#Use ListSelection to open manual data entry dialog and save to mydb.dss from hec.script import Plot, MessageBox # from hec.io import TimeSeriesContainer # from hec.io import PairedDataContainer # from hec.hecmath import TimeSeriesMath # from hec.hecmath import PairedDataMath from hec.heclib.dss import HecDss, DSSPathname from hec.dssgui import ListSelection from hec.heclib.util import HecTime import java import sys mw = ListSelection.getMainWindow() mw.setIsInteractive(1, 0) # Turn off popups mw.open(sys.argv[1] + "\\mydb.dss") time = HecTime() time.setCurrent() time.setTime("0800") time.addDays(-5) mw.timeSeriesDataEntry("/GREEN/OAK/FLOW//1DAY/OBS", time.dateAndTime(4)) # mw.finish() # Batch mode only
from hec.script import Plot, MessageBox from hec.heclib.dss import HecDss, HecTimeSeries from hec.io import TimeSeriesContainer from hec.heclib.util import HecTime import java try: myDss = HecDss.open("myFile.dss") tsc = TimeSeriesContainer() tsc.fullName = "/Basin/loc/FLOW/01NOV2002/1Hour//" start = HecTime("01Nov2022", "0800") tsc.interval = 60 flows = [0.0, 2.0, 1.0, 4.0, 3.0, 6.0, 5.0, 8.0, 7.0, 9.0] times = [] for value in flows: times.append(start.value()) start.add(tsc.interval) tsc.times = times tsc.values = flows tsc.numberValues = len(flows) tsc.units = "CFS" tsc.type = "PER-AVER" myDss.put(tsc) except Exception, e: MessageBox.showError(''.join(e.args), "Python Error") finally: myDss.close()
''' The main section to determine is the script is executed within or outside of the CAVI environment ''' # Decide to execute within the CAVI environment if cavi_env: script_name = "{}.py".format(arg2) # Get the watershed name for the slug ws_name = cavistatus.get_watershed().getName() if ws_name is None else ws_name ws_name_slug = re.sub(r'\s+|_', '-', ws_name).lower() tw = cavistatus.get_timewindow() if tw != None: st, et = tw print("Time window: {}".format(tw)) else: raise Exception('No forecast open on Modeling tab to get a timewindow.') st = HecTime(st, HecTime.MINUTE_GRANULARITY) st.showTimeAsBeginningOfDay(True) # Convert start to UTC print('Converting time window to UTC for API request.') ws_tz = cavistatus.get_timezone() HecTime.convertTimeZone(st, ws_tz, TimeZone.getTimeZone('UTC')) et = HecTime(et, HecTime.MINUTE_GRANULARITY) et.showTimeAsBeginningOfDay(True) # Convert end to UTC HecTime.convertTimeZone(et, ws_tz, TimeZone.getTimeZone('UTC')) after = '{}-{:02d}-{:02d}T{:02d}:{:02d}:00Z'.format(st.year(), st.month(), st.day(), st.hour(), st.minute()) before = '{}-{:02d}-{:02d}T{:02d}:{:02d}:00Z'.format(et.year(), et.month(), et.day(), et.hour(), et.minute()) # DSS filename and path if dssfilename is None: dssfilename = 'data.dss' if not dssfilename.endswith('.dss'): dssfilename += '.dss'
# NOTE(review): fragment -- the enclosing loop over `path` (required by the
# `continue` below), plus dss, TS, PD and visited, are defined outside this
# chunk; the final `parms = [...` statement is truncated here.
if (TS or PD):
    flow = dss.get(path, True)
    if (TS and hasattr(flow, 'times')):
        # Blank the D (date) part so each timeseries is visited only once.
        pathArray = path.split('/')
        pathArray[4] = ''
        parsepath = '/'.join(pathArray)
        if (parsepath in visited):
            continue
        else:
            visited[parsepath] = True
        vs = []
        try:
            for i in (range(len(flow.times))):
                t = HecTime()
                t.set(flow.times[i])
                date = str(t.year()) + '-' + str(t.month()) + '-' + str(t.day())
                # NOTE(review): the trailing '}' inside this format string
                # looks accidental -- confirm downstream consumers expect
                # values rendered as e.g. "1.234}".
                val = "%.3f}" % flow.values[i];
                vs.append('""(' + str(date) + ',' + val + ')""')
        except :
            # Any failure discards the partial list.
            vs = []
        v = '"{' + ','.join(vs) + '}"'
        st = HecTime()
        st.set(flow.startTime)
        sd = str(st.year()) + '-' + str(st.month()) + '-' + str(st.day())
        et = HecTime()
        et.set(flow.endTime)
        ed = str(et.year()) + '-' + str(et.month()) + '-' + str(et.day())
        parms = ['"' + path + '"',
def createTemplateTSC(rawDataList):
    """Derive a template TimeSeriesContainer from the raw ESP data list.

    rawDataList rows carry a timestamp string in element 0; timestamps are
    assumed uniform across ESP traces, so callers only need to update the
    pathname and values on the returned container. Values are initialized
    to Constants.UNDEFINED.
    """
    # Initializing HEC java objects
    tsc = TimeSeriesContainer()
    hecStartTime = HecTime()
    hecEndTime = HecTime()

    # Raw timestamp strings, in order.
    times = [row[0] for row in rawDataList]
    hecStartTime.set(times[0])
    hecEndTime.set(times[-1])

    # Convert each timestamp to its HecTime integer value.
    Dates = []
    for stamp in times:
        T = HecTime()
        T.set(stamp)
        Dates.append(T.value())

    # Interval (minutes) between consecutive timestamps.
    DiffBetweenInterval = [a - Dates[i - 1] for i, a in enumerate(Dates)][1:]
    # BUG FIX: the unique-diff list was re-initialized INSIDE the loop, so
    # only the last diff ever survived; initialize it once before the loop
    # so the first (expected) interval is used.
    UniqueList_Minutes = []
    for x in DiffBetweenInterval:
        if x not in UniqueList_Minutes:
            UniqueList_Minutes.append(x)
    interval = UniqueList_Minutes[0]

    # Regular time array spanning the data, closed with the final timestamp.
    hecTimes = list(range(Dates[0], Dates[-1], int(interval)))
    hecTimes.append(Dates[-1])
    interval_hours = int(interval) / 60

    tsc.times = hecTimes
    tsc.values = [Constants.UNDEFINED] * len(times)  # null-data placeholder
    # NOTE(review): interval is stored in hours here -- confirm callers
    # expect hours rather than the usual DSS minutes.
    tsc.interval = interval_hours
    tsc.startTime = (int(hecStartTime.julian()) * 1440) + 1080
    tsc.endTime = (int(hecEndTime.julian()) * 1440) + 1080
    tsc.numberValues = len(times)
    tsc.units = "CFS"
    tsc.type = "PER-AVER"
    tsc.parameter = "FLOW"  # Assuming always want this to be flow
    return tsc
script_name = arg2 # Add rtsutils package to sys.path before importing try: sys.path.append(os.path.join(os.environ['APPDATA'], "rsgis")) from rtsutils import cavistatus, cwmsradar except ImportError, ex: raise # tw = cavistatus.get_timewindow() if tw != None: st, et = tw print("Time window: {}".format(tw)) else: raise Exception('No Forecast open or in "Setup Tab"') cwmsdat = cwmsradar.CwmsRADAR() cwmsdat.begintime = cwmsdat.format_datetime(HecTime(st)) cwmsdat.endtime = cwmsdat.format_datetime(HecTime(et)) cwmsdat.dssfile = os.path.join( cavistatus.get_database_directory(), '{}.dss'.format(cavistatus.get_watershed().getName()) ) cwmsdat.read_config(os.path.join( cavistatus.get_shared_directory(), 'cwms_radar.config' ) ) cwmsdat.set_tsids() # Reading the configutation file defines the lists but they still need to be set cwmsdat.run() MessageBox.showInformation( "Script '{}' done!".format(script_name), "End Process")
def timeWindowMod(runtimeWindow, alternative, computeOptions):
    """Widen the run time window to bracket the breach period.

    Reads the BREACHTRACKER timeseries from the compute DSS file, finds the
    first/last timesteps with a positive value, applies RAS spin-up/tail
    buffers, and returns the (possibly modified) runtimeWindow.
    """
    originalRTW = computeOptions.getRunTimeWindow()
    dssFile = DSS.open(computeOptions.getDssFilename(), originalRTW.getTimeWindowString())
    # pathname for breaches
    twmTSM = TimeSeriesMath(alternative.getTimeSeries())  # assumes this is the mapped input to TWM
    twmPath = twmTSM.getPath().split("/")  # use this for e/f parts
    breachPath = "/".join(["", "", "BREACHTRACKER-TIMESTEPS REMAINING",
                           "TIMESTEPS REMAINING", "", twmPath[5], twmPath[6], ""])
    # find start and end of breach timeseries
    breaches = dssFile.read(breachPath)
    dssFile.done()
    breachTSC = breaches.getData()
    start, end = None, None
    rtwStart = runtimeWindow.getStartTime().value()
    newStart = HecTime()  # keep track of start time that is a valid ResSim timestep
    for t, v in zip(breachTSC.times, breachTSC.values):
        if v > 0:
            if start is None:
                # first non-zero
                start = t
            end = t
        # update until original start time occurs, make sure this is prev.
        # timestep in ResSim; avoids interpolated input on start timestep in RAS
        if t <= rtwStart:
            newStart.set(t)
    # no breach
    if start is None:
        runtimeWindow.setStartTime(newStart)
        return runtimeWindow
    # compare and adjust if needed
    startTime = HecTime()
    startTime.set(start)
    startTime.subtractDays(RAS_START_BUFFER)  # add days to give RAS a little spin up time
    if startTime <= runtimeWindow.getStartTime():
        runtimeWindow.setStartTime(startTime)
    endTime = HecTime()
    endTime.set(end)
    endTime.addDays(RAS_END_BUFFER)  # buffer at end
    if endTime >= runtimeWindow.getEndTime():
        runtimeWindow.setEndTime(endTime)
    alternative.addComputeMessage("New time window set: %s" % runtimeWindow.getTimeWindowString())
    return runtimeWindow
#Make TimeSeriesContainer, add values and times, then put from hec.script import Plot, MessageBox from hec.io import TimeSeriesContainer from hec.heclib.dss import HecDss, DSSPathname from hec.heclib.util import HecTime import java import sys try: myDss = HecDss.open("c:/temp/test.dss") tsc = TimeSeriesContainer() tsc.fullName = "/BASIN/LOC/FLOW//1HOUR/OBS/" start = HecTime("04Sep1996", "1330") tsc.interval = 60 flows = [0.0, 2.0, 1.0, 4.0, 3.0, 6.0, 5.0, 8.0, 7.0, 9.0] times = [] for value in flows: times.append(start.value()) start.add(tsc.interval) tsc.times = times tsc.values = flows tsc.numberValues = len(flows) tsc.units = "CFS" tsc.type = "PER-AVER" myDss.put(tsc) print "Done" except Exception, e: print(e) finally: print "Closing DSS File" myDss.close()
# NOTE(review): fragment -- myDss and csvWriter are created earlier in the
# file, and the final `dtStr = ...` statement is truncated mid-expression
# in this chunk.
flow = myDss.get('//HANWELLA/FLOW//1HOUR/RUN:RUN 1/', 1)
if flow.numberValues == 0:
    MessageBox.showError('No Data', 'Error')
else:
    csvWriter.writerow(['Location Ids', 'Hanwella'])
    csvWriter.writerow(['Time', 'Flow'])
    print flow.values[:1], flow.times[:1]
    print flow.values[-1], flow.times[-1]
    csvList = []
    for i in range(0, flow.numberValues):
        time = HecTime()
        time.set(int(flow.times[i]))
        # Date and clock components as strings.
        d = [
            time.year(),
            '%d' % (time.month(), ),
            '%d' % (time.day(), )
        ]
        t = [
            '%d' % (time.hour(), ),
            '%d' % (time.minute(), ),
            '%d' % (time.second(), )
        ]
        # Clamp the 24:00 rollover so the string stays a valid clock time.
        if (int(t[0]) > 23):
            t[0] = '23'
        dtStr = '-'.join(str(x) for x in d) + ' ' + ':'.join(
from hec.heclib.dss import HecDss
from hec.heclib.util import HecTime
from hec.hecmath import TimeSeriesMath
from hec.io import TimeSeriesContainer
# Need to import the ParseMathExpr module. Update this depending on where you stored
# the ParseMathExpr.py script, and make sure this path is on the sys.path
from NWDJyLib import ParseMathExpr

startTW = HecTime()  # Starting time window
endTW = HecTime()  # Ending time window
startTW.setYearMonthDay(1928, 7, 1, 1440)  # July 1st, 1928 at midnight
endTW.setYearMonthDay(2008, 9, 30, 1440)  # September 30th, 2008 at midnight
# Convert to readable string
startTW = startTW.dateAndTime(4).upper()
endTW = endTW.dateAndTime(4).upper()
# Set up DSS file name and open the file
dssInputFileName = "example_input.dss"
dssInputFile = HecDss.open(dssInputFileName)
variables = ["ARDB", "MCDB"]
dssPaths = [
    "//ARROW LAKES_IN/FLOW-UNREG//1DAY/COMPUTED/",
    "/COLUMBIA/MICA_IN/FLOW-IN//1DAY/NWP/"
]
# Load DSS timeseries into TimeSeriesMath objects and place into a dictionary
inputs = {}
# NOTE(review): chunk ends mid-statement -- the body of this loop is
# outside this view.
for variable, dssPath in zip(variables, dssPaths):
def paramPerPage(config, dssFilePath):
    """ Plot timeseries, 1 location per plot, 1 parameter per page.

    Also adds specified thresholds.

    :param config: dict-like configuration; reads 'output_folder',
        'config_file', 'period' (start/end), 'params', 'site',
        'locations', 'interval', 'version', 'width', 'height' and
        'line' (colour/width/markers) entries.
    :param dssFilePath: path of the DSS file to read datasets from.
    :return: tuple ``(plotted, messages)`` - number of plots exported
        and a list of warning strings for parameters with no data.
    """
    plotted = 0  # Number of plots exported
    messages = []
    outputFolder = tbu.relativeFolder(config['output_folder'], config['config_file'])
    # Time window for the DSS read and for the X-axis limits below.
    minDate = HecTime(config['period']['start'])
    maxDate = HecTime(config['period']['end'])
    dssFile = HecDss.open(dssFilePath, str(minDate), str(maxDate))
    for param, paramConfig in config['params'].iteritems():
        plots = []
        # One DSS pathname per configured location for this parameter.
        dataPaths = [
            '/{}/{}/{}//{}/{}/'.format(config['site'].upper(),
                                       loc.upper(),
                                       param.upper(),
                                       config['interval'].upper(),
                                       config['version'].upper())
            for loc in config['locations']
        ]
        datasets = [dssFile.get(dp) for dp in dataPaths]
        # Drop locations that returned no values in the time window.
        datasets = [d for d in datasets if d.numberValues > 0]
        if not datasets:
            messages.append("No data for parameter '{}'.".format(param))
            continue
        # First pass: build one plot per dataset (one location per plot).
        for dataset in datasets:
            plot = Plot.newPlot(param)
            layout = Plot.newPlotLayout()
            layout.setHasLegend(0)
            vp = layout.addViewport()
            vp.addCurve('Y1', dataset)
            plot.configurePlotLayout(layout)
            plots.append(plot)
        # Format normal data curves
        # Track the min/max auto-scaled Y limits across all plots so every
        # plot for this parameter can share the same Y-axis range later.
        ymin, ymax = float('+inf'), float('-inf')
        for dataset, plot in zip(datasets, plots):
            plot.setPlotTitleText("{0.parameter} at {0.location}".format(dataset))
            plot.setPlotTitleVisible(1)
            # Move the window off-screen while it is being composed.
            plot.setLocation(-10000, -10000)
            plot.setSize(config['width'], config['height'])
            plot.setLegendLabelText(dataset, dataset.location)
            panelProp = plot.getPlotpanel().getProperties()
            panelProp.setViewportSpaceSize(0)
            curve = plot.getCurve(dataset)
            curve.setLineColor('{}, {}, {}'.format(*config['line']['colour']))
            curve.setLineWidth(config['line']['width'])
            if config['line']['markers']:
                curve.setSymbolsVisible(1)
                curve.setSymbolType('Circle')
                curve.setSymbolLineColor('{}, {}, {}'.format(*config['line']['colour']))
                curve.setSymbolFillColor('{}, {}, {}'.format(*config['line']['colour']))
            vp = plot.getViewport(dataset.fullName)
            vp.setMinorGridXVisible(1)
            vp.getAxis('Y1').setLabel(dataset.units)
            if _paramScale(param, config) == 'log':
                # This throws a warning message if y-values <= 0. We can't catch this as an exception.
                vp.setLogarithmic('Y1')
            # Horizontal lines
            thresholds = _get_thresholds(dataset, dssFilePath, config)
            for marker in _thresholdMarkers(thresholds):
                vp.addAxisMarker(marker)
            # Vertical lines
            if _baselinePeriod(dataset.location, config):
                vp.addAxisMarker(_baselineMarker(dataset.location, config))
            ymin = min(ymin, vp.getAxis('Y1').getScaleMin())
            ymax = max(ymax, vp.getAxis('Y1').getScaleMax())
        # Second pass: show, harmonise axis limits, export and close.
        for dataset, plot in zip(datasets, plots):
            plot.showPlot()
            plot.setSize(config['width'], config['height'])
            # Set all y-axes same limits
            vp = plot.getViewports()[0]
            vp.getAxis('Y1').setScaleLimits(ymin, ymax)
            vp.getAxis('X1').setScaleLimits(minDate.value(), maxDate.value())
            plot.saveToJpeg(os.path.join(outputFolder,
                            "TH plot-{0.parameter}-{0.version}-{0.location}"
                            .format(dataset)), 95)
            plot.close()
            plotted += 1
    dssFile.done()
    return plotted, messages
# MessageBox.showPlain("%s" %end_time,"Test")

# Warn the user up front if the expected RAS output file is missing;
# HecDss.open() would otherwise fail with a far less helpful error.
if not os.path.isfile(dss_filename):
    MessageBox.showError(
        "DSS was unable to open the RAS DSS file. \n The filename provided is: %s \n Please check to see that your RAS project is exporting data to this file!" % (dss_filename),
        "DSS unable to open file!")

dss_file = HecDss.open(dss_filename, start_time, end_time)
try:
    # Release the DSS file handle even if the read raises
    # (the original only called done() on a successful read).
    try:
        math = dss_file.read(pathname)
    finally:
        dss_file.done()

    tsc = math.getData()
    values = tsc.values
    times = tsc.times

    # Convert each integer time ordinate to a readable date/time string.
    time_str = HecTime()
    time_list = []
    for t in times:
        time_str.set(t)
        time_list.append(HecTime.dateAndTime(time_str))

    # Ensure the output file is closed even if a write fails.
    text_file = open(output_filename, "w")
    try:
        text_file.write("Data: \n %s \n" % values)
        text_file.write("Time String: \n %s \n" % time_list)
        text_file.write("Time Minutes: \n %s" % times)
    finally:
        text_file.close()
except:
    # NOTE(review): bare except kept deliberately - under Jython it also
    # catches Java exceptions raised by the DSS library, which
    # 'except Exception' would miss. Best-effort marker file on failure.
    text_file = open(output_filename, "w")
    text_file.write("NO DATA")
    text_file.close()