def writeAtiMelt(selectedList, dList, aList, mList):
    """Write ATI and cumulative melt (MELT-CUM) records to DSS.

    For each selected pathname, builds two daily time series sharing the
    same time axis and stores them via the module-level dssFile handle.
    """
    for pathName, dates, atiValues, meltValues in zip(selectedList, dList, aList, mList):
        parts = pathName.split("/")
        aPart = parts[1]
        bPart = parts[2]

        # Convert the date stamps once; both records share this time axis.
        times = []
        clock = HecTime()
        for stamp in dates:
            clock.set(stamp)
            times.append(clock.value())

        # ATI record: F-part fixed to 'CALC', interval hard coded as
        # 1Day = 1440 minutes.
        tsATI = TimeSeriesContainer()
        tsATI.watershed = aPart
        tsATI.location = bPart
        tsATI.parameter = 'ATI'
        tsATI.version = 'CALC'
        tsATI.interval = 1440
        tsATI.fullName = '/%s/%s/%s//1DAY/%s/' % (aPart, bPart, 'ATI', 'CALC')
        tsATI.values = atiValues
        tsATI.times = times
        tsATI.startTime = times[0]
        tsATI.endTime = times[-1]
        tsATI.numberValues = len(atiValues)
        tsATI.units = 'DEGF-DAY'
        tsATI.type = 'INST-VAL'
        dssFile.put(tsATI)

        # Cumulative melt record on the identical time axis.
        tsMelt = TimeSeriesContainer()
        tsMelt.watershed = aPart
        tsMelt.location = bPart
        tsMelt.parameter = 'MELT-CUM'
        tsMelt.version = 'CALC'
        tsMelt.interval = 1440
        tsMelt.fullName = '/%s/%s/%s//1DAY/%s/' % (aPart, bPart, 'MELT-CUM', 'CALC')
        tsMelt.values = meltValues
        tsMelt.times = times
        tsMelt.startTime = times[0]
        tsMelt.endTime = times[-1]
        tsMelt.numberValues = len(meltValues)
        tsMelt.units = 'IN'
        tsMelt.type = 'INST-VAL'
        dssFile.put(tsMelt)
def _write_dss(input_):
    """Write a single time series described by the dss handler input dict
    to a DSS file, replacing any existing file at the target path.
    """
    tsc = TimeSeriesContainer()
    # Pull the required fields; any missing key is fatal.
    try:
        tsc.fullName = input_['fullname']
        tsc.interval = input_['interval']
        tsc.units = input_['units']
        tsc.type = input_['dsstype']
        data = input_['data']
        filepath = input_['filepath']
    except KeyError:
        _logger.exception('Incomplete data on the dss handler input file!')
        _logger.error('Exiting.')
        exit(1)
    _logger.debug('filepath: %s', filepath)

    # Build parallel time/value lists in chronological key order.
    times = []
    values = []
    for timestamp, value in sorted(data.viewitems()):
        hecTime = HecTime(timestamp.strftime('%d%b%Y'), timestamp.strftime('%H%M'))
        times.append(hecTime.value())
        values.append(value)

    tsc.times = times
    tsc.values = values
    tsc.numberValues = len(values)
    _logger.debug('tsc.times: %s', tsc.times)
    _logger.debug('tsc.values: %s', tsc.values)

    # Remove a stale output file if present; deletion failure is non-fatal.
    if op.isfile(filepath):
        _logger.warning('Deleting old file!')
        try:
            os.remove(filepath)
        except OSError:
            _logger.warning('Warning! Deletion of old file failed.')

    # Write the new dss file and release the handle.
    dss_file = HecDss.open(filepath)
    dss_file.put(tsc)
    dss_file.done()
def createTemplateTSC(rawDataList):
    """Derive a template TimeSeriesContainer from a raw ESP data list.

    Only the pathname and values need updating afterwards - timestamps are
    assumed uniform across each ESP trace.

    rawDataList -- sequence of records whose first element is a timestamp
                   string that HecTime can parse.
    Returns a TimeSeriesContainer with times, interval, start/end times and
    undefined placeholder values filled in.
    """
    # Initialize HEC java objects.
    tsc = TimeSeriesContainer()
    hecStartTime = HecTime()
    hecEndTime = HecTime()

    # Raw timestamp strings, then their HecTime minute values.
    times = [record[0] for record in rawDataList]
    hecStartTime.set(times[0])
    hecEndTime.set(times[-1])
    # The formatting of these times might need to be adjusted later.
    dates = []
    for stamp in times:
        t = HecTime()
        t.set(stamp)
        dates.append(t.value())

    # Consecutive differences between timestamps, in minutes.
    diffs = [a - dates[i - 1] for i, a in enumerate(dates)][1:]
    # BUG FIX: the unique list was previously re-initialized inside the
    # loop, so it only ever held the *last* difference. Initialize it once
    # so it accumulates distinct intervals and the first one is used.
    uniqueMinutes = []
    for diff in diffs:
        if diff not in uniqueMinutes:
            uniqueMinutes.append(diff)
    interval = uniqueMinutes[0]

    # Regular time axis from first to last stamp; range() excludes the
    # stop value, so the final timestamp is appended explicitly.
    hecTimes = list(range(dates[0], dates[-1], int(interval)))
    hecTimes.append(dates[-1])
    interval_hours = int(interval) / 60

    tsc.times = hecTimes
    tsc.values = [Constants.UNDEFINED] * len(times)  # null-data placeholder
    tsc.interval = interval_hours
    # 1080 minutes past midnight = 18:00 anchor on the julian day.
    tsc.startTime = (int(hecStartTime.julian()) * 1440) + 1080
    tsc.endTime = (int(hecEndTime.julian()) * 1440) + 1080
    tsc.numberValues = len(times)
    tsc.units = "CFS"
    tsc.type = "PER-AVER"
    tsc.parameter = "FLOW"  # Assuming always want this to be flow
    return tsc
def createTSrecord(dss_filename, pathname, start_time, values, comp_step, data_units):
    """Store a regular-interval INST-VAL time series record in a DSS file.

    Timestamps start at start_time and advance by comp_step minutes, one
    per value.
    """
    tsc = TimeSeriesContainer()
    tsc.fullName = pathname
    tsc.interval = comp_step

    # Generate one timestamp per value, stepping by the interval.
    clock = HecTime()
    clock.set(start_time)
    stamps = []
    for _ in values:
        stamps.append(clock.value())
        clock.add(tsc.interval)

    tsc.values = values
    tsc.times = stamps
    tsc.startTime = stamps[0]
    tsc.endTime = stamps[-1]
    tsc.numberValues = len(values)
    tsc.units = data_units
    tsc.type = "INST-VAL"

    dss_file = HecDss.open(dss_filename)
    dss_file.put(tsc)
    dss_file.done()
def makeTimeSeriesContainer(station, interval, tz, records, decodeInfo):
    """Build a TimeSeriesContainer from (millis, value) records, scaling
    values by the decode factor and formatting times in the configured DSS
    time zone (falling back to the USGS zone).
    """
    global timezones
    sdf = SimpleDateFormat("ddMMMyyyy, HH:mm")
    # Prefer an explicitly configured DSS time zone, caching its lookup.
    if dssTimezone:
        if not timezones["DSS"]:
            timezones["DSS"] = TimeZone.getTimeZone(tzInfo[dssTimezone]["JAVA"])
        sdf.setTimeZone(timezones["DSS"])
    else:
        sdf.setTimeZone(timezones["USGS"])

    dd, decodeInfo = decodeInfo
    cal = Calendar.getInstance()
    hecTime = HecTime()
    tsc = TimeSeriesContainer()
    tsc.interval = interval
    tsc.quality = None
    factor = decodeInfo["DSS_FACTOR"]

    times = []
    values = []
    for millis, value in records:
        cal.setTimeInMillis(millis)
        hecTime.set(sdf.format(cal.getTime()))
        times.append(hecTime.value())
        # Non-numeric samples become the DSS undefined sentinel.
        try:
            values.append(float(value) * factor)
        except:
            values.append(Constants.UNDEFINED)

    tsc.times = times
    tsc.values = values
    tsc.startTime = times[0]
    tsc.endTime = times[-1]
    tsc.numberValues = len(values)
    tsc.timeZoneID = sdf.getTimeZone().getID()
    tsc.timeZoneRawOffset = sdf.getTimeZone().getRawOffset()
    return tsc
def paramPerPage(config, dssFilePath):
    """ Plot timeseries, 1 location per plot, 1 parameter per page. Also adds specified thresholds.

    Returns (plotted, messages): the number of JPEG plots exported and a
    list of warnings for parameters that had no data.
    """
    plotted = 0  # Number of plots exported
    messages = []
    outputFolder = tbu.relativeFolder(config['output_folder'], config['config_file'])
    # Restrict the DSS session to the configured reporting period.
    minDate = HecTime(config['period']['start'])
    maxDate = HecTime(config['period']['end'])
    dssFile = HecDss.open(dssFilePath, str(minDate), str(maxDate))
    for param, paramConfig in config['params'].iteritems():
        plots = []
        # One DSS pathname per configured location for this parameter.
        dataPaths = [
            '/{}/{}/{}//{}/{}/'.format(config['site'].upper(),
                                       loc.upper(),
                                       param.upper(),
                                       config['interval'].upper(),
                                       config['version'].upper())
            for loc in config['locations']
        ]
        datasets = [dssFile.get(dp) for dp in dataPaths]
        datasets = [d for d in datasets if d.numberValues > 0]  # drop empty records
        if not datasets:
            messages.append("No data for parameter '{}'.".format(param))
            continue
        # Build one plot (single viewport, single curve) per dataset.
        for dataset in datasets:
            plot = Plot.newPlot(param)
            layout = Plot.newPlotLayout()
            layout.setHasLegend(0)
            vp = layout.addViewport()
            vp.addCurve('Y1', dataset)
            plot.configurePlotLayout(layout)
            plots.append(plot)
        # Format normal data curves; track combined y-range across plots.
        ymin, ymax = float('+inf'), float('-inf')
        for dataset, plot in zip(datasets, plots):
            plot.setPlotTitleText("{0.parameter} at {0.location}".format(dataset))
            plot.setPlotTitleVisible(1)
            # Move the window off-screen while styling.
            plot.setLocation(-10000, -10000)
            plot.setSize(config['width'], config['height'])
            plot.setLegendLabelText(dataset, dataset.location)
            panelProp = plot.getPlotpanel().getProperties()
            panelProp.setViewportSpaceSize(0)
            curve = plot.getCurve(dataset)
            curve.setLineColor('{}, {}, {}'.format(*config['line']['colour']))
            curve.setLineWidth(config['line']['width'])
            if config['line']['markers']:
                curve.setSymbolsVisible(1)
                curve.setSymbolType('Circle')
                curve.setSymbolLineColor('{}, {}, {}'.format(*config['line']['colour']))
                curve.setSymbolFillColor('{}, {}, {}'.format(*config['line']['colour']))
            vp = plot.getViewport(dataset.fullName)
            vp.setMinorGridXVisible(1)
            vp.getAxis('Y1').setLabel(dataset.units)
            if _paramScale(param, config) == 'log':
                vp.setLogarithmic('Y1')  # This throws a warning message if y-values <= 0. We can't catch this as an exception.
            # Horizontal lines
            thresholds = _get_thresholds(dataset, dssFilePath, config)
            for marker in _thresholdMarkers(thresholds):
                vp.addAxisMarker(marker)
            # Vertical lines
            if _baselinePeriod(dataset.location, config):
                vp.addAxisMarker(_baselineMarker(dataset.location, config))
            # Accumulate the widest auto-scaled y-range seen so far.
            ymin = min(ymin, vp.getAxis('Y1').getScaleMin())
            ymax = max(ymax, vp.getAxis('Y1').getScaleMax())
        for dataset, plot in zip(datasets, plots):
            plot.showPlot()
            plot.setSize(config['width'], config['height'])
            # Set all y-axes same limits
            vp = plot.getViewports()[0]
            vp.getAxis('Y1').setScaleLimits(ymin, ymax)
            vp.getAxis('X1').setScaleLimits(minDate.value(), maxDate.value())
            plot.saveToJpeg(os.path.join(outputFolder,
                                         "TH plot-{0.parameter}-{0.version}-{0.location}".format(dataset)),
                            95)
            plot.close()
            plotted += 1
    dssFile.done()
    return plotted, messages
def onePerParam(config, dssFilePath):
    """Plot timeseries with all locations on one plot, one plot per parameter.

    Returns (plotted, messages): the number of JPEG plots exported and a
    list of warnings for parameters that had no data.
    """
    plotted = 0  # Number of plots exported
    messages = []
    outputFolder = tbu.relativeFolder(config['output_folder'], config['config_file'])
    # Restrict the DSS session to the configured reporting period.
    minDate = HecTime(config['period']['start'])
    maxDate = HecTime(config['period']['end'])
    dssFile = HecDss.open(dssFilePath, str(minDate), str(maxDate))
    colours = _coloursByLocation(config)
    for param, paramConfig in config['params'].iteritems():
        plot = Plot.newPlot()
        # One DSS pathname per configured location for this parameter.
        dataPaths = [
            "/%s/%s/%s//%s/%s/" % (config['site'].upper(),
                                   location.upper(),
                                   param.upper(),
                                   config['interval'].upper(),
                                   config['version'].upper())
            for location in config['locations']
        ]
        datasets = [dssFile.get(p) for p in dataPaths]
        datasets = [d for d in datasets if d.numberValues > 0]  # drop empty records
        if not datasets:
            messages.append("No data for parameter '%s'." % param)
            continue
        map(plot.addData, datasets)
        plot.showPlot()
        plot.setPlotTitleText(param)
        plot.setPlotTitleVisible(1)
        plot.setSize(int(config['width']), int(config['height']))
        # We can only access labels and curves at this point
        map(lambda d: plot.getLegendLabel(d).setText(d.location), datasets)
        # Style curves
        for dataset in datasets:
            curve = plot.getCurve(dataset)
            curve.setLineColor('{}, {}, {}'.format(*colours[dataset.location]))
            curve.setLineWidth(config['line']['width'])
            if config['line']['markers']:
                curve.setSymbolsVisible(1)
                curve.setSymbolType('Circle')
                curve.setSymbolLineColor('{}, {}, {}'.format(*colours[dataset.location]))
                curve.setSymbolFillColor('{}, {}, {}'.format(*colours[dataset.location]))
        # Axes scales
        units = set(ds.units for ds in datasets)
        for vp_index, unit in enumerate(units):  # 1 viewport per distinct unit
            viewport = plot.getViewport(vp_index)
            viewport.getAxis("X1").setScaleLimits(minDate.value(), maxDate.value())
            viewport.getAxis("Y1").setLabel(unit)
            viewport.setMinorGridXVisible(1)
            viewport.setMinorGridYVisible(1)
            if paramConfig:
                if paramConfig['scale'].lower() == 'log':
                    viewport.setLogarithmic('Y1')  # This throws a warning message if y-values <= 0. We can't catch this as an exception.
            # Horizontal threshold lines
            thresholds = _get_thresholds(datasets[0], dssFilePath, config)
            for marker in _thresholdMarkers(thresholds):
                viewport.addAxisMarker(marker)
        # Export plot
        plot.saveToJpeg(os.path.join(outputFolder, param + "-" + config['version']), 95)
        plot.close()
        plotted += 1
    dssFile.done()
    return plotted, messages
num_locations = len(csv_list[0]) - 1 num_values = len(csv_list) - NUM_METADATA_LINES # Ignore Metadata location_ids = csv_list[1][1:] for i in range(0, num_locations): precipitations = [] for j in range(NUM_METADATA_LINES, num_values + NUM_METADATA_LINES): p = float(csv_list[j][i + 1]) precipitations.append(p) tsc = TimeSeriesContainer() # tsc.fullName = "/BASIN/LOC/FLOW//1HOUR/OBS/" # tsc.fullName = '//' + locationIds[i].upper() + '/PRECIP-INC//1DAY/GAGE/' tsc.fullName = '//' + location_ids[i].upper( ) + '/PRECIP-INC//1HOUR/GAGE/' start = HecTime(csv_list[NUM_METADATA_LINES][0]) tsc.interval = 60 # in minutes times = [] for value in precipitations: times.append(start.value()) start.add(tsc.interval) tsc.times = times tsc.values = precipitations tsc.numberValues = len(precipitations) tsc.units = "MM" tsc.type = "PER-CUM" converted_dss.put(tsc) finally: converted_dss.done()
def makeTimeSeriesContainer(tsData, timeZone, pathname=None):
    '''
    Construct a TimeSeriesContainer object from a python dictionary that was
    created from a single "time-series" returned from the CWMS RADAR web
    service

    Returns the populated TimeSeriesContainer, or None if any step failed
    (failures are logged via output() rather than raised).
    '''
    #---------------#
    # initial setup #
    #---------------#
    tsc = None
    try:
        tz = TimeZone.getTimeZone(timeZone)
        sdf8601 = SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ssXXX")
        sdfHecTime = SimpleDateFormat("ddMMMyyyy, HH:mm")
        cal = Calendar.getInstance()
        # All parsing/formatting happens in the requested time zone.
        for obj in sdf8601, sdfHecTime, cal:
            obj.setTimeZone(tz)
        ht = HecTime()
        times, values, qualities = [], [], []
        #------------------#
        # process the data #
        #------------------#
        if tsData.has_key("regular-interval-values"):
            #----------------------------------------#
            # regular time series (a lot to process) #
            #----------------------------------------#
            rts = tsData["regular-interval-values"]
            intvlStr = rts["interval"]
            unit = rts["unit"].split()[0]
            # ISO-8601 durations: "PT..." is a time-based interval
            # (minutes/hours), "P..." is date-based (days/months/years).
            if intvlStr.startswith("PT"):
                intvlNum, intvlUnit = int(intvlStr[2:-1]), intvlStr[-1]
                try:
                    # factor converts the interval count to minutes;
                    # field is the Calendar unit used to step timestamps.
                    factor, field = {
                        "M": (1, Calendar.MINUTE),
                        "H": (60, Calendar.HOUR_OF_DAY)
                    }[intvlUnit]
                except KeyError:
                    raise Exception("Unexpected interval: %s" % intvlStr)
            else:
                intvlNum, intvlUnit = int(intvlStr[1:-1]), intvlStr[-1]
                try:
                    # Nominal minute counts (365-day year, 30-day month).
                    factor, field = {
                        "Y": (1440 * 365, Calendar.YEAR),
                        "M": (1440 * 30, Calendar.MONTH),
                        "D": (1440, Calendar.DATE)
                    }[intvlUnit]
                except KeyError:
                    raise Exception("Unexpected interval: %s" % intvlStr)
            intvl = intvlNum * factor  # interval in minutes for the TSC
            segmentCount = rts["segment-count"]
            cal.setTimeInMillis(
                sdf8601.parse(rts["segments"][0]["first-time"]).getTime())
            for i in range(segmentCount):
                # Copy each segment's values, stepping the calendar by
                # one interval per value.
                for j in range(rts["segments"][i]["value-count"]):
                    ht.set(sdfHecTime.format(cal.getTimeInMillis()))
                    v, q = rts["segments"][i]["values"][j]
                    times.append(ht.value())
                    values.append(v)
                    qualities.append(q)
                    cal.add(field, intvlNum)
                if i < segmentCount - 1:
                    # Pad the gap to the next segment with UNDEFINED values
                    # so the series stays regular.
                    nextBegin = sdf8601.parse(
                        rts["segments"][i + 1]["first-time"]).getTime()
                    time = cal.getTimeInMillis()
                    while time < nextBegin:
                        ht.set(sdfHecTime.format(time))
                        times.append(ht.value())
                        values.append(Constants.UNDEFINED)
                        qualities.append(0)
                        cal.add(field, intvlNum)
                        time = cal.getTimeInMillis()
        elif tsData.has_key("irregular-interval-values"):
            #------------------------------#
            # irregular time series (easy) #
            #------------------------------#
            its = tsData["irregular-interval-values"]
            unit = its["unit"].split()[0]
            intvl = 0  # irregular series carry a zero interval
            for t, v, q in its["values"]:
                ht.set(sdfHecTime.format(sdf8601.parse(t)))
                times.append(ht.value())
                values.append(v)
                qualities.append(q)
        else:
            raise Exception("Time series has no values")
        #--------------------------------------------------#
        # code common to regular and irregular time series #
        #--------------------------------------------------#
        tsc = TimeSeriesContainer()
        tsc.times = times
        tsc.values = values
        tsc.quality = qualities
        tsc.numberValues = len(times)
        tsc.startTime = times[0]
        tsc.endTime = times[-1]
        tsc.interval = intvl
        tsc.units = unit
        tsc.timeZoneID = timeZone
        tsc.timeZoneRawOffset = tz.getRawOffset()
        # CWMS time series id: location.parameter.type.interval.duration.version
        name = tsData["name"]
        loc, param, paramType, intv, dur, ver = name.split(".")
        if pathname:
            #---------------------------#
            # use pathname if specified #
            #---------------------------#
            A, B, C, D, E, F = 1, 2, 3, 4, 5, 6
            parts = pathname.split("/")
            parts[D] = ''  # blank the D (date) part
            tsc.fullName = "/".join(parts)
            tsc.watershed = parts[A]
            # Each part may carry a "-sub" suffix; bare except keeps the
            # whole value when there is no hyphen to split on.
            try:
                tsc.location, tsc.subLocation = parts[B].split("-", 1)
            except:
                tsc.location = parts[B]
            try:
                tsc.parameter, tsc.subParameter = parts[C].split("-", 1)
            except:
                tsc.parameter = parts[C]
            try:
                tsc.version, tsc.subVersion = parts[F].split("-", 1)
            except:
                tsc.version = parts[F]
        else:
            #--------------------------------------#
            # no pathname, use CWMS time series id #
            #--------------------------------------#
            try:
                tsc.location, tsc.subLocation = loc.split("-", 1)
            except:
                tsc.location = loc
            try:
                tsc.parameter, tsc.subParameter = param.split("-", 1)
            except:
                tsc.parameter = param
            try:
                tsc.version, tsc.subVersion = ver.split("-", 1)
            except:
                tsc.version = ver
        # Map the CWMS parameter type onto a DSS data type; instantaneous
        # precipitation is treated as cumulative.
        tsc.type = {
            "Total": "PER-CUM",
            "Max": "PER-MAX",
            "Min": "PER-MIN",
            "Const": "INST-VAL",
            "Ave": "PER-AVER",
            "Inst": ("INST-VAL", "INST-CUM")[param.startswith("Precip")]
        }[paramType]
    except:
        # Log and fall through; caller receives None on failure.
        output(traceback.format_exc())
    return tsc
# Script fragment: write a small observed (OBS) hourly stage series to
# myFile.dss.
# NOTE(review): assumes watershed, loc and param are defined earlier in the
# script — confirm against the surrounding code.
ver = "OBS"
startTime = "12Oct2003 0100"
values = [12.36, 12.37, 12.42, 12.55, 12.51, 12.47, 12.43, 12.39]
hecTime = HecTime()
tsc = TimeSeriesContainer()
tsc.watershed = watershed
tsc.location = loc
tsc.parameter = param
tsc.version = ver
tsc.fullName = "/%s/%s/%s//1HOUR/%s/" % (watershed, loc, param, ver)
tsc.interval = 60  # minutes, matching the 1HOUR E-part
hecTime.set(startTime)
# One timestamp per value, advancing by the interval.
times=[]
for value in values:
    times.append(hecTime.value())
    hecTime.add(tsc.interval)
tsc.values = values
tsc.times = times
tsc.startTime = times[0]
tsc.endTime = times[-1]
tsc.numberValues = len(values)
tsc.units = "FEET"
tsc.type = "INST-VAL"
# Write the record and release the file handle.
dssFile = HecDss.open("myFile.dss")
dssFile.put(tsc)
dssFile.done()
def onePerParam(config, dssFilePath): plotted = 0 # Number of plots exported messages = [] outputFolder = tbu.relativeFolder(config['output_folder'], dssFilePath) dssFile = HecDss.open(dssFilePath) minDate = HecTime(config['period']['start']) maxDate = HecTime(config['period']['end']) colours = _coloursByLocation(config) for param, paramConfig in config['params'].iteritems(): thePlot = Plot.newPlot() dataPaths = [ "/%s/%s/%s//%s/%s/" % (config['site'].upper(), location.upper(), param.upper(), config['interval'].upper(), config['version'].upper()) for location in config['locations'] ] datasets = [dssFile.get(p, 1) for p in dataPaths] datasets = [d for d in datasets if d.numberValues > 0] if not datasets: messages.append("No data for parameter '%s'." % param) continue map(thePlot.addData, datasets) thePlot.showPlot() thePlot.setPlotTitleText(param) thePlot.setPlotTitleVisible(1) thePlot.setSize(int(config['width']), int(config['height'])) # We can only access labels and curves at this point map(lambda d: thePlot.getLegendLabel(d).setText(d.location), datasets) for dataset in datasets: curve = thePlot.getCurve(dataset) curve.setLineColor("%s, %s, %s" % tuple(colours[dataset.location])) curve.setLineWidth(config['line']['width']) units = set(ds.units for ds in datasets) for vp_index, unit in enumerate(units): # 1 viewport per distinct unit viewport = thePlot.getViewport(vp_index) viewport.getAxis("X1").setScaleLimits(minDate.value(), maxDate.value()) viewport.getAxis("Y1").setLabel(unit) viewport.setMinorGridXVisible(1) viewport.setMinorGridYVisible(1) if paramConfig: if paramConfig['scale'].lower() == 'log': viewport.setLogarithmic('Y1') # This throws a warning message if y-values <= 0. We can't catch this as an exception. thePlot.saveToJpeg(os.path.join(outputFolder, config['version'] + "_" + param), 95) thePlot.close() plotted += 1 dssFile.done() return plotted, messages
from hec.heclib.util import HecTime
from hec.heclib.dss import HecDss, DSSPathname
from hec.io import TimeSeriesContainer
import java
import sys
import os

# Demonstration script: write an annual (1YEAR) PER-AVER series whose dates
# run past year 3000, which requires day granularity in both the HecTime
# values and the container.

fileName = "c:/temp/day_granularity.dss"
# Start from a fresh DSS file each run.
if os.path.isfile(fileName):
    os.remove(fileName)

dss = HecDss.open(fileName)
try:
    tsc = TimeSeriesContainer()
    tsc.fullName = "/test/day_granularity/FLOW/04Sep3000/1YEAR/MODEL/"
    tsc.values = range(1, 3000, 1)

    # One timestamp per value: 31 Dec 24:00 of each successive year,
    # constructed with day granularity so years past 3000 remain valid.
    lastYear = 3000
    annualTimes = []
    for _ in tsc.values:
        lastYear += 1
        hecTime = HecTime('31Dec%04d 2400' % lastYear, HecTime.DAY_GRANULARITY)
        annualTimes.append(hecTime.value())

    tsc.times = annualTimes
    tsc.numberValues = len(tsc.values)
    tsc.units = "CFS"
    tsc.type = "PER-AVER"
    tsc.setTimeGranularitySeconds(86400)  # times are day-granularity
    dss.put(tsc)
finally:
    # BUG FIX: the DSS handle was never closed; always release it so the
    # file is flushed even if put() fails.
    dss.done()