Example 1
def do_mb():
    dir = 'Z:/calibration/'
    type = 'FLOW'
    stime = '01JAN2008 0000'
    etime = '31DEC2008 2400'
    fileobs = dir + 'observed/observed_flow_stage_for_compare_plots.dss'
    stations = ['RSAC128', 'SSS', 'SUT', 'RSAC155']
    direction = [-1, -1, -1, 1]
    dss = HecDss.open(fileobs, True)
    dss.setTimeWindow(stime, etime)

    plot = newPlot('Mass Balance Components (SAC)')
    mass_balance = None
    for i in range(len(stations)):
        sta = stations[i]
        sign = direction[i]  # +1 adds the station's flow, -1 subtracts it
        data = get_matching(dss, 'A=%s C=%s E=15MIN' % (sta, type))
        if data is None:
            data = get_matching(dss, 'A=%s C=%s E=1HOUR' % (sta, type))
        data = TimeSeriesMath(data).transformTimeSeries("1HOUR", "", "AVE", 0)
        data = TimeSeriesMath(data.data)
        if sign == -1:
            data = data.negative()
        plot.addData(data.data)
        if mass_balance is None:
            mass_balance = data
        else:
            mass_balance = mass_balance.add(data)
    plot.showPlot()
    return mass_balance
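The helper get_matching used here (and again in Examples 4 and 11) is not defined in these listings. A minimal sketch, assuming it simply returns the first record whose pathname matches the given DSS part filter, and None when nothing matches:

def get_matching(dss, path_filter):
    # Hypothetical helper: look up catalogued pathnames matching a part filter
    # such as 'A=RSAC128 C=FLOW E=15MIN' and read the first hit.
    paths = dss.getCatalogedPathnames(path_filter)
    if len(paths) == 0:
        return None
    return dss.get(paths[0])  # honors the time window set via setTimeWindow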
Example 2
def create_regression_line(drun, dobs, legend):
    drunm = TimeSeriesMath(drun)
    dobsm = TimeSeriesMath(dobs)
    paired = dobsm.generateDataPairs(drunm, False)
    pairedData = paired.data
    pairedData.fullName = legend
    reg = dobsm.multipleRegression([drunm], HecMath.UNDEFINED,
                                   HecMath.UNDEFINED)
    regData = reg.data
    a = regData.yOrdinates[0][1]
    b = regData.yOrdinates[0][0]
    regData.fullName = "//REGRESSION LINE////GENERATED/"
    maxVal = drunm.max()
    minVal = drunm.min()
    regData.xOrdinates[0] = a * minVal + b
    regData.xOrdinates[1] = a * maxVal + b
    regData.yOrdinates[0][0] = minVal
    regData.yOrdinates[0][1] = maxVal
    regData.yunits = pairedData.yunits
    regData.xunits = pairedData.xunits
    regData.xtype = pairedData.xtype
    regData.ytype = pairedData.ytype
    regData.xparameter = pairedData.xparameter
    regData.yparameter = pairedData.yparameter
    regData.location = pairedData.location
    regData.version = 'LINEAR REGRESSION'
    return regData, pairedData
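A hypothetical way to use the two returned containers, plotted as a scatter plus fitted line (drun and dobs are assumed to be TimeSeriesContainers already read from DSS; newPlot is the same plotting helper as in Example 1):

regData, pairedData = create_regression_line(drun, dobs, 'RUN1 vs OBSERVED')
scatter = newPlot('Regression')          # title is illustrative
scatter.addData(pairedData)
scatter.addData(regData)
scatter.showPlot()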
Example 3
def calculate_rms(run, obs):
    runt = TimeSeriesMath(run)
    obst = TimeSeriesMath(obs)
    tavg = obst.abs().sum() / obst.numberValidValues()
    diff = runt.subtract(obst)
    return math.fabs(
        math.sqrt(diff.multiply(diff).sum() / diff.numberValidValues()) /
        tavg) * math.log(tavg)
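The value returned above is the RMS difference between run and obs, divided by the mean absolute observed value and then scaled by the natural log of that mean. The same arithmetic in plain Python, with made-up numbers:

import math
run = [10.0, 12.0, 9.0]
obs = [11.0, 11.0, 10.0]
tavg = sum([abs(v) for v in obs]) / len(obs)
rmse = math.sqrt(sum([(r - o) ** 2 for r, o in zip(run, obs)]) / len(obs))
print math.fabs(rmse / tavg) * math.log(tavg)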
Example 4
def do_compare(paths, dssfiles, title, doAverage=False, diffToFirst=False):
    data = []
    for i in range(len(paths)):
        d = get_matching(dssfiles[i], paths[i])
        if doAverage: d = average(d, "1DAY")
        data.append(d)
    if diffToFirst:
        for i in range(1, len(paths)):
            diff = TimeSeriesMath(data[i]).subtract(TimeSeriesMath(data[0]))
            diff.data.location = data[i].location + '-DIFF'
            data.append(diff.data)
    plot(data, title)
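A hypothetical call, assuming dssfiles holds already-opened HecDss handles and plot is a plotting helper defined elsewhere (the file names and pathname filters below are illustrative):

obs = HecDss.open('Z:/calibration/observed/observed_flow_stage_for_compare_plots.dss', True)
run1 = HecDss.open('Z:/calibration/run1.dss', True)   # illustrative file name
do_compare(['A=RSAC155 C=FLOW E=1HOUR', 'B=RSAC155 C=FLOW'],
           [obs, run1],
           'Observed vs Run 1 at RSAC155',
           doAverage=True, diffToFirst=True)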
Example 5
def ts_normalize(data):
    """
    Normalize time series by dividing by data's mean
    """
    datan = TimeSeriesMath(data).divide(TimeSeriesMath(data).mean())
    datan.data.fullName = datan.data.fullName + "-NORMED"
    return datan.data
Example 6
def ts_period_operation(data, interval="1DAY", operation_type="AVE"):
    """
    transforms the time series using a period operation with
    given interval (1DAY (default), 1HOUR, etc) and
    given operation type (AVE (default), MAX, MIN)
    """
    tdata = TimeSeriesMath(data).transformTimeSeries(interval, None,
                                                     operation_type, 0)
    tdata.data.fullName = tdata.data.fullName + operation_type
    return tdata.data
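Hypothetical usage, assuming dss is an open HecDss handle and the pathname below exists:

tsc = dss.get('/SAC/RSAC155/STAGE//1HOUR/RUN1/')   # illustrative pathname
daily_max = ts_period_operation(tsc, interval='1DAY', operation_type='MAX')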
Example 7
def average(data, average_interval):
    """
    Average the time series over the given interval.
    transformTimeSeries accepts these function types:
     "INT" - Interpolate at end of interval
     "MAX" - Maximum over interval
     "MIN" - Minimum over interval
     "AVE" - Average over interval
     "ACC" - Accumulation over interval
     "ITG" - Integration over interval
     "NUM" - Number of valid data over interval
    """
    filter_type = "AVE"
    return TimeSeriesMath(data).transformTimeSeries(average_interval, None,
                                                    filter_type, 0).data
Example 8
def timeWindowMod(runtimeWindow, alternative, computeOptions):
	originalRTW = computeOptions.getRunTimeWindow()
	
	dssFile = DSS.open(computeOptions.getDssFilename(), originalRTW.getTimeWindowString())
	# pathname for breaches
	twmTSM = TimeSeriesMath(alternative.getTimeSeries()) # assumes this is the mapped input to TWM
	twmPath = twmTSM.getPath().split("/") # use this for e/f parts
	breachPath = "/".join(["", "","BREACHTRACKER-TIMESTEPS REMAINING","TIMESTEPS REMAINING","",twmPath[5], twmPath[6], ""])

	# find start and end of breach timeseries
	breaches = dssFile.read(breachPath)
	dssFile.done()
	breachTSC = breaches.getData()
	
	start, end = None, None
	rtwStart = runtimeWindow.getStartTime().value()
	newStart = HecTime() # keep track of start time that is a valid ResSim timestep
	for t,v in zip(breachTSC.times, breachTSC.values):
		if v > 0:
			if start is None: # first non-zero
				start = t
			end = t
		# update until original start time occurs, make sure this is prev. timestep in ResSim
		# avoids interpolated input on start timestep in RAS
		if t <= rtwStart:
			newStart.set(t)

	# no breach
	if start is None: 
		runtimeWindow.setStartTime(newStart)
		return runtimeWindow

	# compare and adjust if needed
	startTime = HecTime()
	startTime.set(start)
	startTime.subtractDays(RAS_START_BUFFER) # back the start up a few days to give RAS a little spin-up time
	if startTime <= runtimeWindow.getStartTime():
		runtimeWindow.setStartTime(startTime)
		
	endTime = HecTime()
	endTime.set(end)
	endTime.addDays(RAS_END_BUFFER) # buffer at end
	if endTime >= runtimeWindow.getEndTime():
		runtimeWindow.setEndTime(endTime)

	alternative.addComputeMessage("New time window set: %s" % runtimeWindow.getTimeWindowString())
	
	return runtimeWindow
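RAS_START_BUFFER and RAS_END_BUFFER are assumed to be module-level constants giving the spin-up days before the first breach and the cushion after the last one; illustrative values only:

RAS_START_BUFFER = 3   # days of RAS spin-up before the first breach (illustrative)
RAS_END_BUFFER = 2     # days of cushion after the last breach (illustrative)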
Example 9
fileName = R"C:\project\DSSVue-Example-Scripts\src\CDEC\Oroville.dss"
cdecName = R"C:\project\DSSVue-Example-Scripts\src\CDEC\Oroville.cdec"

daysBack = 10
#readFromCDEC(fileName,cdecName,daysBack)

dss = HecDss.open(fileName)
storage = dss.read("//OROVILLE/STORAGE//1Day/CDEC/")
inflow = dss.read("//OROVILLE/RESERVOIR INFLOW//1Day/CDEC/")

printMath(storage)
printMath(inflow)
changeInStorage = storage.successiveDifferences().divide(1.98347)  # ac-ft/day to cfs: 1 cfs for one day is ~1.98347 ac-ft
tsc = changeInStorage.getData()
tsc.units = "CFS"
tsc.type = ""
tsc.parameter = "Change in Storage"
changeInStorage = TimeSeriesMath(tsc)
printMath(changeInStorage)

outflow = inflow.subtract(changeInStorage)
tsc = outflow.getData()
tsc.parameter = "OUTFLOW"
outflow = TimeSeriesMath(tsc)
printMath(outflow)
# inflow - outflow = [change in storage]
# outflow = inflow - [change in storage]
dss.done()
sys.stdin.readline()
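Made-up numbers to illustrate the balance in the comments above:

inflow_cfs = 5000.0
storage_change_acft = 3966.94                          # gain over one day
storage_change_cfs = storage_change_acft / 1.98347     # = 2000 cfs
outflow_cfs = inflow_cfs - storage_change_cfs          # = 3000 cfs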
Example 10
    for t,v in zip(newTSC.times, newTSC.values):
        if v > 0:
            if int(v + epsilon) != int(v):
                v = int(v + epsilon)
            elif int(v - epsilon) != int(v):
                v = int(v)
        else:
            if int(v + epsilon) != int(v):
                v = int(v)
            elif int(v - epsilon) != int(v):
                v = int(v - epsilon)
        newTimes.append(t)
        newValues.append(float(v))
    newTSC.times = newTimes
    newTSC.values = newValues
    newTSM = TimeSeriesMath(newTSC)
    return newTSM

else:
    # Return an integer type if the answer is an integer
    if int(value) == value:
        return int(value)

    # If Python made some silly precision error
    # like x.99999999999996, just return x + 1 as an integer
    epsilon = 0.0000000001
    if value > 0:
        if int(value + epsilon) != int(value):
            return int(value + epsilon)
        elif int(value - epsilon) != int(value):
            return int(value)
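The epsilon comparison above snaps a value sitting just below an integer (floating-point noise like x.99999999999996) up to that integer; a standalone illustration:

epsilon = 0.0000000001
value = 2.99999999999996          # float noise just below 3
if int(value + epsilon) != int(value):
    value = int(value + epsilon)  # nudging past the boundary recovers the intended 3
print value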
Example 11
def doall(locations,
          fileobs,
          filerun1,
          filerun2,
          stime,
          etime,
          imageDir='d:/temp/',
          weights=None,
          filter_type="AVE",
          normalize=False):
    obs = HecDss.open(fileobs, True)
    obs.setTimeWindow(stime, etime)
    run1 = HecDss.open(filerun1, True)
    run1.setTimeWindow(stime, etime)
    if filerun2 != None:
        run2 = HecDss.open(filerun2, True)
        run2.setTimeWindow(stime, etime)
    else:
        run2 = None
    rms1 = 0
    rms1_min, rms1_max = 0, 0
    rms2 = 0
    rms2_min, rms2_max = 0, 0
    rmsmap = {}
    #run2=None
    sumwts = 0
    average_interval = None
    for l in locations:
        data1 = get_matching(obs, 'A=%s C=%s E=15MIN' % (l, type))
        if data1 == None:
            data1 = get_matching(obs, 'A=%s C=%s E=1DAY' % (l, type))
        if data1 == None:
            data1 = get_matching(obs, 'A=%s C=%s E=IR-DAY' % (l, type))
        if data1 == None:
            data1 = get_matching(obs, 'A=%s C=%s E=1HOUR' % (l, type))
        drun1 = get_matching(run1, 'B=%s C=%s' % (l, type))
        if run2 != None:
            drun2 = get_matching(run2, 'B=%s C=%s' % (l, type))
        else:
            drun2 = None
        avg_intvl = "1DAY"
        if data1 != None:
            if average_interval != None:
                dobsd = TimeSeriesMath(data1).transformTimeSeries(
                    average_interval, None, filter_type, 0)
            else:
                dobsd = TimeSeriesMath(data1)
            if normalize:
                dobsd = dobsd.divide(TimeSeriesMath(data1).mean())
            dobsm = TimeSeriesMath(data1).transformTimeSeries(
                avg_intvl, None, filter_type, 0)
            dobsm_max = TimeSeriesMath(data1).transformTimeSeries(
                avg_intvl, None, "MAX", 0)
            dobsm_max.data.fullName = dobsm_max.data.fullName + "MAX"
            dobsm_min = TimeSeriesMath(data1).transformTimeSeries(
                avg_intvl, None, "MIN", 0)
            dobsm_min.data.fullName = dobsm_min.data.fullName + "MIN"
            if normalize:
                dobsm = dobsm.divide(TimeSeriesMath(data1).mean())
        if drun1 == None:
            continue
        else:
            if average_interval != None:
                drun1d = TimeSeriesMath(drun1).transformTimeSeries(
                    average_interval, None, filter_type, 0)
            else:
                drun1d = TimeSeriesMath(drun1)
            if normalize:
                drun1d = drun1d.divide(TimeSeriesMath(drun1).mean())
            if drun2 != None:
                if average_interval != None:
                    drun2d = TimeSeriesMath(drun2).transformTimeSeries(
                        average_interval, None, filter_type, 0)
                else:
                    drun2d = TimeSeriesMath(drun2)
                if normalize:
                    drun2d = drun2d.divide(TimeSeriesMath(drun2).mean())
            drun1m = TimeSeriesMath(drun1).transformTimeSeries(
                avg_intvl, None, filter_type, 0)
            drun1m_max = TimeSeriesMath(drun1).transformTimeSeries(
                avg_intvl, None, "MAX", 0)
            drun1m_min = TimeSeriesMath(drun1).transformTimeSeries(
                avg_intvl, None, "MIN", 0)
            if normalize:
                drun1m = drun1m.divide(TimeSeriesMath(drun1).mean())
            if drun2 != None:
                drun2m = TimeSeriesMath(drun2).transformTimeSeries(
                    avg_intvl, None, filter_type, 0)
                drun2m_max = TimeSeriesMath(drun2).transformTimeSeries(
                    avg_intvl, None, "MAX", 0)
                drun2m_min = TimeSeriesMath(drun2).transformTimeSeries(
                    avg_intvl, None, "MIN", 0)
                if normalize:
                    drun2m = drun2m.divide(TimeSeriesMath(drun2).mean())
            else:
                drun2m = None
        if weights != None:
            sumwts = sumwts + weights[l]
            lrms1 = calculate_rms(drun1m.data, dobsm.data) * weights[l]
            lrms1_min = calculate_rms(drun1m_min.data,
                                      dobsm_min.data) * weights[l]
            lrms1_max = calculate_rms(drun1m_max.data,
                                      dobsm_max.data) * weights[l]
            rms1 = rms1 + lrms1
            rms1_min = rms1_min + lrms1_min
            rms1_max = rms1_max + lrms1_max
            lrms2 = calculate_rms(drun2m.data, dobsm.data) * weights[l]
            lrms2_min = calculate_rms(drun2m_min.data,
                                      dobsm_min.data) * weights[l]
            lrms2_max = calculate_rms(drun2m_max.data,
                                      dobsm_max.data) * weights[l]
            rmsmap[
                l] = lrms1, lrms2, lrms1_min, lrms2_min, lrms1_max, lrms2_max
            rms2 = rms2 + lrms2
            rms2_min = rms2_min + lrms2_min
            rms2_max = rms2_max + lrms2_max
        plotd = newPlot("Hist vs New Geom [%s]" % l)
        if data1 != None:
            plotd.addData(dobsd.data)
        plotd.addData(drun1d.data)
        if drun2 != None:
            plotd.addData(drun2d.data)
        plotd.showPlot()
        legend_label = plotd.getLegendLabel(drun1d.data)
        legend_label.setText(legend_label.getText() + " [" +
                             str(int(lrms1 * 100) / 100.) + "," +
                             str(int(lrms1_min * 100) / 100.) + "," +
                             str(int(lrms1_max * 100) / 100.) + "]")
        legend_label = plotd.getLegendLabel(drun2d.data)
        legend_label.setText(legend_label.getText() + " [" +
                             str(int(lrms2 * 100) / 100.) + "," +
                             str(int(lrms2_min * 100) / 100.) + "," +
                             str(int(lrms2_max * 100) / 100.) + "]")
        plotd.setVisible(False)
        xaxis = plotd.getViewport(0).getAxis("x1")
        vmin = xaxis.getViewMin() + 261500.  # hardwired to around july 1, 2008
        xaxis.setViewLimits(vmin, vmin + 10000.)
        if data1 != None:
            pline = plotd.getCurve(dobsd.data)
            pline.setLineVisible(1)
            pline.setLineColor("blue")
            pline.setSymbolType(Symbol.SYMBOL_CIRCLE)
            pline.setSymbolsVisible(0)
            pline.setSymbolSize(3)
            pline.setSymbolSkipCount(0)
            pline.setSymbolFillColor(pline.getLineColorString())
            pline.setSymbolLineColor(pline.getLineColorString())
            g2dPanel = plotd.getPlotpanel()
            g2dPanel.revalidate()
            g2dPanel.paintGfx()
        plotm = newPlot("Hist vs New Geom Monthly [%s]" % l)
        plotm.setSize(1800, 1200)
        if data1 != None:
            plotm.addData(dobsm.data)
        #plotm.addData(dobsm_max.data)
        #plotm.addData(dobsm_min.data)
        plotm.addData(drun1m.data)
        #plotm.addData(drun1m_max.data)
        #plotm.addData(drun1m_min.data)
        if drun2 != None:
            plotm.addData(drun2m.data)
            #plotm.addData(drun2m_max.data)
            #plotm.addData(drun2m_min.data)
        plotm.showPlot()
        if data1 != None:
            pline = plotm.getCurve(dobsm.data)
            pline.setLineVisible(1)
            pline.setLineColor("blue")
            pline.setSymbolType(Symbol.SYMBOL_CIRCLE)
            pline.setSymbolsVisible(0)
            pline.setSymbolSize(3)
            pline.setSymbolSkipCount(0)
            pline.setSymbolFillColor(pline.getLineColorString())
            pline.setSymbolLineColor(pline.getLineColorString())
        plotm.setVisible(False)
        if data1 != None:
            plots = do_regression_plots(dobsm, drun1m, drun2m)
            if plots != None:
                spanel = plots.getPlotpanel()
                removeToolbar(spanel)
        mpanel = plotm.getPlotpanel()
        removeToolbar(mpanel)
        dpanel = plotd.getPlotpanel()
        removeToolbar(dpanel)
        from javax.swing import JPanel, JFrame
        from java.awt import GridBagLayout, GridBagConstraints
        mainPanel = JPanel()
        mainPanel.setLayout(GridBagLayout())
        c = GridBagConstraints()
        c.fill = c.BOTH
        c.weightx, c.weighty = 0.5, 1
        c.gridx, c.gridy, c.gridwidth, c.gridheight = 0, 0, 10, 4
        if data1 != None:
            if plots != None:
                pass
                #mainPanel.add(spanel,c)
        c.gridx, c.gridy, c.gridwidth, c.gridheight = 0, 0, 10, 4
        c.weightx, c.weighty = 1, 1
        mainPanel.add(mpanel, c)
        c.gridx, c.gridy, c.gridwidth, c.gridheight = 0, 4, 10, 6
        mainPanel.add(dpanel, c)
        fr = JFrame()
        fr.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE)
        fr.getContentPane().add(mainPanel)
        fr.setSize(1100, 850)
        fr.show()
        mainPanel.setSize(1100, 850)
        mainPanel.setBackground(Color.WHITE)
        #import time; time.sleep(5)
        saveToPNG(mainPanel, imageDir + l + ".png")
    if weights != None:
        rms1 = (rms1 + rms1_min + rms1_max) / sumwts
        rms2 = (rms2 + rms2_min + rms2_max) / sumwts
        print 'RMS Run 1: %f' % rms1
        print 'RMS Run 2: %f' % rms2
        for loc in rmsmap.keys():
            print loc, rmsmap[loc]
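Note that doall reads a module-level variable named type for the data parameter, just as Example 1 sets type = 'FLOW'. A hypothetical call reusing the observed file and time window from Example 1 (the run file names and weights are illustrative):

type = 'FLOW'
doall(['RSAC128', 'RSAC155'],
      'Z:/calibration/observed/observed_flow_stage_for_compare_plots.dss',
      'Z:/calibration/run1.dss',       # illustrative
      'Z:/calibration/run2.dss',       # illustrative
      '01JAN2008 0000', '31DEC2008 2400',
      imageDir='d:/temp/',
      weights={'RSAC128': 1.0, 'RSAC155': 2.0})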