def ErrorModel_discharge(obs_file, Ass_folder, nbrch, Enddate, Startdate):
    """Fits an AR1 model to the time series"""

    #Load observed data
    Q_obs = ReadObsFlowsAss(obs_file)
    Q_obs[:,0] = Q_obs[:,0] + OUTSPECS.PYEX_DATE_OFFSET
    Q_obs = Q_obs[find(Q_obs[:,0] >= Startdate),:]
    reachID = []
    reachID.append(Q_obs[0,3])
    for i in range(1,len(Q_obs)):
        if Q_obs[i,3] != reachID[-1]:
            reachID.append(Q_obs[i,3])

    for n in range(0,len(reachID)):
        #Get simulated data
        q_out = BaseRun(Ass_folder, nbrch, Enddate, Startdate)
        sim_daily = q_out[int(reachID[n])-1,:]

        # Compute weekly values
        sim = []
        obs = []
        for i in range(0, len(sim_daily) / 7):
            sim.append(numpy.mean(sim_daily[i * 7:(i + 1) * 7]))
            week_obs = Q_obs[i * 7:(i + 1) * 7, 1]
            if numpy.sum(numpy.isnan(week_obs)) > 2:
                obs.append(numpy.nan)
            else:
                obs.append(numpy.mean(numpy.ma.masked_array(week_obs, numpy.isnan(week_obs))))

        print('Number of weeks without observed data: ' + str(sum(numpy.isnan(obs))))

        #Excluding weeks with no observed flow and zero flow
        sim_weekly = numpy.array(sim)
        obs_weekly = numpy.array(obs)
        obs_weekly[find(numpy.isnan(obs))] = -1
        a = numpy.where(obs_weekly>0)
        ts = numpy.zeros([len(a[0])])
        for i in range(0,len(a[0])):
            ts[i] = (sim_weekly[a[0][i]]-obs_weekly[a[0][i]])/(obs_weekly[a[0][i]])

        # Estimate alpha
        x = ts[0:-1]
        y = ts[1:]
        p = polyfit(x,y,1)
        alpha = p[0]

        # Estimate sigma from the residuals of the regression.
        yhat = polyval(p,x)
        sigma = std(y-yhat)

        with open(Ass_folder + os.sep + 'ErrorModelReach' + str(int(reachID[n])) + '_weekly.txt', 'wb') as csvfile:
            file_writer = csv.writer(csvfile, delimiter=' ')
            file_writer.writerow(['alphaerr']+['q'])
            file_writer.writerow([str(alpha)]+[str(sigma)])
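A minimal, self-contained sketch of the AR(1) fit performed above, run on a synthetic relative-error series (the series, its length and the true parameters are illustrative, not taken from the model):

import numpy

# Synthetic AR(1) error series: ts[t] = 0.8 * ts[t-1] + noise
rng = numpy.random.RandomState(0)
ts = numpy.zeros(200)
for t in range(1, len(ts)):
    ts[t] = 0.8 * ts[t - 1] + rng.normal(0.0, 0.1)

# Regress ts[t] on ts[t-1]; the slope of the fitted line is the AR(1) coefficient
x = ts[:-1]
y = ts[1:]
p = numpy.polyfit(x, y, 1)
alpha = p[0]

# sigma is the standard deviation of the regression residuals
yhat = numpy.polyval(p, x)
sigma = numpy.std(y - yhat)

print(alpha, sigma)  # alpha should be close to 0.8, sigma close to 0.1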
Example 2
def ErrorModel_discharge(obs_file, Ass_folder, nbrch, Enddate, Startdate):
    """Fits an AR1 model to the time series"""

    #Load observed data
    Q_obs = ReadObsFlowsAss(obs_file)
    Q_obs[:,0] = Q_obs[:,0] + OUTSPECS.PYEX_DATE_OFFSET
    Q_obs = Q_obs[find(Q_obs[:,0] >= Startdate),:]
    Q_obs_Startdate = Q_obs[0,0]
    if sum(Q_obs[:,0] <= Enddate-8) > 0:
        Q_obs = Q_obs[find(Q_obs[:,0] <= Enddate-8),:]

    reachID = []
    reachID.append(Q_obs[0,3])
    for i in range(1,len(Q_obs)):
        if Q_obs[i,3] != reachID[-1]:
            reachID.append(Q_obs[i,3])

    for n in range(0,len(reachID)):
        #Get simulated data
        q_out = BaseRun(Ass_folder, nbrch, Enddate, Startdate)
        DeltaStart = int(Q_obs_Startdate-Startdate)
        sim = q_out[int(reachID[n])-1,DeltaStart:]

        #Excluding zeroflow and missing data
        Q_obs[find(numpy.isnan(Q_obs[:,1])==1),1] = -1
        a = numpy.where(Q_obs[:,1]>0)
        ts = numpy.zeros([len(a[0])])
        for i in range(0,len(a[0])):
            ts[i] = (sim[a[0][i]]-Q_obs[a[0][i],1])/(Q_obs[a[0][i],1])

        # Estimate alpha
        x = ts[0:-1]
        y = ts[1:]
        N = len(x)
        Sxx = sum(x**2.)-sum(x)**2./N
        Syy = sum(y**2.)-sum(y)**2./N
        Sxy = sum(x*y)-sum(x)*sum(y)/N
        a = Sxy/Sxx
        b = mean(y)-a*mean(x)
        alpha = a

        # Estimate sigma from the residuals of the regression.
        yhat = a*x + b
        sigma = std(y-yhat)

        with open(Ass_folder + os.sep + 'ErrorModelReach' + str(int(reachID[n])) + '.txt', 'wb') as csvfile:
            file_writer = csv.writer(csvfile, delimiter=' ')
            file_writer.writerow(['alphaerr']+['q'])
            file_writer.writerow([str(alpha)]+[str(sigma)])
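The slope Sxy/Sxx used above is the ordinary least-squares estimate; a small check on made-up data that the closed-form expressions match numpy.polyfit:

import numpy

x = numpy.array([0.1, -0.2, 0.4, 0.0, 0.3, -0.1])
y = numpy.array([0.05, -0.15, 0.35, 0.02, 0.22, -0.08])
N = len(x)

# Closed-form least-squares slope and intercept
Sxx = numpy.sum(x**2.) - numpy.sum(x)**2. / N
Sxy = numpy.sum(x * y) - numpy.sum(x) * numpy.sum(y) / N
a = Sxy / Sxx
b = numpy.mean(y) - a * numpy.mean(x)

# Same fit via numpy.polyfit
slope, intercept = numpy.polyfit(x, y, 1)
print(numpy.allclose([a, b], [slope, intercept]))  # True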
Example 3
dates = range(int(ASS_startdate), int(ASS_enddate) + 1, 1)
dates = numpy.array(dates)

today = date.today()
period = today - date(2012, 1, 1) + timedelta(1)
enddays = period.days

# Loop through each day of validation period
# In 2012 observations are available between 01-Jan and 30-Sep (Range 0..274)

#--------------------------Plotting----------------------------------------
for jj in range(0, 8, 1):
    #Getting the observed data and identify reach(es)
    if os.path.isfile(OBS_FILE):
        Q_obs = ReadObsFlowsAss(OBS_FILE)
        Q_obs[:, 0] = Q_obs[:, 0] + OUTSPECS.PYEX_DATE_OFFSET
        reachID = []
        reachID.append(Q_obs[0, 3])
        for i in range(1, len(Q_obs)):
            if Q_obs[i, 3] != reachID[-1]:
                reachID.append(Q_obs[i, 3])
    else:
        reachID = [rch_ID]

    for n in range(0, len(reachID)):

        #Routed simulation data
        data = genfromtxt(PREDICTION_FOLDER + os.sep + 'day_' + str(jj) +
                          '_ass.csv',
                          delimiter=',')
def Results(obs_file, Startdate, Enddate, Ass_folder, nbrch):

    #Getting the observed data for the assimilation (assimilation starting date to issue day (Enddate-8))
    if os.path.isfile(obs_file):
        Q_obs = ReadObsFlowsAss(obs_file)
        Q_obs[:, 0] = Q_obs[:, 0] + OUTSPECS.PYEX_DATE_OFFSET
        reachID = []
        reachID.append(Q_obs[0, 3])
        for i in range(1, len(Q_obs)):
            if Q_obs[i, 3] != reachID[-1]:
                reachID.append(Q_obs[i, 3])
    else:
        raise GeoAlgorithmExecutionException('File ' + obs_file +
                                             ' does not exist')

    for n in range(0, len(reachID)):
        # Calculate weekly values
        Q_obs_weekly = numpy.zeros([numpy.size(Q_obs, 0) / 7, 1], dtype=float)
        for i in range(0, numpy.size(Q_obs, 0) / 7):
            if numpy.mean(Q_obs[i * 7:(i + 1) * 7, 3]) == reachID[n]:
                if numpy.sum(numpy.isnan(Q_obs[i * 7:(i + 1) * 7, 1])) > 2:
                    Q_obs_weekly[i] = numpy.nan
                else:
                    Q_obs_weekly[i] = numpy.mean(
                        numpy.ma.masked_array(
                            Q_obs[i * 7:(i + 1) * 7, 1],
                            numpy.isnan(Q_obs[i * 7:(i + 1) * 7, 1])))

        mean_obs_weekly = numpy.mean(
            numpy.ma.masked_array(Q_obs_weekly, numpy.isnan(Q_obs_weekly)))

        #Routed simulation data
        x3 = genfromtxt(Ass_folder + os.sep + 'Assimilation_Output.csv',
                        delimiter=',')
        P3 = genfromtxt(Ass_folder + os.sep + 'Assimilation_Cov.csv',
                        delimiter=',')
        q_ass = x3[int(reachID[n]) - 1, :]
        std_ass = P3[int(reachID[n]) - 1, :]

        #Creating the bounds
        up_bound_ass = q_ass + 2 * std_ass
        low_bound_ass = numpy.zeros([len(q_ass)])
        for j in range(0, len(q_ass)):
            if q_ass[j] - 2 * std_ass[j] > 0:
                low_bound_ass[j] = q_ass[j] - 2 * std_ass[j]
            else:
                low_bound_ass[j] = 0

        timestep = (Enddate - Startdate + 1) / len(q_ass)

        #Preparing the simulated flows
        sim_ass_daily = q_ass

        q_ass_weekly = []
        for i in range(1, len(q_ass) / 7):
            q_ass_weekly.append(mean(q_ass[(i - 1) * 7 + 1:i * 7]))

        #Excluding nan flows
        obs_weekly = array(Q_obs_weekly)
        obs_weekly[find(numpy.isnan(Q_obs_weekly))] = -1

        a = numpy.where(obs_weekly > 0)
        obs_det_sum = 0
        obs_ass_sum = 0
        for i in range(0, len(a[0])):
            # Note: sim_det_weekly (the deterministic weekly series) is not
            # computed in this function; the assimilated weekly series is q_ass_weekly
            obs_det_sum = obs_det_sum + (
                (sim_det_weekly[a[0][i]] - obs_weekly[a[0][i]])**2)
            obs_ass_sum = obs_ass_sum + (
                (q_ass_weekly[a[0][i]] - obs_weekly[a[0][i]])**2)

        # Create plot
        xsim = numpy.arange(1, len(q_ass_weekly) + 1)
        xobs = numpy.arange(1, len(Q_obs_weekly) + 1)
        fig = plt.figure()
        plt.title('Assimilation results for reach  ' + str(int(reachID[n])),
                  fontsize=12)
        plt.ylabel('Discharge [$m^3/s$]')
        p1, = plt.plot_date(xsim,
                            q_ass_weekly,
                            linestyle='-',
                            color='green',
                            marker='None')
        plt.plot_date(xsim,
                      low_bound_ass,
                      linestyle='--',
                      color='green',
                      marker='None')
        plt.plot_date(xsim,
                      up_bound_ass,
                      linestyle='--',
                      color='green',
                      marker='None')
        p2, = plt.plot_date(xobs, Q_obs_weekly, color='red', marker='.')
        plt.legend([p1, p2], ['Assimilated Run', 'Observed'], loc=0)
        grid(True)
        ax1 = fig.add_subplot(111)
        ax1.fill_between(xsim,
                         low_bound_ass,
                         up_bound_ass,
                         color='green',
                         alpha=.3)
        p = []
        for i in range(-30, 9):
            p.append(str(i))
        p[30] = str(num2date(Enddate - 8))[0:10]
        plt.xticks(numpy.arange(dates[0], dates[-1] + 1), p, size='xx-small')
        plt.xlim([Startdate + 20, Enddate])
        figname = Ass_folder + os.sep + 'Assimilation_Results_reach' + str(
            int(reachID[n])) + '.pdf'
        plt.savefig(figname)
        plt.show()
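A short sketch of the weekly aggregation rule used above: a week is kept only if at most two of its seven daily observations are missing (the daily series below is invented):

import numpy

daily = numpy.array([1.0, 2.0, numpy.nan, 1.5, 1.2, numpy.nan, 1.1,          # week 1: 2 gaps, keep
                     numpy.nan, numpy.nan, numpy.nan, 2.0, 2.1, 2.2, 2.3])   # week 2: 3 gaps, drop

weekly = []
for i in range(len(daily) // 7):
    week = daily[i * 7:(i + 1) * 7]
    if numpy.sum(numpy.isnan(week)) > 2:
        weekly.append(numpy.nan)
    else:
        # mean over the non-missing days only
        weekly.append(numpy.mean(numpy.ma.masked_array(week, numpy.isnan(week))))

print(weekly)  # [1.36, nan]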
Example 5
def kf_flows(obs_file, Ass_folder, nbrch, Enddate, Startdate, RR_enddate,
             RR_startdate):
    """Returns deterministic and assimilated discharges"""
    #-------------------------------------------------------------------------------

    # x deterministic run
    # x2 and x3 will be the baseline and assimilation runs and P2 and P3 the
    # covariances

    #-------------------------------------------------------------------------------
    days = int(Enddate - Startdate) + 1

    #Getting the observed data for the assimilation
    if os.path.isfile(obs_file):
        Q_obs = ReadObsFlowsAss(obs_file)
        Q_obs = Q_obs[find(numpy.isnan(Q_obs[:, 1]) == 0), :]
        Q_obs[:, 0] = Q_obs[:, 0] + OUTSPECS.PYEX_DATE_OFFSET
        if sum(Q_obs[:, 0] >= Startdate) > 0:
            Q_obs = Q_obs[find(Q_obs[:, 0] >= Startdate), :]
        if sum(Q_obs[:, 0] <= Enddate - 8) > 0:
            Q_obs = Q_obs[find(Q_obs[:, 0] <= Enddate - 8), :]

    #Getting input data and parameters
    (X, K, drainsTo, alphaerr, q, RR, nbrch_add, timestep,
     loss) = LoadData(Ass_folder, nbrch, RR_enddate, RR_startdate)

    # Adjust observed data to overlap with simulation period
    RR_skip = int(Startdate - RR_startdate)
    if RR_skip > 0:
        RR = RR[RR_skip:RR_skip + days, :]

    #Fitting the RR to the timestep
    Inputs = numpy.zeros([days * int(1 / timestep), nbrch_add])
    for i in range(0, days):
        for k in range(0, int(1 / timestep)):
            Inputs[int(1 / timestep) * i + k, :] = RR[i]

    simlength = len(Inputs)

    modeltime = numpy.zeros([simlength, 1])
    for i in range(0, len(modeltime)):
        modeltime[i] = Startdate + timestep * i

    IniScov = identity(3 * nbrch_add)

    (F, G1, G2) = MuskSetupFlows(Ass_folder, nbrch, RR_enddate, RR_startdate)

    Ga = G1
    Gb = G2

    # Base Run
    xtemp = numpy.zeros([nbrch_add])
    x = numpy.zeros([nbrch_add, simlength])
    x[:, 0] = xtemp

    for i in range(1, simlength):
        x[:, i] = dot(F, xtemp) + dot(Ga, Inputs[i - 1, :].T) + dot(
            Gb, Inputs[i, :].T)
        xtemp = x[:, i]

    #Prepare matrices and correlation structure

    #Spatial correlation of inflows

    RHO = corrcoef(RR, rowvar=0)

    #Keep correlation only in same reaches
    SP = numpy.zeros([nbrch_add, nbrch_add])

    a = numpy.where(drainsTo == 0)
    outlets = a[0] + 1
    nb_outlets = len(outlets)

    drainsTo_max = {}
    for j in range(1, nbrch + 1):
        drainsTo_max[j] = []

    drainsTo_add_max = {}
    for j in range(nbrch, nbrch_add + 1):
        drainsTo_add_max[j] = []

    for k in range(0, nbrch_add):
        p = int(drainsTo[k])
        TempIndex_add = []
        TempIndex = []
        if p > 0:
            while p > 0:
                if p <= nbrch:
                    TempIndex.append(p)
                    p = int(drainsTo[p - 1])
                else:
                    TempIndex_add.append(p)
                    p = int(drainsTo[p - 1])

            drainsTo_max[k + 1] = max(TempIndex)
            if len(TempIndex_add) > 0:
                drainsTo_add_max[k + 1] = max(TempIndex_add)

    Reaches = {}
    for i in range(0, nb_outlets):
        Reaches[outlets[i]] = []

    for m in range(0, nb_outlets):
        for n in drainsTo_max:
            if drainsTo_max[n] == outlets[m]:
                Reaches[outlets[m]].append(n)
            else:
                pass
        for n in drainsTo_add_max:
            if drainsTo_add_max[n] == outlets[m]:
                Reaches[outlets[m]].append(n)
            else:
                pass

    for i in range(0, nb_outlets):
        for j in range(0, len(Reaches[outlets[i]])):
            for k in range(0, len((Reaches[outlets[i]]))):
                SP[Reaches[outlets[i]][j] - 1, Reaches[outlets[i]][k] - 1] = 1

    RHO = RHO * SP

    #Define F1: model for both process and AR model
    F1 = numpy.zeros([3 * nbrch_add, 3 * nbrch_add])

    for i in range(0, nbrch_add):
        for j in range(0, nbrch_add):
            F1[i, j] = F[i, j]

    for i in range(nbrch_add, 2 * nbrch_add):
        F1[i, i] = alphaerr[i - nbrch_add]

    for i in range(2 * nbrch_add, 3 * nbrch_add):
        F1[i, i] = alphaerr[i - 2 * nbrch_add]

    G1 = numpy.zeros([3 * nbrch_add, 2 * nbrch_add])

    for i in range(0, nbrch_add):
        for j in range(0, nbrch_add):
            G1[i, j] = Ga[i, j]
        for j in range(nbrch_add, 2 * nbrch_add):
            G1[i, j] = Gb[i, j - nbrch_add]

    # Spatially correlated process-noise covariance: Q2[i, j] = RHO[i, j] * q[i] * q[j]
    Q2 = RHO * numpy.outer(q, q)
    Q = numpy.zeros([3 * nbrch_add, 3 * nbrch_add])

    for t in range(nbrch_add, 2 * nbrch_add):
        for v in range(nbrch_add, 2 * nbrch_add):
            Q[t, v] = Q2[t - nbrch_add, v - nbrch_add]

    for t in range(2 * nbrch_add, 3 * nbrch_add):
        for v in range(2 * nbrch_add, 3 * nbrch_add):
            Q[t, v] = Q2[t - 2 * nbrch_add, v - 2 * nbrch_add]

    #Run x2 - Run with no assimilation - with state augmentation

    xinit = numpy.zeros(3 * nbrch_add)
    P = IniScov
    xtemp = xinit
    P1all = empty([nbrch_add, simlength])
    P1all[:] = NAN
    x2 = numpy.zeros([3 * nbrch_add, simlength])

    x2[:, 0] = xinit
    for i in range(1, simlength):

        for c in range(0, nbrch_add):
            for j in range(nbrch_add, 2 * nbrch_add):
                F1[c, j] = Ga[c, j - nbrch_add] * Inputs[i - 1, j - nbrch_add]

            for j in range(2 * nbrch_add, 3 * nbrch_add):
                F1[c,
                   j] = Gb[c, j - 2 * nbrch_add] * Inputs[i, j - 2 * nbrch_add]

        x2[:, i] = dot(F1, xtemp) + dot(
            G1, concatenate((Inputs[i - 1, :].T, Inputs[i, :].T), axis=0))

        xtemp = x2[:, i]

        P = dot(dot(F1, P), F1.T) + Q

        for b in range(0, nbrch_add):
            P1all[b, i] = sqrt(P[b, b])

    #Assimilation Run
    P = IniScov
    xtemp = xinit
    Innov = empty([simlength])
    Innov[:] = NAN
    PredStd = empty([simlength])
    PredStd[:] = NAN
    Loc = empty([simlength])
    Loc[:] = NAN
    Pall = empty([nbrch_add, simlength])
    Pall[:, :] = NAN
    x3 = numpy.zeros([3 * nbrch_add, simlength])
    x4 = numpy.zeros([3 * nbrch_add, simlength])
    x_ahead_temp = numpy.zeros([3 * nbrch_add, simlength])
    Pall_ahead = Pall
    P_4 = Pall

    for i in range(1, simlength):
        for c in range(0, nbrch_add):
            for j in range(nbrch_add, 2 * nbrch_add):
                F1[c, j] = Ga[c, j - nbrch_add] * Inputs[i - 1, j - nbrch_add]

            for j in range(2 * nbrch_add, 3 * nbrch_add):
                F1[c,
                   j] = Gb[c, j - 2 * nbrch_add] * Inputs[i, j - 2 * nbrch_add]

        x3[:, i] = dot(F1, xtemp) + dot(
            G1, concatenate((Inputs[i - 1, :].T, Inputs[i, :].T), axis=0))
        P = dot(dot(F1, P), F1.T) + Q

        if os.path.isfile(obs_file):
            a = numpy.where(
                Q_obs[:, 0] == modeltime[i])  #look for measurement on day i
            a = a[0].T
        else:
            a = numpy.array([])

        if a.size > 0:
            for mn in range(0, len(a)):
                pt = int(Q_obs[a[mn], 3])  #Reach where measurement is taken
                r = Q_obs[a[mn], 2]  #Measurement std [m3/s]

                z1 = x3[pt - 1, i]  #Modelled flow
                if isnan(Q_obs[a[mn], 1]) == False:
                    #Measurement operator at the state measurement
                    H1 = numpy.zeros([1, nbrch_add * 3])
                    H1[0, pt - 1] = 1
                    H = H1
                    #Kalman gain
                    R = r**2
                    K = dot(dot(P, H.T), (dot(dot(H, P), H.T) + R)**(-1))
                    Innov[i] = Q_obs[a[mn], 1] - z1
                    PredStd[i] = math.sqrt(dot(dot(H, P), H.T) + R)
                    Loc[i] = pt
                    x3[:, i] = x3[:, i] + K.squeeze() * (Q_obs[a[mn], 1] - z1)
                    P = P - dot(dot(K, H), P)

        for v in range(0, nbrch_add):
            Pall[v, i] = math.sqrt(P[v, v])

        xtemp = x3[:, i]

    P_2 = P1all
    P_3 = Pall

    index = numpy.where(numpy.isnan(Innov) == False)[0]

    Innov1 = Innov
    PredStd1 = PredStd

    count = 0
    for i in range(0, len(index)):
        if Innov1[index[i]] > 2 * PredStd1[index[i]] or Innov1[
                index[i]] < -2 * PredStd1[index[i]]:
            count = count + 1

    #Adjust to one flow per day
    q2 = numpy.zeros([3 * nbrch_add, days])
    for i in range(0, days):
        q_temp = 0
        for j in range(0, int(1 / timestep)):
            q_temp = q_temp + x2[:, i * int(1 / timestep) + j]
        q2[:, i] = q_temp / (1 / timestep)

    q3 = numpy.zeros([3 * nbrch_add, days])
    for i in range(0, days):
        q_temp = 0
        for j in range(0, int(1 / timestep)):
            q_temp = q_temp + x3[:, i * int(1 / timestep) + j]
        q3[:, i] = q_temp / (1 / timestep)

    P2 = numpy.zeros([nbrch_add, days])
    for i in range(0, days):
        q_temp = 0
        for j in range(0, int(1 / timestep)):
            q_temp = q_temp + P_2[:, i * int(1 / timestep) + j]
        P2[:, i] = q_temp / (1 / timestep)

    P3 = numpy.zeros([nbrch_add, days])
    for i in range(0, days):
        q_temp = 0
        for j in range(0, int(1 / timestep)):
            q_temp = q_temp + P_3[:, i * int(1 / timestep) + j]
        P3[:, i] = q_temp / (1 / timestep)

    #Creating output files for plotting function
    with open(Ass_folder + os.sep + 'Deterministic_Output.csv',
              'wb') as csvfile:
        file_writer = csv.writer(csvfile, delimiter=',')
        for i in range(0, len(q2)):
            file_writer.writerow(q2[i])

    with open(Ass_folder + os.sep + 'Deterministic_Cov.csv', 'wb') as csvfile:
        file_writer = csv.writer(csvfile, delimiter=',')
        for i in range(0, len(P2)):
            file_writer.writerow(P2[i])

    with open(Ass_folder + os.sep + 'Assimilation_Output.csv',
              'wb') as csvfile:
        file_writer = csv.writer(csvfile, delimiter=',')
        for i in range(0, len(q3)):
            file_writer.writerow((q3[i]))

    with open(Ass_folder + os.sep + 'Assimilation_Cov.csv', 'wb') as csvfile:
        file_writer = csv.writer(csvfile, delimiter=',')
        for i in range(0, len(P3)):
            file_writer.writerow(P3[i])

    #Creating output files for users
    out_header = []
    out_header.append('Dates')
    for i in range(0, nbrch):
        out_header.append('Reach ' + str(i + 1) + ' flow')
        out_header.append('Reach ' + str(i + 1) + ' std')

    simdates = arange(Startdate, Enddate + 1, 1)
    simdates = simdates - OUTSPECS.PYEX_DATE_OFFSET

    output = zeros([days, nbrch * 2 + 1])
    for i in range(0, nbrch):
        output[:, i * 2 + 1] = q3[i, :]
        output[:, i * 2 + 2] = P3[i, :]  # assimilated std alongside the assimilated flow

    output[:, 0] = simdates

    with open(Ass_folder + os.sep + 'Assimilation_Final_Output.csv',
              'wb') as csvfile:
        file_writer = csv.writer(csvfile, delimiter=',')
        file_writer.writerow(out_header)
        for i in range(0, len(output)):
            file_writer.writerow(output[i])

    return x, x2, x3, P_2, P_3, Innov, PredStd, Loc
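The measurement update inside the assimilation loop follows the standard Kalman filter equations; a minimal sketch with a small made-up state and a single scalar observation (all dimensions and values here are illustrative):

import numpy

n = 3                                  # state dimension
x = numpy.array([1.0, 2.0, 3.0])       # prior state estimate
P = numpy.eye(n)                       # prior state covariance
H = numpy.zeros((1, n))
H[0, 1] = 1.0                          # observe the second state only
R = 0.5**2                             # measurement error variance
z = 2.4                                # the observation

# Kalman gain: K = P H^T (H P H^T + R)^-1 (scalar innovation covariance here)
S = numpy.dot(numpy.dot(H, P), H.T)[0, 0] + R
K = numpy.dot(P, H.T) / S

# State and covariance update
innovation = z - numpy.dot(H, x)[0]
x = x + K[:, 0] * innovation
P = P - numpy.dot(numpy.dot(K, H), P)

print(x)              # the observed component moves towards z
print(numpy.diag(P))  # its variance shrinks, the others are unchanged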
Example 6
def Results(obs_file, IssueDate, Startdate, Enddate, Ass_folder, rch_ID):

    #Getting the observed data and identify reach(es)
    if os.path.isfile(obs_file):
        Q_obs = ReadObsFlowsAss(obs_file)
        Q_obs[:, 0] = Q_obs[:, 0] + OUTSPECS.PYEX_DATE_OFFSET
        reachID = []
        reachID.append(Q_obs[0, 3])
        for i in range(1, len(Q_obs)):
            if Q_obs[i, 3] != reachID[-1]:
                reachID.append(Q_obs[i, 3])
    else:
        reachID = [rch_ID]

    for n in range(0, len(reachID)):

        #Routed simulation data
        x3 = genfromtxt(Ass_folder + os.sep + 'Assimilation_Output.csv',
                        delimiter=',')
        P3 = genfromtxt(Ass_folder + os.sep + 'Assimilation_Cov.csv',
                        delimiter=',')
        q_ass = x3[int(reachID[n]) - 1, :]
        std_ass = P3[int(reachID[n]) - 1, :]

        #Creating the bounds
        up_bound_ass = q_ass + 2 * std_ass
        low_bound_ass = numpy.zeros([len(q_ass)])
        for j in range(0, len(q_ass)):
            if q_ass[j] - 2 * std_ass[j] > 0:
                low_bound_ass[j] = q_ass[j] - 2 * std_ass[j]
            else:
                low_bound_ass[j] = 0

        timestep = (Enddate - Startdate + 1) / len(q_ass)
        print(timestep)

        # Create plot
        dates = numpy.arange(Startdate,
                             Startdate + len(q_ass) / (1 / timestep), timestep)
        fig = plt.figure()
        plt.title('Assimilation results for reach  ' + str(int(reachID[n])),
                  fontsize=12)
        plt.ylabel('Discharge [$m^3/s$]')
        p1, = plt.plot_date(dates,
                            q_ass,
                            linestyle='-',
                            color='green',
                            marker='None')
        plt.plot_date(dates,
                      low_bound_ass,
                      linestyle='--',
                      color='green',
                      marker='None')
        plt.plot_date(dates,
                      up_bound_ass,
                      linestyle='--',
                      color='green',
                      marker='None')
        if os.path.isfile(obs_file):
            # Extract obsdata for current reachID
            obsdata = Q_obs[find(Q_obs[:, 3] == int(reachID[n])), :]
            if sum(obsdata[:, 0] >= Startdate) > 0:
                obsdata = obsdata[find(obsdata[:, 0] >= Startdate), :]
            if sum(obsdata[:, 0] <= Enddate - 8) > 0:
                obsdata = obsdata[find(obsdata[:, 0] <= Enddate - 8), :]
            obstimes = obsdata[:, 0]
            obs_dates = obstimes
            p2, = plt.plot_date(obs_dates,
                                obsdata[:, 1],
                                color='red',
                                marker='.')
            plt.legend([p1, p2], ['Assimilated Run', 'Observed'], loc=0)
        #        grid(True)
        #        grid(True)
        ax1 = fig.add_subplot(111)
        ax1.fill_between(dates,
                         low_bound_ass,
                         up_bound_ass,
                         color='green',
                         alpha=.3)
        #        p = []
        #       for i in range(-30,9):
        #            p.append(str(i))
        #        p[30]= str(num2date(Enddate-8))[0:10]
        #        plt.xticks(numpy.arange(dates[0],dates[-1]+1), p, size='xx-small')
        #        plt.xlim([Startdate+20, Enddate])
        plt.ylim([0, max(up_bound_ass[~numpy.isnan(up_bound_ass)]) + 5])
        figname = Ass_folder + os.sep + 'Assimilation_Results_reach' + str(
            int(reachID[n])) + '_' + IssueDate + '.pdf'
        plt.savefig(figname)


#        plt.show()
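A compact sketch of the ±2·std band plotted above, with the lower bound clipped at zero (the series below are synthetic):

import numpy
import matplotlib.pyplot as plt

t = numpy.arange(0, 50)
q = 5.0 + numpy.sin(t / 5.0)             # a discharge-like series
std = 0.5 * numpy.ones_like(q)           # its standard deviation

upper = q + 2 * std
lower = numpy.maximum(q - 2 * std, 0.0)  # discharge cannot be negative

fig, ax = plt.subplots()
ax.plot(t, q, color='green', linestyle='-')
ax.plot(t, lower, color='green', linestyle='--')
ax.plot(t, upper, color='green', linestyle='--')
ax.fill_between(t, lower, upper, color='green', alpha=0.3)
ax.set_ylabel('Discharge [$m^3/s$]')
plt.show()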
Example 7
from datetime import date, timedelta
from matplotlib.pylab import *
import subprocess
from PyQt4 import QtGui
from read_SWAT_out import read_SWAT_time
from SWAT_output_format_specs import SWAT_output_format_specs
from ASS_utilities import ReadNoSubs
import ASS_module3_Assimilation
import ASS_module1_PrepData
import ASS_module2_ErrorModel
import ASS_module4_Results
from ASS_utilities import ReadObsFlowsAss
import ASS_Evaluation

#Load observed data
Q_obs = ReadObsFlowsAss(obs_file)
Q_obs[:,0] = Q_obs[:,0] + OUTSPECS.PYEX_DATE_OFFSET
Q_obs = Q_obs[find(Q_obs[:,0] >= Startdate),:]

##Q_obs[1:100,1]=NaN       # Removes 2005.02.15 - 2005.05.25
##Q_obs[670:995,1]=NaN     # Removes 2006.12.16 - 2007.11.06, no flow
##Q_obs[0:1416,1]=NaN     # Removes 2005.02.15 - 2009.01.01, no flow


#Get simulated data
q_out = BaseRun(Ass_folder, nbrch, Enddate, Startdate)
sim = q_out[int(ReachNo)-1,:]

#Excluding zeroflow and missing data
Q_obs[find(numpy.isnan(Q_obs[:,1])==1),1] = -1
a = numpy.where(Q_obs[:,1]>0)
Example 8
def ErrorModel_discharge(obs_file, Ass_folder, nbrch, Enddate, Startdate):
    """Fits an AR1 model to the time series"""

    relativeerror = False
    #Load observed data
    Q_obs = ReadObsFlowsAss(obs_file)
    Q_obs[:, 0] = Q_obs[:, 0] + OUTSPECS.PYEX_DATE_OFFSET
    Q_obs = Q_obs[find(Q_obs[:, 0] >= Startdate), :]
    Q_obs_Startdate = Q_obs[0, 0]
    if sum(Q_obs[:, 0] <= Enddate - 8) > 0:
        Q_obs = Q_obs[find(Q_obs[:, 0] <= Enddate - 8), :]

    reachID = []
    reachID.append(Q_obs[0, 3])
    for i in range(1, len(Q_obs)):
        if Q_obs[i, 3] != reachID[-1]:
            reachID.append(Q_obs[i, 3])

    for n in range(0, len(reachID)):
        #Get simulated data
        q_out = BaseRun(Ass_folder, nbrch, Enddate, Startdate)
        DeltaStart = int(Q_obs_Startdate - Startdate)
        sim = q_out[int(reachID[n]) - 1, DeltaStart:]

        #Excluding zeroflow and missing data
        Q_obs[find(numpy.isnan(Q_obs[:, 1]) == 1), 1] = -1
        a = numpy.where(Q_obs[:, 1] > 0)
        ts = numpy.zeros([len(a[0])])
        simobs = numpy.zeros([len(a[0])])
        obsobs = numpy.zeros([len(a[0])])
        for i in range(0, len(a[0])):
            simobs[i] = sim[a[0][i]]
            obsobs[i] = Q_obs[a[0][i], 1]
            if relativeerror:
                ts[i] = (sim[a[0][i]] - Q_obs[a[0][i], 1]) / Q_obs[a[0][i], 1]
            else:
                ts[i] = sim[a[0][i]] - Q_obs[a[0][i], 1]

        with open(Ass_folder + os.sep + 'simobs.txt', 'wb') as csvfile:
            file_writer = csv.writer(csvfile, delimiter=' ')
            file_writer.writerow(simobs)

        with open(Ass_folder + os.sep + 'obsobs.txt', 'wb') as csvfile:
            file_writer = csv.writer(csvfile, delimiter=' ')
            file_writer.writerow(obsobs)

        # Estimate alpha
        x = ts[0:-1]
        y = ts[1:]
        N = len(x)
        Sxx = sum(x**2.) - sum(x)**2. / N
        Syy = sum(y**2.) - sum(y)**2. / N
        Sxy = sum(x * y) - sum(x) * sum(y) / N
        a = Sxy / Sxx
        b = mean(y) - a * mean(x)
        alpha = a

        # Estimate sigma from the residuals of the regression.
        yhat = a * x + b
        sigma = std(y - yhat)

        with open(
                Ass_folder + os.sep + 'ErrorModelReach' +
                str(int(reachID[n])) + '.txt', 'wb') as csvfile:
            file_writer = csv.writer(csvfile, delimiter=' ')
            file_writer.writerow(['alphaerr'] + ['q'])
            file_writer.writerow([str(alpha)] + [str(sigma)])

        AR_res = y - yhat

        with open(Ass_folder + os.sep + 'residuals.txt', 'wb') as csvfile:
            file_writer = csv.writer(csvfile, delimiter=' ')
            file_writer.writerow(AR_res)

        N = float(len(AR_res))

        #Correlogram

        # Sample autocorrelation of the residuals:
        # rh[h] = c(h) / c(0),  c(h) = 1/N * sum((e_t - mean(e)) * (e_{t+h} - mean(e)))
        rh = numpy.zeros([len(AR_res)])
        res_mean = mean(AR_res)
        c0 = 1 / N * sum((AR_res - res_mean)**2)

        for h in range(0, int(N)):
            ch = 1 / N * sum((AR_res[0:int(N) - h] - res_mean) *
                             (AR_res[h:] - res_mean))
            rh[h] = ch / c0

        ul = ones([len(rh)]) * (-1 / float(N) + 1.96 / N**(0.5))
        ll = ones([len(rh)]) * (-1 / float(N) - 1.96 / N**(0.5))

        count = 0
        for i in range(0, len(rh)):
            if ul[i] < rh[i] or rh[i] < ll[i]:
                count = count + 1

        pct = count / float(len(rh)) * 100
        print('Percent out of bounds', pct)

        fig = matplotlib.pyplot.figure(figsize=(4.5, 3.5))

        matplotlib.rcParams.update({'font.size': 8, 'font.family': 'sans-serif'})

        matplotlib.rc('ytick', labelsize=8)
        matplotlib.rc('xtick', labelsize=8)

        ylabel('Correlation coefficient')
        xlabel('Lag [days]')

        xlim([0, 2500])

        plot(rh, linestyle='-', marker='.', markersize=2)
        plot(ul, linestyle='--', marker='None', color='black')
        plot(ll, linestyle='--', marker='None', color='black')

        ##figname = 'C:\Users\Gudny\Thesis\Writing\Figures\Correlogram_Mokolo.pdf'
        ##savefig(figname)

        show()
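A self-contained sketch of the residual correlogram and the approximate 95% bounds used above (white-noise residuals stand in for the AR(1) residuals):

import numpy
import matplotlib.pyplot as plt

rng = numpy.random.RandomState(1)
res = rng.normal(0.0, 1.0, 500)          # stand-in for the regression residuals
N = float(len(res))
res_mean = numpy.mean(res)
c0 = numpy.sum((res - res_mean)**2) / N  # lag-0 autocovariance

lags = range(0, 100)
rh = [numpy.sum((res[:len(res) - h] - res_mean) *
                (res[h:] - res_mean)) / N / c0 for h in lags]

# Approximate 95% confidence bounds for the correlogram of white noise
ul = -1.0 / N + 1.96 / numpy.sqrt(N)
ll = -1.0 / N - 1.96 / numpy.sqrt(N)

plt.plot(lags, rh, linestyle='-', marker='.', markersize=2)
plt.axhline(ul, linestyle='--', color='black')
plt.axhline(ll, linestyle='--', color='black')
plt.xlabel('Lag')
plt.ylabel('Correlation coefficient')
plt.show()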