Example #1
def download_daily_discharge(USGS_siteCode,
                             beginDate,
                             endDate,
                             outFile,
                             Q_max_min_mean="mean"):
    # Example of calling GetValuesObject from a WaterOneFlow web service
    # Then create a time series plot using matplotlib
    from suds.client import Client
    from pandas import Series
    import matplotlib.pyplot as plt

    # Create the inputs needed for the web service call
    wsdlURL = 'http://hydroportal.cuahsi.org/nwisuv/cuahsi_1_1.asmx?WSDL'
    # Create a new object named "NWIS" for calling the web service methods
    NWIS = Client(wsdlURL).service

    variableCode = 'NWISUV:00060'

    siteCode = 'NWISUV:' + USGS_siteCode
    # beginDate and endDate are date strings, e.g. '2015-08-01' and '2015-11-06'

    # Call the GetValuesObject method to return datavalues
    response = NWIS.GetValuesObject(siteCode, variableCode, beginDate, endDate)

    # Get the site's name from the response
    try:
        siteName = response.timeSeries[0].sourceInfo.siteName
    except Exception:
        print('ERROR: SiteCode: %s does not have the data' % USGS_siteCode)
        return
Example #2
    def btn_RetrieveDataOnButtonClick(self, event):
        from suds.client import Client

        # self.m_textCtrl22.Value = 'NWISUV:10105900'
        # self.m_textCtrl23.Value = 'NWISUV:00060'
        # self.m_textCtrl24.Value = '2016-10-20'
        # self.m_textCtrl25.Value = '2016-11-06'

        # Check whether the user selected the needed items correctly
        validation_check = self.check_validation()
        if not validation_check:
            return

        # Create the inputs needed for the web service call (daily values service)
        wsdlURL = 'http://hydroportal.cuahsi.org/nwisdv/cuahsi_1_1.asmx?WSDL'

        # siteCode = self.m_textCtrl22.Value #'NWISUV:10105900'
        # variableCode = self.m_textCtrl23.Value #'NWISUV:00060'

        siteCode = self.comboBox_Code.Value  #'NWISUV:10105900'
        variableCode = self.comboBox_VariableCode.Value  #'NWISUV:00060'
        beginDate = self.m_textCtrl24.Value  #'2016-10-20'
        endDate = self.m_textCtrl25.Value  #'2016-11-06'

        # siteCode = 'NWISUV:10039500' for BEAR RIVER AT BORDER, WY
        # variableCode = 'NWISDV:00060/DataType=Mean' for Discharge, cubic feet per second

        # Create a new object named "NWIS" for calling the web service methods
        NWIS = Client(wsdlURL).service

        # Call the GetValuesObject method to return datavalues
        try:
            self.response = NWIS.GetValuesObject(siteCode, variableCode,
                                                 beginDate, endDate)
        except Exception:
            message = msg_somethigWrong(
                None,
                msg='\n\n\nError: the provided values do not have data on the server.'
                    '\nPlease make sure to enter correct values.')
            message.ShowModal()
            return

        self.btn_Load.Enabled = True

        # Get the site's name from the response
        siteName = self.response.timeSeries[0].sourceInfo.siteName
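
The check_validation helper called above is not shown in this example. A minimal, hedged sketch of what such a check might do (the widget names come from the handler above; the logic itself is only an assumption):

    def check_validation(self):
        # Illustrative sketch (assumed logic): require non-empty site code,
        # variable code, and begin/end dates before calling the service
        required = [self.comboBox_Code.Value,
                    self.comboBox_VariableCode.Value,
                    self.m_textCtrl24.Value,
                    self.m_textCtrl25.Value]
        return all(str(v).strip() for v in required)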
Example #3
from suds.client import Client
from pandas import Series
import matplotlib.pyplot as plt

# Create the inputs needed for the web service call
wsdlURL = 'http://hydroportal.cuahsi.org/nwisuv/cuahsi_1_1.asmx?WSDL'
siteCode = 'NWISUV:10105900'
variableCode = 'NWISUV:00060'
beginDate = '2016-10-20'
endDate = '2016-11-06'

# Create a new object named "NWIS" for calling the web service methods
NWIS = Client(wsdlURL).service

# Call the GetValuesObject method to return datavalues
response = NWIS.GetValuesObject(siteCode, variableCode, beginDate, endDate)

# Get the site's name from the response
siteName = response.timeSeries[0].sourceInfo.siteName

# Create some blank lists in which to put the values and their dates
a = []  # The values
b = []  # The dates

# Get the values and their dates from the web service response
values = response.timeSeries[0].values[0].value

# Loop through the values and load into the blank lists using append
for v in values:
    a.append(float(v.value))
    b.append(v._dateTime)
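
The snippet above imports Series and matplotlib but stops after filling the two lists. A short, illustrative sketch of the remaining steps (the plot styling shown here is only an example, not part of the original code):

# Build a pandas Series indexed by the datetimes and plot it (illustrative sketch)
ts = Series(a, index=b)

fig = plt.figure()
ax = fig.add_subplot(1, 1, 1)
ts.plot(ax=ax, linestyle='solid', label='15-minute streamflow values')
ax.set_ylabel('Discharge, cubic feet per second')
ax.set_xlabel('Date')
ax.set_title(siteName)
ax.grid(True)
ax.legend(loc='upper left')
plt.show()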
Example #4
def search_gamut_data(request):

    if request.method == 'GET':
        get_data = request.GET

        siteCodestr = str(get_data['siteCode'])
        beginDate = str(get_data['beginDate'])
        endDate = str(get_data['endDate'])
        # Create the inputs needed for the web service call

        if "LR" in siteCodestr:
            wsdlURL = 'http://data.iutahepscor.org/loganriverwof/cuahsi_1_1.asmx?WSDL'
        elif "PR" in siteCodestr:
            wsdlURL = 'http://data.iutahepscor.org/provoriverwof/cuahsi_1_1.asmx?WSDL'
        else:
            wsdlURL = 'http://data.iutahepscor.org/redbuttecreekwof/cuahsi_1_1.asmx?WSDL'

        siteCode = ':' + siteCodestr
        variableCode_pH = 'iutah:pH'
        variableCode_Temp = 'iutah:WaterTemp_EXO'
        variableCode_DO = 'iutah:ODO'
        variableCode_N = 'iutah:Nitrate-N'

        # Create a new object named "NWIS" for calling the web service methods
        NWIS = Client(wsdlURL).service

        # Call the GetValuesObject method to return datavalues
        response_pH = NWIS.GetValuesObject(siteCode, variableCode_pH,
                                           beginDate, endDate, '')
        response_Temp = NWIS.GetValuesObject(siteCode, variableCode_Temp,
                                             beginDate, endDate, '')
        response_DO = NWIS.GetValuesObject(siteCode, variableCode_DO,
                                           beginDate, endDate, '')
        response_N = NWIS.GetValuesObject(siteCode, variableCode_N, beginDate,
                                          endDate, '')

        # Get the site's name from the response
        siteName = response_pH.timeSeries[0].sourceInfo.siteName

        # Create some blank lists in which to put the values and their dates
        a1 = []  # The pH values
        b1 = []  # The dates
        a2 = []  # The Temperature values
        b2 = []  # The dates
        a3 = []  # The DO values
        b3 = []  # The dates
        a4 = []  # The Nitrate-N values
        b4 = []  # The dates

        # Get the values and their dates from the web service response
        pH_values = response_pH.timeSeries[0].values[0].value
        Temp_values = response_Temp.timeSeries[0].values[0].value
        DO_values = response_DO.timeSeries[0].values[0].value
        N_values = response_N.timeSeries[0].values[0].value

        # Loop through the values and load into the blank lists using append
        for v in pH_values:
            if float(v.value) > -100.0:
                a1.append(float(v.value))
                b1.append(v._dateTime)
        for v in Temp_values:
            if float(v.value) > -100.0:
                a2.append(float(v.value))
                b2.append(v._dateTime)
        for v in DO_values:
            if float(v.value) > -100.0:
                a3.append(float(v.value))
                b3.append(v._dateTime)
        for v in N_values:
            if float(v.value) > -100.0:
                a4.append(float(v.value))
                b4.append(v._dateTime)

        pH_ts = Series(a1, index=b1)
        dailyAvepH = pH_ts.resample('D').mean()
        dailyAvepH.to_csv("/tmp/avepH.csv", header=False)
        with open('/tmp/avepH.csv', 'r') as f:
            reader = csv.reader(f)
            your_list = list(reader)
        a5 = []
        b5 = []
        for line in your_list:
            if not line:  # skip any blank trailing row written by to_csv
                continue
            b5.append(line[0] + "T00:00:00")
            a5.append(float(line[1]))

    return JsonResponse({
        'success': "123",
        'a1': a1,
        'b1': b1,
        'a2': a2,
        'b2': b2,
        'a3': a3,
        'b3': b3,
        'a4': a4,
        'b4': b4,
        'a5': a5,
        'b5': b5,
    })
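
In the view above, Client, Series, csv, and JsonResponse are assumed to be imported at module level. The four nearly identical fetch-and-filter blocks could also be collected into one helper; a hedged refactoring sketch (the name get_gamut_series and its layout are assumptions, not part of the original view):

def get_gamut_series(service, site_code, variable_code, begin_date, end_date):
    # Return parallel lists of values and datetimes for one variable,
    # skipping the no-data sentinel values (<= -100)
    response = service.GetValuesObject(site_code, variable_code,
                                       begin_date, end_date, '')
    values, dates = [], []
    for v in response.timeSeries[0].values[0].value:
        if float(v.value) > -100.0:
            values.append(float(v.value))
            dates.append(v._dateTime)
    return values, dates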
Example #5
def download_daily_discharge(USGS_siteCode, beginDate, endDate, outFile, Q_max_min_mean="mean"):
    """
    HW7  in hydroinformatics
    startDate, endDate: string,  format-  yyyy-mm-dd
    Q_max_min_mean: string, "max", "mean" or "min"
    """

    # GetValuesObject from a WaterOneFlow web service
    # Then create a time series plot using matplotlib
    from suds.client import Client
    from pandas import Series
    import numpy as np
    # import matplotlib.pyplot as plt

    # Create the inputs needed for the web service call
    wsdlURL = 'http://hydroportal.cuahsi.org/nwisuv/cuahsi_1_1.asmx?WSDL'
    siteCode = 'NWISUV:%s' % USGS_siteCode
    variableCode = 'NWISUV:00060'

    # Create a new object named "NWIS" for calling the web service methods
    NWIS = Client(wsdlURL).service

    # Call the GetValuesObject method to return datavalues
    response = NWIS.GetValuesObject(siteCode, variableCode, beginDate, endDate)

    # Get the site's name from the response
    siteName = response.timeSeries[0].sourceInfo.siteName

    # Create some blank lists in which to put the values and their dates
    a = []  # The values
    b = []  # The dates

    # Get the values and their dates from the web service response
    values = response.timeSeries[0].values[0].value

    # Loop through the values and load into the blank lists using append
    for v in values:
        a.append(float(v.value))
        b.append(v._dateTime)

    # Create a Pandas Series object from the lists
    # Set the index of the Series object to the dates
    ts = Series(a, index=b)

    # # resample is like group by clause in SQL
    # # summed = ts.resample('1D', how='sum')                            #ts.resample('1440T', how='sum')
    # ts_maxx = ts.resample('1D', how='max')
    # ts_minn = ts.resample('1D', how='min')
    # ts_mean = ts.resample('1D', how='mean')

    # # Use MatPlotLib to create a plot of the time series
    # # Create a plot of the streamflow statistics
    # # ------------------------------------------
    # # Create a figure object and add a subplot
    # # figure() creates  a big area where we can create multiple or single drawings
    # fig = plt.figure()
    # ax = fig.add_subplot(1, 1, 1)  # arguments for add_subplot - add_subplot(nrows, ncols, plot_number)
    #
    # # Call the plot() methods on the series object to plot the data
    # ts.plot(color='0.9', linestyle='solid', label='15-minute streamflow values')
    # ts_maxx.plot(color='red', linestyle='solid', label='Daily streamflow values', marker="o")
    # ts_mean.plot(color='green', linestyle='solid', label='Daily streamflow values', marker="o")
    # ts_minn.plot(color='blue', linestyle='solid', label='Daily streamflow values', marker="o")
    #
    # # Set some properties of the subplot to make it look nice
    # ax.set_ylabel('Discharge, cubic feet per second')
    # ax.set_xlabel('Date')
    # ax.grid(True)
    # ax.set_title(siteName)
    #
    # # Add a legend with some customizations
    # legend = ax.legend(loc='upper left', shadow=True)
    #
    # # Create a frame around the legend.
    # frame = legend.get_frame()
    # frame.set_facecolor('0.95')

    # # Set the font size in the legend
    # for label in legend.get_texts():
    #     label.set_fontsize('large')
    #
    # for label in legend.get_lines():
    #     label.set_linewidth(1.5)  # the legend line width
    #
    # plt.savefig("HW7.png")
    # plt.show()

    if Q_max_min_mean.lower() == "max":
        ts_maxx = ts.resample('1D').max()
        r = ts_maxx
    elif Q_max_min_mean.lower() == "min":
        ts_minn = ts.resample('1D').min()
        r = ts_minn
    else:
        # old syntax was  ts_mean = ts.resample('1D' , how='mean')
        ts_mean = ts.resample('1D').mean()
        r = ts_mean

    r.to_csv(outFile, header=False)

    # Change the date format in the saved file:
    # replace '-' with ',' so year, month, and day become separate columns
    with open(outFile, "r") as f:
        str_to_save = f.read().replace('-', ",")

    # Save it again
    with open(outFile, "w") as f:
        f.write(str_to_save)

    # Read the file back, take the discharge column, and convert from cfs to m^3/s
    f = np.loadtxt(outFile, delimiter=",")
    q = f[:, -1]
    q = q * 0.028316847  # 1 cubic foot = 0.028316847 cubic meters

    # to make shape of q (x,1) instead of (x,)
    qq = np.zeros((q.shape[0], 1))
    qq[:, 0] = q


    # Take the first 3 columns (year, month, day),
    # add two extra columns of zeros (for min and sec),
    # append q as the last column,
    # and save the result separated by spaces
    date_part = f[:, :-1]
    empty_col = np.zeros((f.shape[0], 1))
    date_n_hour = np.append(date_part, empty_col, axis=1)
    date_hour_n_min = np.append(date_n_hour, empty_col, axis=1)
    date_hr_min_n_Q = np.append(date_hour_n_min, qq, axis=1)
    np.savetxt(outFile, date_hr_min_n_Q, fmt='%i %i %i %i %i %f')

    return
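
A short usage sketch for this function; the site code, dates, and output path below are placeholders only:

# Illustrative call (assumed values, not from the original source)
download_daily_discharge(USGS_siteCode='10109000',
                         beginDate='2015-08-01',
                         endDate='2015-11-06',
                         outFile='daily_mean_discharge.txt',
                         Q_max_min_mean='mean')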
Example #6
# -*- coding: utf-8 -*-
from suds.client import Client
from pandas import Series

#Create a new object named NWIS for calling the web service methods (https://fedorahosted.org/suds/wiki/Documentation)
NWIS = Client(
    "http://river.sdsc.edu/wateroneflow/NWIS/UnitValues.asmx?WSDL").service

#Call the GetValuesObject method (http://river.sdsc.edu/wateroneflow/NWIS/UnitValues.asmx?op=GetValuesObject)
response = NWIS.GetValuesObject("USGS:10109000", "USGS:00060", "2014-10-31",
                                "2014-11-04")
#(If you get an error message saying ‘Error connecting to USGS’, double check your Internet connection and the input parameters above.)

#create a Pandas Series object from the response
a = []
b = []
values = response.timeSeries.values.value
for v in values:
    a.append(float(v.value))
    b.append(v._dateTime)
ts = Series(a, index=b)

#print the site’s minimum value and datetime of occurrence to the console
print "Minimum streamflow was %s cfs on %s" % (ts.min(), ts.idxmin())
#(this should produce the following output: Minimum streamflow was 81.0 cfs on 2014-11-03 16:30:00)
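
The same Series can also be aggregated to daily statistics; a hedged, illustrative follow-up using the current pandas resample API:

# Illustrative daily-mean aggregation of the unit-values Series
daily_mean = ts.resample('1D').mean()
print("Mean daily streamflow:\n%s" % daily_mean)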