def updtChart(site_triplet, siteName):
    """Build a period-of-record (POR) daily average temperature chart for one site.

    Downloads the site's daily TAVG record, splits it into 366-day water
    years, computes min/max/percentile envelopes across all completed water
    years, and assembles Plotly traces plus a layout.

    Parameters:
        site_triplet: AWDB station triplet string ("id:state:network").
        siteName: human-readable site name used in the chart title.

    Returns:
        dict with keys 'data' (list of go.Scatter) and 'layout' (go.Layout).

    NOTE(review): relies on module-level names `meta`, `dataUrl`, `today`,
    `trimToOct1` and `padMissingData` — confirm they exist in this module.
    """
    print('Working on TAVG POR Chart for ' + siteName)
    # Stat-series accumulators and the list of Plotly traces to return.
    statsData = []
    minData = []
    maxData = []
    meanData = []
    lowestData = []
    highestData = []
    lowData = []
    highData = []
    sliderDates = []
    meanData = []  # NOTE(review): redundant re-initialization (set above too)
    trace = []
    sitePlotData = []
    PORplotData = []
    sitePlotNormData = []
    validTrip = [site_triplet]
    sensor = r"TAVG"
    # One leap-year water year of x-axis dates (Oct 1 .. Sep 30, 366 days);
    # every data series below is chunked/padded to this length.
    date_series = [date(2015,10,1) + datetime.timedelta(days=x)
                   for x in range(0, 366)] #could use any year with a leap day
    sitePlotNormData = []  # NOTE(review): redundant re-initialization
    # Earliest begin date across all stations in the metadata list.
    beginDateDict = {}
    for siteMeta in meta:
        beginDateDict.update(
            {str(siteMeta['stationTriplet']) :
             dt.strptime(str(siteMeta['beginDate']), "%Y-%m-%d %H:%M:%S")})
    siteBeginDate = min(beginDateDict.values())
    sYear = siteBeginDate.year
    # NOTE(review): this condition is always False (sYear was just assigned
    # siteBeginDate.year), so the water-year adjustment below is dead code.
    # The apparent intent was to bump sYear when the record starts after
    # Oct 1 — confirm before resurrecting it.
    if siteBeginDate.year > sYear:
        if siteBeginDate.month < 10:
            sYear = siteBeginDate.year
        else:
            if siteBeginDate.month == 10 and siteBeginDate.day == 1:
                sYear = siteBeginDate.year
            else:
                sYear = siteBeginDate.year + 1
    sDate = date(sYear, 10, 1).strftime("%Y-%m-%d")
    eDate = today.date().strftime("%Y-%m-%d")
    # Download the daily TAVG JSON for each triplet and trim to an Oct 1 start.
    data = []
    for triplet in validTrip:
        url = '/'.join([dataUrl,'DAILY', sensor,
                        triplet.replace(':','_') + '.json'])
        with request.urlopen(url) as d:
            jTemp = json.loads(d.read().decode())
        data.append(trimToOct1(jTemp))
    for dataSite in data:
        if dataSite:
            padMissingData(dataSite,sDate,eDate)
    # NOTE(review): np.float is deprecated and removed in NumPy >= 1.20;
    # use float (or np.float64) instead.
    sitePlotData = np.array(data[0]['values'], dtype=np.float)
    # Chunk the flat daily record into 366-day water-year slices.
    PORplotData = list([sitePlotData[i:i+366]
                        for i in range(0,len(sitePlotData),366)])
    allButCurrWY = list(PORplotData)
    del allButCurrWY[-1]  # exclude the (partial) current water year from stats
    # Transpose so statsData[d] holds every historical value for WY day d.
    # NOTE(review): statsData[0] below raises IndexError when allButCurrWY is
    # empty (record shorter than one full water year) — confirm callers
    # never pass such sites.
    statsData = list(map(list,zip(*allButCurrWY)))
    if len(statsData[0]) > 1:
        # WY day index 151 is Feb 29; duplicate Feb 28 stats so non-leap
        # years do not leave a one-day gap in the envelopes.
        statsData[151] = statsData[150]
        with warnings.catch_warnings():
            # All-NaN slices emit RuntimeWarning; silence them.
            warnings.simplefilter("ignore", category=RuntimeWarning)
            minData = [np.nanmin(a) for a in statsData]
            maxData = [np.nanmax(a) for a in statsData]
            meanData = [np.nanpercentile(a,50) for a in statsData]
            lowestData = [np.nanpercentile(a,10) for a in statsData]
            highestData = [np.nanpercentile(a,90) for a in statsData]
            lowData = [np.nanpercentile(a,30) for a in statsData]
            highData = [np.nanpercentile(a,70) for a in statsData]
        # Default range-slider window: current data plus a 14-day look-ahead,
        # clipped so it never runs past the end of the water year.
        future_date_pad = 14
        if len(PORplotData[-1]) > 351:
            future_date_pad = 366 - len(PORplotData[-1]) - 1
        sliderDates = list(chain([(date_series[0])] +
                                 [date_series[len(PORplotData[-1])+ future_date_pad]]))
    else:
        sliderDates = list(chain([(date_series[0])] + [date_series[-1]]))
    # One trace per water year: the current WY is a solid black line, prior
    # WYs start hidden ('legendonly') and can be toggled from the legend.
    if len(PORplotData) > 0:
        for index, i in enumerate(PORplotData):
            if index == len(PORplotData)-1:
                trace.extend(
                    [go.Scatter(
                        x=date_series,y=i,
                        name=str(sYear + index + 1),
                        visible=True,connectgaps=True,
                        line=dict(color='rgb(0,0,0)'))])
            elif np.nansum(i) > 0:
                trace.extend(
                    [go.Scatter(x=date_series,y=i,
                                name=str(sYear + index + 1),
                                visible='legendonly',
                                connectgaps=True)])
    # Percentile shading bands. Order matters: each fill='tonexty' trace
    # shades down to the previously added trace.
    # NOTE(review): mode='line' is not a valid Plotly mode ('lines' is) —
    # confirm against the plotly version in use.
    if meanData:
        if lowestData:
            trace.extend(
                [go.Scatter(x=date_series,y=minData
                            ,legendgroup='centiles',name=r'Min',
                            visible=True,mode='line',
                            line=dict(width=0),connectgaps=True,
                            fillcolor='rgba(237,0,1,0.15)',
                            fill='none',showlegend=False,
                            hoverinfo='none')])
            trace.extend(
                [go.Scatter(x=date_series,y=lowestData
                            ,legendgroup='centiles',name=r'10%',
                            visible=True,mode='line',
                            line=dict(width=0),connectgaps=True,
                            fillcolor='rgba(237,0,1,0.15)',
                            fill='tonexty',showlegend=False,
                            hoverinfo='none')])
        if lowData:
            trace.extend(
                [go.Scatter(x=date_series,y=lowData,
                            legendgroup='centiles',name=r'30%',
                            visible=True,mode='line',
                            line=dict(width=0),connectgaps=True,
                            fillcolor='rgba(237,237,0,0.15)',
                            fill='tonexty',showlegend=False,
                            hoverinfo='none')])
        if highData:
            # This is the single legend entry for the whole shading group.
            trace.extend(
                [go.Scatter(x=date_series,y=highData,
                            legendgroup='centiles',
                            name=r'Stats. Shading',
                            visible=True,mode='line',
                            line=dict(width=0),connectgaps=True,
                            fillcolor='rgba(115,237,115,0.15)',
                            fill='tonexty',showlegend=True,
                            hoverinfo='none')])
        if highestData:
            trace.extend(
                [go.Scatter(x=date_series,y=highestData,
                            legendgroup='centiles',connectgaps=True,
                            name=r'90%',visible=True
                            ,mode='line',line=dict(width=0),
                            fillcolor='rgba(0,237,237,0.15)',
                            fill='tonexty',showlegend=False,
                            hoverinfo='none')])
            trace.extend(
                [go.Scatter(x=date_series,y=maxData
                            ,legendgroup='centiles',name=r'Max',
                            visible=True,mode='line',
                            line=dict(width=0),connectgaps=True,
                            fillcolor='rgba(1,0,237,0.15)',
                            fill='tonexty',showlegend=False,
                            hoverinfo='none')])
    # Visible Min envelope line (on top of the shading).
    if minData:
        trace.extend(
            [go.Scatter(x=date_series,y=minData,
                        name=r'Min',visible=True,
                        hoverinfo='none',connectgaps=True,
                        line=dict(color='rgba(237,0,0,0.5)'))])
    # Published 1981-2010 normals, if available for this site.
    if len(sitePlotNormData) > 0:
        trace.extend(
            [go.Scatter(x=date_series,
                        y=sitePlotNormData,
                        name=r"Normal ('81-'10)",connectgaps=True,
                        visible=True,hoverinfo='none',
                        line=dict(color='rgba(0,237,0,0.4)'))])
    # POR "normal" (50th percentile); dashed/hidden if official normals shown.
    # NOTE(review): the legend/annotation call this a mean, but meanData is
    # computed above as the 50th percentile (median).
    if meanData:
        if len(sitePlotNormData) > 0:
            trace.extend(
                [go.Scatter(x=date_series,
                            y=meanData,name=r'Normal (POR)',
                            visible='legendonly',
                            hoverinfo='none', connectgaps=True,
                            line=dict(color='rgba(0,237,0,0.4)',
                                      dash='dash'))])
        else:
            trace.extend(
                [go.Scatter(x=date_series,y=meanData,
                            name=r'Normal (POR)',connectgaps=True,
                            visible=True,hoverinfo='none',
                            line=dict(color='rgba(0,237,0,0.4)'))])
    # Visible Max envelope line.
    if maxData:
        trace.extend(
            [go.Scatter(x=date_series,y=maxData,
                        name=r'Max',visible=True,
                        hoverinfo='none',connectgaps=True,
                        line=dict(color='rgba(0,0,237,0.4)'))])
    annoText = str(r"Statistical shading breaks at 10th, 30th, 50th, 70th, and 90th Percentiles<br>Normal ('81-'10) - Official median calculated from 1981 thru 2010 data <br>Normal (POR) - Unofficial mean calculated from Period of Record data <br>For more information visit: <a href='https://www.wcc.nrcs.usda.gov/normals/30year_normals_data.htm'>30 year normals calcuation description</a>")
    # NOTE(review): the "Current:" annotation block below is disabled for the
    # TAVG chart; the same logic is live in the PREC projection variant.
#    asterisk = ''
#    if len(sitePlotNormData) == 0:
#        sitePlotNormData = meanData
#        annoText = annoText + '<br>*POR data used to calculate Normals since no published 30-year normals available for this site'
#        asterisk = '*'
#    jDay = len(PORplotData[-1])-1
#    if len(sitePlotNormData) == 0:
#        perNorm = r'N/A'
#    else:
#        perNorm = str('{0:g}'.format(100*round(
#            PORplotData[-1][jDay]/sitePlotNormData[jDay],2)))
#    perPeak = str('{0:g}'.format(100*round(
#        PORplotData[-1][jDay]/max(sitePlotNormData),2)))
#    if not math.isnan(PORplotData[-1][jDay]):
#        centile = ordinal(int(round(
#            stats.percentileofscore(
#                statsData[jDay],PORplotData[-1][jDay]),0)))
#    else:
#        centile = 'N/A'
#
#    dayOfPeak = sitePlotNormData.index(max(sitePlotNormData))
#    if jDay > dayOfPeak:
#        tense = r'Since'
#    else:
#        tense = r'Until'
#    daysToPeak = str(abs(jDay-dayOfPeak))
    annoData = ''#str(r"Current" + asterisk + ":<br>% of Normal - " +
#                  perNorm + r"%<br>" +
#                  r"% Normal Peak - " + perPeak + r"%<br>" +
#                  r"Days " + tense +
#                  r" Normal Peak - " + daysToPeak + r"<br>"
#                  r"Percentile Rank- " + centile)
    # Chart layout: NRCS logo watermark, footnote annotation, reversed legend
    # (so the shading group sits below the year traces), and a water-year
    # x-axis with Jan/Apr/July/WY zoom buttons plus a range slider.
    layout = go.Layout(
        images= [dict(
            source= "https://upload.wikimedia.org/wikipedia/commons/thumb/7/7f/US-NaturalResourcesConservationService-Logo.svg/2000px-US-NaturalResourcesConservationService-Logo.svg.png",
            xref="paper", yref="paper",
            x= 0, y= 0.9,
            xanchor="left", yanchor="bottom",
            sizex= 0.4, sizey= 0.1,
            opacity= 0.5, layer= "above" )],
        annotations=[dict(
            font=dict(size=10), text=annoText,
            x=0,y=-0.41,
            yref='paper',xref='paper',
            align='left', showarrow=False),
            dict(font=dict(size=10),
                 text=annoData,
                 x=0,y=0.9,
                 yref='paper',xref='paper',
                 align='left',
                 xanchor="left", yanchor="top",
                 showarrow=False)],
        legend=dict(traceorder='reversed',tracegroupgap=1,
                    bordercolor='#E2E2E2',borderwidth=2),
        showlegend = True,
        title='Average Daily Temperature at<br>' + siteName,
        height=622, width=700, autosize=False,
        yaxis=dict(title=r'Avg. Daily Temperature (°F)',hoverformat='.1f',
                   tickformat="0f"),
        xaxis=dict(
            range=sliderDates,
            tickformat="%b %e",
            rangeselector=dict(
                buttons=list([
                    dict(count=9, label='Jan', step='month',
                         stepmode='todate'),
                    dict(count=6, label='Apr', step='month',
                         stepmode='todate'),
                    dict(count=3, label='July', step='month',
                         stepmode='todate'),
                    dict(label='WY', step='all')
                ])
            ),
            rangeslider=dict(thickness=0.1),
            type='date'
        )
    )
    return {'data': trace, 'layout': layout}
def updtChart(basinName, basinSites):
    """Build a POR daily average temperature chart averaged across a basin.

    Downloads TAVG station metadata, keeps stations that belong to the basin
    and to the SNTL/SCAN/SNTLT networks, averages their daily records day by
    day, then builds the same POR envelope chart as the single-site variant.

    Parameters:
        basinName: basin display name used in the chart title.
        basinSites: collection of station ids belonging to the basin.

    Returns:
        dict with keys 'data' and 'layout'.
        NOTE(review): implicitly returns None when no stations match —
        confirm callers handle that.
    """
    basin = basinName
    print('Working on TAVG POR Chart for ' + basinName)
    # Stat-series accumulators and the Plotly trace list.
    statsData = []
    minData = []
    maxData = []
    meanData = []
    lowestData = []
    highestData = []
    lowData = []
    highData = []
    sliderDates = []
    meanData = []  # NOTE(review): redundant re-initialization
    trace = []
    plotData = []
    basinPlotData = []
    PORplotData = []
    # NOTE(review): basinPlotNormData is never populated in this function,
    # so the "Normal ('81-'10)" traces below are unreachable (compare with
    # the PREC variant, which fetches normals).
    basinPlotNormData = []
    validTrip = []
    networks = [r'SNTL', r'SCAN', r'SNTLT']
    sensor = r"TAVG"
    # Fetch station metadata and keep only basin stations in known networks.
    url = '/'.join([dataUrl, 'metadata', sensor, 'metadata.json'])
    with request.urlopen(url) as data:
        meta = json.loads(data.read().decode())
    meta[:] = [
        x for x in meta
        if str.split(x['stationTriplet'], ":")[2] in networks
        and str.split(x['stationTriplet'], ":")[0] in basinSites
    ]
    validTrip = [x['stationTriplet'] for x in meta]
    # One leap-year water year of x-axis dates (366 days).
    date_series = [
        date(2015, 10, 1) + datetime.timedelta(days=x)
        for x in range(0, 366)
    ]  #could use any year with a leap day
    if validTrip:
        # Earliest begin date across the basin's stations.
        beginDateDict = {}
        for siteMeta in meta:
            beginDateDict.update({
                str(siteMeta['stationTriplet']):
                dt.strptime(str(siteMeta['beginDate']), "%Y-%m-%d %H:%M:%S")
            })
        basinBeginDate = min(beginDateDict.values())
        sYear = basinBeginDate.year
        # NOTE(review): always-False condition (sYear was just assigned this
        # value); the water-year adjustment below is dead code.
        if basinBeginDate.year > sYear:
            if basinBeginDate.month < 10:
                sYear = basinBeginDate.year
            else:
                if basinBeginDate.month == 10 and basinBeginDate.day == 1:
                    sYear = basinBeginDate.year
                else:
                    sYear = basinBeginDate.year + 1
        sDate = date(sYear, 10, 1).strftime("%Y-%m-%d")
        # End at yesterday (today's value may be incomplete).
        eDate = (today.date() -
                 datetime.timedelta(days=1)).strftime("%Y-%m-%d")
        # Download each station's daily TAVG record, trimmed to Oct 1.
        data = []
        for triplet in validTrip:
            url = '/'.join([
                dataUrl, 'DAILY', sensor,
                triplet.replace(':', '_') + '.json'
            ])
            with request.urlopen(url) as d:
                jTemp = json.loads(d.read().decode())
            data.append(trimToOct1(jTemp))
        for dataSite in data:
            if dataSite:
                padMissingData(dataSite, sDate, eDate)
        # NOTE(review): np.float was removed in NumPy 1.20+.
        plotData = [np.array(x['values'], dtype=np.float) for x in data]
        with warnings.catch_warnings():
            warnings.simplefilter("ignore", category=RuntimeWarning)
            # Day-wise mean across all stations in the basin.
            basinPlotData = list(
                np.nanmean(np.array([i for i in plotData]), axis=0))
        # Chunk the basin-average series into 366-day water years.
        PORplotData = list([
            basinPlotData[i:i + 366]
            for i in range(0, len(basinPlotData), 366)
        ])
        allButCurrWY = list(PORplotData)
        del allButCurrWY[-1]  # exclude the partial current water year
        # statsData[d] = all historical basin-average values for WY day d.
        statsData = list(map(list, zip(*allButCurrWY)))
        if len(statsData[0]) > 1:
            # WY day 151 is Feb 29; borrow Feb 28 stats for non-leap years.
            statsData[151] = statsData[150]
            with warnings.catch_warnings():
                warnings.simplefilter("ignore", category=RuntimeWarning)
                minData = [np.nanmin(a) for a in statsData]
                maxData = [np.nanmax(a) for a in statsData]
                meanData = [np.nanmean(a) for a in statsData]
                lowestData = [np.nanpercentile(a, 10) for a in statsData]
                highestData = [np.nanpercentile(a, 90) for a in statsData]
                lowData = [np.nanpercentile(a, 30) for a in statsData]
                highData = [np.nanpercentile(a, 70) for a in statsData]
            # Slider window: current data plus a 30-day look-ahead, clipped
            # to the end of the water year.
            future_date_pad = 30
            if len(PORplotData[-1]) > 335:
                future_date_pad = 366 - len(PORplotData[-1]) - 1
            sliderDates = list(
                chain([(date_series[0])] +
                      [date_series[len(PORplotData[-1]) + future_date_pad]]))
        else:
            sliderDates = list(
                chain([(date_series[0])] + [date_series[-1]]))
        # One trace per water year: current WY solid black, older WYs hidden
        # behind the legend.
        if len(PORplotData) > 0:
            for index, i in enumerate(PORplotData):
                if index == len(PORplotData) - 1:
                    trace.extend([
                        go.Scatter(x=date_series,
                                   y=i,
                                   name=str(sYear + index + 1),
                                   visible=True,
                                   connectgaps=True,
                                   line=dict(color='rgb(0,0,0)'))
                    ])
                elif np.nansum(i) > 0:
                    trace.extend([
                        go.Scatter(x=date_series,
                                   y=i,
                                   name=str(sYear + index + 1),
                                   visible='legendonly',
                                   connectgaps=True)
                    ])
        # Percentile shading bands; order matters for fill='tonexty'.
        # NOTE(review): mode='line' is not a valid Plotly mode ('lines' is).
        if meanData:
            if lowestData:
                trace.extend([
                    go.Scatter(x=date_series,
                               y=minData,
                               legendgroup='centiles',
                               name=r'Min',
                               visible=True,
                               mode='line',
                               line=dict(width=0),
                               fillcolor='rgba(237,0,1,0.15)',
                               fill='none',
                               showlegend=False,
                               hoverinfo='none',
                               connectgaps=True)
                ])
                trace.extend([
                    go.Scatter(x=date_series,
                               y=lowestData,
                               legendgroup='centiles',
                               name=r'10%',
                               visible=True,
                               mode='line',
                               line=dict(width=0),
                               fillcolor='rgba(237,0,1,0.15)',
                               fill='tonexty',
                               showlegend=False,
                               hoverinfo='none',
                               connectgaps=True)
                ])
            if lowData:
                trace.extend([
                    go.Scatter(x=date_series,
                               y=lowData,
                               legendgroup='centiles',
                               name=r'30%',
                               visible=True,
                               mode='line',
                               line=dict(width=0),
                               fillcolor='rgba(237,237,0,0.15)',
                               fill='tonexty',
                               showlegend=False,
                               hoverinfo='none',
                               connectgaps=True)
                ])
            if highData:
                # Single legend entry for the whole shading group.
                trace.extend([
                    go.Scatter(x=date_series,
                               y=highData,
                               legendgroup='centiles',
                               name=r'Stats. Shading',
                               visible=True,
                               mode='line',
                               line=dict(width=0),
                               fillcolor='rgba(115,237,115,0.15)',
                               fill='tonexty',
                               showlegend=True,
                               hoverinfo='none',
                               connectgaps=True)
                ])
            if highestData:
                trace.extend([
                    go.Scatter(x=date_series,
                               y=highestData,
                               legendgroup='centiles',
                               name=r'90%',
                               visible=True,
                               mode='line',
                               line=dict(width=0),
                               fillcolor='rgba(0,237,237,0.15)',
                               fill='tonexty',
                               showlegend=False,
                               hoverinfo='none',
                               connectgaps=True)
                ])
                trace.extend([
                    go.Scatter(x=date_series,
                               y=maxData,
                               legendgroup='centiles',
                               name=r'Max',
                               visible=True,
                               mode='line',
                               line=dict(width=0),
                               fillcolor='rgba(1,0,237,0.15)',
                               fill='tonexty',
                               showlegend=False,
                               hoverinfo='none',
                               connectgaps=True)
                ])
        # Visible Min envelope line.
        if minData:
            trace.extend([
                go.Scatter(x=date_series,
                           y=minData,
                           name=r'Min',
                           visible=True,
                           hoverinfo='none',
                           connectgaps=True,
                           line=dict(color='rgba(237,0,0,0.5)'))
            ])
        # Official 1981-2010 normals (unreachable here — see note above).
        if basinPlotNormData:
            trace.extend([
                go.Scatter(x=date_series,
                           y=basinPlotNormData,
                           name=r"Normal ('81-'10)",
                           visible=True,
                           hoverinfo='none',
                           connectgaps=True,
                           line=dict(color='rgba(0,237,0,0.4)'))
            ])
        # POR mean, dashed/hidden when official normals are present.
        if meanData:
            if basinPlotNormData:
                trace.extend([
                    go.Scatter(x=date_series,
                               y=meanData,
                               name=r'Normal (POR)',
                               visible='legendonly',
                               connectgaps=True,
                               hoverinfo='none',
                               line=dict(color='rgba(0,237,0,0.4)',
                                         dash='dash'))
                ])
            else:
                trace.extend([
                    go.Scatter(x=date_series,
                               y=meanData,
                               name=r'Normal (POR)',
                               connectgaps=True,
                               visible=True,
                               hoverinfo='none',
                               line=dict(color='rgba(0,237,0,0.4)'))
                ])
        # Visible Max envelope line.
        if maxData:
            trace.extend([
                go.Scatter(x=date_series,
                           y=maxData,
                           name=r'Max',
                           visible=True,
                           hoverinfo='none',
                           connectgaps=True,
                           line=dict(color='rgba(0,0,237,0.4)'))
            ])
        annoText = str(
            r"Statistical shading breaks at 10th, 30th, 50th, 70th, and 90th Percentiles<br>Normal ('81-'10) - Official mean calculated from 1981 thru 2010 data <br>Normal (POR) - Unofficial mean calculated from Period of Record data <br>For more information visit: <a href='https://www.wcc.nrcs.usda.gov/normals/30year_normals_data.htm'>30 year normals calcuation description</a>"
        )
        # Layout: NRCS logo watermark, footnote, reversed legend, water-year
        # x-axis with Jan/Apr/July/WY zoom buttons and a range slider.
        layout = go.Layout(images=[
            dict(
                source=
                "https://upload.wikimedia.org/wikipedia/commons/thumb/7/7f/US-NaturalResourcesConservationService-Logo.svg/2000px-US-NaturalResourcesConservationService-Logo.svg.png",
                xref="paper",
                yref="paper",
                x=0,
                y=0.9,
                xanchor="left",
                yanchor="bottom",
                sizex=0.4,
                sizey=0.1,
                opacity=0.5,
                layer="above")
        ],
                           annotations=[
                               dict(font=dict(size=10),
                                    text=annoText,
                                    x=0,
                                    y=-0.41,
                                    yref='paper',
                                    xref='paper',
                                    align='left',
                                    showarrow=False)
                           ],
                           legend=dict(traceorder='reversed',
                                       tracegroupgap=1,
                                       bordercolor='#E2E2E2',
                                       borderwidth=2),
                           showlegend=True,
                           title='Average Daily Temperature in ' + str(basin),
                           height=622,
                           width=700,
                           autosize=False,
                           yaxis=dict(
                               title=r'Avg. Daily Temperature (°F)',
                               hoverformat=".1f",
                               tickformat="0f"),
                           xaxis=dict(range=sliderDates,
                                      tickformat="%b %e",
                                      rangeselector=dict(buttons=list([
                                          dict(count=9,
                                               label='Jan',
                                               step='month',
                                               stepmode='todate'),
                                          dict(count=6,
                                               label='Apr',
                                               step='month',
                                               stepmode='todate'),
                                          dict(count=3,
                                               label='July',
                                               step='month',
                                               stepmode='todate'),
                                          dict(label='WY', step='all')
                                      ])),
                                      rangeslider=dict(thickness=0.1),
                                      type='date'))
        return {'data': trace, 'layout': layout}
def updtChart(site_triplet, siteName):
    """Build a POR soil-moisture saturation (SMS) chart for one site.

    Downloads the site's daily SMS record for each configured sensor depth,
    converts values to percent of saturation (capped at 100), depth-averages
    them via `integrateSMS`, and builds the POR envelope chart.

    Parameters:
        site_triplet: AWDB station triplet string ("id:state:network").
        siteName: human-readable site name used in the chart title.

    Returns:
        dict with keys 'data' and 'layout'.

    NOTE(review): relies on module-level names `meta`, `dataUrl`, `today`,
    `trimToOct1`, `padMissingData`, `getSaturation`, `integrateSMS` and
    `fillMissingData` — confirm they exist in this module.
    """
    print('Working on SMS POR Chart for ' + siteName)
    # Stat-series accumulators and the Plotly trace list.
    statsData = []
    minData = []
    maxData = []
    meanData = []
    lowestData = []
    highestData = []
    lowData = []
    highData = []
    sliderDates = []
    meanData = []  # NOTE(review): redundant re-initialization
    trace = []
    plotData = []
    sitePlotData = []
    PORplotData = []
    validTrip = [site_triplet]
    sensor = r"SMS"
    # One leap-year water year of x-axis dates (366 days).
    date_series = [
        date(2015, 10, 1) + datetime.timedelta(days=x)
        for x in range(0, 366)
    ]  #could use any year with a leap day
    if validTrip:
        # NOTE(review): every iteration below uses the same key
        # (site_triplet), so only the LAST siteMeta's beginDate survives —
        # this probably should key on siteMeta['stationTriplet'] like the
        # other chart variants.
        beginDateDict = {}
        for siteMeta in meta:
            beginDateDict.update({
                str(site_triplet):
                dt.strptime(str(siteMeta['beginDate']), "%Y-%m-%d %H:%M:%S")
            })
        siteBeginDate = min(beginDateDict.values())
        sYear = siteBeginDate.year
        # NOTE(review): always-False condition; this block is dead code.
        if siteBeginDate.year > sYear:
            if siteBeginDate.month < 10:
                sYear = siteBeginDate.year
            else:
                if siteBeginDate.month == 10 and siteBeginDate.day == 1:
                    sYear = siteBeginDate.year
                else:
                    sYear = siteBeginDate.year + 1
        sDate = date(sYear, 10, 1).strftime("%Y-%m-%d")
        # End at yesterday (today's value may be incomplete).
        eDate = (today.date() -
                 datetime.timedelta(days=1)).strftime("%Y-%m-%d")
        # dataDict maps sensor depth -> {triplet: [% saturation, ...]}.
        dataDict = {}
        sensorDepths = [-8, -20]  #[-2,-4,-8,-20,-40] #
        for sensorDepth in sensorDepths:
            data = []
            for triplet in validTrip:
                # NOTE(review): the URL does not vary with sensorDepth —
                # confirm the endpoint actually returns per-depth data.
                url = '/'.join([
                    dataUrl, 'DAILY', sensor,
                    triplet.replace(':', '_') + '.json'
                ])
                with request.urlopen(url) as d:
                    jTemp = json.loads(d.read().decode())
                data.append(trimToOct1(jTemp))
            depthData = {}
            for dataSite in data:
                siteData = []
                # NOTE(review): hasattr(dict, 'values') is always True for a
                # dict (it finds the .values method) — this guard probably
                # intended a key/emptiness check.
                if hasattr(dataSite, r'values'):
                    if dataSite['values']:
                        # Saturation reference for this depth at this site.
                        sat = getSaturation(sensorDepth,
                                            str(dataSite['stationTriplet']))
                        # sat = np.nanmax([float(c) for c in dataSite.values if c != None])
                        padMissingData(dataSite, sDate, eDate)
                        # NOTE(review): np.float removed in NumPy 1.20+.
                        siteData = np.array(dataSite['values'],
                                            dtype=np.float)
                        # Convert to percent of saturation, capped at 100
                        # (NaNs pass through: NaN comparisons are False).
                        siteData[:] = [
                            100 if 100 * (c / float(sat)) > 100 else 100 *
                            (c / float(sat)) for c in siteData
                        ]
                        depthData.update(
                            {str(dataSite['stationTriplet']):
                             list(siteData)})
            dataDict.update({sensorDepth: dict(depthData)})
            depthData.clear()
        plotData = {}
        # plotData = calcSMSAvg(dataDict)
        # Depth-average the per-depth series into one series per site.
        plotData = integrateSMS(dataDict)
        # numDays = max(len(l) for l in plotData.values())
        # Interpolate short gaps (up to 30 days) in each site's series.
        for siteID, smsValues in plotData.items():
            plotData.update(
                {siteID: fillMissingData(plotData[siteID], 30)})
        smsPlotData = list(plotData.values())
        sitePlotData = np.array(smsPlotData[0], dtype=np.float)
        # Chunk the flat record into 366-day water years.
        PORplotData = list([
            sitePlotData[i:i + 366]
            for i in range(0, len(sitePlotData), 366)
        ])
        allButCurrWY = list(PORplotData)
        del allButCurrWY[-1]  # exclude the partial current water year
        # statsData[d] = all historical values for WY day d.
        statsData = list(map(list, zip(*allButCurrWY)))
        if len(statsData[0]) > 1:
            # WY day 151 is Feb 29; borrow Feb 28 stats for non-leap years.
            statsData[151] = statsData[150]
            with warnings.catch_warnings():
                warnings.simplefilter("ignore", category=RuntimeWarning)
                minData = [np.nanmin(a) for a in statsData]
                maxData = [np.nanmax(a) for a in statsData]
                meanData = [np.nanmean(a) for a in statsData]
                lowestData = [np.nanpercentile(a, 10) for a in statsData]
                highestData = [np.nanpercentile(a, 90) for a in statsData]
                lowData = [np.nanpercentile(a, 30) for a in statsData]
                highData = [np.nanpercentile(a, 70) for a in statsData]
            # Slider window: current data plus a 30-day look-ahead, clipped
            # to the end of the water year.
            future_date_pad = 30
            if len(PORplotData[-1]) > 334:
                future_date_pad = 366 - len(PORplotData[-1]) - 1
            sliderDates = list(
                chain([(date_series[0])] +
                      [date_series[len(PORplotData[-1]) + future_date_pad]]))
        else:
            sliderDates = list(
                chain([(date_series[0])] + [date_series[-1]]))
        # One trace per water year: current WY solid black, older WYs hidden.
        if len(PORplotData) > 0:
            for index, i in enumerate(PORplotData):
                if index == len(PORplotData) - 1:
                    trace.extend([
                        go.Scatter(x=date_series,
                                   y=i,
                                   name=str(sYear + index + 1),
                                   visible=True,
                                   connectgaps=True,
                                   line=dict(color='rgb(0,0,0)'))
                    ])
                elif np.nansum(i) > 0:
                    trace.extend([
                        go.Scatter(x=date_series,
                                   y=i,
                                   name=str(sYear + index + 1),
                                   visible='legendonly',
                                   connectgaps=True)
                    ])
        # Percentile shading bands; order matters for fill='tonexty'.
        # NOTE(review): mode='line' is not a valid Plotly mode ('lines' is).
        if meanData:
            if lowestData:
                trace.extend([
                    go.Scatter(x=date_series,
                               y=minData,
                               legendgroup='centiles',
                               name=r'Min',
                               visible=True,
                               mode='line',
                               line=dict(width=0),
                               fillcolor='rgba(237,0,1,0.15)',
                               fill='none',
                               showlegend=False,
                               hoverinfo='none',
                               connectgaps=True)
                ])
                trace.extend([
                    go.Scatter(x=date_series,
                               y=lowestData,
                               legendgroup='centiles',
                               name=r'10%',
                               visible=True,
                               mode='line',
                               line=dict(width=0),
                               fillcolor='rgba(237,0,1,0.15)',
                               fill='tonexty',
                               showlegend=False,
                               hoverinfo='none',
                               connectgaps=True)
                ])
            if lowData:
                trace.extend([
                    go.Scatter(x=date_series,
                               y=lowData,
                               legendgroup='centiles',
                               name=r'30%',
                               visible=True,
                               mode='line',
                               line=dict(width=0),
                               fillcolor='rgba(237,237,0,0.15)',
                               fill='tonexty',
                               showlegend=False,
                               hoverinfo='none',
                               connectgaps=True)
                ])
            if highData:
                # Single legend entry for the whole shading group.
                trace.extend([
                    go.Scatter(x=date_series,
                               y=highData,
                               legendgroup='centiles',
                               name=r'Stats. Shading',
                               visible=True,
                               mode='line',
                               line=dict(width=0),
                               fillcolor='rgba(115,237,115,0.15)',
                               fill='tonexty',
                               showlegend=True,
                               hoverinfo='none',
                               connectgaps=True)
                ])
            if highestData:
                trace.extend([
                    go.Scatter(x=date_series,
                               y=highestData,
                               legendgroup='centiles',
                               name=r'90%',
                               visible=True,
                               mode='line',
                               line=dict(width=0),
                               fillcolor='rgba(0,237,237,0.15)',
                               fill='tonexty',
                               showlegend=False,
                               hoverinfo='none',
                               connectgaps=True)
                ])
                trace.extend([
                    go.Scatter(x=date_series,
                               y=maxData,
                               legendgroup='centiles',
                               name=r'Max',
                               visible=True,
                               mode='line',
                               line=dict(width=0),
                               fillcolor='rgba(1,0,237,0.15)',
                               fill='tonexty',
                               showlegend=False,
                               hoverinfo='none',
                               connectgaps=True)
                ])
        # Visible Min envelope line.
        if minData:
            trace.extend([
                go.Scatter(x=date_series,
                           y=minData,
                           name=r'Min',
                           visible=True,
                           hoverinfo='none',
                           connectgaps=True,
                           line=dict(color='rgba(237,0,0,0.5)'))
            ])
        # POR mean ("normal") line — no published normals for SMS.
        if meanData:
            trace.extend([
                go.Scatter(x=date_series,
                           y=meanData,
                           name=r'Normal (POR)',
                           connectgaps=True,
                           visible=True,
                           hoverinfo='none',
                           line=dict(color='rgba(0,237,0,0.4)'))
            ])
        # Visible Max envelope line.
        if maxData:
            trace.extend([
                go.Scatter(x=date_series,
                           y=maxData,
                           name=r'Max',
                           visible=True,
                           hoverinfo='none',
                           connectgaps=True,
                           line=dict(color='rgba(0,0,237,0.4)'))
            ])
        annoText = str(
            r"Statistical shading breaks at 10th, 30th, 50th, 70th, and 90th Percentiles<br>Normal (POR) - Unofficial mean calculated from Period of Record data <br>For more information visit: <a href='https://www.wcc.nrcs.usda.gov/normals/30year_normals_data.htm'>30 year normals calcuation description</a>"
        )
        # Layout: NRCS logo watermark, footnote, reversed legend, water-year
        # x-axis with Jan/Apr/July/WY zoom buttons and a range slider.
        layout = go.Layout(images=[
            dict(
                source=
                "https://upload.wikimedia.org/wikipedia/commons/thumb/7/7f/US-NaturalResourcesConservationService-Logo.svg/2000px-US-NaturalResourcesConservationService-Logo.svg.png",
                xref="paper",
                yref="paper",
                x=0,
                y=0.9,
                xanchor="left",
                yanchor="bottom",
                sizex=0.4,
                sizey=0.1,
                opacity=0.5,
                layer="above")
        ],
                           annotations=[
                               dict(font=dict(size=10),
                                    text=annoText,
                                    x=0,
                                    y=-0.41,
                                    yref='paper',
                                    xref='paper',
                                    align='left',
                                    showarrow=False)
                           ],
                           legend=dict(traceorder='reversed',
                                       tracegroupgap=1,
                                       bordercolor='#E2E2E2',
                                       borderwidth=2),
                           showlegend=True,
                           title='Average Soil Saturation in ' + siteName,
                           height=622,
                           width=700,
                           autosize=False,
                           yaxis=dict(title=r'Percent Saturation (%)',
                                      hoverformat='.1f',
                                      tickformat="0f"),
                           xaxis=dict(range=sliderDates,
                                      tickformat="%b %e",
                                      rangeselector=dict(buttons=list([
                                          dict(count=9,
                                               label='Jan',
                                               step='month',
                                               stepmode='todate'),
                                          dict(count=6,
                                               label='Apr',
                                               step='month',
                                               stepmode='todate'),
                                          dict(count=3,
                                               label='July',
                                               step='month',
                                               stepmode='todate'),
                                          dict(label='WY', step='all')
                                      ])),
                                      rangeslider=dict(thickness=0.1),
                                      type='date'))
        return {'data': trace, 'layout': layout}
def updtChart(basinName, basinSites):
    """Build a basin-average precipitation (PREC) chart with WY projections.

    Downloads PREC normals and daily records for the basin's stations,
    averages them day by day, computes POR percentile envelopes, and in
    addition projects the remainder of the current water year by re-anchoring
    each historical trajectory at today's accumulated value
    (`createPRECProjTrace`).

    Parameters:
        basinName: basin display name used in the chart title.
        basinSites: collection of station ids belonging to the basin.

    Returns:
        dict with keys 'data' and 'layout'.
        NOTE(review): implicitly returns None when no stations match.

    NOTE(review): relies on module-level names `dataUrl`, `today`,
    `trimToOct1`, `padMissingData`, `createPRECProjTrace`, `ordinal`,
    `math` and `stats` (scipy.stats) — confirm they exist in this module.
    """
    basin = basinName
    print('Working on PREC Projection Chart for ' + basinName)
    # Stat-series accumulators and the Plotly trace list.
    statsData = []
    minData = []
    maxData = []
    meanData = []
    lowestData = []
    highestData = []
    lowData = []
    highData = []
    sliderDates = []
    meanData = []  # NOTE(review): redundant re-initialization
    trace = []
    plotData = []
    basinPlotData = []
    PORplotData = []
    basinNormData = []
    basinPlotNormData = []
    validTrip = []
    networks = [r'SNTL',r'SCAN',r'SNTLT']
    sensor = r"PREC"
    # Fetch station metadata and keep only basin stations in known networks.
    url = '/'.join([dataUrl,'metadata', sensor, 'metadata.json'])
    with request.urlopen(url) as data:
        meta = json.loads(data.read().decode())
    meta[:] = [x for x in meta
               if str.split(x['stationTriplet'],":")[2] in networks
               and str.split(x['stationTriplet'],":")[0] in basinSites]
    validTrip = [x['stationTriplet'] for x in meta]
    # One leap-year water year of x-axis dates (366 days).
    date_series = [date(2015,10,1) + datetime.timedelta(days=x)
                   for x in range(0, 366)] #could use any year with a leap day
    if validTrip:
        # Download each station's published daily normals.
        normData = []
        for triplet in validTrip:
            url = '/'.join([dataUrl,'normals', 'DAILY', sensor,
                            triplet.replace(':','_') + '.json'])
            with request.urlopen(url) as d:
                jTemp = json.loads(d.read().decode())
            normData.append(jTemp)
        # NOTE(review): np.float was removed in NumPy 1.20+.
        basinNormData = [np.array(x['values'], dtype=np.float)
                         for x in normData if x['values']]
        if basinNormData:
            # Day-wise mean of the stations' normals.
            basinPlotNormData = list(
                np.nanmean(np.array([i for i in basinNormData]), axis=0))
        # Keep only stations that actually have published normals.
        validTrip[:] = [x for index, x in enumerate(validTrip)
                        if normData[index]['values']]
        # Earliest begin date across the remaining stations.
        beginDateDict = {}
        for siteMeta in meta:
            beginDateDict.update(
                {str(siteMeta['stationTriplet']) :
                 dt.strptime(str(siteMeta['beginDate']),
                             "%Y-%m-%d %H:%M:%S")})
        basinBeginDate = min(beginDateDict.values())
        sYear = basinBeginDate.year
        # NOTE(review): always-False condition (sYear was just assigned this
        # value); the water-year adjustment below is dead code.
        if basinBeginDate.year > sYear:
            if basinBeginDate.month < 10:
                sYear = basinBeginDate.year
            else:
                if basinBeginDate.month == 10 and basinBeginDate.day == 1:
                    sYear = basinBeginDate.year
                else:
                    sYear = basinBeginDate.year + 1
        sDate = date(sYear, 10, 1).strftime("%Y-%m-%d")
        # End at yesterday (today's value may be incomplete).
        eDate = (today.date() -
                 datetime.timedelta(days=1)).strftime("%Y-%m-%d")
        # Download each station's daily PREC record, trimmed to Oct 1.
        data = []
        for triplet in validTrip:
            url = '/'.join([dataUrl,'DAILY', sensor,
                            triplet.replace(':','_') + '.json'])
            with request.urlopen(url) as d:
                jTemp = json.loads(d.read().decode())
            data.append(trimToOct1(jTemp))
        for dataSite in data:
            if dataSite:
                padMissingData(dataSite,sDate,eDate)
        plotData = [np.array(x['values'], dtype=np.float) for x in data]
        with warnings.catch_warnings():
            warnings.simplefilter("ignore", category=RuntimeWarning)
            # Day-wise mean across all stations in the basin.
            basinPlotData = list(np.nanmean(
                np.array([i for i in plotData]), axis=0))
        # Chunk the basin-average series into 366-day water years.
        PORplotData = list([basinPlotData[i:i+366]
                            for i in range(0,len(basinPlotData),366)])
        allButCurrWY = list(PORplotData)
        del allButCurrWY[-1]  # exclude the partial current water year
        # statsData[d] = all historical basin-average values for WY day d.
        statsData = list(map(list,zip(*allButCurrWY)))
        if len(statsData[0]) > 1:
            # WY day 151 is Feb 29; borrow Feb 28 stats for non-leap years.
            statsData[151] = statsData[150]
            with warnings.catch_warnings():
                warnings.simplefilter("ignore", category=RuntimeWarning)
                minData = [np.nanmin(a) for a in statsData]
                maxData = [np.nanmax(a) for a in statsData]
                meanData = [np.nanmean(a) for a in statsData]
                lowestData = [np.nanpercentile(a,10) for a in statsData]
                highestData = [np.nanpercentile(a,90) for a in statsData]
                lowData = [np.nanpercentile(a,30) for a in statsData]
                highData = [np.nanpercentile(a,70) for a in statsData]
            # NOTE(review): both branches produce the same full-WY range —
            # the if/else is redundant here (unlike the other chart variants).
            sliderDates = list(chain([(date_series[0])] +
                                     [date_series[-1]]))
        else:
            sliderDates = list(chain([(date_series[0])] +
                                     [date_series[-1]]))
        # Build projections: re-anchor each completed historical WY at
        # today's accumulated value so it continues from the current point.
        jDay = len(PORplotData[-1])-1  # 0-based water-year day of "today"
        lastValue = PORplotData[-1][-1]
        nanList = [np.nan]*jDay
        projData = [createPRECProjTrace(a,jDay,lastValue,nanList)
                    for a in allButCurrWY]
        # statsProj[d] = all projected values for WY day d.
        statsProj = list(map(list,zip(*projData)))
        cleanStatsProj = list(statsProj)
        # NOTE(review): if cleanStatsProj is empty, the *Proj names below are
        # never bound and `if medianProj:` further down raises NameError.
        if cleanStatsProj:
            with warnings.catch_warnings():
                warnings.simplefilter("ignore", category=RuntimeWarning)
                minProj = [np.nanmin(a) for a in cleanStatsProj]
                maxProj = [np.nanmax(a) for a in cleanStatsProj]
                medianProj = [np.nanpercentile(a,50) for a in cleanStatsProj]
                lowestProj = [np.nanpercentile(a,10) for a in cleanStatsProj]
                highestProj = [np.nanpercentile(a,90) for a in cleanStatsProj]
                lowProj = [np.nanpercentile(a,30) for a in cleanStatsProj]
                highProj = [np.nanpercentile(a,70) for a in cleanStatsProj]
        # One trace per water year; prior years are drawn as their projected
        # continuation (projData), hidden behind the legend.
        if len(PORplotData) > 0:
            for index, i in enumerate(PORplotData):
                if index == len(PORplotData)-1:
                    trace.extend(
                        [go.Scatter(
                            x=date_series,y=i,
                            name=str(sYear + index + 1),
                            visible=True,connectgaps=True,
                            line=dict(color='rgb(0,0,0)'))])
                elif np.nansum(i) > 0:
                    trace.extend(
                        [go.Scatter(
                            x=date_series,
                            y=projData[index],
                            name=str(sYear + index + 1),
                            visible='legendonly',connectgaps=True)])
        # Projection percentile lines for the remainder of the water year.
        if medianProj:
            if minProj:
                trace.extend(
                    [go.Scatter(x=date_series, y=minProj,
                                name=r'Min Proj',
                                visible=True,connectgaps=True,
                                line=dict(color='rgba(237,0,0,0.4)'))])
            if lowestProj:
                trace.extend(
                    [go.Scatter(x=date_series, y=lowestProj,
                                name=r'10% Proj',
                                visible=True,connectgaps=True,
                                line=dict(color='rgba(237,0,1,0.4)'))])
            if lowProj:
                trace.extend(
                    [go.Scatter(x=date_series, y=lowProj,
                                name=r'30% Proj',
                                visible=True,connectgaps=True,
                                line=dict(color='rgba(0,237,0,0.4)'))])
            if medianProj:
                trace.extend(
                    [go.Scatter(x=date_series, y=medianProj,
                                name=r'50% Proj',connectgaps=True,
                                visible=True,
                                line=dict(color='rgba(0,237,0,0.4)'))])
            if highProj:
                trace.extend(
                    [go.Scatter(x=date_series, y=highProj,
                                name=r'70% Proj',
                                visible=True,connectgaps=True,
                                line=dict(color='rgba(115,237,115,0.4)'))])
            if highestProj:
                trace.extend(
                    [go.Scatter(x=date_series, y=highestProj,
                                connectgaps=True,
                                name=r'90% Proj',visible=True,
                                line=dict(color='rgba(1,237,237,0.4)'))])
            if maxProj:
                trace.extend(
                    [go.Scatter(x=date_series, y=maxProj,
                                name=r'Max Proj',
                                visible=True,connectgaps=True,
                                line=dict(color='rgba(0,0,237,0.4)'))])
        # Percentile shading bands; order matters for fill='tonexty'.
        # NOTE(review): mode='line' is not a valid Plotly mode ('lines' is).
        if meanData:
            if lowestData:
                trace.extend(
                    [go.Scatter(x=date_series,y=minData
                                ,legendgroup='centiles',name=r'Min',
                                visible=True,mode='line',
                                line=dict(width=0),connectgaps=True,
                                fillcolor='rgba(237,0,1,0.15)',
                                fill='none',showlegend=False,
                                hoverinfo='none')])
                trace.extend(
                    [go.Scatter(x=date_series,y=lowestData
                                ,legendgroup='centiles',name=r'10%',
                                visible=True,mode='line',
                                line=dict(width=0),connectgaps=True,
                                fillcolor='rgba(237,0,1,0.15)',
                                fill='tonexty',showlegend=False,
                                hoverinfo='none')])
            if lowData:
                trace.extend(
                    [go.Scatter(x=date_series,y=lowData,
                                legendgroup='centiles',name=r'30%',
                                visible=True,mode='line',
                                line=dict(width=0),connectgaps=True,
                                fillcolor='rgba(237,237,0,0.15)',
                                fill='tonexty',showlegend=False,
                                hoverinfo='none')])
            if highData:
                # Single legend entry for the whole shading group.
                trace.extend(
                    [go.Scatter(x=date_series,y=highData,
                                legendgroup='centiles',
                                name=r'Stats. Shading',
                                visible=True,mode='line',
                                line=dict(width=0),connectgaps=True,
                                fillcolor='rgba(115,237,115,0.15)',
                                fill='tonexty',showlegend=True,
                                hoverinfo='none')])
            if highestData:
                trace.extend(
                    [go.Scatter(x=date_series,y=highestData,
                                legendgroup='centiles',connectgaps=True,
                                name=r'90%',visible=True
                                ,mode='line',line=dict(width=0),
                                fillcolor='rgba(0,237,237,0.15)',
                                fill='tonexty',showlegend=False,
                                hoverinfo='none')])
                trace.extend(
                    [go.Scatter(x=date_series,y=maxData
                                ,legendgroup='centiles',name=r'Max',
                                visible=True,mode='line',
                                line=dict(width=0),connectgaps=True,
                                fillcolor='rgba(1,0,237,0.15)',
                                fill='tonexty',showlegend=False,
                                hoverinfo='none')])
        # Official 1981-2010 normals for the basin, if published.
        if basinPlotNormData:
            trace.extend(
                [go.Scatter(x=date_series,
                            y=basinPlotNormData,
                            name=r"Normal ('81-'10)",connectgaps=True,
                            visible=True,hoverinfo='none',
                            line=dict(color='rgba(0,237,0,0.4)'))])
        # POR mean, dashed/hidden when official normals are present.
        if meanData:
            if basinPlotNormData:
                trace.extend(
                    [go.Scatter(x=date_series,
                                y=meanData,name=r'Normal (POR)',
                                visible='legendonly',
                                hoverinfo='none', connectgaps=True,
                                line=dict(color='rgba(0,237,0,0.4)',
                                          dash='dash'))])
            else:
                trace.extend(
                    [go.Scatter(x=date_series,y=meanData,
                                name=r'Normal (POR)',connectgaps=True,
                                visible=True,hoverinfo='none',
                                line=dict(color='rgba(0,237,0,0.4)'))])
        annoText = str(r"Statistical shading breaks at 10th, 30th, 50th, 70th, and 90th Percentiles<br>Normal ('81-'10) - Official mean calculated from 1981 thru 2010 data <br>Normal (POR) - Unofficial mean calculated from Period of Record data <br>For more information visit: <a href='https://www.wcc.nrcs.usda.gov/normals/30year_normals_data.htm'>30 year normals calcuation description</a>")
        # Fall back to POR means when no published normals exist; flag the
        # substitution with an asterisk in the "Current" annotation.
        asterisk = ''
        if not basinPlotNormData:
            basinPlotNormData = meanData
            annoText = annoText + '<br>*POR data used to calculate Normals since no published 30-year normals available for this basin'
            asterisk = '*'
        # Percent of normal for today (guard against division by zero early
        # in the water year when accumulated normal precip is still 0).
        if basinPlotNormData[jDay] == 0:
            perNorm = r'N/A'
        else:
            perNorm = str('{0:g}'.format(100*round(
                PORplotData[-1][jDay]/basinPlotNormData[jDay],2)))
        # Percent of the normal end-of-year (peak) accumulation.
        perPeak = str('{0:g}'.format(100*round(
            PORplotData[-1][jDay]/max(basinPlotNormData),2)))
        # Percentile rank of today's value within the historical record.
        if not math.isnan(PORplotData[-1][jDay]):
            centile = ordinal(int(round(
                stats.percentileofscore(
                    statsData[jDay],PORplotData[-1][jDay]),0)))
        else:
            centile = 'N/A'
        # Days until/since the normal peak (for cumulative PREC this is
        # effectively the end of the water year, hence the label below).
        dayOfPeak = basinPlotNormData.index(max(basinPlotNormData))
        if jDay > dayOfPeak:
            tense = r'Since'
        else:
            tense = r'Until'
        daysToPeak = str(abs(jDay-dayOfPeak))
        annoData = str(r"Current" + asterisk + ":<br>% of Normal - " +
                       perNorm + r"%<br>" +
                       r"% of Yearly Avg - " + perPeak + r"%<br>" +
                       r"Days " + tense +
                       r" End of WY - " + daysToPeak + r"<br>"
                       r"Percentile Rank- " + centile)
        # Layout: NRCS logo watermark, footnote + "Current" annotations,
        # reversed legend, water-year x-axis with zoom buttons and slider.
        layout = go.Layout(
            images= [dict(
                source= "https://upload.wikimedia.org/wikipedia/commons/thumb/7/7f/US-NaturalResourcesConservationService-Logo.svg/2000px-US-NaturalResourcesConservationService-Logo.svg.png",
                xref="paper", yref="paper",
                x= 0, y= 0.9,
                xanchor="left", yanchor="bottom",
                sizex= 0.4, sizey= 0.1,
                opacity= 0.5, layer= "above" )],
            annotations=[dict(
                font=dict(size=10), text=annoText,
                x=0,y=-0.41,
                yref='paper',xref='paper',
                align='left', showarrow=False),
                dict(font=dict(size=10),
                     text=annoData,
                     x=0,y=0.9,
                     yref='paper',xref='paper',
                     align='left',
                     xanchor="left", yanchor="top",
                     showarrow=False)],
            legend=dict(traceorder='reversed',tracegroupgap=1,
                        bordercolor='#E2E2E2',borderwidth=2),
            showlegend = True,
            title='Precipitation Projections in<br> ' + str(basin),
            height=622, width=700, autosize=False,
            yaxis=dict(title=r'Precipitation (in.)',
                       hoverformat=".1f",
                       tickformat="0f"),
            xaxis=dict(
                range=sliderDates,
                tickformat="%b %e",
                rangeselector=dict(
                    buttons=list([
                        dict(count=9, label='Jan', step='month',
                             stepmode='todate'),
                        dict(count=6, label='Apr', step='month',
                             stepmode='todate'),
                        dict(count=3, label='July', step='month',
                             stepmode='todate'),
                        dict(label='WY', step='all')
                    ])
                ),
                rangeslider=dict(thickness=0.1),
                type='date'
            )
        )
        return {'data': trace, 'layout': layout}
def updtChart(site_meta):
    """Build a plotly soil-moisture (SMS) contour chart for one SNOTEL-style site.

    Parameters
    ----------
    site_meta : dict
        Station metadata record. Keys read here: 'name', 'stationTriplet',
        'beginDate', 'countyName', 'elevation', 'latitude', 'longitude'.

    Returns
    -------
    dict
        ``{'data': [...plotly traces...], 'layout': go.Layout}`` suitable for
        a dash/plotly figure: an SMS heatmap plus optional daily-precip and
        SWE-melt bar traces on a secondary axis.

    Notes
    -----
    Relies on module-level names defined elsewhere in this file:
    ``awdb`` (SOAP client), ``dataUrl``, ``today``, ``trimToOct1``,
    ``getSaturation``, plus ``go``/``np``/``dt``/``date``/``chain``/
    ``request``/``json`` imports.
    """
    siteName = site_meta['name']
    site_triplet = site_meta['stationTriplet']
    meta = [site_meta]
    siteTriplet = site_triplet.split(':')
    state = siteTriplet[1]  # triplet format is id:state:network
    sensor = r"SMS"
    date_series = []
    print('Working on SMS Contour Chart for ' + siteName)
    # Could use any year with a leap day; 2015-10-01 starts a 366-day water year.
    date_series = [date(2015, 10, 1) + datetime.timedelta(days=x)
                   for x in range(0, 366)]
    # Red (dry) -> blue (saturated) diverging colorscale for % saturation.
    cscale = [[0.0, 'rgb(165,0,38)'],
              [0.1111111111111111, 'rgb(215,48,39)'],
              [0.2222222222222222, 'rgb(244,109,67)'],
              [0.3333333333333333, 'rgb(253,174,97)'],
              [0.4444444444444444, 'rgb(254,224,144)'],
              [0.5555555555555556, 'rgb(224,243,248)'],
              [0.6666666666666666, 'rgb(171,217,233)'],
              [0.7777777777777778, 'rgb(116,173,209)'],
              [0.8888888888888888, 'rgb(69,117,180)'],
              [1.0, 'rgb(49,54,149)']]
    eDate = today.date().strftime("%Y-%m-%d")
    # Water year starts Oct 1: months Oct-Dec belong to the next water year.
    if today.month > 9:
        sDateWY = date(today.year, 10, 1).strftime("%Y-%m-%d")
    else:
        sDateWY = date(today.year - 1, 10, 1).strftime("%Y-%m-%d")
    # Collect the distinct measured soil depths (ordinal-1 DAILY SMS sensors)
    # reported by the AWDB web service for each station.
    depths = {}
    for site in meta:
        elements = awdb.service.getStationElements(
            site['stationTriplet'], sDateWY, eDate)
        siteDepths = []
        for element in elements:
            if (element.elementCd == sensor and element.ordinal == 1
                    and element.duration == "DAILY"):
                siteDepths.append(element.heightDepth)
        depths[site['stationTriplet']] = siteDepths
    SMSPlotData = []
    if meta:
        for validSite in meta:
            trace = []
            barPrec = []
            barSWE = []
            depthVals = []
            plotData = []
            SMSPlotData = []
            date_series = []
            y = []
            for depth in depths[validSite['stationTriplet']]:
                y.extend([depth.value])
            # y-axis range: pad below the deepest sensor so the contour has
            # headroom; depths are negative-down, hence min().
            if len(y) > 1:
                smsDepth = [float(min(y)) + (0.5 * float((y[-1] - y[-2]))), 0]
            else:
                smsDepth = [float(min(y)) * 1.5, 0]
            for i in y:
                triplet = validSite['stationTriplet']
                for depth in depths[triplet]:
                    data = []
                    if depth.value == i and depth.unitCd == r'in':
                        sDate = date(
                            dt.strptime(validSite['beginDate'],
                                        "%Y-%m-%d %H:%M:%S").year,
                            10, 1).strftime("%Y-%m-%d")
                        url = '/'.join([dataUrl, 'DAILY', sensor,
                                        triplet.replace(':', '_') + '.json'])
                        with request.urlopen(url) as d:
                            jTemp = json.loads(d.read().decode())
                        data.append(trimToOct1(jTemp))
                        # Prefer the begin date reported in the data file itself.
                        if data[0]['beginDate']:
                            sDate = date(
                                dt.strptime(data[0]['beginDate'],
                                            "%Y-%m-%d %H:%M:%S").year,
                                10, 1).strftime("%Y-%m-%d")
                        date_series = [
                            dt.strptime(sDate, "%Y-%m-%d")
                            + datetime.timedelta(days=x)
                            for x in range(
                                0,
                                (dt.strptime(eDate, "%Y-%m-%d")
                                 - dt.strptime(sDate, "%Y-%m-%d")).days + 1)]
                        # Slider shows full POR; default view is the last year.
                        sliderDates = list(chain([(date_series[0])]
                                                 + [date_series[-1]]))
                        if len(date_series) > 365:
                            currDates = list(chain([date_series[-365]]
                                                   + [date_series[-1]]))
                        else:
                            currDates = list(chain([date_series[0]]
                                                   + [date_series[-1]]))
                        # NOTE(review): data[0] is a dict, so hasattr(..., 'values')
                        # always finds the dict.values method and this branch
                        # always runs; the intent was probably
                        # `'values' in data[0]` — confirm before changing.
                        if hasattr(data[0], r'values'):
                            sat = getSaturation(i, validSite['stationTriplet'])
                            # np.float was removed in NumPy 1.24; the builtin
                            # float is the documented replacement.
                            siteDepthData = np.array(data[0]['values'],
                                                     dtype=float)
                            # Convert raw readings to % of saturation, capped
                            # at 100; missing/zero readings become NaN.
                            plotData = [100 * (c / int(sat)) if c else np.nan
                                        for c in siteDepthData]
                            plotData[:] = [100 if c and c > 100 else c
                                           for c in plotData]
                            # Duplicate the shallowest series at depth 0 so the
                            # heatmap extends to the surface.
                            if i == max(y):
                                SMSPlotData.extend([plotData])
                                depthVals.extend([0])
                            SMSPlotData.extend([plotData])
                            depthVals.extend([i])
            # Best-effort fetch of daily precipitation; fall back to an empty
            # series so the chart still renders without the bar trace.
            dataPrec = []
            url = '/'.join([dataUrl, 'DAILY', 'PREC',
                            triplet.replace(':', '_') + '.json'])
            try:
                with request.urlopen(url) as d:
                    jTemp = json.loads(d.read().decode())
                dataPrec.append(trimToOct1(jTemp))
            except Exception:
                dataPrec = [{"values": []}]
            # might need to deal with mismatched start date between SMS and PREC
            # Best-effort fetch of SWE (snow water equivalent), same fallback.
            dataSWE = []
            url = '/'.join([dataUrl, 'DAILY', 'WTEQ',
                            triplet.replace(':', '_') + '.json'])
            try:
                with request.urlopen(url) as d:
                    jTemp = json.loads(d.read().decode())
                dataSWE.append(trimToOct1(jTemp))
            except Exception:
                dataSWE = [{"values": []}]
            maxPrecRng = 5
            if dataPrec[0]['values']:
                # Daily increments from cumulative precip; trailing 0 keeps the
                # bar series the same length as date_series.
                precValues = np.array(dataPrec[0]['values'], dtype=float)
                precDelta = list(np.diff(precValues))
                if precDelta:
                    maxPrecRng = list(chain([0], [3 * np.nanmax(precDelta)]))
                precDelta.extend([0])
                barPrec = go.Bar(x=date_series, y=precDelta, yaxis='y2',
                                 showlegend=True,
                                 marker=dict(color='rgba(0,0,0,0.60)'),
                                 name='Daily Precip.')
            if dataSWE[0]['values']:
                # Negative SWE deltas are melt; plot their magnitude.
                sweValues = np.array(dataSWE[0]['values'], dtype=float)
                sweDelta = list(np.diff(sweValues))
                sweDeltaNeg = [round(-1 * c, 1) if c < 0 else np.nan
                               for c in sweDelta]
                sweDeltaNeg.extend([0])
                barSWE = go.Bar(x=date_series, y=sweDeltaNeg, yaxis='y2',
                                showlegend=True,
                                marker=dict(color='rgba(0,0,0,0.35)'),
                                name='SWE melt')
            if data[0]['values']:
                trace = go.Heatmap(z=SMSPlotData, x=date_series, y=depthVals,
                                   connectgaps=True, zsmooth='best',
                                   colorbar=dict(title='% Saturation',
                                                 titleside='right', x=1.125),
                                   colorscale=cscale, hoverinfo='none')
            annoText = str(validSite['countyName'] + r' County, ' + state
                           + r'. Elev = '
                           + str(int(round(validSite['elevation'], 0)))
                           + r', Lat = '
                           + str(round(validSite['latitude'], 3))
                           + r', Long = '
                           + str(round(validSite['longitude'], 3)))
            layout = go.Layout(
                images=[dict(
                    source="https://upload.wikimedia.org/wikipedia/commons/thumb/7/7f/US-NaturalResourcesConservationService-Logo.svg/2000px-US-NaturalResourcesConservationService-Logo.svg.png",
                    xref="paper", yref="paper",
                    x=0, y=0.9,
                    xanchor="left", yanchor="bottom",
                    sizex=0.4, sizey=0.1,
                    opacity=0.75, layer="above")],
                annotations=[dict(
                    font=dict(size=10),
                    text=annoText,
                    x=0, y=-0.31,
                    yref='paper', xref='paper',
                    align='left',
                    showarrow=False)],
                showlegend=True,
                legend=dict(orientation="h", x=0.5, y=1.1),
                barmode='stack',
                title='Soil Moisture at ' + siteName,
                height=622, width=700, autosize=False,
                yaxis=dict(title=r'Soil Depth (in.)', range=smsDepth,
                           tickformat="0f", hoverformat='.1f',),
                yaxis2=dict(
                    title=r'Daily Incremental Precip./Snow Melt (in.)',
                    overlaying='y', side='right', anchor='free', position=1,
                    range=maxPrecRng, tickformat="0f", hoverformat='.1f',),
                xaxis=dict(
                    range=currDates,
                    rangeselector=dict(
                        buttons=list([
                            dict(count=1, label='1m', step='month',
                                 stepmode='backward'),
                            dict(count=6, label='6m', step='month',
                                 stepmode='backward'),
                            dict(count=1, label='1y', step='year',
                                 stepmode='backward'),
                            dict(count=3, label='3y', step='year',
                                 stepmode='backward'),
                            dict(label='POR', step='all')
                        ])
                    ),
                    rangeslider=dict(thickness=0.1, range=sliderDates),
                    type='date'
                )
            )
    # Keep only the traces that were actually built (empty lists are falsy).
    plots = [trace, barPrec, barSWE]
    figPlots = []
    for plot in plots:
        if plot:
            figPlots.extend([plot])
    return {'data': figPlots, 'layout': layout}