def GM_org(table, dt, fname):
    """Render a global map triptych (SLA, SST, velocity magnitude) at date `dt`.

    Fetches the fields from `table` via the uspGlobalMap stored procedure,
    reshapes them onto the global quarter-degree grid and hands them to
    bokehGM. `fname` is currently unused (kept for signature compatibility).
    """
    ############# App-Level Query #############
    #query = prepareGMQuery(table, dt)
    #df = db.dbFetch(query)
    ###########################################
    ######### Stored Procedure Query ##########
    query = 'EXEC uspGlobalMap ?, ?, ?'
    # BUG FIX: the original requested only 'sla' (a second `args` assignment
    # clobbered the first) while the body read 'sst', 'u' and 'v' from the
    # result — a guaranteed KeyError. Request exactly the fields consumed.
    fields = ['sla', 'sst', 'u', 'v']
    args = [table, ', '.join(fields), dt]
    df = db.dbFetchStoredProc(query, args)
    df = pd.DataFrame.from_records(df, columns=fields)
    ###########################################
    shape = (720, 1440)   # global -- quarter degree
    #shape = (200, 200)   # NPG
    # Series.reshape was removed from pandas; reshape the underlying ndarray.
    sla = df['sla'].values.reshape(shape)
    sst = df['sst'].values.reshape(shape)
    u = df['u'].values
    v = df['v'].values
    # velocity magnitude: sqrt(u^2 + v^2)
    vel = np.power(np.power(u, 2) + np.power(v, 2), 0.5)
    vel = vel.reshape(shape)
    bokehGM(data=[sla, sst, vel],
            subject=['SLA (m) ' + dt, 'SST (C) ' + dt, 'Velocity (m/s) ' + dt])
    return
def sectionMap(tables, variabels, dt, lat1, lat2, lon1, lon2, depth1, depth2,
               fname, exportDataFlag):
    """Build a depth-resolved section map for each table/variable pair.

    For every pair, runs the uspSectionMap stored procedure over the given
    lat/lon/depth box at date `dt`, reshapes the values onto a
    (lat, lon, depth) grid, optionally exports each frame to data/<...>.csv,
    and finally renders everything with bokehSec.
    """
    '''
    ############# App-Level Query #############
    query = prepareGMQuery(table, dt)
    df = db.dbFetch(query)
    ###########################################
    '''
    ######### Stored Procedure Query ##########
    data, subs = [], []
    for table, variable in zip(tables, variabels):
        query = 'EXEC uspSectionMap ?, ?, ?, ?, ?, ?, ?, ?, ?'
        args = [table, variable, dt, lat1, lat2, lon1, lon2, depth1, depth2]
        records = db.dbFetchStoredProc(query, args)
        df = pd.DataFrame.from_records(
            records, columns=['time', 'lat', 'lon', 'depth', variable])
        lat = df.lat.unique()
        lon = df.lon.unique()
        depth = df.depth.unique()
        grid = (len(lat), len(lon), len(depth))
        data.append(df[variable].values.reshape(grid))
        unit = ' [' + db.getVar(table, variable).iloc[0]['Unit'] + ']'
        subs.append(variable + unit + ' ' + dt)
        if exportDataFlag:   # export data
            dirPath = 'data/'
            if not os.path.exists(dirPath):
                os.makedirs(dirPath)
            exportData(df, path=dirPath + fname + '_' + table + '_' +
                       variable + '.csv')
    # lat/lon/depth below come from the LAST pair processed (as before).
    bokehSec(data=data, subject=subs, fname=fname,
             lat=lat, lon=lon, depth=depth, variabels=variabels)
    return
def RegionalMap(tables, variabels, dt, lat1, lat2, lon1, lon2, arg8, arg9,
                fname, exportDataFlag):
    """Render a regional map for each table/variable pair at date `dt`.

    `arg8`/`arg9` are per-table extra stored-procedure parameters (the value
    'ignore' is mapped to None); for Wind tables arg9 reads as an hour, for
    Pisces tables as a depth in meters — used only for subplot titles.
    Vorticity grids are transposed before plotting. Optionally exports each
    frame to data/RM_<table>_<variable>.csv.
    """
    '''
    ############# App-Level Query #############
    query = prepareGMQuery(table, dt)
    df = db.dbFetch(query)
    ###########################################
    '''
    ######### Stored Procedure Query ##########
    data = []
    subs = []
    # Normalize sentinel 'ignore' values to None before querying.
    for i in range(len(tables)):
        if arg8[i].find('ignore') != -1:
            arg8[i] = None
        if arg9[i].find('ignore') != -1:
            arg9[i] = None
    for i in range(len(tables)):
        args = [tables[i], variabels[i], dt, lat1, lat2, lon1, lon2,
                arg8[i], arg9[i]]
        query = 'EXEC uspRegionalMap ?, ?, ?, ?, ?, ?, ?, ?, ?'
        df = db.dbFetchStoredProc(query, args)
        df = pd.DataFrame.from_records(
            df, columns=['time', 'lat', 'lon', variabels[i]])
        lat = df.lat.unique()
        lon = df.lon.unique()
        shape = (len(lat), len(lon))
        if tables[i].find('Vort') != -1:
            data.append(np.transpose(df[variabels[i]].values.reshape(shape)))
        else:
            data.append(df[variabels[i]].values.reshape(shape))
        unit = ' [' + db.getVar(tables[i], variabels[i]).iloc[0]['Unit'] + ']'
        # BUG FIX: `sub` was left unassigned when arg8[i] was set but the
        # table was neither Wind nor Pisces (NameError on the first
        # iteration, stale title afterwards). Always start from a default.
        sub = variabels[i] + unit + ' ' + dt
        if arg8[i] is not None:
            if tables[i].find('Wind') != -1:
                sub = variabels[i] + unit + ' ' + dt + ' ' + arg9[i] + 'H'
            if tables[i].find('Pisces') != -1:
                sub = variabels[i] + unit + ' ' + dt + ' Depth: ' + \
                    arg9[i] + ' m'
        subs.append(sub)
        if exportDataFlag:   # export data
            dirPath = 'data/'
            if not os.path.exists(dirPath):
                os.makedirs(dirPath)
            exportData(df, path=dirPath + 'RM_' + tables[i] + '_' +
                       variabels[i] + '.csv')
    # lat/lon below come from the LAST pair processed (pre-existing behavior).
    bokehGM(data=data, subject=subs, fname=fname, lat=lat, lon=lon,
            variabels=variabels)
    return
def section(tablesName, field, dt1, dt2, lat1, lat2, lon1, lon2, depth1,
            depth2):
    """Fetch raw section data for `field` from `tablesName`.

    Runs the uspSectionMap stored procedure over the given time window and
    lat/lon/depth box. Returns the raw fetch result, or None when the
    table/field name fails validation.
    """
    if not validate.stringCheck(tablesName, field):
        return None
    query = 'EXEC uspSectionMap ?, ?, ?, ?, ?, ?, ?, ?, ?, ?'
    params = [tablesName, field, dt1, dt2, lat1, lat2, lon1, lon2,
              depth1, depth2]
    return db.dbFetchStoredProc(query, params)
def GM(tables, variabels, dt, arg4, fname):
    """Render a global map for each table/variable pair at date `dt`.

    Wind and Pisces tables go through their dedicated stored procedures,
    which take the extra `arg4` parameter (hour for Wind, depth for Pisces);
    everything else uses uspGlobalMap. Vorticity grids are transposed before
    plotting. Results are rendered with bokehGM.
    """
    '''
    ############# App-Level Query #############
    query = prepareGMQuery(table, dt)
    df = db.dbFetch(query)
    ###########################################
    '''
    ######### Stored Procedure Query ##########
    data = []
    subs = []
    for i in range(len(tables)):
        args = [tables[i], variabels[i], dt]
        query = 'EXEC uspGlobalMap ?, ?, ?'
        if tables[i].find('Wind') != -1:
            args = [tables[i], variabels[i], dt, arg4]
            query = 'EXEC uspGlobalWind ?, ?, ?, ?'
        if tables[i].find('Pisces') != -1:
            args = [tables[i], variabels[i], dt, arg4]
            query = 'EXEC uspGlobalPisces ?, ?, ?, ?'
        df = db.dbFetchStoredProc(query, args)
        #df = pd.DataFrame.from_records(df, columns=variabels[i])
        df = pd.DataFrame.from_records(df)
        # Infer an (x, 2x) global grid from the record count, then override
        # for products with a known fixed grid. (A dead pre-loop
        # shape = (720, 1440) assignment was removed — it was always
        # recomputed here.)
        x = np.sqrt((len(df) / 2))
        x = int(x)
        shape = (x, 2 * x)
        if tables[i].find('Vort') != -1:
            shape = (2 * x, x)
        if tables[i].find('Wind') != -1:
            shape = (641, 1440)
        if tables[i].find('Pisces') != -1:
            shape = (359, 720)
        if tables[i].find('Vort') != -1:
            data.append(np.transpose(df.values.reshape(shape)))
        else:
            data.append(df.values.reshape(shape))
        # BUG FIX: `sub` was left unassigned when arg4 was non-empty but the
        # table was neither Wind nor Pisces. Always start from a default.
        sub = variabels[i] + ' ' + dt
        if len(arg4) > 0:
            if tables[i].find('Wind') != -1:
                sub = variabels[i] + ' ' + dt + ' ' + arg4 + 'H'
            if tables[i].find('Pisces') != -1:
                sub = variabels[i] + ' ' + dt + ' Depth: ' + arg4 + ' m'
        subs.append(sub)
    ###########################################
    bokehGM(data=data, subject=subs, fname=fname)
    return
def _clean_stat(values, stat):
    """Apply `stat` (e.g. np.nanmean) to `values`, returning NaN when the
    series is empty, the computation fails, or the result looks like a
    fill-value extreme (|result| > 1e30)."""
    try:
        result = stat(values) if len(values) > 0 else np.nan
    except Exception:
        result = np.nan
    if abs(result) > 1e30:   ## remove outliers (extremes)
        result = np.nan
    return result


def depthProfile_iterative(table, field, dt, lat1, lat2, lon1, lon2, depth1,
                           depth2, fname, exportDataFlag=False):
    """Build a depth profile of `field` by querying one depth level at a time.

    For each level between depth1 and depth2, runs the uspDepthProfile
    stored procedure over the lat/lon box at date `dt` and records the
    NaN-aware mean and standard deviation of the returned values.

    Returns (depths, y, y_std) as numpy arrays.

    BUG FIX: the original referenced `exportDataFlag` without declaring it
    as a parameter (NameError unless a module-level global happened to
    exist). It is now an explicit keyword parameter defaulting to False —
    callers that relied on a truthy global must now pass it explicitly.
    """
    y = np.array([])
    y_std = np.array([])
    depths = depthLevels(depth1, depth2)
    for depth in depths:
        ######### Stored Procedure Query ##########
        query = 'EXEC uspDepthProfile ?, ?, ?, ?, ?, ?, ?, ?'
        args = [table, field, dt, str(lat1), str(lat2), str(lon1), str(lon2),
                str(depth)]
        df = db.dbFetchStoredProc(query, args)
        df = pd.DataFrame.from_records(df,
                                       columns=['lat', 'lon', 'depth', field])
        ###########################################
        y = np.append(y, _clean_stat(df[field], np.nanmean))
        y_std = np.append(y_std, _clean_stat(df[field], np.nanstd))
    if exportDataFlag:
        exportData(depths, y, y_std, table, field, lat1, lat2, lon1, lon2,
                   fname)
    return depths, y, y_std
def match(geomTable, bkgTable, startDate, endDate, lat1, lat2, lon1, lon2,
          depth1, depth2, ftleField, ftleValue, bkgField, margin, bkgFlag):
    """Co-locate FTLE geometry with a background field.

    Runs the uspftleMatch stored procedure over the given time window and
    lat/lon/depth box and returns the raw fetch result. Numeric bounds are
    stringified for the stored-procedure call.
    """
    query = 'EXEC uspftleMatch ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?'
    params = [geomTable, ftleField, str(ftleValue), bkgTable, bkgField,
              startDate, endDate, str(lat1), str(lat2), str(lon1), str(lon2),
              str(depth1), str(depth2), str(margin), bkgFlag]
    return db.dbFetchStoredProc(query, params)
def match(geomTable, bkgTable, startDate, enDate, lat1, lat2, lon1, lon2,
          extV, extVV, ftleField, ftleValue, bkgField, margin):
    """Co-locate FTLE geometry with a background field.

    Runs the 14-parameter uspftleMatch stored procedure and returns a
    DataFrame with columns ['time', 'lat', 'lon', bkgField].

    NOTE: the end-date parameter is (historically) spelled `enDate`; the
    name is kept so existing keyword callers keep working.
    """
    ######### Stored Procedure Query ##########
    query = 'EXEC uspftleMatch ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?'
    # BUG FIX: the body referenced an undefined `endDate`; the parameter is
    # named `enDate`.
    args = [geomTable, ftleField, ftleValue, bkgTable, bkgField, startDate,
            enDate, str(lat1), str(lat2), str(lon1), str(lon2), str(margin),
            extV, extVV]
    df = db.dbFetchStoredProc(query, args)
    df = pd.DataFrame.from_records(df,
                                   columns=['time', 'lat', 'lon', bkgField])
    ###########################################
    return df
def call(table, field, dt1, dt2, lat1, lat2, lon1, lon2, extV, extVV,
         savePlot, saveData):
    """Fetch a regional map of `field` at date `dt1` and time the query.

    Optionally saves the frame to ./data/<field>_<dt1>_<rand>.csv and/or
    plots it. Returns the elapsed query time in seconds. (`dt2` is accepted
    for signature compatibility but unused.)
    """
    ############################################
    # BUG FIX: time.clock() was removed in Python 3.8; perf_counter() is the
    # modern monotonic wall-clock timer for elapsed-time measurement.
    tic = time.perf_counter()
    args = [table, field, dt1, lat1, lat2, lon1, lon2, extV, extVV]
    query = 'EXEC uspRegionalMap ?, ?, ?, ?, ?, ?, ?, ?, ?'
    df = db.dbFetchStoredProc(query, args)
    elapsed = time.perf_counter() - tic   # elapsed time
    ############################################
    df = pd.DataFrame.from_records(df, columns=['time', 'lat', 'lon', field])
    lat = df.lat.unique()
    lon = df.lon.unique()
    shape = (len(lat), len(lon))
    data = df[field].values.reshape(shape)
    if saveData:
        makedir('./data')
        df.to_csv('./data/%s_%s_%4d.csv' % (field, dt1,
                                            random.randint(1, 101)),
                  index=False)   # export
    if savePlot:
        plot(field, dt1, lat, lon, data)   # plot
    return elapsed
def timeSeries(table, field, startDate, endDate, lat1, lat2, lon1, lon2,
               extV, extVV, extV2, extVV2, fmt='%Y-%m-%d', dt=24 * 60):
    """Return the (time, mean, std) series of `field` over a lat/lon box.

    Tables flagged by iterative() are fetched level-by-level via
    timeSeries_iterative; all others use the uspTimeSeries stored procedure.
    Either way the series is passed through fillGaps before returning
    (ts, y, y_std).
    """
    if iterative(table, field, dt):
        ts, y, y_std = timeSeries_iterative(table, field, startDate, endDate,
                                            lat1, lat2, lon1, lon2,
                                            extV, extVV, extV2, extVV2,
                                            fmt, dt)
    else:
        ######### Stored Procedure Query ##########
        query = 'EXEC uspTimeSeries ?, ?, ?, ?, ?, ?, ?, ?, ?, ?'
        args = [table, field, startDate, endDate,
                str(lat1), str(lat2), str(lon1), str(lon2), extV, extVV]
        records = db.dbFetchStoredProc(query, args)
        frame = pd.DataFrame.from_records(
            records, columns=['time', 'lat', 'lon', field, field + '_std'])
        ts = pd.to_datetime(frame['time'])
        y = frame[field]
        y_std = frame[field + '_std']
        ###########################################
    return fillGaps(ts, y, y_std, startDate, endDate, fmt, dt)
import sys
sys.path.append('../../')
import db
import pandas as pd
import matplotlib.pyplot as plt


def plot(lat, lon, data):
    """Render the fetched regional field; reads lon1/lon2/lat1/lat2, field
    and dt from module globals (script-style usage)."""
    # BUG FIX: matplotlib's imshow accepts origin='upper'/'lower' only;
    # 'lower' puts row 0 (southernmost latitude) at the bottom of the plot.
    plt.imshow(data,
               extent=[lon1, lon2, lat1, lat2],
               origin='lower',
               vmin=0,
               vmax=1e-4)
    plt.title(field + '\n ' + dt)
    plt.colorbar()
    plt.show()


# Ad-hoc script: fetch one Pisces iron field and plot it.
table = 'tblPisces_NRT'
field = 'Fe'
dt = '2017-06-03'
lat1, lat2, lon1, lon2 = 10, 55, -180, -100
extV, extVV = 'depth', '0.494024991989'

args = [table, field, dt, lat1, lat2, lon1, lon2, extV, extVV]
query = 'EXEC uspRegionalMap ?, ?, ?, ?, ?, ?, ?, ?, ?'
df = db.dbFetchStoredProc(query, args)
df = pd.DataFrame.from_records(df, columns=['time', 'lat', 'lon', field])
lat = df.lat.unique()
lon = df.lon.unique()
shape = (len(lat), len(lon))
data = df[field].values.reshape(shape)
#df.to_csv(field+'.csv', index=False)  # export data if needed!
plot(lat, lon, data)
def plotMonthly(tables, variables, lat1, lat2, lon1, lon2, extV, extVV,
                extV2, extVV2, exportDataFlag, marker='-', msize=30,
                clr='purple'):
    """Plot the monthly climatology of each table/variable pair.

    For every pair, queries uspMonthly for each month 1..12 over the lat/lon
    box, drops fill-value extremes (|v| >= 1e30), and plots the NaN-aware
    monthly mean as a bokeh line+circle panel. Optionally exports the series
    via exportData. Writes embed/<fname>.html unless running inline
    (``fname`` and ``inline`` are module-level globals — not parameters).
    """
    p = []
    lw = 2
    w = 800
    h = 400
    months = range(1, 13)
    TOOLS = 'pan,wheel_zoom,zoom_in,zoom_out,box_zoom, undo,redo,reset,tap,save,box_select,poly_select,lasso_select'
    for i in tqdm(range(len(tables)), desc='overall'):
        monthly = np.array([])
        monthly_std = np.array([])
        for mon in tqdm(months, desc=variables[i]):
            mon = int(mon)
            args = [tables[i], variables[i], mon, lat1, lat2, lon1, lon2,
                    extV[i], extVV[i]]
            query = 'EXEC uspMonthly ?, ?, ?, ?, ?, ?, ?, ?, ?'
            records = db.dbFetchStoredProc(query, args)
            df = pd.DataFrame.from_records(
                records, columns=['lat', 'lon', variables[i]])
            ############# removing outlier values ################
            vals = df[variables[i]]
            vals = vals[abs(vals) < 1e30]   # remove outliers
            ######################################################
            monthly = np.append(monthly, np.nanmean(vals))
            monthly_std = np.append(monthly_std, np.nanstd(vals))
        if exportDataFlag:
            exportData(months, monthly, monthly_std, tables[i], variables[i],
                       lat1, lat2, lon1, lon2, extV[i], extVV[i],
                       extV2[i], extVV2[i])
        panel = figure(tools=TOOLS, toolbar_location="above",
                       plot_width=w, plot_height=h)
        #panel.xaxis.axis_label = 'Month'
        panel.yaxis.axis_label = variables[i] + ' [' + db.getVar(
            tables[i], variables[i]).iloc[0]['Unit'] + ']'
        leg = variables[i]
        if extV[i] is not None:
            leg = leg + ' ' + extV[i] + ': ' + ('%d' % float(extVV[i]))
            # assumes the ' m' (meters) suffix applies only when a depth-like
            # extV is shown — TODO(review): confirm nesting against callers
            if tables[i].find('Pisces') != -1:
                leg = leg + ' ' + 'm'
        fill_alpha = 0.07
        if tables[i].find('Pisces') != -1:
            fill_alpha = 0.3
        cr = panel.circle(months, monthly, fill_color="grey",
                          hover_fill_color="firebrick",
                          fill_alpha=fill_alpha, hover_alpha=0.3,
                          line_color=None, hover_line_color="white",
                          legend=leg, size=msize)
        panel.line(months, monthly, line_color=clr, line_width=lw,
                   legend=leg)
        panel.add_tools(HoverTool(tooltips=None, renderers=[cr],
                                  mode='hline'))
        p.append(panel)
    dirPath = 'embed/'
    if not os.path.exists(dirPath):
        os.makedirs(dirPath)
    if not inline:   ## if jupyter is not the caller
        output_file(dirPath + fname + ".html", title="Monthly Trend")
    show(column(p))
    return
def plotMonthly(tables, variables, lat1, lat2, lon1, lon2, extV, extVV,
                extV2, extVV2, marker='-', msize=30, clr='purple'):
    """Plot the monthly climatology of each table/variable pair.

    For every pair, queries uspMonthly for each month 1..12 over the lat/lon
    box, drops fill-value extremes (|v| >= 1e30), and plots the NaN-aware
    monthly mean as a bokeh line+circle panel. Writes embed/<fname>.html
    (``fname`` is a module-level global — not a parameter) and opens it.

    Cleanup: a dead first TOOLS assignment (immediately overwritten) and a
    trailing commented-out components/embed block were removed; extV2/extVV2
    remain accepted but unused, for signature compatibility.
    """
    p = []
    lw = 2
    w = 800
    h = 400
    months = range(1, 13)
    TOOLS = 'pan,wheel_zoom,zoom_in,zoom_out,box_zoom, undo,redo,reset,tap,save,box_select,poly_select,lasso_select'
    for i in range(len(tables)):
        monthly = np.array([])
        monthly_std = np.array([])
        for mon in months:
            mon = int(mon)
            args = [tables[i], variables[i], mon, lat1, lat2, lon1, lon2,
                    extV[i], extVV[i]]
            query = 'EXEC uspMonthly ?, ?, ?, ?, ?, ?, ?, ?, ?'
            df = db.dbFetchStoredProc(query, args)
            df = pd.DataFrame.from_records(
                df, columns=['lat', 'lon', variables[i]])
            ############# removing outlier values ################
            vals = df[variables[i]]
            vals = vals[abs(vals) < 1e30]   # remove outliers
            ######################################################
            monthly = np.append(monthly, np.nanmean(vals))
            monthly_std = np.append(monthly_std, np.nanstd(vals))
        p1 = figure(tools=TOOLS, toolbar_location="above", plot_width=w,
                    plot_height=h)
        #p1.xaxis.axis_label = 'Month'
        p1.yaxis.axis_label = variables[i] + ' [' + db.getVar(
            tables[i], variables[i]).iloc[0]['Unit'] + ']'
        leg = variables[i]
        if extV[i] is not None:
            leg = leg + ' ' + extV[i] + ': ' + ('%d' % float(extVV[i]))
            # assumes the ' m' (meters) suffix applies only when a depth-like
            # extV is shown — TODO(review): confirm nesting against callers
            if tables[i].find('Pisces') != -1:
                leg = leg + ' ' + 'm'
        fill_alpha = 0.07
        if tables[i].find('Pisces') != -1:
            fill_alpha = 0.3
        cr = p1.circle(months, monthly, fill_color="grey",
                       hover_fill_color="firebrick", fill_alpha=fill_alpha,
                       hover_alpha=0.3, line_color=None,
                       hover_line_color="white", legend=leg, size=msize)
        p1.line(months, monthly, line_color=clr, line_width=lw, legend=leg)
        p1.add_tools(HoverTool(tooltips=None, renderers=[cr], mode='hline'))
        #p1.xaxis.major_label_orientation = pi/4
        #p1.xaxis.visible = False
        p.append(p1)
    output_file("embed/" + fname + ".html", title="Monthly Trend")
    show(column(p))
    return