def catalog(dirPath):
    """Export the full variable catalog to ``<dirPath>/catalog.csv``.

    Fetches the catalog query results from the database, makes sure the
    target directory exists, and writes the CSV via ``exportData``.

    Fixes over the original:
    - the filename is joined with ``os.path.join`` instead of plain string
      concatenation, so a ``dirPath`` without a trailing separator no longer
      produces a mangled path like ``datacatalog.csv``;
    - ``exist_ok=True`` avoids the check-then-create race of the original
      ``os.path.exists`` / ``os.makedirs`` pair.
    """
    query = catQuery()
    df = db.dbFetch(query)
    os.makedirs(dirPath, exist_ok=True)
    exportData(df, path=os.path.join(dirPath, 'catalog.csv'))
    return
def isClimatology(tableName, varName):
    """Return the Climatology flag of the first row matching the given
    table / variable pair (joined through tblVariables -> tblDatasets)."""
    query = (
        "SELECT Climatology FROM tblVariables "
        "JOIN tblDatasets ON [tblVariables].Dataset_ID=[tblDatasets].ID "
        "WHERE Table_Name='%s' AND Short_Name='%s' "
    ) % (tableName, varName)
    result = db.dbFetch(query)
    return result.Climatology[0]
def getFronts(table, startDate, endDate, lat1, lat2, lon1, lon2, ftleField, ftleValue):
    """Fetch front (FTLE) records for the given space/time window and
    return them as a DataFrame with time/lat/lon columns."""
    records = db.dbFetch(
        prepareQuery(table, startDate, endDate, lat1, lat2, lon1, lon2, ftleField, ftleValue)
    )
    return pd.DataFrame.from_records(records, columns=['time', 'lat', 'lon'])
def getCruiseTrack(DB_Cruise, source, cruise):
    """Load a cruise track.

    When ``DB_Cruise`` is truthy, fetch it from the database via
    ``CruiseQuery``; otherwise treat ``source`` as a local CSV path.
    """
    if DB_Cruise:
        return db.dbFetch(CruiseQuery(source, cruise))
    return pd.read_csv(source)
def depthProfile(table, field, date1, lat1, lat2, lon1, lon2, depth1, depth2, fname):
    """Fetch a depth profile of ``field`` (mean and std per depth level)
    for the given date and lat/lon/depth bounds.

    Returns a (depth, mean, std) triple of columns from the fetched frame.
    """
    df = db.dbFetch(
        prepareDepthProfileQuery(table, field, date1, lat1, lat2, lon1, lon2, depth1, depth2)
    )
    depths, means, stds = df['depth'], df[field], df[field + '_std']
    # NOTE(review): exportDataFlag is read from module scope, not a parameter —
    # confirm it is defined wherever this module is loaded.
    if exportDataFlag:
        exportData(depths, means, stds, table, field, date1, lat1, lat2, lon1, lon2, fname)
    return depths, means, stds
def hasField(table, field):
    """Return True if ``table`` has a column named ``field``
    (checked against SQL Server's sys.columns)."""
    query = "SELECT 1 FROM sys.columns WHERE Name=N'%s' AND Object_ID=Object_ID(N'%s') " % (
        field, table)
    return len(db.dbFetch(query)) > 0
def isGrid(table, variable):
    """Return True if the variable is on a regular grid, False if its
    spatial resolution is marked 'irregular', or None when the
    table/variable pair is not found."""
    query = (
        "SELECT Spatial_Res_ID, RTRIM(LTRIM(Spatial_Resolution)) AS Spatial_Resolution FROM tblVariables "
        "JOIN tblSpatial_Resolutions ON [tblVariables].Spatial_Res_ID=[tblSpatial_Resolutions].ID "
        + "WHERE Table_Name='%s' AND Short_Name='%s' " % (table, variable)
    )
    df = db.dbFetch(query)
    if len(df) < 1:
        return None
    return 'irregular' not in df.Spatial_Resolution[0].lower()
def getSrcDF(DB, source):
    """Load source data and parse its 'time' column to datetimes.

    When ``DB`` is truthy, ``source`` is passed to ``sourceQuery`` and the
    result is fetched from the database. Otherwise ``source`` is a local
    file path; ``.xlsx`` files are read from the 'data' sheet and ``.csv``
    files with ``pd.read_csv``.

    Fix: the original opened the xlsx file with ``open(source, 'rb')`` and
    never closed the handle; the file is now opened in a ``with`` block.

    Note: an unrecognized extension leaves ``srcDF`` as None, so the
    ``srcDF['time']`` access below raises TypeError — same as the original.
    """
    srcDF = None
    if DB:
        query = sourceQuery(source)
        srcDF = db.dbFetch(query)
    else:
        filename, fileExtension = os.path.splitext(source)
        if fileExtension == '.xlsx':
            with open(source, 'rb') as fh:
                srcDF = pd.read_excel(fh, sheet_name='data')
        elif fileExtension == '.csv':
            srcDF = pd.read_csv(source)
    srcDF['time'] = pd.to_datetime(srcDF['time'])
    return srcDF
def interpolateUV(t, lat, lon):
    """Interpolate the (u, v) velocity components at time ``t`` and
    position (``lat``, ``lon``).

    A single fetched record is used directly; multiple (or zero) records
    go through ``weightedMean``. Missing components fall back to 0.

    Fix: None-checks now use identity (``is None``) instead of ``== None``,
    which is unidiomatic and misbehaves when the value is array-like.
    """
    query = prepareQuery(t, lat, lon)
    df = db.dbFetch(query)
    df = pd.DataFrame.from_records(df, columns=['time', 'lat', 'lon', 'u', 'v'])
    if len(df) == 1:
        u, v = np.array(df['u'])[0], np.array(df['v'])[0]
    else:
        # covers both the multi-record and the empty-result case,
        # exactly as the original did
        u, v = weightedMean(df, lat, lon)
    if u is None:
        u = 0
    if v is None:
        v = 0
    return u, v
def match_temptable(geomTable, bkgTable, startDate, lat1, lat2, lon1, lon2, ftleField, ftleValue, bkgField, margin):
    # Colocalize a background field with detected front geometry using
    # SQL Server temp tables: #tblGeometry holds front pixels whose
    # ftleField exceeds ftleValue; #tblBkg holds the background pixels for
    # the same date and bounding box. A background pixel is kept when a
    # geometry pixel exists at the same time within +/- margin degrees in
    # both lat and lon.
    # NOTE: startDate and the lat/lon bounds are deliberately repeated in
    # args — once for each temp table; margin appears twice (lat and lon).
    args = (geomTable, startDate, lat1, lat2, lon1, lon2, ftleField, ftleValue, bkgTable, startDate, lat1, lat2, lon1, lon2, bkgField, margin, margin)
    query = ''
    # rebuild the scratch tables for this request
    query = query + "DROP TABLE IF EXISTS #tblGeometry; "
    query = query + "DROP TABLE IF EXISTS #tblBkg; "
    query = query + "SELECT * INTO #tblGeometry FROM %s WHERE [time]='%s' AND lat>=%f AND lat<=%f AND lon>=%f AND lon<=%f AND %s>=%f; "
    query = query + "SELECT * INTO #tblBkg FROM %s WHERE [time]='%s' AND lat>=%f AND lat<=%f AND lon>=%f AND lon<=%f; "
    query = query + "SELECT [#tblBkg].[time], [#tblBkg].lat, [#tblBkg].lon, [#tblBkg].%s FROM #tblBkg WHERE EXISTS "
    query = query + "(SELECT [#tblGeometry].[time], [#tblGeometry].lat, [#tblGeometry].lon FROM #tblGeometry WHERE "
    query = query + "[#tblGeometry].[time]=[#tblBkg].[time] AND ABS([#tblGeometry].lat-([#tblBkg].lat))<=%f AND ABS([#tblGeometry].lon-([#tblBkg].lon))<=%f)"
    query = query % args
    df = db.dbFetch(query)
    # NOTE(review): the SELECT returns four columns (time, lat, lon,
    # bkgField) but only three column names are given here — verify that
    # from_records handles the extra field as intended.
    df = pd.DataFrame.from_records(df, columns=['time', 'lat', 'lon'])
    return df
def sectionMap(tables, variabels, dt, lat1, lat2, lon1, lon2, depth1, depth2, fname, exportDataFlag):
    # Build a section map for each (table, variable) pair at date `dt`:
    # fetch the data cube, reshape it onto the (lat, lon, depth) grid,
    # optionally export each frame to CSV, then render everything with
    # bokehSec. (The parameter name `variabels` is a pre-existing typo
    # kept for interface compatibility.)
    data = []
    subs = []
    for i in range(len(tables)):
        '''
        ######### Stored Procedure Query ##########
        args = [tables[i], variabels[i], dt, lat1, lat2, lon1, lon2, depth1, depth2]
        query = 'EXEC uspSectionMap ?, ?, ?, ?, ?, ?, ?, ?, ?'
        df = db.dbFetchStoredProc(query, args)
        df = pd.DataFrame.from_records(df, columns=['time', 'lat', 'lon', 'depth', variabels[i]])
        '''
        query = prepareSectionQuery(tables[i], variabels[i], dt, lat1, lat2, lon1, lon2, depth1, depth2)
        df = db.dbFetch(query)
        # grid axes inferred from the returned rows; assumes the result is
        # a complete lat x lon x depth grid so the reshape below is valid
        lat = df.lat.unique()
        lon = df.lon.unique()
        depth = df.depth.unique()
        shape = (len(lat), len(lon), len(depth))
        data.append(df[variabels[i]].values.reshape(shape))
        # subplot title: "<variable> [<unit>] <date>"
        unit = ' [' + db.getVar(tables[i], variabels[i]).iloc[0]['Unit'] + ']'
        sub = variabels[i] + unit + ' ' + dt
        subs.append(sub)
        if exportDataFlag:
            # export data
            dirPath = 'data/'
            if not os.path.exists(dirPath):
                os.makedirs(dirPath)
            exportData(df, path=dirPath + fname + '_' + tables[i] + '_' + variabels[i] + '.csv')
    # NOTE(review): lat/lon/depth passed to bokehSec are from the LAST
    # iteration — confirm all requested variables share one grid.
    bokehSec(data=data, subject=subs, fname=fname, lat=lat, lon=lon, depth=depth, variabels=variabels)
    return
def timesBetween(calTable, startDate, endDate):
    """Return (as a numpy array) the timestamps present in ``calTable``
    between ``startDate`` and ``endDate``, inclusive."""
    query = ("SELECT [time] FROM %s WHERE " % calTable
             + "[time] BETWEEN '%s' AND '%s' " % (startDate, endDate))
    return np.array(db.dbFetch(query)['time'])
def getEddies(table, startDate, endDate, lat1, lat2, lon1, lon2):
    """Fetch eddy records for the given space/time window as a
    DataFrame with track/time/lat/lon columns."""
    rows = db.dbFetch(prepareQuery(table, startDate, endDate, lat1, lat2, lon1, lon2))
    return pd.DataFrame.from_records(rows, columns=['track', 'time', 'lat', 'lon'])
def getModelDepthLevels(tableName):
    """Return every row of the given model depth-levels table."""
    return db.dbFetch("SELECT * FROM %s" % tableName)
def getReferences(datasetID):
    """Return the references attached to a dataset, via the
    udfDatasetReferences table-valued function."""
    return db.dbFetch("SELECT Reference FROM dbo.udfDatasetReferences(%d)" % datasetID)
# NOTE(review): this span is the tail of a query-builder (its `def` line
# falls outside this view) fused with a small driver script that fetches a
# Pisces NRT iron field and plots it; left byte-identical — the enclosing
# definition cannot be reconstructed safely from here.
query = "SELECT [time], lat, lon, depth, %s FROM %s WHERE " query = query + "[time]>='%s' AND [time]<='%s' AND " query = query + "lat>=%f AND lat<=%f AND " query = query + "lon>=%f AND lon<=%f AND " query = query + "depth>=%f AND depth<=%f " query = query + "ORDER BY [time], lat, lon, depth " query = query % args return query ############## set parameters ################ table = 'tblPisces_NRT' field = 'Fe' dt1 = '2017-06-03' dt2 = '2017-06-03' lat1, lat2, lon1, lon2 = 10, 55, -180, -100 depth1 = 0 depth2 = 1 ############################################## args = (field, table, dt1, dt2, lat1, lat2, lon1, lon2, depth1, depth2) query = prepareQuery(args) df = db.dbFetch(query) df = pd.DataFrame.from_records(df, columns=['time', 'lat', 'lon', 'depth', field]) lat = df.lat.unique() lon = df.lon.unique() shape = (len(lat), len(lon)) data = df[field].values.reshape(shape) #df.to_csv(field+'.csv', index=False) # export data if needed! plot(lat, lon, data)
def getMetadata_NoRef(table, variable):
    """Fetch variable metadata (without the references list) via the
    udfMetaData_NoRef table-valued function.

    Note: the UDF takes its arguments as (variable, table), the reverse
    of this function's parameter order.
    """
    return db.dbFetch("SELECT * FROM dbo.udfMetaData_NoRef('%s', '%s')" % (variable, table))