def main():
    """Start the Qt event loop and show the PCA widget for the CSV matrix."""
    import sys

    qt_app = QtGui.QApplication(sys.argv)
    # skip the header row; columns are comma separated
    matrix = load('PCA_Matrix_noProE.csv', delimiter=',', skiprows=1)
    widget = PCA_Widget(matrix)
    #widget.show()
    sys.exit(qt_app.exec_())
def getdrift_raw(filename,id3,interval,datetime_wanted): # range_time is a number,unit by one day. datetime_wanted format is num d=ml.load(filename) lat1=d[:,8] lon1=d[:,7] idd=d[:,0] year=[] for n in range(len(idd)): year.append(str(idd[n])[0:2]) h=d[:,4] day=d[:,3] month=d[:,2] time1=[] for i in range(len(idd)): time1.append(date2num(datetime.datetime.strptime(str(int(h[i]))+' '+str(int(day[i]))+' '+str(int(month[i]))+' '+str(int(year[i])), "%H %d %m %y"))) idg1=list(ml.find(idd==id3)) idg2=list(ml.find(np.array(time1)<=datetime_wanted+interval/24)) "'0.25' means the usual Interval, It can be changed base on different drift data " idg3=list(ml.find(np.array(time1)>=datetime_wanted-0.1)) idg23=list(set(idg2).intersection(set(idg3))) # find which data we need idg=list(set(idg23).intersection(set(idg1))) print 'the length of drifter data is '+str(len(idg)),str(len(set(idg)))+' . if same, no duplicate' lat,lon,time=[],[],[] for x in range(len(idg)): lat.append(round(lat1[idg[x]],4)) lon.append(round(lon1[idg[x]],4)) time.append(round(time1[idg[x]],4)) # time is num return lat,lon,time
def range_latlon(filename, driftnumber):
    """Return (maxlon, minlon, maxlat, minlat, lat1, lon1) for one drifter.

    Function needed in case of "raw" drifter data: scans the raw file for
    rows whose id (column 0) matches `driftnumber` and reports the lat/lon
    bounding box plus the raw coordinate arrays.
    """
    d = ml.load(filename)
    # avoid shadowing the builtin `id`
    rows = ml.find(d[:, 0] == int(driftnumber))
    lat1 = d[rows, 8]
    lon1 = d[rows, 7]
    maxlon = max(lon1)
    minlon = min(lon1)
    maxlat = max(lat1)
    minlat = min(lat1)
    return maxlon, minlon, maxlat, minlat, lat1, lon1
def read_CSV_mlab(self, filename):
    """Load a delimited text file as a list of string rows.

    The delimiter is chosen from the file extension via the module-level
    mdictFileFormats table (an unknown extension raises KeyError, as
    before).  Returns [] if the file cannot be loaded.
    """
    dirname, fname = os.path.split(str(filename))
    name, ext = os.path.splitext(fname)
    delim = mdictFileFormats[ext]
    try:
        DataArray = load(filename, delimiter=delim, skiprows=1, dtype=str)
        return DataArray.tolist()
    except Exception:
        # was a bare `except:` which also swallowed SystemExit and
        # KeyboardInterrupt; keep the best-effort empty-list fallback
        return []
def range_latlon(filename, driftnumber):
    """Return (maxlon, minlon, maxlat, minlat, lat1, lon1) for one drifter.

    Function needed in case of "raw" drifter data: selects rows of the raw
    file whose id column matches `driftnumber` and returns the bounding box
    of its positions together with the latitude/longitude arrays.
    """
    d = ml.load(filename)
    # renamed from `id`, which shadowed the builtin
    row_idx = ml.find(d[:, 0] == int(driftnumber))
    lat1 = d[row_idx, 8]
    lon1 = d[row_idx, 7]
    maxlon = max(lon1)
    minlon = min(lon1)
    maxlat = max(lat1)
    minlat = min(lat1)
    return maxlon, minlon, maxlat, minlat, lat1, lon1
def __init__(self, ticker):
    """Load OHLCV price history for `ticker` from <datadir>/<ticker>.csv.

    load() assumes floats unless you provide a converter dictionary keyed
    by column number; datestr2num is slow but it's easy.
    """
    self.ticker = ticker
    csv_path = os.path.join(datadir, '%s.csv' % ticker)
    columns = load(csv_path,
                   delimiter=',',
                   #converters={0:to_datenum2},
                   converters={0: datestr2num},
                   skiprows=1,
                   unpack=True)
    (self.date, self.open, self.high, self.low,
     self.close, self.volume, self.adjclose) = columns
def getdrift_raw_range_latlon(filename, id3, interval, datetime_wanted_1,
                              num, step_size):
    """Return (maxlon, minlon, maxlat, minlat) for drifter `id3`.

    This is for plotting all the data in the same lat/lon range: the box
    covers every fix from `interval` hours before `datetime_wanted_1`
    through the last of `num` frames spaced `step_size` hours apart.
    `id3` is the int drifter number; datenums throughout.
    """
    d = ml.load(filename)
    lat1 = d[:, 8]
    lon1 = d[:, 7]
    idd = d[:, 0]
    # the first two digits of the drifter id encode the 2-digit year
    year = []
    for n in range(len(idd)):
        year.append(str(idd[n])[0:2])
    h = d[:, 4]
    day = d[:, 3]
    month = d[:, 2]
    time1 = []
    for i in range(len(idd)):
        time1.append(date2num(datetime.datetime.strptime(
            str(int(h[i])) + ' ' + str(int(day[i])) + ' ' +
            str(int(month[i])) + ' ' + str(int(year[i])),
            "%H %d %m %y")))
    idg1 = list(ml.find(idd == id3))
    # '0.25' means the usual fix interval; can be changed for other data sets
    idg2 = list(ml.find(np.array(time1) <=
                        datetime_wanted_1 + step_size / 24.0 * (num - 1) + 0.25))
    idg3 = list(ml.find(np.array(time1) >= datetime_wanted_1 - interval / 24.0))
    idg23 = list(set(idg2).intersection(set(idg3)))
    # find which data we need
    idg = list(set(idg23).intersection(set(idg1)))
    lat, lon = [], []  # dropped the unused `time` accumulator
    for x in range(len(idg)):
        lat.append(round(lat1[idg[x]], 4))
        lon.append(round(lon1[idg[x]], 4))
    maxlon = max(lon)
    minlon = min(lon)
    maxlat = max(lat)
    minlat = min(lat)
    return maxlon, minlon, maxlat, minlat
def getdrift_raw_range_latlon(filename, id3, interval, datetime_wanted_1, num, step_size):
    """Return (maxlon, minlon, maxlat, minlat) for drifter `id3`.

    Used to plot all frames in the same lat/lon range.  The box spans from
    `interval` hours before `datetime_wanted_1` to the time of the last of
    `num` pictures spaced `step_size` hours apart (plus the usual 0.25-day
    fix interval).  `id3` is the int drifter number.
    """
    d = ml.load(filename)
    lat1 = d[:, 8]
    lon1 = d[:, 7]
    idd = d[:, 0]
    # first two digits of the drifter id give the 2-digit year
    year = []
    for n in range(len(idd)):
        year.append(str(idd[n])[0:2])
    h = d[:, 4]
    day = d[:, 3]
    month = d[:, 2]
    time1 = []
    for i in range(len(idd)):
        time1.append(date2num(datetime.datetime.strptime(
            str(int(h[i])) + ' ' + str(int(day[i])) + ' ' +
            str(int(month[i])) + ' ' + str(int(year[i])),
            "%H %d %m %y")))
    idg1 = list(ml.find(idd == id3))
    # '0.25' means the usual interval; change it for different drift data
    idg2 = list(ml.find(np.array(time1) <=
                        datetime_wanted_1 + step_size / 24.0 * (num - 1) + 0.25))
    idg3 = list(ml.find(np.array(time1) >= datetime_wanted_1 - interval / 24.0))
    idg23 = list(set(idg2).intersection(set(idg3)))
    # find which data we need
    idg = list(set(idg23).intersection(set(idg1)))
    lat, lon = [], []  # the original also declared an unused `time` list
    for x in range(len(idg)):
        lat.append(round(lat1[idg[x]], 4))
        lon.append(round(lon1[idg[x]], 4))
    return max(lon), min(lon), max(lat), min(lat)
for kind in ['P', 'C']: for i in range(num+1): plt.clf() plt.axes([0.3,0.2,0.95-0.3,0.95-0.2]) if kind=='C' and i==0: continue if kind=='C' and len(data)>66000: continue data = mlab.load("%s_%s%d.dat" % (filename, kind, i)) num_elems = len(data) t = np.arange(num_elems) plt.hold(True) max_val = max(abs(data)) * threshold if kind == 'P': size=4 width = 0.2 else:
from matplotlib.dates import strpdate2num
from matplotlib.mlab import load

from pylab import figure, show

# Load only the date (col 0) and close (col 2) columns, converting the
# date strings to matplotlib datenums as the file is read.
parse_date = strpdate2num('%d-%b-%y')
dates, closes = load('data/msft.csv',
                     delimiter=',',
                     converters={0: parse_date},
                     skiprows=1,
                     usecols=(0, 2),
                     unpack=True)

chart = figure()
axes = chart.add_subplot(111)
axes.plot_date(dates, closes, '-')
chart.autofmt_xdate()
show()
from matplotlib.dates import num2date import numpy as np ############ ##HARDCODES mthyr='feb2015' # mthyr of processing ############ f=open('/net/data5/jmanning/drift/drift2header_'+mthyr+'.out','w'); # opens output file #pipe = subprocess.Popen(["perl", "/home3/ocn/jmanning/sql/getdrift_header.plx" # this reads a list of ids that need to be loaded into drift_header table from a file like drift2header_'+mthyr+'mar2013.dat # which is created by running the code "update_drift_header.py" # It gets LAT_START,LON_START,BTM_DEPTH_START,START_DATE,DROGUE_DEPTH_START, # DROGUE_DEPTH_END where start and end drogue depth is in the # vertical dimension and the _data file list only the midvalue #d=ml.load('/net/data5/jmanning/drift/sqldump_header.dat') d=ml.load('/net/data5/jmanning/drift/sqldump_header.dat') # loads list of codes=ml.load('/net/data5/jmanning/drift/codes.dat') jj=0 for k in range(len(d)): if len(set([d[k,0]]).intersection(set([138410703,146410701]))): if (d[k,0]==138410703): deployer='SEA ' # 12 character deployer name notes='deployed by undergrad students, the aluminum surface drifter went silent on 10 September, recovered by Chathams Harbormaster Stuart Smith, and picked up by Miles Manning; the 12m drogue (25 Aug) dragged bottom on Tuckernuck Shoals until Dick Limeburner recovered it in mid-October' typei='Irina ' droguetop=0 droguebot=1.0 droguedia=1.0 elif d[k,0]==146410701: deployer='Tiejie' notes='deployed by Hollings students, Conner and Ed; apparently lost drogue 3 weeks later, came ashore on Craigville Beach, possibly taken home by someone to Yarmouth' typei='Drogue '
#from matplotlib.basemap import Basemap, shiftgrid #import matplotlib.numerix.ma as M import matplotlib.pyplot as plt import matplotlib.mlab #import matplotlib.colors as colors #from matplotlib import * #import asciidata #import MA mapfile = sys.argv[1]+'.lonlatT' lonfile = sys.argv[1]+'.lon' latfile = sys.argv[1]+'.lat' # load data map=np.array(mlab.load(mapfile)) #minv=MA.minimum(map) #maxv=MA.maximum(map) #print(minv,maxv) lons=np.array(mlab.load(lonfile)) lats=np.array(mlab.load(latfile)) #tmp=lons #tmp=lons[0] #lons=lons-tmp #lons[size(lons)-1]=360 print lons
def from_asc(fin, sep=' '):
    """Read an array from an ascii file, return a numpy array.

    `fin` is a filename or open file object; `sep` is the column delimiter.
    matplotlib.mlab.load was deprecated and later removed; numpy.loadtxt is
    the drop-in replacement.
    """
    from numpy import loadtxt
    return loadtxt(fin, delimiter=sep)
"""
parse and load ge.csv into a record array
"""
import time, datetime, csv
import dateutil.parser
import matplotlib.mlab as mlab
import matplotlib.dates as mdates
import matplotlib.cbook as cbook
import numpy as np

# this is how you can use the function np.loadtxt to do the same
# JDH TODO: this isn't working in mlab.load or np.loadtxt. Fix
#rows = np.loadtxt('data/ge.csv', skiprows=1, converters={0:mdates.date2num}, delimiter=',')

# Load the CSV (header skipped), converting column 0 from a date string
# to a matplotlib datenum, then view the rows as a named record array.
rows = mlab.load('data/ge.csv', skiprows=1, converters={0:mdates.date2num}, delimiter=',')
r = np.rec.fromrecords(rows, names='date,open,high,low,close,volume,adjclose')

# compute the average approximate dollars traded over the last 10 days
# hint: closing price * volume trades approx equals dollars trades
recent = r[-10:]
dollars = (recent.close * recent.volume).mean()
print '$%1.2fM'%(dollars/1e6)

# plot the adjusted closing price vs time since 2003 - hint, you must
# use date2num to convert the date to a float for mpl. Make two axes,
# one for price and one for volume. Use a bar chart for volume
import matplotlib.pyplot as plt

dates = mdates.num2date(r.date)
# convert these to native datetime.date objects
# NOTE(review): `dates` is a sequence compared against a single date --
# this relies on Python 2 mixed-type comparison; confirm the mask is
# elementwise as intended.
mask = dates > datetime.date(2003,1,1)
price = r.adjclose[mask]
volume = r.volume[mask]
m = a*numpy.ones((len(t),), dtype=numpy.float_) for k, tk in enumerate(t): if k==0: m[k] = a else: m[k] = m[k-1]*numpy.exp(-alpha*(t[k]-t[k-1])) + a return m + 1. # data files are located in 'data' and have filenames # synapse_times.dat and synapse_data.dat, each of which contain a # single column of ASCII floating point numbers. load each into an # array t and s, where t is the array of times and s an equal length # array of synapse amplitudes t = load(os.path.join('data', 'synapse_times.dat')) s = load(os.path.join('data', 'synapse_data.dat')) guess = (-0.05, 0.3) # create the model and do the best fit model = Model(t, s, one_exponential) bounds = [(None, 0.), (0., None)] bestpars = model.fit(guess, bounds=bounds) # plot the interspike-interval histogram, the actual data, and the best model fit fig = figure() ax1 = fig.add_subplot(311) model.plot_isi(ax1)
from matplotlib import mlab

# Unpack six columns from the per-frame text dump: position (x, y),
# components (ux1, uy1), magnitude and angle -- column meanings inferred
# from the unpacked names; confirm against the tool that wrote the file.
filename = '/Users/miura/Dropbox/data/segmented.tif91_92.txt'
x, y, ux1, uy1, mag1, ang1 = mlab.load(filename, usecols=[0,1,2,3,4,5], unpack=True)
#data = csv.reader(open('/Users/miura/Dropbox/Mette/Tracks.csv')) #p1 = [] #tA = [] #for row, points in enumerate(data): # tA.append(points[2]) # p1.append(points[3:6]) # p2.append(points[3:]) #for d in tA: # print d from matplotlib import mlab as matp ind, frame, gx, gy, gz, ux, vy, wz = matp.load( filename, skiprows=1, delimiter=',', usecols=[0, 1, 2, 3, 4, 5, 6, 7], unpack=True) #for d in frame: #print d #from mayavi.mlab import points3d #from mayavi.mlab import plot3d from mayavi.mlab import quiver3d #from mayavi.mlab import flow from mayavi import mlab as maya ##p1s = points3d(sx, sy, sz, scale_factor=.45, color=(1, 0, 0)) #trial with colorlist
#import csv #data = csv.reader(open('C:/dropbox/My Dropbox/Pairs_NowCorrectDot.txt', 'rb'), delimiter='\t') #p1 = [] #p2 = [] #for row, points in enumerate(data): # p1.append(points[:3]) # p2.append(points[3:]) #for d in p2: #print d from matplotlib import mlab as matp #filename = '/Users/miura/Dropbox/Pairs_NowCorrectDot.txt' filename = 'c:/dropbox/My Dropbox/Wani_3D/Pairs_NowCorrectDot.txt' x1, y1, z1, x2, y2, z2 = matp.load(filename, usecols=[0, 1, 2, 3, 4, 5], unpack=True) #for d in z1: #print d #print x1[0]+x1[2] #dotsfile = '/Users/miura/Dropbox/ProfileDiscdata.txt' dotsfile = 'c:/dropbox/My Dropbox/Wani_3D/ProfileDiscdata.txt' dx1, dy1, dz1 = matp.load(dotsfile, usecols=[0, 1, 2], unpack=True) tabledots = matp.load(dotsfile) xc1 = tabledots[0:(len(tabledots) / 2 - 1), 0] xc2 = tabledots[(len(tabledots) / 2):, 0] yc1 = tabledots[0:(len(tabledots) / 2 - 1), 1] yc2 = tabledots[(len(tabledots) / 2):, 1] zc1 = tabledots[0:(len(tabledots) / 2 - 1), 2] zc2 = tabledots[(len(tabledots) / 2):, 2]
# exampleMayavi.py
# Kota Miura ([email protected])
# Example script: load coordinate pairs from a tab-delimited file (six
# numbers per line, i.e. two 3D points) and render them in mayavi2 --
# cyan and red spheres joined by blue tubes.
from matplotlib import mlab as matp

filename = '/Users/miura/data.txt'
x1, y1, z1, x2, y2, z2 = matp.load(filename, usecols=[0, 1, 2, 3, 4, 5], unpack=True)

from mayavi.mlab import points3d
from mayavi.mlab import plot3d
from mayavi import mlab as maya

p1s = points3d(x1, y1, z1, scale_factor=.25, color=(0, 1, 1))
p2s = points3d(x2, y2, z2, scale_factor=.25, color=(1, 0, 0))

# one tube per pair, connecting point 1 to point 2
for idx, _ in enumerate(x1):
    plin1 = plot3d([x1[idx], x2[idx]],
                   [y1[idx], y2[idx]],
                   [z1[idx], z2[idx]],
                   tube_radius=0.1, colormap='Spectral', color=(0, 0, 1))

maya.show()
#data = csv.reader(open('/Users/miura/Dropbox/Mette/Tracks.csv')) #p1 = [] #tA = [] #for row, points in enumerate(data): # tA.append(points[2]) # p1.append(points[3:6]) # p2.append(points[3:]) #for d in tA: # print d from matplotlib import mlab as matp ind, trajID, tA, x1, y1, z1, sx, sy, sz, ptID = matp.load( filename, skiprows=1, delimiter=',', usecols=[0, 1, 2, 3, 4, 5, 6, 7, 8, 9], unpack=True) #for d in ind: #print d from mayavi.mlab import points3d from mayavi.mlab import plot3d from mayavi.mlab import quiver3d from mayavi.mlab import flow from mayavi import mlab as maya p1s = points3d(sx, sy, sz, scale_factor=.45, color=(1, 0, 0)) curtid = -1.0
# pylab-free version of plotmap.py ################################## # set backend to Agg. import matplotlib matplotlib.use('Agg') from matplotlib.backends.backend_agg import FigureCanvasAgg as FigureCanvas from matplotlib.toolkits.basemap import Basemap, shiftgrid from matplotlib.figure import Figure import matplotlib.numerix as NX import matplotlib.cm as cm from matplotlib.mlab import load # read in topo data (on a regular lat/lon grid) # longitudes go from 20 to 380. topoin = load('etopo20data.gz') lons = load('etopo20lons.gz') lats = load('etopo20lats.gz') # shift data so lons go from -180 to 180 instead of 20 to 380. topoin, lons = shiftgrid(180., topoin, lons, start=False) # create figure, axes instance. fig = Figure() canvas = FigureCanvas(fig) ax = fig.add_axes([0.1, 0.1, 0.7, 0.7]) # setup of basemap ('lcc' = lambert conformal conic). # use major and minor sphere radii from WGS84 ellipsoid. # pass axes instance to Basemap constructor so pylab won't # be imported. m = Basemap(llcrnrlon=-145.5,llcrnrlat=1.,urcrnrlon=-2.566,urcrnrlat=46.352,\
#import csv #data = csv.reader(open('C:/dropbox/My Dropbox/Pairs_NowCorrectDot.txt', 'rb'), delimiter='\t') #p1 = [] #p2 = [] #for row, points in enumerate(data): # p1.append(points[:3]) # p2.append(points[3:]) #for d in p2: #print d from matplotlib import mlab as matp #filename = '/Users/miura/Dropbox/Pairs_NowCorrectDot.txt' filename = 'c:/dropbox/My Dropbox/Wani_3D/Pairs_NowCorrectDot.txt' x1, y1, z1, x2, y2, z2 = matp.load(filename, usecols=[0, 1, 2, 3, 4, 5], unpack=True) #for d in z1: #print d #print x1[0]+x1[2] #dotsfile = '/Users/miura/Dropbox/ProfileDiscdata.txt' dotsfile = 'c:/dropbox/My Dropbox/Wani_3D/ProfileDiscdata.txt' dx1, dy1, dz1 = matp.load(dotsfile, usecols=[0,1,2], unpack=True) tabledots = matp.load(dotsfile) xc1 = tabledots[0:(len(tabledots)/2 - 1), 0] xc2 = tabledots[(len(tabledots)/2):, 0] yc1 = tabledots[0:(len(tabledots)/2 - 1), 1] yc2 = tabledots[(len(tabledots)/2):, 1] zc1 = tabledots[0:(len(tabledots)/2 - 1), 2] zc2 = tabledots[(len(tabledots)/2):, 2] from mayavi.mlab import points3d from mayavi.mlab import plot3d
""" parse and load ge.csv into a record array """ import time, datetime, csv import dateutil.parser import matplotlib.mlab as mlab import matplotlib.dates as mdates import matplotlib.cbook as cbook import numpy as np # this is how you can use the function np.loadtxt to do the same # JDH TODO: this isn't working in mlab.load or np.loadtxt. Fix #rows = np.loadtxt('data/ge.csv', skiprows=1, converters={0:mdates.date2num}, delimiter=',') rows = mlab.load('data/ge.csv', skiprows=1, converters={0: mdates.date2num}, delimiter=',') r = np.rec.fromrecords(rows, names='date,open,high,low,close,volume,adjclose') # compute the average approximate dollars traded over the last 10 days # hint: closing price * volume trades approx equals dollars trades recent = r[-10:] dollars = (recent.close * recent.volume).mean() print '$%1.2fM' % (dollars / 1e6) # plot the adjusted closing price vs time since 2003 - hint, you must # use date2num to convert the date to a float for mpl. Make two axes, # one for price and one for volume. Use a bar chart for volume import matplotlib.pyplot as plt dates = mdates.num2date(
from matplotlib.dates import strpdate2num
from matplotlib.mlab import load

from pylab import figure, show

# Read date (col 0) and close (col 2) from the CSV, converting the date
# strings with a strpdate2num converter as the file is loaded.
date_converter = {0: strpdate2num('%d-%b-%y')}
dates, closes = load('../data/msft.csv', delimiter=',',
                     converters=date_converter,
                     skiprows=1, usecols=(0, 2), unpack=True)

price_fig = figure()
price_ax = price_fig.add_subplot(111)
price_ax.plot_date(dates, closes, '-')
price_fig.autofmt_xdate()
show()
#import csv # data = csv.reader(open('C:/dropbox/My Dropbox/Pairs_NowCorrectDot.txt', 'rb'), delimiter='\t') #data = csv.reader(open('/Users/miura/Dropbox/Mette/Tracks.csv')) #p1 = [] #tA = [] #for row, points in enumerate(data): # tA.append(points[2]) # p1.append(points[3:6]) # p2.append(points[3:]) #for d in tA: # print d from matplotlib import mlab as matp ind, frame, gx, gy, gz, ux, vy, wz = matp.load(filename, skiprows=1, delimiter=',', usecols=[0, 1, 2, 3, 4, 5, 6, 7], unpack=True) #for d in frame: #print d #from mayavi.mlab import points3d #from mayavi.mlab import plot3d from mayavi.mlab import quiver3d from mayavi.mlab import flow from mayavi import mlab as maya import numpy as np ##p1s = points3d(sx, sy, sz, scale_factor=.45, color=(1, 0, 0)) #trial with colorlist colorlist = []
# pylab-free version of simpletest.py ##################################### # set backend to Agg. import matplotlib matplotlib.use('Agg') from matplotlib.backends.backend_agg import FigureCanvasAgg as FigureCanvas from matplotlib.toolkits.basemap import Basemap from matplotlib.figure import Figure from numpy import meshgrid import matplotlib.numerix as nx import matplotlib.cm as cm from matplotlib.mlab import load # read in topo data (on a regular lat/lon grid) # longitudes go from 20 to 380. etopo = load('etopo20data.gz') lons = load('etopo20lons.gz') lats = load('etopo20lats.gz') # create figure. fig = Figure() canvas = FigureCanvas(fig) # create axes instance, leaving room for colorbar at bottom. ax = fig.add_axes([0.125,0.175,0.75,0.75]) # create Basemap instance for Robinson projection. # set 'ax' keyword so pylab won't be imported. m = Basemap(projection='robin',lon_0=0.5*(lons[0]+lons[-1]),ax=ax) # make filled contour plot. x, y = m(*meshgrid(lons, lats)) cs = m.contourf(x,y,etopo,30,cmap=cm.jet) # draw coastlines. m.drawcoastlines()
#import csv # data = csv.reader(open('C:/dropbox/My Dropbox/Pairs_NowCorrectDot.txt', 'rb'), delimiter='\t') #data = csv.reader(open('/Users/miura/Dropbox/Mette/Tracks.csv')) #p1 = [] #tA = [] #for row, points in enumerate(data): # tA.append(points[2]) # p1.append(points[3:6]) # p2.append(points[3:]) #for d in tA: # print d from matplotlib import mlab as matp ind, trajID, tA, x1, y1, z1, sx, sy, sz, ptID = matp.load(filename, skiprows=1, delimiter=',', usecols=[0, 1, 2, 3, 4, 5, 6, 7, 8, 9], unpack=True) #for d in ind: #print d from mayavi.mlab import points3d from mayavi.mlab import plot3d from mayavi.mlab import quiver3d from mayavi.mlab import flow from mayavi import mlab as maya p1s = points3d(sx, sy, sz, scale_factor=.45, color=(1, 0, 0)) curtid = -1.0 cx = [] cy = []