def host():
    # list every stock recorded in the overall profile database
    db = Base('overall.db')
    db.open()
    #stocks = [r for r in db if r['state']==1]
    stocks = [r for r in db]
    return render_template('all.html', stocks=stocks)

def resetHisDB():
    historyDB = Base("F://alfStock//" + "alf123" + '.history')
    historyDB.create('sid', 'Edate', 'length')  # Edate := started day, not end day
    historyDB.open()
    historyDB.commit()
    currentDB = Base("F://alfStock//" + "alf123" + '.current')
    currentDB.create('sid', 'Edate', 'length')
    currentDB.open()
    currentDB.commit()

def sell():
    # show stocks flagged to sell on the current trading date
    sm = StockManager()
    stock = Stock_Profile('2002')
    current = stock.get_result(0)[0]
    db = Base('overall.db')
    db.open()
    stocks = [r for r in db if r['sell'] == 1 and r['date'] == current]
    return render_template('buy.html', stocks=stocks)

def users():
    db = Base('alf.db')
    db.open()
    users = []
    for r in db:
        n = r['name']
        p = r['pwd']
        users.append((n, p))
    return render_template('user.html', users=users)

def alf():
    db = Base('alf.db')
    db.open()
    user = request.form['Username']
    pwd = request.form['password']
    # note: users() above reads this column as 'pwd'; one of the two field names is stale
    user_verf = [r['password'] for r in db if r['name'] == user]
    if len(user_verf) > 0 and user_verf[0] == pwd:
        return 'Hello, ' + user
    # covers both a wrong password and an unknown username
    return 'Who are you!!!'

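# A minimal sketch of how the view functions above might be wired into a Flask
# app. The app object, route paths, and endpoint names are assumptions; the
# original snippets only show the view bodies, not their registration.
from flask import Flask, render_template, request
from PyDbLite import Base

app = Flask(__name__)

# hypothetical route registrations for the views defined above
app.add_url_rule('/', 'host', host)
app.add_url_rule('/users', 'users', users)
app.add_url_rule('/sell', 'sell', sell)
app.add_url_rule('/alf', 'alf', alf, methods=['POST'])

if __name__ == '__main__':
    app.run(debug=True)
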
import datetime
import socket
import urllib2
from urllib2 import URLError

from PyDbLite import Base


class alf123:

    def __init__(self, sID, onePiece):
        today = datetime.date.today()
        self.sID = sID
        self.opp = onePiece
        self.historyDB = Base("F://alfStock//" + "alf123" + '.history')
        self.currentDB = Base("F://alfStock//" + "alf123" + '.current')
        self.historyDB.open()
        self.currentDB.open()
        db = Base("F://alfStock//" + str(today) + '.db')
        impactDB = Base("F://alfStock//" + str(today) + '.yv')
        if db.exists():
            db.open()
            recs = [r for r in db if r['sid'] == self.sID]
            if len(recs) > 0:
                self.history = recs[0]['history']
                self.sCurrent = recs[0]['current']
            else:
                # no cached record for this stock yet; fetch and insert it
                print "already existed: ", len(db)
                self.insertHistory(db)
        else:
            db.create('sid', 'history', 'current')
            self.insertHistory(db)
        if impactDB.exists():
            impactDB.open()  # open before use; the original skipped this step
            self.idb = impactDB
        else:
            impactDB.create('sid', 'UpOrDown')  # U: up; D: down
            impactDB.open()
            impactDB.commit()
            self.idb = impactDB

    def insertHistory(self, db):
        # stockHistoryGet() and stockCurrent() are defined elsewhere in this class
        db.open()
        self.history = self.stockHistoryGet()
        self.sCurrent = self.stockCurrent()
        db.insert(sid=self.sID, history=self.history, current=self.sCurrent)
        db.commit()

    def TTLUrlOpen(self, url, rc):
        # fetch url with a 2.5 s timeout, retrying up to two more times on timeout;
        # returns None once the retries run out
        retryCount = rc
        if retryCount > 2:
            return None
        response = None
        try:
            response = urllib2.urlopen(url, None, 2.5)
        except URLError, e:
            print "url error:", e
        except socket.timeout:
            print "Timed out!"
            response = self.TTLUrlOpen(url, retryCount + 1)
        return response

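# A minimal usage sketch for the alf123 class, assuming the F://alfStock//
# databases are writable and that stockHistoryGet() and stockCurrent() (called
# by insertHistory but not shown in this snippet) are defined. The stock ID and
# the onePiece value are hypothetical placeholders.
if __name__ == '__main__':
    tracker = alf123('2002', None)
    print tracker.history   # history loaded from, or inserted into, today's .db
    print tracker.sCurrent  # current quote cached alongside it
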
def makeDB(read, write, startTime="2010-01-01 00:00:00",
           endTime="2010-01-01 00:10:00"):
    db = Base(write)
    startTime = calc.datetonumber(startTime)
    endTime = calc.datetonumber(endTime)
    # weekday comes from the day of the week. It is a number from 0 to 6:
    # 0 = Monday, 6 = Sunday.
    db.create('sensor', 'date', 'weekday', 'index', mode="override")
    db.open()
    allData = {}
    for i in range(len(bbdata.allSensors)):
        s = bbdata.allSensors[i]
        data = []
        print "Parsing sensor " + str(s)
        try:
            sString = read + "sensor" + str(s) + ".txt"
            f = open(sString).readlines()
            oldD = None
            for timeLine in f:
                tmp = timeLine.split()
                tmp = tmp[1] + " " + tmp[2]
                #tmp = tmp[0] + " " + tmp[1]
                d = datetime.datetime.strptime(tmp, "%Y-%m-%d %H:%M:%S")
                foo = calc.datetonumber(d)
                if foo >= startTime and foo <= endTime:
                    data.append(calc.datetonumber(d))
                    if d.toordinal() != oldD:
                        # Add to database
                        db.insert(s, d.toordinal(), d.weekday(), len(data) - 1)
                        oldD = d.toordinal()
                        print "  " + str(d)
        except Exception, e:
            print "Except:" + str(e)
            pass
        allData[s] = data

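# A hedged example of calling makeDB: the read directory is assumed to contain
# sensorN.txt files in the "<idx> <date> <time>" layout parsed above, and the
# output database name is hypothetical.
if __name__ == '__main__':
    makeDB(read="./sensordata/", write="sensors.pdl",
           startTime="2010-01-01 00:00:00", endTime="2010-01-07 00:00:00")
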
from datetime import timedelta

from PyDbLite import Base

import CleanEvents


class EventPassportOffice:
    # what do we need in init?
    # pressure run ID number
    # acoustic ID number
    # (btw marking those separate is a bad idea on the operator's part)
    def __init__(self):
        self.EventPassport = Base('EventPassport/EventPassport.pdl')
        # check if the DB exists. If yes, open; if not, create it:
        if not self.EventPassport.exists():
            self.genPDL()
        else:
            self.EventPassport.open()
        self.CleanEvents = CleanEvents.CleanData()

    def genPDL(self):
        # Create the PDL file for the database
        self.EventPassport.create('EventID', 'Temperature', 'Pressure', 'Time',
                                  'RunNumber', 'Path', 'RunType', mode="open")
        # RunNumber is defined as RunNumberAcoustic
        # RunType can be neutron or alpha

    def genPassport(self, Path, RunNumberAcoustic, RunNumberPressure, RunType_WS):
        FilteredData = self.CleanEvents.MatchEvent_PressurePiezo(
            Path, str(RunNumberAcoustic), str(RunNumberPressure))
        # Get the last EventID
        recs = [Record['EventID'] for Record in self.EventPassport
                if Record['RunNumber'] == RunNumberAcoustic]
        if len(recs) == 0:
            EID = str(RunNumberAcoustic) + "0001"
            EID = int(EID)
        else:
            EID = max(recs) + 1
        # check if we have a duplicate!
        for DataPoint in FilteredData:
            timestamp = DataPoint[1]
            # Check if we have a dupe/conflict within a 2-second window
            x = [Event for Event in self.EventPassport
                 if Event['Time'] - timedelta(seconds=2) <= timestamp <= Event['Time'] + timedelta(seconds=2)]
            if len(x) == 0:
                self.EventPassport.insert(EventID=EID, Temperature=DataPoint[3],
                                          Pressure=DataPoint[2], Time=DataPoint[1],
                                          RunNumber=RunNumberAcoustic,
                                          Path=DataPoint[0], RunType=RunType_WS)
                EID += 1
                print("Inserting Entry ...")
            else:
                print "Duplicate entry found at: " + str(DataPoint[1]) + " Event ID: " + str(x[0]['EventID'])
        self.EventPassport.commit()

    def CheckPassport_RunNumber(self, RunNumberQry):
        return self.EventPassport(RunNumber=RunNumberQry)

    def CheckPassport_Temperature(self, HighTemp, LowTemp):
        # range select; PyDbLite's call syntax only handles equality,
        # so filter the records explicitly
        return [r for r in self.EventPassport if LowTemp < r['Temperature'] < HighTemp]

    def CheckPassport_Time(self, fromTime, toTime):
        recs = [r for r in self.EventPassport if fromTime < r['Time'] < toTime]
        return recs

    def SizeofPassportDB(self):
        return len(self.EventPassport)

    def CheckPassport_Runtype(self, runtype_WS):
        return self.EventPassport(RunType=runtype_WS)

    def CheckPassport_eventID(self, EventID_WS):
        return self.EventPassport(EventID=EventID_WS)

    def _deleteEvent(self, RecID_WS):
        del self.EventPassport[RecID_WS]
        self.EventPassport.commit()

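# A minimal sketch of driving EventPassportOffice. The run numbers, path, and
# run type are placeholders, and genPassport assumes CleanEvents can match the
# acoustic and pressure records for that run.
if __name__ == '__main__':
    office = EventPassportOffice()
    office.genPassport(Path='Piezo/triggers.Nov23/', RunNumberAcoustic=196,
                       RunNumberPressure=196, RunType_WS='Neutron')
    print office.SizeofPassportDB()
    for event in office.CheckPassport_Runtype('Neutron'):
        print event['EventID'], event['Time']
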
import os

from PyDbLite import Base


class StockManager(object):

    def __init__(self):
        self.getSummary()

    def dbname(self, sid):
        return sid + '.db'

    def getStock(self, sid):
        sname = self.dbname(sid)
        exist = os.path.isfile(sname)
        db = ''
        if exist:
            # read db
            db = Base(sname)
            db.open()
        else:
            # create a new db
            db = Base(sname)
            db.create('date', 'buySig', 'sellSig', 'state', 'buy', 'sell',
                      mode="override")
            db.open()
        return db

    def updateStock(self, sid, sc):
        db = self.getStock(sid)
        sdb = self.sdb
        if sc['date'] in [r['date'] for r in db]:
            pass
        else:
            db.insert(date=sc['date'], buySig=sc['buySig'], sellSig=sc['sellSig'],
                      state=sc['state'], buy=sc['buy'], sell=sc['sell'])
            db.commit()
            print '[stock] # ', sid, ' updating'
        conflict = [r for r in sdb if ((r['date'] == sc['date']) and (r['stock'] == sid))]
        if len(conflict) == 0:
            sdb.insert(date=sc['date'], stock=sid, state=sc['state'],
                       buy=sc['buy'], sell=sc['sell'])
            sdb.commit()
            print '[summary] creating ', sid
        update = [r for r in sdb if ((r['date'] != sc['date']) and (r['stock'] == sid))]
        if len(update) > 0:
            sdb.update(update[0], date=sc['date'], state=sc['state'],
                       buy=sc['buy'], sell=sc['sell'])
            sdb.commit()
            print '[summary] updating ', sid

    def getSummary(self):
        self.sdb = Base('overall.db')
        self.sdb.open()
        return self.sdb

    def getCurStatus(self, sid):
        db = self.sdb
        match = [r for r in db if r['stock'] == sid]
        if len(match) == 0:
            return 0
        else:
            return match[0]['state']

    def getLatestDate(self, sid):
        db = self.sdb
        records = [r for r in db if r['stock'] == sid]
        if len(records) > 0:
            return records[0]['date']
        else:
            return '69/06/19'

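# A short usage sketch for StockManager. The per-day record sc mirrors the keys
# updateStock reads; the stock ID, date string, and signal values are
# hypothetical and the date format is only a guess from the sentinel above.
if __name__ == '__main__':
    sm = StockManager()
    sc = {'date': '14/07/17', 'buySig': 0, 'sellSig': 1,
          'state': 1, 'buy': 0, 'sell': 1}
    sm.updateStock('2002', sc)
    print sm.getCurStatus('2002'), sm.getLatestDate('2002')
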
import itertools
from datetime import datetime

from PyDbLite import Base


class PressureVeto:
    # We need the run number for init. We will use PyDbLite,
    # so we need to gen the db first. There will be another
    # function for that. The reason we use this is because
    # of native python compatibility.
    def __init__(self, RunNumber):
        # property self.RunNumber assigned.
        # This is typecast to string for manipulation.
        self.RunNumber = str(RunNumber)
        # property self.PyDB -> database for pressures
        self.PyDB = Base('pressures/' + self.RunNumber + '.dbl')
        # check if the DB exists. If yes, open; if not, create it:
        if not self.PyDB.exists():
            self.genPDL()
        else:
            self.PyDB.open()
        # Define the minimum threshold between bubbles in iterations.
        # Remember, each iteration is 1/10th of a second!
        # Iter must be an integer!
        minSecondsBetweenBubbles = 4
        self.minIterBetweenBubbles = int(minSecondsBetweenBubbles * 10)

    # Function to generate the PyDbLite database.
    # I will deliberately not give this MySQL abilities
    # since I don't want my data wiped out by "mistake".
    # The human veto has to be in here somewhere.
    def genPDL(self):
        # Create the PDL file for the database
        self.PyDB.create('id', 'temp', 'pressure', 'time', mode="override")
        # import csv for CSV file ops. Import ONLY if needed, so it's here.
        import csv
        # filename of the CSV file. Assumption -> RunID.csv
        fname_csv = self.RunNumber + ".csv"
        PTcsv = csv.reader(open(fname_csv))
        # convert CSV to PyDB line by line
        for line in PTcsv:
            self.PyDB.insert(id=int(line[0]), temp=float(line[1]),
                             pressure=float(line[2]),
                             time=datetime.strptime(line[3], "%Y-%m-%d %H:%M:%S"))
        # Commit the database
        self.PyDB.commit()
        # Print a confirmation
        print "Creating PyDB complete."

    # This function finds the "peaks" in the pressures.
    # Criterion: peaks are above 30 PSI.
    def findBubbleTimings(self):
        '''Finds the bubble timings
        In -> Pressure data
        Out -> Timings (datetime.datetime)
        Assumptions -> Bubble PSI > 30 PSI
        '''
        # Load all records; the 30 PSI cut is applied in the loop below
        recs = [r for r in self.PyDB]
        # Make an iterator of this list
        RecIter = itertools.islice(recs, None)
        # Declare memory space for:
        #   valid bubbles
        #   temporary storage
        #   last record's ID (to stop the iterator)
        ValidBubbles = []
        _VBubbleAmpTemporaryStorage = []
        RecLastID = recs[-1:][0]['__id__']
        # Go record by record:
        for record in RecIter:
            # If pressure > 30:
            if record['pressure'] >= 30.0:
                # Seed the temporary memory with the present pressure, time, temp
                _VBubbleAmpTemporaryStorage = [record['pressure'], record['time'], record['temp']]
                # Number of steps to iterate so we don't go beyond the last record
                stepsTillLastRec = RecLastID - record['__id__']
                stepsIter = self.minIterBetweenBubbles if (stepsTillLastRec > self.minIterBetweenBubbles) else stepsTillLastRec
                # Investigate the next minIterBetweenBubbles records for a maximum
                for i in xrange(stepsIter):
                    # Progress iterator by 1
                    record = RecIter.next()
                    # Is the present pressure > the stored maximum? Yes: store it; No: continue.
                    # Compare against the stored pressure (element 0), not the whole list.
                    _VBubbleAmpTemporaryStorage = [record['pressure'], record['time'], record['temp']] \
                        if record['pressure'] >= _VBubbleAmpTemporaryStorage[0] else _VBubbleAmpTemporaryStorage
                # The local maximum is found; store it as good data and continue searching
                ValidBubbles.append(_VBubbleAmpTemporaryStorage)
                # clear the temporary space
                _VBubbleAmpTemporaryStorage = []
        # Return the time cut!
        return ValidBubbles

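# A minimal sketch of running the pressure veto, assuming <RunNumber>.csv sits
# next to the script and pressures/<RunNumber>.dbl is writable. The run number
# is a placeholder.
if __name__ == '__main__':
    veto = PressureVeto(196)
    for pressure, time, temp in veto.findBubbleTimings():
        print time, pressure, temp
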
import sys
import logging
import subprocess

import enchant
import jellyfish
from PyDbLite import Base
# word2vec is assumed to come from gensim, which provides
# Word2Vec.load_word2vec_format for the Freebase skip-gram vectors
from gensim.models import word2vec

testfile = str(sys.argv[1])
jarfile = 'ark-tweet-nlp-0.3.2.jar'
logging.basicConfig(format='%(asctime)s : %(levelname)s : %(message)s',
                    level=logging.INFO)
service_url = 'https://www.googleapis.com/freebase/v1/search'
unnecessary = ['sunday', 'monday', 'tuesday', 'wednesday', 'thursday', 'friday',
               'saturday', 'january', 'february', 'march', 'april', 'may',
               'june', 'july', 'august', 'september', 'october', 'november',
               'december', 'it']
freebase_link = 'http://www.freebase.com'
model1 = word2vec.Word2Vec.load_word2vec_format('freebase-vectors-skipgram1000-en.bin.gz',
                                                binary=True)
chant = enchant.Dict("en_US")
bcluster = Base('bcluster.pdl')
bcluster.open()
api_key = 'AIzaSyAW9RPEnSFbJfGsuVXSiTV_xbMySmJfGMw'
mslink = 'http://weblm.research.microsoft.com/rest.svc/bing-body/2013-12/3/jp?u=4e9af3bb-4cd3-4e29-a10b-e15754d454cb'


# Tokenize and tag individual tokens using the Owoputi et al. tagger
def tokenize():
    cmd = 'java -XX:ParallelGCThreads=2 -Xmx500m -jar ' + jarfile + ' \"' + testfile + '\"'
    process = subprocess.Popen(cmd, stdout=subprocess.PIPE,
                               stderr=subprocess.STDOUT, shell=True)
    return iter(process.stdout.readline, b'')


# Collect ngrams from the segments
def ngrams(input, n):
    input = input.split(' ')
    # the original snippet is truncated here; the rest is a minimal
    # reconstruction that returns the space-joined n-grams
    output = []
    for i in range(len(input) - n + 1):
        output.append(' '.join(input[i:i + n]))
    return output

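# A quick check of the two helpers above, assuming the ARK tagger jar and the
# input file passed on the command line exist; ngrams() is exercised on a
# plain string so it works standalone, given the reconstruction above.
if __name__ == '__main__':
    print ngrams('the quick brown fox jumps', 3)
    for tagged_line in tokenize():
        print tagged_line.strip()
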
import os

import numpy
from PyDbLite import Base

import AnalysisTools
import EventPassport
import WaveformLoader


class SignalManip:
    # usual stuff in init
    def __init__(self):
        self.AnalysisResults = Base('AnalysisResults/AnalysisResults.pdl')
        # check if the DB exists. If yes, open; if not, create it:
        if not self.AnalysisResults.exists():
            self.genPDL()
        else:
            self.AnalysisResults.open()
        self.PassportOffice = EventPassport.EventPassportOffice()
        self.LoadWaveform = WaveformLoader.LoadWave()
        self.AcousticAnalysis = AnalysisTools.AcousticsAnalysis()

    # If the DB doesn't exist, make it!
    def genPDL(self):
        # Create the PDL file for the database
        self.AnalysisResults.create('EventID', 'PVar', mode="open")

    # Function to generate the signal average
    def genSignalAverage(self, EventType="Neutron"):
        # get all events of type EventType
        EventList = []
        EventList = self.PassportOffice.CheckPassport_Runtype(EventType)
        SignalAvgMem = numpy.zeros((50000))
        for Event in EventList:
            # Load raw data
            raw_data = self.LoadWaveform.LoadData(Event['Path'][:-3])
            SignalAvgMem += raw_data[0]
        SignalAvgMem /= len(EventList)
        #### Storage ####
        Storage = open("AnalysisResults/signalAvg." + EventType + ".binary", "wb")
        SignalAvgMem.tofile(Storage, format="%f")
        Storage.close()
        return SignalAvgMem

    # Function to generate the FFT average
    def genFFTAverage(self, EventType="Neutron", doWin=False, winStart=10000,
                      winEnd=30000, Fs=1250000.0):
        # get all events of type EventType
        EventList = []
        EventList = self.PassportOffice.CheckPassport_Runtype(EventType)
        FFTAvgMem = numpy.zeros((50000))
        FFTAvgBins = numpy.fft.fftfreq(len(FFTAvgMem), 1.0 / Fs)
        for Event in EventList:
            # Load raw data
            raw_data = self.LoadWaveform.LoadData(Event['Path'][:-3])
            #### Signal window ####
            if doWin:
                print "is it"
                TempSigMem = numpy.zeros((50000))
                TempSigMem[winStart:winEnd] = raw_data[0][winStart:winEnd]
                R_data = TempSigMem
            else:
                R_data = raw_data[0]
            FFTs = numpy.fft.fft(R_data)
            #for i in range(5000,6000):
            #pwrspec = abs(numpy.mean(FFTs[5000:6000]))
            #if pwrspec>10:
            #    print pwrspec, Event
            FFTAvgMem += FFTs
        FFTAvgMem /= len(EventList)
        #### Storage ####
        # FFT #
        Storage = open("AnalysisResults/FFTAvg." + EventType + "win" + str(doWin) + ".binary", "wb")
        FFTAvgMem.tofile(Storage, format="%f")
        Storage.close()
        # FFT freqs #
        Storage = open("AnalysisResults/FFTAvgBins." + EventType + "win" + str(doWin) + ".binary", "wb")
        FFTAvgBins.tofile(Storage, format="%f")
        Storage.close()
        #### Plotting ####
        return FFTAvgMem, FFTAvgBins

    # Functions to show the average values (load from cache)
    def getSignalAverage(self, EventType="Neutron"):
        Storage = "AnalysisResults/signalAvg." + EventType + ".binary"
        if not os.path.exists(Storage):
            data = self.genSignalAverage(EventType)
        else:
            data = numpy.fromfile(Storage)
        return data

    # Function to show the average FFT
    def getFFTAverage(self, EventType="Neutron", doWin=False):
        Storage_FFT = "AnalysisResults/FFTAvg." + EventType + "win" + str(doWin) + ".binary"
        Storage_FFTfreq = "AnalysisResults/FFTAvgBins." + EventType + "win" + str(doWin) + ".binary"
        # Broken. Needs param check and hassles.
        #if os.path.exists(Storage_FFT) and os.path.exists(Storage_FFTfreq):
        #    data_FFT = numpy.fromfile(Storage_FFT)
        #    data_FFTFreq = numpy.fromfile(Storage_FFTfreq)
        #else:
        #    data_FFT, data_FFTFreq = self.genFFTAverage(EventType, doWin)
        data_FFT, data_FFTFreq = self.genFFTAverage(EventType, doWin)
        return data_FFT, data_FFTFreq

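# A hedged example of building the cached averages with SignalManip; it assumes
# the EventPassport database is already populated and the raw waveform files
# referenced by each event's Path are readable.
if __name__ == '__main__':
    manip = SignalManip()
    avg_signal = manip.getSignalAverage(EventType="Neutron")
    avg_fft, fft_bins = manip.getFFTAverage(EventType="Neutron", doWin=False)
    print len(avg_signal), len(avg_fft)
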
'''
Created on 2014/7/17

@author: USER
'''
from stock_profile import *
from PyDbLite import Base

'''
k = Base('2325.db')
k.open()
for r in k:
    print r
'''

one_dream = Base('overall.db')
one_dream.open()
print one_dream.fields

stock = Stock_Profile('2002')
current = stock.get_result(0)[0]
stocks = [r for r in one_dream if (r['buy'] == 1) and (r['date'] == current)]
for s in stocks:
    print s['date'], s['buy'], s['stock']

import numpy
import matplotlib.pyplot as plt
from PyDbLite import Base

import AnalysisTools
import EventPassport
import SignalManip
import WaveformLoader


class doAnalysis:
    # What do we need in init now? Ah, the analysis cache DB
    def __init__(self):
        self.AnalysisResults = Base('AnalysisResults/AnalysisResults.pdl')
        # check if the DB exists. If yes, open; if not, create it:
        if not self.AnalysisResults.exists():
            self.genPDL()
        else:
            self.AnalysisResults.open()
        self.PassportOffice = EventPassport.EventPassportOffice()
        self.LoadWaveform = WaveformLoader.LoadWave()
        self.AcousticAnalysis = AnalysisTools.AcousticsAnalysis()
        self.SignalManip = SignalManip.SignalManip()

    # If the DB doesn't exist, make it!
    def genPDL(self):
        # Create the PDL file for the database
        self.AnalysisResults.create('EventID', 'PVar', mode="open")

    # Gen PVar of the signals!
    def genPVAR(self):
        '''
        Filter params:
        doFilter -> filter on or off
        lowFreq_HP -> low frequency high pass
        highFreq_LP -> high frequency low pass
        Set both for a band pass filter.
        Filter types:
        ApplyFiltersWall -> boxcar window
        ApplyFiltersFIR -> Kaiser window
        '''
        doFilter = True
        lowFreq_HP = 3000
        highFreq_LP = None
        #### Neutron data ####
        # get the list of events
        PVar_Neutron_List = []
        EventList = self.PassportOffice.CheckPassport_Runtype("Neutron")
        # For every event
        for Event in EventList:
            # Load raw data
            raw_data = self.LoadWaveform.LoadData(Event['Path'][:-3])
            # Apply filter. See the docstring for options
            if doFilter:
                filtered_data = self.AcousticAnalysis.ApplyFiltersWall(raw_data[0], lowFreq=lowFreq_HP, highFreq=highFreq_LP)
            else:
                filtered_data = raw_data[0]
            # Calculate PVar
            PVar = self.AcousticAnalysis.calculatePVar(filtered_data)
            # PVar > 25 were observed for events from the wall from 1 specific run!
            # We don't know what to do with those yet.
            #if PVar<20:
            PVar_Neutron_List.append(PVar)
        ########## Plotting #########
        hist_bins = numpy.arange(10, 13.0, 0.1)
        #hist_bins=20
        plt.hist(PVar_Neutron_List, bins=hist_bins, normed=True, facecolor='green', alpha=0.75)
        plt.grid(True)
        plt.xlabel("PVar")
        plt.ylabel("Count")
        plt.title("PVar of Entire Dataset")
        #### Alpha data ####
        PVar_Alpha_List = []
        EventList = self.PassportOffice.CheckPassport_Runtype("Alpha")
        for Event in EventList:
            # get raw data
            raw_data = self.LoadWaveform.LoadData(Event['Path'][:-3])
            # Apply filter. See the docstring for options
            if doFilter:
                filtered_data = self.AcousticAnalysis.ApplyFiltersWall(raw_data[0], lowFreq=lowFreq_HP, highFreq=highFreq_LP)
            else:
                filtered_data = raw_data[0]
            PVar = self.AcousticAnalysis.calculatePVar(filtered_data)
            PVar_Alpha_List.append(PVar)
        ######## Plotting #######
        #print PVar_Alpha_List
        plt.hist(PVar_Alpha_List, bins=hist_bins, normed=True, facecolor='red', alpha=0.40)
        plt.show()

    # Functions to show the average values (load from cache)
    def PlotSignalAverage(self):
        # Run 2X to get data for alpha and for neutron
        data_neutron = self.SignalManip.getSignalAverage(EventType="Neutron")
        data_alpha = self.SignalManip.getSignalAverage(EventType="Alpha")
        ### Plotting ###
        plt.plot(data_neutron, 'g-')
        plt.plot(data_alpha, 'r-')
        plt.xlabel("Timestep")
        plt.ylabel("Signal (mv)")
        plt.grid(True)
        plt.show()

    # Function to show the average FFT
    def PlotFFTAverage(self):
        # Run 2X to get data for alpha and for neutron
        FFTs_neutron, FFTfreqs = self.SignalManip.getFFTAverage(EventType="Neutron", doWin=False)
        FFTs_alpha, FFTfreqs_alpha = self.SignalManip.getFFTAverage(EventType="Alpha", doWin=False)
        # get half length of FFT for plotting
        length = len(FFTs_neutron)
        halflength = length / 2
        FFTAvgBins_kHz_HL = FFTfreqs[:halflength] / 1000.0
        # PLOTTING #
        plt.plot(FFTAvgBins_kHz_HL, abs(FFTs_neutron[:halflength]), 'g-')
        #plt.plot(abs(FFTs_neutron[:halflength]),'g-')
        plt.plot(FFTAvgBins_kHz_HL, abs(FFTs_alpha[:halflength]), 'r-')
        plt.xlabel("Frequency")
        plt.ylabel("Count")
        plt.title("Average FFT of all signals")
        plt.grid(True)
        plt.show()

    #################
    ### THIS FUNCTION IS MY TEST BED AND HAS NO COMMENTS
    # Nor do I plan on putting some!!
    def _ApplyFilter(self):
        PVar_Neutron_List = []
        EventList = self.PassportOffice.CheckPassport_Runtype("Neutron")
        Loc = EventList[12]['Path'][:-3]
        EventList2 = self.PassportOffice.CheckPassport_Runtype("Alpha")
        Loc2 = EventList2[12]['Path'][:-3]
        raw_dataNeutron = self.LoadWaveform.LoadData('Piezo/triggers.Nov23/trigger_2012.11.23_12.56.15_run_196_110_85')
        #raw_dataNeutron = self.LoadWaveform.LoadData(Loc)
        raw_dataN = raw_dataNeutron[0]
        raw_dataAlpha = self.LoadWaveform.LoadData(Loc2)
        raw_dataA = raw_dataAlpha[0]
        #r_data = numpy.zeros((50000))
        #r_data[13000:20000]=raw_data[13000:20000]
        #r_data=raw_data[0]
        #raw_data=r_data
        SampleTime = raw_dataN[1]
        #print 1.0/SampleTime
        n = len(raw_dataN)
        #filtered_data = self.AcousticAnalysis.ApplyFiltersWall(raw_data, lowFreq=10000, highFreq=None)
        #print filtered_data
        fftsN = numpy.fft.rfft(raw_dataN)
        fftsN = fftsN[:n / 2]
        fftfreqsN = numpy.fft.fftfreq(len(raw_dataN), 1.0 / 1250000.0)
        fftfreqsN = fftfreqsN[:n / 2]
        fftsA = numpy.fft.rfft(raw_dataA)
        fftsA = fftsA[:n / 2]
        fftfreqsA = numpy.fft.fftfreq(len(raw_dataA), 1.0 / 1250000.0)
        fftfreqsA = fftfreqsA[:n / 2]
        ############# Plotting ##############
        plt.title('Data and FFT of Signal')
        ax1 = plt.subplot2grid((4, 3), (0, 0), colspan=3)
        ax2 = plt.subplot2grid((4, 3), (1, 0), colspan=3)
        # data
        ax1.plot(raw_dataN, 'g-')
        ax1.set_xlabel('Sample (S.Time = 8e-7s)')
        ax1.set_ylabel('Amplitude (mV)')
        ##### Data info
        # Low vs high cutoff
        plotrange = 10000
        plotrange_cutoff = 600
        ##########
        # All
        ax2.plot(raw_dataA, 'r-')
        #ax2.locator_params(axis='x', nbins=50)
        ax2.set_xlabel('Frequency (kHz)')
        ##plt.show()
        ##plt.clf()
        ######### Plot 2 ########
        ax3 = plt.subplot2grid((4, 3), (2, 0), colspan=3)
        ax4 = plt.subplot2grid((4, 3), (3, 0), colspan=3)
        ## Low
        ax3.plot(abs(fftsN), 'g-')  # was abs(fftN), which is undefined
        #ax3.plot(fftfreqsN/1000, abs(fftsN), 'g-')
        #ax3.locator_params(axis='x', nbins=50)
        #ax3.set_xlabel('Frequency (kHz)')
        #ax3.set_ylabel('Count')
        ## High
        ax4.plot(abs(fftsA), 'r-')
        #ax4.plot(fftfreqsA/1000, abs(fftsA), 'r-')
        ax4.locator_params(axis='x', nbins=50)
        ax4.set_xlabel('Frequency (kHz)')
        ##ax4.set_ylabel('Count')
        plt.show()

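# A minimal driver for the doAnalysis class above, assuming the passport
# database and the raw waveform files are in place; each call opens a
# matplotlib window.
if __name__ == '__main__':
    analysis = doAnalysis()
    analysis.genPVAR()            # PVar histograms, neutron vs alpha
    analysis.PlotSignalAverage()  # averaged time-domain signals
    analysis.PlotFFTAverage()     # averaged FFTs
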
'''
Created on 2014/6/18

@author: USER
'''
from PyDbLite import Base

db = Base('overall.db')
db.create('date', 'stock', 'state', 'buy', 'sell', mode="override")
db.commit()

'''
db.open()
print db.fields
for r in db:
    print r['date'], r['state'], r['stock'], r['buy'], r['sell']
'''