def __init__(self, sID, onePiece): today = datetime.date.today() self.sID = sID self.opp = onePiece self.historyDB = Base("F://alfStock//"+"alf123"+'.history') self.currentDB = Base("F://alfStock//"+"alf123"+'.current') self.historyDB.open() self.currentDB.open() db = Base("F://alfStock//"+str(today)+'.db') impactDB = Base("F://alfStock//"+str(today)+'.yv') if db.exists(): db.open() recs = [ r for r in db if r['sid'] == self.sID ] if len(recs) > 0: self.history = recs[0]['history'] self.sCurrent = recs[0]['current'] else: print "already existed: ", len(db) self.insertHistory(db) else: db.create('sid','history', 'current') self.insertHistory(db) if impactDB.exists(): self.idb = impactDB else: impactDB.create('sid','UpOrDown')# U:up; D:down impactDB.open() impactDB.commit() self.idb = impactDB
def welcome():
    """Register the submitted credentials and greet the new user."""
    # Credential store backed by PyDbLite; mode="open" keeps existing rows.
    db = Base('alf.db')
    db.create('name', 'pwd', mode="open")  # override
    # NOTE(review): the password is persisted in plain text -- consider
    # hashing before storage (behavior deliberately left unchanged here).
    username = request.form['Username']
    password = request.form['password1']
    db.insert(name=username, pwd=password)
    db.commit()
    return 'welcome ' + username
def resetHisDB():
    # Recreate (i.e. wipe) the shared history and current PyDbLite stores.
    # NOTE(review): this definition is shadowed by an identical
    # `resetHisDB` defined later in this file; only the later one is live.
    historyDB = Base("F://alfStock//"+"alf123"+'.history')
    historyDB.create('sid','Edate', 'length')#Edate := started day not end day
    historyDB.open()
    historyDB.commit()
    currentDB = Base("F://alfStock//"+"alf123"+'.current')
    currentDB.create('sid','Edate', 'length')
    currentDB.open()
    currentDB.commit()
def resetHisDB():
    """Rebuild (wipe) the shared history and current tracking databases."""
    # Both stores share one schema; 'Edate' holds the *start* day of the
    # tracked span, not the end day.
    for suffix in ('.history', '.current'):
        store = Base("F://alfStock//" + "alf123" + suffix)
        store.create('sid', 'Edate', 'length')
        store.open()
        store.commit()
class Queue(object):
    """Simple LIFO stack (despite the name) with a PyDbLite backend.

    Records carry ('id', 'item') where ``id`` is the insertion index, so
    the highest id belongs to the most recently pushed record.
    """

    def __init__(self, queue_type=0, config=None):
        """Set up the backing database.

        queue_type -- reserved, currently unused.
        config     -- optional mapping that may override the defaults
                      ('commit', 'db_filename', 'mode').
        """
        # BUG FIX: defaults were only assigned when config was None (the
        # config branch was a bare ``pass``), so passing a config left every
        # attribute unset and Base(self.db_filename) raised AttributeError.
        # BUG FIX: the flag was stored as ``self.commit``, which shadowed
        # the commit() method and made it uncallable; renamed.
        self.auto_commit = False
        self.db_filename = "/tmp/queue.pydb"
        self.mode = "override"
        if config is not None:
            # Copy recognised settings from the supplied mapping.
            self.auto_commit = config.get("commit", self.auto_commit)
            self.db_filename = config.get("db_filename", self.db_filename)
            self.mode = config.get("mode", self.mode)
        self.queue = Base(self.db_filename)

    def create_queue(self):
        """Create the table (honouring self.mode) and index it on id."""
        self.queue.create('id', 'item', mode=self.mode)
        self.queue.create_index('id')
        return None

    def push(self, item):
        """Append item; its id is the queue length at insertion time."""
        self.queue.insert(self.length(), item)
        return None

    def pop(self):
        """Delete and return the most recently pushed record, or None."""
        if self.is_empty():
            return None
        last_id = self.length() - 1  # ids mirror insertion order
        record = self.queue._id[last_id]
        self.queue.delete(record)
        return record

    def list(self):
        """Return the raw records container of the backing database."""
        return self.queue.records

    def length(self):
        """Number of records currently stored."""
        return len(self.queue)

    def is_empty(self):
        """True when the queue holds no records."""
        return self.length() == 0

    def commit(self):
        """Flush to disk, but only when auto-commit is enabled."""
        if self.auto_commit is True:
            self.queue.commit()
        return None
class EventPassportOffice:
    """Registry ("passport office") for bubble events.

    Wraps a PyDbLite database keyed by EventID and populated from the
    pressure/piezo matching done by CleanEvents.CleanData. The pressure
    and acoustic runs carry separate ID numbers (marking those separate
    was a questionable call by the operators); ``RunNumber`` here always
    means the *acoustic* run number.
    """

    def __init__(self):
        self.EventPassport = Base('EventPassport/EventPassport.pdl')
        # Check if the DB exists: open it if yes, create it if not.
        if not self.EventPassport.exists():
            self.genPDL()
        else:
            self.EventPassport.open()
        self.CleanEvents = CleanEvents.CleanData()

    def genPDL(self):
        """Create the PDL file for the database.

        RunNumber is defined as RunNumberAcoustic; RunType can be
        neutron or alpha.
        """
        self.EventPassport.create('EventID', 'Temperature', 'Pressure', 'Time',
                                  'RunNumber', 'Path', 'RunType', mode="open")

    def genPassport(self, Path, RunNumberAcoustic, RunNumberPressure, RunType_WS):
        """Match events for one run and insert any that are not duplicates."""
        FilteredData = self.CleanEvents.MatchEvent_PressurePiezo(
            Path, str(RunNumberAcoustic), str(RunNumberPressure))
        # Next EventID: continue from the run's highest, or seed <run>0001.
        recs = [Record['EventID'] for Record in self.EventPassport
                if Record['RunNumber'] == RunNumberAcoustic]
        if len(recs) == 0:
            EID = int(str(RunNumberAcoustic) + "0001")
        else:
            EID = max(recs) + 1
        for DataPoint in FilteredData:
            timestamp = DataPoint[1]
            # Duplicate/conflict check: any existing event within +/-2 s.
            x = [Event for Event in self.EventPassport
                 if Event['Time'] - timedelta(seconds=2) <= timestamp <= Event['Time'] + timedelta(seconds=2)]
            if len(x) == 0:
                self.EventPassport.insert(EventID=EID, Temperature=DataPoint[3],
                                          Pressure=DataPoint[2], Time=DataPoint[1],
                                          RunNumber=RunNumberAcoustic,
                                          Path=DataPoint[0], RunType=RunType_WS)
                EID += 1
                print("Inserting Entry ...")
            else:
                print("Duplicate entry found at: " + str(DataPoint[1]) + " Event ID: " + str(x[0]['EventID']))
        self.EventPassport.commit()

    def CheckPassport_RunNumber(self, RunNumberQry):
        """All events for a given (acoustic) run number."""
        return self.EventPassport(RunNumber=RunNumberQry)

    def CheckPassport_Temperature(self, HighTemp, LowTemp):
        """Events with LowTemp < Temperature < HighTemp.

        BUG FIX: the original evaluated ``HighTemp > Temperature > LowTemp``
        where ``Temperature`` is an undefined name, raising NameError on
        every call. Replaced with an explicit record filter.
        """
        return [r for r in self.EventPassport
                if LowTemp < r['Temperature'] < HighTemp]

    def CheckPassport_Time(self, fromTime, toTime):
        """Events strictly inside the (fromTime, toTime) window."""
        recs = [r for r in self.EventPassport if fromTime < r['Time'] < toTime]
        return recs

    def SizeofPassportDB(self):
        """Total number of registered events."""
        return len(self.EventPassport)

    def CheckPassport_Runtype(self, runtype_WS):
        """All events of a given run type (e.g. neutron or alpha)."""
        return self.EventPassport(RunType=runtype_WS)

    def CheckPassport_eventID(self, EventID_WS):
        """Look up a single event by its EventID."""
        return self.EventPassport(EventID=EventID_WS)

    def _deleteEvent(self, RecID_WS):
        # Delete by internal record id and persist immediately.
        del self.EventPassport[RecID_WS]
        self.EventPassport.commit()
# Populate the forum database with 200 random threads, each carrying
# 0-5 random comments. Threads have parent=-1; comments point at their
# thread's record id via parent=thread_id.
db.create_index("parent")

nbthreads = 200
for thread_no in range(nbthreads):
    # One top-level thread with a random 2004-2006 timestamp.
    author = "pierre"
    title = sentence(10, 10)
    text = sentence(100, 10)
    date = datetime.datetime(
        random.randint(2004, 2006),
        random.randint(1, 12),
        random.randint(1, 28),
        random.randint(0, 23),
        random.randint(0, 59),
        random.randint(0, 59),
    )
    thread_id = db.insert(parent=-1, author=author, title=title,
                          text=text, date=date)

    # Its comments: dated strictly after the thread, before 2007-01-01.
    nbcomments = random.randint(0, 5)
    for comment_no in range(nbcomments):
        author = word(10)
        text = sentence(50, 10)
        tdelta = datetime.date(2007, 1, 1) - date.date()
        c_date = date + datetime.timedelta(random.randint(1, tdelta.days))
        c_date = datetime.datetime(
            c_date.year,
            c_date.month,
            c_date.day,
            random.randint(0, 23),
            random.randint(0, 59),
            random.randint(0, 59),
        )
        db.insert(parent=thread_id, author=author, title=title,
                  text=text, date=c_date)

db.commit()
# Scrape the (Selenium-driven) student portal for grades, store new ones
# in the local PyDbLite db, post them to a Facebook feed, and beep.
# Navigate: tab -> grades menu -> grades table.
driver.find_element_by_id('tabLink_u1240l1s214').click()
driver.find_element_by_id('formMenu:linknotes1').click()
driver.find_element_by_id('_id137Pluto_108_u1240l1n228_50520_:tabledip:0:_id158Pluto_108_u1240l1n228_50520_').click()
page = driver.find_element_by_id('_id111Pluto_108_u1240l1n228_50520_:tableel:tbody_element')
i = 0  # row counter; incremented per line but otherwise unused here
for item in page.text.splitlines( ):
    # Rows of interest end with '20' (presumably the grade scale "/20" --
    # TODO confirm against the portal markup).
    if item.endswith('20'):
        # Drop the first token, then split the remainder into
        # course name (everything before the last space) and grade.
        line = item.split(' ',1)[1].lstrip()
        note = line.rsplit(' ', 1)[1]
        field = line.rsplit(' ', 1)[0]
        # Look the course up in the local database.
        courseindb = db("course")==field
        if (len(courseindb) == 0):
            # New course: persist it and announce on Facebook.
            db.insert(grade=note, course=field)
            graph.post(path='486181564779150/feed/', message='Nouvelle note : ' + field)
            db.commit()
            Freq = 2500 # Set Frequency To 2500 Hertz
            Dur = 1000 # Set Duration To 1000 ms == 1 second
            winsound.Beep(Freq,Dur)
            print "A new grade is available " + field + " : " + note
        else:
            for rec in courseindb: #only one
                if (rec["grade"] != note):
                    #FB update
                    # NOTE(review): the changed grade is only announced;
                    # the database record is never updated here, so this
                    # branch will re-fire on every run -- confirm intent.
                    Freq = 2500 # Set Frequency To 2500 Hertz
                    Dur = 1000 # Set Duration To 1000 ms == 1 second
                    winsound.Beep(Freq,Dur)
                    print "A grade has just been updated for " + field + " : " + note
    i+=1
class PressureVeto:
    """Pressure-based event veto for one run.

    Loads the run's pressure trace from a PyDbLite database (generated
    from <RunNumber>.csv on first use; PyDbLite is used for native Python
    compatibility, deliberately without MySQL abilities so the data can't
    be wiped "by mistake" -- the human veto has to be in here somewhere)
    and finds bubble candidates as local pressure maxima above 30 PSI.
    """

    def __init__(self, RunNumber):
        # RunNumber is typecast to string for path manipulation.
        self.RunNumber = str(RunNumber)
        # Per-run pressure database.
        self.PyDB = Base('pressures/'+self.RunNumber+'.dbl')
        # Check if the DB exists: open if yes, generate from CSV if not.
        if not self.PyDB.exists():
            self.genPDL()
        else:
            self.PyDB.open()
        # Minimum spacing between bubbles, in record iterations.
        # Each iteration is 1/10th second, so seconds * 10; must be int.
        minSecondsBetweenBubbles = 4
        self.minIterBetweenBubbles = int(minSecondsBetweenBubbles*10)

    def genPDL(self):
        """Generate the PyDbLite database from <RunNumber>.csv."""
        self.PyDB.create('id', 'temp', 'pressure', 'time', mode="override")
        # Import csv only when actually needed.
        import csv
        # CSV filename assumption -> RunID.csv
        fname_csv = self.RunNumber+".csv"
        # BUG FIX: the file handle was never closed; use a context manager.
        with open(fname_csv) as csv_file:
            for line in csv.reader(csv_file):
                self.PyDB.insert(id=int(line[0]),
                                 temp=float(line[1]),
                                 pressure=float(line[2]),
                                 time=datetime.strptime(line[3], "%Y-%m-%d %H:%M:%S"))
        self.PyDB.commit()
        print("Creating PyDB complete.")

    def findBubbleTimings(self):
        '''Finds the bubble timings
        In -> Pressure data
        Out -> list of [pressure, time, temp] per bubble
        Assumptions -> Bubble PSI > 30 PSI
        '''
        recs = [r for r in self.PyDB]
        # Shared iterator so the inner scan advances the outer loop too.
        RecIter = itertools.islice(recs, None)
        ValidBubbles = []            # accepted local maxima
        _VBubbleAmpTemporaryStorage = []  # current candidate [p, time, temp]
        RecLastID = recs[-1]['__id__']    # last record id, to stop the scan
        for record in RecIter:
            if record['pressure'] >= 30.0:
                # Candidate bubble: remember pressure, time, temperature.
                _VBubbleAmpTemporaryStorage = [record['pressure'], record['time'], record['temp']]
                # Scan ahead up to minIterBetweenBubbles records (clamped
                # so we don't run past the last record) for a higher peak.
                stepsTillLastRec = RecLastID - record['__id__']
                stepsIter = self.minIterBetweenBubbles if (stepsTillLastRec > self.minIterBetweenBubbles) else stepsTillLastRec
                for _ in range(stepsIter):
                    record = next(RecIter)
                    # BUG FIX: the original compared the float pressure to
                    # the whole [pressure, time, temp] list, so the running
                    # maximum was never updated correctly. Compare against
                    # the stored pressure (element 0) instead.
                    if record['pressure'] >= _VBubbleAmpTemporaryStorage[0]:
                        _VBubbleAmpTemporaryStorage = [record['pressure'], record['time'], record['temp']]
                # Local maximum found: keep it and reset the candidate.
                ValidBubbles.append(_VBubbleAmpTemporaryStorage)
                _VBubbleAmpTemporaryStorage = []
        return ValidBubbles
class BARTSIDEE_CONFIG:
    """Application settings stored in PyDbLite key/value databases.

    ``maindb`` holds (id, data) pairs seeded from settings.json;
    ``searchdb`` caches per-module search results.
    """

    def __init__(self):
        self.path_temp = mc.GetTempDir()
        self.path_module = os.path.join(CWD, "modules")
        if not os.path.exists(self.path_module):
            os.makedirs(self.path_module)
        sys.path.append(self.path_module)
        self.app_version = VERSION
        self.db_version = 11
        self.initDB()
        GA.debug = self.get("debug")

    def initDB(self):
        """Open the databases, reseeding defaults when empty or outdated."""
        self.db = Base("maindb")
        self.db.create("id", "data", mode="open")
        self.searchdb = Base("searchdb")
        self.searchdb.create("module", "id", "timestamp", "data", mode="open")
        self.cache = tools.storage()
        try:
            # Fewer than 3 records means the store was never seeded.
            if len(self.db) < 3:
                self.default()
        except Exception:  # narrowed from bare except: keep best-effort reseed
            self.default()
        records = self.db(id="version")
        # BUG FIX: guard the lookup -- a missing "version" record made
        # records[0] raise IndexError; reseed in that case as well.
        if not records or records[0]["data"] < self.db_version:
            self.default()

    def get(self, key):
        """Return the stored value for key, or False when absent.

        NOTE(review): False doubles as the "missing" sentinel, so a stored
        value of False is indistinguishable from an absent key.
        """
        records = self.db(id=key)
        if records:
            return records[0]["data"]
        else:
            return False

    def set(self, key, data):
        """Insert or update one key/value pair and persist immediately."""
        records = self.db(id=key)
        if records:
            self.db.update(records, data=data)
        else:
            self.db.insert(id=key, data=data)
        self.db.commit()

    def default(self):
        """Wipe the main DB and reseed it from settings.json."""
        self.clearDB()
        defaults = {}
        try:
            pointer = os.path.join(CWD, "settings.json")
            # BUG FIX: context manager closes the handle even when
            # json.load fails (the original leaked it on a parse error);
            # also avoids shadowing the ``file`` builtin.
            with open(pointer, "r") as settings_file:
                defaults = json.load(settings_file)
        except Exception:
            print(traceback.format_exc())
            defaults = {}
        for key in defaults.keys():
            # Positional insert maps to the ("id", "data") fields.
            self.db.insert(str(key), defaults[key])
        self.db.insert("version", self.db_version)
        self.db.commit()

    def clearDB(self):
        """Delete all main-db records; recreate the table on failure."""
        try:
            records = self.db()
            self.db.delete(records)
        except Exception:
            self.db.create("id", "data", mode="override")
        self.db.commit()

    def clearCache(self):
        """Empty the in-memory cache."""
        self.cache.empty()

    def clearSearch(self):
        """Delete all search records; recreate the table on failure."""
        try:
            records = self.searchdb()
            self.searchdb.delete(records)
        except Exception:
            self.searchdb.create("module", "id", "timestamp", "data", mode="override")
        self.searchdb.commit()

    def clearAll(self):
        """Reset settings, cache, and search results."""
        self.default()
        self.clearCache()
        self.clearSearch()