def persist_ts_values(self, filename, tstype, row):
    tablename = self.sourcevaluecolname + "_values"
    col_defn = [("filename", "string"), ("min", "integer"),
                ("max", "integer"), ("avg", "integer")]
    col_names = ["filename", "min", "max", "avg"]

    # prepend the source filename and quote string values before insertion
    row.insert(0, filename)
    row = _quotestrs([row])

    with self.database:
        # create the summary table on first use
        if tbl_exists(self.database, tablename) == False:
            logmsg = "creating [" + tablename + "] in db [" + self.databasename + "]"
            try:
                tbl_create(self.database, tablename, col_defn)
            except Exception, e:
                log.log(PRIORITY.FAILURE, msg=logmsg + " [" + e.message + "]")
            else:
                log.log(PRIORITY.SUCCESS, msg=logmsg)

        logmsg = "inserting row [" + str(row) + "] into table [" + tablename + "]"
        try:
            tbl_rows_insert(self.database, tablename, col_names, row)
        except Exception, e:
            log.log(PRIORITY.FAILURE, msg=logmsg + " [" + e.message + "]")
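A quick sketch of the quoting convention these helpers rely on: _quotestrs here, and the manual "\"" + value + "\"" wrapping in the loader functions further down, both hand string values to tbl_rows_insert already wrapped in escaped double quotes. The helper below is hypothetical (it is not the project's _quotestrs) and only illustrates that assumed behaviour.

# Hypothetical sketch of the assumed quoting convention: wrap string values
# in escaped double quotes before insertion, pass other values through.
def quote_rows_sketch(rows):
    quoted = []
    for row in rows:
        quoted.append(["\"" + value + "\"" if isinstance(value, basestring)
                       else value for value in row])
    return quoted

# quote_rows_sketch([[1, 240, 'fileA']]) -> [[1, 240, '"fileA"']]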
def dbbulkloader(database, dbrecords, tblname, cols, maxsize=300):
    with database:
        dbcoldefn = _gencoldefn(dbrecords[0], cols)

        if not tbl_exists(database, tblname) == True:
            tbl_create(database, tblname, dbcoldefn)

        dbrecords = _quotestrs(dbrecords)

        if len(dbrecords) > maxsize:
            # insert in chunks of at most maxsize rows
            for starti in range(0, len(dbrecords), maxsize):
                if starti + maxsize > len(dbrecords):
                    endi = len(dbrecords)
                else:
                    endi = starti + maxsize
                tbl_rows_insert(database, tblname, cols, dbrecords[starti:endi])
                log.log(thisfuncname(), 10, msg="loaded rows to " + tblname,
                        numrow=endi - starti)
        else:
            tbl_rows_insert(database, tblname, cols, dbrecords)
            log.log(thisfuncname(), 10, msg="loaded rows to " + tblname,
                    numrow=len(dbrecords))
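A minimal usage sketch for dbbulkloader: the database name, table name and records below are made up for illustration; only the call signature and the chunking behaviour come from the function above.

# Hypothetical call: 1000 rows are split into chunks of at most 300 before
# being passed to tbl_rows_insert.
records = [[i, 200 + i, "file%03d" % i] for i in range(1000)]
cols = ["id", "watts", "filename"]

database = Database("bulk_demo")
dbbulkloader(database, records, "samples", cols, maxsize=300)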
def _write_to_db(self, row, dname, tname, cdefn, cnames):
    database = Database(dname)
    with database:
        if tbl_exists(database, tname) == False:
            tbl_create(database, tname, cdefn)
        tbl_rows_insert(database, tname, cnames, row)
def test_tbl_rows_insert_dupe_key(self):
    database = Database(test_db.name, True)
    with database:
        tbl_create(database, test_db.tbl_name, test_db.col_defn,
                   test_db.tbl_pk_defn)
        with self.assertRaises(S3IntegrityError):
            tbl_rows_insert(database, test_db.tbl_name, test_db.col_name,
                            test_db.tbl_rows_dupe_key)
def test_tbl_rows_insert_str(self):
    database = Database(test_db_str.name)
    with database:
        tbl_create(database, test_db_str.tbl_name, test_db_str.col_defn)
        tbl_rows_insert(database, test_db_str.tbl_name, test_db_str.col_name,
                        test_db_str.tbl_rows)

    database = Database(test_db_str.name, True)
    with database:
        self.assertEquals('foobar',
                          database.execute("select col_name1 from tbl_name_test",
                                           True))
def test_tbl_rows_insert(self):
    database = Database(test_db.name)
    with database:
        tbl_create(database, test_db.tbl_name, test_db.col_defn)
        tbl_rows_insert(database, test_db.tbl_name, test_db.col_name,
                        test_db.tbl_rows)

    database = Database(test_db.name, True)
    with database:
        col_name, tbl_rows = tbl_rows_get(database, test_db.tbl_name)
        self.assertListEqual(col_name, test_db.col_name)
        self.assertListEqual(tbl_rows, test_db.tbl_rows)
def test_tbl_rows_get_specific_field(self):
    database = Database(test_db.name)
    with database:
        tbl_create(database, test_db.tbl_name, test_db.col_defn)
        tbl_rows_insert(database, test_db.tbl_name, test_db.col_name,
                        test_db.tbl_rows)

    database = Database(test_db.name, True)
    with database:
        col_name, tbl_rows, _ = tbl_rows_get(database, test_db.tbl_name,
                                             ['col_name1', 'col_name2',
                                              'col_name3', 'col_name4'])
        self.assertListEqual(col_name, test_db.col_name)
        self.assertListEqual(tbl_rows, test_db.tbl_rows)
def color_db_load(dbname="test_ssloader"):
    tblname = "colors"
    colors_col_defn = [('name', 'text'), ('hex', 'text'), ('rgb', 'text'),
                       ('__id', 'text'), ('__timestamp', 'text')]
    colors_col_names = [row[0] for row in colors_col_defn]

    dbrows = []
    for key, value in colors.attr_get_keyval(include_callable=False,
                                             include_baseattr=False):
        rgb = hex2rgb(value)
        rgbstr = ",".join(map(str, rgb))
        __id = IDGenerator().getid()
        __timestamp = datetime.now().strftime("%H:%M:%S")
        dbrows.append(["\"" + key + "\"", "\"" + value + "\"",
                       "\"" + rgbstr + "\"", "\"" + __id + "\"",
                       "\"" + __timestamp + "\""])

    database = Database(dbname)
    with database:
        if tbl_exists(database, tblname) == True:
            tbl_remove(database, tblname)
        tbl_create(database, tblname, colors_col_defn)
        exec_str, result = tbl_rows_insert(database, tblname,
                                           colors_col_names, dbrows)
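A usage sketch for color_db_load, assuming the two-value return that tbl_rows_get shows in the tests above when no field list is passed; the readback database name reuses the function's default.

# Hypothetical check after loading: rebuild the colors table, then read it
# back with tbl_rows_get as used in the tests above.
color_db_load("test_ssloader")

database = Database("test_ssloader", True)
with database:
    col_names, rows = tbl_rows_get(database, "colors")
    #print col_names  # ['name', 'hex', 'rgb', '__id', '__timestamp']
    #print len(rows)  # one row per color attribute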
def formats_db_load(dbname="test_ssloader"):
    tblname = "formats"

    hex2name = {}
    for key, value in colors.attr_get_keyval(include_callable=False,
                                             include_baseattr=False):
        hex2name[value] = key

    formats_col_defn = [('name', 'text'), ('fgcolor', 'text'),
                        ('bgcolor', 'text'), ('__id', 'text'),
                        ('__timestamp', 'text')]
    formats_col_names = [row[0] for row in formats_col_defn]

    dbrows = []
    for name, bg in colorpalette.iteritems():
        if fontpalette.has_key(name):
            fg = fontpalette[name]
        else:
            fg = '#000000'
        __id = IDGenerator().getid()
        __timestamp = datetime.now().strftime("%H:%M:%S")
        dbrows.append(["\"" + name + "\"", "\"" + hex2name[fg] + "\"",
                       "\"" + hex2name[bg] + "\"", "\"" + __id + "\"",
                       "\"" + __timestamp + "\""])

    database = Database(dbname)
    with database:
        if tbl_exists(database, tblname) == True:
            tbl_remove(database, tblname)
        tbl_create(database, tblname, formats_col_defn)
        exec_str, result = tbl_rows_insert(database, tblname,
                                           formats_col_names, dbrows)
def setUp(self):
    self.databasename = 'foo'
    self.database = Database(self.databasename)

    self.rawdata = [[1, 240, 'fileA'], [2, 241, 'fileA'],
                    [3, 242, 'fileB'], [4, 240, 'fileB']]
    self.files = ['fileA', 'fileB']
    self.col_defn = [("id", "integer"), ("watts", "integer"),
                     ("filename", "string")]
    self.col_names = ["id", "watts", "filename"]
    self.tablename = "bar"

    _rows = _quotestrs(self.rawdata)

    with self.database:
        tbl_create(self.database, self.tablename, self.col_defn)
        tbl_rows_insert(self.database, self.tablename, self.col_names, _rows)
def test_schema_insert_rows(self):
    datarows = {'tbl_col_name': [], 'tbl_rows': []}
    schema_data_get(self.schema_file, 'workout', datarows)

    database = Database('fitness')
    with database:
        tbl_rows_insert(database, 'workout', datarows['tbl_col_name'],
                        datarows['tbl_rows'])

    database = Database('fitness', True)
    with database:
        colnames, rows, _ = tbl_rows_get(database, 'workout', ['date', 'type'])
        self.assertListEqual(rows, [[250772, u'cycling'], [260772, u'rowing']])

    database = Database('diet', True)
    with database:
        pass
def setUp(self):
    self.schema_file1 = "/Users/burtnolej/Development/pythonapps/clean/utils/test_misc/test_schema_vsimple.xml"
    schema_execute(self.schema_file1)

    datarows = {'tbl_col_name': [], 'tbl_rows': []}
    schema_data_get(self.schema_file1, 'workout', datarows)

    self.database1 = Database('fitness')
    with self.database1:
        tbl_rows_insert(self.database1, 'workout', datarows['tbl_col_name'],
                        datarows['tbl_rows'])

    self.schema_file2 = "/Users/burtnolej/Development/pythonapps/clean/utils/test_misc/test_schema_vsimple2.xml"
    schema_execute(self.schema_file2)

    schema_data_get(self.schema_file2, 'food', datarows)

    self.database2 = Database('diet')
    with self.database2:
        tbl_rows_insert(self.database2, 'food', datarows['tbl_col_name'],
                        datarows['tbl_rows'])
def persist(self, f=None,
            column_defn=[("id", "integer"), ("bucket", "integer"),
                         ("watts", "integer"), ("hr", "integer"),
                         ("filename", "text")],
            column_names=["id", "bucket", "watts", "hr", "filename"]):

    database = Database(self.databasename)
    _rows = self.dump(f)

    with database:
        if tbl_exists(database, self.tablename) == False:
            tbl_create(database, self.tablename, column_defn)

        try:
            tbl_rows_insert(database, self.tablename, column_names, _rows)
        except Exception, e:
            log.log(PRIORITY.FAILURE,
                    msg="timeseries persistence failed in db [" +
                        database.name + "] [" + e.message + "]")
        else:
            log.log(PRIORITY.SUCCESS,
                    msg="timeseries persisted to db [" + database.name + "]")
def persist(self, createtable=True):
    self._metadata_set()

    # switch adult name to teacher
    try:
        _idx = self.tbl_col_defn.index('adult')
        self.tbl_col_defn.remove('adult')
        self.tbl_col_defn.insert(0, 'teacher')
    except:
        pass

    if not tbl_exists(self.database, self.tbl_name) == True:
        tbl_create(self.database, self.tbl_name, self.tbl_col_defn)

    try:
        _idx = self.tbl_col_names.index('adult')
        self.tbl_col_names.remove('adult')
        self.tbl_col_names.insert(0, 'teacher')
    except:
        pass

    # and also objtype is not persisted
    try:
        _idx = self.tbl_col_names.index('objtype')
        self.tbl_col_names.pop(_idx)
        self.tbl_row_values[0].pop(_idx)
    except:
        pass

    # or userobjid
    try:
        _idx = self.tbl_col_names.index('userobjid')
        self.tbl_col_names.pop(_idx)
        self.tbl_row_values[0].pop(_idx)
    except:
        pass

    result, exec_str = tbl_rows_insert(self.database, self.tbl_name,
                                       self.tbl_col_names, self.tbl_row_values)

    if hasattr(self, "id") == False:
        _idx = self.tbl_col_names.index('__id')
        setattr(self, "id", self.tbl_row_values[_idx])

    if self.dm.has_key('id') == False:
        _idx = self.tbl_col_names.index('__id')
        self.dm["id"] = self.tbl_row_values[0][_idx].replace('\"', '')

    return (result, exec_str)
def setUp(self):
    self.databasename = 'foo'
    self.database = Database(self.databasename)

    self.rawdata = [[1, 1, 130, 240, 'fileA'], [2, 2, 131, 241, 'fileA'],
                    [3, 3, 132, 242, 'fileA'], [4, 4, 133, 240, 'fileA'],
                    [5, 5, 132, 261, 'fileA'], [6, 6, 131, 262, 'fileA'],
                    [7, 1, 150, 310, 'fileB'], [8, 2, 152, 307, 'fileB'],
                    [9, 3, 154, 309, 'fileB'], [10, 4, 152, 306, 'fileB'],
                    [11, 5, 151, 310, 'fileB'], [12, 6, 157, 311, 'fileB']]
    self.files = ['fileA', 'fileB']
    self.col_defn = [("id", "integer"), ("bucket", "integer"),
                     ("hr", "integer"), ("watts", "integer"),
                     ("filename", "string")]
    self.col_names = ["id", "bucket", "hr", "watts", "filename"]
    self.tablename = "bar"

    _rows = _quotestrs(self.rawdata)

    with self.database:
        tbl_create(self.database, self.tablename, self.col_defn)
        tbl_rows_insert(self.database, self.tablename, self.col_names, _rows)
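The setUp fixtures above create and populate the test table, but no matching teardown appears here; a minimal companion sketch, assuming tbl_remove behaves as in the loader functions that drop an existing table before recreating it.

# Hypothetical tearDown for the fixture above: drop the test table so
# repeated runs start from a clean database.
def tearDown(self):
    with self.database:
        if tbl_exists(self.database, self.tablename) == True:
            tbl_remove(self.database, self.tablename)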
if store.has_key(unique_key) == False:
    store[unique_key] = ids[i]
else:
    # we can ignore as we only need 1 copy
    pass

database = Database("htmlparser")

# add a new column
new_lesson_columns = list(lesson_columns)
new_lesson_columns.insert(0, "prep")

with database:
    try:
        # create table if it does not exist
        tbl_create(database, 'lesson', zip(new_lesson_columns, ["text"] * 14))
    except:
        pass

    rows = []
    for key in store.keys():
        row = ['5']  # add a default value for the new column
        for value in key.split("^") + store[key]:
            row.append("\"" + value + "\"")
        rows.append(row)

    #print rows
    tbl_rows_insert(database, 'lesson', new_lesson_columns, rows)
def update(self, of, field, newvalue, dbname=None):
    # this is needed to get around the sqlite limitation that an sqlite
    # cursor can only be used in the thread it was instantiated in
    if dbname <> None:
        database = Database(dbname)
    else:
        database = self.database

    # this overrides the base class update
    # these are hacks; the datatypes id/adult need to be called the same
    # thing internally all the time - until then though ....

    # remove the id field: it is set on the object from the internal __id
    # field for convenience but should not be repersisted, as the database
    # layer will create the new __id for any revisions
    if hasattr(self, 'id') == True:
        _oldidobj = getattr(self, 'id')
    elif hasattr(self, '__id') == True:
        _oldidobj = getattr(self, '__id')
    else:
        raise Exception("cannot find an ID field")

    if hasattr(_oldidobj, "name") == True:
        setattr(self, "__id", _oldidobj.name)
    else:
        setattr(self, "__id", _oldidobj)

    delattr(self, 'id')

    self.tbl_row_value_get(False)
    self.tbl_col_defn_get(False)
    self.tbl_name_get()

    # and also switch adult name to teacher
    try:
        _idx = self.tbl_col_names.index('adult')
        self.tbl_col_names.remove('adult')
        self.tbl_col_names.insert(0, 'teacher')
    except:
        pass

    # and also objtype is not persisted
    try:
        _idx = self.tbl_col_names.index('objtype')
        self.tbl_col_names.pop(_idx)
        self.tbl_row_values[0].pop(_idx)
    except:
        pass

    currentrecord = dict(zip(self.tbl_col_names, self.tbl_row_values[0]))

    _id = IDGenerator().getid()
    _ts = self._gettimestamp()

    newrecord = deepcopy(currentrecord)
    newrecord['__version'] = "\"current\""
    newrecord[field] = "\"" + str(newvalue) + "\""
    newrecord['__id'] = "\"" + _id + "\""
    newrecord['saveversion'] = 1
    newrecord['__timestamp'] = "\"" + _ts + "\""

    #_newvalobj = of.object_get_byval(field,newvalue)

    if currentrecord[field] <> newrecord[field]:
        # create a new row in the database with version "current"
        with database:
            result, exec_str = tbl_rows_insert(database, self.tbl_name,
                                               newrecord.keys(),
                                               [newrecord.values()])

            # update version of current row to version "version"
            tbl_rows_update(database, self.tbl_name,
                            ['__version', "\"version\"",
                             '__id', "\"" + getattr(self, "__id") + "\""])

        # update in-mem object to new val and new db version id and timestamp;
        # assumes that field is also an object whose value is in the name attr
        if field == "teacher":
            field = "adult"

        _newvalobj = of.object_get_byval(field, newvalue)

        if _newvalobj == None:
            log.log(thisfuncname(), 2,
                    msg="trying to update to a value that does not exist",
                    field=field, newvalue=newvalue)
            return -1

        #_oldobj = getattr(self,field)
        #setattr(_oldobj,'name',newvalue)
        setattr(self, field, _newvalobj)

        # give the new updated record the same database ref id as prev version
        if hasattr(_oldidobj, "name") == True:
            #setattr(self,"__id",_oldidobj.name)
            setattr(_oldidobj, "name", _id)
        else:
            #setattr(self,"__id",_oldidobj)
            setattr(self, _oldidobj, _id)
            #setattr(_oldidobj,"name",_id)

        setattr(self, 'id', _oldidobj)
        #setattr(self,'id',_id)
        setattr(self, '__version', "current")
        setattr(self, '__timestamp', _ts)

        # update internal dm
        _dm = getattr(self, "dm")
        _dm[field] = newvalue
        _dm['id'] = _id
        setattr(self, "dm", _dm)

        return _id
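A stripped-down sketch of the version-chain write that update performs: insert the edited record tagged "current", then re-tag the previous row as "version" by its __id. The table name, record contents and the assumed [column, new value, key column, key value] layout of the tbl_rows_update arguments simply mirror the call above and are not confirmed elsewhere.

# Hypothetical illustration only; 'people' and the record fields are made up.
def demo_versioned_update(database, old_id, newrecord):
    with database:
        newrecord['__version'] = "\"current\""
        newrecord['__id'] = "\"" + IDGenerator().getid() + "\""
        tbl_rows_insert(database, 'people', newrecord.keys(),
                        [newrecord.values()])
        # previous row keeps its data but is demoted to an archived version
        tbl_rows_update(database, 'people',
                        ['__version', "\"version\"",
                         '__id', "\"" + old_id + "\""])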