def insertCharacterBuffer(self, publicName, cvalues, buffer):
    """Replace the stored buffer row for (publicName, characterName).

    Preserves the existing ``rename`` flag of any previous row, deletes
    that row, then inserts a fresh row holding the (binary) buffer.

    publicName -- account/public name the character belongs to.
    cvalues    -- 9-tuple: (characterName, race, pclass, sclass, tclass,
                  plevel, slevel, tlevel, realm).
    buffer     -- raw bytes of the character buffer blob.
    """
    cursor = self.conn.cursor()
    (characterName, race, pclass, sclass, tclass,
     plevel, slevel, tlevel, realm) = cvalues
    rename = 0
    try:
        # Parameterized query: the previous code interpolated the names
        # directly into the SQL string, which breaks on quotes and is
        # injection-prone.
        cursor.execute(
            "SELECT rename FROM character_buffer "
            "WHERE public_name = ? AND character_name = ?;",
            (publicName, characterName))
        row = cursor.fetchone()
        if row is not None:
            rename = row[0]
    except Exception:
        pass  # no previous row / missing table: keep rename = 0
    try:
        cursor.execute(
            "DELETE FROM character_buffer "
            "WHERE public_name = ? AND character_name = ?;",
            (publicName, characterName))
    except Exception:
        traceback.print_exc()
    buffer = sqlite.Binary(buffer)
    values = (None, publicName, characterName, race, pclass, sclass, tclass,
              plevel, slevel, tlevel, realm, rename, buffer)
    # Single row: execute(), not executemany() with a 1-tuple.
    cursor.execute(
        "INSERT INTO character_buffer VALUES(?,?,?,?,?,?,?,?,?,?,?,?,?);",
        values)
    cursor.close()
def add_chunk(self, stream_id, data):
    """Insert *data* as the next chunk of *stream_id*.

    Returns the chunk id that was assigned; the per-instance counter
    ``current_chunk_id`` advances only after a successful insert.
    """
    cur = self.db.cursor()
    assigned_id = self.current_chunk_id
    cur.execute(
        'insert into chunk (id, stream_id, data) values (?, ?, ?)',
        (assigned_id, stream_id, sqlite.Binary(data)),
    )
    self.current_chunk_id = assigned_id + 1
    return assigned_id
def __call__(self, token, callback, *args, **kwargs):
    """Memoizing call-through: return the cached object for *token*, or
    invoke *callback* and cache its result.

    callback must return a falsy value (returned as-is, nothing cached)
    or a 2-tuple ``(ttl, value)`` where ttl is ``True`` (cache forever)
    or an integer number of seconds.

    Returns the cached/computed value.
    """
    rows = self.sql.get('select expire,data from cache where id=? limit 1',
                        (token, ))
    if rows:
        expire, payload = rows[0][0], rows[0][1]
        if expire and expire < int(time.time()):
            pass  # entry expired: fall through and recompute
        else:
            try:
                obj = pickle.loads(payload)
            except Exception:
                # Narrowed from a bare except: a corrupt/undecodable entry
                # falls through to recomputation instead of hiding e.g.
                # KeyboardInterrupt.
                pass
            else:
                return obj
    response = callback(*args, **kwargs)
    if not response:
        return response
    if response[0]:
        # True -> no expiry (NULL); integer -> absolute unix deadline.
        expire = (None if isinstance(response[0], bool)
                  else int(time.time()) + response[0])
        self.sql.set('replace into cache(id,expire,data) values(?,?,?)',
                     (token, expire,
                      sqlite.Binary(pickle.dumps(response[1]))))
    return response[1]
def get(self, token, callback, *param, **kwargs):
    """Return the cached object for *token*, or compute and cache it.

    On a fresh (non-expired, decodable) cache row the stored pickle is
    returned directly.  Otherwise *callback* is invoked; it must return a
    2-tuple ``(ttl, value)`` where ttl is ``True`` (never expires) or an
    integer number of seconds.  Returns False when there is no usable
    cache entry and no callback was supplied.
    """
    cur = self.db.cursor()
    cur.execute('select expire,data from cache where id=? limit 1',
                (token, ))
    row = cur.fetchone()
    cur.close()
    if row:
        expire, payload = row
        if expire and expire < int(time.time()):
            pass  # stale entry: fall through and recompute
        else:
            try:
                obj = pickle.loads(payload)
            except Exception:
                # Narrowed from a bare except: corrupt rows fall through
                # to recomputation rather than masking system exceptions.
                pass
            else:
                return obj
    if not callback:
        return False
    response = callback(*param, **kwargs)
    if response[0]:
        obj = sqlite.Binary(pickle.dumps(response[1]))
        curtime = int(time.time())
        # True -> NULL expire (forever); integer -> absolute deadline.
        # Folded the two near-identical execute() branches into one.
        expire = (None if isinstance(response[0], bool)
                  else curtime + response[0])
        cur = self.db.cursor()
        cur.execute(
            'replace into cache(id,addtime,expire,data) values(?,?,?,?)',
            (token, curtime, expire, obj))
        self.db.commit()
        cur.close()
    return response[1]
def generateClientDB(self,realm,realmDB):
    """Produce a realm-specific client auction DB and compress it in place.

    Copies the master DB to ./data/ahserver/<realmDB>, deletes ItemList rows
    referenced only by other realms' transactions, drops the server-only
    tables, then overwrites the file with a pylzma-compressed copy of its
    own bytes so it can be streamed to clients cheaply.  All failures are
    swallowed and only logged (best effort).
    """
    try:
        #Copy the master DB to the client DB
        shutil.copyfile("./data/ahserver/ahmaster.db","./data/ahserver/%s"%realmDB)
        #Connecting to the client DB to flush out the extra tables we do not need to send over
        dbconn = sqlite.connect("./data/ahserver/%s"%realmDB,isolation_level = None)
        dcursor = dbconn.cursor()
        dcursor.execute("BEGIN TRANSACTION;")
        # Keep only item rows that belong to this realm's transactions.
        dcursor.execute("DELETE from ItemList WHERE id IN (select item_list_id FROM ItemTransactionDB WHERE realm != %d);"%realm)
        # Server-side bookkeeping tables are never shipped to clients.
        dcursor.execute("drop table ItemTransactionDB")
        dcursor.execute("drop table ItemInstance")
        dcursor.execute("drop table ItemCharacterMapping")
        dcursor.execute("drop table ItemVariant")
        dcursor.execute("END TRANSACTION;")
        # NOTE(review): SQLite's VACUUM takes no table argument, and these
        # four tables were just dropped above -- these statements most
        # likely raise and are silenced by the bare except below.  Confirm
        # whether a plain "vacuum" (whole DB) was intended.
        dcursor.execute("vacuum ItemTransactionDB")
        dcursor.execute("vacuum ItemInstance")
        dcursor.execute("vacuum ItemCharacterMapping")
        dcursor.execute("vacuum ItemVariant")
        dcursor.close()
        dbconn.close()
        #Compress the client DB. Better to do this once every 10 seconds than a bunch of times under heavy load of client requests
        f = file("./data/ahserver/%s"%realmDB,"rb")
        cbuffer = f.read()
        cbuffer = pylzma.compress(cbuffer,algorithm=0)
        # NOTE(review): sqlite.Binary wrapping is unnecessary for a plain
        # file write -- presumably harmless, the wrapped bytes still write.
        cbuffer = sqlite.Binary(cbuffer)
        f.close()
        f = file("./data/ahserver/%s"%realmDB,"wb")
        f.write(cbuffer)
        f.close()
    except:
        # Best effort: any failure leaves whatever file state exists on disk.
        print "Failed to write %s to disk"%realmDB
def UpgradePlayerBuffer(publicName, buffer):
    """Upgrade one player's compressed buffer DB to the new schema.

    Decompresses *buffer* into ./data/tmp/pbuffer, opens it as a SQLite DB
    (module global PCONN), rebuilds the player tables into ./data/tmp/nbuffer
    (module global NEWPCONN) via the module-level DoTable/DoTranslation
    helpers, then recompresses the rebuilt file and inserts it into
    NEWCONN's player_buffer table.
    """
    global PCONN, NEWPCONN, LASTCHARACTER
    # Progress logging: one line per new leading letter of the account name.
    if LASTCHARACTER != publicName[0]:
        LASTCHARACTER = publicName[0]
        print "Upgrading Players: %s" % LASTCHARACTER
    dbuffer = zlib.decompress(buffer)
    f = file("./data/tmp/pbuffer", "wb")
    f.write(dbuffer)
    f.close()
    PCONN = sqlite.connect("./data/tmp/pbuffer")
    # Start the target DB from a clean slate.
    if os.path.exists("./data/tmp/nbuffer"):
        os.remove("./data/tmp/nbuffer")
    #get character names
    #cursor = PCONN.cursor()
    #cursor.execute("SELECT name from character")
    #for name in cursor.fetchall():
    #    n = (name[0],)
    #    assert n not in CHARACTER_NAMES, "Character name collision: %s -> %s"%(publicName,n[0])
    #    CHARACTER_NAMES.append(n)
    NEWPCONN = sqlite.connect("./data/tmp/nbuffer", isolation_level=None)
    nc = NEWPCONN.cursor()
    nc.execute("BEGIN TRANSACTION;")
    nc.executescript(CREATE_PLAYER_TABLE_SQL)
    # Copy each player table across (Python 2: map used for side effects).
    map(DoTable, PLAYER_TABLES)
    # Create an alias 'content' for the 'item' translation entry.
    # Needed for the ItemContainerContent class in item.py.
    try:
        TTRANS['content'] = TTRANS['item']
    except KeyError:
        pass
    # Do translation.
    map(DoTranslation, PLAYER_TABLES)
    nc.execute("END TRANSACTION;")
    nc.close()
    PCONN.close()
    NEWPCONN.close()
    PCONN = None
    NEWPCONN = None
    # Read the rebuilt DB back, recompress it, and store the new blob.
    f = file("./data/tmp/nbuffer", "rb")
    buffer = f.read()
    f.close()
    buffer = zlib.compress(buffer)
    buffer = sqlite.Binary(buffer)
    cursor = NEWCONN.cursor()
    cursor.executemany("INSERT INTO player_buffer VALUES(?,?,?);", ((None, publicName, buffer), ))
    cursor.close()
def accept(self):
    """Interactively archive the newest image directory into blob.db.

    Builds and runs a shell script that lists the most recent image
    directory, turns the listing into absolute paths, then prompts the
    user; on 'Y' every listed file is stored as a BLOB row and the
    directory counter file is updated, on 'N' nothing is saved.
    """
    lst = []
    idata = []
    obj = count_and_fetch()
    chng_dir = obj.change_directory_to_recent_dir()
    number_of_files = obj.fetch_images_from_recent_directory()
    number_of_files = int(number_of_files)
    dir_count = obj.get_directories_count_from_disk()
    # Generate a helper script that lists the directory's files, sorted.
    acc = open("image_accept.sh", "w")
    acc.write("#!/bin/bash\n")
    acc.write("cd /home/hk/Desktop/tst/{}\n".format(chng_dir))
    acc.write("ls | sort > /media/hk/HK/DERBI/Tech-Tailor/SSHD/list.txt\n")
    acc.close()
    subprocess.call('chmod +x image_accept.sh', shell=True)
    subprocess.call('./image_accept.sh', shell=True)
    var = '/home/hk/Desktop/tst/list1.txt'
    # Rewrite the bare filenames as absolute paths.
    # NOTE(review): no '/' is inserted between the directory and the file
    # name -- presumably chng_dir ends with a separator; verify.
    with open("/media/hk/HK/DERBI/Tech-Tailor/SSHD/list.txt") as f1, open(
            var, "w") as f2:
        for i in f1:
            f2.write('/home/hk/Desktop/tst/{}'.format(chng_dir) + i)
    with open(var) as q:
        for each_line in q:
            lst.append(each_line)
    #for i in lst:
    #    print(i)
    print(
        "There are {} Images in {} directory, Do you want to save them (Y 'or' N)?"
        .format(number_of_files, chng_dir))
    n = input()
    if (n == 'Y'):
        con = sqlite.connect('blob.db')
        cur = con.cursor()
        # NOTE(review): the backquoted `chng_dir` is a LITERAL table name,
        # not the value of the chng_dir variable -- confirm whether a
        # per-directory table was intended.
        cur.execute(
            '''CREATE TABLE `chng_dir` (`Names` REAL ,Images REAL)''')
        for ind, j in enumerate(lst):
            j = j.rstrip()
            # NOTE(review): 'file' shadows the builtin of the same name.
            file = open(j, 'rb')
            idata.append(file.read())
            temp = idata[ind]
            file.close()
            cur.execute('''INSERT INTO `chng_dir` values (?, ?)''',
                        (file.name, sqlite.Binary(temp)))
        con.commit()
        print("Saved to Database Successfully!!!!")
        # Persist the new directory count for the next run.
        update_dir_count = open("count.txt", "w")
        dir_count = str(dir_count)
        update_dir_count.write(dir_count)
        update_dir_count.close()
    elif (n == 'N'):
        print("Discarded!!")
def get(self, key, revid):
    """Look up *key* in the data table.

    Returns the unmarshalled, decompressed payload when the stored revid
    matches *revid*; returns None when the key is absent or the revision
    differs.
    """
    query = "select revid, data from data where key = ?"
    self.cursor.execute(query, (dbapi2.Binary(key), ))
    row = self.cursor.fetchone()
    if row is None:
        # Unknown key.
        return None
    stored_revid, payload = row
    if str(stored_revid) != revid:
        # Revision mismatch: treat as a cache miss.
        return None
    return marshal.loads(zlib.decompress(payload))
def set(self, key, revid, data):
    """Store *data* under *key* with revision *revid*.

    The payload is marshalled and zlib-compressed; any existing row for
    the key is deleted first.  A concurrent writer racing on the same key
    is tolerated (it is only a cache).
    """
    try:
        self.cursor.execute('delete from data where key = ?',
                            (dbapi2.Binary(key), ))
        blob = zlib.compress(marshal.dumps(data))
        # A list comprehension instead of map(): on Python 3 map() returns
        # a lazy iterator, which sqlite3 rejects as a parameter sequence.
        self.cursor.execute(
            "insert into data (key, revid, data) values (?, ?, ?)",
            [dbapi2.Binary(v) for v in (key, revid, blob)])
        self.connection.commit()
    except dbapi2.IntegrityError:
        # If another thread or process attempted to set the same key, we
        # don't care too much -- it's only a cache after all!
        pass
def writeData(self, oid, name, data, txn=None):
    """Persist *data* (pickled) for (oid, name) into nodedata.

    When *txn* is given its execute method is used; otherwise the write is
    issued through ``self.db.runOperation``.  Raises StorageError when the
    storage is read-only.  Returns whatever the chosen operation returns.
    """
    if self.readonly:
        raise errors.StorageError('storage in readonly mode')
    op = txn.execute if txn else self.db.runOperation
    dtype = 'pickle'
    payload = sqlite.Binary(pickle.dumps(data))
    query = """replace into nodedata (oid, name, datatype, data)
               values (?, ?, ?, ?)"""
    return op(query, (oid, name, dtype, payload))
def clean_player_IDs():
    """Reset auction_id_n to 0 inside every stored character buffer.

    Backs up character.db, then for each row of character_buffer:
    decompresses the blob to a temp file, opens it as its own SQLite DB,
    zeroes character.auction_id_n, recompresses the file, and writes the
    new blob back.  Rows that fail are logged and skipped.
    """
    count = 0
    shutil.copyfile("./data/character/character.db","./data/character/character.bak")
    #Open up the Character DB...isolation_level is needed if you wish to write to it
    dbconn = sqlite.connect("./data/character/character.db",isolation_level = None)
    dcursor = dbconn.cursor()
    dcursor.execute("BEGIN TRANSACTION;")
    #Going to get the buffer and the name of the character
    dcursor.execute("SELECT character_name, buffer from character_buffer;")
    #Loop through the results
    for name,buffer in dcursor.fetchall():
        try:
            #Decompress it using zlib
            dbuffer = zlib.decompress(buffer)
            #Write to a file so we can open it up as a DB
            f = file("./data/tmp/abuffer","wb")
            f.write(dbuffer)
            f.close()
            #Open that file up as a sqLite DB so we can work with it. At this point it is decompressed
            bconn = sqlite.connect("./data/tmp/abuffer",isolation_level = None)
            bcursor = bconn.cursor()
            #Update the Auction IDN
            bcursor.execute("UPDATE character SET auction_id_n = 0")
            #Close DB and Cursor
            bcursor.close()
            bconn.close()
            #Going to open the file again this time to dump it into a buffer
            f = file("./data/tmp/abuffer","rb")
            dbuffer = f.read()
            f.close()
            #Compress the file and make sure it is binary
            dbuffer = zlib.compress(dbuffer)
            dbuffer = sqlite.Binary(dbuffer)
            #Values passed via the ? below. Used to make SQLite happy about the BLOB value for the buffer
            values = (dbuffer,name)
            #Update the Character DB with the new buffer
            dcursor.execute("UPDATE character_buffer SET buffer = ? WHERE character_name = ?",values)
            count += 1
        except:
            # Best effort per row: log the failure and move on.
            traceback.print_stack()
            print "AhServer: %s did not get an update to his/her character buffer"%name
            continue
    dcursor.execute("END TRANSACTION;")
    dcursor.close()
    dbconn.close()
    print "Character IDs Cleaned: %d"%count
def insertPlayerBuffer(self, publicName, buffer):
    """Append a new player buffer row and keep only the 10 newest rows
    for that player.

    publicName -- account/public name the buffer belongs to.
    buffer     -- raw bytes of the player buffer blob.
    """
    cursor = self.conn.cursor()
    blob = sqlite.Binary(buffer)
    # Single row: execute(), not executemany() with a 1-tuple.
    cursor.execute("INSERT INTO player_buffer VALUES(?,?,?);",
                   (None, publicName, blob))
    # Parameterized query: the previous code interpolated publicName
    # directly into the SQL, which breaks on quotes and is injection-prone.
    cursor.execute(
        "SELECT id FROM player_buffer WHERE public_name = ? "
        "ORDER BY id DESC;",
        (publicName,))
    # Everything past the 10 newest ids is stale; slicing a list can never
    # raise, so the old bare try/except was dead weight.
    stale = cursor.fetchall()[10:]
    for (row_id,) in stale:
        cursor.execute("DELETE FROM player_buffer WHERE id = ?;", (row_id,))
    cursor.close()
def get(self, key, revid):
    """Fetch and decode the entry stored under *key*.

    Both *key* and *revid* must be bytes (TypeError otherwise).  Returns
    None when the key is missing, the stored revid does not match, or the
    payload cannot be decompressed/unmarshalled.
    """
    for value in (key, revid):
        if not isinstance(value, bytes):
            raise TypeError(value)
    self.cursor.execute("select revid, data from data where key = ?",
                        (dbapi2.Binary(key), ))
    row = self.cursor.fetchone()
    if row is None:
        # Unknown key.
        return None
    if str(row[0]) != revid:
        # Revision mismatch: miss.
        return None
    try:
        return marshal.loads(zlib.decompress(row[1]))
    except (EOFError, ValueError, TypeError):
        # Corrupt payload: behave like a miss.
        return None
def set(self, token, data, expire=None):
    """Cache *data* (pickled) under *token*.

    *expire* is a TTL in seconds, converted here to an absolute unix
    timestamp; None (or 0) means the entry never expires.
    """
    deadline = expire
    if deadline:
        deadline = int(time.time()) + deadline
    payload = sqlite.Binary(pickle.dumps(data))
    self.sql.set('replace into cache(id,expire,data) values(?,?,?)',
                 (token, deadline, payload))
def _set(self, key, value):
    """Serialize *value* with pickle and upsert it into nosql under *key*."""
    payload = sqlite.Binary(pickle.dumps(value))
    self._sql.set('replace into nosql(id,data) values(?,?)',
                  (key, payload))
def UpgradeCharacterBuffer(values):
    """Upgrade one character's compressed buffer DB to the new schema.

    *values* is a full character_buffer row.  The blob is decompressed to
    ./data/tmp/cbuffer, rebuilt into ./data/tmp/nbuffer through the
    module-level DoTable/DoTranslation helpers (via PCONN/NEWPCONN
    globals), then the character is renamed on collision with existing
    character or spawn names, recompressed, and inserted into NEWCONN's
    character_buffer table.  Undecompressible rows are recorded in
    BAD_CHARACTERS and skipped.
    """
    global PCONN, NEWPCONN, BAD_CHARACTERS, WARN, MUSTRENAMECOUNTER
    id, publicName, characterName, race, pclass, sclass, tclass, plevel, slevel, tlevel, realm, rename, buffer = values
    try:
        dbuffer = zlib.decompress(buffer)
    except:
        # Corrupt blob: record it and skip this character entirely.
        traceback.print_stack()
        print "Error in character buffer", publicName, characterName
        BAD_CHARACTERS.append((publicName, characterName))
        return
    f = file("./data/tmp/cbuffer", "wb")
    f.write(dbuffer)
    f.close()
    PCONN = sqlite.connect("./data/tmp/cbuffer")
    # Start the target DB from a clean slate.
    if os.path.exists("./data/tmp/nbuffer"):
        os.remove("./data/tmp/nbuffer")
    #get character names
    #cursor = PCONN.cursor()
    #cursor.execute("SELECT name from character")
    #for name in cursor.fetchall():
    #    n = (name[0],)
    #    assert n not in CHARACTER_NAMES, "Character name collision: %s -> %s"%(publicName,n[0])
    #    CHARACTER_NAMES.append(n)
    NEWPCONN = sqlite.connect("./data/tmp/nbuffer", isolation_level=None)
    nc = NEWPCONN.cursor()
    nc.execute("BEGIN TRANSACTION;")
    nc.executescript(CREATE_CHARACTER_TABLE_SQL)
    # One-time warning hook (currently disabled).
    if WARN:
        WARN = False
        #for x in range(0,50):
        #    print "WARNING: ignoring character item vault, add once it exists"
    # Copy each character table across (Python 2: map used for side effects).
    map(DoTable, CHARACTER_TABLES)
    # Create an alias 'content' for the 'item' translation entry.
    # Needed for the ItemContainerContent class in item.py.
    try:
        TTRANS['content'] = TTRANS['item']
    except KeyError:
        pass
    # Do translation.
    map(DoTranslation, CHARACTER_TABLES)
    #handle the freaking classchange for the conversion
    #nc.execute("SELECT name from character;")
    #name = nc.fetchone()[0]
    #print "Setting up change class for %s"%name
    #nc.execute("SELECT plevel,slevel,tlevel FROM spawn WHERE name = '%s';"%name)
    #plevel,slevel,tlevel = nc.fetchone()
    #print plevel,slevel,tlevel
    #if slevel and tlevel:
    #    nc.execute("UPDATE character SET pchange=1, schange=1, tchange=1 WHERE name = '%s';"%name)
    #elif slevel:
    #    nc.execute("UPDATE character SET pchange=1, schange=1, tchange=0 WHERE name = '%s';"%name)
    #else:
    #    nc.execute("UPDATE character SET pchange=1, schange=0, tchange=0 WHERE name = '%s';"%name)
    nc.execute("END TRANSACTION;")
    nc.close()
    PCONN.close()
    NEWPCONN.close()
    PCONN = None
    NEWPCONN = None
    # Read the rebuilt DB back and recompress it for storage.
    f = file("./data/tmp/nbuffer", "rb")
    buffer = f.read()
    f.close()
    buffer = zlib.compress(buffer)
    buffer = sqlite.Binary(buffer)
    #perhaps remove me post pname->cname update
    # Normalize to title case but keep connective words lowercase.
    name = str(characterName.title())
    name = name.replace("The ", "the ")
    name = name.replace("To ", "to ")
    name = name.replace("And ", "and ")
    name = name.replace("Of ", "of ")
    # Collision handling: spawn names and already-seen character names force
    # a placeholder name the player must change later.
    spawnc = SPAWN_NAMES.has_key(name.upper())
    if spawnc or CHARACTER_NAMES.has_key(name):
        MUSTRENAMECOUNTER += 1
        mname = "Please Rename %i" % MUSTRENAMECOUNTER
        if not spawnc:
            print "Character Name collision %s, renaming to %s and setting rename=2" % (
                name, mname)
        else:
            print "Spawn Name collision %s, renaming to %s and setting rename=2" % (
                name, mname)
        name = mname
    else:
        CHARACTER_NAMES[name] = name
    #v = (None,publicName,characterName,race,pclass,sclass,tclass,plevel,slevel,tlevel,realm,rename,buffer)
    v = (None, publicName, name, race, pclass, sclass, tclass, plevel,
         slevel, tlevel, realm, rename, buffer)
    cursor = NEWCONN.cursor()
    cursor.executemany(
        "INSERT INTO character_buffer VALUES(?,?,?,?,?,?,?,?,?,?,?,?,?);",
        (v, ))
    cursor.close()
def store_in_database(cur, image_name, data):
    """Insert one image row (name + raw bytes as a BLOB) and commit.

    cur        -- an open sqlite cursor on a table ``image(name, image_file)``.
    image_name -- display/file name to store.
    data       -- raw image bytes.

    Commits via ``cur.connection`` instead of the previous module-level
    ``conn`` global, removing a hidden dependency on module state.
    """
    cur.execute("INSERT INTO image (name, image_file) values (?, ?)",
                (image_name, sqlite.Binary(data)))
    cur.connection.commit()
def _serialize(self, obj):
    """Pickle *obj* (protocol 2) and wrap it for BLOB storage."""
    payload = pickle.dumps(obj, protocol=2)
    return dbapi2.Binary(payload)
def ConvertBuffer(publicName,buffer):
    """Split one legacy player blob into per-character buffers plus a
    character-free player buffer.

    Decompresses *buffer* to ./data/tmp/pbuffer, strips character tables
    from a copy (the new player buffer), then for every character id in
    the original produces a copy containing only that character and
    inserts it into character_buffer via the module-level DST_CURSOR.
    Finally the stripped player buffer is recompressed and inserted into
    player_buffer.
    """
    print "Converting Player: %s"%publicName
    dbuffer = zlib.decompress(buffer)
    f = file("./data/tmp/pbuffer","wb")
    f.write(dbuffer)
    f.close()
    #first let's make the new player buffer which is just the player info
    shutil.copyfile("./data/tmp/pbuffer","./data/tmp/nbuffer")
    PLAYER_CONN = sqlite.connect("./data/tmp/nbuffer")
    c = PLAYER_CONN.cursor()
    RemoveCharacterTables(c)
    c.close()
    PLAYER_CONN.close()
    PCONN = sqlite.connect("./data/tmp/pbuffer")
    PCURSOR = PCONN.cursor()
    PCURSOR.execute("SELECT id from character;")
    # One pass per character: copy the whole buffer, then delete everyone else.
    for cid in PCURSOR.fetchall():
        shutil.copyfile("./data/tmp/pbuffer","./data/tmp/cbuffer")
        CHAR_CONN = sqlite.connect("./data/tmp/cbuffer",isolation_level = None)
        ccursor = CHAR_CONN.cursor()
        ccursor.execute("BEGIN TRANSACTION;")
        DeleteAllCharactersExcept(ccursor,cid[0])
        ccursor.execute("SELECT name,spawn_id from character where id = %i;"%cid[0])
        name,spawnid = ccursor.fetchone()
        CHARACTER_NAMES.append(name)
        # Pull the summary columns that live on the spawn row.
        ccursor.execute("SELECT race,pclass_internal,sclass_internal,tclass_internal,plevel,slevel,tlevel,realm from spawn where id = %i;"%spawnid)
        values = [None,publicName,name]
        values.extend(list(ccursor.fetchone()))
        ccursor.execute("END TRANSACTION;")
        ccursor.close()
        CHAR_CONN.close()
        #insert new PBUFFER
        f = file("./data/tmp/cbuffer","rb")
        cbuffer = f.read()
        f.close()
        cbuffer = zlib.compress(cbuffer)
        cbuffer = sqlite.Binary(cbuffer)
        values.append(cbuffer)
        DST_CURSOR.executemany("INSERT INTO character_buffer VALUES(?,?,?,?,?,?,?,?,?,?,?,?);",(values,))
    PCURSOR.close()
    PCONN.close()
    #insert new PBUFFER
    f = file("./data/tmp/nbuffer","rb")
    pbuffer = f.read()
    f.close()
    pbuffer = zlib.compress(pbuffer)
    pbuffer = sqlite.Binary(pbuffer)
    DST_CURSOR.executemany("INSERT INTO player_buffer VALUES(?,?,?);",((None,publicName,pbuffer),))
isolation_level=None) TOCONN = sqlite.connect("../../data/character/character.db", isolation_level=None) fcursor = FROMCONN.cursor() tcursor = TOCONN.cursor() tcursor.execute( "SELECT public_name FROM character_buffer WHERE character_name ='%s';" % CHARACTERNAME) results = tcursor.fetchall() print results if len(results): raise "There is already a character buffer in destination", results fcursor.execute( "SELECT public_name,character_name,race,pclass,sclass,tclass,plevel,slevel,tlevel,realm,rename,buffer FROM character_buffer WHERE public_name='%s' AND character_name ='%s';" % (PUBLICNAME, CHARACTERNAME)) results = fcursor.fetchall() assert len(results) == 1 r = results[0] print r buffer = sqlite.Binary(r[11]) values = (None, r[0], r[1], r[2], r[3], r[4], r[5], r[6], r[7], r[8], r[9], r[10], buffer) tcursor.execute( "INSERT INTO character_buffer VALUES(?,?,?,?,?,?,?,?,?,?,?,?,?);", values)
def addDeviceConfigData(self, oid, data, timestamp):
    """Queue an insert of one device_config_data row.

    Raises StorageError when the storage is read-only; otherwise returns
    the result of ``self.db.runOperation`` for the insert.
    """
    if self.readonly:
        raise errors.StorageError('storage in readonly mode')
    query = """insert into device_config_data (oid, data, timestamp)
               values (?, ?, ?)"""
    params = (oid, sqlite.Binary(data), timestamp)
    return self.db.runOperation(query, params)