def reinit_db_new(dbpath, callback=None, sql_dump=None):
    """Re-initialize the calibre database at *dbpath* from a SQL dump.

    If *sql_dump* is None, the dump is generated from the existing database
    with the apsw shell's ``.dump`` command; otherwise it is read from the
    given UTF-8 encoded file.  The original ``user_version`` pragma is
    preserved on the rebuilt database.

    :param dbpath: path of the database file to re-initialize
    :param callback: optional progress callback, called as ``callback(1, bool)``
    :param sql_dump: optional path to a SQL dump file to load instead
    """
    from calibre.db.backend import Connection
    import apsw
    import shutil
    from io import StringIO
    from contextlib import closing
    if callback is None:
        callback = lambda x, y: None
    with closing(Connection(dbpath)) as conn:
        # Record the schema version so the rebuilt DB keeps it.
        uv = int(conn.get('PRAGMA user_version;', all=False))
        if sql_dump is None:
            buf = StringIO()
            shell = apsw.Shell(db=conn, stdout=buf)
            shell.process_command('.dump')
            sql = buf.getvalue()
        else:
            # FIX: close the dump file promptly instead of leaking the handle.
            with open(sql_dump, 'rb') as f:
                sql = f.read().decode('utf-8')
    dest = dbpath + '.tmp'
    callback(1, True)
    try:
        # Build the new database in a temp file, then swap it into place, so
        # a failure part-way through does not destroy the original database.
        with closing(Connection(dest)) as conn:
            conn.execute(sql)
            conn.execute('PRAGMA user_version=%d;' % int(uv))
        os.remove(dbpath)
        shutil.copyfile(dest, dbpath)
    finally:
        callback(1, False)
        if os.path.exists(dest):
            os.remove(dest)
    prints('Database successfully re-initialized')
def sqlite2html(db_file,Table_name,csv_name):
    # Export a table from *db_file* as HTML via the apsw shell.
    # NOTE(review): this snippet appears truncated — Table_name and csv_name
    # are never used and no SQL is executed; presumably the query/export
    # step follows the trailing "continue" marker. Confirm against the
    # complete original.
    output=io.StringIO()  # assumes a module-level `import io` — confirm
    conn = apsw.Connection(db_file)
    shell=apsw.Shell(stdout=output, db=conn)
    # How to execute a dot command
    shell.process_command(".mode html")
    # continue
def dump_database(db):
    """Create a new database dump (SQL text) from the *db* object as input.

    Works around an apsw.Shell quirk: the ``.dump`` command misbehaves when
    the shell is given a live ``db=`` connection, so the database is first
    backed up to a temporary file and the shell is pointed at that file via
    ``args``.  This is still ~20x faster than re-running the scenario with
    a file-backed db.
    """
    # TEMPORARY
    db_filename = tempfile.gettempdir() + '/tmpforbackup.db'
    remove_database_files(db_filename)
    filecon = apsw.Connection(db_filename)
    with filecon.backup("main", db, "main") as backup:
        backup.step()
    # FIX: release the backup connection before the shell re-opens the file
    # (the original leaked this handle).
    filecon.close()
    output = io.StringIO()
    shell = apsw.Shell(stdout=output, args=(db_filename, ))
    shell.process_command(".dump")
    # Drop the header lines apsw prepends to every dump.
    lines = output.getvalue().split('\n')[8:]
    new_data = '\n'.join(lines)
    # Clean stray ";" that lands on its own line after a ")".
    # FIX: raw string — '\)' is an invalid escape in a normal string literal.
    new_data = re.sub(r'\)[\n\s]+;', ');', new_data)
    # apsw oddness: following sentence not always generated!
    new_data = new_data.replace(
        '-- The values of various per-database settings\n', '')
    remove_database_files(db_filename)
    return new_data
def get_html_table(sql_command_str, db_name):
    """Run *sql_command_str* against the SQLite db *db_name* and return the
    result set rendered as an HTML table (string)."""
    import apsw
    import io
    buf = io.StringIO()
    shell = apsw.Shell(stdout=buf, db=apsw.Connection(db_name))
    # Switch the shell's output format to HTML before running the query.
    shell.process_command(".mode html")
    shell.process_sql(str(sql_command_str))
    return (buf.getvalue())
def sqlbody(bodydb, body):
    """Extract all rows of the ``body`` table from an AOSP email database.

    Writes the dump (with a header line) to ``body.txt`` inside the *body*
    output directory.
    """
    print( "--> Extracting data from Email (AOSP)\n\n")
    outputfile = os.path.join(body, "body.txt")
    # FIX: with-statement closes the file even when extraction fails
    # (the original leaked the handle on exception).
    with open(outputfile, 'w', encoding='utf8') as output:
        extractSQLconnect = apsw.Connection(bodydb)
        SQLShell = apsw.Shell(stdout=output, db=extractSQLconnect)
        try:
            SQLShell.process_command(".header on")
            SQLShell.process_sql("select * from body")
        except Exception:
            # FIX: narrowed bare `except:`; best-effort — report and continue.
            print("Could not extract message body")
def sqlmetadata(metadatadb, metadata, table):
    """Dump one *table* from an email metadata database to ``<table>.txt``
    inside the *metadata* output directory."""
    outputfile = os.path.join(metadata, table + ".txt")
    # FIX: with-statement closes the file even when extraction fails
    # (the original leaked the handle on exception).
    with open(outputfile, 'w', encoding='utf8') as output:
        extractSQLconnect = apsw.Connection(metadatadb)
        SQLShell = apsw.Shell(stdout=output, db=extractSQLconnect)
        try:
            SQLShell.process_command(".header on")
            # Identifier (table name) cannot be bound as a parameter; the
            # value comes from the trusted caller, not external input.
            SQLShell.process_sql("select * from " + table)
        except Exception:
            # FIX: narrowed bare `except:`; best-effort — report and continue.
            print("Could not extract email " + table +" info")
def runQueryOutputCSV(self, queryString):
    """Run *queryString* and return the result formatted as CSV text.

    Falls back to :meth:`runQueryOutputString` when the loaded DB driver is
    not apsw, since only apsw provides the Shell used for CSV formatting.
    """
    # NOTE(review): dbConnLoaded looks like a module-level global — confirm.
    if dbConnLoaded != "apsw":
        # FIX: corrected typo "currenly" in the user-facing message.
        print("csv currently only works with apsw! Running non csv version")
        return self.runQueryOutputString(queryString)
    # FIX: dropped redundant `else` after `return` (guard-clause style).
    output = io.StringIO()
    self.shell = apsw.Shell(stdout=output, db=self.conn)
    self.shell.process_command(".mode csv")
    self.shell.process_command(".headers on")
    self.shell.process_sql(queryString)
    return output.getvalue()
def sql(database, extractdir):
    """Dump every table of *database* into ``<table>.txt`` files under
    *extractdir*, each with a header line."""
    dbconnection = apsw.Connection(database)
    dbcursor1 = dbconnection.cursor()
    for row in dbcursor1.execute(
            "SELECT name FROM sqlite_master WHERE type='table';"):
        for entry1 in row:
            outfilepath = os.path.join(extractdir, str(entry1) + ".txt")
            # FIX: with-statement closes the file even if the dump fails.
            with open(outfilepath, "w", encoding='utf8') as outfile:
                SQLShell = apsw.Shell(stdout=outfile, db=dbconnection)
                SQLShell.process_command(".header on")
                # Table names come straight from sqlite_master, so this
                # concatenation cannot introduce foreign SQL.
                SQLShell.process_sql("select * from " + str(entry1))
    # FIX: removed dead local `errors` (assigned but never used).
def test_db():
    """Regenerate the database dump and compare it against the known-good copy."""
    GOOD = CURR_DIR + '/db.dump'
    NEW = CURR_DIR + '/db.dump.new'
    with open(GOOD, 'r') as f:
        good_data = f.readlines()  # also verifies the reference dump exists
    import io
    buf = io.StringIO()
    shell = apsw.Shell(stdout=buf, args=(config.DATABASE, ))
    shell.process_command(".dump")
    # Skip the header lines apsw prepends, keep the rest verbatim.
    body = '\n'.join(buf.getvalue().split('\n')[8:])
    with open(NEW, 'w') as f:
        f.writelines(body)
    compare('db.dump')
def sql(database, extractdir):
    """Extract calendar events per account and calendar.

    Output layout: ``<extractdir>/<account_name>/<calendar_name>/Events.txt``.
    """
    dbconnection = apsw.Connection(database)
    dbcursor1 = dbconnection.cursor()
    dbcursor2 = dbconnection.cursor()
    dbcursor3 = dbconnection.cursor()
    errors = 0
    for row in dbcursor1.execute(
            "select distinct account_name from calendars"):
        for entry1 in row:
            accountdir = os.path.join(extractdir, str(entry1))
            if not os.path.exists(accountdir):
                os.makedirs(accountdir)
            # FIX: parameterized queries — account/calendar names may contain
            # quotes, which broke the original string-concatenated SQL.
            for row2 in dbcursor2.execute(
                    "select name from calendars where account_name = ?",
                    (str(entry1),)):
                for entry2 in row2:
                    caldir = os.path.join(accountdir, str(entry2))
                    if not os.path.exists(caldir):
                        os.makedirs(caldir)
                    # BUGFIX: the original compared name against the account
                    # value and account_name against the calendar name
                    # (arguments swapped), so no calendar ids matched.
                    for row3 in dbcursor3.execute(
                            "select _id from calendars where name = ? "
                            "AND account_name = ?",
                            (str(entry2), str(entry1))):
                        for entry3 in row3:
                            filepath = os.path.join(caldir, "Events.txt")
                            # FIX: with-statement closes the file on failure.
                            with open(filepath, "w", encoding='utf8') as eventfile:
                                SQLShell = apsw.Shell(stdout=eventfile,
                                                      db=dbconnection)
                                try:
                                    SQLShell.process_command(".header on")
                                    SQLShell.process_sql(
                                        "select * from events where calendar_id = "
                                        + str(entry3))
                                except Exception:
                                    print("Error: Could not process " +
                                          str(entry1) + " in " + str(entry2) + ".")
                                    errors = errors + 1
    if not errors == 0:
        # BUGFIX: `errors + " error(s)..."` raised TypeError (int + str).
        print(str(errors) + " error(s) occured in the extraction process!")
# NOTE(review): fragment — the enclosing function/loop headers are outside
# this view; the trailing `else:` pairs with an `if <WhatsApp db found>`
# that is also not visible here.
tmpfile.write(chunk)  # presumably the tail of a chunked decrypt/copy loop — confirm
keyfile.close()
tmpfile.close()
dbdestfile.close()
# Re-open the temp database and decompress it with zlib; wbits of
# 16 + MAX_WBITS tells decompressobj to expect a gzip header (see the
# commented-out gzip shell command below).
tmpfile = open(dbtmp, "rb")
tmptogzip = tmpfile.read()
finaldb = open(dbdecrypt, "wb")
d = zlib.decompressobj(16 + zlib.MAX_WBITS)
decompressdata = d.decompress(tmptogzip)
finaldb.write(decompressdata)
tmpfile.close()
finaldb.close()
#os.system('bin\\gzip.exe -d < "' + dbtmp + '" > "' + dbdecrypt + '"' if os.name == 'nt' else 'gzip -d < "' + dbtmp + '" > "' + dbdecrypt + '" 2>&1' )
print("--> Extracting WhatsApp data\n\n")
# Dump the messages table of the decrypted database to messages.txt.
txtoutput = os.path.join(case, "extracted data", "whatsapp", "messages.txt")
txtoutfile = open(txtoutput, 'w', encoding='utf8')
sqlconnection = apsw.Connection(dbdecrypt)
sqlshell = apsw.Shell(stdout=txtoutfile, db=sqlconnection)
sqlshell.process_command('.header on')
sqlshell.process_sql('select * from messages')
txtoutfile.close()
# Clean up the intermediate files.
os.remove(dbtmp)
os.remove(dbnohead)
else:
    print("--> Not extracting WhatsApp data. Reason: Not found\n\n")
# NOTE(review): fragment — the loop/branch context that defines `row`,
# `setting_match` and `db_match` for this first `if` is outside this view.
if setting_match and db_match:
    accsettdb_src = os.path.join(importdbdir, str(row))
    accsettdb_dest = os.path.join(settingdbdir, str(row))
    shutil.copyfile(accsettdb_src, accsettdb_dest)
# Copy the miscellaneous provider databases alongside the account settings.
copy_misc(importdbdir, dbdir, "EmailProvider.db")
copy_misc(importdbdir, dbdir, "google_analytics_v2.db")
copy_misc(importdbdir, dbdir, "EmailProviderBody.db")
copy_misc(importdbdir, dbdir, "suggestions.db")
# Dump the messages table of every mailstore.<account>.db file into
# <extractdir>/<account>/Messages.txt.
for row in os.listdir(maildbdir):
    accname = str(row)
    # Strip the "mailstore." prefix and ".db" suffix to get the account name.
    accname = accname.replace("mailstore.","")
    accname = accname.replace(".db", "")
    dbselected = os.path.join(maildbdir, str(row))
    dbconnection = apsw.Connection(dbselected)
    accfilepath = os.path.join(extractdir, accname, "Messages.txt")
    accfile = open(accfilepath, "w", encoding='utf8')
    dbshell = apsw.Shell(stdout=accfile, db=dbconnection)
    dbshell.process_command(".header on")
    dbshell.process_sql("select * from messages")
    accfile.close()


def copy_misc(importdbdir, destdbdir, filename):
    # Copy one named database file from the import dir to the dest dir.
    other_src = os.path.join(importdbdir, filename )
    other_dest = os.path.join(destdbdir, filename )
    shutil.copyfile(other_src, other_dest)
#!/usr/bin/env python
import os, shutil, apsw, distutils.dir_util


def extract(case, userdata):
    """Copy the Android telephony database out of *userdata* and dump sms.

    Creates ``<case>/extracted data/mms-sms/db/mmssms.db`` and a
    ``Messages.txt`` dump of the ``sms`` table with a header line.
    """
    print("--> Extracting SMS/MMS messages\n\n")
    extractdir = os.path.join(case, "extracted data")
    if not os.path.exists(extractdir):
        os.makedirs(extractdir)
    extractdir = os.path.join(extractdir, "mms-sms")
    if not os.path.exists(extractdir):
        os.makedirs(extractdir)
    dbdir = os.path.join(extractdir, "db")
    if not os.path.exists(dbdir):
        os.makedirs(dbdir)
    db_src = os.path.join(userdata, "data", "com.android.providers.telephony",
                          "databases", "mmssms.db")
    db_dest = os.path.join(dbdir, "mmssms.db")
    shutil.copyfile(db_src, db_dest)
    dbconnection = apsw.Connection(db_dest)
    filepath = os.path.join(extractdir, "Messages.txt")
    # FIX: with-statement closes the output file even if the dump fails
    # (the original relied on a manual close that was skipped on error).
    with open(filepath, "w", encoding='utf8') as fileopen:
        dbshell = apsw.Shell(stdout=fileopen, db=dbconnection)
        dbshell.process_command(".header on")
        dbshell.process_sql("select * from sms")
# Interactive SQLite shell wrapper: prefer the apsw shell, fall back to the
# system sqlite3 CLI when apsw is not installed.
import sys

# Print a short usage cheatsheet before starting the shell.
print(\
"""Cheatsheet: .tables: list tables .schema TABLE: show table schema .dump TABLE: dump table as SQL""")
# Options shared by both shells: interactive mode, headers on, list mode,
# tab as the column separator.
options=["-interactive","-header","-list","-separator", "\t"]
try:
    import apsw
    args = options + sys.argv[1:]
    shell = apsw.Shell(args=args)
    shell.history_file = '~/.config/.sqlite_history'
    shell.cmdloop()
    print("")
except ImportError:
    # apsw missing: run the sqlite3 binary with the same options instead.
    import subprocess
    argv = ["sqlite3"] + options + sys.argv[1:]
    subprocess.call(argv)
# Extract Google Maps saved-places data when the app directory is present.
# NOTE(review): fragment — `case` and `userdata` are defined by enclosing
# code outside this view.
if os.path.exists(
        os.path.join(userdata, "data", "com.google.android.apps.maps")):
    extractdir = os.path.join(case, "extracted data")
    if not os.path.exists(extractdir):
        os.makedirs(extractdir)
    extractdir = os.path.join(extractdir, "google-maps")
    if not os.path.exists(extractdir):
        os.makedirs(extractdir)
    extract_db_dir = os.path.join(extractdir, "db")
    if not os.path.exists(extract_db_dir):
        os.makedirs(extract_db_dir)
    sourcedb = os.path.join(userdata, "data", "com.google.android.apps.maps",
                            "databases", "gmm_myplaces.db")
    destpath = os.path.join(case, "extracted data", "google-maps", "db",
                            "gmm_myplaces.db")
    outpath = os.path.join(case, "extracted data", "google-maps",
                           "gmm_myplaces.txt")
    source = shutil.copyfile(sourcedb, destpath)
    # FIX: with-statement closes the output file even if the dump fails
    # (the original relied on a manual close that was skipped on error).
    with open(outpath, 'w', encoding='utf8') as output:
        extractSQLconnect = apsw.Connection(destpath)
        SQLShell = apsw.Shell(stdout=output, db=extractSQLconnect)
        SQLShell.process_command(".header on")
        SQLShell.process_sql(
            "select key_string, timestamp, latitude, longitude from sync_item")
else:
    print("--> Not extracting Google Maps data. Reason: Not found\n\n")
def askfortimeline( case ):
    """Ask the user whether to build an event timeline.

    On 'Y', creates <case>/reports/timeline/ and an empty timeline.db with
    the timeline table, and returns True.  Returns False for 'N' or any
    unrecognised answer.
    """
    print( "Create a timeline of all supported events? (Takes significantly longer)" )
    choice = input('[Y/N]').upper()
    if choice == 'Y':
        reports_dir = os.path.join(case, "reports")
        if not os.path.exists(reports_dir):
            os.makedirs(reports_dir)
        timeline_dir = os.path.join(reports_dir, "timeline")
        if not os.path.exists(timeline_dir):
            os.makedirs(timeline_dir)
        # Start from a fresh timeline database on every run.
        tldb = os.path.join(case, "reports", "timeline.db")
        if os.path.isfile(tldb):
            os.remove(tldb)
        sink = io.StringIO()
        shell = apsw.Shell(stdout=sink, db=apsw.Connection(tldb))
        shell.process_sql(
            "CREATE TABLE timeline(_id INTEGER PRIMARY KEY NOT NULL, service, message, timestamp)"
        )
        return (True)
    if choice == 'N':
        return (False)
    print("Unrecognised answer. Defaulting to 'N'")
    return (False)
# NOTE(review): fragment — the function header enclosing these first lines
# (which defines `case`, `userdata`, `cachedest`, `filesdest`, `extractdir`)
# is outside this view.
dbpath = os.path.join(extractdir, "db")
if not os.path.exists(dbpath):
    os.makedirs(dbpath)
copy(case, userdata, cachedest, filesdest, dbpath)
sql(case, dbpath)


def copy(case, userdata, cachedest, filesdest, dbpath):
    """Copy tinder.db plus the app's cache/ and files/ trees into the case dirs."""
    sourcedb = os.path.join(userdata, "data", "com.tinder", "databases",
                            "tinder.db")
    destpath = os.path.join(dbpath, "tinder.db")
    source = shutil.copyfile(sourcedb, destpath)
    datapath = os.path.join(userdata, "data", "com.tinder")
    cachesource = os.path.join(datapath, "cache")
    filessource = os.path.join(datapath, "files")
    cache = shutil.copytree(cachesource, cachedest)
    files = shutil.copytree(filessource, filesdest)


def sql(case, dbpath):
    """Dump the messages table of the copied tinder.db to output.txt."""
    print("--> Extracting Tinder information")
    db = os.path.join(dbpath, "tinder.db")
    output = os.path.join(dbpath, '..', 'output.txt')
    dbconn = apsw.Connection(db)
    # FIX: with-statement closes the file even when extraction fails — the
    # original only called close() on the success path inside the try block.
    # Also adds encoding='utf8', matching the sibling extractors.
    with open(output, 'w', encoding='utf8') as openoutput:
        dbshell = apsw.Shell(stdout=openoutput, db=dbconn)
        try:
            dbshell.process_command(".header on")
            dbshell.process_sql("select * from messages")
        except Exception:
            print("Extract Failed")
### ### Shell @@ example-shell ### # Here we use the shell to do a csv export providing the existing db # connection # Export to a StringIO if py3: import io else: import StringIO as io output = io.StringIO() shell = apsw.Shell(stdout=output, db=connection) # How to execute a dot command shell.process_command(".mode csv") shell.process_command(".headers on") # How to execute SQL shell.process_sql( "create table csvtest(col1,col2); insert into csvtest values(3,4); insert into csvtest values('a b', NULL)" ) # Let the shell figure out SQL vs dot command shell.process_complete_line("select * from csvtest") # Verify output #@@CAPTURE print(output.getvalue()) #@@ENDCAPTURE
def dump(self, db):
    """Dump the entries, xrefs and generic_xrefs tables as SQL to stdout."""
    print("dump")
    dump_shell = apsw.Shell(stdout=sys.stdout, db=db)
    dump_shell.process_command(".dump entries xrefs generic_xrefs")
def export_to_csv(self, table, out_file):
    """Write every row of *table* to *out_file* as CSV with a header line."""
    with open(out_file, 'w') as sink:
        csv_shell = apsw.Shell(stdout=sink, db=self.conn)
        # Configure the shell's output format before issuing the query.
        for command in (".mode csv", ".headers on"):
            csv_shell.process_command(command)
        csv_shell.process_complete_line("SELECT * FROM {}".format(table))
# NOTE(review): fragment of a Python 2 doc-generation script — the first
# lines sit inside a for-loop whose header is outside this view (`continue`
# is only legal inside a loop).
op.append("")
continue
if line == ".. speedtest-end":
    incomment = False
if incomment:
    continue
op.append(line)
# End of the loop: only rewrite the file when the content actually changed.
op = "\n".join(op)
if op != benchmark:
    open("doc/benchmarking.rst", "wt").write(op)

# shell stuff

import apsw, StringIO  # Python 2: StringIO module (py2-era code)
shell = apsw.Shell()
incomment = False
op = []
# Scan doc/shell.rst and insert a captured help text block after the
# ".. help-begin:" marker.
for line in open("doc/shell.rst", "rtU"):
    line = line.rstrip()
    if line == ".. help-begin:":
        op.append(line)
        incomment = True
        op.append("")
        op.append(".. code-block:: text")
        op.append("")
        s = StringIO.StringIO()
        # Fixed terminal width so the generated help wraps deterministically.
        def tw(*args):
            return 80
# connect to the sqlite database conn = sqlite3.Connection(dbpath) c = conn.cursor() # build database schema print 'Building Database Schemas...', ddl = open('ODM2_for_SQLite.sql', 'r').read() c.execute(ddl) print 'done' # load controlled vocabularies # cvload.load_cv("sqlite:///"+dbpath) output = io.StringIO() shell = sqlite3.Shell(stdout=output, db=conn) shell.process_command(".dump") with open('build_empty.sql', 'w') as f: lines = output.getvalue().split('\n') new_data = '\n'.join(lines) f.writelines(new_data) del shell # # create empty database SQL dump file # with open('../tests/data/empty_dump.sql', 'w') as f: # for line in conn.iterdump(): # try: # # handle unicode characters such as greek letters # line = line.encode('ascii', 'ignore') # f.write('%s\n' % line) # except Exception, e: