Ejemplo n.º 1
0
    def help_about(self):
        year = datetime.date.today().year
        year = "2020-{}".format(str(year)[-2:]) if year != 2020 else "2020"
        TEMPLATE = ('Python&nbsp;{}.{}.{} <br> {} <br> {}'
                    '<br>APSW&nbsp;{} <br> SQLite&nbsp;{}<br>{}<br>{}')
        BINDING = f'PySide2&nbsp;{PySide2.__version__}'
        QT = f'Qt&nbsp;{PySide2.QtCore.qVersion()}'
        info = TEMPLATE.format(sys.version_info.major, sys.version_info.minor,
                               sys.version_info.micro, BINDING, QT,
                               apsw.apswversion(), apsw.sqlitelibversion(),
                               QSysInfo.prettyProductName(),
                               platform.platform())
        QMessageBox.about(self, f'About — {APPNAME}', f'''<p>
<font color=navy><b>{APPNAME} {VERSION}</b></font></p>
<p>
<font color=navy>{APPNAME} is an easy to learn and use GUI application for
viewing, creating, editing, and updating SQLite and {APPNAME} databases.
</font>
</p>
<p><a href="https://github.com/mark-summerfield/songbird">Source Code</a>
</p>
<p>Copyright © {year} Mark Summerfield.<br>
All Rights Reserved.</p>
<p>
This software is Free Open Source Software (FOSS) licensed under the
GNU Public License version 3 (GPLv3).
</p>
<hr>
<p><font color=teal>{info}</font></p>
'''.format(year=year, info=info))  # noqa
Ejemplo n.º 2
0
    def construct_compiler(self):
        """Construct compiler."""
        # Turn e.g. "3.39.4" into (3, 39, 4) for version-aware compilation.
        version_info = tuple(
            int(part) for part in apsw.sqlitelibversion().split('.'))

        # Look up the sqlite compiler plugin and instantiate it.
        return self.plugins.get_compiler('sqlite')(self, version=version_info)
Ejemplo n.º 3
0
def first_time_setup():
  """Show a page with some configuration data and some simple stats."""
  # A fresh install has no notes or sources yet, so the counts are zero;
  # report them alongside the SQLite and interpreter versions.
  dbinfo = {'note count': 0,
            'source count': 0,
            'sqlite version': apsw.sqlitelibversion()}
  from sys import version
  dbinfo['python version'] = version

  return template('templates/config',
                  cfg={'dbinfo': dbinfo,
                       'cfg file': config})
Ejemplo n.º 4
0
def show_config_page():
  """Show a page with some configuration data and some simple stats."""
  # Notes without a source_id are plain notes; the rest belong to sources.
  note_rows = dbq("SELECT COUNT(id) FROM NOTES WHERE source_id IS NULL")
  source_rows = dbq("SELECT COUNT(id) FROM NOTES WHERE source_id IS NOT NULL")
  dbinfo = {'note count': note_rows[0]["COUNT(id)"],
            'source count': source_rows[0]["COUNT(id)"],
            'sqlite version': apsw.sqlitelibversion()}
  from sys import version
  dbinfo['python version'] = version

  cfg = {'dbinfo': dbinfo,
         'cfg file': config}
  return template('templates/config', cfg=cfg)
Ejemplo n.º 5
0
def show_config_page():
    """Show a page with some configuration data and some simple stats."""
    # Count plain notes (no source) and source-backed notes separately.
    plain = dbq("SELECT COUNT(id) FROM NOTES WHERE source_id IS NULL")
    sourced = dbq("SELECT COUNT(id) FROM NOTES WHERE source_id IS NOT NULL")
    dbinfo = {
        'note count': plain[0]["COUNT(id)"],
        'source count': sourced[0]["COUNT(id)"],
        'sqlite version': apsw.sqlitelibversion(),
    }
    from sys import version
    dbinfo['python version'] = version

    cfg = {'dbinfo': dbinfo, 'cfg file': config}
    return template('templates/config', cfg=cfg)
Ejemplo n.º 6
0
def apsw_is_available():
  """Return True if apsw can be imported and its bundled SQLite matches the
  version used by the stdlib sqlite3 module, else False."""

  # apsw is an optional dependency; without it there is nothing to check.
  try:
    import apsw #another python sqlite wrapper (maybe supports URIs)
  except ImportError:
    return False

  # apsw is available -- require its SQLite library version to match the
  # one the stdlib sqlite3 module was built against.
  import sqlite3
  return apsw.sqlitelibversion() == sqlite3.sqlite_version
Ejemplo n.º 7
0
 def parse_args(cls, argv):
     """Parse command-line arguments and store them on the class.

     argv: list of argument strings (excluding the program name), in the
     form accepted by argparse.ArgumentParser.parse_args(); None means
     fall back to sys.argv[1:].
     """
     parser = argparse.ArgumentParser(
         description='clquery command line tool.')
     parser.add_argument('--aws-profile',
                         default=None,
                         required=False,
                         help='AWS profile name of the credentials to use.')
     parser.add_argument(
         '-V',
         '--version',
         action='version',
         version=', '.join([
             'clquery ' + pkg_resources.require('clquery')[0].version,
             'apsw ' + apsw.apswversion(),
             'sqlite ' + apsw.sqlitelibversion()
         ]))
     # BUG FIX: the argv parameter was previously ignored and sys.argv was
     # always parsed; honour the caller-supplied argv (None still falls
     # back to sys.argv, so existing callers are unaffected).
     args = parser.parse_args(argv)
     cls.set_all(args)
Ejemplo n.º 8
0
        # NOTE(review): the indentation of this snippet appears paste-mangled --
        # the bodies of inext() and myupdatehook() are not indented under their
        # def lines, so this will not compile as-is. Code preserved verbatim.
        def inext(v):  # next value from iterator
        return next(v) if py3 else v.next()


        ###
        ### Check we have the expected version of apsw and sqlite
        ###
        print("      Using APSW file", apsw.__file__)                # from the extension module
        print("         APSW version", apsw.apswversion())           # from the extension module
        print("   SQLite lib version", apsw.sqlitelibversion())      # from the sqlite library code
        print("SQLite header version", apsw.SQLITE_VERSION_NUMBER)   # from the sqlite header file at compile time


        #%%
        ###
        ### Opening/creating database
        ###

        connection = apsw.Connection("dbfile")
        cursor = connection.cursor()


        #%%
        ###
        ### update hook
        ###

        # NOTE(review): looks up type in mapping_authorizer_function -- the
        # update hook receives authorizer-style operation codes; confirm this
        # is the intended mapping table.
        def myupdatehook(type, databasename, tablename, rowid):
        print("Updated: %s database %s, table %s, row %d" % (
                apsw.mapping_authorizer_function[type], databasename, tablename, rowid))


        cursor.execute("create table foo(x,y,z)")

        connection.setupdatehook(myupdatehook)

        cursor.execute("insert into foo values(?,?,?)", (1, 1.1, None))  # integer, float/real, Null
        cursor.execute("insert into foo(x) values(?)", ("abc", ))        # string (note trailing comma to ensure tuple!)
        cursor.execute("insert into foo(x) values(?)",  (b"abc\xff\xfe",))    # a blob (binary data)

        connection.setupdatehook(None)
Ejemplo n.º 9
0
 def __init__(self):
     # Open the baker database via APSW and wire up pub/sub message handlers.
     # NOTE(review): Python 2 syntax (print statements, "except Error, e").
     try:
         if debuglevel > 1:
             print "\nUsing APSW file", apsw.__file__  # from the extension module
             print "APSW version", apsw.apswversion(
             )  # from the extension module
             print "SQLite lib version", apsw.sqlitelibversion(
             )  # from the sqlite library code
             print "SQLite header version", apsw.SQLITE_VERSION_NUMBER  # from the sqlite header file at compile time
         self.dbcon = apsw.Connection(settings.BAKER_DB)
         self.lastrowid = 0
         self.seconds = 6  # time to sleep before final close of DB
         pub.subscribe(self.close, 'ShutdownDB')
         pub.subscribe(self.bmRecSave, 'RecSaveDB')
         pub.subscribe(self.bmSendSave, 'SendSaveDB')
         pub.subscribe(self.BakerCmdInsertRunner, 'BakerCmdInsertRunner')
         pub.subscribe(self.BakerCmdReport1, 'BakerCmdReport1')
     except apsw.Error, e:
         print "APSW error - all args:", e.args
         self.dbcon = False
         print "Error - clsBakerDB %s:" % e.args[0]
         # NOTE(review): dbcon was just set to False above, so this close()
         # branch can never execute -- dead code; confirm intent.
         if self.dbcon:
             self.dbcon.close()
Ejemplo n.º 10
0
 def __init__(self, db_filename):
     """Log the APSW/SQLite library versions, then connect to db_filename."""
     logger = logging.getLogger(__name__)
     logger.debug('APSW version ' + apsw.apswversion())
     logger.debug('SQLite version ' + apsw.sqlitelibversion())
     self._logger = logger
     self._conn = None  # populated by _connect_to_db
     self._connect_to_db(db_filename)
# NOTE(review): Python 2 interactive-shell setup (print statements).
# Completion candidate lists for SQL keywords, dot-commands, functions,
# tables and columns, refreshed as the session evolves.
sqlandmtermstatements=['select ', 'create ', 'where ', 'table ', 'group by ', 'drop ', 'order by ', 'index ', 'from ', 'alter ', 'limit ', 'delete ', '..',
    "attach database '", 'detach database ', 'distinct', 'exists ']
dotcompletions=['.help ', '.colnums', '.schema ', '.functions ', '.tables', '.quote', '.explain ', '.vacuum', '.quit']
allfuncs=functions.functions['vtable'].keys()+functions.functions['row'].keys()+functions.functions['aggregate'].keys()
alltables=[]
alltablescompl=[]
updated_tables=set()
update_tablelist()
lastcols=[]
newcols=[]
colscompl=[]

#Intro Message
if not pipedinput:
    print mtermdetails
    print "running on Python: "+'.'.join([str(x) for x in sys.version_info[0:3]])+', APSW: '+apsw.apswversion()+', SQLite: '+apsw.sqlitelibversion(),
    try:
        sys.stdout.write(", madIS: "+functions.VERSION+'\n')
    except:
        print
    print intromessage

number_of_kb_exceptions=0
while True:
    statement = raw_input_no_history("mterm> ")
    if statement==None:
        number_of_kb_exceptions+=1
        print
        if number_of_kb_exceptions<2:
            continue
        else:
Ejemplo n.º 12
0
import os, sys, time
import apsw


# NOTE(review): Python 2 demo script (print statements, buffer()).
print "using APSW file: ", apsw.__file__
print "APSW version: ", apsw.apswversion()
print "SQLite version: ", apsw.sqlitelibversion()
print "SQLite header version: ", apsw.SQLITE_VERSION_NUMBER


# Opens (creating if needed) the database file in the working directory.
connection = apsw.Connection("yogeshmk.db")
cursor = connection.cursor()

cursor.execute("create table tt (a, b, c)")

cursor.execute("insert into tt values (?, ?, ?)", (1, 1.1, None))    # int, float, NULL
cursor.execute("insert into tt (a) values(?)", ("abc",))             # single element tuple
cursor.execute("insert into tt (a) values(?)", (buffer("abc\xff\xfe"), ))

# multiple statements
cursor.execute("delete from tt; insert into tt values(1,2,3); create table bar(a,b,c) ; insert into tt values(4, 'five', 6.0)")


# iterator

for a, b, c in cursor.execute("select a, b, c from tt"):
    print cursor.getdescription()           # show column names and datatypes
    print a, b, c


# NOTE(review): ":aplha" is a typo for ":alpha", and the statement is missing
# the VALUES keyword -- as written this SQL will fail to prepare.
cursor.execute("insert into tt (:aplha, :beta, :gamma)", {'beta': 2, 'aplha':1, 'gamma':3})
Ejemplo n.º 13
0
import os, sys, time
import apsw

###
### Check we have the expected version of apsw and sqlite
###

# NOTE(review): Python 2 print statements; the @@CAPTURE markers appear to
# be for a documentation/example capture harness.
#@@CAPTURE
print "      Using APSW file",apsw.__file__                # from the extension module
print "         APSW version",apsw.apswversion()           # from the extension module
print "   SQLite lib version",apsw.sqlitelibversion()      # from the sqlite library code
print "SQLite header version",apsw.SQLITE_VERSION_NUMBER   # from the sqlite header file at compile time
#@@ENDCAPTURE

###
### Opening/creating database
###

connection=apsw.Connection("dbfile")
cursor=connection.cursor()

###
### simple statement  @@ example-cursor
###

cursor.execute("create table foo(x,y,z)")

###
### using different types
###
Ejemplo n.º 14
0
def sanitycheck():
    "Check all dependencies are present and at the correct version"
    # NOTE(review): Python 2 code (print statements, backtick repr).

    print "=== Sanity check ==="

    # NOTE(review): "$HeadURL$" is an *unexpanded* SVN keyword -- it contains
    # no ":", so split(":", 1)[1] raises IndexError unless the keyword has
    # been expanded by the checkout; confirm this is intended.
    print "svn location",
    if not "$HeadURL$".split(
            ":",
            1)[1].strip().startswith("https://bitpim.svn.sourceforge.net"):
        raise Exception(
            "Needs to be checked out from https://bitpim.svn.sourceforge.net")
    print "  OK"

    print "python version",
    if sys.version_info[:2] != (2, 5):
        raise Exception("Should be  Python 2.5 - this is " + sys.version)
    print "  OK"

    print "wxPython version",
    import wx
    if wx.VERSION[:4] != (2, 8, 8, 1):
        raise Exception("Should be wxPython 2.8.8.1.  This is " +
                        ` wx.VERSION `)
    print "  OK"

    print "wxPython is unicode build",
    if not wx.USE_UNICODE:
        raise Exception("You need a unicode build of wxPython")
    print "  OK"

    if sys.platform != 'win32':
        print "native.usb",
        import native.usb
        print "  OK"

    print "pycrypto version",
    expect = '2.0.1'
    import Crypto
    if Crypto.__version__ != expect:
        raise Exception("Should be %s version of pycrypto - you have %s" %
                        (expect, Crypto.__version__))
    print "  OK"

    print "paramiko version",
    expect = '1.7.4 (Desmond)'
    import paramiko
    if paramiko.__version__ != expect:
        raise Exception("Should be %s version of paramiko - you have %s" %
                        (expect, paramiko.__version__))
    print "  OK"

    print "bitfling",
    import bitfling
    print "  OK"

    print "pyserial",
    import serial
    print "  OK"

    # Exact-version pins for apsw and its bundled SQLite.
    print "apsw",
    import apsw
    ver = "3.5.9-r2"
    if apsw.apswversion() != ver:
        raise Exception("Should be apsw version %s - you have %s" %
                        (ver, apsw.apswversion()))
    print "  OK"

    print "sqlite",
    ver = "3.6.1"
    if apsw.sqlitelibversion() != ver:
        raise Exception("Should be sqlite version %s - you have %s" %
                        (ver, apsw.sqlitelibversion()))
    print "  OK"

    print "jaro/winkler string matcher",
    import native.strings.jarow
    print "  OK"

    # bsddb (Linux only, for evolution)
    if sys.platform == "linux2":
        print "bsddb ",
        import bsddb
        print "  OK"

    # win32com.shell - See http://starship.python.net/crew/theller/moin.cgi/WinShell
    if sys.platform == 'win32':
        import py2exe.mf as modulefinder  # in py2exe < 0.6.4 use "import modulefinder"
        import win32com
        for p in win32com.__path__[1:]:
            modulefinder.AddPackagePath("win32com", p)
        for extra in ["win32com.shell"]:  #,"win32com.mapi"
            __import__(extra)
            m = sys.modules[extra]
            for p in m.__path__[1:]:
                modulefinder.AddPackagePath(extra, p)

    print "=== All checks out ==="
Ejemplo n.º 15
0
class APSWDatabase(SqliteExtDatabase):
    """SQLite database driver backed by the APSW bindings rather than the
    stdlib sqlite3 module."""

    # Version of the linked SQLite library as a tuple, e.g. (3, 39, 4).
    server_version = tuple(int(i) for i in apsw.sqlitelibversion().split('.'))

    def __init__(self, database, **kwargs):
        self._modules = {}  # virtual-table modules, re-registered per connection
        super(APSWDatabase, self).__init__(database, **kwargs)

    def register_module(self, mod_name, mod_inst):
        """Register a virtual-table module; applied immediately if a
        connection is already open."""
        self._modules[mod_name] = mod_inst
        if not self.is_closed():
            self.connection().createmodule(mod_name, mod_inst)

    def unregister_module(self, mod_name):
        """Forget a previously registered virtual-table module."""
        del (self._modules[mod_name])

    def _connect(self):
        # APSW's busy timeout is expressed in milliseconds.
        conn = apsw.Connection(self.database, **self.connect_params)
        if self._timeout is not None:
            conn.setbusytimeout(self._timeout * 1000)
        try:
            self._add_conn_hooks(conn)
        except:
            conn.close()
            raise
        return conn

    def _add_conn_hooks(self, conn):
        super(APSWDatabase, self)._add_conn_hooks(conn)
        self._load_modules(conn)  # APSW-only.

    def _load_modules(self, conn):
        for mod_name, mod_inst in self._modules.items():
            conn.createmodule(mod_name, mod_inst)
        return conn

    def _load_aggregates(self, conn):
        for name, (klass, num_params) in self._aggregates.items():

            # BUG FIX: bind klass as a default argument. The factory is
            # invoked later (when SQL uses the aggregate); closing over the
            # loop variable directly meant every factory used whichever
            # class the loop saw *last* (late-binding closure).
            def make_aggregate(klass=klass):
                return (klass(), klass.step, klass.finalize)

            conn.createaggregatefunction(name, make_aggregate)

    def _load_collations(self, conn):
        for name, fn in self._collations.items():
            conn.createcollation(name, fn)

    def _load_functions(self, conn):
        for name, (fn, num_params) in self._functions.items():
            conn.createscalarfunction(name, fn, num_params)

    def _load_extensions(self, conn):
        conn.enableloadextension(True)
        for extension in self._extensions:
            conn.loadextension(extension)

    def load_extension(self, extension):
        """Record an extension and load it into any open connection."""
        self._extensions.add(extension)
        if not self.is_closed():
            conn = self.connection()
            conn.enableloadextension(True)
            conn.loadextension(extension)

    def last_insert_id(self, cursor, query_type=None):
        return cursor.getconnection().last_insert_rowid()

    def rows_affected(self, cursor):
        return cursor.getconnection().changes()

    def begin(self, lock_type='deferred'):
        self.cursor().execute('begin %s;' % lock_type)

    def commit(self):
        """Commit the open transaction; returns False in autocommit mode."""
        curs = self.cursor()
        if curs.getconnection().getautocommit():
            return False
        curs.execute('commit;')
        return True

    def rollback(self):
        """Roll back the open transaction; returns False in autocommit mode."""
        curs = self.cursor()
        if curs.getconnection().getautocommit():
            return False
        curs.execute('rollback;')
        return True

    def execute_sql(self, sql, params=None, commit=True):
        # `commit` is retained for interface compatibility; APSW manages
        # transactions explicitly so it is not consulted here.
        logger.debug((sql, params))
        with __exception_wrapper__:
            cursor = self.cursor()
            cursor.execute(sql, params or ())
        return cursor
Ejemplo n.º 16
0
			t = config[table]
			v = t.get(key,None)
			t[key] = value
			return v
		except KeyError:
			pass
	db.createscalarfunction(name,getter,2)
	db.createscalarfunction(name,setter,3)
	return mod

__all__ = ['WebQueryModule','attach']

if __name__ == '__main__':
    # Interactive entry point: attach the webquery module to an APSW shell
    # and create one virtual table per file named on the command line.
    import sys, re
    shell = apsw.Shell()
    mod = attach(shell.db, 'webquery')
    banner_lines = []
    for source in sys.argv[1:]:
        # Derive a legal table name from the file name ("a.yml" -> "a").
        table = re.sub(r'\W', '_', source)
        if table.endswith('_yml'):
            table = table[:-4]
        sql = 'CREATE VIRTUAL TABLE %s USING webquery(%s);' % (table, source)
        try:
            shell.process_sql(sql)
            banner_lines.append('> ' + sql + '\r\n')
        except KeyboardInterrupt:
            raise
        except:
            pass  # best-effort: skip files that fail to load
    intro = ('SQLite version %s (APSW %s)\r\nEnter ".help" for instructions\r\nEnter SQL statements terminated with a ";"\r\n'
             % (apsw.sqlitelibversion(), apsw.apswversion()))
    shell.cmdloop(intro=intro + ''.join(banner_lines))
Ejemplo n.º 17
0
except ImportError:
    # Python 2.6
    from lib.collections26 import OrderedDict

try:
    from inspect import isgeneratorfunction
except ImportError:
    # Python < 2.6 fallback: detect a generator via the CO_GENERATOR flag.
    def isgeneratorfunction(obj):
        # BUG FIX: the original tested the *builtin* `object` instead of the
        # `obj` argument, so the result never depended on the input.
        return bool((inspect.isfunction(obj) or inspect.ismethod(obj))
                    and obj.func_code.co_flags & CO_GENERATOR)


# Raise the bytecode check interval to reduce thread-switch overhead.
# NOTE(review): sys.setcheckinterval is deprecated in Python 3 and removed
# in 3.9 -- confirm the target interpreter.
sys.setcheckinterval(1000)

sqlite_version = apsw.sqlitelibversion()
apsw_version = apsw.apswversion()

# Feature switches keyed off the linked SQLite library version.
VTCREATE = 'create virtual table temp.'
SQLITEAFTER3711 = False
SQLITEAFTER380 = False
sqlite_version_split = [int(x) for x in sqlite_version.split('.')]

if sqlite_version_split[0:3] >= [3, 8, 0]:
    SQLITEAFTER380 = True
try:
    if sqlite_version_split[0:3] >= [3, 7, 11]:
        VTCREATE = 'create virtual table if not exists temp.'
        SQLITEAFTER3711 = True
except Exception, e:
Ejemplo n.º 18
0
def sanitycheck():
    "Check all dependencies are present and at the correct version"
    # NOTE(review): Python 2 code (print statements, backtick repr);
    # near-duplicate of the other sanitycheck() in this file.
    print "=== Sanity check ==="

    print "svn location",
    if not "$HeadURL: https://bitpim.svn.sourceforge.net/svnroot/bitpim/releases/1.0.7/src/package.py $".split(":",1)[1].strip().startswith("https://bitpim.svn.sourceforge.net"):
        raise Exception("Needs to be checked out from https://bitpim.svn.sourceforge.net")
    print "  OK"

    print "python version",
    if sys.version_info[:2]!=(2,5):
       raise Exception("Should be  Python 2.5 - this is "+sys.version)
    print "  OK"

    print "wxPython version",
    import wx
    if wx.VERSION[:4]!=(2,8,8,1):
        raise Exception("Should be wxPython 2.8.8.1.  This is "+`wx.VERSION`)
    print "  OK"

    print "wxPython is unicode build",
    if not wx.USE_UNICODE:
        raise Exception("You need a unicode build of wxPython")
    print "  OK"

    if sys.platform!='win32':
        print "native.usb",
        import native.usb
        print "  OK"

    print "pycrypto version",
    expect='2.0.1'
    import Crypto
    if Crypto.__version__!=expect:
        raise Exception("Should be %s version of pycrypto - you have %s" % (expect, Crypto.__version__))
    print "  OK"

    print "paramiko version",
    expect='1.7.4 (Desmond)'
    import paramiko
    if paramiko.__version__!=expect:
        raise Exception("Should be %s version of paramiko - you have %s" % (expect, paramiko.__version__))
    print "  OK"

    print "bitfling",
    import bitfling
    print "  OK"

    print "pyserial",
    import serial
    print "  OK"

    # Exact-version pins for apsw and its bundled SQLite.
    print "apsw",
    import apsw
    ver="3.5.9-r2"
    if apsw.apswversion()!=ver:
        raise Exception("Should be apsw version %s - you have %s" % (ver, apsw.apswversion()))
    print "  OK"

    print "sqlite",
    ver="3.6.1"
    if apsw.sqlitelibversion()!=ver:
        raise Exception("Should be sqlite version %s - you have %s" % (ver, apsw.sqlitelibversion()))
    print "  OK"

    print "jaro/winkler string matcher",
    import native.strings.jarow
    print "  OK"

    # bsddb (Linux only, for evolution)
    if sys.platform=="linux2":
        print "bsddb ",
        import bsddb
        print "  OK"

    # win32com.shell - See http://starship.python.net/crew/theller/moin.cgi/WinShell
    if sys.platform=='win32':
        import py2exe.mf as modulefinder # in py2exe < 0.6.4 use "import modulefinder"
        import win32com
        for p in win32com.__path__[1:]:
            modulefinder.AddPackagePath("win32com", p)
        for extra in ["win32com.shell"]: #,"win32com.mapi"
            __import__(extra)
            m = sys.modules[extra]
            for p in m.__path__[1:]:
                modulefinder.AddPackagePath(extra, p)

    print "=== All checks out ==="
Ejemplo n.º 19
0
    def deserialize(self, ignore_drm=False):
        """Parse the KDF SQLite database (standalone, or embedded in a KPF
        zip) and populate self.fragments with the decoded YJ fragments.

        Known table schemas are removed from the discovered schema set as
        they are processed; anything left over is reported as unexpected.
        """
        self.ignore_drm = ignore_drm
        self.fragments.clear()

        self.kpf_datafile = self.kdf_datafile = self.kcb_data = None

        # Locate the .kdf database (and optional .kcb metadata) inside the
        # KPF zip; a non-empty .kdf-journal is rejected.
        if self.datafile.is_zipfile():
            self.kpf_datafile = self.datafile

            with self.kpf_datafile.as_ZipFile() as zf:
                for info in zf.infolist():
                    ext = os.path.splitext(info.filename)[1]
                    if ext == ".kdf":
                        self.kdf_datafile = DataFile(info.filename,
                                                     zf.read(info),
                                                     self.kpf_datafile)

                    elif ext == ".kdf-journal":
                        if len(zf.read(info)) > 0:
                            raise Exception("kdf-journal is not empty in %s" %
                                            self.kpf_datafile.name)

                    elif ext == ".kcb":
                        self.kcb_data = json_deserialize(zf.read(info))

            if self.kdf_datafile is None:
                raise Exception("Failed to locate KDF within %s" %
                                self.datafile.name)

        else:
            self.kdf_datafile = self.datafile

        # Strip the SQLite fingerprint wrapper before opening the database.
        unwrapped_kdf_datafile = SQLiteFingerprintWrapper(
            self.kdf_datafile).remove()

        # Use the file in place when possible; otherwise materialize the
        # data to a temporary file for SQLite to open.
        db_filename = (unwrapped_kdf_datafile.name
                       if unwrapped_kdf_datafile.is_real_file
                       and not self.book.is_netfs else temp_filename(
                           "kdf", unwrapped_kdf_datafile.get_data()))

        # The KDF tables use WITHOUT ROWID, which needs SQLite >= 3.8.2,
        # whichever binding (apsw or stdlib sqlite3) is available.
        if have_apsw:
            if natural_sort_key(
                    apsw.sqlitelibversion()) < natural_sort_key("3.8.2"):
                raise Exception(
                    "SQLite version 3.8.2 or later is necessary in order to use a WITHOUT ROWID table. Found version %s"
                    % apsw.sqlitelibversion())

            conn = apsw.Connection(db_filename)
        else:
            if sqlite3.sqlite_version_info < (3, 8, 2):
                raise Exception(
                    "SQLite version 3.8.2 or later is necessary in order to use a WITHOUT ROWID table. Found version %s"
                    % sqlite3.sqlite_version)

            conn = sqlite3.connect(db_filename, KpfContainer.db_timeout)

        cursor = conn.cursor()

        # Collect each table's CREATE statement; recognized schemas are
        # removed from this set as they are handled below.
        sql_list = cursor.execute(
            "SELECT sql FROM sqlite_master WHERE type='table';").fetchall()
        schema = set([x[0] for x in sql_list])

        # Optional dictionary index tables (present for dictionary books).
        dictionary_index_terms = set()
        first_head_word = ""
        INDEX_INFO_SCHEMA = (
            "CREATE TABLE index_info(namespace char(256), index_name char(256), property char(40), "
            "primary key (namespace, index_name)) without rowid")

        if INDEX_INFO_SCHEMA in schema:
            schema.remove(INDEX_INFO_SCHEMA)
            self.book.is_dictionary = True
            for namespace, index_name, property in cursor.execute(
                    "SELECT * FROM index_info;"):
                if namespace != "dictionary" or property != "yj.dictionary.term":
                    log.error(
                        "unexpected index_info: namespace=%s, index_name=%s, property=%s"
                        % (namespace, index_name, property))

                table_name = "index_%s_%s" % (namespace, index_name)
                index_schema = (
                    "CREATE TABLE %s ([%s] char(256),  id char(40), "
                    "primary key ([%s], id)) without rowid") % (
                        table_name, property, property)

                if index_schema in schema:
                    schema.remove(index_schema)
                    num_entries = 0
                    index_words = set()
                    index_kfx_ids = set()

                    for dictionary_term, kfx_id in cursor.execute(
                            "SELECT * FROM %s;" % table_name):
                        num_entries += 1
                        dictionary_index_terms.add(
                            (dictionary_term, IS(kfx_id)))
                        index_words.add(dictionary_term)
                        index_kfx_ids.add(kfx_id)

                        if dictionary_term < first_head_word or not first_head_word:
                            first_head_word = dictionary_term

                    log.info(
                        "Dictionary %s table has %d entries with %d terms and %d definitions"
                        % (table_name, num_entries, len(index_words),
                           len(index_kfx_ids)))

                else:
                    log.error("KPF database is missing the '%s' table" %
                              table_name)

        # Optional eid -> kfx_id translation table.
        self.eid_symbol = {}
        KFXID_TRANSLATION_SCHEMA = "CREATE TABLE kfxid_translation(eid INTEGER, kfxid char(40), primary key(eid)) without rowid"
        if KFXID_TRANSLATION_SCHEMA in schema:
            schema.remove(KFXID_TRANSLATION_SCHEMA)
            for eid, kfx_id in cursor.execute(
                    "SELECT * FROM kfxid_translation;"):
                self.eid_symbol[eid] = self.create_local_symbol(kfx_id)

        # Optional per-fragment properties (only element_type is used here).
        self.element_type = {}
        FRAGMENT_PROPERTIES_SCHEMA = (
            "CREATE TABLE fragment_properties(id char(40), key char(40), value char(40), "
            "primary key (id, key, value)) without rowid")
        if FRAGMENT_PROPERTIES_SCHEMA in schema:
            schema.remove(FRAGMENT_PROPERTIES_SCHEMA)
            for id, key, value in cursor.execute(
                    "SELECT * FROM fragment_properties;"):

                if key == "child":
                    pass
                elif key == "element_type":
                    self.element_type[id] = value
                else:
                    log.error(
                        "fragment_property has unknown key: id=%s key=%s value=%s"
                        % (id, key, value))

        self.max_eid_in_sections = None
        FRAGMENTS_SCHEMA = "CREATE TABLE fragments(id char(40), payload_type char(10), payload_value blob, primary key (id))"
        if FRAGMENTS_SCHEMA in schema:
            schema.remove(FRAGMENTS_SCHEMA)

            # First pass: load the Ion symbol table (or reconstruct one from
            # max_id) so the fragment payloads below can be decoded.
            for id in ["$ion_symbol_table", "max_id"]:
                rows = cursor.execute(
                    "SELECT payload_value FROM fragments WHERE id = ? AND payload_type = 'blob';",
                    (id, )).fetchall()
                if rows:
                    payload_data = self.prep_payload_blob(rows[0][0])
                    if payload_data is None:
                        pass
                    elif id == "$ion_symbol_table":
                        self.symtab.creating_yj_local_symbols = True
                        sym_import = IonBinary(
                            self.symtab).deserialize_annotated_value(
                                payload_data,
                                expect_annotation="$ion_symbol_table",
                                import_symbols=True)
                        self.symtab.creating_yj_local_symbols = False
                        if DEBUG:
                            log.info("kdf symbol import = %s" %
                                     json_serialize(sym_import))

                        self.fragments.append(YJFragment(sym_import))
                        break
                    else:
                        max_id = IonBinary(
                            self.symtab).deserialize_single_value(payload_data)
                        if DEBUG:
                            log.info("kdf max_id = %d" % max_id)

                        self.symtab.clear()
                        self.symtab.import_shared_symbol_table(
                            "YJ_symbols",
                            max_id=max_id - len(SYSTEM_SYMBOL_TABLE.symbols))
                        self.fragments.append(
                            YJFragment(self.symtab.create_import()))

            # Second pass: decode every fragment row by payload type.
            for id, payload_type, payload_value in cursor.execute(
                    "SELECT * FROM fragments;"):
                ftype = id

                if payload_type == "blob":
                    payload_data = self.prep_payload_blob(payload_value)

                    if id in ["max_id", "$ion_symbol_table"]:
                        pass

                    elif payload_data is None:
                        ftype = self.element_type.get(id)

                    elif id == "max_eid_in_sections":
                        ftype = None
                        self.max_eid_in_sections = IonBinary(
                            self.symtab).deserialize_single_value(payload_data)
                        if self.book.is_dictionary:
                            pass
                        else:
                            log.warning(
                                "Unexpected max_eid_in_sections for non-dictionary: %d"
                                % self.max_eid_in_sections)

                    # Payloads without the Ion signature are raw resources.
                    elif not payload_data.startswith(IonBinary.SIGNATURE):
                        ftype = None
                        self.fragments.append(
                            YJFragment(ftype="$417",
                                       fid=self.create_local_symbol(id),
                                       value=IonBLOB(payload_data)))

                    elif len(payload_data) == len(IonBinary.SIGNATURE):
                        if id != "book_navigation":
                            log.warning("Ignoring empty %s fragment" % id)

                    else:
                        value = IonBinary(
                            self.symtab).deserialize_annotated_value(
                                payload_data)

                        if not isinstance(value, IonAnnotation):
                            log.error(
                                "KDF fragment id=%s is missing annotation: %s"
                                % (id, repr(value)))
                            continue
                        elif len(value.annotations
                                 ) == 2 and value.annotations[1] == "$608":
                            pass
                        elif len(value.annotations) > 1:
                            log.error(
                                "KDF fragment should have one annotation: %s" %
                                repr(value))

                        ftype = value.annotations[0]

                        if ftype in ROOT_FRAGMENT_TYPES:  # shortcut when symbol table unavailable
                            fid = None
                        else:
                            fid = self.create_local_symbol(id)

                        self.fragments.append(
                            YJFragment(ftype=ftype,
                                       fid=fid,
                                       value=self.deref_kfx_ids(value.value)))

                elif payload_type == "path":
                    # Payload is a path to external resource data.
                    ftype = "$417"

                    resource_data = self.get_resource_data(
                        self.prep_payload_blob(payload_value).decode("utf8"))
                    if resource_data is not None:
                        self.fragments.append(
                            YJFragment(ftype=ftype,
                                       fid=self.create_local_symbol(id),
                                       value=IonBLOB(resource_data)))

                else:
                    log.error(
                        "Unexpected KDF payload_type=%s, id=%s, value=%d bytes"
                        % (payload_type, id, len(payload_value)))

        else:
            log.error("KPF database is missing the 'fragments' table")

        # Garbage-collection bookkeeping tables carry no content we need;
        # just acknowledge them so they are not reported as unexpected.
        GC_FRAGMENT_PROPERTIES_SCHEMA = (
            "CREATE TABLE gc_fragment_properties(id varchar(40), key varchar(40), "
            "value varchar(40), primary key (id, key, value)) without rowid")
        if GC_FRAGMENT_PROPERTIES_SCHEMA in schema:
            schema.remove(GC_FRAGMENT_PROPERTIES_SCHEMA)

        GC_REACHABLE_SCHEMA = (
            "CREATE TABLE gc_reachable(id varchar(40), primary key (id)) without rowid"
        )
        if GC_REACHABLE_SCHEMA in schema:
            schema.remove(GC_REACHABLE_SCHEMA)

        # Format capabilities become a "$593" fragment.
        CAPABILITIES_SCHEMA = "CREATE TABLE capabilities(key char(20), version smallint, primary key (key, version)) without rowid"
        if CAPABILITIES_SCHEMA in schema:
            schema.remove(CAPABILITIES_SCHEMA)
            capabilities = cursor.execute(
                "SELECT * FROM capabilities;").fetchall()

            if capabilities:
                format_capabilities = [
                    IonStruct(IS("$492"), key, IS("version"), version)
                    for key, version in capabilities
                ]
                self.fragments.append(
                    YJFragment(ftype="$593", value=format_capabilities))
        else:
            log.error("KPF database is missing the 'capabilities' table")

        # Anything still in the set was a table we did not recognize.
        if len(schema) > 0:
            for s in list(schema):
                log.error("Unexpected KDF database schema: %s" % s)

        cursor.close()
        conn.close()

        # Treat the book as pre-publication unless key title metadata
        # (ASIN/asset_id/etc.) is present in the book metadata fragment.
        self.book.is_kpf_prepub = True
        book_metadata_fragment = self.fragments.get("$490")
        if book_metadata_fragment is not None:
            for cm in book_metadata_fragment.value.get("$491", {}):
                if cm.get("$495", "") == "kindle_title_metadata":
                    for kv in cm.get("$258", []):
                        if kv.get("$492", "") in [
                                "ASIN", "asset_id", "cde_content_type",
                                "content_id"
                        ]:
                            self.book.is_kpf_prepub = False
                            break
                    break

        # Synthesize a container-info fragment identifying the KPF format.
        self.fragments.append(
            YJFragment(ftype="$270",
                       value=IonStruct(IS("$587"), "", IS("$588"), "",
                                       IS("$161"), CONTAINER_FORMAT_KPF)))

        additional_metadata = self.get_resource_data(
            ADDITIONAL_METADATA_FILENAME, report_missing=False)
        self.additional_metadata = json_deserialize(
            additional_metadata) if additional_metadata else {}
Ejemplo n.º 20
0
# NOTE(review): Python 2 fragment (print statements); near-duplicate of the
# mterm completion/intro setup earlier in this file.
dotcompletions = ['.help ', '.colnums', '.schema ', '.functions ', '.tables', '.explain ', '.vacuum', '.queryplan ']
allfuncs = functions.functions['vtable'].keys() + functions.functions['row'].keys() + functions.functions[
    'aggregate'].keys()
alltables = []
alltablescompl = []
updated_tables = set()
update_tablelist()
lastcols = []
newcols = []
colscompl = []

# Intro Message
if not pipedinput:
    print mtermdetails
    print "running on Python: " + '.'.join([str(x) for x in sys.version_info[
                                                            0:3]]) + ', APSW: ' + apsw.apswversion() + ', SQLite: ' + apsw.sqlitelibversion(),
    try:
        sys.stdout.write(", madIS: " + functions.VERSION + '\n')
    except:
        print
    print intromessage

number_of_kb_exceptions = 0
while True:
    statement = raw_input_no_history("mterm> ")
    if statement == None:
        number_of_kb_exceptions += 1
        print
        if number_of_kb_exceptions < 2:
            continue
        else:
def printAPSWinfo():
    print "      Using APSW file",apsw.__file__                # from the extension module
    print "         APSW version",apsw.apswversion()           # from the extension module
    print "   SQLite lib version",apsw.sqlitelibversion()      # from the sqlite library code
    print "SQLite header version",apsw.SQLITE_VERSION_NUMBER   # from the sqlite header file at compile time
Ejemplo n.º 22
0
    'row'].keys() + functions.functions['aggregate'].keys()
alltables = []
alltablescompl = []
updated_tables = set()
update_tablelist()
lastcols = []
newcols = []
colscompl = []

#Intro Message
if not pipedinput:
    print mtermdetails
    print "running on Python: " + '.'.join(
        [str(x)
         for x in sys.version_info[0:3]]) + ', APSW: ' + apsw.apswversion(
         ) + ', SQLite: ' + apsw.sqlitelibversion(),
    try:
        sys.stdout.write(", madIS: " + functions.VERSION + '\n')
    except:
        print
    print intromessage

number_of_kb_exceptions = 0
while True:
    statement = raw_input_no_history("mterm> ")
    if statement == None:
        number_of_kb_exceptions += 1
        print
        if number_of_kb_exceptions < 2:
            continue
        else:
Ejemplo n.º 23
0
import os, sys, time
import apsw

###
### Check we have the expected version of apsw and sqlite
###

# NOTE(review): these parenthesised prints emit tuples under Python 2 and
# two space-separated values under Python 3 -- presumably targets Python 3;
# confirm the intended interpreter.
print ("      Using APSW file",apsw.__file__              ) # from the extension module
print ("         APSW version",apsw.apswversion()         ) # from the extension module
print ("   SQLite lib version",apsw.sqlitelibversion()    ) # from the sqlite library code
print ("SQLite header version",apsw.SQLITE_VERSION_NUMBER ) # from the sqlite header file at compile time


###
### Opening/creating database
###

# Creates "dbfile.apsw_db" in the current directory if it does not exist.
connection=apsw.Connection("dbfile.apsw_db")
cursor=connection.cursor()
###
### simple statement
###

cursor.execute("create table foo(x,y,z)")

###
### using different types
###

# Bound parameters map Python types onto SQLite storage classes.
cursor.execute("insert into foo values(?,?,?)", (1, 1.1, None))  # integer, float/real, Null
def sanitycheck():

    "Check all dependencies are present and at the correct version"

    # Python 2 code: prints a running checklist and raises Exception on the
    # first dependency that is missing or at the wrong version.

    print "=== Sanity check ==="

    print "python version",

    # Exact-version pins throughout: this was written for a frozen build
    # (py2exe below), so only the tested versions are accepted.
    if sys.version_info[:2]!=(2,3):

       raise Exception("Should be  Python 2.3 - this is "+sys.version)

    print "  OK"

    print "wxPython version",

    import wx

    if wx.VERSION[:4]!=(2,6,2,1):

        raise Exception("Should be wxPython 2.6.2.1.  This is "+`wx.VERSION`)

    print "  OK"

    print "wxPython is unicode build",

    if not wx.USE_UNICODE:

        raise Exception("You need a unicode build of wxPython")

    print "  OK"

    # USB support ships as a native module everywhere except Windows.
    if sys.platform!='win32':

        print "native.usb",

        import native.usb

        print "  OK"

    print "pycrypto version",

    expect='2.0.1'

    import Crypto

    if Crypto.__version__!=expect:

        raise Exception("Should be %s version of pycrypto - you have %s" % (expect, Crypto.__version__))

    print "  OK"

    print "paramiko version",

    expect='1.4 (oddish)'

    import paramiko

    if paramiko.__version__!=expect:

        raise Exception("Should be %s version of paramiko - you have %s" % (expect, paramiko.__version__))

    print "  OK"

    # These imports have no version check; mere importability is the test.
    print "bitfling",

    import bitfling

    print "  OK"

    print "pyserial",

    import serial

    print "  OK"

    print "apsw",

    import apsw

    ver="3.2.7-r1"

    if apsw.apswversion()!=ver:

        raise Exception("Should be apsw version %s - you have %s" % (ver, apsw.apswversion()))

    print "  OK"

    print "sqlite",

    ver="3.2.7"

    if apsw.sqlitelibversion()!=ver:

        raise Exception("Should be sqlite version %s - you have %s" % (ver, apsw.sqlitelibversion()))

    print "  OK"

    print "jaro/winkler string matcher",

    import native.strings.jarow

    print "  OK"

    if sys.platform=="linux2":

        print "bsddb ",

        import bsddb

        print "  OK"

    # On Windows, teach py2exe's module finder about win32com's extended
    # __path__ so the frozen build bundles win32com.shell correctly.
    if sys.platform=='win32':

        import py2exe.mf as modulefinder 

        import win32com

        for p in win32com.__path__[1:]:

            modulefinder.AddPackagePath("win32com", p)

        for extra in ["win32com.shell"]: 

            __import__(extra)

            m = sys.modules[extra]

            for p in m.__path__[1:]:

                modulefinder.AddPackagePath(extra, p)

    print "=== All checks out ==="
Ejemplo n.º 25
0

Module Attributes:
-----------

:initsql:      SQL commands that are executed whenever a new
               connection is created.
'''

from .logging import logging, QuietError # Ensure use of custom logger class
import apsw
import os

log = logging.getLogger(__name__)

# Version of the linked SQLite library as a comparable tuple of ints,
# e.g. "3.7.13" -> (3, 7, 13).
sqlite_ver = tuple(int(part) for part in apsw.sqlitelibversion().split('.'))
if sqlite_ver < (3, 7, 0):
    # Refuse to run against SQLite builds that predate the features we rely on.
    raise QuietError('SQLite version too old, must be 3.7.0 or newer!\n')


initsql = (
           # WAL mode causes trouble with e.g. copy_tree, so we don't use it at the moment
           # (cf. http://article.gmane.org/gmane.comp.db.sqlite.general/65243).
           # However, if we start using it we must initialize it *before* setting
           # locking_mode to EXCLUSIVE, otherwise we can't switch the locking
           # mode without first disabling WAL.
           'PRAGMA synchronous = OFF',
           'PRAGMA journal_mode = OFF',
           #'PRAGMA synchronous = NORMAL',
           #'PRAGMA journal_mode = WAL',
Ejemplo n.º 26
0
'''The current state of things as we approach version 3 is not bad at all. We
still need to do some work on stability and flexibility, and there's still a
lot of duplicated code in this file (db.py,) but overall we're making good
progress. The cache works fine, and UDP works as expected. Some work should
be put in on simplifying and strengthening the code in here, since this is
the part of the program that does the most work with the least clarity.'''

import os.path

import apsw

import config
import udp

version = "Database:\n  APSW version: " + apsw.apswversion() + "\n  SQLite version: " + apsw.sqlitelibversion() + "\n"

# Do we need to call Connection.close()?
# View this next line monospace...
def doclose(con):
    """Close *con* explicitly when this APSW build needs it.

    Presumably older APSW releases closed connections implicitly, while
    newer ones require an explicit Connection.close() -- TODO confirm
    against the APSW changelog.
    """
    # Third dotted component of the APSW version with any "-rN" suffix
    # stripped, e.g. "3.6.10-r1" -> 10.  Note the test is ">= 8", not
    # strictly "greater than 8" as the old comment claimed.
    if int(apsw.apswversion().split('.')[2].split('-')[0]) >= 8:
        con.close()

conf = config.config()

db = os.path.normpath(os.path.expanduser("~") + "/.oadb/anime.db")

data = {
    "anime":
    ["aid", "eps", "epcount", "spcount", "arating", "avotes", "tmprating", "tmpvotes", "average", "ratings", "year", "type", "aromaji", "akanji", "aenglish", "aother", "ashort", "synonyms", "cats"],
Ejemplo n.º 27
0
		<td>v1.10.24</td>
		<td>
			<a href="https://datatables.net/">
				https://datatables.net
			</a>
		<td>
	</tr>
</table>
""".format(version=KRAIT_VERSION,
           build=KRAIT_BUILD,
           python=sys.version.split()[0],
           pyside=PySide6.__version__,
           stria=stria.version(),
           pyfastx=pyfastx.version(),
           apsw=apsw.apswversion(),
           sqlite=apsw.sqlitelibversion(),
           primerpy=primer3.__version__)

#default parameter and type for krait
KRAIT_PARAMETERS = {
    'SSR/mono': (12, int),
    'SSR/di': (7, int),
    'SSR/tri': (5, int),
    'SSR/tetra': (4, int),
    'SSR/penta': (4, int),
    'SSR/hexa': (4, int),
    'CSSR/dmax': (10, int),
    'VNTR/minmotif': (7, int),
    'VNTR/maxmotif': (30, int),
    'VNTR/minrep': (3, int),
    #'ITR/minmsize': (1, int),
Ejemplo n.º 28
0
Archivo: db.py Proyecto: asfdfdfd/AniDB
'''The current state of things as we approach version 3 is not bad at all. We
still need to do some work on stability and flexibility, and there's still a
lot of duplicated code in this file (db.py,) but overall we're making good
progress. The cache works fine, and UDP works as expected. Some work should
be put in on simplifying and strengthening the code in here, since this is
the part of the program that does the most work with the least clarity.'''

import os.path

import apsw

import config
import udp

version = "Database:\n  APSW version: " + apsw.apswversion(
) + "\n  SQLite version: " + apsw.sqlitelibversion() + "\n"


# Do we need to call Connection.close()?
# View this next line monospace...
def doclose(con):
    """Close *con* explicitly when this APSW build needs it.

    Presumably older APSW releases closed connections implicitly, while
    newer ones require an explicit Connection.close() -- TODO confirm
    against the APSW changelog.
    """
    # Third dotted component of the APSW version with any "-rN" suffix
    # stripped, e.g. "3.6.10-r1" -> 10.  Note the test is ">= 8", not
    # strictly "greater than 8" as the old comment claimed.
    if int(apsw.apswversion().split('.')[2].split('-')[0]) >= 8:
        con.close()


conf = config.config()

db = os.path.normpath(os.path.expanduser("~") + "/.oadb/anime.db")

data = {
Ejemplo n.º 29
0
Module Attributes:
-----------

:initsql:      SQL commands that are executed whenever a new
               connection is created.
'''

from .logging import logging # Ensure use of custom logger class
from .common import QuietError
import apsw
import os

log = logging.getLogger(__name__)

# Version of the linked SQLite library as a comparable tuple of ints,
# e.g. "3.7.13" -> (3, 7, 13).
sqlite_ver = tuple(int(part) for part in apsw.sqlitelibversion().split('.'))
if sqlite_ver < (3, 7, 0):
    # Refuse to run against SQLite builds that predate the features we rely on.
    raise QuietError('SQLite version too old, must be 3.7.0 or newer!\n')

            
initsql = (
           # WAL mode causes trouble with e.g. copy_tree, so we don't use it at the moment
           # (cf. http://article.gmane.org/gmane.comp.db.sqlite.general/65243). 
           # However, if we start using it we must initialize it *before* setting 
           # locking_mode to EXCLUSIVE, otherwise we can't switch the locking
           # mode without first disabling WAL.
           'PRAGMA synchronous = OFF',
           'PRAGMA journal_mode = OFF',
           #'PRAGMA synchronous = NORMAL',
           #'PRAGMA journal_mode = WAL',
           
Ejemplo n.º 30
0
def doit():
    """Run the configured APSW-vs-pysqlite speed/correctness benchmark.

    Everything is read from the module-level ``options`` object (scale,
    database path, test names, iterations, statement-cache size, unicode
    percentage, dump filename, ...) and results are emitted through the
    module-level ``write`` helper.

    NOTE(review): Python 2 code -- relies on ``xrange``, ``unichr``,
    ``time.clock`` and list-returning ``dict.keys``.
    """
    # Fixed seed so every run generates an identical SQL statement stream,
    # making timings comparable across drivers and iterations.
    random.seed(0)
    options.tests=[t.strip() for t in options.tests.split(",")]

    write("         Python %s %s\n" % (sys.executable, str(sys.version_info)))
    write("          Scale %d\n" % (options.scale,))
    write("       Database %s\n" % (options.database,))
    write("          Tests %s\n" % (", ".join(options.tests),))
    write("     Iterations %d\n" % (options.iterations,))
    write("Statement Cache %d\n" % (options.scsize,))

    write("\n")
    if options.apsw:
        import apsw

        write("    Testing with APSW file "+apsw.__file__+"\n")
        write("              APSW version "+apsw.apswversion()+"\n")
        write("        SQLite lib version "+apsw.sqlitelibversion()+"\n")
        write("    SQLite headers version "+str(apsw.SQLITE_VERSION_NUMBER)+"\n\n")

        def apsw_setup(dbfile):
            # Per-run connection factory; registers number_name so the SQL
            # below can call it server-side.
            con=apsw.Connection(dbfile, statementcachesize=options.scsize)
            con.createscalarfunction("number_name", number_name, 1)
            return con

    if options.pysqlite:
        try:
            from pysqlite2 import dbapi2 as pysqlite
        except ImportError:
            # Fall back to the stdlib module (same code base as pysqlite2).
            import sqlite3 as pysqlite

        write("Testing with pysqlite file "+pysqlite.__file__+"\n")
        write("          pysqlite version "+pysqlite.version+"\n")
        write("            SQLite version "+pysqlite.sqlite_version+"\n\n")

        def pysqlite_setup(dbfile):
            # isolation_level=None gives autocommit, matching APSW semantics.
            con=pysqlite.connect(dbfile, isolation_level=None, cached_statements=options.scsize)
            con.create_function("number_name", 1, number_name)
            return con


    # Word tables used to spell out integers ("number names"), as in the
    # original SQLite speed test.
    ones=("zero", "one", "two", "three", "four", "five", "six", "seven", "eight", "nine",
          "ten", "eleven", "twelve", "thirteen", "fourteen", "fifteen", "sixteen", "seventeen",
          "eighteen", "nineteen")
    tens=("", "ten", "twenty", "thirty", "forty", "fifty", "sixty", "seventy", "eighty", "ninety")

    others=("thousand", "hundred", "zero")

    def _number_name(n):
        # Recursive English spelling of n (handles thousands/hundreds/tens).
        if n>=1000:
            txt="%s %s" % (_number_name(int(n/1000)), others[0])
            n=n%1000
        else:
            txt=""

        if n>=100:
            txt=txt+" "+ones[int(n/100)]+" "+others[1]
            n=n%100

        if n>=20:
            txt=txt+" "+tens[int(n/10)]
            n=n%10

        if n>0:
            txt=txt+" "+ones[n]

        txt=txt.strip()

        if txt=="":
            txt=others[2]

        return txt

    def unicodify(text):
        # Replace roughly options.unicode percent of the characters with
        # random non-ASCII code points (skipping the surrogate range).
        if options.unicode and len(text):
            newt=[]
            c=options.unicode/100.0
            for t in text:
                if random.random()>c:
                    newt.append(t)
                    continue
                while True:
                    t=random.randint(0xa1, maxuni)
                    # we don't want the surrogate range or apostrophe
                    if t<0xd800 or t>0xdfff: break
                newt.append(unichr(t))
            text="".join(newt)
        return text

    if options.unicode:
        ones=tuple([unicodify(s) for s in ones])
        tens=tuple([unicodify(s) for s in tens])
        others=tuple([unicodify(s) for s in others])

    def number_name(n):
        # Spelled-out n, optionally repeated to pad values up to options.size.
        text=_number_name(n)
        if options.size:
            text=text*int(random.randint(0, options.size)/len(text))
        return text

    def getlines(scale=50, bindings=False):
        # Generator yielding the whole benchmark workload as (sql,) or
        # (sql, bindings) tuples; reseeded so both drivers see identical SQL.
        random.seed(0)

        # RogerB added two pragmas so that only memory is used.  This means that the
        # vagaries of disk access times don't alter the results

        # database schema
        for i in """PRAGMA page_size=1024;
      PRAGMA cache_size=8192;
      PRAGMA locking_mode=EXCLUSIVE;
      PRAGMA journal_mode = OFF;
      PRAGMA temp_store = MEMORY;
      CREATE TABLE t1(a INTEGER, b INTEGER, c TEXT);
      CREATE TABLE t2(a INTEGER, b INTEGER, c TEXT);
      CREATE INDEX i2a ON t2(a);
      CREATE INDEX i2b ON t2(b);
      SELECT name FROM sqlite_master ORDER BY 1""".split(";"):
            yield (i,)

        # 50,000 inserts on an unindexed table
        yield ("BEGIN",)
        for i in xrange(1,scale*10000+1):
            r=random.randint(0,500000)
            if bindings:
                yield ("INSERT INTO t1 VALUES(:1, :2, number_name(:2))", (i, r))
            else:
                yield ("INSERT INTO t1 VALUES(%d, %d, '%s')" % (i, r, number_name(r)),)
        yield ("COMMIT",)

        # 50,000 inserts on an indexed table
        t1c_list=[]
        yield ("BEGIN",)
        for i in xrange(1,scale*10000+1):
            r=random.randint(0,500000)
            x=number_name(r)
            t1c_list.append(x)
            if bindings:
                yield ("INSERT INTO t2 VALUES(:1, :2, number_name(:2))", (i, r))
            else:
                yield ("INSERT INTO t2 VALUES(%d, %d, '%s')" % (i, r, x),)
        yield ("COMMIT",)

        # 50 SELECTs on an integer comparison.  There is no index so
        # a full table scan is required.
        for i in xrange(scale):
            yield ("SELECT count(*), avg(b) FROM t1 WHERE b>=%d AND b<%d" % (i*100, (i+10)*100),)


        # 50 SELECTs on an LIKE comparison.  There is no index so a full
        # table scan is required.
        for i in xrange(scale):
            yield ("SELECT count(*), avg(b) FROM t1 WHERE c LIKE '%%%s%%'" % (number_name(i),),)

        # Create indices
        yield ("BEGIN",)
        for i in """CREATE INDEX i1a ON t1(a);
                    CREATE INDEX i1b ON t1(b);
                    CREATE INDEX i1c ON t1(c);""".split(";"):
            yield (i,)
        yield ("COMMIT",)

        # 5000 SELECTs on an integer comparison where the integer is
        # indexed.
        for i in xrange(scale*100):
            yield ("SELECT count(*), avg(b) FROM t1 WHERE b>=%d AND b<%d" % (i*100, (i+10)*100),)

        # 100000 random SELECTs against rowid.
        for i in xrange(1,scale*2000+1):
            yield ("SELECT c FROM t1 WHERE rowid=%d" % (1+random.randint(0,50000),),)

        # 100000 random SELECTs against a unique indexed column.
        for i in xrange(1,scale*2000+1):
            yield ("SELECT c FROM t1 WHERE a=%d" % (1+random.randint(0,50000),),)

        # 50000 random SELECTs against an indexed column text column
        for i in xrange(scale*1000):
            if bindings:
                yield ("SELECT c FROM t1 WHERE c=?", (random.choice(t1c_list),),)
            else:
                yield ("SELECT c FROM t1 WHERE c='%s'" % (random.choice(t1c_list),),)

        # Vacuum
        if options.database!=":memory:":
            # opens a disk file
            yield ("VACUUM",)

        # 5000 updates of ranges where the field being compared is indexed.
        yield ("BEGIN",)
        for i in xrange(scale*100):
            yield ("UPDATE t1 SET b=b*2 WHERE a>=%d AND a<%d" % (i*2, (i+1)*2),)
        yield ("COMMIT",)

        # 50000 single-row updates.  An index is used to find the row quickly.
        yield ("BEGIN",)
        for i in xrange(scale*1000):
            if bindings:
                yield ("UPDATE t1 SET b=? WHERE a=%d" % (i,), (random.randint(0,500000),))
            else:
                yield ("UPDATE t1 SET b=%d WHERE a=%d" % (random.randint(0,500000), i),)
        yield ("COMMIT",)

        # 1 big text update that touches every row in the table.
        yield ("UPDATE t1 SET c=a",)

        # Many individual text updates.  Each row in the table is
        # touched through an index.
        yield ("BEGIN",)
        for i in xrange(1,scale*1000+1):
            if bindings:
                yield ("UPDATE t1 SET c=? WHERE a=%d" % (i,), (number_name(random.randint(0,500000)),))
            else:
                yield ("UPDATE t1 SET c='%s' WHERE a=%d" % (number_name(random.randint(0,500000)),i),)
        yield ("COMMIT",)

        # Delete all content in a table.
        yield ("DELETE FROM t1",)

        # Copy one table into another
        yield ("INSERT INTO t1 SELECT * FROM t2",)

        # Delete all content in a table, one row at a time.
        yield ("DELETE FROM t1 WHERE 1",)

        # Refill the table yet again
        yield ("INSERT INTO t1 SELECT * FROM t2",)

        # Drop the table and recreate it without its indices.
        yield ("BEGIN",)
        yield ("DROP TABLE t1",)
        yield ("CREATE TABLE t1(a INTEGER, b INTEGER, c TEXT)",)
        yield ("COMMIT",)

        # Refill the table yet again.  This copy should be faster because
        # there are no indices to deal with.
        yield ("INSERT INTO t1 SELECT * FROM t2",)

        # The three following used "ORDER BY random()" but we can't do that
        # as it causes each run to have different values, and hence different
        # amounts of sorting that have to go on.  The "random()" has been
        # replaced by "c", the column that has the stringified number

        # Select 20000 rows from the table at random.
        yield ("SELECT rowid FROM t1 ORDER BY c LIMIT %d" % (scale*400,),)

        # Delete 20000 random rows from the table.
        yield ("""  DELETE FROM t1 WHERE rowid IN
                         (SELECT rowid FROM t1 ORDER BY c LIMIT %d)""" % (scale*400,),)

        yield ("SELECT count(*) FROM t1",)

        # Delete 20000 more rows at random from the table.
        yield ("""DELETE FROM t1 WHERE rowid IN
                     (SELECT rowid FROM t1 ORDER BY c LIMIT %d)""" % (scale*400,),)

        yield ("SELECT count(*) FROM t1",)

    # Do a correctness test first
    # Runs the scale-1 workload through every selected driver/test combo in
    # memory and compares the row sets each produced.
    if options.correctness:
        write("Correctness test\n")
        if 'bigstmt' in options.tests:
            text=";\n".join([x[0] for x in getlines(scale=1)])+";"
        if 'statements' in options.tests:
            withbindings=[line for line in getlines(scale=1, bindings=True)]
        if 'statements_nobindings' in options.tests:
            withoutbindings=[line for line in getlines(scale=1, bindings=False)]

        res={}
        for driver in ('apsw', 'pysqlite'):
            if not getattr(options, driver):
                continue

            for test in options.tests:
                name=driver+"_"+test

                write(name+'\t')
                sys.stdout.flush()

                if name=='pysqlite_bigstmt':
                    write('limited functionality (ignoring)\n')
                    continue

                con=globals().get(driver+"_setup")(":memory:") # we always correctness test on memory

                if test=='bigstmt':
                    cursor=con.cursor()
                    if driver=='apsw':
                        func=cursor.execute
                    else:
                        func=cursor.executescript

                    res[name]=[row for row in func(text)]
                    write(str(len(res[name]))+"\n")
                    continue

                cursor=con.cursor()
                if test=='statements':
                    sql=withbindings
                elif test=='statements_nobindings':
                    sql=withoutbindings

                l=[]
                for s in sql:
                    for row in cursor.execute(*s):
                        l.append(row)

                res[name]=l
                write(str(len(res[name]))+"\n")

        # All elements of res should be identical
        elements=res.keys()
        elements.sort()
        for i in range(0,len(elements)-1):
            write("%s == %s %s\n" % (elements[i], elements[i+1], res[elements[i]]==res[elements[i+1]]))

        del res


    # Pre-generate the workloads once at full scale so generation cost is
    # not included in the timings below.
    text=None
    withbindings=None
    withoutbindings=None

    if options.dump_filename or "bigstmt" in options.tests:
        text=";\n".join([x[0] for x in getlines(scale=options.scale)])+";" # pysqlite requires final semicolon
        if options.dump_filename:
            open(options.dump_filename, "wt").write(text.encode("utf8"))
            sys.exit(0)

    if "statements" in options.tests:
        withbindings=list(getlines(scale=options.scale, bindings=True))

    if "statements_nobindings" in options.tests:
        withoutbindings=list(getlines(scale=options.scale, bindings=False))

    # Each test returns the amount of time taken.  Note that we include
    # the close time as well.  Otherwise the numbers become a function of
    # cache and other collection sizes as freeing members gets deferred to
    # close time.

    def apsw_bigstmt(con):
        "APSW big statement"
        try:
            for row in con.cursor().execute(text): pass
        except:
            import pdb ; pdb.set_trace()
            pass

    def pysqlite_bigstmt(con):
        "pysqlite big statement"
        for row in con.executescript(text): pass

    def apsw_statements(con, bindings=withbindings):
        "APSW individual statements with bindings"
        cursor=con.cursor()
        for b in bindings:
            for row in cursor.execute(*b): pass

    def pysqlite_statements(con, bindings=withbindings):
        "pysqlite individual statements with bindings"
        cursor=con.cursor()
        for b in bindings:
            for row in cursor.execute(*b): pass

    def apsw_statements_nobindings(con):
        "APSW individual statements without bindings"
        return apsw_statements(con, withoutbindings)

    def pysqlite_statements_nobindings(con):
        "pysqlite individual statements without bindings"
        return pysqlite_statements(con, withoutbindings)

    # Do the work
    write("\nRunning tests - elapsed, CPU (results in seconds, lower is better)\n")

    for i in range(options.iterations):
        write("%d/%d\n" % (i+1, options.iterations))
        for test in options.tests:
            # funky stuff is to alternate order each round
            for driver in ( ("apsw", "pysqlite"), ("pysqlite", "apsw"))[i%2]:
                if getattr(options, driver):
                    name=driver+"_"+test
                    func=locals().get(name, None)
                    if not func:
                        sys.stderr.write("No such test "+name+"\n")
                        sys.exit(1)

                    # Fresh database file for every timed run.
                    if os.path.exists(options.database):
                        os.remove(options.database)
                    write("\t"+func.__name__+(" "*(40-len(func.__name__))))
                    sys.stdout.flush()
                    con=locals().get(driver+"_setup")(options.database)
                    # Collect garbage before timing so stale objects from the
                    # previous run are not freed inside the timed region.
                    gc.collect(2)
                    b4cpu=time.clock()
                    b4=time.time()
                    func(con)
                    con.close() # see note above as to why we include this in the timing
                    gc.collect(2)
                    after=time.time()
                    aftercpu=time.clock()
                    write("%0.3f %0.3f\n" % (after-b4, aftercpu-b4cpu))

    # Cleanup if using valgrind
    if options.apsw:
        if hasattr(apsw, "_fini"):
            # Cleans out buffer recycle cache
            apsw._fini()
Ejemplo n.º 31
0
    from collections import OrderedDict
except ImportError:
    # Python 2.6
    from lib.collections26 import OrderedDict

try:
    from inspect import isgeneratorfunction
except ImportError:
    # Python < 2.6
    def isgeneratorfunction(obj):
        return bool((inspect.isfunction(object) or inspect.ismethod(object)) and
                    obj.func_code.co_flags & CO_GENERATOR)

# Reduce bytecode-check frequency for speed (Python 2 API; a no-op concept
# in Python 3, where sys.setcheckinterval was removed).
sys.setcheckinterval(1000)

# Versions of the linked SQLite library and of the APSW wrapper.
sqlite_version = apsw.sqlitelibversion()
apsw_version = apsw.apswversion()

# SQL prefix used when creating virtual tables, plus feature flags that are
# switched on below for sufficiently new SQLite releases.
VTCREATE = 'create virtual table temp.'
SQLITEAFTER3711 = False
SQLITEAFTER380 = False
sqlite_version_split = [int(x) for x in sqlite_version.split('.')]

# Lexicographic list comparison on [major, minor, micro]: SQLite >= 3.8.0.
if sqlite_version_split[0:3] >= [3, 8, 0]:
    SQLITEAFTER380 = True

try:
    if sqlite_version_split[0:3] >= [3, 7, 11]:
        VTCREATE = 'create virtual table if not exists temp.'
        SQLITEAFTER3711 = True
except Exception, e:
Ejemplo n.º 32
0
# Best-effort import of apsw: prefer the real extension, then a package-local
# stand-in; on total failure leave apsw = None so callers can degrade.
try:
    import apsw
except ImportError:
    # https://github.com/ghaering/pysqlite  (https://docs.python.org/2/library/sqlite3.html) -- is C code...
    # pypi.python.org/pypi/PyDbLite , www.pydblite.net/en/index.html -- pure python, but a tad different from sqlite3.
    # from pydblite import sqlite # pydblite relies on built-in sqlite3 or pysqlite2...
    try:
        from .sqlite3_adhoc import apsw
        print("chrome_extract: Using sqlite3_adhoc apsw replacement module...")
    except ImportError as exc:
        print("ImportError while importing sqlite3 apsw stand-in module:", exc)
        #raise exc # Not fatal...
        apsw = None
print("apsw module:", apsw)
if apsw:
    # Report which sqlite/apsw we ended up with (real module or stand-in).
    print("apsw sqlite version:", apsw.sqlitelibversion())
    print("apsw version:", apsw.apswversion())


try:
    from Crypto.Cipher import AES
except ImportError:
    #warnings.warn("Could not import Crypto.Cipher AES module.")
    """
    PyCrypto (Crypto) alternatives: (https://github.com/dlitz/pycrypto)

    pyOCB (github.com/kravietz/pyOCB) - could be used, but Chrome encrypts as CBC, not OCB.
    AES-Python (https://github.com/bozhu/AES-Python)
    --- another pure-python implementation. However, does not have an easy-to-use interface.
    --- Not sure what modes are supported, seems quite ad-hoc. Like... "hey, let me try to implement AES..."
    --- "only for AES-128"
Ejemplo n.º 33
0
# --- Interactive-shell (mterm) completion state and startup banner (Python 2) ---
# SQL keywords and mterm phrases offered by tab completion.
sqlandmtermstatements=['select ', 'create ', 'where ', 'table ', 'group by ', 'drop ', 'order by ', 'index ', 'from ', 'alter ', 'limit ', 'delete ', '..',
    "attach database '", 'detach database ', 'distinct', 'exists ']
# Completion candidates for the shell's "." meta-commands.
dotcompletions=['.help ', '.colnums', '.schema ', '.functions ', '.tables', '.quote', '.explain ', '.vacuum', '.quit']
# All registered madIS function names (Python 2: dict.keys() returns lists).
allfuncs=functions.functions['vtable'].keys()+functions.functions['row'].keys()+functions.functions['aggregate'].keys()
# Known tables and completion entries; populated by update_tablelist().
alltables=[]
alltablescompl=[]
updated_tables=set()
update_tablelist()
# Column-completion state for recently touched tables.
lastcols=[]
newcols=[]
colscompl=[]

#Intro Message
# Banner is suppressed when input is piped rather than interactive.
if not pipedinput:
    print mtermdetails
    # Trailing comma keeps the cursor on the line so the madIS version
    # (or a bare newline) can be appended.
    print "running on Python: "+'.'.join([str(x) for x in sys.version_info[0:3]])+', APSW: '+apsw.apswversion()+', SQLite: '+apsw.sqlitelibversion(),
    try:
        sys.stdout.write(", madIS: "+functions.VERSION+'\n')
    except:
        # functions.VERSION may be missing; just end the line.
        print
    print intromessage

number_of_kb_exceptions=0
while True:
    statement = raw_input_no_history("mterm> ")
    if statement==None:
        number_of_kb_exceptions+=1
        print
        if number_of_kb_exceptions<2:
            continue
        else:
Ejemplo n.º 34
0
    return next(v) if py3 else v.next()


import os
import time
import apsw

###
### Check we have the expected version of apsw and sqlite
###

# The @@CAPTURE/@@ENDCAPTURE markers delimit output captured for the
# generated APSW documentation examples.
#@@CAPTURE
print("      Using APSW file", apsw.__file__)  # from the extension module
print("         APSW version", apsw.apswversion())  # from the extension module
print("   SQLite lib version",
      apsw.sqlitelibversion())  # from the sqlite library code
print(
    "SQLite header version",
    apsw.SQLITE_VERSION_NUMBER)  # from the sqlite header file at compile time
#@@ENDCAPTURE

###
### Opening/creating database
###

# Creates "dbfile" in the current directory if it does not already exist.
connection = apsw.Connection("dbfile")
cursor = connection.cursor()

###
### simple statement  @@ example-cursor
###
Ejemplo n.º 35
0
	def __init__(self, cons_list, func_list, struc_list, apps_list):

		print "Using APSW file",apsw.__file__     # from the extension module
		print "APSW version",apsw.apswversion()  # from the extension module
		print "SQLite version",apsw.sqlitelibversion()  # from the sqlite library code



###
### Opening/creating database, initialize database
###             
		self.apsw_version = apsw.apswversion()
		self.release_number = self.apsw_version[4:6]
		
		self.db_path = os.path.join(os.environ['PWD'],'db')
		#self.confReader = ConfReader('sockets_analysis.conf')

		#self.functions = self.confReader.getItems('functions')
		#self.structures = self.confReader.getItems('structures')
		
		self.functions = func_list
		self.structures = struc_list
		self.constants = cons_list
		function_temp = ""
		
		structure_temp = ""
		constant_temp = ""

		for constant in self.constants:
			constant_temp = constant_temp + constant[0].strip() + " int,"
		
		for function in self.functions:
			function_temp = function_temp + function[0].strip() + " int,"

		i = 0
		len_item = len(self.structures) # length of items 
		for structure in self.structures:
			if i < len_item - 1:
				structure_temp = structure_temp + structure[0].strip() + " int,"
			else:
				structure_temp = structure_temp + structure[0].strip() + " int"

			i = i + 1
		
		creat_table = "CREATE TABLE socket_statistic (name varchar PRIMARY KEY, " + constant_temp + function_temp  + structure_temp + ")"
		creat_sum_table =  "CREATE TABLE socket_statistic_sum (socket_api_name varchar PRIMARY KEY , sum_number int)"
		
		print creat_table
		print creat_sum_table

		#print creat_table		
		
		

		if os.path.exists(self.db_path): 
			print "database path existing......" 
			#print "delete the existing", self.db_path
			#shutil.rmtree(self.db_path) #Removes directories recursively
			#pass
		
		else:
			print "create the db directory"
			os.mkdir('db')
		database_file =  os.path.join(self.db_path, 'socket_analysis_data_sos.db')
		self.connection=apsw.Connection(database_file)
		self.cursor=self.connection.cursor()
		
		"""
		self.cursor.execute(creat_table)		
		"""
		
		try:
			self.cursor.execute(creat_table)		
		except:
			print "socket_statistic table is already there or something wrong with creating DB!!!"


		try:
			self.cursor.execute(creat_sum_table)		
		except:
			print "socket_statistic_sum table is already there or something wrong with creating DB!!!"