#!/usr/bin/env python

import os, shutil, apsw, distutils.dir_util


def extract(case, userdata):
    print("--> Extracting SMS/MMS messages\n\n")
    extractdir = os.path.join(case, "extracted data")
    if not os.path.exists(extractdir):
        os.makedirs(extractdir)
    extractdir = os.path.join(extractdir, "mms-sms")
    if not os.path.exists(extractdir):
        os.makedirs(extractdir)
    dbdir = os.path.join(extractdir, "db")
    if not os.path.exists(dbdir):
        os.makedirs(dbdir)
    db_src = os.path.join(userdata, "data", "com.android.providers.telephony",
                          "databases", "mmssms.db")
    db_dest = os.path.join(dbdir, "mmssms.db")
    shutil.copyfile(db_src, db_dest)

    dbconnection = apsw.Connection(db_dest)
    filepath = os.path.join(extractdir, "Messages.txt")
    fileopen = open(filepath, "w", encoding='utf8')
    dbshell = apsw.Shell(stdout=fileopen, db=dbconnection)
    dbshell.process_command(".header on")
    dbshell.process_sql("select * from sms")
    fileopen.close()
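For orientation, a hypothetical invocation of this extractor; both paths below are illustrative assumptions, not values from the original script:

# Hypothetical paths for illustration only.
extract("/cases/case-001", "/evidence/android-userdata")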
Example #2
File: db.py  Project: asfdfdfd/AniDB
def rmlid(lid=0):
    '''Remove a record from the anime cache.'''
    handle = apsw.Connection(db)
    handle.cursor().execute("delete from mylist where lid=?", (lid, ))
    doclose(handle)
Example #3
import apsw
c = apsw.Connection("../data/imdb.sqlite")
movie_data = c.cursor().execute("select * from movie_data").fetchall()
c.close()
del c
del apsw

X = [x.split(',') for (x, y) in movie_data]
y = [y for (x, y) in movie_data]
del movie_data

from sklearn.pipeline import Pipeline
from xgboost import XGBRegressor
from sklearn.feature_extraction import FeatureHasher
from sklearn.neural_network import BernoulliRBM
thePipe = Pipeline([("hash", FeatureHasher(input_type="string")),
                    ('RBM', BernoulliRBM()), ('XGB', XGBRegressor())])

from sklearn.model_selection import GridSearchCV  # was sklearn.grid_search in older scikit-learn
from sklearn.metrics import mean_squared_error, make_scorer

paramGrid = {
    'XGB__max_depth': [3],
    'XGB__n_estimators': [100],
    'RBM__n_components': [20],
    "hash__n_features": [100000]
}

theScorer = make_scorer(mean_squared_error, greater_is_better=False)

# The source snippet is truncated here; a plausible completion using the
# grid and scorer defined above:
clf = GridSearchCV(thePipe, paramGrid, scoring=theScorer)
Example #4
from CustomSQLFeatureExtractor import *
import numpy as np
import apsw
import datetime
from sklearn.pipeline import Pipeline
from sklearn.linear_model import ElasticNetCV
from sklearn.metrics import mean_squared_error, make_scorer, mean_absolute_error, median_absolute_error
from sklearn.model_selection import train_test_split  # was sklearn.cross_validation in older scikit-learn

# get and split the data
homefolder = '../data'
# read text of query from disk into memory
query = open(homefolder + '/transformData.sql', "r").read()
c = apsw.Connection(homefolder + '/imdb.sqlite').cursor()
all_data = c.execute(
    'SELECT movie_id, rating FROM movies ORDER BY movie_id').fetchall()
train_data, test_data = train_test_split(all_data,
                                         train_size=0.8,
                                         random_state=17273)
train_data = np.asarray(sorted(train_data))
test_data = np.asarray(sorted(test_data))
X_train, y_train = train_data[:, 0], train_data[:, 1]
X_test, y_test = test_data[:, 0], test_data[:, 1]

# construct pipeline
featureExtractor = CustomSQLFeatureExtractor(query)
elastic = ElasticNetCV(n_jobs=1,
                       cv=10,
                       l1_ratio=[.5, 1],
                       n_alphas=100,
                       eps=0.001)
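The snippet ends before the pieces are wired together; a minimal hedged sketch of how they would plausibly be combined (the Pipeline step names and the fit/score calls are assumptions, not from the original):

# Hedged sketch: assemble and evaluate the pipeline defined above.
pipe = Pipeline([('features', featureExtractor), ('enet', elastic)])
pipe.fit(X_train, y_train)
pred = pipe.predict(X_test)
print("test MSE:", mean_squared_error(y_test, pred))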
Example #5
File: db.py  Project: asfdfdfd/AniDB
def rmeid(eid=0):
    '''Remove a record from the anime cache.'''
    handle = apsw.Connection(db)
    handle.cursor().execute("delete from episodes where eid=?", (eid, ))
    doclose(handle)
Example #6
import os
import re
import json
import random
import apsw
import time

# import flask web microframework
from flask import Flask
from flask import request

# import from the 21 Developer Library
from two1.lib.wallet import Wallet
from two1.lib.bitserv.flask import Payment

connection = apsw.Connection("apibb.db")

name_re = re.compile(r"^[a-zA-Z0-9][a-zA-Z0-9-\.]*$")

app = Flask(__name__)
wallet = Wallet()
payment = Payment(app, wallet)


def expire_ads():
    cursor = connection.cursor()
    cursor.execute("DELETE FROM ads WHERE expires < datetime('now')")


def expire_names():
    cursor = connection.cursor()
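The listing is truncated inside expire_names; by analogy with expire_ads above, the missing body is presumably a single DELETE (a hedged guess — the names table and its expires column are assumptions):

    # Hedged completion by analogy with expire_ads(); table/column names are guesses.
    cursor.execute("DELETE FROM names WHERE expires < datetime('now')")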
Example #7
    def __init__(self, name):
        # open the database and keep a single cursor for reuse
        self._db = apsw.Connection(name)
        self._cursor = self._db.cursor()
Example #8
import logging
import logging.config
import os
import sys
import requests
requests.packages.urllib3.disable_warnings()
import xmltodict
from six.moves import queue as Queue
from threading import Thread
import re
import json
import apsw
import time

conn = apsw.Connection("tumblr.db")
cu = conn.cursor()
logging.config.fileConfig("logger.conf")
logger = logging.getLogger("example01")
logger03 = logging.getLogger("example03")

ss = requests.Session()

# Setting timeout
TIMEOUT = 15

# Retry times
RETRY = 5

# Medium Index Number that Starts from
START = 0
Example #9
import collections
import copy
import sys
import tempfile
import urllib.request

op = []
op.append(".. Automatically generated by code2rst.py")
op.append("   code2rst.py %s %s" % (sys.argv[2], sys.argv[3]))
op.append("   Edit %s not this file!" % (sys.argv[2], ))
op.append("")
if sys.argv[2] != "src/apsw.c":
    op.append(".. currentmodule:: apsw")
    op.append("")

import apsw

with tempfile.NamedTemporaryFile() as f:
    f.write(urllib.request.urlopen(basesqurl + "toc.db").read())
    f.flush()

    db = apsw.Connection(f.name)

    funclist = {}
    consts = collections.defaultdict(lambda: copy.deepcopy({"vars": []}))
    const2page = {}

    for name, type, title, uri in db.cursor().execute(
            "select name, type, title, uri from toc"):
        if type == "function":
            funclist[name] = basesqurl + uri
        elif type == "constant":
            const2page[name] = basesqurl + uri
            consts[title]["vars"].append(name)
            consts[title]["page"] = basesqurl + uri.split("#")[0]

Example #10
    def __init__(self):
        self.dbConn = apsw.Connection("o.db")
        self.cursor = self.dbConn.cursor()
Example #11
    def run(self):
        logger = logging.getLogger('werkzeug')
        logger.setLevel(logging.WARNING)

        db = apsw.Connection(config.DATABASE)
        db.setrowtrace(util.rowtracer)

        @dispatcher.add_method
        def get_address(address):
            try:
                return util.get_address(db, address=address)
            except exceptions.InvalidAddressError:
                return None

        @dispatcher.add_method
        def get_debits(address=None,
                       asset=None,
                       order_by=None,
                       order_dir=None):
            return util.get_debits(db,
                                   address=address,
                                   asset=asset,
                                   order_by=order_by,
                                   order_dir=order_dir)

        @dispatcher.add_method
        def get_credits(address=None,
                        asset=None,
                        order_by=None,
                        order_dir=None):
            return util.get_credits(db,
                                    address=address,
                                    asset=asset,
                                    order_by=order_by,
                                    order_dir=order_dir)

        @dispatcher.add_method
        def get_balances(address=None,
                         asset=None,
                         order_by=None,
                         order_dir=None):
            return util.get_balances(db,
                                     address=address,
                                     asset=asset,
                                     order_by=order_by,
                                     order_dir=order_dir)

        @dispatcher.add_method
        def get_sends(source=None,
                      destination=None,
                      is_valid=None,
                      order_by=None,
                      order_dir=None,
                      start_block=None,
                      end_block=None):
            return util.get_sends(db,
                                  source=source,
                                  destination=destination,
                                  validity='Valid' if bool(is_valid) else None,
                                  order_by=order_by,
                                  order_dir=order_dir,
                                  start_block=start_block,
                                  end_block=end_block)

        @dispatcher.add_method
        def get_orders(address=None,
                       is_valid=True,
                       show_empty=True,
                       show_expired=True,
                       order_by=None,
                       order_dir=None,
                       start_block=None,
                       end_block=None):
            return util.get_orders(
                db,
                address=address,
                show_empty=show_empty,
                show_expired=show_expired,
                validity='Valid' if bool(is_valid) else None,
                order_by=order_by,
                order_dir=order_dir,
                start_block=start_block,
                end_block=end_block)

        @dispatcher.add_method
        def get_order_matches(address=None,
                              is_valid=True,
                              is_mine=False,
                              tx0_hash=None,
                              tx1_hash=None,
                              order_by=None,
                              order_dir=None,
                              start_block=None,
                              end_block=None):
            return util.get_order_matches(
                db,
                is_mine=is_mine,
                address=address,
                tx0_hash=tx0_hash,
                tx1_hash=tx1_hash,
                validity='Valid' if bool(is_valid) else None,
                order_by=order_by,
                order_dir=order_dir,
                start_block=start_block,
                end_block=end_block)

        @dispatcher.add_method
        def get_btcpays(is_valid=True,
                        order_by=None,
                        order_dir=None,
                        start_block=None,
                        end_block=None):
            return util.get_btcpays(
                db,
                validity='Valid' if bool(is_valid) else None,
                order_by=order_by,
                order_dir=order_dir,
                start_block=start_block,
                end_block=end_block)

        @dispatcher.add_method
        def get_issuances(asset=None,
                          issuer=None,
                          is_valid=True,
                          order_by=None,
                          order_dir=None,
                          start_block=None,
                          end_block=None):
            return util.get_issuances(
                db,
                asset=asset,
                issuer=issuer,
                validity='Valid' if bool(is_valid) else None,
                order_by=order_by,
                order_dir=order_dir,
                start_block=start_block,
                end_block=end_block)

        @dispatcher.add_method
        def get_broadcasts(source=None,
                           is_valid=True,
                           order_by=None,
                           order_dir=None,
                           start_block=None,
                           end_block=None):
            return util.get_broadcasts(
                db,
                source=source,
                validity='Valid' if bool(is_valid) else None,
                order_by=order_by,
                order_dir=order_dir,
                start_block=start_block,
                end_block=end_block)

        @dispatcher.add_method
        def get_bets(address=None,
                     show_empty=False,
                     is_valid=True,
                     order_by=None,
                     order_dir=None,
                     start_block=None,
                     end_block=None):
            return util.get_bets(db,
                                 address=address,
                                 show_empty=show_empty,
                                 validity='Valid' if bool(is_valid) else None,
                                 order_by=order_by,
                                 order_dir=order_dir,
                                 start_block=start_block,
                                 end_block=end_block)

        @dispatcher.add_method
        def get_bet_matches(address=None,
                            is_valid=True,
                            tx0_hash=None,
                            tx1_hash=None,
                            order_by=None,
                            order_dir=None,
                            start_block=None,
                            end_block=None):
            return util.get_bet_matches(
                db,
                address=address,
                tx0_hash=tx0_hash,
                tx1_hash=tx1_hash,
                validity='Valid' if bool(is_valid) else None,
                order_by=order_by,
                order_dir=order_dir,
                start_block=start_block,
                end_block=end_block)

        @dispatcher.add_method
        def get_dividends(address=None,
                          asset=None,
                          is_valid=True,
                          order_by=None,
                          order_dir=None,
                          start_block=None,
                          end_block=None):
            return util.get_dividends(
                db,
                address=address,
                asset=asset,
                validity='Valid' if bool(is_valid) else None,
                order_by=order_by,
                order_dir=order_dir,
                start_block=start_block,
                end_block=end_block)

        @dispatcher.add_method
        def get_burns(address=None,
                      is_valid=True,
                      order_by=None,
                      order_dir=None,
                      start_block=None,
                      end_block=None):
            return util.get_burns(db,
                                  address=address,
                                  validity='Valid' if bool(is_valid) else None,
                                  order_by=order_by,
                                  order_dir=order_dir,
                                  start_block=start_block,
                                  end_block=end_block)

        @dispatcher.add_method
        def do_send(source, destination, quantity, asset, unsigned=False):
            unsigned_tx_hex = send.create(db, source, destination, quantity,
                                          asset)
            return bitcoin.transmit(unsigned_tx_hex,
                                    unsigned=unsigned,
                                    ask=False)

        @dispatcher.add_method
        def do_order(source,
                     give_quantity,
                     give_asset,
                     get_quantity,
                     get_asset,
                     expiration,
                     fee_required=0,
                     fee_provided=config.MIN_FEE / config.UNIT,
                     unsigned=False):
            unsigned_tx_hex = order.create(db, source, give_asset,
                                           give_quantity, get_asset,
                                           get_quantity, expiration,
                                           fee_required, fee_provided)
            return bitcoin.transmit(unsigned_tx_hex,
                                    unsigned=unsigned,
                                    ask=False)

        @dispatcher.add_method
        def do_btcpay(order_match_id, unsigned=False):
            unsigned_tx_hex = btcpay.create(db, order_match_id)
            return bitcoin.transmit(unsigned_tx_hex,
                                    unsigned=unsigned,
                                    ask=False)

        @dispatcher.add_method
        def do_issuance(source,
                        quantity,
                        asset,
                        divisible,
                        transfer_destination=None,
                        unsigned=False):
            unsigned_tx_hex = issuance.create(db, source, transfer_destination,
                                              asset, quantity, divisible)
            return bitcoin.transmit(unsigned_tx_hex,
                                    unsigned=unsigned,
                                    ask=False)

        @dispatcher.add_method
        def do_broadcast(source,
                         fee_multiplier,
                         text,
                         value=0,
                         unsigned=False):
            unsigned_tx_hex = broadcast.create(db, source, int(time.time()),
                                               value, fee_multiplier, text)
            return bitcoin.transmit(unsigned_tx_hex,
                                    unsigned=unsigned,
                                    ask=False)

        @dispatcher.add_method
        def do_bet(source,
                   feed_address,
                   bet_type,
                   deadline,
                   wager,
                   counterwager,
                   expiration,
                   target_value=0.0,
                   leverage=5040,
                   unsigned=False):
            bet_type_id = util.BET_TYPE_ID[bet_type]
            unsigned_tx_hex = bet.create(db, source, feed_address, bet_type_id,
                                         deadline, wager, counterwager,
                                         target_value, leverage, expiration)
            return bitcoin.transmit(unsigned_tx_hex,
                                    unsigned=unsigned,
                                    ask=False)

        @dispatcher.add_method
        def do_dividend(source,
                        quantity_per_share,
                        share_asset,
                        unsigned=False):
            unsigned_tx_hex = dividend.create(db, source, quantity_per_share,
                                              share_asset)
            return bitcoin.transmit(unsigned_tx_hex,
                                    unsigned=unsigned,
                                    ask=False)

        @dispatcher.add_method
        def do_burn(source, quantity, unsigned=False):
            unsigned_tx_hex = burn.create(db, source, quantity)
            return bitcoin.transmit(unsigned_tx_hex,
                                    unsigned=unsigned,
                                    ask=False)

        @dispatcher.add_method
        def do_cancel(offer_hash, unsigned=False):
            unsigned_tx_hex = cancel.create(db, offer_hash)
            return bitcoin.transmit(unsigned_tx_hex,
                                    unsigned=unsigned,
                                    ask=False)

        @Request.application
        def application(request):
            response = JSONRPCResponseManager.handle(
                request.get_data(cache=False, as_text=True), dispatcher)
            return Response(response.json, mimetype='application/json')

        # util.database_check(db) # TODO Have this run regularly.
        run_simple(config.RPC_HOST, config.RPC_PORT, application)
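For reference, a hedged client-side sketch of calling one of the dispatcher methods registered above over JSON-RPC 2.0; the server address comes from config.RPC_HOST/config.RPC_PORT, so the URL below is an assumption:

# Hedged sketch: query get_balances over JSON-RPC 2.0 (URL is an assumption).
import json
import requests

payload = {"jsonrpc": "2.0", "id": 0, "method": "get_balances",
           "params": {"order_by": "asset"}}
r = requests.post("http://localhost:4000/", data=json.dumps(payload),
                  headers={"content-type": "application/json"})
print(r.json()["result"])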
Example #12
File: docmissing.py  Project: yap2p/apsw
        if len(line) >= 2:
            if line[0] == ".." and line[1] in ("method::", "automethod::", "attribute::"):
                funcname = line[2].split("(")[0].strip()

                if "." in funcname:
                    klass, funcname = funcname.split(".", 1)
                else:
                    klass = "apsw"
                if klass not in classes:
                    classes[klass] = []
                classes[klass].append(funcname)


# ok, so we know what was documented.  Now let's see what exists

con = apsw.Connection(":memory:")
cur = con.cursor()
cur.execute("create table x(y); insert into x values(x'abcdef1012'); select * from x")
blob = con.blobopen("main", "x", "y", con.last_insert_rowid(), 0)
vfs = apsw.VFS("aname", "")
vfsfile = apsw.VFSFile("", ":memory:", [apsw.SQLITE_OPEN_MAIN_DB | apsw.SQLITE_OPEN_CREATE | apsw.SQLITE_OPEN_READWRITE, 0])

# virtual tables aren't real - just check their size hasn't changed
assert len(classes['VTModule'])==2
del classes['VTModule']
assert len(classes['VTTable'])==13
del classes['VTTable']
assert len(classes['VTCursor'])==6
del classes['VTCursor']

for name, obj in ( ('Connection', con),
Example #13
def _resume_db_factory() -> apsw.Connection:
    conn = apsw.Connection(str(RESUME_DB_PATH))
    conn.setbusytimeout(120_000)
    return conn
Example #14
def maketable(reportfile, case):
    reportfiledb = os.path.join(case, "extracted data", "mms-sms", "db",
                                "mmssms.db")
    reportfile_connection = apsw.Connection(reportfiledb)
    reportfile_cursor1 = reportfile_connection.cursor()
    reportfile_cursor2 = reportfile_connection.cursor()
    reportfile_cursor3 = reportfile_connection.cursor()

    contactfiledb = os.path.join(case, "extracted data", "contacts", "db",
                                 "contacts2.db")
    contactfile_connection = apsw.Connection(contactfiledb)
    contactfile_cursor1 = contactfile_connection.cursor()
    contactfile_cursor2 = contactfile_connection.cursor()

    phone1 = re.compile("0")    # local numbers starting with 0
    phone2 = re.compile(r"\+")  # international numbers starting with +

    reportfile.write("<table CELLPADDING=8 CELLSPACING=0 VALIGN=TOP>\n")
    reportfile.write("</table>\n")
    reportfile.write("<div class=\"ResultsTable\">\n")
    reportfile.write("<table>\n")
    reportfile.write(
        "<tr class=\"title\"><td><b>Status</b></td><td><b>Name</b></td><td><b>Number</b></td><td><b>Content</b></td><td><b>Date/Time Received</b></td><td><b>Date/Time Sent</b></td></tr>\n"
    )
    for row1 in reportfile_cursor1.execute(
            "SELECT _id FROM sms ORDER BY date DESC"):
        for entry in row1:
            for row2 in reportfile_cursor2.execute(
                    "SELECT type FROM sms where _id = " + str(entry)):
                for status in row2:
                    if str(status) == '1':
                        typename = 'Received'
                        reportfile.write("<TR class=\"Received\">")
                    elif str(status) == '2':
                        typename = 'Sent'
                        reportfile.write("<TR class=\"Sent\">")
                    elif str(status) == '3':
                        typename = 'Draft'
                        reportfile.write("<TR class=\"Draft\">")
                    elif str(status) == '5':
                        typename = 'Error'
                        reportfile.write("<TR class=\"Error\">")
                    else:
                        typename = 'Unknown (' + str(status) + ')'
                        reportfile.write("<TR>")
                    reportfile.write("<TD>" + typename + "</TD>")
            for row2 in reportfile_cursor2.execute(
                    "SELECT address FROM sms where _id = " + str(entry)):
                for number in row2:
                    namestr = 'Unknown'
                    address = str(number)
                    address = address.replace(" ", "")
                    if phone1.match(address) or phone2.match(address):
                        for row3 in contactfile_cursor1.execute(
                                "SELECT raw_contact_id FROM phone_lookup where normalized_number = '"
                                + address + "'"):
                            for contactid in row3:
                                if str(contactid) == 'None':
                                    namestr = 'None'
                                else:
                                    for row4 in contactfile_cursor2.execute(
                                            "SELECT display_name FROM raw_contacts where _id = '"
                                            + str(contactid) + "'"):
                                        for name in row4:
                                            namestr = str(name)
                    reportfile.write("<TD>" + namestr + "</TD>")
            for row2 in reportfile_cursor2.execute(
                    "SELECT address FROM sms where _id = " + str(entry)):
                for number in row2:
                    reportfile.write("<TD>" + str(number) + "</TD>")
            for row2 in reportfile_cursor2.execute(
                    "SELECT body FROM sms where _id = " + str(entry)):
                for body in row2:
                    reportfile.write("<TD>" + str(body) + "</TD>")
            for row2 in reportfile_cursor2.execute(
                    "SELECT datetime(date/1000,'unixepoch','localtime') as date FROM sms where _id = "
                    + str(entry)):
                for date in row2:
                    reportfile.write("<TD>" + str(date) + "</TD>")
            for row2 in reportfile_cursor2.execute(
                    "SELECT datetime(date_sent/1000,'unixepoch','localtime') as date_sent FROM sms where _id = "
                    + str(entry)):
                for sent in row2:
                    reportfile.write("<TD>" + str(sent) + "</TD>")

            reportfile.write("</TR>")
Example #15
def db_connect():
    return apsw.Connection(DB)
Example #16
    def books(self, oncard=None, end_session=True):
        import apsw
        dummy_bl = BookList(None, None, None)

        if ((oncard == 'carda' and not self._card_a_prefix)
                or (oncard and oncard != 'carda')):
            self.report_progress(1.0, _('Getting list of books on device...'))
            return dummy_bl

        prefix = self._card_a_prefix if oncard == 'carda' else self._main_prefix

        # Let parent driver get the books
        self.booklist_class.rebuild_collections = self.rebuild_collections
        bl = USBMS.books(self, oncard=oncard, end_session=end_session)

        dbpath = self.normalize_path(prefix + DBPATH)
        debug_print("SQLite DB Path: " + dbpath)

        with closing(apsw.Connection(dbpath)) as connection:
            cursor = connection.cursor()
            # Query collections
            query = '''
                SELECT books._id, tags.tagname
                    FROM booktags
                    LEFT OUTER JOIN books
                    LEFT OUTER JOIN tags
                    WHERE booktags.book_id = books._id AND
                    booktags.tag_id = tags._id
                '''
            cursor.execute(query)

            bl_collections = {}
            for i, row in enumerate(cursor):
                bl_collections.setdefault(row[0], [])
                bl_collections[row[0]].append(row[1])

            # collect information on offsets, but assume any
            # offset we already calculated is correct
            if self.device_offset is None:
                query = 'SELECT filename, addeddate FROM books'
                cursor.execute(query)

                time_offsets = {}
                for i, row in enumerate(cursor):
                    try:
                        comp_date = int(
                            os.path.getmtime(
                                self.normalize_path(prefix + row[0])) * 1000)
                    except (OSError, IOError, TypeError):
                        # In case the db has incorrect path info
                        continue
                    device_date = int(row[1])
                    offset = device_date - comp_date
                    time_offsets.setdefault(offset, 0)
                    time_offsets[offset] = time_offsets[offset] + 1

                try:
                    device_offset = max(time_offsets,
                                        key=lambda a: time_offsets.get(a))
                    debug_print("Device Offset: %d ms" % device_offset)
                    self.device_offset = device_offset
                except ValueError:
                    debug_print("No Books To Detect Device Offset.")

            for idx, book in enumerate(bl):
                query = 'SELECT _id, thumbnail FROM books WHERE filename = ?'
                t = (book.lpath, )
                cursor.execute(query, t)

                for i, row in enumerate(cursor):
                    book.device_collections = bl_collections.get(row[0], None)
                    thumbnail = row[1]
                    if thumbnail is not None:
                        thumbnail = self.normalize_path(prefix + thumbnail)
                        book.thumbnail = ImageWrapper(thumbnail)

            cursor.close()

        return bl
Example #17
    def _database(self, dbname):
        # return a connection to the named database file
        return lite.Connection(self._resource("databases",
                                              str(dbname) + ".db"))
Example #18
def reparse(testnet=True):
    """
    Reparse all transaction from the database.
     - Create a new in-memory DB, copy the DB that is on-disk
     - Reparse DB, automatically compares consensus hashes to the original ones from the on-disk DB
    """
    options = dict(COUNTERPARTYD_OPTIONS)
    server.initialise(database_file=':memory:', testnet=testnet, **options)

    logger = logging.getLogger()

    if testnet:
        config.PREFIX = b'TESTXXXX'

    memory_db = database.get_connection(read_only=False)

    data_dir = appdirs.user_data_dir(appauthor=config.XCP_NAME,
                                     appname=config.APP_NAME,
                                     roaming=True)
    prod_db_path = os.path.join(
        data_dir, '{}{}.db'.format(config.APP_NAME,
                                   '.testnet' if testnet else ''))
    assert os.path.exists(
        prod_db_path), "database path {} does not exist".format(prod_db_path)
    prod_db = apsw.Connection(prod_db_path)
    prod_db.setrowtrace(database.rowtracer)

    # Copy the DB from the file on disk (should be a DB file with at least all the checkpoints);
    #  the in-memory DB shouldn't have been written to before this point.
    with memory_db.backup("main", prod_db, "main") as backup:
        while not backup.done:
            backup.step(100)

    # Drop most tables (except blocks, transactions, undolog)
    memory_cursor = memory_db.cursor()
    for table in blocks.TABLES + ['balances']:
        memory_cursor.execute('''DROP TABLE IF EXISTS {}'''.format(table))

    # Check that all checkpoint blocks are in the database to be tested.
    if testnet:
        CHECKPOINTS = check.CHECKPOINTS_TESTNET
    else:
        CHECKPOINTS = check.CHECKPOINTS_MAINNET
    for block_index in CHECKPOINTS.keys():
        block_exists = bool(
            list(
                memory_cursor.execute(
                    '''SELECT * FROM blocks WHERE block_index = ?''',
                    (block_index, ))))
        assert block_exists, "block #%d does not exist" % block_index

    # Clean consensus hashes if the first block hash doesn't match the checkpoint.
    checkpoints = check.CHECKPOINTS_TESTNET if config.TESTNET else check.CHECKPOINTS_MAINNET
    columns = [
        column['name']
        for column in memory_cursor.execute('''PRAGMA table_info(blocks)''')
    ]
    for field in ['ledger_hash', 'txlist_hash']:
        if field in columns:
            sql = '''SELECT {} FROM blocks  WHERE block_index = ?'''.format(
                field)
            first_hash = list(
                memory_cursor.execute(sql, (config.BLOCK_FIRST, )))[0][field]
            if first_hash != checkpoints[config.BLOCK_FIRST][field]:
                logger.info('First hash changed. Cleaning {}.'.format(field))
                memory_cursor.execute(
                    '''UPDATE blocks SET {} = NULL'''.format(field))

    # Initialise missing tables
    blocks.initialise(memory_db)
    previous_ledger_hash = None
    previous_txlist_hash = None
    previous_messages_hash = None

    # Reparse each block; if a ConsensusError is thrown, show the difference between the old and new data before re-raising.
    memory_cursor.execute('''SELECT * FROM blocks ORDER BY block_index''')
    for block in memory_cursor.fetchall():
        try:
            util.CURRENT_BLOCK_INDEX = block['block_index']
            previous_ledger_hash, previous_txlist_hash, previous_messages_hash, previous_found_messages_hash = blocks.parse_block(
                memory_db,
                block['block_index'],
                block['block_time'],
                previous_ledger_hash=previous_ledger_hash,
                ledger_hash=block['ledger_hash'],
                previous_txlist_hash=previous_txlist_hash,
                txlist_hash=block['txlist_hash'],
                previous_messages_hash=previous_messages_hash)
            logger.info(
                'Block (re-parse): %s (hashes: L:%s / TX:%s / M:%s%s)' %
                (block['block_index'], previous_ledger_hash[-5:],
                 previous_txlist_hash[-5:], previous_messages_hash[-5:],
                 (' [overwrote %s]' %
                  previous_found_messages_hash) if previous_found_messages_hash
                 and previous_found_messages_hash != previous_messages_hash
                 else ''))

        except check.ConsensusError as e:
            message = str(e)
            if message.find('ledger_hash') != -1:
                new_ledger = get_block_ledger(memory_db, block['block_index'])
                old_ledger = get_block_ledger(prod_db, block['block_index'])
                compare_strings(old_ledger, new_ledger)
            elif message.find('txlist_hash') != -1:
                new_txlist = get_block_txlist(memory_db, block['block_index'])
                old_txlist = get_block_txlist(prod_db, block['block_index'])
                compare_strings(old_txlist, new_txlist)

            raise e
Example #19
    @classmethod
    def fromDatabaseName(cls, dbFilename, timeout=None):
        return cls(apsw.Connection(dbFilename), timeout)
Example #20
                tmpfile.write(chunk)
        keyfile.close()
        tmpfile.close()
        dbdestfile.close()

        tmpfile = open(dbtmp, "rb")
        tmptogzip = tmpfile.read()
        finaldb = open(dbdecrypt, "wb")
        d = zlib.decompressobj(16 + zlib.MAX_WBITS)
        decompressdata = d.decompress(tmptogzip)
        finaldb.write(decompressdata)
        tmpfile.close()
        finaldb.close()

        #os.system('bin\\gzip.exe -d < "' + dbtmp + '" > "' + dbdecrypt + '"' if os.name == 'nt' else 'gzip -d < "' + dbtmp + '" > "' + dbdecrypt + '" 2>&1' )
        print("--> Extracting WhatsApp data\n\n")
        txtoutput = os.path.join(case, "extracted data", "whatsapp",
                                 "messages.txt")
        txtoutfile = open(txtoutput, 'w', encoding='utf8')
        sqlconnection = apsw.Connection(dbdecrypt)
        sqlshell = apsw.Shell(stdout=txtoutfile, db=sqlconnection)
        sqlshell.process_command('.header on')
        sqlshell.process_sql('select * from messages')
        txtoutfile.close()

        os.remove(dbtmp)
        os.remove(dbnohead)

    else:
        print("--> Not extracting WhatsApp data. Reason: Not found\n\n")
Example #21
def rawtransactions_db(request):
    """Return a database object."""
    db = apsw.Connection(util_test.CURR_DIR + '/fixtures/rawtransactions.db')
    if (request.module.__name__ == 'integration_test'):
        util_test.initialise_rawtransactions_db(db)
    return db
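A hedged sketch of how a pytest test might consume this fixture; the test body is illustrative only:

# Hedged sketch: pytest injects the fixture by parameter name.
def test_rawtransactions_db_smoke(rawtransactions_db):
    assert rawtransactions_db.cursor().execute("SELECT 1").fetchall() == [(1,)]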
Example #22
def get_apsw_connection():
    if constants.dev_mode:
        pref_file = os.path.join(os.getcwd(), 'local_settings')
    else:
        pref_file = os.path.join(constants.preferences_path, 'local_settings')
    return apsw.Connection(pref_file)
Example #23
def reparse(testnet=True):
    """Reparse all transaction from the database, create a new blockchain and compare it to the old one."""
    options = dict(COUNTERPARTYD_OPTIONS)
    server.initialise(database_file=':memory:', testnet=testnet, **options)

    logger = logging.getLogger()

    if testnet:
        config.PREFIX = b'TESTXXXX'

    memory_db = database.get_connection(read_only=False)
    initialise_db(memory_db)

    data_dir = appdirs.user_data_dir(appauthor=config.XCP_NAME,
                                     appname=config.APP_NAME,
                                     roaming=True)
    prod_db_path = os.path.join(
        data_dir, '{}{}.db'.format(config.APP_NAME,
                                   '.testnet' if testnet else ''))
    assert os.path.exists(
        prod_db_path), "database path {} does not exist".format(prod_db_path)
    prod_db = apsw.Connection(prod_db_path)
    prod_db.setrowtrace(database.rowtracer)

    with memory_db.backup("main", prod_db, "main") as backup:
        backup.step()

    # Here we don't use blocks.reparse() because it reparses the db inside a transaction (`with db`).
    memory_cursor = memory_db.cursor()
    for table in blocks.TABLES + ['balances']:
        memory_cursor.execute('''DROP TABLE IF EXISTS {}'''.format(table))

    # Check that all checkpoint blocks are in the database to be tested.
    if testnet:
        CHECKPOINTS = check.CHECKPOINTS_TESTNET
    else:
        CHECKPOINTS = check.CHECKPOINTS_MAINNET
    for block_index in CHECKPOINTS.keys():
        block_exists = bool(
            list(
                memory_cursor.execute(
                    '''SELECT * FROM blocks WHERE block_index = ?''',
                    (block_index, ))))
        assert block_exists, "block #%d does not exist" % block_index

    # Clean consensus hashes if the first block hash doesn't match the checkpoint.
    checkpoints = check.CHECKPOINTS_TESTNET if config.TESTNET else check.CHECKPOINTS_MAINNET
    columns = [
        column['name']
        for column in memory_cursor.execute('''PRAGMA table_info(blocks)''')
    ]
    for field in ['ledger_hash', 'txlist_hash']:
        if field in columns:
            sql = '''SELECT {} FROM blocks  WHERE block_index = ?'''.format(
                field)
            first_hash = list(
                memory_cursor.execute(sql, (config.BLOCK_FIRST, )))[0][field]
            if first_hash != checkpoints[config.BLOCK_FIRST][field]:
                logger.info('First hash changed. Cleaning {}.'.format(field))
                memory_cursor.execute(
                    '''UPDATE blocks SET {} = NULL'''.format(field))

    blocks.initialise(memory_db)
    previous_ledger_hash = None
    previous_txlist_hash = None
    previous_messages_hash = None

    memory_cursor.execute('''SELECT * FROM blocks ORDER BY block_index''')
    for block in memory_cursor.fetchall():
        try:
            util.CURRENT_BLOCK_INDEX = block['block_index']
            previous_ledger_hash, previous_txlist_hash, previous_messages_hash, previous_found_messages_hash = blocks.parse_block(
                memory_db,
                block['block_index'],
                block['block_time'],
                previous_ledger_hash=previous_ledger_hash,
                ledger_hash=block['ledger_hash'],
                previous_txlist_hash=previous_txlist_hash,
                txlist_hash=block['txlist_hash'],
                previous_messages_hash=previous_messages_hash)
            logger.info(
                'Block (re-parse): %s (hashes: L:%s / TX:%s / M:%s%s)' %
                (block['block_index'], previous_ledger_hash[-5:],
                 previous_txlist_hash[-5:], previous_messages_hash[-5:],
                 (' [overwrote %s]' %
                  previous_found_messages_hash) if previous_found_messages_hash
                 and previous_found_messages_hash != previous_messages_hash
                 else ''))

        except check.ConsensusError as e:
            message = str(e)
            if message.find('ledger_hash') != -1:
                new_ledger = get_block_ledger(memory_db, block['block_index'])
                old_ledger = get_block_ledger(prod_db, block['block_index'])
                compare_strings(old_ledger, new_ledger)
            elif message.find('txlist_hash') != -1:
                new_txlist = get_block_txlist(memory_db, block['block_index'])
                old_txlist = get_block_txlist(prod_db, block['block_index'])
                compare_strings(old_txlist, new_txlist)
            raise e
Example #24
    def _build_connection(self):
        return apsw.Connection(self._build_sqlite_s3_uri(),
                               flags=apsw.SQLITE_OPEN_READONLY
                               | apsw.SQLITE_OPEN_URI,
                               vfs=S3FS.vfsname)
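The flags here are passed straight through to sqlite3_open_v2; for comparison, a minimal stand-alone sketch of the same read-only URI pattern against a local file (the path is hypothetical):

import apsw

# Hedged sketch: read-only URI open of a local file (path is hypothetical).
conn = apsw.Connection("file:/tmp/example.db?mode=ro",
                       flags=apsw.SQLITE_OPEN_READONLY | apsw.SQLITE_OPEN_URI)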
Example #25
File: db.py  Project: asfdfdfd/AniDB
def rmgid(gid=0):
    '''Remove a record from the anime cache.'''
    handle = apsw.Connection(db)
    handle.cursor().execute("delete from groups where gid=?", (gid, ))
    doclose(handle)
Example #26
#!/usr/bin/env python

import apsw
import collections
import sys

db = apsw.Connection(sys.argv[1])
cursor = db.cursor()

# load namespace definitions
nsName = dict()
nsID = dict()
for row in cursor.execute("select namespace_id,namespace from namespace;"):
    nsName[row[0]] = row[1]
    nsID[row[1]] = row[0]

# load namespace aliases
alias = dict()
for row in cursor.execute(
        "select namespace_id1,name1,namespace_id2,name2 from unit_name_name where namespace_id1 = namespace_id2;"
):
    n1 = (row[0], row[1])
    n2 = (row[2], row[3])
    if n1 < n2:
        alias[n2] = min(n1, alias.get(n2, n1))
    elif n1 > n2:
        alias[n1] = min(n2, alias.get(n1, n2))
print "%d namespace aliases" % (len(alias), )

# load name graph
graph = collections.defaultdict(set)
Example #27
    def test_apsw(self):
        import apsw
        conn = apsw.Connection(':memory:')
        conn.close()
Example #28
    def _open_connection(self):
        """ Opens a connection to the database. If the database doesn't exist, we create a new one and run the
            initialization SQL scripts. If the database already exists, we simply connect to it.
            Finally, we read the database version.
        """
        # check if it is in memory
        is_in_memory = self.sqlite_db_path == u":memory:"
        is_new_db = is_in_memory

        # check if database file exists
        if not is_in_memory:
            if not os.path.exists(self.sqlite_db_path):
                # create a new one
                is_new_db = True
            elif not os.path.isfile(self.sqlite_db_path):
                msg = u"Not a file: %s" % self.sqlite_db_path
                raise OSError(msg)

        # create connection
        try:
            self._connection = apsw.Connection(self.sqlite_db_path)
            self._connection.setbusytimeout(self._busytimeout)
        except CantOpenError as e:
            msg = u"Failed to open connection to %s: %s" % (
                self.sqlite_db_path, e)
            raise CantOpenError(msg)

        cursor = self.get_cursor()

        # Check integrity of the database
        check_response, = next(self.execute(u"PRAGMA quick_check"))
        if check_response != 'ok':
            msg = u"Quick integrity check of database failed"
            self._logger.error(msg)
            raise CorruptedDatabaseError(msg)

        # apply pragma
        page_size, = next(cursor.execute(u"PRAGMA page_size"))
        if page_size < 8192:
            # journal_mode and page_size only need to be set once.  because of the VACUUM this
            # is very expensive
            self._logger.info(u"begin page_size upgrade...")
            cursor.execute(u"PRAGMA journal_mode = DELETE;")
            cursor.execute(u"PRAGMA page_size = 8192;")
            cursor.execute(u"VACUUM;")
            self._logger.info(u"...end page_size upgrade")

        # http://www.sqlite.org/pragma.html
        # When synchronous is NORMAL, the SQLite database engine will still
        # pause at the most critical moments, but less often than in FULL
        # mode. There is a very small (though non-zero) chance that a power
        # failure at just the wrong time could corrupt the database in
        # NORMAL mode. But in practice, you are more likely to suffer a
        # catastrophic disk failure or some other unrecoverable hardware
        # fault.
        #
        cursor.execute(u"PRAGMA synchronous = NORMAL;")
        cursor.execute(u"PRAGMA cache_size = 10000;")

        # Niels 19-09-2012: even though my database upgraded to increase the pagesize it did not keep WAL mode?
        # Enable WAL on every startup
        cursor.execute(u"PRAGMA journal_mode = WAL;")

        # create tables if this is a new database
        if is_new_db and self.db_script_path is not None:
            self._logger.info(u"Initializing new database...")
            # check if the SQL script exists
            if not os.path.exists(self.db_script_path):
                msg = u"SQL script doesn't exist: %s" % self.db_script_path
                raise OSError(msg)
            if not os.path.isfile(self.db_script_path):
                msg = u"SQL script is not a file: %s" % self.db_script_path
                raise OSError(msg)

            try:
                f = open(self.db_script_path, "r")
                sql_script = f.read()
                f.close()
            except IOError as e:
                msg = u"Failed to load SQL script %s: %s" % (
                    self.db_script_path, e)
                raise IOError(msg)

            cursor.execute(sql_script)

        if self.db_script_path is not None:
            # read database version
            self._logger.info(u"Reading database version...")
            try:
                version_str, = next(cursor.execute(
                    u"SELECT value FROM MyInfo WHERE entry == 'version'"))
                self._version = int(version_str)
                self._logger.info(u"Current database version is %s",
                                  self._version)
            except (StopIteration, SQLError) as e:
                msg = u"Failed to load database version: %s" % e
                raise CorruptedDatabaseError(msg)
        else:
            self._version = 1
Example #29
    def start(self, args):
        self.node = args["file"].value()
        avfs = apswvfs.apswVFS()
        self.db = apsw.Connection(self.node.absolute(), vfs=avfs.vfsname)
Example #30
File: test_build.py  Project: kba/calibre
def test_apsw():
    import apsw
    conn = apsw.Connection(':memory:')
    conn.close()
    print('apsw OK!')