Example #1
    def openDB(self):
        try:
            self._config = ConfigManager()
            self.db = self._config.config[self._config.database]["database"]
            create_db = False
            if self.db == self._config.Sqlite:
                folder = self._config.config[self._config.database]["folder"]
                loc = folder + '/icepapcms.db'
                print("Using Sqlite database at %s" % loc)
                create_db = not os.path.exists(loc)
                if create_db:
                    print("No database file found, creating it")
                    if not os.path.exists(folder):
                        os.mkdir(folder)
                self._database = create_database("%s:%s" % (self.db, loc))
            else:
                server = self._config.config[self._config.database]["server"]
                user = self._config.config[self._config.database]["user"]
                pwd = self._config.config[self._config.database]["password"]
                scheme = "{}://{}:{}@{}/icepapcms".format(
                    self.db, user, pwd, server)

                if self.db == 'mysql':
                    self._database = MySQL(scheme)
                else:
                    self._database = create_database(scheme)

            self._store = Store(self._database)
            if create_db:
                self.dbOK = self.createSqliteDB()
            else:
                self.dbOK = True
        except Exception as e:
            self.log.error("Unexpected error on openDB: %s", e)
            self.dbOK = False
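Every snippet on this page reduces to the same core Storm idiom: build a Database from a URI with create_database() and wrap it in a Store. A minimal, self-contained sketch of that pattern (the file name is illustrative):

    from storm.locals import Store, create_database

    # "sqlite:" alone gives an in-memory database; appending a path uses a file.
    database = create_database("sqlite:example.db")
    store = Store(database)
    store.execute("CREATE TABLE IF NOT EXISTS demo (id INTEGER PRIMARY KEY)")
    store.commit()
    store.close()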
Example #2
    def test_check_unmodifiable_strings(self):
        # This test case asserts that data migration updates unmodifiable l10n strings
        self._initStartDB(34)

        notification_l10n = NotificationL10NFactory(self.store)

        t0 = notification_l10n.get_val("export_template", "it")

        notification_l10n.set_val("export_template", "it", "")

        t1 = notification_l10n.get_val("export_template", "it")

        self.assertEqual(t1, "")

        self.store.commit()

        # place a dummy version in the current db
        store = Store(create_database(GLSettings.db_uri))
        prv = config.PrivateFactory(store)
        self.dummy_ver = "2.XX.XX"
        prv.set_val("version", self.dummy_ver)
        self.assertEqual(prv.get_val("version"), self.dummy_ver)
        store.commit()
        store.close()

        migration.perform_data_update(self.db_file)

        store = Store(create_database(GLSettings.db_uri))
        notification_l10n = NotificationL10NFactory(store)
        t2 = notification_l10n.get_val("export_template", "it")
        self.assertEqual(t2, t0)
        store.commit()
        store.close()

        shutil.rmtree(GLSettings.db_path)
Example #3
    def test_check_unmodifiable_strings(self):
        # This test case asserts that data migration updates unmodifiable l10n strings
        self._initStartDB(34)

        notification_l10n = NotificationL10NFactory(self.store)

        t0 = notification_l10n.get_val('export_template', 'it')

        notification_l10n.set_val('export_template', 'it', '')

        t1 = notification_l10n.get_val('export_template', 'it')

        self.assertEqual(t1, '')

        self.store.commit()

        # place a dummy version in the current db
        store = Store(create_database(GLSettings.db_uri))
        prv = config.PrivateFactory(store)
        self.dummy_ver = '2.XX.XX'
        prv.set_val('version', self.dummy_ver)
        self.assertEqual(prv.get_val('version'), self.dummy_ver)
        store.commit()
        store.close()

        migration.perform_data_update(self.db_file)

        store = Store(create_database(GLSettings.db_uri))
        notification_l10n = NotificationL10NFactory(store)
        t2 = notification_l10n.get_val('export_template', 'it')
        self.assertEqual(t2, t0)
        store.commit()
        store.close()

        shutil.rmtree(GLSettings.db_path)
Example #4
    def __init__(self, table_history, old_db_file, new_db_file, start_ver):
        self.table_history = table_history
        self.old_db_file = old_db_file
        self.new_db_file = new_db_file
        self.start_ver = start_ver

        self.std_fancy = " ł "
        self.debug_info = "   [%d => %d] " % (start_ver, start_ver + 1)

        for k, v in table_history.iteritems():
            # +1 because the count starts from 0,
            # -8 because releases before the 8th are not supported anymore
            length = DATABASE_VERSION + 1 - 8
            if len(v) != length:
                msg = 'Expecting a table with {} statuses ({})'.format(length, k)
                raise TypeError(msg)

        log.msg('{} Opening old DB: {}'.format(self.debug_info, old_db_file))
        old_database = create_database('sqlite:' + self.old_db_file)
        self.store_old = Store(old_database)

        GLSetting.db_file = new_db_file

        new_database = create_database('sqlite:' + new_db_file)
        self.store_new = Store(new_database)

        if self.start_ver + 1 == DATABASE_VERSION:
            log.msg('{} Acquire SQL schema {}'.format(self.debug_info, GLSetting.db_schema_file))

            if not os.access(GLSetting.db_schema_file, os.R_OK):
                log.msg('Unable to access', GLSetting.db_schema_file)
                raise IOError('Unable to access db schema file')

            with open(GLSetting.db_schema_file) as f:
                create_queries = ''.join(f).split(';')
                for create_query in create_queries:
                    try:
                        self.store_new.execute(create_query + ';')
                    except OperationalError:
                        log.msg('OperationalError in "{}"'.format(create_query))
            self.store_new.commit()
            return
            # return here and manage the migrant versions here:

        for k, v in self.table_history.iteritems():

            create_query = self.get_right_sql_version(k, self.start_ver + 1)
            if not create_query:
                # table not present in the version
                continue

            try:
                self.store_new.execute(create_query + ';')
            except OperationalError as excep:
                log.msg('{} OperationalError in [{}]'.format(self.debug_info, create_query))
                raise excep

        self.store_new.commit()
Example #5
    def __init__(self, table_history, old_db_file, new_db_file, start_ver):
        self.table_history = table_history
        self.old_db_file = old_db_file
        self.new_db_file = new_db_file
        self.start_ver = start_ver

        self.std_fancy = " ł "
        self.debug_info = "   [%d => %d] " % (start_ver, start_ver + 1)

        for k, v in table_history.iteritems():
            length = DATABASE_VERSION + 1 - FIRST_DATABASE_VERSION_SUPPORTED
            if len(v) != length:
                msg = 'Expecting a table with {} statuses ({})'.format(length, k)
                raise TypeError(msg)

        log.msg('{} Opening old DB: {}'.format(self.debug_info, old_db_file))
        old_database = create_database('sqlite:' + self.old_db_file)
        self.store_old = Store(old_database)

        GLSettings.db_file = new_db_file

        new_database = create_database('sqlite:' + new_db_file)
        self.store_new = Store(new_database)

        if self.start_ver + 1 == DATABASE_VERSION:
            log.msg('{} Acquire SQL schema {}'.format(self.debug_info, GLSettings.db_schema_file))

            if not os.access(GLSettings.db_schema_file, os.R_OK):
                log.msg('Unable to access', GLSettings.db_schema_file)
                raise IOError('Unable to access db schema file')

            with open(GLSettings.db_schema_file) as f:
                create_queries = ''.join(f).split(';')
                for create_query in create_queries:
                    try:
                        self.store_new.execute(create_query + ';')
                    except OperationalError:
                        log.msg('OperationalError in "{}"'.format(create_query))
            self.store_new.commit()
            return
            # return here and manage the migrant versions here:

        for k, v in self.table_history.iteritems():

            create_query = self.get_right_sql_version(k, self.start_ver + 1)
            if not create_query:
                # table not present in the version
                continue

            try:
                self.store_new.execute(create_query + ';')
            except OperationalError as excep:
                log.msg('{} OperationalError in [{}]'.format(self.debug_info, create_query))
                raise excep

        self.store_new.commit()
Example #6
    def setUp(self):
        super(PatchTest, self).setUp()

        self.patchdir = self.makeDir()
        self.pkgdir = os.path.join(self.patchdir, "mypackage")
        os.makedirs(self.pkgdir)

        f = open(os.path.join(self.pkgdir, "__init__.py"), "w")
        f.write("shared_data = []")
        f.close()

        # Order of creation here is important to try to screw up the
        # patch ordering, as os.listdir returns in order of mtime (or
        # something).
        for pname, data in [("patch_380.py", patch_test_1),
                            ("patch_42.py", patch_test_0)]:
            self.add_module(pname, data)

        sys.path.append(self.patchdir)

        self.filename = self.makeFile()
        self.uri = "sqlite:///%s" % self.filename
        self.store = Store(create_database(self.uri))

        self.store.execute("CREATE TABLE patch "
                           "(version INTEGER NOT NULL PRIMARY KEY)")

        self.assertFalse(self.store.get(Patch, (42)))
        self.assertFalse(self.store.get(Patch, (380)))

        import mypackage
        self.mypackage = mypackage

        # Create another connection just to keep track of the state of the
        # whole transaction manager.  See the assertion functions below.
        self.another_store = Store(create_database("sqlite:"))
        self.another_store.execute("CREATE TABLE test (id INT)")
        self.another_store.commit()
        self.prepare_for_transaction_check()

        class Committer(object):

            def commit(committer):
                self.store.commit()
                self.another_store.commit()

            def rollback(committer):
                self.store.rollback()
                self.another_store.rollback()

        self.committer = Committer()
        self.patch_applier = PatchApplier(self.store, self.mypackage,
                                          self.committer)
Example #7
    def setUp(self):
        super(PatchApplierTest, self).setUp()

        self.patchdir = self.makeDir()
        self.pkgdir = os.path.join(self.patchdir, "mypackage")
        os.makedirs(self.pkgdir)

        f = open(os.path.join(self.pkgdir, "__init__.py"), "w")
        f.write("shared_data = []")
        f.close()

        # Order of creation here is important to try to screw up the
        # patch ordering, as os.listdir returns in order of mtime (or
        # something).
        for pname, data in [("patch_380.py", patch_test_1),
                            ("patch_42.py", patch_test_0)]:
            self.add_module(pname, data)

        sys.path.append(self.patchdir)

        self.filename = self.makeFile()
        self.uri = "sqlite:///%s" % self.filename
        self.store = Store(create_database(self.uri))

        self.store.execute("CREATE TABLE patch "
                           "(version INTEGER NOT NULL PRIMARY KEY)")

        self.assertFalse(self.store.get(Patch, (42)))
        self.assertFalse(self.store.get(Patch, (380)))

        import mypackage
        self.mypackage = mypackage
        self.patch_set = PatchSet(mypackage)

        # Create another connection just to keep track of the state of the
        # whole transaction manager.  See the assertion functions below.
        self.another_store = Store(create_database("sqlite:"))
        self.another_store.execute("CREATE TABLE test (id INT)")
        self.another_store.commit()
        self.prepare_for_transaction_check()

        class Committer(object):
            def commit(committer):
                self.store.commit()
                self.another_store.commit()

            def rollback(committer):
                self.store.rollback()
                self.another_store.rollback()

        self.committer = Committer()
        self.patch_applier = PatchApplier(self.store, self.patch_set,
                                          self.committer)
Example #8
    def test_detect_and_fix_cfg_change(self):
        store = Store(create_database(GLSettings.db_uri))
        ret = config.is_cfg_valid(store)
        self.assertFalse(ret)
        store.close()

        migration.perform_data_update(self.db_file)

        store = Store(create_database(GLSettings.db_uri))
        prv = config.PrivateFactory(store)
        self.assertEqual(prv.get_val(u'version'), __version__)
        self.assertEqual(prv.get_val(u'xx_smtp_password'), self.dp)
        ret = config.is_cfg_valid(store)
        self.assertTrue(ret)
        store.close()
Example #9
    def test_detect_and_fix_cfg_change(self):
        store = Store(create_database(GLSettings.db_uri))
        ret = config.is_cfg_valid(store)
        self.assertFalse(ret)
        store.close()

        migration.perform_data_update(self.db_file)

        store = Store(create_database(GLSettings.db_uri))
        prv = config.PrivateFactory(store)
        self.assertEqual(prv.get_val('version'), __version__)
        self.assertEqual(prv.get_val('xx_smtp_password'), self.dp)
        ret = config.is_cfg_valid(store)
        self.assertTrue(ret)
        store.close()
Example #10
    def test_version_change_success(self):
        migration.perform_data_update(self.db_file)

        store = Store(create_database(GLSettings.db_uri))
        prv = config.PrivateFactory(store)
        self.assertEqual(prv.get_val(u'version'), __version__)
        store.close()
Example #11
 def on_abm_productos_clicked(self):
     DATABASE = create_database('sqlite:abm.db')
     almacen = Store(DATABASE)
     cm = ClienteManager(productos, reset=False)
     self.productos = AddProductos(manager=cm, managers=[])
     #self.cliente.setWindowIcon( self.btLibroDiario.icon() )
     self.productos.show()
Example #12
    def setUp(self):
        helpers.init_glsettings_for_unit_tests()

        GLSettings.db_path = os.path.join(GLSettings.ramdisk_path, 'db_test')
        os.mkdir(GLSettings.db_path)
        db_name = 'glbackend-%d.db' % DATABASE_VERSION
        db_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'db', 'populated', db_name)
        shutil.copyfile(db_path, os.path.join(GLSettings.db_path, db_name))

        self.db_file = os.path.join(GLSettings.db_path, db_name)
        GLSettings.db_uri = GLSettings.make_db_uri(self.db_file)

        # place a dummy version in the current db
        store = Store(create_database(GLSettings.db_uri))
        prv = config.PrivateFactory(store)
        self.dummy_ver = '2.XX.XX'
        prv.set_val('version', self.dummy_ver)
        self.assertEqual(prv.get_val('version'), self.dummy_ver)
        store.commit()
        store.close()

        # backup various mocks that we will use
        self._bck_f = config.is_cfg_valid
        GLConfig['private']['xx_smtp_password'] = GLConfig['private'].pop('smtp_password')
        self.dp = u'yes_you_really_should_change_me'
Example #13
    def setUp(self):
        helpers.init_glsettings_for_unit_tests()

        GLSettings.db_path = os.path.join(GLSettings.ramdisk_path, 'db_test')
        shutil.rmtree(GLSettings.db_path, True)
        os.mkdir(GLSettings.db_path)
        db_name = 'glbackend-%d.db' % DATABASE_VERSION
        db_path = os.path.join(os.path.dirname(os.path.realpath(__file__)),
                               'db', 'populated', db_name)
        shutil.copyfile(db_path, os.path.join(GLSettings.db_path, db_name))

        self.db_file = os.path.join(GLSettings.db_path, db_name)
        GLSettings.db_uri = GLSettings.make_db_uri(self.db_file)

        # place a dummy version in the current db
        store = Store(create_database(GLSettings.db_uri))
        prv = config.PrivateFactory(store)
        self.dummy_ver = '2.XX.XX'
        prv.set_val(u'version', self.dummy_ver)
        self.assertEqual(prv.get_val(u'version'), self.dummy_ver)
        store.commit()
        store.close()

        # backup various mocks that we will use
        self._bck_f = config.is_cfg_valid
        GLConfig['private']['xx_smtp_password'] = GLConfig['private'].pop(
            'smtp_password')
        self.dp = u'yes_you_really_should_change_me'
Example #14
 def __init_database(self):
     """
     Creates the sqlite database instance and checks if the database exists in biodb.
     """
     database = create_database("sqlite:%s" % biodb_sql_db_path)
     print "Created storm database from %s." % biodb_sql_db_path
     self.store = Store(database)
Example #15
    def test_make_commits_transaction_once(self):
        """
        L{ZStormResourceManager.make} commits schema changes only once
        across all stores, after all patch and delete statements have
        been executed.
        """
        database2 = {"name": "test2",
                     "uri": "sqlite:///%s" % self.makeFile(),
                     "schema": self.databases[0]["schema"]}
        self.databases.append(database2)
        other_store = Store(create_database(database2["uri"]))
        for store in [self.store, other_store]:
            store.execute("CREATE TABLE patch "
                          "(version INTEGER NOT NULL PRIMARY KEY)")
            store.execute("CREATE TABLE test (foo TEXT)")
            store.execute("INSERT INTO test (foo) VALUES ('data')")
            store.commit()

        with CaptureTracer() as tracer:
            zstorm = self.resource.make([])

        self.assertEqual(["COMMIT", "COMMIT"], tracer.queries[-2:])
        store1 = zstorm.get("test")
        store2 = zstorm.get("test2")
        self.assertEqual([], list(store1.execute("SELECT foo FROM test")))
        self.assertEqual([], list(store2.execute("SELECT foo FROM test")))
Example #16
def setup_request():
    # Read the configuration.
    stream = open('%s/config.yaml' % request.environ['UNISON_ROOT'])
    g.config = yaml.load(stream)
    # Set up the database.
    database = create_database(g.config['database']['string'])
    g.store = Store(database)
Example #17
def userDatabase(uri=DB_FILE_NAME):
    """
    Give a user database
    """
    filename, uri = parseURI(uri)
    db = locals.create_database(uri)
    if filename is not None:
        # test existence of the database file so as to throw an exception when
        # the bootstrap script was not run.  Test it before creating the Store
        # because creating the Store creates the file whether it makes sense
        # to or not.
        open(filename).close()
        store = locals.Store(db)
    else:
        store = locals.Store(db)
        from .usersql import SQL_SCRIPT
        for sql in SQL_SCRIPT:
            store.execute(sql)
    assert store is not None

    global theStore  # UGLY but necessary?  many many things in resource need
                     # this; passing it around would be heinous.
    theStore = store

    return store
Example #18
 def initialize(self, debug=None):
     """See `IDatabase`."""
     # Calculate the engine url.
     url = expand(config.database.url, config.paths)
     log.debug('Database url: %s', url)
     # XXX By design of SQLite, database file creation does not honor
     # umask.  See their ticket #1193:
     # http://www.sqlite.org/cvstrac/tktview?tn=1193,31
     #
     # This sucks for us because the mailman.db file /must/ be group
     # writable, however even though we guarantee our umask is 002 here, it
     # still gets created without the necessary g+w permission, due to
     # SQLite's policy.  This should only affect SQLite engines because its
     # the only one that creates a little file on the local file system.
     # This kludges around their bug by "touch"ing the database file before
     # SQLite has any chance to create it, thus honoring the umask and
     # ensuring the right permissions.  We only try to do this for SQLite
     # engines, and yes, we could have chmod'd the file after the fact, but
     # half dozen and all...
     self.url = url
     self._prepare(url)
     database = create_database(url)
     store = Store(database, GenerationalCache())
     database.DEBUG = (as_boolean(config.database.debug)
                       if debug is None else debug)
     self.store = store
     store.commit()
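The long comment in initialize() explains that SQLite ignores the umask when it creates its database file, so the file is "touched" first to guarantee group-write permission. A hedged sketch of what that pre-touch step could look like (the helper name and mode are illustrative, not the project's actual _prepare() implementation):

    import os

    def touch_db_file(path, mode=0o664):
        # Create the empty file ourselves so it ends up group-writable,
        # instead of letting SQLite create it with restrictive permissions.
        fd = os.open(path, os.O_CREAT | os.O_WRONLY, mode)
        os.close(fd)
        os.chmod(path, mode)  # enforce the mode even if the umask masked it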
Example #19
    def test_mig_37_valid_tor_hs_key(self):
        self._initStartDB(36)

        from globaleaks.db.migrations import update_37
        t = update_37.TOR_DIR
        update_37.TOR_DIR = GLSettings.db_path

        pk_path = os.path.join(update_37.TOR_DIR, 'private_key')
        hn_path = os.path.join(update_37.TOR_DIR, 'hostname')

        shutil.copy(os.path.join(helpers.DATA_DIR, 'tor/private_key'), pk_path)
        shutil.copy(os.path.join(helpers.DATA_DIR, 'tor/hostname'), hn_path)

        ret = update_db()
        self.assertEqual(ret, None)

        new_uri = GLSettings.make_db_uri(
            os.path.join(GLSettings.db_path, GLSettings.db_file_name))
        store = Store(create_database(new_uri))
        hs = config.NodeFactory(store).get_val('onionservice')
        pk = config.PrivateFactory(store).get_val('tor_onion_key')

        self.assertEqual('lftx7dbyvlc5txtl.onion', hs)
        with open(os.path.join(helpers.DATA_DIR,
                               'tor/ephemeral_service_key')) as f:
            saved_key = f.read().strip()
        self.assertEqual(saved_key, pk)

        store.close()

        shutil.rmtree(GLSettings.db_path)
        update_37.TOR_DIR = t
Example #20
    def test_make_commits_transaction_once(self):
        """
        L{ZStormResourceManager.make} commits schema changes only once
        across all stores, after all patch and delete statements have
        been executed.
        """
        database2 = {
            "name": "test2",
            "uri": "sqlite:///%s" % self.makeFile(),
            "schema": self.databases[0]["schema"]
        }
        self.databases.append(database2)
        other_store = Store(create_database(database2["uri"]))
        for store in [self.store, other_store]:
            store.execute("CREATE TABLE patch "
                          "(version INTEGER NOT NULL PRIMARY KEY)")
            store.execute("CREATE TABLE test (foo TEXT)")
            store.execute("INSERT INTO test (foo) VALUES ('data')")
            store.commit()

        with CaptureTracer() as tracer:
            zstorm = self.resource.make([])

        self.assertEqual(["COMMIT", "COMMIT"], tracer.queries[-2:])
        store1 = zstorm.get("test")
        store2 = zstorm.get("test2")
        self.assertEqual([], list(store1.execute("SELECT foo FROM test")))
        self.assertEqual([], list(store2.execute("SELECT foo FROM test")))
Example #21
    def insert_data(self):
        """
        Return the SQL syntax needed to insert the data already present
        in the table.
        """

        store = Store(create_database(config.Database().uri))
        registers = []
        rows = store.find(self.model.__class__)
        fields = [
            r._detect_attr_name(self.model.__class__)
            for r in self.model._storm_columns.keys()
        ]
        for r in rows:
            tmp_row = {}
            for field in fields:
                tmp_row[field] = getattr(r, field)
            registers.append(tmp_row)

        if self.__class__.__name__ == 'MySQL':
            commas = '`'
        else:
            commas = "'"

        query = ''
        for register in registers:
            query += ('INSERT INTO {}{}{} ({}) VALUES ({});\n'.format(
                commas, self.model.__storm_table__, commas,
                ', '.join(register.keys()),
                ', '.join([(str(field) if type(field) is not unicode else
                            "'{}'".format(field))
                           for field in register.values()])))

        return query
Example #22
def setup_request():
    # Read the configuration.
    stream = open('%s/config.yaml' % request.environ['UNISON_ROOT'])
    g.config = yaml.load(stream)
    # Set up the database.
    database = create_database(g.config['database']['string'])
    g.store = Store(database)
Example #23
File: db.py Project: teamsonic/wld
def getDB():
    """
    I return a connection to the database
    """
    db_uri = getURI() 
    db = create_database("postgres://*****:*****@127.0.0.1/postgres")
    return db 
Example #24
    def test_version_change_success(self):
        migration.perform_data_update(self.db_file)

        store = Store(create_database(GLSettings.db_uri))
        prv = config.PrivateFactory(store)
        self.assertEqual(prv.get_val('version'), __version__)
        store.close()
Example #25
 def postconditions_34(self):
     store = Store(create_database(GLSettings.db_uri))
     notification_l10n = NotificationL10NFactory(store)
     x = notification_l10n.get_val(u'export_template', u'it')
     self.assertNotEqual(x, 'unmodifiable')
     store.commit()
     store.close()
Example #26
def getDB():
    """
    I return a connection to the database
    """
    db_uri = getURI()
    db = create_database(
        "postgres://*****:*****@127.0.0.1/postgres")
    return db
Example #27
def get_store():
    global _database, _store
    if not _database:
        _database = create_database('sqlite:///%s' % db_uri)
    if not _store:
        _store = Store(_database)

    return _store
Example #28
    def test_migration_error_with_removed_language(self):
        store = Store(create_database(GLSettings.db_uri))
        zyx = EnabledLanguage('zyx')
        store.add(zyx)
        store.commit()
        store.close()

        self.assertRaises(Exception, migration.perform_data_update, self.db_file)
Example #29
 def preconditions_34(self):
     store = Store(create_database(self.start_db_uri))
     notification_l10n = NotificationL10NFactory(store)
     notification_l10n.set_val(u'export_template', u'it', 'unmodifiable')
     x = notification_l10n.get_val(u'export_template', u'it')
     self.assertTrue(x, 'unmodifiable')
     store.commit()
     store.close()
Example #30
    def __open_db(self, pathname):
        #db_url = 'sqlite:/:memory:'
        db_url = 'sqlite:' + pathname
        db_url += '?debug=1'
        storm.database.DEBUG = True
        database = stm.create_database(db_url)
        store = stm.Store(database)
        return store
Example #31
def setup_db(uri):
    db = storm.create_database(uri)
    store = storm.Store(db)

    store.execute('create table if not exists tasks(id varchar(36) not null primary key, eta datetime not null, content text)')
    store.execute('create index if not exists eta_idx on tasks(eta asc)')

    return store
Example #32
def connection():
  """Returns database connection."""
  try:
    if not flask.g.db:
      raise AttributeError()
  except (KeyError, AttributeError):
    flask.g.db = Store(create_database(flask.current_app.config['STORM_DATABASE_URI']))
  return flask.g.db
Example #33
def connection():
    """Returns database connection."""
    try:
        if not flask.g.db:
            raise AttributeError()
    except (KeyError, AttributeError):
        flask.g.db = Store(
            create_database(flask.current_app.config['STORM_DATABASE_URI']))
    return flask.g.db
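A hedged companion to the two connection() helpers above: closing the cached store when the Flask application context is torn down. The app object and the teardown hook are assumptions added for illustration, not part of the original snippets:

    @app.teardown_appcontext
    def close_db(exception=None):
        # Pop the cached store off flask.g, if one was opened, and close it.
        db = flask.g.pop('db', None)
        if db is not None:
            db.close()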
Example #34
    def connect(self):
        opts = Config()

        self.database = create_database('mysql://' + opts.db_user_out + ':' +
                                        opts.db_password_out + '@' +
                                        opts.db_hostname_out + ':' +
                                        opts.db_port_out + '/' +
                                        opts.db_database_out)
        self.store = Store(self.database)
Example #35
    def test_migration_error_with_removed_language(self):
        store = Store(create_database(GLSettings.db_uri))
        zyx = EnabledLanguage('zyx')
        store.add(zyx)
        store.commit()
        store.close()

        self.assertRaises(Exception, migration.perform_data_update,
                          self.db_file)
Example #36
def create_store(url, debug):
    if debug:
        storm.tracer.debug(True, stream=sys.stdout)
    database = create_database(url)
    dbtype = url.partition(":")[0]
    store = Store(database)
    dbschema = Schema(schema.CREATES[dbtype], [], [], schema)
    dbschema.upgrade(store)
    return StormStore(store, debug)
Example #37
    def _connect(self):
        opts = Config()

        self.database = create_database('mysql://' + opts.db_user_out + ':'
                                        + opts.db_password_out + '@'
                                        + opts.db_hostname_out + ':'
                                        + opts.db_port_out + '/'
                                        + opts.db_database_out)
        self.store = Store(self.database)
Example #38
def perform_data_update(dbfile):
    new_tmp_store = Store(create_database(GLSettings.make_db_uri(dbfile)))
    try:
        db_perform_data_update(new_tmp_store)
        new_tmp_store.commit()
    except:
        new_tmp_store.rollback()
        raise
    finally:
        new_tmp_store.close()
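perform_data_update() follows a commit-on-success, rollback-on-error, always-close pattern. A hedged sketch of the same pattern factored into a reusable context manager (an illustrative helper, not part of the original module):

    from contextlib import contextmanager

    from storm.locals import Store, create_database

    @contextmanager
    def transactional_store(db_uri):
        store = Store(create_database(db_uri))
        try:
            yield store
            store.commit()
        except Exception:
            store.rollback()
            raise
        finally:
            store.close()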
Example #39
 def setUp(self, dsn=conf.LITE_DSN):
     """should load the dataset"""
     from storm.uri import URI
     from storm.locals import create_database, Store
     from storm.tracer import debug
     #debug(1)
     self.store = Store(create_database(URI(dsn)))
     self.fixture.store = self.store
     
     setup_db(self.store)
Example #40
    def test_ver_change_exception(self):
        # Explicitly throw an exception in managed_ver_update via is_cfg_valid
        config.is_cfg_valid = apply_gen(throw_excep)

        self.assertRaises(IOError, migration.perform_data_update, self.db_file)

        store = Store(create_database(GLSettings.db_uri))
        prv = config.PrivateFactory(store)
        self.assertEqual(prv.get_val('version'), self.dummy_ver)
        store.close()
Example #41
    def __init__(self, name, connection_string, min, max, growup, **options):

        ConnectionPool.__init__(self, name, connection_string, min, max,
                                growup, **options)
        self.__life_time = options.get('life_time')
        if self.__life_time is None or self.__life_time < 0:
            # Default is 8 hours.
            self.__life_time = 8 * 60 * 60

        self.__database = create_database(self._connection_string)
Example #42
    def setUp(self, dsn=conf.LITE_DSN):
        """should load the dataset"""
        from storm.uri import URI
        from storm.locals import create_database, Store
        from storm.tracer import debug
        #debug(1)
        self.store = Store(create_database(URI(dsn)))
        self.fixture.store = self.store

        setup_db(self.store)
Example #43
    def test_ver_change_exception(self):
        # Explicitly throw an exception in managed_ver_update via is_cfg_valid
        config.is_cfg_valid = apply_gen(throw_excep)

        self.assertRaises(IOError, migration.perform_data_update, self.db_file)

        store = Store(create_database(GLSettings.db_uri))
        prv = config.PrivateFactory(store)
        self.assertEqual(prv.get_val(u'version'), self.dummy_ver)
        store.close()
Example #44
def do_statspollute(dbfile):

    # source
    gl_database = create_database("sqlite:%s" % dbfile)
    source_store = Store(gl_database)

    stats = source_store.find(models.Stats)

    counter = 0
    for s in stats:
        source_store.remove(s)
        counter += 1

    print "removed %d entry in stats" % counter

    counter = 0
    # 21 days in the past
    for past_hours in xrange(24 * 7 * 3):
        past_hours += 4
        when = utc_past_date(hours=past_hours)

        newstat = models.Stats()
        newstat.freemb = randint(1000, 1050)
        newstat.year = when.isocalendar()[0]
        newstat.week = when.isocalendar()[1]

        level = round((randint(0, 1000) / 240.0), 1) - 2

        def random_pollution():
            return int(randint(0,11) + (5 * level))

        activity_fake = {
            'successfull_logins': random_pollution(),
            'failed_logins': random_pollution(),
            'started_submissions': random_pollution(),
            'completed_submissions': random_pollution(),
            'uploaded_files': int(randint(0,11) + (5  * level)),
            'appended_files': random_pollution(),
            'wb_comments': random_pollution(),
            'wb_messages': random_pollution(),
            'receiver_comments': random_pollution(),
            'receiver_messages': random_pollution()
        }

        for k, v in activity_fake.iteritems():
            if v < 0:
                activity_fake[k] = 0

        newstat.start = when
        newstat.summary = activity_fake
        counter += 1
        source_store.add(newstat)

    print "Committing %d stats" % counter
    source_store.commit()
Example #45
    def insert_data(self, scheme):
        """
        Return the SQL syntax needed to insert the data already present
        in the table.
        """

        if scheme is not None:
            store = Store(create_database(config.Database().uri[scheme]))
        else:
            store = Store(create_database(config.Database().uri))
        registers = []
        rows = store.find(self.model.__class__)
        fields = [
            r._detect_attr_name(self.model.__class__) for r in
            self.model._storm_columns.keys()
        ]
        for r in rows:
            tmp_row = {}
            for field in fields:
                tmp_row[field] = getattr(r, field)
            registers.append(tmp_row)

        if self.__class__.__name__ == 'MySQL':
            commas = '`'
        else:
            commas = "'"

        query = ''
        for register in registers:
            query += ('INSERT INTO {}{}{} ({}) VALUES ({});\n'.format(
                commas,
                self.model.__storm_table__,
                commas,
                ', '.join(register.keys()),
                ', '.join([(
                    str(field) if type(field) is not unicode
                    else "'{}'".format(field))
                    for field in register.values()
                ])
            ))

        return query
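Both insert_data() variants build INSERT statements by string interpolation, which is only safe for trusted values. For untrusted data, passing parameters to Store.execute() lets the database driver do the quoting; a hedged sketch, assuming an SQLite backend (which uses "?" placeholders) and an illustrative table name:

    from storm.locals import Store, create_database

    store = Store(create_database("sqlite:example.db"))
    store.execute("CREATE TABLE IF NOT EXISTS user_config (var_name TEXT, value TEXT)")
    store.execute(
        "INSERT INTO user_config (var_name, value) VALUES (?, ?)",
        ("version", "2.XX.XX"))
    store.commit()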
Example #46
def db():

    tid = threading.current_thread()

    try:
        return _dbpool[tid]
    except KeyError:
        database = create_database("sqlite:graph.db")
        store = Store(database)
        _dbpool[tid] = store
        return store
Example #47
 def get_store(self, app_package=None):
     if not self._store:
         app_package = app_package or self.plugin.app_package
         c = config[app_package]['db']
         db = create_database('%s://%s:%s@%s%s/%s' % 
                              (c['interface'], 
                               c['username'], c['password'],
                               c['host'], c['port'] and ':%s' % c['port'],
                               c['database']))
         self._store = store = Store(db)
     return self._store
Example #48
    def test_version_change_not_ok(self):
        # Set is_cfg_valid to False during the managed version update
        config.is_cfg_valid = apply_gen(mod_bool)

        self.assertRaises(Exception, migration.perform_data_update, self.db_file)

        # Ensure the rollback has succeeded
        store = Store(create_database(GLSettings.db_uri))
        prv = config.PrivateFactory(store)
        self.assertEqual(prv.get_val('version'), self.dummy_ver)
        store.close()
Example #49
    def connect(connection_string):
        '''Connects to a given database and prepares its structure.'''
        database = create_database(connection_string)
        ActiveRecord.store = Store(database)
        for type in ActiveRecord.subclasses:
            schema = type.table_schema
            if isinstance(schema, str) or isinstance(schema, unicode):
                ActiveRecord.store.execute(type.table_schema)
            else:
                for statement in schema:
                    ActiveRecord.store.execute(statement)
Example #50
 def init_storm(self, store, connection):
     try:
         import storm
         from storm.locals import create_database
         from torweb.db import CacheStore
         database = create_database(connection)
         cache_store = CacheStore(database)
         setattr(self, store, cache_store)
         setattr(self.app, store, cache_store)
     except Exception as e:
         print e
Example #51
 def init_storm(self, store, connection):
     try:
         import storm
         from storm.locals import create_database
         from torweb.db import CacheStore
         database = create_database(connection)
         cache_store = CacheStore(database)
         setattr(self, store, cache_store)
         setattr(self.app, store, cache_store)
     except Exception as e:
         print e
Example #52
def before_request():
    # If the database file does not exist, execute dbinit.sql to
    # initialize it
    script_dir = os.path.dirname(__file__)
    database_abs = os.path.join(script_dir, app.config["DATABASE_FILENAME"])
    dbinit_abs = os.path.join(script_dir, "dbinit.sql")
    if os.path.exists(database_abs) == False:
        os.system("sqlite3 " + database_abs + " < " + dbinit_abs)

    g.db = create_database("sqlite:///" + database_abs)
    g.store = Store(g.db)
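before_request() above shells out to the sqlite3 command-line tool to run dbinit.sql. A hedged alternative sketch that executes the same init script through Python's built-in sqlite3 module instead of os.system (a substitution that reuses the variable names above, not the original code):

    import sqlite3

    if not os.path.exists(database_abs):
        with sqlite3.connect(database_abs) as conn:
            with open(dbinit_abs) as f:
                conn.executescript(f.read())  # run the whole init script at once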
Example #53
def db():

    tid = threading.current_thread()

    try:
        return _dbpool[tid]
    except KeyError:
        database = create_database("sqlite:graph.db")
        store = Store(database)
        _dbpool[tid] = store
        return store
Example #54
    def test_version_change_not_ok(self):
        # Set is_cfg_valid to False during the managed version update
        config.is_cfg_valid = apply_gen(mod_bool)

        self.assertRaises(Exception, migration.perform_data_update,
                          self.db_file)

        # Ensure the rollback has succeeded
        store = Store(create_database(GLSettings.db_uri))
        prv = config.PrivateFactory(store)
        self.assertEqual(prv.get_val(u'version'), self.dummy_ver)
        store.close()
Example #55
 def set_config(self, **kwargs):
     """Set the configuration of this back-end."""
     uri = kwargs['uri']
     database = create_database(uri)
     self.store = Store(database)
     self.logger = logging.getLogger('StormStorageBackend')
     handler = logging.StreamHandler()
     formatter = logging.Formatter(kwargs['log_format'])
     handler.setFormatter(formatter)
     self.logger.addHandler(handler)
     self.logger.setLevel(
         logging.__getattribute__(kwargs['log_level']))
Example #56
    def test_trim_value_to_range(self):
        store = Store(create_database(GLSettings.db_uri))

        nf = config.NodeFactory(store)
        fake_cfg = nf.get_cfg(u'wbtip_timetolive')

        self.assertRaises(errors.InvalidModelInput, fake_cfg.set_v, 3650)

        fake_cfg.value = {'v': 3650}
        store.commit()

        MigrationBase.trim_value_to_range(nf, u'wbtip_timetolive')
        self.assertEqual(nf.get_val(u'wbtip_timetolive'), 365 * 2)