Example #1
0
def init_tables():
    """Create the ``api_package_stats`` table on the model engine if missing.

    The table tracks per-package visit counts, keyed by package id.  The
    ``Table`` registers itself on the ``MetaData`` as a side effect, so no
    local reference needs to be kept (the original bound it to an unused
    variable).
    """
    metadata = MetaData()
    Table('api_package_stats', metadata,
          Column('package_id', String(60), primary_key=True),
          Column('visits', Integer))
    # create_all() skips tables that already exist (checkfirst defaults on).
    metadata.create_all(model.meta.engine)
Example #2
0
 def setup_class(cls):
     """Create FTS3 virtual tables via raw DDL, reflect them, and seed rows.

     SQLAlchemy cannot emit ``CREATE VIRTUAL TABLE`` itself, so the tables
     are created with ``testing.db.execute()`` and then reflected with
     ``autoload=True``.  Results are published as module-level globals for
     the tests in this class.
     """
     global metadata, cattable, matchtable
     metadata = MetaData(testing.db)
     testing.db.execute("""
     CREATE VIRTUAL TABLE cattable using FTS3 (
         id INTEGER NOT NULL,
         description VARCHAR(50),
         PRIMARY KEY (id)
     )
     """)
     cattable = Table('cattable', metadata, autoload=True)
     testing.db.execute("""
     CREATE VIRTUAL TABLE matchtable using FTS3 (
         id INTEGER NOT NULL,
         title VARCHAR(200),
         category_id INTEGER NOT NULL,
         PRIMARY KEY (id)
     )
     """)
     matchtable = Table('matchtable', metadata, autoload=True)
     # The reflected tables already exist, so create_all() (checkfirst by
     # default) creates nothing new here.
     metadata.create_all()
     cattable.insert().execute([{'id': 1, 'description': 'Python'},
                               {'id': 2, 'description': 'Ruby'}])
     matchtable.insert().execute([{'id': 1, 'title'
                                 : 'Agile Web Development with Rails'
                                 , 'category_id': 2}, {'id': 2,
                                 'title': 'Dive Into Python',
                                 'category_id': 1}, {'id': 3, 'title'
                                 : "Programming Matz's Ruby",
                                 'category_id': 2}, {'id': 4, 'title'
                                 : 'The Definitive Guide to Django',
                                 'category_id': 1}, {'id': 5, 'title'
                                 : 'Python in a Nutshell',
                                 'category_id': 1}])
Example #3
0
    def test_boolean(self):
        """Test that the boolean only treats 1 as True

        """

        meta = MetaData(testing.db)
        t = Table('bool_table', meta, Column('id', Integer,
                  primary_key=True), Column('boo',
                  Boolean(create_constraint=False)))
        try:
            meta.create_all()
            # With create_constraint=False there is no CHECK constraint, so
            # SQLite accepts string and integer values alike; the point of
            # the test is which of them round-trip as True.
            testing.db.execute("INSERT INTO bool_table (id, boo) "
                               "VALUES (1, 'false');")
            testing.db.execute("INSERT INTO bool_table (id, boo) "
                               "VALUES (2, 'true');")
            testing.db.execute("INSERT INTO bool_table (id, boo) "
                               "VALUES (3, '1');")
            testing.db.execute("INSERT INTO bool_table (id, boo) "
                               "VALUES (4, '0');")
            testing.db.execute('INSERT INTO bool_table (id, boo) '
                               'VALUES (5, 1);')
            testing.db.execute('INSERT INTO bool_table (id, boo) '
                               'VALUES (6, 0);')
            # Only rows 3 and 5 are expected back as True.
            eq_(t.select(t.c.boo).order_by(t.c.id).execute().fetchall(),
                [(3, True), (5, True)])
        finally:
            # Drop the table even if the assertions fail.
            meta.drop_all()
Example #4
0
    def post(self, request, entries_id=None):
        """Handle a literature-entry form POST: save a new entry or edit one.

        ``entries_id`` (from the URL) selects an existing entry to update;
        an ``edit`` key in the POST data overrides it.  Redirects to
        ``/literature`` when done.
        """

        # I do not know why this is necessary, but it is :/
        data = {k: v for k, v in request.POST.items()}
        del data['csrfmiddlewaretoken']

        engine = _get_engine()
        metadata = MetaData()
        # NOTE(review): no tables are defined on this MetaData, so this
        # create_all() looks like a no-op -- confirm whether it can go.
        metadata.create_all(bind=engine)
        sess = Session(bind=engine)
        bibtype = data.pop('bibtype')
        entry_type = get_bibtype_id(bibtype)

        if 'edit' in data:
            entries_id = data['edit']
            del data['edit']

        # TODO: Remove if clause after rigorous testing
        data['entry_types_id'] = entry_type.entry_types_id if entry_type else 1
        # Build the bibtype-specific form from the remaining POST fields.
        form = FORM_MAP[bibtype](**data)

        if entries_id:
            form.edit(sess,entries_id)
        else:
            form.save(sess)

        sess.commit()
        return redirect('/literature')
Example #5
0
    def test_attached_as_schema(self):
        """Exercise create/reflect/CRUD/drop against an ATTACH-ed SQLite schema."""
        cx = testing.db.connect()
        try:
            cx.execute('ATTACH DATABASE ":memory:" AS  test_schema')
            dialect = cx.dialect
            assert dialect.get_table_names(cx, 'test_schema') == []
            meta = MetaData(cx)
            Table('created', meta, Column('id', Integer),
                  schema='test_schema')
            alt_master = Table('sqlite_master', meta, autoload=True,
                               schema='test_schema')
            meta.create_all(cx)
            eq_(dialect.get_table_names(cx, 'test_schema'), ['created'])
            assert len(alt_master.c) > 0
            # Re-reflect the table from scratch and run a full CRUD cycle
            # through the schema-qualified name.
            meta.clear()
            reflected = Table('created', meta, autoload=True,
                              schema='test_schema')
            assert len(reflected.c) == 1
            cx.execute(reflected.insert(), dict(id=1))
            r = cx.execute(reflected.select()).fetchall()
            assert list(r) == [(1, )]
            cx.execute(reflected.update(), dict(id=2))
            r = cx.execute(reflected.select()).fetchall()
            assert list(r) == [(2, )]
            cx.execute(reflected.delete(reflected.c.id == 2))
            r = cx.execute(reflected.select()).fetchall()
            assert list(r) == []

            # note that sqlite_master is cleared, above

            meta.drop_all()
            assert dialect.get_table_names(cx, 'test_schema') == []
        finally:
            # Always detach so the connection is reusable by other tests.
            cx.execute('DETACH DATABASE test_schema')
class TestDatabaseHelpers(unittest2.TestCase):
    """Tests for the per-URI engine and declarative-base caching helpers."""

    def setUp(self):
        # In-memory SQLite database shared by the tests below.
        self.uri = 'sqlite:///:memory:'
        self.metadata = MetaData()
        # BUG FIX: the original assigned a plain tuple here (the Table(...)
        # call was missing), so no table was ever registered on the metadata
        # and create_all() was a silent no-op.
        self.table = Table(
            'table', self.metadata,
            Column('id', Integer, primary_key=True),
            Column('value', String(63)),
        )
        self.engine = create_engine(self.uri)
        self.metadata.create_all(self.engine)

    def test_get_database(self):
        """get_database_engine caches one engine per database URI."""
        user = mock.Mock(database_uri='sqlite:///:memory:')
        engine = get_database_engine(user)
        self.assertIn(self.uri, _ENGINES)
        self.assertIs(_ENGINES[self.uri], engine)
        # A second lookup must return the cached engine, not a new one.
        engine2 = get_database_engine(user)
        self.assertIs(engine, engine2)

    def test_get_declarative_base(self):
        """get_declarative_base caches one declarative base per URI."""
        user = mock.Mock(database_uri='sqlite:///:memory:')
        base = get_declarative_base(user)
        self.assertIn(self.uri, _BASES)
        self.assertIs(_BASES[self.uri], base)
        # Repeated lookups share the cached base class.
        base2 = get_declarative_base(user)
        self.assertIs(base, base2)
Example #7
0
class InvalidateDuringResultTest(fixtures.TestBase):
    """Verify a connection is invalidated when the DB dies mid-result-fetch."""

    __backend__ = True

    def setup(self):
        # A reconnecting engine lets the test simulate a server shutdown.
        self.engine = engines.reconnecting_engine()
        self.meta = MetaData(self.engine)
        table = Table(
            'sometable', self.meta,
            Column('id', Integer, primary_key=True),
            Column('name', String(50)))
        self.meta.create_all()
        # 99 rows so the fetch loop below stops well short of exhaustion.
        table.insert().execute(
            [{'id': i, 'name': 'row %d' % i} for i in range(1, 100)]
        )

    def teardown(self):
        self.meta.drop_all()
        self.engine.dispose()

    @testing.fails_if([
        '+mysqlconnector', '+mysqldb', '+cymysql', '+pymysql', '+pg8000'],
        "Buffers the result set and doesn't check for connection close")
    def test_invalidate_on_results(self):
        conn = self.engine.connect()
        result = conn.execute('select * from sometable')
        # Consume part of the result, then kill the server underneath it.
        for x in range(20):
            result.fetchone()
        self.engine.test_shutdown()
        _assert_invalidated(result.fetchone)
        assert conn.invalidated
Example #8
0
    def test_unicode_warnings(self):
        """Ensure repeated non-unicode-bind warnings do not leak memory."""
        metadata = MetaData(self.engine)
        table1 = Table(
            "mytable",
            metadata,
            Column(
                "col1",
                Integer,
                primary_key=True,
                test_needs_autoincrement=True,
            ),
            Column("col2", Unicode(30)),
        )
        metadata.create_all()
        # Mutable one-element list so go() can vary the bind value per call.
        i = [1]

        # the times here is cranked way up so that we can see
        # pysqlite clearing out its internal buffer and allow
        # the test to pass
        @testing.emits_warning()
        @profile_memory()
        def go():

            # execute with a non-unicode object. a warning is emitted,
            # this warning shouldn't clog up memory.

            self.engine.execute(
                table1.select().where(table1.c.col2 == "foo%d" % i[0])
            )
            i[0] += 1

        try:
            go()
        finally:
            metadata.drop_all()
Example #9
0
def populate_main_sql_testdatabase(engine):
    """Create the ``events`` table and load sample rows from the BSON fixture.

    :param engine: SQLAlchemy engine for the test database.

    Fixes over the original: the fixture file is opened in binary mode
    (BSON is binary data) and closed deterministically via ``with`` instead
    of being leaked, and ``print`` uses the parenthesized form that works on
    both Python 2 and 3.
    """
    meta = MetaData()

    table = Table('events', meta,
                  Column('id', Integer, primary_key=True),
                  Column('time', String(30)),
                  Column('source_ip', String(30)),
                  Column('source_port', String(30)),
                  Column('request_url', String(500)),
                  Column('request_raw', String(65536)),
                  Column('pattern', String(20)),
                  Column('filename', String(500)),
    )

    meta.create_all(engine)

    with open(os.path.join(file_dir, 'data/events_500.bson'), 'rb') as f:
        data = f.read()

    # Keep only the fields the events table is queried on in the tests.
    insert_dicts = [
        {"source_ip": item["source_ip"],
         "source_port": item["source_port"],
         "request_url": item["request"]["url"],
         "pattern": item["pattern"]}
        for item in bson.decode_all(data)
    ]

    conn = engine.connect()
    print("Inserted: {0}".format(len(insert_dicts)))
    conn.execute(table.insert(), insert_dicts)
Example #10
0
 def __init__(self):
     """Connect to the games_online MySQL database and define its tables.

     The three Table objects share one MetaData; create_all() at the end
     creates any of them that do not yet exist.
     """
     metadata = MetaData()
     # Pooled engine; recycle keeps connections under MySQL's wait_timeout.
     self.engine = create_engine('mysql://*****:*****@localhost:3006/games_online', encoding='utf-8',pool_recycle=7200,pool_size=15,max_overflow=30)
     # self._dbSession = scoped_session(
     #    sessionmaker(
     #        bind=self.engine
     #    )
     # )
     self.games_info = Table('games_info', metadata,
                             Column('id', INTEGER, primary_key=True),
                             Column('gamecode', VARCHAR(20)),
                             Column('language', VARCHAR(20)),
                             Column('sid_api', VARCHAR(255)),
                             Column('online_api', VARCHAR(255)))
     self.games_online = Table('games_online', metadata,
                               Column('id', INTEGER, primary_key=True),
                               Column('gamecode', VARCHAR(20)),
                               Column('language', VARCHAR(20)),
                               Column('region', VARCHAR(20)),
                               Column('serverid', INTEGER),
                               Column('online', INTEGER),
                               Column('time', INTEGER))
     self.games_triggers = Table('games_triggers', metadata,
                                 Column('id', INTEGER, primary_key=True),
                                 Column('gamecode', VARCHAR(20)),
                                 Column('language', VARCHAR(20)),
                                 Column('region', VARCHAR(20)),
                                 Column('serverid', INTEGER),
                                 Column('time', INTEGER))
     metadata.create_all(self.engine)
Example #11
0
    def test_unless_sqla(self):
        """Verify unless() around Query.get: hits pass through, misses raise."""
        from sqlalchemy import (MetaData, Table, Column, Integer, String)
        from sqlalchemy.orm import create_session, mapper

        metadata = MetaData('sqlite:///:memory:')
        testtable = Table('test1', metadata,
            Column('id', Integer, primary_key=True),
            Column('val', String(8)))
        metadata.create_all()

        class Test(object):
            pass
        mapper(Test, testtable)

        testtable.insert().execute({'id': 1, 'val': 'bob'})
        testtable.insert().execute({'id': 2, 'val': 'bobby'})
        testtable.insert().execute({'id': 3, 'val': 'alberto'})

        sess = create_session()
        getunless = unless(sess.query(Test).get)

        # Existing ids return the mapped object unchanged.
        x = getunless(1)
        assert x.val == 'bob', x

        x = getunless(2)
        assert x.val == 'bobby', x

        # A missing id raises; through Convert it surfaces as a
        # validation error instead.
        assert_raises(ValueError, getunless, 5)
        assert_raises(TGValidationError, Convert(getunless).to_python, '5')

        x = Convert(getunless).to_python('1')
        assert x.val == 'bob', x
Example #12
0
    def test_clauseelement(self):
        """Check bind propagation on clause elements built with bind=...

        Each element factory is tried with an engine and a raw connection
        as bind, then with no bind at all (which must raise on execute).
        """
        metadata = MetaData()
        table = Table('test_table', metadata,
            Column('foo', Integer))
        metadata.create_all(bind=testing.db)
        try:
            for elem in [
                table.select,
                lambda **kwargs: sa.func.current_timestamp(**kwargs).select(),
               # func.current_timestamp().select,
                lambda **kwargs:text("select * from test_table", **kwargs)
            ]:
                for bind in (
                    testing.db,
                    testing.db.connect()
                ):
                    try:
                        e = elem(bind=bind)
                        assert e.bind is bind
                        e.execute().close()
                    finally:
                        if isinstance(bind, engine.Connection):
                            bind.close()

                # No bind: executing the element must fail loudly.
                e = elem()
                assert e.bind is None
                assert_raises(
                    exc.UnboundExecutionError,
                    e.execute
                )
        finally:
            # `bind` intentionally leaks out of the loop above; close it if
            # the last value was a raw Connection, then drop the table.
            if isinstance(bind, engine.Connection):
                bind.close()
            metadata.drop_all(bind=testing.db)
Example #13
0
    def create_base_tables(self):
        """Define and (idempotently) create the core application tables.

        Creates ``random_quote``, ``exploit_type`` and ``exploit``;
        ``checkfirst=True`` makes repeated calls safe.
        """
        metadata = MetaData(bind=self.create_engine())

        Table('random_quote', metadata,
              Column('id', Integer, primary_key=True, nullable=False, autoincrement=True),
              Column('quote', Text, nullable=False)
              )
        Table('exploit_type', metadata,
              Column('id', Integer, primary_key=True, nullable=False, autoincrement=True),
              Column('name', String(128), nullable=False),
              Column('short_name', String(32), nullable=False)
              )
        # exploit rows reference exploit_type through the mapped class column.
        Table('exploit', metadata,
              Column('id', Integer, primary_key=True, nullable=False, autoincrement=True),
              Column('type_id', Integer, ForeignKey(ExploitType.id), nullable=False),
              Column('validator_id', Integer, nullable=False),
              Column('name', String(128), nullable=False),
              Column('version', String(64), nullable=False),
              Column('url', String(128), nullable=False),
              Column('request_method', String(12), nullable=False, default='GET'),
              Column('exploit_url', String(128), nullable=False),
              Column('exploit_body', Text, nullable=True),
              Column('exploit_headers', Text, nullable=True),
              Column('is_url_encode', Boolean, nullable=False, default=False),
              Column('is_authenticated', Boolean, nullable=False, default=False)
              )

        metadata.create_all(checkfirst=True)
Example #14
0
def dbinit(args):
    """Wipe the target database and recreate the schema from config.

    The schema location comes from the ``metadata`` key in catalyst.json,
    given as ``module:attribute`` (dotted attribute paths are allowed).
    Destructive: all existing tables at the target are dropped first,
    after a confirmation prompt unless ``args.yes`` is set.
    """
    with utils.status("Reading config file 'catalyst.json'"):
        with open("catalyst.json", "r") as f:
            config = json.load(f)

    metadata = config['metadata']
    module_name, variable_name = metadata.split(":")

    # Make the current directory importable, then walk the dotted
    # attribute path to the MetaData object.
    sys.path.insert(0, '')
    metadata = importlib.import_module(module_name)
    for variable_name in variable_name.split("."):
        metadata = getattr(metadata, variable_name)

    from sqlalchemy import create_engine, MetaData

    target = args.target or config.get('dburi', None)
    if target is None:
        raise Exception("No 'target' argument specified and no 'dburi' setting in config file.")

    if not args.yes and 'y' != input("Warning: any existing data at '%s' will be erased. Proceed? [y/n]" % target):
        return

    dst_engine = create_engine(target)

    # clear out any existing tables
    dst_metadata = MetaData(bind=dst_engine)
    dst_metadata.reflect()
    dst_metadata.drop_all()

    # create new tables
    dst_metadata = metadata
    dst_metadata.create_all(dst_engine)
    def test_use_alter(self):
        """Verify use_alter FKs are emitted as ALTER TABLE, not inline.

        fk_ta (use_alter=True) must appear as a separate ADD CONSTRAINT
        after both CREATEs and be dropped before the tables; fk_tb stays
        inline in the CREATE TABLE statement.
        """
        m = MetaData()
        Table('t', m,
                  Column('a', Integer),
        )

        Table('t2', m,
                Column('a', Integer, ForeignKey('t.a', use_alter=True,
                                                        name='fk_ta')),
                Column('b', Integer, ForeignKey('t.a', name='fk_tb'))
        )

        # Mock engine records SQL instead of executing it.
        e = engines.mock_engine(dialect_name='postgresql')
        m.create_all(e)
        m.drop_all(e)

        e.assert_sql([
            'CREATE TABLE t (a INTEGER)',
            'CREATE TABLE t2 (a INTEGER, b INTEGER, CONSTRAINT fk_tb '
                            'FOREIGN KEY(b) REFERENCES t (a))',
            'ALTER TABLE t2 '
                    'ADD CONSTRAINT fk_ta FOREIGN KEY(a) REFERENCES t (a)',
            'ALTER TABLE t2 DROP CONSTRAINT fk_ta',
            'DROP TABLE t2',
            'DROP TABLE t'
        ])
Example #16
0
    def test_explicit_default_schema_metadata(self):
        """Create and reflect tables under an explicitly named default schema."""
        engine = testing.db

        if testing.against("sqlite"):
            # Works for CREATE TABLE main.foo, SELECT FROM main.foo, etc.,
            # but fails on:
            #   FOREIGN KEY(col2) REFERENCES main.table1 (col1)
            schema = "main"
        else:
            schema = engine.dialect.default_schema_name

        assert bool(schema)

        # Schema set on the MetaData applies to every table defined on it.
        metadata = MetaData(engine, schema=schema)
        table1 = Table("table1", metadata, Column("col1", sa.Integer, primary_key=True), test_needs_fk=True)
        table2 = Table(
            "table2",
            metadata,
            Column("col1", sa.Integer, primary_key=True),
            Column("col2", sa.Integer, sa.ForeignKey("table1.col1")),
            test_needs_fk=True,
        )
        try:
            metadata.create_all()
            # Second call with checkfirst must be a clean no-op.
            metadata.create_all(checkfirst=True)
            assert len(metadata.tables) == 2
            metadata.clear()

            # Reflection under the schema must find both tables again.
            table1 = Table("table1", metadata, autoload=True)
            table2 = Table("table2", metadata, autoload=True)
            assert len(metadata.tables) == 2
        finally:
            metadata.drop_all()
Example #17
0
def init_db():
    """Build the file-backed SQLite tweet database.

    Defines the ``deals`` and ``price_check_history`` tables, creates them
    on a ``sqlite:///tweet.db`` engine, and returns
    ``(engine, deals_table, history_table)``.
    """
    metadata = MetaData()

    # Deal tweets captured from the stream.
    deals_table = Table(
        'deals', metadata,
        Column('deal_id', Integer(), primary_key=True),
        Column('price', String(15)),
        Column('url', String(255)),
        Column('description', String(255)),
        Column('tweet_id', BigInteger()),
        Column('analyzed', DateTime(), default=datetime.now),
        Column('updated', DateTime(), default=datetime.now,
               onupdate=datetime.now),
    )

    # Merchant price checks matched against those deals.
    history_table = Table(
        'price_check_history', metadata,
        Column('match_id', Integer(), primary_key=True),
        Column('merchant', String(75)),
        Column('url', String(255)),
        Column('merchant_description', String(255)),
        Column('tweet_id', BigInteger()),
        Column('merchant_price', BigInteger()),
        Column('analyzed', DateTime(), default=datetime.now),
        Column('updated', DateTime(), default=datetime.now,
               onupdate=datetime.now),
    )

    # File-based SQLite database; create any missing tables.
    engine = create_engine('sqlite:///tweet.db',encoding='latin-1')
    metadata.create_all(engine)
    return engine, deals_table, history_table
class MySQLDatabaseLogHandler(logging.Handler):
    """Logging handler that persists log records to a database table."""

    def __init__(self, databaseAddress):
        """Create the ``logRecords`` table (if needed) and map it.

        :param databaseAddress: SQLAlchemy database URL to connect to.
        """
        logging.Handler.__init__(self)
        self.databaseEngine =  create_engine(databaseAddress)
        self.metaData = MetaData()
        
        logRecordTable = Table("logRecords", self.metaData,
                               Column("id", Integer, Sequence('logRecordSeq'), primary_key=True),
                               Column("created", Float),
                               Column("funcName", String(50)),
                               Column("levelno", Integer),
                               Column("lineno", Integer),
                               Column("message", String(400)),
                               Column("name", String(50)),
                               Column("pathname", String(200))
                               )
        
        self.metaData.create_all(self.databaseEngine)
        # Classical mapping: DatabaseLogRecord instances persist to the table.
        mapper(DatabaseLogRecord, logRecordTable)
        self.Session = sessionmaker(bind=self.databaseEngine)
        
    def emit(self, record):
        """Write one log record to the database (honoring handler filters)."""
        if not (self.filter(record)):
            return
        
        # format() populates record.message before it is persisted.
        self.format(record)
        databaseRecord = DatabaseLogRecord(record)
        session = self.Session()
        session.add(databaseRecord)
        session.commit()           
Example #19
0
    def test_basic(self):
        # the 'convert_unicode' should not get in the way of the
        # reflection process.  reflecttable for oracle, postgresql
        # (others?) expect non-unicode strings in result sets/bind
        # params

        bind = self.bind
        names = set([rec[0] for rec in self.names])

        reflected = set(bind.table_names())

        # Jython 2.5 on Java 5 lacks unicodedata.normalize

        if not names.issubset(reflected) and hasattr(unicodedata, "normalize"):

            # Python source files in the utf-8 coding seem to
            # normalize literals as NFC (and the above are
            # explicitly NFC).  Maybe this database normalizes NFD
            # on reflection.

            nfc = set([unicodedata.normalize("NFC", n) for n in names])
            self.assert_(nfc == names)

            # Yep.  But still ensure that bulk reflection and
            # create/drop work with either normalization.

        # Reflect everything, then round-trip drop/create with
        # checkfirst=False so missing tables would fail loudly.
        r = MetaData(bind)
        r.reflect()
        r.drop_all(checkfirst=False)
        r.create_all(checkfirst=False)
Example #20
0
class datos:
    """Thin wrapper around the SQLite accounting database (data/conta.db)."""

    def __init__(self):
        # Engine, bound metadata, and a long-lived connection.
        self.engine = create_engine('sqlite:///data/conta.db')
        self.metadata = MetaData(bind=self.engine)
        self.conn = self.engine.connect()

    def unsetup(self):
        """Drop every table known to this metadata."""
        self.metadata.drop_all()

    def setup(self):
        """Define the invoices (facturas) table and create it if missing."""
        self.tblfacturas = Table('facturas', self.metadata,
                                 Column('id', sqlite.TEXT, primary_key=True, index=True),
                                 Column('fecha', sqlite.DATE),
                                 Column('ruc', sqlite.TEXT),
                                 Column('razon_social', sqlite.TEXT),
                                 Column('condicion', sqlite.SMALLINT, default=0),
                                 Column('numero', sqlite.TEXT),
                                 Column('timbrado', sqlite.TEXT),
                                 Column('timbrado_inicio', sqlite.DATE),
                                 Column('timbrado_fin', sqlite.DATE)
                                 )

        self.metadata.create_all()

    def __del__(self):
        # Best-effort cleanup of the connection on garbage collection.
        self.conn.close()
Example #21
0
 def setup_class(cls):
     """Create the category/title match tables and seed them with rows.

     Publishes ``metadata``, ``cattable`` and ``matchtable`` as module
     globals for the match tests in this class.
     """
     global metadata, cattable, matchtable
     metadata = MetaData(testing.db)
     cattable = Table(
         'cattable', metadata,
         Column(
             'id', Integer, primary_key=True),
         Column(
             'description', String(50)))
     matchtable = Table(
         'matchtable', metadata,
         Column(
             'id', Integer, primary_key=True),
         Column(
             'title', String(200)),
         Column(
             'category_id', Integer, ForeignKey('cattable.id')))
     metadata.create_all()
     # Parent rows first so the FK values below resolve.
     cattable.insert().execute([{'id': 1, 'description': 'Python'},
                                {'id': 2, 'description': 'Ruby'}])
     matchtable.insert().execute(
         [{'id': 1, 'title': 'Agile Web Development with Rails',
           'category_id': 2},
          {'id': 2, 'title': 'Dive Into Python', 'category_id': 1},
          {'id': 3, 'title': "Programming Matz's Ruby", 'category_id': 2},
          {'id': 4, 'title': 'The Definitive Guide to Django',
           'category_id': 1},
          {'id': 5, 'title': 'Python in a Nutshell', 'category_id': 1}])
Example #22
0
    def test_join_cache(self):
        """Ensure repeated query-join construction does not grow memory."""
        metadata = MetaData(testing.db)
        table1 = Table('table1', metadata, Column('id', Integer,
                       primary_key=True,
                       test_needs_autoincrement=True), Column('data',
                       String(30)))
        table2 = Table('table2', metadata, Column('id', Integer,
                       primary_key=True,
                       test_needs_autoincrement=True), Column('data',
                       String(30)), Column('t1id', Integer,
                       ForeignKey('table1.id')))

        class Foo(object):
            pass

        class Bar(object):
            pass

        mapper(Foo, table1, properties={'bars'
               : relationship(mapper(Bar, table2))})
        metadata.create_all()
        session = sessionmaker()

        # profile_memory runs go() repeatedly and fails on monotonic growth.
        @profile_memory
        def go():
            s = table2.select()
            sess = session()
            sess.query(Foo).join((s, Foo.bars)).all()
            sess.rollback()
        try:
            go()
        finally:
            metadata.drop_all()
Example #23
0
 def test_implicit_execution(self):
     """Implicit execution through metadata.bind joins the open transaction.

     Inserts executed while metadata is bound to a connection with an
     active transaction must all roll back together.
     """
     metadata = MetaData()
     table = Table(
         "test_table",
         metadata,
         Column("foo", Integer),
         test_needs_acid=True,
     )
     conn = testing.db.connect()
     metadata.create_all(bind=conn)
     try:
         trans = conn.begin()
         metadata.bind = conn
         t = table.insert()
         # Statements built from the bound metadata inherit its bind.
         assert t.bind is conn
         table.insert().execute(foo=5)
         table.insert().execute(foo=6)
         table.insert().execute(foo=7)
         trans.rollback()
         metadata.bind = None
         # All three inserts rode the rolled-back transaction.
         assert (
             conn.execute("select count(*) from test_table").scalar() == 0
         )
     finally:
         metadata.drop_all(bind=conn)
Example #24
0
    def setup_class(cls):
        """Create the t1 -> t2 -> t3 FK chain and seed the linked rows."""
        global metadata
        global t1, t2, t3

        metadata = MetaData(testing.db)
        t1 = Table("t1", metadata, Column("t1_id", Integer, primary_key=True), Column("name", String(32)))
        t2 = Table(
            "t2",
            metadata,
            Column("t2_id", Integer, primary_key=True),
            Column("t1_id", Integer, ForeignKey("t1.t1_id")),
            Column("name", String(32)),
        )
        t3 = Table(
            "t3",
            metadata,
            Column("t3_id", Integer, primary_key=True),
            Column("t2_id", Integer, ForeignKey("t2.t2_id")),
            Column("name", String(32)),
        )
        # Drop leftovers from a previous run before creating fresh tables.
        metadata.drop_all()
        metadata.create_all()

        # t1.10 -> t2.20 -> t3.30
        # t1.11 -> t2.21
        # t1.12
        t1.insert().execute(
            {"t1_id": 10, "name": "t1 #10"}, {"t1_id": 11, "name": "t1 #11"}, {"t1_id": 12, "name": "t1 #12"}
        )
        t2.insert().execute({"t2_id": 20, "t1_id": 10, "name": "t2 #20"}, {"t2_id": 21, "t1_id": 11, "name": "t2 #21"})
        t3.insert().execute({"t3_id": 30, "t2_id": 20, "name": "t3 #30"})
Example #25
0
 def setup_class(cls):
     """Build MSSQL full-text-indexed tables and seed them with rows.

     The FULLTEXT indexes are attached via DDL after-create hooks since
     SQLAlchemy does not generate them; the final WAITFOR gives the
     full-text indexer time to populate before the tests run.
     """
     global metadata, cattable, matchtable
     metadata = MetaData(testing.db)
     cattable = Table('cattable', metadata, Column('id', Integer),
                      Column('description', String(50)),
                      PrimaryKeyConstraint('id', name='PK_cattable'))
     matchtable = Table(
         'matchtable',
         metadata,
         Column('id', Integer),
         Column('title', String(200)),
         Column('category_id', Integer, ForeignKey('cattable.id')),
         PrimaryKeyConstraint('id', name='PK_matchtable'),
     )
     DDL("""CREATE FULLTEXT INDEX
                    ON cattable (description)
                    KEY INDEX PK_cattable""").\
         execute_at('after-create', matchtable)
     DDL("""CREATE FULLTEXT INDEX
                    ON matchtable (title)
                    KEY INDEX PK_matchtable""").\
         execute_at('after-create', matchtable)
     metadata.create_all()
     cattable.insert().execute([{'id': 1, 'description': 'Python'},
                                {'id': 2, 'description': 'Ruby'}])
     matchtable.insert().execute([
         {'id': 1, 'title': 'Web Development with Rails', 'category_id': 2},
         {'id': 2, 'title': 'Dive Into Python', 'category_id': 1},
         {'id': 3, 'title': "Programming Matz's Ruby", 'category_id': 2},
         {'id': 4, 'title': 'Guide to Django', 'category_id': 1},
         {'id': 5, 'title': 'Python in a Nutshell', 'category_id': 1}])
     DDL("WAITFOR DELAY '00:00:05'"
         ).execute(bind=engines.testing_engine())
Example #26
0
    def setup_class(cls):
        """Define the users/addresses/u2 tables used by the query tests."""
        global users, users2, addresses, metadata
        metadata = MetaData(testing.db)
        users = Table(
            "query_users",
            metadata,
            Column("user_id", INT, primary_key=True, test_needs_autoincrement=True),
            Column("user_name", VARCHAR(20)),
            test_needs_acid=True,
        )
        addresses = Table(
            "query_addresses",
            metadata,
            Column("address_id", Integer, primary_key=True, test_needs_autoincrement=True),
            Column("user_id", Integer, ForeignKey("query_users.user_id")),
            Column("address", String(30)),
            test_needs_acid=True,
        )

        # Same shape as query_users but without autoincrement behavior.
        users2 = Table(
            "u2",
            metadata,
            Column("user_id", INT, primary_key=True),
            Column("user_name", VARCHAR(20)),
            test_needs_acid=True,
        )

        metadata.create_all()
Example #27
0
    def setup_class(cls):
        """Create users/addresses tables and seed seven joined row pairs."""
        global users, addresses, metadata
        metadata = MetaData(testing.db)
        users = Table(
            "query_users", metadata, Column("user_id", INT, primary_key=True), Column("user_name", VARCHAR(20))
        )
        addresses = Table(
            "query_addresses",
            metadata,
            Column("address_id", Integer, primary_key=True),
            Column("user_id", Integer, ForeignKey("query_users.user_id")),
            Column("address", String(30)),
        )
        metadata.create_all()

        # One address per user; addr1/addr5 are intentionally shared
        # between two users each.
        users.insert().execute(user_id=1, user_name="john")
        addresses.insert().execute(address_id=1, user_id=1, address="addr1")
        users.insert().execute(user_id=2, user_name="jack")
        addresses.insert().execute(address_id=2, user_id=2, address="addr1")
        users.insert().execute(user_id=3, user_name="ed")
        addresses.insert().execute(address_id=3, user_id=3, address="addr2")
        users.insert().execute(user_id=4, user_name="wendy")
        addresses.insert().execute(address_id=4, user_id=4, address="addr3")
        users.insert().execute(user_id=5, user_name="laura")
        addresses.insert().execute(address_id=5, user_id=5, address="addr4")
        users.insert().execute(user_id=6, user_name="ralph")
        addresses.insert().execute(address_id=6, user_id=6, address="addr5")
        users.insert().execute(user_id=7, user_name="fido")
        addresses.insert().execute(address_id=7, user_id=7, address="addr5")
Example #28
0
    def test_with_inheritance(self):
        """Memory test: mapping/unmapping an inheritance hierarchy repeatedly.

        profile_memory re-runs go(), which defines, maps, exercises and
        discards the A/B classes; memory must not grow and no mappers may
        survive at the end.
        """
        metadata = MetaData(testing.db)

        table1 = Table("mytable", metadata,
            Column('col1', Integer, primary_key=True,
                                    test_needs_autoincrement=True),
            Column('col2', String(30))
            )

        # Joined-table inheritance: mytable2.col1 FKs to mytable.col1.
        table2 = Table("mytable2", metadata,
            Column('col1', Integer, ForeignKey('mytable.col1'),
                   primary_key=True, test_needs_autoincrement=True),
            Column('col3', String(30)),
            )

        @profile_memory
        def go():
            class A(fixtures.ComparableEntity):
                pass
            class B(A):
                pass

            mapper(A, table1,
                   polymorphic_on=table1.c.col2,
                   polymorphic_identity='a')
            mapper(B, table2,
                   inherits=A,
                   polymorphic_identity='b')

            sess = create_session()
            a1 = A()
            a2 = A()
            b1 = B(col3='b1')
            b2 = B(col3='b2')
            for x in [a1,a2,b1, b2]:
                sess.add(x)
            sess.flush()
            sess.expunge_all()

            alist = sess.query(A).order_by(A.col1).all()
            eq_(
                [
                    A(), A(), B(col3='b1'), B(col3='b2')
                ],
                alist)

            for a in alist:
                sess.delete(a)
            sess.flush()

            # dont need to clear_mappers()
            del B
            del A

        metadata.create_all()
        try:
            go()
        finally:
            metadata.drop_all()
        assert_no_mappers()
Example #29
0
 def setup_class(cls):
     """Create FTS3 virtual tables via raw DDL, reflect them, and seed rows.

     SQLAlchemy cannot emit ``CREATE VIRTUAL TABLE``, so the tables are
     created with ``testing.db.execute()`` and then reflected with
     ``autoload=True`` into module-level globals.
     """
     global metadata, cattable, matchtable
     metadata = MetaData(testing.db)
     testing.db.execute(
         """
     CREATE VIRTUAL TABLE cattable using FTS3 (
         id INTEGER NOT NULL,
         description VARCHAR(50),
         PRIMARY KEY (id)
     )
     """
     )
     cattable = Table("cattable", metadata, autoload=True)
     testing.db.execute(
         """
     CREATE VIRTUAL TABLE matchtable using FTS3 (
         id INTEGER NOT NULL,
         title VARCHAR(200),
         category_id INTEGER NOT NULL,
         PRIMARY KEY (id)
     )
     """
     )
     matchtable = Table("matchtable", metadata, autoload=True)
     # The reflected tables already exist; create_all() adds nothing here.
     metadata.create_all()
     cattable.insert().execute([{"id": 1, "description": "Python"}, {"id": 2, "description": "Ruby"}])
     matchtable.insert().execute(
         [
             {"id": 1, "title": "Agile Web Development with Rails", "category_id": 2},
             {"id": 2, "title": "Dive Into Python", "category_id": 1},
             {"id": 3, "title": "Programming Matz's Ruby", "category_id": 2},
             {"id": 4, "title": "The Definitive Guide to Django", "category_id": 1},
             {"id": 5, "title": "Python in a Nutshell", "category_id": 1},
         ]
     )
def create_fec_master(engine):
    """Define and create the FEC ``blownations`` table.

    Every field from the FEC master export is stored as a plain String
    column; no primary key is declared.

    :param engine: SQLAlchemy engine the DDL is emitted against.
    :return: the ``Table`` object for ``blownations``.
    """
    # Column order mirrors the header row of the FEC export:
    # "cmte_id" "cand_id" "cand_nm" "contbr_nm" "contbr_city" "contbr_st"
    # "contbr_zip" "contbr_employer" "contbr_occupation" "contb_receipt_amt"
    # "contb_receipt_dt" "receipt_desc" "memo_cd" "memo_text" "form_tp"
    # "file_num" "tran_id" "election_tp"
    field_names = (
        'cmte_id', 'cand_id', 'cand_nm', 'contbr_nm', 'contbr_city',
        'contbr_st', 'contbr_zip', 'contbr_employer', 'contbr_occupation',
        'contb_receipt_amt', 'contb_receipt_dt', 'receipt_desc', 'memo_cd',
        'memo_text', 'form_tp', 'file_num', 'tran_id', 'election_tp',
    )
    metadata = MetaData()
    fec_data = Table(
        'blownations', metadata,
        *(Column(name, String) for name in field_names)
    )
    metadata.create_all(engine)
    return fec_data
Example #31
0
    def signup(self, db_flavour, input_payload, db_name='proton', schema_name='iam', table_name='PROTON_user_registry'):
        """
        Signs up users to PROTON.

        Validates the payload, then inserts the user details into the
        user registry and the hashed credentials into the login registry,
        all inside a single transaction for the chosen database flavour.

        :param db_flavour: One of the supported versions. Must have an entry in dataBaseConfig.ini
        :param db_name: Name of target Database. Default: Proton
        :param schema_name: Name of target schema. Default: iam
        :param table_name: Name of target table. Default: PROTON_user_registry
        :param input_payload: Payload of user details.
        :return: A dictionary containing signup status and message.
        """

        def validate_signup_payload(payload):
            # True only when `payload` is a dict carrying exactly the
            # required keys and every value is non-empty.

            def validate_payload_contents(payload):
                """
                Validates that payload contains atleast one character. More stringent form validation
                must be a client side operation.
                :param payload: signup payload
                :return: Boolean
                """
                validity_store = []
                for k, v in payload.items():
                    if len(str(v)) > 0:
                        validity_store.append(True)
                    else:
                        validity_store.append(False)
                if all(validity_store):
                    return True
                return False

            # isinstance (rather than an exact type() comparison) also
            # accepts dict subclasses such as collections.OrderedDict.
            if not isinstance(payload, dict):
                return False
            required_keys = ['first_name', 'last_name', 'email', 'creation_date_time',
                             'user_name', 'password']
            # Server-side timestamp; overwrites anything the client sent.
            payload.update({'creation_date_time': datetime.utcnow()})

            actual_keys = list(payload.keys())
            if set(required_keys) == set(actual_keys):
                return validate_payload_contents(payload)
            return False

        if validate_signup_payload(input_payload):
            # BUGFIX: pre-bind `connection` so the `finally` clause below can
            # never raise NameError when the engine lookup or connect() call
            # itself fails before `connection` is assigned.
            connection = None
            try:
                signup_payload = {
                    'first_name': input_payload['first_name'],
                    'last_name': input_payload['last_name'],
                    'email': input_payload['email'],
                    'creation_date_time': input_payload['creation_date_time']
                }

                login_payload = {
                    'user_name': input_payload['user_name'],
                    # Only the hash of the password is ever persisted.
                    'password': self.hash_password(input_payload['password'])
                }

                connection = self.__alchemy_engine[db_flavour].connect()

                with connection.begin() as transaction:
                    if db_flavour == 'sqlite':

                        # NOTE(review): MetaData(bind, reflect=True) is a
                        # pre-SQLAlchemy-1.4 idiom — confirm the pinned version.
                        metadata = MetaData(self.__alchemy_engine[db_flavour], reflect=True)
                        table = metadata.tables[table_name]

                        # Check if user already exists:
                        query_pre_existance = select([table.c.id]).where(table.c.email == signup_payload['email'])
                        pre_existance_details = (connection.execute(query_pre_existance)).fetchall()

                        if len(pre_existance_details) == 0:
                            # check is user with similar user_name already exist.
                            login_table = metadata.tables['PROTON_login_registry']
                            query_login_registry = select([login_table.c.id]).where(login_table.c.user_name ==
                                                                                    login_payload['user_name'])
                            login_registry_id = (connection.execute(query_login_registry)).fetchall()

                            if len(login_registry_id) == 0:
                                df_signup_payload = pd.DataFrame(signup_payload, index=[0])
                                df_signup_payload.to_sql(table_name, self.__alchemy_engine[db_flavour], index=False,
                                                         if_exists='append')

                                # Re-query to obtain the id generated for the
                                # row just inserted; it becomes the FK of the
                                # login-registry row.
                                query_user_registry_id = select([table.c.id]).where(table.c.email ==
                                                                                    signup_payload['email'])
                                user_registry_id = (connection.execute(query_user_registry_id)).fetchall()[0][0]
                                login_payload.update({'user_registry_id': user_registry_id})

                                df_login_payload = pd.DataFrame(login_payload, index=[0])
                                df_login_payload.to_sql('PROTON_login_registry', self.__alchemy_engine[db_flavour],
                                                        index=False, if_exists='append')
                                transaction.commit()
                                self.iam_signup_logger.info(
                                    '[ProtonSignup]: New signup of {} successfully completed in '
                                    'Sqlite.'.format(login_payload['user_name']))
                                return {
                                    'status': True,
                                    'message': 'Signup is successful! Please try login.'
                                }
                            else:
                                self.iam_signup_logger.info(
                                    '[ProtonSignup]: New signup from {} for {} username with Sqlite '
                                    'rolled back due to pre-existing '
                                    'user_name.'.format(signup_payload['email'],
                                                        login_payload['user_name']))
                                return {
                                    'status': False,
                                    'message': 'Username {} already exist. Please try '
                                               'with another unique username.'.format(login_payload['user_name'])
                                }

                        else:
                            self.iam_signup_logger.info(
                                '[ProtonSignup]: New signup from {} for {} user_name with Sqlite not '
                                'allowed due to pre-existing email.'.format(signup_payload['email'],
                                                                            login_payload['user_name']))
                            return {
                                'status': False,
                                'message': 'User with email {} already exist. Please try '
                                           'login.'.format(signup_payload['email'])
                            }

                    elif db_flavour == 'postgresql':
                        # check if schema exists & create one if not.
                        schema_status = self.pg_schema_generator(self.__alchemy_engine[db_flavour], schema_name)

                        if schema_status:

                            metadata = MetaData(self.__alchemy_engine[db_flavour], reflect=True, schema=schema_name)
                            metadata.reflect(self.__alchemy_engine[db_flavour])
                            df_signup_payload = pd.DataFrame(signup_payload, index=[0])

                            # Unlike sqlite, the registry tables may not exist
                            # yet on a fresh postgres schema — detect that and
                            # create them on demand below.
                            user_registry_existence_flag = False
                            login_registry_existence_flag = False

                            for table in metadata.tables.values():
                                if table.name == 'PROTON_user_registry':
                                    user_registry_existence_flag = True
                                elif table.name == 'PROTON_login_registry':
                                    login_registry_existence_flag = True
                                else:
                                    pass

                            if user_registry_existence_flag:
                                user_registry_table = Table('PROTON_user_registry', metadata)
                                query_user_registry_id = select([user_registry_table.c.id]).where(
                                    user_registry_table.c.email == signup_payload['email'])
                                user_registry_id = (connection.execute(query_user_registry_id)).fetchall()

                                if len(user_registry_id) == 0:
                                    df_signup_payload.to_sql(table_name, self.__alchemy_engine[db_flavour],
                                                             index=False, if_exists='append', schema=schema_name)
                                    user_registry_id = (connection.execute(query_user_registry_id)).fetchall()[0][0]
                                    login_payload.update({'user_registry_id': user_registry_id})
                                else:
                                    self.iam_signup_logger.info(
                                        '[ProtonSignup]: New signup with Postgresql from {} for {} user_name not '
                                        'allowed due to pre-existing email.'.format(signup_payload['email'],
                                                                                    login_payload['user_name']))
                                    return {
                                        'status': False,
                                        'message': 'User with email {} already exist. Please try '
                                                   'login.'.format(signup_payload['email'])
                                    }

                            else:
                                # First signup ever: create the user registry.
                                Table('PROTON_user_registry', metadata,
                                      Column('id', Integer, primary_key=True, nullable=False, autoincrement=True),
                                      Column('first_name', String, nullable=False),
                                      Column('last_name', String, nullable=False),
                                      Column('email', String, nullable=False),
                                      Column('creation_date_time', DateTime, nullable=False))
                                metadata.create_all()

                                df_signup_payload.to_sql(table_name, self.__alchemy_engine[db_flavour], index=False,
                                                         if_exists='append', schema=schema_name)

                                user_registry_table = Table('PROTON_user_registry', metadata)
                                query_user_registry_id = select([user_registry_table.c.id]).where(
                                    user_registry_table.c.email == signup_payload['email'])
                                user_registry_id = (connection.execute(query_user_registry_id)).fetchall()[0][0]
                                login_payload.update({'user_registry_id': user_registry_id})

                            if not login_registry_existence_flag:
                                # Login registry rows cascade with their user
                                # registry row on update/delete.
                                Table('PROTON_login_registry', metadata,
                                      Column('id', Integer, primary_key=True, nullable=False, autoincrement=True),
                                      Column('user_registry_id', Integer,
                                             ForeignKey('PROTON_user_registry.id', onupdate="CASCADE",
                                                        ondelete="CASCADE"), nullable=False),
                                      Column('user_name', String, nullable=False),
                                      Column('password', String, nullable=False),
                                      Column('last_login_date_time', DateTime, nullable=True))
                                metadata.create_all()

                            login_registry_table = Table('PROTON_login_registry', metadata)
                            query_login_registry = select([login_registry_table.c.id]).where(
                                login_registry_table.c.user_name == login_payload['user_name'])
                            login_registry_id = (connection.execute(query_login_registry)).fetchall()

                            if len(login_registry_id) == 0:
                                df_login_payload = pd.DataFrame(login_payload, index=[0])
                                df_login_payload.to_sql('PROTON_login_registry', self.__alchemy_engine[db_flavour],
                                                        index=False, if_exists='append', schema=schema_name)
                            else:
                                # Another user with similar user_name exists. Invalidate signup and ask user to use
                                # another user name
                                user_registry_table = Table('PROTON_user_registry', metadata)
                                delete_instance = user_registry_table.delete().where(user_registry_table.c.email ==
                                                                                     signup_payload['email'])
                                delete_instance.execute()
                                transaction.commit()
                                self.iam_signup_logger.info(
                                    '[ProtonSignup]: New signup with Postgresql from {} rolled back due '
                                    'to pre-existing user_name of {}'.format(signup_payload['email'],
                                                                             login_payload['user_name']))
                                return {
                                    'status': False,
                                    'message': 'Username {} already exist. Please try '
                                               'with another unique username.'.format(login_payload['user_name'])
                                }

                            transaction.commit()
                            self.iam_signup_logger.info(
                                '[ProtonSignup]: New signup successfully completed in Postgresql for '
                                '{}'.format(login_payload['user_name']))
                            return {
                                'status': True,
                                'message': 'Signup is successful! Please try login.'
                            }

                        else:
                            self.iam_signup_logger.info(
                                '[Signup Controller]: Schema specified not found. Insert operation could '
                                'not be completed. Check connectionManager logs for stack trace.')

                            return {
                                'status': False,
                                'message': 'Signup is unsuccessful due to incomplete database.'
                            }
                    else:
                        self.iam_signup_logger.info(
                            '[Signup Controller]: Unsupported DB in signup payload. Db flavour = {}, '
                            'user_name={}, email = {}'.format(db_flavour, login_payload['user_name'],
                                                              signup_payload['email']))
                        return {
                            'status': False,
                            'message': 'PROTON only supports SQLite and Postgresql atm. Do you have valid db_flavour '
                                       'in your payload?'
                        }
            except Exception as e:
                self.iam_signup_logger.exception('[Signup Controller]: SignUP payload is incomplete.')
                print(Fore.LIGHTRED_EX + '[Signup Controller]: To perform successful INSERT operation, ensure the input'
                                         ' payload/signup payload is complete.' + Style.RESET_ALL)
                return {
                    'status': False,
                    'message': 'Signup is unsuccessful due to server side error.'
                }
            finally:
                # Safe even when connect() never ran: `connection` is None then.
                if connection:
                    connection.close()
        self.iam_signup_logger.info(
            '[ProtonSignup]: New signup is unsuccessful due to incomplete input/signup payload.')
        return {
            'status': False,
            'message': 'Signup is unsuccessful. Input payload / Signup payload is incomplete.'
        }
Example #32
0
class LocalStorage:
    """SQLite-backed local storage for a single chat client.

    The schema (message, client_session, client_contact) is created and
    classically mapped onto the plain inner classes once :meth:`connect`
    is called.
    """

    def __init__(self, client_name):
        # Client name doubles as the sqlite database file name.
        self.client_name = client_name
        self.engine = None
        self.metadata = None
        self.session = None

        # NOTE(review): this binds a sessionmaker to engine=None; a usable
        # session factory only exists after connect() — presumably callers
        # always call connect() before session_scope(). TODO confirm.
        self.set_session()

    def connect(self):
        """Create the engine, build the schema and (re)bind the session."""
        self.set_engine()
        self.set_metadata()
        self.create_tables()
        self.set_session()

    def set_engine(self):
        """Create the sqlite engine for this client's database file."""
        db_connection_url = f'sqlite:///{BASE_DIR}/{self.client_name}.db'
        self.engine = create_engine(db_connection_url,
                                    echo=False,
                                    pool_recycle=7200,
                                    connect_args={'check_same_thread': False})

    def set_metadata(self):
        """Start from a fresh MetaData collection."""
        self.metadata = MetaData()

    def set_session(self):
        """(Re)create the session factory bound to the current engine."""
        self.session = sessionmaker(bind=self.engine)

    @contextmanager
    def session_scope(self, expire=True):
        """Database connection context manager."""
        if not isinstance(expire, bool):
            raise ValueError(f'Expire attr must be bool. Got {type(expire)}')

        session = self.session()
        session.expire_on_commit = expire
        try:
            yield session
            session.commit()
        except BaseException:
            # Roll back on any interruption (including KeyboardInterrupt,
            # matching the original bare `except:`), then re-raise.
            session.rollback()
            raise
        finally:
            session.close()

    class Message:
        """Mapped class for the `message` table."""

        def __init__(self, text, created, from_client, to_client):
            self.id = None
            self.edited = None
            self.text = text
            self.created = created
            self.from_client = from_client
            self.to_client = to_client

        def __repr__(self):
            return 'Client message'

    class ClientSession:
        """Mapped class for the `client_session` table."""

        def __init__(self, token, remote_addr, remote_port, local_addr,
                     local_port):
            self.id = None
            self.token = token
            self.created = None
            self.closed = None
            self.remote_addr = remote_addr
            self.remote_port = remote_port
            self.local_addr = local_addr
            self.local_port = local_port

        def __repr__(self):
            return 'Client session'

    class ClientContact:
        """Mapped class for the `client_contact` table."""

        def __init__(self, friend, friend_id):
            self.id = None
            self.confirmed = None
            self.friend = friend
            self.friend_id = friend_id

        def __repr__(self):
            # Plain literal (the stray f-prefix had no placeholders).
            return 'Client contact'

    def create_tables(self):
        """Define the three tables, emit their DDL and map the classes."""
        message = Table(
            'message',
            self.metadata,
            Column('id', Integer, primary_key=True, autoincrement=True),
            Column('edited', Boolean, default=False),
            Column('text', String, nullable=False),
            Column('created', DateTime, nullable=False),
            Column('from_client', String, nullable=False),
            Column('to_client', String, nullable=False),
        )
        client_session = Table(
            'client_session',
            self.metadata,
            Column('id', Integer, primary_key=True, autoincrement=True),
            Column('token', String, nullable=False, unique=True),
            # BUGFIX: pass the callable, not datetime.now() — the call form
            # froze one import-time timestamp as the default for every row;
            # the callable is evaluated per INSERT instead.
            Column('created', DateTime, default=datetime.now),
            Column('closed', DateTime, nullable=True),
            Column('remote_addr', String, nullable=False),
            Column('remote_port', Integer, nullable=False),
            Column('local_addr', String, nullable=False),
            Column('local_port', Integer, nullable=False),
        )
        client_contact = Table(
            'client_contact',
            self.metadata,
            Column('id', Integer, primary_key=True, autoincrement=True),
            Column('confirmed', Boolean, default=False),
            Column('friend', String, nullable=False),
            Column('friend_id', Integer, nullable=False),
        )

        self.metadata.create_all(self.engine)

        mapper(self.Message, message)
        mapper(self.ClientSession, client_session)
        mapper(self.ClientContact, client_contact)
Example #33
0
   Column('lastname', VARCHAR(25)), 
)

addresses = Table(
   'ADDRESSES', meta, 
   Column('id', Integer, primary_key = True), 
   Column('st_id', Integer, ForeignKey('STUDENTS.id')), 
   Column('postal_add', VARCHAR(25)), 
   Column('email_add', VARCHAR(25))
)
test = Table(
 'TST', meta,
  Column('id', Integer, primary_key = True)
)
meta.drop_all(engine)
meta.create_all(engine)
ins = students.insert()
ins = test.insert()
#ins = students.insert().values(id = '2', name = 'Ravi', lastname = 'Kapoor')
#conn.execute(ins)

conn.execute(students.insert(), [
   {'id':13,'name':'Ravi', 'lastname':'Kapoor'},
   {'id':14,'name':'Rajiv', 'lastname' : 'Khanna'},
   {'id':15,'name':'Komal','lastname' : 'Bhandari'},
   {'id':16,'name':'Abdul','lastname' : 'Sattar'},
   {'id':17,'name':'Priya','lastname' : 'Rajhans'},
])
conn.execute(addresses.insert(), [
   {'id':1,'st_id':13, 'postal_add':'Shivajinagar Pune', 'email_add':'*****@*****.**'},
   {'id':2,'st_id':14, 'postal_add':'ChurchGate Mumbai', 'email_add':'*****@*****.**'},
Example #34
0
class ServerStorage:
    """Server-side database layer (classical SQLAlchemy mapping): known
    users, active sessions, login history, contacts and per-user message
    counters, all backed by a sqlite file."""

    # Mapped class for the table of all known users.
    # One instance of this class == one row in the Users table.
    class AllUsers:
        def __init__(self, username):
            self.name = username
            self.last_login = datetime.datetime.now()
            self.id = None

    # Mapped class for the table of currently active users:
    # one instance of this class == one row in the Active_users table.
    class ActiveUsers:
        def __init__(self, user_id, ip_address, port, login_time):
            self.user = user_id
            self.ip_address = ip_address
            self.port = port
            self.login_time = login_time
            self.id = None

    # Mapped class for the login-history table.
    class LoginHistory:
        def __init__(self, name, date, ip, port):
            self.id = None
            self.name = name
            self.date_time = date
            self.ip = ip
            self.port = port

    # Mapped class for the users' contacts table.
    class UsersContacts:
        def __init__(self, user, contact):
            self.id = None
            self.user = user
            self.contact = contact

    # Mapped class for the per-user activity table (message counters).
    class UsersHistory:
        def __init__(self, user):
            self.id = None
            self.user = user
            self.sent = 0
            self.accepted = 0

    def __init__(self, path):
        """Create the engine and schema, map the classes, open a session
        and clear any stale active-user rows from a previous run."""
        # Create the database engine.
        print(path)
        # SERVER_DATABASE - sqlite:///server_base.db3
        # echo=False — disable SQL statement logging.
        # pool_recycle — by default an idle DB connection is dropped after
        # 8 hours; pool_recycle=7200 re-establishes it every 2 hours instead.
        self.database_engine = create_engine(
            f'sqlite:///{path}',
            echo=False,
            pool_recycle=7200,
            connect_args={'check_same_thread': False})

        # MetaData object collecting the table definitions below.
        self.metadata = MetaData()

        # Users table.
        users_table = Table('Users', self.metadata,
                            Column('id', Integer, primary_key=True),
                            Column('name', String, unique=True),
                            Column('last_login', DateTime))

        # Active-users table.
        active_users_table = Table(
            'Active_users', self.metadata,
            Column('id', Integer, primary_key=True),
            Column('user', ForeignKey('Users.id'), unique=True),
            Column('ip_address', String), Column('port', Integer),
            Column('login_time', DateTime))

        # Login-history table.
        user_login_history = Table('Login_history', self.metadata,
                                   Column('id', Integer, primary_key=True),
                                   Column('name', ForeignKey('Users.id')),
                                   Column('date_time', DateTime),
                                   Column('ip', String),
                                   Column('port', String))

        # Users' contacts table.
        contacts = Table('Contacts', self.metadata,
                         Column('id', Integer, primary_key=True),
                         Column('user', ForeignKey('Users.id')),
                         Column('contact', ForeignKey('Users.id')))

        # Per-user message-counter table.
        users_history_table = Table('History', self.metadata,
                                    Column('id', Integer, primary_key=True),
                                    Column('user', ForeignKey('Users.id')),
                                    Column('sent', Integer),
                                    Column('accepted', Integer))

        # Emit the DDL for all tables.
        self.metadata.create_all(self.database_engine)

        # Classical mappings:
        # bind each plain class above to its table.
        mapper(self.AllUsers, users_table)
        mapper(self.ActiveUsers, active_users_table)
        mapper(self.LoginHistory, user_login_history)
        mapper(self.UsersContacts, contacts)
        mapper(self.UsersHistory, users_history_table)

        # Open the working session.
        Session = sessionmaker(bind=self.database_engine)
        self.session = Session()

        # Nobody can be "active" right after the server starts, so remove
        # any leftover rows in the active-users table from a previous run.
        self.session.query(self.ActiveUsers).delete()
        self.session.commit()

    # Called when a user logs in; records the login in the database.
    def user_login(self, username, ip_address, port):
        """Record that *username* connected from ip_address:port."""
        print('\n Подключился клиент: ', username, ip_address, port)
        # Look the user up by name in the Users table.
        rez = self.session.query(self.AllUsers).filter_by(name=username)
        # print(type(rez))
        # If the user already exists, refresh the last-login timestamp.
        if rez.count():
            user = rez.first()
            user.last_login = datetime.datetime.now()
        # Otherwise create a new user.
        else:
            # A new AllUsers instance carries the row into the table.
            user = self.AllUsers(username)
            self.session.add(user)
            # Commit here so the new user's ID gets assigned.
            self.session.commit()
            user_in_history = self.UsersHistory(user.id)
            self.session.add(user_in_history)

        # Now record the login in the active-users table:
        # a new ActiveUsers instance carries the row into the table.
        new_active_user = self.ActiveUsers(user.id, ip_address, port,
                                           datetime.datetime.now())
        self.session.add(new_active_user)

        # ...and append the event to the login history:
        # a new LoginHistory instance carries the row into the table.
        history = self.LoginHistory(user.id, datetime.datetime.now(),
                                    ip_address, port)
        self.session.add(history)

        # Persist everything.
        self.session.commit()

    # Records a user's disconnect.
    def user_logout(self, username):
        """Remove *username* from the active-users table."""
        # Fetch the departing user's row from AllUsers.
        user = self.session.query(
            self.AllUsers).filter_by(name=username).first()

        # Remove them from the active-users table
        # (delete the row from ActiveUsers).
        self.session.query(self.ActiveUsers).filter_by(user=user.id).delete()

        # Apply the changes.
        self.session.commit()

    # Records a message transfer and updates the counters accordingly.
    def process_message(self, sender, recipient):
        """Bump sent/accepted counters for *sender* and *recipient*."""
        # Resolve sender and recipient names to their IDs.
        sender = self.session.query(
            self.AllUsers).filter_by(name=sender).first().id
        recipient = self.session.query(
            self.AllUsers).filter_by(name=recipient).first().id
        # Fetch the history rows and increment the counters.
        sender_row = self.session.query(
            self.UsersHistory).filter_by(user=sender).first()
        sender_row.sent += 1
        recipient_row = self.session.query(
            self.UsersHistory).filter_by(user=recipient).first()
        recipient_row.accepted += 1

        self.session.commit()

    # Adds a contact for a user.
    def add_contact(self, user, contact):
        """Add *contact* to *user*'s contact list (no-op on duplicates)."""
        # Resolve both users.
        user = self.session.query(self.AllUsers).filter_by(name=user).first()
        contact = self.session.query(
            self.AllUsers).filter_by(name=contact).first()

        # Skip duplicates and non-existent contacts
        # (the user field itself is trusted).
        if not contact or self.session.query(self.UsersContacts).filter_by(
                user=user.id, contact=contact.id).count():
            return

        # Create the row and store it.
        contact_row = self.UsersContacts(user.id, contact.id)
        self.session.add(contact_row)
        self.session.commit()

    # Removes a contact from the database.
    def remove_contact(self, user, contact):
        """Delete *contact* from *user*'s contact list."""
        # Resolve both users.
        user = self.session.query(self.AllUsers).filter_by(name=user).first()
        contact = self.session.query(
            self.AllUsers).filter_by(name=contact).first()

        # Verify the contact exists (the user field itself is trusted).
        if not contact:
            return

        # Delete the row (the print shows the number of rows removed).
        print(
            self.session.query(self.UsersContacts).filter(
                self.UsersContacts.user == user.id,
                self.UsersContacts.contact == contact.id).delete())
        self.session.commit()

    # Returns the list of known users with their last-login time.
    def users_list(self):
        """Return (name, last_login) tuples for every known user."""
        query = self.session.query(
            self.AllUsers.name,
            self.AllUsers.last_login,
        )
        # Return a list of tuples.
        return query.all()

    # Returns the list of active users.
    def active_users_list(self):
        """Return (name, ip, port, login_time) tuples for active users."""
        # Join the tables and collect (name, address, port, time) tuples.
        query = self.session.query(
            self.AllUsers.name, self.ActiveUsers.ip_address,
            self.ActiveUsers.port,
            self.ActiveUsers.login_time).join(self.AllUsers)
        # Return a list of tuples.
        return query.all()

    # Returns the login history for one user or for everyone.
    def login_history(self, username=None):
        """Return login-history tuples, optionally filtered by *username*."""
        # Query the login history.
        query = self.session.query(self.AllUsers.name,
                                   self.LoginHistory.date_time,
                                   self.LoginHistory.ip,
                                   self.LoginHistory.port).join(self.AllUsers)
        # If a username was given, filter on it.
        if username:
            query = query.filter(self.AllUsers.name == username)
        # Return a list of tuples.
        return query.all()

    # Returns a user's contact list.
    def get_contacts(self, username):
        """Return the contact names of *username*."""
        # Fetch the requested user.
        user = self.session.query(self.AllUsers).filter_by(name=username).one()

        # Query their contact list.
        query = self.session.query(self.UsersContacts, self.AllUsers.name). \
            filter_by(user=user.id). \
            join(self.AllUsers, self.UsersContacts.contact == self.AllUsers.id)

        # Keep only the contact names.
        return [contact[1] for contact in query.all()]

    # Returns the number of messages sent and received per user.
    def message_history(self):
        """Return (name, last_login, sent, accepted) tuples per user."""
        query = self.session.query(
            self.AllUsers.name, self.AllUsers.last_login,
            self.UsersHistory.sent,
            self.UsersHistory.accepted).join(self.AllUsers)
        # Return a list of tuples.
        return query.all()
Example #35
0
File: queue.py Project: rch/rpclib
    @rpc(Integer)
    def sleep(ctx, integer):
        """Demo RPC method: block the handling worker for *integer* seconds."""
        print("Sleeping for %d seconds..." % (integer))
        time.sleep(integer)


def _on_method_call(ctx):
    # Event listener: log which consumer (worker) picked up which queued task.
    print("This is worker id %d, processing task id %d." %
          (ctx.transport.consumer_id, ctx.transport.task_id))


# Fire _on_method_call every time a service method is invoked.
AsyncService.event_manager.add_listener('method_call', _on_method_call)

if __name__ == '__main__':
    logging.basicConfig(level=logging.DEBUG)
    logging.getLogger('sqlalchemy.engine.base.Engine').setLevel(logging.DEBUG)
    # Create any missing tables before producing/consuming tasks.
    metadata.create_all()

    application = Application([AsyncService],
                              'rpclib.async',
                              interface=Wsdl11(),
                              in_protocol=Soap11(),
                              out_protocol=Soap11())

    # Queue ten sleep tasks, then process them with a single consumer.
    producer = Producer(db, application)
    for i in range(10):
        producer.service.sleep(i)

    consumer = Consumer(db, application, 1)
    consumer.serve_forever()
Example #36
0
class CrawlerDb:
    """Persistence layer for the crawler.

    Manages three tables: ``queue`` (URLs waiting to be crawled),
    ``crawl`` (pages already fetched) and ``keywords`` (keywords
    extracted per page).  :meth:`connect` must be called before any
    other method; every public method returns False when not connected.
    """

    def __init__(self):
        # No engine/connection yet; every public method checks this flag.
        self.connected = False

    def connect(self):
        """Build the DB URL from ``settings``, connect, and create tables."""
        e = settings.DATABASE_ENGINE + "://"
        p = ""
        if settings.DATABASE_ENGINE == "mysql":
            e += settings.DATABASE_USER + ":" + settings.DATABASE_PASS + "@"
            p = ":" + settings.DATABASE_PORT

        e += settings.DATABASE_HOST + p
        # sqlite URLs take a file path, not a host/database pair.
        if settings.DATABASE_ENGINE != "sqlite":
            e += "/" + settings.DATABASE_NAME
        self.engine = create_engine(e)
        self.connection = self.engine.connect()
        self.connected = bool(self.connection)
        self.metadata = MetaData()

        # Define the tables
        self.queue_table = Table(
            'queue', self.metadata, Column('id', Integer, primary_key=True),
            Column('address', String, nullable=False),
            # BUG FIX: pass the callable, not datetime.now().  Calling it
            # here would freeze a single timestamp at table-definition time
            # and reuse it for every inserted row.
            Column('added', DateTime, nullable=False, default=datetime.now))

        self.crawl_table = Table(
            'crawl',
            self.metadata,
            Column('id', Integer, primary_key=True),
            Column('address', String, nullable=False),
            Column('http_status', String, nullable=False),
            Column('title', String, nullable=True),
            Column('size', Integer, nullable=True),
        )

        self.keyword_table = Table(
            'keywords',
            self.metadata,
            Column('id', Integer, primary_key=True),
            Column('page_id', None, ForeignKey('crawl.id')),
            Column('keyword', String, nullable=False),
            Column('weight', Integer, nullable=False),
        )

        # Create the tables
        self.metadata.create_all(self.engine)

    def enqueue(self, urls):
        """Queue *urls* (byte strings) for crawling.

        Returns True on success (including an empty list), False when not
        connected or when the insert fails.
        """
        if not self.connected:
            return False
        if len(urls) == 0:
            return True
        args = [{'address': u.decode("utf8")} for u in urls]
        result = self.connection.execute(self.queue_table.insert(), args)
        if result:
            return True
        return False

    def dequeue(self):
        """Pop and return the address of the oldest queued URL.

        Returns False when not connected, when the queue is empty, or when
        the delete fails.
        """
        if not self.connected:
            return False
        # Get the first thing in the queue
        s = select([self.queue_table]).limit(1)
        res = self.connection.execute(s)
        result = res.fetchall()
        res.close()
        # If we get a result
        if len(result) > 0:
            # Remove from the queue before handing the row out
            delres = self.connection.execute(self.queue_table.delete().where(
                self.queue_table.c.id == result[0][0]))
            if not delres:
                return False
            # Column 1 of the row is the address
            return result[0][1]
        return False

    def checkCrawled(self, url):
        """Return True when *url* is already present in the crawl table."""
        # BUG FIX: guard on the connection flag like every other method,
        # instead of raising AttributeError when called before connect().
        if not self.connected:
            return False
        s = select([self.crawl_table
                    ]).where(self.crawl_table.c.address == url.decode("utf8"))
        result = self.connection.execute(s)
        try:
            return len(result.fetchall()) > 0
        finally:
            # Always release the result set, whichever branch we take.
            result.close()

    # Data should be a dictionary containing the following
    # key : desc
    # 	address : the url of the page
    # 	http_status : the status code returned by the request
    # 	title : the contents of the <title> element
    # 	size : the of the returned content in bytes
    def addPage(self, data):
        """Record a crawled page and its keywords; return True on success."""
        if not self.connected:
            return False
        # Add the page to the crawl table
        # NOTE(review): ``unicode`` is a Python 2 builtin; on Python 3 this
        # raises NameError — confirm the target runtime before porting.
        try:
            result = self.connection.execute(self.crawl_table.insert().values(
                address=unicode(data['address']),
                http_status=data['status'],
                title=unicode(data['title']),
                size=data['size']))
        except UnicodeDecodeError:
            return False
        if not result:
            return False
        # generate list of argument dictionaries for the insert many statement
        args = [{
            "page_id": result.inserted_primary_key[0],
            "keyword": unicode(k),
            "weight": w
        } for k, w in data["keywords"].items()]
        # Add all the keywords
        if len(args) > 0:
            result2 = self.connection.execute(self.keyword_table.insert(),
                                              args)
            if not result2:
                return False
        return True

    def close(self):
        """Close the underlying database connection."""
        self.connection.close()
Example #37
0
def create_or_update_database_schema(
    engine,
    oeclasses,
    max_binary_size=MAX_BINARY_SIZE,
):
    """
    Create a database schema from an oeclasses list (see classes module).
    If the schema already exists, check that every table and column exists
    and create the missing ones.

    :params engine: sqlalchemy engine

    :params oeclasses: list of oeclass

    :params max_binary_size: maximum size in bytes of the BLOB column of
        NumpyArrayTable (only used when that table is created).
    """

    metadata = MetaData(bind=engine)
    metadata.reflect()

    # Index classes by name so relationship targets can be resolved below.
    class_by_name = {}
    for oeclass in oeclasses:
        class_by_name[oeclass.__name__] = oeclass

    # check all tables
    for oeclass in oeclasses:
        tablename = oeclass.tablename

        if tablename not in metadata.tables.keys():
            # create tables that are not present in db from the class list
            table = create_table_from_class(oeclass, metadata)
        else:
            # check if all attributes exist as SQL columns
            table = metadata.tables[tablename]
            for attrname, attrtype in oeclass.usable_attributes.items():
                create_column_if_not_exists(table, attrname, attrtype)

    if 'NumpyArrayTable' not in metadata.tables.keys():
        c1 = Column('id', Integer, primary_key=True, index=True)
        c2 = Column('dtype', String(128))
        c3 = Column('shape', String(128))
        c4 = Column('compress', String(16))
        c5 = Column('units', String(128))
        # BUG FIX: honour the max_binary_size argument; the original always
        # used the module-level MAX_BINARY_SIZE constant, silently ignoring
        # the parameter.
        c6 = Column('blob', LargeBinary(max_binary_size))
        table = Table('NumpyArrayTable', metadata, c1, c2, c3, c4, c5, c6)
        table.create()

    # check all relationships
    for oeclass in oeclasses:
        # one to many
        for childclassname in oeclass.one_to_many_relationship:
            parenttable = metadata.tables[oeclass.tablename]
            childtable = metadata.tables[
                class_by_name[childclassname].tablename]
            create_one_to_many_relationship_if_not_exists(
                parenttable, childtable)

        # many to many
        for classname2 in oeclass.many_to_many_relationship:
            table1 = metadata.tables[oeclass.tablename]
            table2 = metadata.tables[class_by_name[classname2].tablename]
            create_many_to_many_relationship_if_not_exists(
                table1, table2, metadata)

    metadata.create_all()
Example #38
0
# -*- coding: utf-8 -*-
"""
    :author: T8840
"""

from sqlalchemy import create_engine, MetaData, Table, Column, Integer, String, Sequence
from sqlalchemy.orm import scoped_session, sessionmaker

from core import config

# Module-level engine and classic (non-declarative) table definition.
engine = create_engine(config.SQLALCHEMY_DATABASE_URI)  # , pool_pre_ping=True
META_DATA = MetaData(bind=engine)
# keep_existing=True makes the definition idempotent if this module is
# imported more than once with the same MetaData.
USERS_TABLE = Table("users",
                    META_DATA,
                    Column("id",
                           Integer,
                           Sequence("user_id_seq"),
                           primary_key=True),
                    Column("first_name", String(255)),
                    Column("last_name", String(255)),
                    keep_existing=True)
# checkfirst avoids an error when the table already exists in the database.
META_DATA.create_all(engine, checkfirst=True)
#
# db_session = scoped_session(sessionmaker(autocommit=False, autoflush=False, bind=engine))
# SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)
Example #39
0
class ClientDB:
    """Client-side storage: known users, message history and contacts."""

    class AllUsers:
        # All users known to the server (refreshed via add_users).
        def __init__(self, userName):
            self.id = None
            self.userName = userName

    class MessageHistory:
        # One sent or received message.
        def __init__(self, msgFrom, msgTo, msg):
            self.id = None
            self.msgFrom = msgFrom
            self.msgTo = msgTo
            self.msg = msg
            self.dateTime = datetime.datetime.now()

    class Contacts:
        # One entry of the user's contact list.
        def __init__(self, contact):
            self.id = None
            self.contact = contact

    def __init__(self, userName, path=defaultPath):
        # Each client gets its own sqlite file; check_same_thread=False
        # because the GUI and network threads share this connection.
        self.engine = create_engine(
            f'sqlite:///{path}/client_{userName}DB.db3',
            echo=False,
            pool_recycle=7200,
            connect_args={'check_same_thread': False})
        self.metadata = MetaData()

        # Table definitions
        allUsersTable = Table('AllUsers', self.metadata,
                              Column('id', Integer, primary_key=True),
                              Column('userName', String))

        messageHistoryTable = Table(
            'MessageHistory', self.metadata,
            Column('id', Integer, primary_key=True),
            Column('msgFrom', ForeignKey('AllUsers.userName')),
            Column('msgTo', ForeignKey('AllUsers.userName')),
            Column('msg', Text), Column('dateTime', DateTime))
        contactsTable = Table(
            'Contacts', self.metadata, Column('id', Integer, primary_key=True),
            Column('contact', ForeignKey('AllUsers.userName')))
        self.metadata.create_all(self.engine)

        # Classical mapping of the plain classes above onto the tables.
        mapper(self.AllUsers, allUsersTable)
        mapper(self.MessageHistory, messageHistoryTable)
        mapper(self.Contacts, contactsTable)

        Session = sessionmaker(bind=self.engine)
        self.session = Session()

        # Contacts are re-fetched from the server on every start.
        self.session.query(self.Contacts).delete()
        self.session.commit()

    # Add a contact
    def add_contact(self, contact):
        """Add *contact* unless it is already in the contact list."""
        # BUG FIX: the original checked whether the whole table was EMPTY,
        # so at most one contact could ever be stored.  Check for this
        # particular contact instead.
        if not self.session.query(
                self.Contacts).filter_by(contact=contact).count():
            self.session.add(self.Contacts(contact))
            self.session.commit()

    # Remove a contact
    def del_contact(self, delContact):
        """Remove *delContact* from the contact list."""
        self.session.query(
            self.Contacts).filter_by(contact=delContact).delete()
        # BUG FIX: the deletion was never committed.
        self.session.commit()

    # Refresh the cached list of existing users
    def add_users(self, usersList):
        """Replace the cached user list with *usersList*."""
        self.session.query(self.AllUsers).delete()
        for user in usersList:
            self.session.add(self.AllUsers(user))
        # One commit for the whole refresh instead of one per user.
        self.session.commit()

    # Store a message
    def save_message(self, msgFrom, msgTo, msg):
        """Persist one message in the history table."""
        self.session.add(self.MessageHistory(msgFrom, msgTo, msg))
        self.session.commit()

    def get_message(self, msg_From=None, msg_To=None):
        """Return (from, to, text, datetime) tuples, optionally filtered
        by sender and/or recipient (filters combine with AND)."""
        query = self.session.query(self.MessageHistory)
        # BUG FIX: the original raised NameError when called with no
        # filters and silently dropped the sender filter when both were
        # supplied; chain the filters instead.
        if msg_From:
            query = query.filter_by(msgFrom=msg_From)
        if msg_To:
            query = query.filter_by(msgTo=msg_To)
        return [(history.msgFrom, history.msgTo, history.msg, history.dateTime)
                for history in query.all()]

    # Return the stored contacts
    def get_contacts(self):
        """Return all contact names."""
        return [
            contact[0]
            for contact in self.session.query(self.Contacts.contact).all()
        ]

    # Return the stored users
    def get_users(self):
        """Return all known user names."""
        return [
            user[0]
            for user in self.session.query(self.AllUsers.userName).all()
        ]
Example #40
0
class ListaCompra():
    """SQLAlchemy wrapper for a shopping-list database.

    Each product group is stored as its own table with columns
    (id, product, estado).
    """
    def __init__(self, base):
        # *base* is a SQLAlchemy database URL, e.g. 'sqlite:///lista.db'.
        self.base = base
        self.engine = create_engine(self.base, echo=False)
        Session = sessionmaker(bind=self.engine)
        self.session = Session()
        self.metadata = MetaData()

    def crear_grupo(self, grupo):
        """Create a table named *grupo* to hold a group of products."""
        try:
            grupo = Table(grupo,
                          self.metadata,
                          Column('id', Integer, primary_key=True),
                          Column('product',
                                 String(60),
                                 nullable=False,
                                 unique=True),
                          Column('estado', Integer),
                          extend_existing=True)
            self.metadata.create_all(self.engine)
        except Exception as e:
            print(e)

    def insertar(self, grupo, producto, estado=0):
        """Insert a new row into the table *grupo*."""
        try:
            # Reuse the shared reflection helper instead of duplicating
            # the autoload Table() call inline.
            tabla = self.__carga_tabla(grupo)
            ins = tabla.insert().values(product=producto, estado=estado)
            self.engine.execute(ins)
        except Exception as e:
            print(e)

    def grupos(self):
        """Return the list of tables (groups) in the database."""
        self.metadata.reflect(self.engine)
        return list(self.metadata.tables.keys())

    def borrar_grupo(self, grupo):
        """Drop the table *grupo* from the database."""
        try:
            grupo = self.__carga_tabla(grupo)
        except NoSuchTableError:
            print('La tabla no existe')
        else:
            self.metadata.drop_all(tables=(grupo, ), bind=self.engine)

    def conseguir_elementos(self, grupo):
        """Return the rows of table *grupo*, ordered by product name."""
        tabla = self.__carga_tabla(grupo)
        salida = []
        for consulta in self.session.query(tabla).order_by(tabla.c.product):
            salida.append(consulta)
        self.session.close()
        return salida

    def conseguir_ids(self, grupo):
        """Return the ids of table *grupo* in ascending order."""
        tabla = self.__carga_tabla(grupo)
        ids = []
        for consulta in self.session.query(tabla.c.id).order_by(tabla.c.id):
            ids.append(consulta[0])  # Rows come back as 1-tuples.
        self.session.close()
        return ids

    def cambiar_estado(self, grupo, indice, estado):
        """Update the 'estado' of the product with id *indice*."""
        tabla = self.__carga_tabla(grupo)
        actualizar = tabla.update().where(tabla.c.id == indice).values(
            estado=estado)
        self.engine.execute(actualizar)

    def __carga_tabla(self, tabla):
        """Reflect and return the table named *tabla* from the database."""
        return Table(tabla,
                     self.metadata,
                     autoload=True,
                     autoload_with=self.engine)

    def borrar_elemento(self, grupo, indice):
        """Delete the row with id *indice* from table *grupo*."""
        tabla = self.__carga_tabla(grupo)
        borrado = tabla.delete().where(tabla.c.id == indice)
        self.engine.execute(borrado)

    def elemento(self, grupo, indice):
        """Return the row of *grupo* with id *indice* (or None)."""
        # SECURITY FIX: the original interpolated grupo and indice straight
        # into a SQL string ("SELECT * FROM {} WHERE id = {}"), which allows
        # SQL injection.  Use a parameterized Core select instead.
        tabla = self.__carga_tabla(grupo)
        consulta = self.engine.execute(
            tabla.select().where(tabla.c.id == indice)).fetchone()
        return consulta

    def actualizar_registro(self, grupo, registro):
        """Update a row of *grupo*; *registro* is (id, product, estado)."""
        tabla = self.__carga_tabla(grupo)
        actualizar = tabla.update().where(tabla.c.id == registro[0]).values(
            product=registro[1], estado=registro[2])
        self.engine.execute(actualizar)

    def buscar_registro(self, cadena):
        """Search *cadena* (case-insensitive regex) in the products of all
        groups; return (tabla, id, product, estado) tuples, or False when
        the pattern is invalid."""
        salida = []
        try:
            exp_regular = re.compile(cadena, re.IGNORECASE)
        except re.error as error:
            print(error)
            return False
        tablas = self.grupos()
        for tabla in tablas:
            elementos = self.conseguir_elementos(tabla)
            for elemento in elementos:
                indice, producto, estado = elemento
                if exp_regular.search(producto):
                    salida.append((tabla, indice, producto, estado))
        return salida
Example #41
0
    def test_with_inheritance(self):
        """Memory-leak check: repeatedly map, use and discard a joined-table
        inheritance hierarchy; profile_memory asserts memory stays flat."""
        metadata = MetaData()

        table1 = Table(
            "mytable",
            metadata,
            Column(
                "col1",
                Integer,
                primary_key=True,
                test_needs_autoincrement=True,
            ),
            Column("col2", String(30)),
        )

        # Child table: shares its primary key with the parent (joined-table
        # inheritance).
        table2 = Table(
            "mytable2",
            metadata,
            Column(
                "col1",
                Integer,
                ForeignKey("mytable.col1"),
                primary_key=True,
                test_needs_autoincrement=True,
            ),
            Column("col3", String(30)),
        )

        @profile_memory()
        def go():
            # Classes are redefined on every iteration so their mappers can
            # be garbage collected between runs.
            class A(fixtures.ComparableEntity):
                pass

            class B(A):
                pass

            mapper(
                A,
                table1,
                polymorphic_on=table1.c.col2,
                polymorphic_identity="a",
            )
            mapper(B, table2, inherits=A, polymorphic_identity="b")

            sess = create_session(self.engine)
            a1 = A()
            a2 = A()
            b1 = B(col3="b1")
            b2 = B(col3="b2")
            for x in [a1, a2, b1, b2]:
                sess.add(x)
            sess.flush()
            sess.expunge_all()

            # Polymorphic load should return both A and B instances.
            alist = sess.query(A).order_by(A.col1).all()
            eq_([A(), A(), B(col3="b1"), B(col3="b2")], alist)

            for a in alist:
                sess.delete(a)
            sess.flush()

            # don't need to clear_mappers()
            del B
            del A

        metadata.create_all(self.engine)
        try:
            go()
        finally:
            metadata.drop_all(self.engine)
        assert_no_mappers()
class TestSqlAlchemy(unittest.TestCase):
    """Exercises spyne's TableModel integration with SQLAlchemy:
    declarative definitions, custom mappers, RPC round-trips and
    mixin/inheritance attribute discovery."""

    def setUp(self):
        # Fresh metadata, declarative base and in-memory database per test.
        self.metadata = MetaData()
        self.DeclarativeBase = declarative_base(metadata=self.metadata)
        self.engine = create_engine('sqlite:///:memory:', echo=True)
        self.Session = sessionmaker(bind=self.engine)

    def tearDown(self):
        # Drop the per-test objects so nothing leaks between tests.
        del self.metadata
        del self.DeclarativeBase
        del self.engine
        del self.Session

    def test_declarative(self):
        """A TableModel declarative class can be defined and created."""
        from sqlalchemy import Integer
        from sqlalchemy import String

        class DbObject(TableModel, self.DeclarativeBase):
            __tablename__ = 'db_object'

            id = Column(Integer, primary_key=True)
            s = Column(String)

        self.metadata.create_all(self.engine)

    def test_mapper(self):
        """A ComplexModel can be classically mapped onto a join."""
        import sqlalchemy

        class Address(self.DeclarativeBase):
            __tablename__ = 'address'

            id = Column(sqlalchemy.Integer, primary_key=True)
            email = Column(sqlalchemy.String(50))
            user_id = Column(sqlalchemy.Integer, ForeignKey('user.id'))

        class User(self.DeclarativeBase):
            __tablename__ = 'user'

            id = Column(sqlalchemy.Integer, primary_key=True)
            name = Column(sqlalchemy.String(50))
            addresses = relationship("Address", backref="user")

        self.metadata.create_all(self.engine)

        import spyne.model.primitive

        class AddressDetail(ComplexModel):
            id = spyne.model.primitive.Integer
            user_name = spyne.model.primitive.String
            address = spyne.model.primitive.String

            @classmethod
            def mapper(cls, meta):
                # Map this ComplexModel onto the user/address join.
                user_t = meta.tables['user']
                address_t = meta.tables['address']

                cls._main_t = user_t.join(address_t)

                cls._properties = {
                    'id': address_t.c.id,
                    'user_name': user_t.c.name,
                    'address': address_t.c.email,
                }

                cls._mapper = mapper(cls, cls._main_t,
                    include_properties=cls._properties.values(),
                    properties=cls._properties,
                    primary_key=[address_t.c.id]
                )

        AddressDetail.mapper(self.metadata)

    def test_custom_mapper(self):
        """declarative_base(mapper=...) must use the supplied mapper."""
        class CustomMapper(Mapper):
            def __init__(self, class_, local_table, *args, **kwargs):
                super(CustomMapper, self).__init__(class_, local_table, *args,
                    **kwargs)

            # Do not configure primary keys to check that CustomerMapper is
            # actually used
            def _configure_pks(self):
                pass

        def custom_mapper(class_, local_table=None, *args, **params):
            return CustomMapper(class_, local_table, *args, **params)

        CustomDeclarativeBase = declarative_base(metadata=self.metadata,
                                                   mapper=custom_mapper)

        class User(CustomDeclarativeBase):
            __tablename__ = 'user'

            # CustomMapper should not fail because of no primary key
            name = Column(sqlalchemy.String(50))

        self.metadata.create_all(self.engine)

    def test_rpc(self):
        """End-to-end: persist rows, expose them via an HttpRpc->Soap11
        service, and verify the serialized response."""
        import sqlalchemy
        from sqlalchemy import sql

        class KeyValuePair(TableModel, self.DeclarativeBase):
            __tablename__ = 'key_value_store'
            __namespace__ = 'punk'

            key = Column(sqlalchemy.String(100), nullable=False, primary_key=True)
            value = Column(sqlalchemy.String, nullable=False)

        self.metadata.create_all(self.engine)

        import hashlib

        session = self.Session()

        # Seed nine rows keyed "1".."9" with md5(key) as the value.
        for i in range(1, 10):
            key = str(i).encode()
            m = hashlib.md5()
            m.update(key)
            value = m.hexdigest()

            session.add(KeyValuePair(key=key, value=value))

        session.commit()

        from spyne.service import Service
        from spyne.model.complex import Array
        from spyne.model.primitive import String

        class Service(Service):
            @rpc(String(max_occurs='unbounded'),
                    _returns=Array(KeyValuePair),
                    _in_variable_names={
                        'keys': 'key'
                    }
                )
            def get_values(ctx, keys):
                session = self.Session()

                return session.query(KeyValuePair).filter(sql.and_(
                    KeyValuePair.key.in_(keys)
                )).order_by(KeyValuePair.key)

        application = Application([Service],
            in_protocol=HttpRpc(),
            out_protocol=Soap11(),
            name='Service', tns='tns'
        )
        server = WsgiApplication(application)

        # Simulate GET /get_values?key=1&key=2&key=3 through the WSGI stack.
        initial_ctx = WsgiMethodContext(server, {
            'REQUEST_METHOD': 'GET',
            'QUERY_STRING': 'key=1&key=2&key=3',
            'PATH_INFO': '/get_values',
            'SERVER_NAME': 'localhost',
        }, 'some-content-type')

        ctx, = server.generate_contexts(initial_ctx)
        server.get_in_object(ctx)
        server.get_out_object(ctx)
        server.get_out_string(ctx)

        # Walk the serialized response and compare each pair to md5(key).
        i = 0
        for e in ctx.out_document[0][0][0]:
            i+=1
            key = str(i)
            m = hashlib.md5()
            # NOTE(review): key is str here; on Python 3 md5.update()
            # requires bytes — confirm the target runtime.
            m.update(key)
            value = m.hexdigest()

            _key = e.find('{%s}key' % KeyValuePair.get_namespace())
            _value = e.find('{%s}value' % KeyValuePair.get_namespace())

            print((_key, _key.text))
            print((_value, _value.text))

            self.assertEqual(_key.text, key)
            self.assertEqual(_value.text, value)

    def test_late_mapping(self):
        """__table__-based (late) mapping still yields spyne type info."""
        import sqlalchemy

        user_t = Table('user', self.metadata,
             Column('id', sqlalchemy.Integer, primary_key=True),
             Column('name', sqlalchemy.String),
        )

        class User(TableModel, self.DeclarativeBase):
            __table__ = user_t

        self.assertEqual(User._type_info['id'].__type_name__, 'integer')
        self.assertEqual(User._type_info['name'].__type_name__, 'string')


    def test_default_ctor(self):
        """The generated keyword constructor works regardless of the order
        of TableModel vs the declarative base in the MRO."""
        import sqlalchemy

        class User1Mixin(object):
            id = Column(sqlalchemy.Integer, primary_key=True)
            name = Column(sqlalchemy.String(256))

        class User1(self.DeclarativeBase, TableModel, User1Mixin):
            __tablename__ = 'spyne_user1'

            mail = Column(sqlalchemy.String(256))

        u = User1(id=1, mail="*****@*****.**", name='dummy')

        assert u.id == 1
        assert u.mail == "*****@*****.**"
        assert u.name == "dummy"

        class User2Mixin(object):
            id = Column(sqlalchemy.Integer, primary_key=True)
            name = Column(sqlalchemy.String(256))

        class User2(TableModel, self.DeclarativeBase, User2Mixin):
            __tablename__ = 'spyne_user2'

            mail = Column(sqlalchemy.String(256))

        u = User2(id=1, mail="*****@*****.**", name='dummy')

        assert u.id == 1
        assert u.mail == "*****@*****.**"
        assert u.name == "dummy"

    def test_mixin_inheritance(self):
        """Columns inherited from a plain mixin appear in _type_info."""
        import sqlalchemy

        class UserMixin(object):
            id = Column(sqlalchemy.Integer, primary_key=True)
            name = Column(sqlalchemy.String(256))

        class User(self.DeclarativeBase, TableModel, UserMixin):
            __tablename__ = 'spyne_user_mixin'

            mail = Column(sqlalchemy.String(256))

        assert 'mail' in User._type_info
        assert 'name' in User._type_info
        assert 'id' in User._type_info

    def test_same_table_inheritance(self):
        """Single-table inheritance exposes parent and child columns."""
        import sqlalchemy

        class User(self.DeclarativeBase, TableModel):
            __tablename__ = 'spyne_user_sti'

            id = Column(sqlalchemy.Integer, primary_key=True)
            name = Column(sqlalchemy.String(256))

        class UserMail(User):
            mail = Column(sqlalchemy.String(256))

        assert 'mail' in UserMail._type_info
        assert 'name' in UserMail._type_info
        assert 'id' in UserMail._type_info

    def test_relationship_array(self):
        """relationship() collections surface as Array types in spyne."""
        import sqlalchemy
        class Permission(TableModel, self.DeclarativeBase):
            __tablename__ = 'spyne_user_permission'

            id = Column(sqlalchemy.Integer, primary_key=True)
            user_id = Column(sqlalchemy.Integer, ForeignKey("spyne_user.id"))


        class User(TableModel, self.DeclarativeBase):
            __tablename__ = 'spyne_user'

            id = Column(sqlalchemy.Integer, primary_key=True)
            permissions = relationship(Permission)

        class Address(self.DeclarativeBase, TableModel):
            __tablename__ = 'spyne_address'

            id = Column(sqlalchemy.Integer, primary_key=True)
            address = Column(sqlalchemy.String(256))
            user_id = Column(sqlalchemy.Integer, ForeignKey(User.id), nullable=False)
            user = relationship(User)

        assert 'permissions' in User._type_info
        assert issubclass(User._type_info['permissions'], Array)
        # NOTE(review): indexing .values()[0] is Python 2 only; on Python 3
        # dict views are not subscriptable — confirm the target runtime.
        assert issubclass(User._type_info['permissions']._type_info.values()[0], Permission)
Example #43
0
    def test_orm_many_engines(self):
        """Memory-leak check: create and dispose a fresh engine per
        iteration while reusing long-lived mappers with small compiled
        caches; profile_memory asserts memory stays flat."""
        metadata = MetaData(self.engine)

        table1 = Table(
            "mytable",
            metadata,
            Column(
                "col1",
                Integer,
                primary_key=True,
                test_needs_autoincrement=True,
            ),
            Column("col2", String(30)),
        )

        table2 = Table(
            "mytable2",
            metadata,
            Column(
                "col1",
                Integer,
                primary_key=True,
                test_needs_autoincrement=True,
            ),
            Column("col2", String(30)),
            Column("col3", Integer, ForeignKey("mytable.col1")),
        )

        metadata.create_all()

        # Mappers are created once, outside go(); the bounded compiled
        # cache (_compiled_cache_size=50) keeps per-engine entries in check.
        m1 = mapper(
            A,
            table1,
            properties={
                "bs":
                relationship(B, cascade="all, delete", order_by=table2.c.col1)
            },
            _compiled_cache_size=50,
        )
        m2 = mapper(B, table2, _compiled_cache_size=50)

        @profile_memory()
        def go():
            # A brand-new engine each iteration; it must not leak after
            # dispose().
            engine = engines.testing_engine(
                options={
                    "logging_name": "FOO",
                    "pool_logging_name": "BAR",
                    "use_reaper": False,
                })
            with Session(engine) as sess:
                a1 = A(col2="a1")
                a2 = A(col2="a2")
                a3 = A(col2="a3")
                a1.bs.append(B(col2="b1"))
                a1.bs.append(B(col2="b2"))
                a3.bs.append(B(col2="b3"))
                for x in [a1, a2, a3]:
                    sess.add(x)
                sess.commit()

                alist = sess.query(A).order_by(A.col1).all()
                eq_(
                    [
                        A(col2="a1", bs=[B(col2="b1"),
                                         B(col2="b2")]),
                        A(col2="a2", bs=[]),
                        A(col2="a3", bs=[B(col2="b3")]),
                    ],
                    alist,
                )

                for a in alist:
                    sess.delete(a)
                sess.commit()

            engine.dispose()

        go()

        metadata.drop_all()
        del m1, m2
        assert_no_mappers()
Example #44
0
    def test_with_manytomany(self):
        """Memory-leak check: repeatedly map, use and discard a
        many-to-many relationship; profile_memory asserts memory stays
        flat across iterations of go()."""
        metadata = MetaData()

        table1 = Table(
            "mytable",
            metadata,
            Column(
                "col1",
                Integer,
                primary_key=True,
                test_needs_autoincrement=True,
            ),
            Column("col2", String(30)),
        )

        table2 = Table(
            "mytable2",
            metadata,
            Column(
                "col1",
                Integer,
                primary_key=True,
                test_needs_autoincrement=True,
            ),
            Column("col2", String(30)),
        )

        # Association table linking mytable and mytable2.
        table3 = Table(
            "t1tot2",
            metadata,
            Column("t1", Integer, ForeignKey("mytable.col1")),
            Column("t2", Integer, ForeignKey("mytable2.col1")),
        )

        @profile_memory()
        def go():
            # Classes are redefined each iteration so their mappers can be
            # garbage collected between runs.
            class A(fixtures.ComparableEntity):
                pass

            class B(fixtures.ComparableEntity):
                pass

            mapper(
                A,
                table1,
                properties={
                    "bs":
                    relationship(B,
                                 secondary=table3,
                                 backref="as",
                                 order_by=table3.c.t1)
                },
            )
            mapper(B, table2)

            sess = create_session(self.engine)
            a1 = A(col2="a1")
            a2 = A(col2="a2")
            b1 = B(col2="b1")
            b2 = B(col2="b2")
            a1.bs.append(b1)
            a2.bs.append(b2)
            for x in [a1, a2]:
                sess.add(x)
            sess.flush()
            sess.expunge_all()

            alist = sess.query(A).order_by(A.col1).all()
            eq_([A(bs=[B(col2="b1")]), A(bs=[B(col2="b2")])], alist)

            for a in alist:
                sess.delete(a)
            sess.flush()

            # mappers necessarily find themselves in the compiled cache,
            # so to allow them to be GC'ed clear out the cache
            self.engine.clear_compiled_cache()
            del B
            del A

        metadata.create_all(self.engine)
        try:
            go()
        finally:
            metadata.drop_all(self.engine)
        assert_no_mappers()
Example #45
0
class InsertTest(fixtures.TestBase, AssertsExecutionResults):

    __only_on__ = "postgresql"
    __backend__ = True

    def setup_test(self):
        # Fresh MetaData per test; each test defines its own tables on it.
        self.metadata = MetaData()

    def teardown_test(self):
        # Drop whatever tables the test created on self.metadata.
        with testing.db.begin() as conn:
            self.metadata.drop_all(conn)

    @testing.combinations((False, ), (True, ))
    def test_foreignkey_missing_insert(self, implicit_returning):
        """An integer FK primary key must not get SERIAL behavior.

        Inserting with no value for the pk column must fail with a
        NOT NULL style error, not with a missing-sequence error.
        """
        engine = engines.testing_engine(
            options={"implicit_returning": implicit_returning})

        Table("t1", self.metadata, Column("id", Integer, primary_key=True))
        child = Table(
            "t2",
            self.metadata,
            Column("id", Integer, ForeignKey("t1.id"), primary_key=True),
        )

        self.metadata.create_all(engine)

        # we want 'null value in column "id" violates not-null constraint'
        # (IntegrityError on psycopg2, ProgrammingError on pg8000), and NOT
        # 'relation "t2_id_seq" does not exist' -- the latter would indicate
        # autoincrement behavior, which must not apply here because of the
        # foreign key.
        with expect_warnings(".*has no Python-side or server-side default.*"):
            with engine.begin() as conn:
                assert_raises(
                    (exc.IntegrityError, exc.ProgrammingError),
                    conn.execute,
                    child.insert(),
                )

    def test_sequence_insert(self):
        """INSERT against an explicit Sequence, without RETURNING."""
        tbl = Table(
            "testtable",
            self.metadata,
            Column("id", Integer, Sequence("my_seq"), primary_key=True),
            Column("data", String(30)),
        )
        self.metadata.create_all(testing.db)
        self._assert_data_with_sequence(tbl, "my_seq")

    @testing.requires.returning
    def test_sequence_returning_insert(self):
        """INSERT against an explicit Sequence, with RETURNING."""
        tbl = Table(
            "testtable",
            self.metadata,
            Column("id", Integer, Sequence("my_seq"), primary_key=True),
            Column("data", String(30)),
        )
        self.metadata.create_all(testing.db)
        self._assert_data_with_sequence_returning(tbl, "my_seq")

    def test_opt_sequence_insert(self):
        """An optional Sequence on PG falls back to SERIAL autoincrement."""
        seq = Sequence("my_seq", optional=True)
        tbl = Table(
            "testtable",
            self.metadata,
            Column("id", Integer, seq, primary_key=True),
            Column("data", String(30)),
        )
        self.metadata.create_all(testing.db)
        self._assert_data_autoincrement(tbl)

    @testing.requires.returning
    def test_opt_sequence_returning_insert(self):
        """Optional Sequence plus RETURNING behaves as plain autoincrement."""
        seq = Sequence("my_seq", optional=True)
        tbl = Table(
            "testtable",
            self.metadata,
            Column("id", Integer, seq, primary_key=True),
            Column("data", String(30)),
        )
        self.metadata.create_all(testing.db)
        self._assert_data_autoincrement_returning(tbl)

    def test_autoincrement_insert(self):
        """Plain Integer primary key: SERIAL autoincrement, no RETURNING."""
        tbl = Table(
            "testtable",
            self.metadata,
            Column("id", Integer, primary_key=True),
            Column("data", String(30)),
        )
        self.metadata.create_all(testing.db)
        self._assert_data_autoincrement(tbl)

    @testing.requires.returning
    def test_autoincrement_returning_insert(self):
        """Plain Integer primary key: SERIAL autoincrement, with RETURNING."""
        tbl = Table(
            "testtable",
            self.metadata,
            Column("id", Integer, primary_key=True),
            Column("data", String(30)),
        )
        self.metadata.create_all(testing.db)
        self._assert_data_autoincrement_returning(tbl)

    def test_noautoincrement_insert(self):
        """autoincrement=False: pk values must always be supplied."""
        tbl = Table(
            "testtable",
            self.metadata,
            Column("id", Integer, primary_key=True, autoincrement=False),
            Column("data", String(30)),
        )
        self.metadata.create_all(testing.db)
        self._assert_data_noautoincrement(tbl)

    def _assert_data_autoincrement(self, table):
        """Run a fixed series of inserts without implicit RETURNING,
        asserting both the SQL emitted and the resulting rows; then repeat
        the whole series against a reflected copy of the table.
        """
        eng = engines.testing_engine(options={"implicit_returning": False})

        with self.sql_execution_asserter(eng) as asserter:
            with eng.begin() as conn:
                # single insert, explicit primary key
                result = conn.execute(table.insert(), {"id": 30, "data": "d1"})
                eq_(result.inserted_primary_key, (30, ))

                # single insert, id pre-fetched client side
                result = conn.execute(table.insert(), {"data": "d2"})
                eq_(result.inserted_primary_key, (1, ))

                # executemany with explicit ids
                conn.execute(
                    table.insert(),
                    [{"id": 31, "data": "d3"}, {"id": 32, "data": "d4"}],
                )

                # executemany relying on SERIAL
                conn.execute(table.insert(), [{"data": "d5"}, {"data": "d6"}])

                # single inline execute, explicit id
                conn.execute(table.insert().inline(), {"id": 33, "data": "d7"})

                # single inline execute relying on SERIAL
                conn.execute(table.insert().inline(), {"data": "d8"})

        asserter.assert_(
            DialectSQL(
                "INSERT INTO testtable (id, data) VALUES (:id, :data)",
                {"id": 30, "data": "d1"},
            ),
            DialectSQL(
                "INSERT INTO testtable (id, data) VALUES (:id, :data)",
                {"id": 1, "data": "d2"},
            ),
            DialectSQL(
                "INSERT INTO testtable (id, data) VALUES (:id, :data)",
                [{"id": 31, "data": "d3"}, {"id": 32, "data": "d4"}],
            ),
            DialectSQL(
                "INSERT INTO testtable (data) VALUES (:data)",
                [{"data": "d5"}, {"data": "d6"}],
            ),
            DialectSQL(
                "INSERT INTO testtable (id, data) VALUES (:id, :data)",
                [{"id": 33, "data": "d7"}],
            ),
            DialectSQL(
                "INSERT INTO testtable (data) VALUES (:data)",
                [{"data": "d8"}],
            ),
        )

        with eng.begin() as conn:
            eq_(
                conn.execute(table.select()).fetchall(),
                [
                    (30, "d1"),
                    (1, "d2"),
                    (31, "d3"),
                    (32, "d4"),
                    (2, "d5"),
                    (3, "d6"),
                    (33, "d7"),
                    (4, "d8"),
                ],
            )

            conn.execute(table.delete())

        # repeat the same series of events against a reflected version of
        # the table
        reflected = MetaData()
        table = Table(table.name, reflected, autoload_with=eng)

        with self.sql_execution_asserter(eng) as asserter:
            with eng.begin() as conn:
                conn.execute(table.insert(), {"id": 30, "data": "d1"})
                result = conn.execute(table.insert(), {"data": "d2"})
                eq_(result.inserted_primary_key, (5, ))
                conn.execute(
                    table.insert(),
                    [{"id": 31, "data": "d3"}, {"id": 32, "data": "d4"}],
                )
                conn.execute(table.insert(), [{"data": "d5"}, {"data": "d6"}])
                conn.execute(table.insert().inline(), {"id": 33, "data": "d7"})
                conn.execute(table.insert().inline(), {"data": "d8"})

        asserter.assert_(
            DialectSQL(
                "INSERT INTO testtable (id, data) VALUES (:id, :data)",
                {"id": 30, "data": "d1"},
            ),
            DialectSQL(
                "INSERT INTO testtable (id, data) VALUES (:id, :data)",
                {"id": 5, "data": "d2"},
            ),
            DialectSQL(
                "INSERT INTO testtable (id, data) VALUES (:id, :data)",
                [{"id": 31, "data": "d3"}, {"id": 32, "data": "d4"}],
            ),
            DialectSQL(
                "INSERT INTO testtable (data) VALUES (:data)",
                [{"data": "d5"}, {"data": "d6"}],
            ),
            DialectSQL(
                "INSERT INTO testtable (id, data) VALUES (:id, :data)",
                [{"id": 33, "data": "d7"}],
            ),
            DialectSQL(
                "INSERT INTO testtable (data) VALUES (:data)",
                [{"data": "d8"}],
            ),
        )
        with eng.begin() as conn:
            eq_(
                conn.execute(table.select()).fetchall(),
                [
                    (30, "d1"),
                    (5, "d2"),
                    (31, "d3"),
                    (32, "d4"),
                    (6, "d5"),
                    (7, "d6"),
                    (33, "d7"),
                    (8, "d8"),
                ],
            )
            conn.execute(table.delete())

    def _assert_data_autoincrement_returning(self, table):
        """Same series of inserts as _assert_data_autoincrement, but with
        implicit RETURNING enabled: the no-id single insert is expected to
        use "RETURNING testtable.id" instead of pre-fetching the id.
        """
        eng = engines.testing_engine(options={"implicit_returning": True})

        with self.sql_execution_asserter(eng) as asserter:
            with eng.begin() as conn:

                # single insert, explicit primary key
                result = conn.execute(table.insert(), {"id": 30, "data": "d1"})
                eq_(result.inserted_primary_key, (30, ))

                # single insert, id returned via RETURNING
                result = conn.execute(table.insert(), {"data": "d2"})
                eq_(result.inserted_primary_key, (1, ))

                # executemany with explicit ids
                conn.execute(
                    table.insert(),
                    [{"id": 31, "data": "d3"}, {"id": 32, "data": "d4"}],
                )

                # executemany relying on SERIAL
                conn.execute(table.insert(), [{"data": "d5"}, {"data": "d6"}])

                # single inline execute, explicit id
                conn.execute(table.insert().inline(), {"id": 33, "data": "d7"})

                # single inline execute relying on SERIAL
                conn.execute(table.insert().inline(), {"data": "d8"})

        asserter.assert_(
            DialectSQL(
                "INSERT INTO testtable (id, data) VALUES (:id, :data)",
                {"id": 30, "data": "d1"},
            ),
            DialectSQL(
                "INSERT INTO testtable (data) VALUES (:data) RETURNING "
                "testtable.id",
                {"data": "d2"},
            ),
            DialectSQL(
                "INSERT INTO testtable (id, data) VALUES (:id, :data)",
                [{"id": 31, "data": "d3"}, {"id": 32, "data": "d4"}],
            ),
            DialectSQL(
                "INSERT INTO testtable (data) VALUES (:data)",
                [{"data": "d5"}, {"data": "d6"}],
            ),
            DialectSQL(
                "INSERT INTO testtable (id, data) VALUES (:id, :data)",
                [{"id": 33, "data": "d7"}],
            ),
            DialectSQL(
                "INSERT INTO testtable (data) VALUES (:data)",
                [{"data": "d8"}],
            ),
        )

        with eng.begin() as conn:
            eq_(
                conn.execute(table.select()).fetchall(),
                [
                    (30, "d1"),
                    (1, "d2"),
                    (31, "d3"),
                    (32, "d4"),
                    (2, "d5"),
                    (3, "d6"),
                    (33, "d7"),
                    (4, "d8"),
                ],
            )
            conn.execute(table.delete())

        # repeat the same series of events against a reflected version of
        # the table
        reflected = MetaData()
        table = Table(table.name, reflected, autoload_with=eng)

        with self.sql_execution_asserter(eng) as asserter:
            with eng.begin() as conn:
                conn.execute(table.insert(), {"id": 30, "data": "d1"})
                result = conn.execute(table.insert(), {"data": "d2"})
                eq_(result.inserted_primary_key, (5, ))
                conn.execute(
                    table.insert(),
                    [{"id": 31, "data": "d3"}, {"id": 32, "data": "d4"}],
                )
                conn.execute(table.insert(), [{"data": "d5"}, {"data": "d6"}])
                conn.execute(table.insert().inline(), {"id": 33, "data": "d7"})
                conn.execute(table.insert().inline(), {"data": "d8"})

        asserter.assert_(
            DialectSQL(
                "INSERT INTO testtable (id, data) VALUES (:id, :data)",
                {"id": 30, "data": "d1"},
            ),
            DialectSQL(
                "INSERT INTO testtable (data) VALUES (:data) RETURNING "
                "testtable.id",
                {"data": "d2"},
            ),
            DialectSQL(
                "INSERT INTO testtable (id, data) VALUES (:id, :data)",
                [{"id": 31, "data": "d3"}, {"id": 32, "data": "d4"}],
            ),
            DialectSQL(
                "INSERT INTO testtable (data) VALUES (:data)",
                [{"data": "d5"}, {"data": "d6"}],
            ),
            DialectSQL(
                "INSERT INTO testtable (id, data) VALUES (:id, :data)",
                [{"id": 33, "data": "d7"}],
            ),
            DialectSQL(
                "INSERT INTO testtable (data) VALUES (:data)",
                [{"data": "d8"}],
            ),
        )

        with eng.begin() as conn:
            eq_(
                conn.execute(table.select()).fetchall(),
                [
                    (30, "d1"),
                    (5, "d2"),
                    (31, "d3"),
                    (32, "d4"),
                    (6, "d5"),
                    (7, "d6"),
                    (33, "d7"),
                    (8, "d8"),
                ],
            )
            conn.execute(table.delete())

    def _assert_data_with_sequence(self, table, seqname):
        """Run the insert series against a table with an explicit Sequence,
        implicit RETURNING disabled: the id is either pre-fetched via
        "select nextval(...)" or rendered inline as nextval(...).
        """
        eng = engines.testing_engine(options={"implicit_returning": False})

        with self.sql_execution_asserter(eng) as asserter:
            with eng.begin() as conn:
                conn.execute(table.insert(), {"id": 30, "data": "d1"})
                conn.execute(table.insert(), {"data": "d2"})
                conn.execute(
                    table.insert(),
                    [{"id": 31, "data": "d3"}, {"id": 32, "data": "d4"}],
                )
                conn.execute(table.insert(), [{"data": "d5"}, {"data": "d6"}])
                conn.execute(table.insert().inline(), {"id": 33, "data": "d7"})
                conn.execute(table.insert().inline(), {"data": "d8"})

        asserter.assert_(
            DialectSQL(
                "INSERT INTO testtable (id, data) VALUES (:id, :data)",
                {"id": 30, "data": "d1"},
            ),
            # the second insert pre-fetches its id from the sequence
            CursorSQL("select nextval('my_seq')", consume_statement=False),
            DialectSQL(
                "INSERT INTO testtable (id, data) VALUES (:id, :data)",
                {"id": 1, "data": "d2"},
            ),
            DialectSQL(
                "INSERT INTO testtable (id, data) VALUES (:id, :data)",
                [{"id": 31, "data": "d3"}, {"id": 32, "data": "d4"}],
            ),
            DialectSQL(
                "INSERT INTO testtable (id, data) VALUES (nextval('%s'), "
                ":data)" % seqname,
                [{"data": "d5"}, {"data": "d6"}],
            ),
            DialectSQL(
                "INSERT INTO testtable (id, data) VALUES (:id, :data)",
                [{"id": 33, "data": "d7"}],
            ),
            DialectSQL(
                "INSERT INTO testtable (id, data) VALUES (nextval('%s'), "
                ":data)" % seqname,
                [{"data": "d8"}],
            ),
        )
        with eng.begin() as conn:
            eq_(
                conn.execute(table.select()).fetchall(),
                [
                    (30, "d1"),
                    (1, "d2"),
                    (31, "d3"),
                    (32, "d4"),
                    (2, "d5"),
                    (3, "d6"),
                    (33, "d7"),
                    (4, "d8"),
                ],
            )

        # cant test reflection here since the Sequence must be
        # explicitly specified

    def _assert_data_with_sequence_returning(self, table, seqname):
        """Run the insert series against a table with an explicit Sequence,
        implicit RETURNING enabled: the no-id single insert renders
        nextval(...) inline and fetches the id via RETURNING.
        """
        eng = engines.testing_engine(options={"implicit_returning": True})

        with self.sql_execution_asserter(eng) as asserter:
            with eng.begin() as conn:
                conn.execute(table.insert(), {"id": 30, "data": "d1"})
                conn.execute(table.insert(), {"data": "d2"})
                conn.execute(
                    table.insert(),
                    [{"id": 31, "data": "d3"}, {"id": 32, "data": "d4"}],
                )
                conn.execute(table.insert(), [{"data": "d5"}, {"data": "d6"}])
                conn.execute(table.insert().inline(), {"id": 33, "data": "d7"})
                conn.execute(table.insert().inline(), {"data": "d8"})

        asserter.assert_(
            DialectSQL(
                "INSERT INTO testtable (id, data) VALUES (:id, :data)",
                {"id": 30, "data": "d1"},
            ),
            DialectSQL(
                "INSERT INTO testtable (id, data) VALUES "
                "(nextval('my_seq'), :data) RETURNING testtable.id",
                {"data": "d2"},
            ),
            DialectSQL(
                "INSERT INTO testtable (id, data) VALUES (:id, :data)",
                [{"id": 31, "data": "d3"}, {"id": 32, "data": "d4"}],
            ),
            DialectSQL(
                "INSERT INTO testtable (id, data) VALUES (nextval('%s'), "
                ":data)" % seqname,
                [{"data": "d5"}, {"data": "d6"}],
            ),
            DialectSQL(
                "INSERT INTO testtable (id, data) VALUES (:id, :data)",
                [{"id": 33, "data": "d7"}],
            ),
            DialectSQL(
                "INSERT INTO testtable (id, data) VALUES (nextval('%s'), "
                ":data)" % seqname,
                [{"data": "d8"}],
            ),
        )

        with eng.begin() as conn:
            eq_(
                conn.execute(table.select()).fetchall(),
                [
                    (30, "d1"),
                    (1, "d2"),
                    (31, "d3"),
                    (32, "d4"),
                    (2, "d5"),
                    (3, "d6"),
                    (33, "d7"),
                    (4, "d8"),
                ],
            )

            # cant test reflection here since the Sequence must be
            # explicitly specified

    def _assert_data_noautoincrement(self, table):
        """With autoincrement=False, inserts that omit the primary key must
        fail (NOT NULL / missing default), while explicit ids succeed; the
        series is then repeated against a reflected copy of the table.
        """
        eng = engines.testing_engine(options={"implicit_returning": False})

        # turning off the cache because we are checking for compile-time
        # warnings
        eng = eng.execution_options(compiled_cache=None)

        def assert_id_required(params):
            # an insert omitting "id" must warn about the missing default
            # and then raise on execution
            with eng.begin() as conn:
                with expect_warnings(
                        ".*has no Python-side or server-side default.*"):
                    assert_raises(
                        (exc.IntegrityError, exc.ProgrammingError),
                        conn.execute,
                        table.insert(),
                        params,
                    )

        with eng.begin() as conn:
            conn.execute(table.insert(), {"id": 30, "data": "d1"})

        # each failure attempt runs in its own transaction, single-row and
        # executemany forms, each tried twice as in the original sequence
        assert_id_required({"data": "d2"})
        assert_id_required([{"data": "d2"}, {"data": "d3"}])
        assert_id_required({"data": "d2"})
        assert_id_required([{"data": "d2"}, {"data": "d3"}])

        with eng.begin() as conn:
            conn.execute(
                table.insert(),
                [{"id": 31, "data": "d2"}, {"id": 32, "data": "d3"}],
            )
            conn.execute(table.insert().inline(), {"id": 33, "data": "d4"})
            eq_(
                conn.execute(table.select()).fetchall(),
                [(30, "d1"), (31, "d2"), (32, "d3"), (33, "d4")],
            )
            conn.execute(table.delete())

        # test the same series of events using a reflected version of
        # the table (the helper closure picks up the rebound "table")
        reflected = MetaData()
        table = Table(table.name, reflected, autoload_with=eng)
        with eng.begin() as conn:
            conn.execute(table.insert(), {"id": 30, "data": "d1"})

        assert_id_required({"data": "d2"})
        assert_id_required([{"data": "d2"}, {"data": "d3"}])

        with eng.begin() as conn:
            conn.execute(
                table.insert(),
                [{"id": 31, "data": "d2"}, {"id": 32, "data": "d3"}],
            )
            conn.execute(table.insert().inline(), {"id": 33, "data": "d4"})
            eq_(
                conn.execute(table.select()).fetchall(),
                [(30, "d1"), (31, "d2"), (32, "d3"), (33, "d4")],
            )
Example #46
0
    def test_mapper_reset(self):
        """Profile memory across repeated mapper setup/teardown: mapping,
        flushing, and clearing mappers in a loop must not grow memory.

        NOTE(review): A and B appear to be placeholder classes defined at
        module level elsewhere in this file -- confirm.
        """
        metadata = MetaData()

        table1 = Table(
            "mytable",
            metadata,
            Column(
                "col1",
                Integer,
                primary_key=True,
                test_needs_autoincrement=True,
            ),
            Column("col2", String(30)),
        )

        table2 = Table(
            "mytable2",
            metadata,
            Column(
                "col1",
                Integer,
                primary_key=True,
                test_needs_autoincrement=True,
            ),
            Column("col2", String(30)),
            Column("col3", Integer, ForeignKey("mytable.col1")),
        )

        # profile_memory runs go() repeatedly and asserts memory stability
        @profile_memory()
        def go():
            # A one-to-many B, ordered by B's primary key
            mapper(
                A,
                table1,
                properties={"bs": relationship(B, order_by=table2.c.col1)},
            )
            mapper(B, table2)

            sess = create_session(self.engine)
            a1 = A(col2="a1")
            a2 = A(col2="a2")
            a3 = A(col2="a3")
            a1.bs.append(B(col2="b1"))
            a1.bs.append(B(col2="b2"))
            a3.bs.append(B(col2="b3"))
            for x in [a1, a2, a3]:
                sess.add(x)
            sess.flush()
            sess.expunge_all()

            # round trip: reload and verify the mapped collections
            alist = sess.query(A).order_by(A.col1).all()
            eq_(
                [
                    A(col2="a1", bs=[B(col2="b1"), B(col2="b2")]),
                    A(col2="a2", bs=[]),
                    A(col2="a3", bs=[B(col2="b3")]),
                ],
                alist,
            )

            for a in alist:
                sess.delete(a)
            sess.flush()
            sess.close()
            # tear down the mappers so the next iteration starts clean
            clear_mappers()

        metadata.create_all(self.engine)
        try:
            go()
        finally:
            metadata.drop_all(self.engine)
        assert_no_mappers()
Example #47
0
def create_db_tables():
    """Create the FeatureRequest, Client and Area tables on the RDS MySQL
    database and seed the Client/Area lookup tables.

    Connection settings are read from the RDS_DB_NAME, RDS_USERNAME,
    RDS_PASSWORD, RDS_HOSTNAME and RDS_PORT environment variables; a
    KeyError is raised if any of them is missing.
    """
    name = os.environ['RDS_DB_NAME']
    user = os.environ['RDS_USERNAME']
    password = os.environ['RDS_PASSWORD']
    host = os.environ['RDS_HOSTNAME']
    port = os.environ['RDS_PORT']
    engine = create_engine('mysql://' + user + ':' + password + '@' + host +
                           ':' + port + '/' + name)
    metadata = MetaData(engine)

    # main table; clientId/areaId reference the lookup tables below
    Table(
        "FeatureRequest", metadata,
        Column('id',
               Integer,
               primary_key=True,
               nullable=False,
               autoincrement=True),
        Column('title', String(64), nullable=False),
        Column('description', Text, nullable=False),
        Column('clientId', Integer, nullable=False),
        Column('priority', Integer, nullable=False),
        Column('target', Date, nullable=False),
        Column('areaId', Integer, nullable=False))

    client_table = Table(
        "Client", metadata,
        Column('id',
               Integer,
               primary_key=True,
               nullable=False,
               autoincrement=True),
        Column('name', String(32), nullable=False))

    area_table = Table(
        "Area", metadata,
        Column('id',
               Integer,
               primary_key=True,
               nullable=False,
               autoincrement=True),
        Column('name', String(32), nullable=False))

    metadata.create_all()

    # use a context manager so the connection is always returned to the
    # pool, even if an insert fails (previously the connection leaked)
    with engine.connect() as conn:
        conn.execute(client_table.insert(), [
            {'name': 'Client A'},
            {'name': 'Client B'},
            {'name': 'Client C'},
        ])
        conn.execute(area_table.insert(), [
            {'name': 'Policies'},
            {'name': 'Billing'},
            {'name': 'Claims'},
            {'name': 'Reports'},
        ])
Example #48
0
    def test_session(self):
        """Profile memory across repeated Session usage with a cascading
        one-to-many relationship: creating, loading and deleting objects in
        a loop must not grow memory (mappers are configured once, outside
        the profiled function).
        """
        metadata = MetaData()

        table1 = Table(
            "mytable",
            metadata,
            Column(
                "col1",
                Integer,
                primary_key=True,
                test_needs_autoincrement=True,
            ),
            Column("col2", String(30)),
        )

        table2 = Table(
            "mytable2",
            metadata,
            Column(
                "col1",
                Integer,
                primary_key=True,
                test_needs_autoincrement=True,
            ),
            Column("col2", String(30)),
            Column("col3", Integer, ForeignKey("mytable.col1")),
        )

        metadata.create_all(self.engine)

        # A one-to-many B with delete cascade; deleting an A removes its Bs
        m1 = mapper(
            A,
            table1,
            properties={
                "bs":
                relationship(B, cascade="all, delete", order_by=table2.c.col1)
            },
        )
        m2 = mapper(B, table2)

        # profile_memory runs go() repeatedly and asserts memory stability
        @profile_memory()
        def go():
            with Session(self.engine) as sess:
                a1 = A(col2="a1")
                a2 = A(col2="a2")
                a3 = A(col2="a3")
                a1.bs.append(B(col2="b1"))
                a1.bs.append(B(col2="b2"))
                a3.bs.append(B(col2="b3"))
                for x in [a1, a2, a3]:
                    sess.add(x)
                sess.commit()

                # round trip: reload and verify the mapped collections
                alist = sess.query(A).order_by(A.col1).all()
                eq_(
                    [
                        A(col2="a1", bs=[B(col2="b1"),
                                         B(col2="b2")]),
                        A(col2="a2", bs=[]),
                        A(col2="a3", bs=[B(col2="b3")]),
                    ],
                    alist,
                )

                for a in alist:
                    sess.delete(a)
                sess.commit()

        go()

        metadata.drop_all(self.engine)
        del m1, m2
        assert_no_mappers()
# MetaData holds table definitions, connection info and other schema data;
# it is a single object shared across multiple tables.
# NOTE(review): `metadata` and `engine` are assumed to be defined earlier
# in this file -- confirm.

# Create the database table -------------------------------------------------
student = Table(
    'student',
    metadata,
    Column('id', Integer, primary_key=True),
    Column(
        'name',
        String(50),
    ),
    Column('age', Integer),
    Column('address', String(10)),
)

metadata.create_all(engine)  # creates the tables; this is a safe operation that first checks whether each table already exists.

# Create a session: sessionmaker builds a Session factory bound to the
# engine, and calling the factory instantiates a Session object.
DBSession = sessionmaker(bind=engine)
session = DBSession()

# Add data (declarative base for the ORM model defined below)
Base = declarative_base()


class Student(Base):
    __tablename__ = 'student'
    id = Column(Integer, primary_key=True)
    name = Column(String(100))
    age = Column(Integer)
    address = Column(String(100))
Example #50
0
def migrate(args):
    """Rebuild the target database from the configured metadata and import
    data from the configured dump/source.

    Reads ``metadata`` ("module:attr[.attr...]" path), ``dburi`` (target)
    and ``dumpfile`` (source) from the config file named by ``args.config``.
    Destructive: drops every existing table at the target first (after a
    confirmation prompt, unless ``args.yes`` is set).
    """
    config = load_config(args.config)

    # Resolve the "module:attr.attr" path to the target MetaData object.
    module_name, variable_name = config['metadata'].split(":")

    sys.path.insert(0, '')  # allow importing from the current directory
    meta_obj = importlib.import_module(module_name)
    for attr in variable_name.split("."):
        meta_obj = getattr(meta_obj, attr)

    from sqlalchemy import create_engine, MetaData, select

    target = config.get('dburi', None)
    if target is None:
        raise Exception(
            "No 'target' argument specified and no 'dburi' setting in config file."
        )

    data = config.get('dumpfile', None)
    if data is None:
        raise Exception(
            "No 'data' argument specified and no 'dumpfile' setting in config file."
        )

    if target == data:
        raise Exception("Data source and target are the same")

    # Destructive operation — require explicit confirmation unless --yes.
    prompt = ("Warning: any existing data at '%s' will be erased. Proceed? [y/n]"
              % target)
    if not (args.yes or 'y' == input(prompt)):
        return

    dst_engine = create_engine(target)

    # Wipe whatever tables currently exist at the destination.
    reflected = MetaData(bind=dst_engine)
    reflected.reflect()
    reflected.drop_all()

    # Build the schema described by the configured metadata.
    dst_metadata = meta_obj
    dst_metadata.create_all(dst_engine)

    # Load the source: a DB URI gets the SQLA reader, anything else is JSON.
    from .data import JSONDataSource, SQLADataSource
    src = SQLADataSource(data) if "://" in data else JSONDataSource(data)

    # Import data table by table, in dependency order.
    with dst_engine.connect() as dst_conn:
        for dst_table in dst_metadata.sorted_tables:
            if not src.has_table(dst_table.name):
                continue
            with utils.status("Migrating table '%s'" % dst_table.name):
                # Only copy columns present on both sides.
                src_cols = src.get_column_names(dst_table.name)
                common_cols = [
                    column.name for column in dst_table.columns
                    if column.name in src_cols
                ]
                rows = src.get_data(dst_table.name, common_cols)
                if rows:  # an empty executemany makes SQL complain
                    insert_query = dst_table.insert().compile(
                        bind=dst_engine, column_keys=common_cols)
                    dst_conn.execute(insert_query, rows)
# NOTE(review): `Session` and `test_engine` must be defined earlier in the file.
session = Session()

# Core-style schema: two tables linked by a foreign key.
metadata = MetaData()
users = Table('users', metadata,
    Column('id', Integer, primary_key=True),
    Column('name', String),
    Column('fullname', String),
)

addresses = Table('addresses', metadata,
    Column('id', Integer, primary_key=True),
    # Type None: the column copies its type from the referenced users.id column.
    Column('user_id', None, ForeignKey('users.id')),
    Column('email_address', String, nullable=False)
)
metadata.create_all(test_engine)

# Seed both tables with sample rows (the e-mail strings are redacted
# placeholders from the original source).
with test_engine.connect() as conn:
    conn.execute(users.insert(), [
        {'id': 1, 'name': 'jack'},
        {'id': 2, 'name': 'jill'},
        {'id': 3, 'name': 'sally'},
        {'id': 4, 'name': 'sue'},
    ])
    conn.execute(addresses.insert(), [
        {'user_id': 1, 'email_address' : '*****@*****.**'},
        {'user_id': 2, 'email_address' : '*****@*****.**'},
        {'user_id': 3, 'email_address' : '*****@*****.**'},
        {'user_id': 4, 'email_address' : '*****@*****.**'},
    ])
    # NOTE(review): `data` is built but never used in the visible code —
    # confirm whether an insert using it was dropped.
    data = ( {'user_id': 1, 'email_address' : '[email protected]\''},)
class CrawlerDb:
    """SQLite-backed store for a web crawler's URL queue and results.

    A single ``website`` table holds one row per URL with a crawled flag and
    a comma-separated string of scraped e-mail addresses.

    NOTE(review): this is Python 2 code (``print`` statements, ``unicode``)
    and relies on module-level names not visible here: ``DATABASE_NAME``,
    ``HTML_DIR``, ``urlparse``, ``os``.
    """
    def __init__(self):
        # Nothing is opened here; connect() must be called before any other method.
        self.connected = False

    def connect(self):
        """Open (or create) the SQLite database and ensure the schema exists."""

        self.engine = create_engine('sqlite:///' + DATABASE_NAME)
        self.connection = self.engine.connect()
        self.connected = True if self.connection else False
        self.metadata = MetaData()

        # Define the tables
        self.website_table = Table(
            'website',
            self.metadata,
            Column('id', Integer, primary_key=True),
            Column('url', Unicode, nullable=False),
            Column('has_crawled', Boolean, default=False),
            Column('emails', Unicode, nullable=True),
        )

        # Create the tables (no-op if they already exist)
        self.metadata.create_all(self.engine)

    def enqueue(self, url, emails=None):
        """Insert ``url`` into the queue.

        Returns False if not connected or the URL is already present.
        If ``emails`` is given, the row is stored as already crawled with
        the addresses joined by commas. Returns True on successful insert.
        """
        if not self.connected:
            return False

        # Uniqueness check: reject the URL if it is already stored.
        s = select([self.website_table]).where(self.website_table.c.url == url)
        res = self.connection.execute(s)
        result = res.fetchall()
        res.close()
        # If we get a result, then this url is not unique
        if len(result) > 0:
            # 			print 'Duplicated: %s' % url
            return False

        args = [{'url': unicode(url)}]
        if (emails != None):
            args = [{
                'url': unicode(url),
                'has_crawled': True,
                'emails': unicode(",".join(emails))
            }]
        result = self.connection.execute(self.website_table.insert(), args)
        if result:
            return True
        return False

    def dequeue(self):
        """Return the first not-yet-crawled row, or False if none (or not connected)."""
        if not self.connected:
            return False
        # Get the first thing in the queue
        s = select([
            self.website_table
        ]).limit(1).where(self.website_table.c.has_crawled == False)
        res = self.connection.execute(s)
        result = res.fetchall()
        res.close()
        # If we get a result
        if len(result) > 0:
            # Remove from the queue ?
            # delres = self.connection.execute(self.queue_table.delete().where(self.queue_table.c.id == result[0][0]))
            # if not delres:
            # 	return False
            # Return the row
            # print result[0].url
            return result[0]
        return False

    def crawled(self, website, new_emails=None):
        """Mark ``website`` (a row object with ``.id``) as crawled, storing any e-mails found."""
        if not self.connected:
            return False
        stmt = self.website_table.update() \
         .where(self.website_table.c.id==website.id) \
         .values(has_crawled=True, emails=new_emails)
        self.connection.execute(stmt)

    def get_all_emails(self):
        """Return the set of all stored e-mail addresses.

        Side effect: prints "email,domain" lines for addresses whose site
        domain matches the hard-coded ``companies`` list below.
        """
        if not self.connected:
            return None

        s = select([self.website_table])
        res = self.connection.execute(s)
        # Hard-coded domains of interest used only for the printout below.
        companies = [
            "zinetgo-services.com", "bosch-si.com", "instagrampartners.com",
            "inventateq.com", "jobswitch.in", "articlecatalog.com",
            "bosch-pt.co.in", "10bestseo.com", "unikainfocom.in",
            "crestaproject.com", "nidmindia.com", "goingsocial.ca",
            "ads2book.com", "nexevo.in", "emarketeducation.in",
            "educationtimes.com", "yseo.in", "learndigitalmarketing.com",
            "testingxperts.com", "australiancontractlaw.com",
            "sitegalleria.com", "schoolofdigitalmarketing.co.in",
            "digiterati-academy.com", "fuelingnewbusiness.com",
            "electronic-city.in", "indrasacademy.com", "palbabban.com",
            "skydreamconsulting.com", "masiradm.com", "imsolutions.co",
            "idigitalacademy.com", "seovalley.org", "thoughtminds.com",
            "oriondigital.in", "cityinnovates.com", "chromozomes.com",
            "kumarsacademy.com", "ultimez.com", "browsermedia.agency",
            "binarydigital360.com", "renavo.com", "builtinmtl.com",
            "startupcafedigital.com", "jaintechnosoft.com",
            "fintechvalleyvizag.com", "adworld-india.co.in", "taxiq.in",
            "smartechindia.com", "skimbox.co", "lodestarmg.com", "sunhill.in",
            "kepran.com", "rainmaker-india.com", "digitalcorsel.com",
            "digitallove.in", "beboldcreative.com", "zestwebdesign.in",
            "trionixglobal.com", "webgen.in", "sirigroups.com", "sigmato.com",
            "synx.com.au", "drrudresh.com", "digiverti.com",
            "optimizeindia.com", "techtrendsolutions.com", "grabstance.com",
            "jupitervidya.com", "sketchcareer.com",
            "nexgenonlinesolutions.com", "bolasintelli.co.in", "online24x7.in",
            "inboundkitchen.com", "pattronize.com", "onlinepaidlook.com",
            "performetris.com", "panworldeducation.com", "foxgloveawards.com",
            "percoyo.com", "yourseoservices.com", "entrepreneurreprints.com",
            "brandownerssummit.com", "bosch-climate.in", "scionacademy.com",
            "troppusweb.com", "hexalab.in", "prismtechnology.in",
            "hospity.com", "crosspollen.com", "access1solution.com",
            "wonesty.com", "estelle.in", "erosinfotech.com",
            "honeycombdigital.net", "thesocialmediaexpert.in",
            "bangaloremediaworks.com", "panamatechnologies.com",
            "seoconsultindia.com", "maditbox.com", "savanspaceinteriors.in",
            "bangaloreseoservices.in", "ucwebtechnologies.com", "seeknext.com",
            "jiffystorage.com", "dgepl.com", "inventussoftware.com",
            "72-pixels.com", "madgeek.in", "ads-r-us.de",
            "olivedewtechnologies.com", "mvglobe.in", "samrudhsolutions.com",
            "thought-starters.com", "ben-solutions.com", "seodigitz.com",
            "smwibangalore.in", "itservicestalk.com", "cbrainz.com",
            "imcanny.com"
        ]
        results = res.fetchall()
        res.close()
        email_set = set()
        x = set()
        for result in results:
            if (result.emails == None):
                continue
            for email in result.emails.split(','):
                email_set.add(email)
                url = urlparse.urlparse(result.url)
                hostname = url.hostname.split(".")
                # Heuristic: if the second-to-last label is short (<4 chars),
                # assume a three-part domain like example.com.sg.
                hostname = ".".join(
                    len(hostname[-2]) < 4 and hostname[-3:] or hostname[-2:])
                if any(hostname in s for s in companies):
                    print email + ',' + hostname
                x.add(email + ',' + hostname)
        return email_set

    def delete(self):
        """Drop the website table.

        NOTE(review): ``Table.drop(engine)`` appears to execute the DROP
        itself and return None, so the following execute() of its return
        value looks wrong — confirm.
        """
        if not self.connected:
            return None
        s = self.website_table.drop(self.engine)
        self.connection.execute(s)

    def get_all_domains(self):
        """Return the set of distinct (simplified) domains of all stored URLs."""
        if not self.connected:
            return None

        s = select([self.website_table])
        res = self.connection.execute(s)
        results = res.fetchall()
        res.close()
        domain_set = set()
        for result in results:
            if (result.url == None):
                continue
            url = urlparse.urlparse(result.url)
            hostname = url.hostname.split(".")
            # Simplistic assumeption of a domain. If 2nd last name is <4 char, then it has 3 parts eg. just2us.com.sg
            hostname = ".".join(
                len(hostname[-2]) < 4 and hostname[-3:] or hostname[-2:])
            domain_set.add(hostname)

        return domain_set

    def close(self):
        """Close the underlying database connection."""
        self.connection.close()

    def save_html(filename, html):
        # NOTE(review): missing ``self`` parameter — as written this cannot be
        # called on an instance without binding errors. Confirm intent.
        filename = os.path.join(HTML_DIR, filename)
        file = open(filename, "w+")
        file.writelines(html)
        file.close()

    def test(self):
        """Ad-hoc smoke test: builds a fresh CrawlerDb and exercises the queue."""
        c = CrawlerDb()
        c.connect()
        # c.enqueue(['a12222', '11'])
        # c.enqueue(['dddaaaaaa2', '22'])
        c.enqueue('111')
        c.enqueue('222')
        website = c.dequeue()
        c.crawled(website)
        website = c.dequeue()
        c.crawled(website, "a,b")
        print '---'
        c.dequeue()
Example #53
0
# NOTE(review): `metadata` must be a MetaData instance defined earlier in the
# file; `food_terms` is referenced by FK but defined elsewhere.

# Join table: a pair of food terms that co-occurred within one search.
# NOTE(review): "occurences" is misspelled ("occurrences"), but renaming it
# would change the database schema — confirm before fixing.
pairings = Table("pairings", metadata,
     Column("pairing_id", BigInteger, primary_key=True, autoincrement=True),
     Column("food_id1", Integer, ForeignKey("food_terms.id"), nullable=False),
     Column("food_id2", Integer, ForeignKey("food_terms.id"), nullable=False),
     Column("search_id", BigInteger, ForeignKey("searches.id"), nullable=False), 
     Column("occurences", Integer, nullable=False))

# One row per search a user performed, with timing window and match counts.
searches = Table("searches", metadata,
     Column("id", BigInteger, primary_key=True, autoincrement=True),
     Column("user_timestamp", DateTime, nullable=False),
     Column("search_window", String(10), nullable=False),
     Column("food_id", Integer, ForeignKey("food_terms.id"), nullable=False),
     Column("num_matches_total", Integer, nullable=False),
     Column("num_matches_returned", Integer, nullable=False))

# Individual result documents returned for a search.
results = Table("results", metadata,
     Column("id", BigInteger, primary_key=True, autoincrement=True),
     Column("publish_date", DateTime, nullable=False),
     Column("index_date", DateTime, nullable=False),
     Column("url", String(300), nullable=False),
     Column("search_id", BigInteger, ForeignKey("searches.id"), nullable=False))


if __name__ == '__main__':
     """Create tables if script run directly."""
     engine = create_engine("postgresql:///food_trends", echo=True)

     ans = input("Are you sure you want to make the tables? (Y/N) ")
     if ans.upper() == "Y":
          metadata.create_all(engine, checkfirst=True)
          print("Tables made in db.")
Example #54
0
class ServerDatabase:
    '''
    Wrapper class for working with the server's database.
    Uses an SQLite database, implemented with SQLAlchemy ORM
    using the classical (imperative) mapping approach.
    '''
    class AllUsers:
        '''Mapping class for the table of all users.'''
        def __init__(self, username, passwd_hash):
            self.name = username
            self.last_login = datetime.datetime.now()
            self.passwd_hash = passwd_hash
            self.pubkey = None
            self.id = None  # assigned by the database on insert

    class ActiveUsers:
        '''Mapping class for the table of currently active users.'''
        def __init__(self, user_id, ip_address, port, login_time):
            self.user = user_id
            self.ip_address = ip_address
            self.port = port
            self.login_time = login_time
            self.id = None  # assigned by the database on insert

    class LoginHistory:
        '''Mapping class for the login-history table.'''
        def __init__(self, name, date, ip, port):
            self.id = None  # assigned by the database on insert
            self.name = name
            self.date_time = date
            self.ip = ip
            self.port = port

    class UsersContacts:
        '''Mapping class for the users' contacts table.'''
        def __init__(self, user, contact):
            self.id = None  # assigned by the database on insert
            self.user = user
            self.contact = contact

    class UsersHistory:
        '''Mapping class for the per-user message statistics table.'''
        def __init__(self, user):
            self.id = None  # assigned by the database on insert
            self.user = user
            self.sent = 0
            self.accepted = 0

    def __init__(self, path):
        # Create the database engine
        self.database_engine = create_engine(
            f'sqlite:///{path}',
            echo=False,
            pool_recycle=7200,
            connect_args={'check_same_thread': False})

        # Create the MetaData object
        self.metadata = MetaData()

        # Users table
        users_table = Table('Users', self.metadata,
                            Column('id', Integer, primary_key=True),
                            Column('name', String, unique=True),
                            Column('last_login', DateTime),
                            Column('passwd_hash', String),
                            Column('pubkey', Text))

        # Active-users table
        active_users_table = Table(
            'Active_users', self.metadata,
            Column('id', Integer, primary_key=True),
            Column('user', ForeignKey('Users.id'), unique=True),
            Column('ip_address', String), Column('port', Integer),
            Column('login_time', DateTime))

        # Login-history table
        user_login_history = Table('Login_history', self.metadata,
                                   Column('id', Integer, primary_key=True),
                                   Column('name', ForeignKey('Users.id')),
                                   Column('date_time', DateTime),
                                   Column('ip', String),
                                   Column('port', String))

        # Users' contacts table
        contacts = Table('Contacts', self.metadata,
                         Column('id', Integer, primary_key=True),
                         Column('user', ForeignKey('Users.id')),
                         Column('contact', ForeignKey('Users.id')))

        # Per-user statistics table
        users_history_table = Table('History', self.metadata,
                                    Column('id', Integer, primary_key=True),
                                    Column('user', ForeignKey('Users.id')),
                                    Column('sent', Integer),
                                    Column('accepted', Integer))

        # Create the tables
        self.metadata.create_all(self.database_engine)

        # Register the classical mappings
        mapper(self.AllUsers, users_table)
        mapper(self.ActiveUsers, active_users_table)
        mapper(self.LoginHistory, user_login_history)
        mapper(self.UsersContacts, contacts)
        mapper(self.UsersHistory, users_history_table)

        # Create the session
        Session = sessionmaker(bind=self.database_engine)
        self.session = Session()

        # Any leftover rows in the active-users table are stale after a
        # server restart, so clear them out.
        self.session.query(self.ActiveUsers).delete()
        self.session.commit()

    def user_login(self, username, ip_address, port, key):
        '''
        Method executed on user login; records the login in the database.
        Updates the user's public key if it has changed.
        '''
        # Query the users table for a user with this name
        rez = self.session.query(self.AllUsers).filter_by(name=username)

        # If the user name is already in the table, update the last-login
        # time and check the key. If the client sent a new key, store it.
        if rez.count():
            user = rez.first()
            user.last_login = datetime.datetime.now()
            if user.pubkey != key:
                user.pubkey = key
        # Otherwise raise an exception
        else:
            raise ValueError('Пользователь не зарегистрирован.')

        # Now record the login in the active-users table.
        new_active_user = self.ActiveUsers(user.id, ip_address, port,
                                           datetime.datetime.now())
        self.session.add(new_active_user)

        # ... and in the login history
        history = self.LoginHistory(user.id, datetime.datetime.now(),
                                    ip_address, port)
        self.session.add(history)

        # Persist the changes
        self.session.commit()

    def add_user(self, name, passwd_hash):
        '''
        User registration method.
        Takes a name and a password hash, and creates a statistics row.
        '''
        user_row = self.AllUsers(name, passwd_hash)
        self.session.add(user_row)
        # Commit first so user_row.id is populated for the statistics row.
        self.session.commit()
        history_row = self.UsersHistory(user_row.id)
        self.session.add(history_row)
        self.session.commit()

    def remove_user(self, name):
        '''Remove a user and all of their related rows from the database.'''
        user = self.session.query(self.AllUsers).filter_by(name=name).first()
        self.session.query(self.ActiveUsers).filter_by(user=user.id).delete()
        self.session.query(self.LoginHistory).filter_by(name=user.id).delete()
        self.session.query(self.UsersContacts).filter_by(user=user.id).delete()
        self.session.query(
            self.UsersContacts).filter_by(contact=user.id).delete()
        self.session.query(self.UsersHistory).filter_by(user=user.id).delete()
        self.session.query(self.AllUsers).filter_by(name=name).delete()
        self.session.commit()

    def get_hash(self, name):
        '''Return the stored password hash for a user.'''
        user = self.session.query(self.AllUsers).filter_by(name=name).first()
        return user.passwd_hash

    def get_pubkey(self, name):
        '''Return the stored public key for a user.'''
        user = self.session.query(self.AllUsers).filter_by(name=name).first()
        return user.pubkey

    def check_user(self, name):
        '''Return True if a user with this name exists.'''
        if self.session.query(self.AllUsers).filter_by(name=name).count():
            return True
        else:
            return False

    def user_logout(self, username):
        '''Record that a user has disconnected.'''
        # Look up the departing user
        user = self.session.query(
            self.AllUsers).filter_by(name=username).first()

        # Remove them from the active-users table.
        self.session.query(self.ActiveUsers).filter_by(user=user.id).delete()

        # Persist the changes
        self.session.commit()

    def process_message(self, sender, recipient):
        '''Record the fact that a message was passed, in the statistics table.'''
        # Resolve sender and recipient ids
        sender = self.session.query(
            self.AllUsers).filter_by(name=sender).first().id
        recipient = self.session.query(
            self.AllUsers).filter_by(name=recipient).first().id
        # Fetch the history rows and bump the counters
        sender_row = self.session.query(
            self.UsersHistory).filter_by(user=sender).first()
        sender_row.sent += 1
        recipient_row = self.session.query(
            self.UsersHistory).filter_by(user=recipient).first()
        recipient_row.accepted += 1

        self.session.commit()

    def add_contact(self, user, contact):
        '''Add a contact for a user.'''
        # Resolve both user ids
        user = self.session.query(self.AllUsers).filter_by(name=user).first()
        contact = self.session.query(
            self.AllUsers).filter_by(name=contact).first()

        # Skip duplicates and non-existent contacts (the `user` field is
        # trusted to be valid)
        if not contact or self.session.query(self.UsersContacts).filter_by(
                user=user.id, contact=contact.id).count():
            return

        # Create the row and store it
        contact_row = self.UsersContacts(user.id, contact.id)
        self.session.add(contact_row)
        self.session.commit()

    # Removes a contact from the database
    def remove_contact(self, user, contact):
        '''Remove a contact from a user's contact list.'''
        # Resolve both user ids
        user = self.session.query(self.AllUsers).filter_by(name=user).first()
        contact = self.session.query(
            self.AllUsers).filter_by(name=contact).first()

        # The contact must exist (the `user` field is trusted to be valid)
        if not contact:
            return

        # Delete the matching row
        self.session.query(self.UsersContacts).filter(
            self.UsersContacts.user == user.id,
            self.UsersContacts.contact == contact.id).delete()
        self.session.commit()

    def users_list(self):
        '''Return the list of known users with their last-login times.'''
        # Query the users table rows.
        query = self.session.query(self.AllUsers.name,
                                   self.AllUsers.last_login)
        # Return a list of tuples
        return query.all()

    def active_users_list(self):
        '''Return the list of currently active users.'''
        # Join the tables and collect (name, address, port, time) tuples.
        query = self.session.query(
            self.AllUsers.name, self.ActiveUsers.ip_address,
            self.ActiveUsers.port,
            self.ActiveUsers.login_time).join(self.AllUsers)
        # Return a list of tuples
        return query.all()

    def login_history(self, username=None):
        '''Return the login history, optionally filtered to one user.'''
        # Query the login history
        query = self.session.query(self.AllUsers.name,
                                   self.LoginHistory.date_time,
                                   self.LoginHistory.ip,
                                   self.LoginHistory.port).join(self.AllUsers)
        # If a user name was given, filter on it
        if username:
            query = query.filter(self.AllUsers.name == username)
        # Return a list of tuples
        return query.all()

    def get_contacts(self, username):
        '''Return the list of contact names for a user.'''
        # Look up the given user
        user = self.session.query(self.AllUsers).filter_by(name=username).one()

        # Query their contact list
        query = self.session.query(self.UsersContacts, self.AllUsers.name). \
            filter_by(user=user.id). \
            join(self.AllUsers, self.UsersContacts.contact == self.AllUsers.id)

        # Keep only the contact names and return them.
        return [contact[1] for contact in query.all()]

    def message_history(self):
        '''Return per-user message statistics.'''
        query = self.session.query(
            self.AllUsers.name, self.AllUsers.last_login,
            self.UsersHistory.sent,
            self.UsersHistory.accepted).join(self.AllUsers)
        # Return a list of tuples
        return query.all()
Example #55
0
    def test_alias_pathing(self):
        """Memory-regression test: repeatedly running a subqueryload over an
        aliased polymorphic path (B.as_.of_type(ASub)) must not leak memory
        across iterations (checked by the profile_memory decorator)."""
        metadata = MetaData()

        # Joined-table inheritance: ASub extends A, discriminated on a.type.
        a = Table(
            "a",
            metadata,
            Column("id",
                   Integer,
                   primary_key=True,
                   test_needs_autoincrement=True),
            Column("bid", Integer, ForeignKey("b.id")),
            Column("type", String(30)),
        )

        asub = Table(
            "asub",
            metadata,
            Column("id", Integer, ForeignKey("a.id"), primary_key=True),
            Column("data", String(30)),
        )

        b = Table(
            "b",
            metadata,
            Column("id",
                   Integer,
                   primary_key=True,
                   test_needs_autoincrement=True),
        )
        self.mapper_registry.map_imperatively(A,
                                              a,
                                              polymorphic_identity="a",
                                              polymorphic_on=a.c.type)
        self.mapper_registry.map_imperatively(ASub,
                                              asub,
                                              inherits=A,
                                              polymorphic_identity="asub")
        self.mapper_registry.map_imperatively(
            B, b, properties={"as_": relationship(A)})

        metadata.create_all(self.engine)
        # Seed one B with three ASub children, then drop the session so the
        # profiled loop below starts from a clean state.
        sess = Session(self.engine)
        a1 = ASub(data="a1")
        a2 = ASub(data="a2")
        a3 = ASub(data="a3")
        b1 = B(as_=[a1, a2, a3])
        sess.add(b1)
        sess.commit()
        del sess

        # sqlite has a slow enough growth here
        # that we have to run it more times to see the
        # "dip" again
        @profile_memory(maxtimes=120)
        def go():
            sess = Session(self.engine)
            sess.query(B).options(subqueryload(B.as_.of_type(ASub))).all()
            sess.close()
            del sess

        try:
            go()
        finally:
            metadata.drop_all(self.engine)
        clear_mappers()
Example #56
0
    def test_restore(self):
        """cache.restore() should copy only the cached rows satisfying the
        constraint (A1 < 2) into the destination relation, leaving columns
        the cache does not have (A7) unfilled."""
        metadata = MetaData(self.engine, reflect=True)
        # Cached table has columns A1/A2/A4/A6; the destination adds A7.
        tj_cached = Table('TJ_1', metadata,
                          Column('A1', Integer, primary_key=True),
                          Column('A2', String(20)), Column('A4', Integer),
                          Column('A6', String(50)))
        new_tj = Table('TJ_new', metadata,
                       Column('A1', Integer, primary_key=True),
                       Column('A2', String(20)), Column('A4', Integer),
                       Column('A6', String(50)), Column('A7', String(50)))
        metadata.create_all()

        # populate with data: row_1 satisfies A1 < 2, the second row does not
        with self.engine.connect() as conn:
            row_1 = {'A1': 1, 'A2': 'a12 str', 'A4': 14, 'A6': 'a16 str'}
            conn.execute(
                tj_cached.insert(),
                [row_1, {
                    'A1': 2,
                    'A2': 'a22 str',
                    'A4': 24,
                    'A6': 'a26 str'
                }])

        # Logical description of the destination relation and of the source
        # relations (context) the cache was built from.
        dest_relation = {
            "name": new_tj.name,
            "attributes": {
                "A1": "Integer",
                "A2": "String",
                "A4": "Integer",
                "A6": "String",
                "A7": "String"
            }
        }
        context = [{
            "name": "R_1",
            "attributes": {
                "A1": "Integer",
                "A2": "String",
                "A3": "Integer",
                "A4": "Boolean"
            }
        }, {
            "name": "R_2",
            "attributes": {
                "A1": "Integer",
                "A6": "String"
            }
        }, {
            "name": "R_3",
            "attributes": {
                "A1": "Integer",
                "A7": "String"
            }
        }]
        constraint = [[{"attribute": "A1", "operation": "<", "value": 2}]]
        engine = self.engine

        cache = Cache(engine, self.cache_file_path)
        cache._config[0]['constraint'] = constraint
        cache.enable(context=context, constraint=constraint)

        cache.restore(dest_relation, constraint)

        # Only row_1 matches the constraint; ignoring A7 (absent from the
        # cached table), the restored row must equal row_1 exactly.
        res = db.get_rows(self.engine, dest_relation)
        self.assertEqual(len(res), 1)
        first_row = res[0]
        del first_row['A7']
        self.assertDictEqual(dict(first_row), row_1)
Example #57
0
class sqlite_registry(registry):
    '''
    SQLite-backed store of applied policy state: HKLM/HKCU registry
    entries, shortcuts, printers and drive mappings (per-user rows are
    keyed by SID).
    '''

    def __init__(self, db_name, registry_cache_dir=None):
        self.db_name = db_name
        cdir = registry_cache_dir
        if cdir is None:
            cdir = cache_dir()
        # SQLAlchemy URL of the per-registry SQLite database file.
        # (A single-argument os.path.join() around this was a no-op.)
        self.db_path = 'sqlite:///{}/{}.sqlite'.format(cdir, self.db_name)
        self.db_cnt = create_engine(self.db_path, echo=False)
        self.__metadata = MetaData(self.db_cnt)
        self.__info = Table('info', self.__metadata,
                            Column('id', Integer, primary_key=True),
                            Column('name', String(65536), unique=True),
                            Column('value', String(65536)))
        self.__hklm = Table('HKLM', self.__metadata,
                            Column('id', Integer, primary_key=True),
                            Column('hive_key', String(65536), unique=True),
                            Column('type', Integer), Column('data', String))
        self.__hkcu = Table('HKCU', self.__metadata,
                            Column('id', Integer, primary_key=True),
                            Column('sid', String),
                            Column('hive_key', String(65536)),
                            Column('type', Integer), Column('data', String),
                            UniqueConstraint('sid', 'hive_key'))
        self.__shortcuts = Table('Shortcuts', self.__metadata,
                                 Column('id', Integer, primary_key=True),
                                 Column('sid', String), Column('path', String),
                                 Column('shortcut', String),
                                 UniqueConstraint('sid', 'path'))
        self.__printers = Table('Printers', self.__metadata,
                                Column('id', Integer, primary_key=True),
                                Column('sid', String), Column('name', String),
                                Column('printer', String),
                                UniqueConstraint('sid', 'name'))
        self.__drives = Table('Drives', self.__metadata,
                              Column('id', Integer, primary_key=True),
                              Column('sid', String), Column('login', String),
                              Column('password', String),
                              Column('dir', String), Column('path', String),
                              UniqueConstraint('sid', 'dir'))
        self.__metadata.create_all(self.db_cnt)
        Session = sessionmaker(bind=self.db_cnt)
        self.db_session = Session()
        try:
            # Classical mappings; configuring twice raises, which we treat
            # as "already mapped by an earlier instance in this process".
            mapper(info_entry, self.__info)
            mapper(samba_preg, self.__hklm)
            mapper(samba_hkcu_preg, self.__hkcu)
            mapper(ad_shortcut, self.__shortcuts)
            mapper(printer_entry, self.__printers)
            mapper(drive_entry, self.__drives)
        except Exception:
            # Mappers were already configured for these classes.
            pass
            #logging.error('Error creating mapper')

    def _add(self, row):
        '''Insert a mapped row; roll back and re-raise on any failure.'''
        try:
            self.db_session.add(row)
            self.db_session.commit()
        except Exception as exc:
            self.db_session.rollback()
            raise exc

    def _info_upsert(self, row):
        '''Insert an info row, or update its value if the name exists.'''
        try:
            self._add(row)
        except Exception:
            update_obj = dict({'value': row.value})
            (self.db_session.query(info_entry).filter(
                info_entry.name == row.name).update(update_obj))
            self.db_session.commit()

    def _hklm_upsert(self, row):
        '''Insert an HKLM row, or update type/data for an existing hive_key.'''
        try:
            self._add(row)
        except Exception:
            update_obj = dict({'type': row.type, 'data': row.data})
            (self.db_session.query(samba_preg).filter(
                samba_preg.hive_key == row.hive_key).update(update_obj))
            self.db_session.commit()

    def _hkcu_upsert(self, row):
        '''Insert an HKCU row, or update type/data for (sid, hive_key).'''
        try:
            self._add(row)
        except Exception:
            update_obj = dict({'type': row.type, 'data': row.data})
            # Fixed: query samba_hkcu_preg — the original queried samba_preg
            # while filtering on samba_hkcu_preg columns, so the UPDATE
            # targeted the wrong mapped table.
            (self.db_session.query(samba_hkcu_preg).
             filter(samba_hkcu_preg.sid == row.sid).filter(
                 samba_hkcu_preg.hive_key == row.hive_key).update(update_obj))
            self.db_session.commit()

    def _shortcut_upsert(self, row):
        '''Insert a shortcut row, or update the shortcut for (sid, path).'''
        try:
            self._add(row)
        except Exception:
            update_obj = dict({'shortcut': row.shortcut})
            (self.db_session.query(ad_shortcut).filter(
                ad_shortcut.sid == row.sid).filter(
                    ad_shortcut.path == row.path).update(update_obj))
            self.db_session.commit()

    def _printer_upsert(self, row):
        '''Insert a printer row, or update the printer for (sid, name).'''
        try:
            self._add(row)
        except Exception:
            update_obj = dict({'printer': row.printer})
            (self.db_session.query(printer_entry).filter(
                printer_entry.sid == row.sid).filter(
                    printer_entry.name == row.name).update(update_obj))
            self.db_session.commit()

    def _drive_upsert(self, row):
        '''Insert a drive row, or update mapping details for (sid, dir).'''
        try:
            self._add(row)
        except Exception:
            update_obj = dict({
                'dir': row.dir,
                'path': row.path,
                'login': row.login,
                'password': row.password
            })
            (self.db_session.query(drive_entry).filter(
                drive_entry.sid == row.sid).filter(
                    drive_entry.dir == row.dir).update(update_obj))
            self.db_session.commit()

    def set_info(self, name, value):
        '''Upsert a name/value pair into the info table.'''
        ientry = info_entry(name, value)
        logging.debug(slogm('Setting info {}:{}'.format(name, value)))
        self._info_upsert(ientry)

    def add_hklm_entry(self, preg_entry):
        '''
        Write PReg entry to HKEY_LOCAL_MACHINE
        '''
        pentry = samba_preg(preg_entry)
        # Keys whose leaf name starts with '**' are branch-deletion markers,
        # not values to store.
        if not pentry.hive_key.rpartition('\\')[2].startswith('**'):
            self._hklm_upsert(pentry)
        else:
            logging.warning(
                slogm('Skipping branch deletion key: {}'.format(
                    pentry.hive_key)))

    def add_hkcu_entry(self, preg_entry, sid):
        '''
        Write PReg entry to HKEY_CURRENT_USER
        '''
        hkcu_pentry = samba_hkcu_preg(sid, preg_entry)
        if not hkcu_pentry.hive_key.rpartition('\\')[2].startswith('**'):
            logging.debug(slogm('Adding HKCU entry for {}'.format(sid)))
            self._hkcu_upsert(hkcu_pentry)
        else:
            logging.warning(
                slogm('Skipping branch deletion key: {}'.format(
                    hkcu_pentry.hive_key)))

    def add_shortcut(self, sid, sc_obj):
        '''
        Store shortcut information in the database
        '''
        sc_entry = ad_shortcut(sid, sc_obj)
        logging.debug(
            slogm('Saving info about {} link for {}'.format(
                sc_entry.path, sid)))
        self._shortcut_upsert(sc_entry)

    def add_printer(self, sid, pobj):
        '''
        Store printer configuration in the database
        '''
        prn_entry = printer_entry(sid, pobj)
        logging.debug(
            slogm('Saving info about printer {} for {}'.format(
                prn_entry.name, sid)))
        self._printer_upsert(prn_entry)

    def add_drive(self, sid, dobj):
        '''Store a drive mapping for the given SID in the database.'''
        drv_entry = drive_entry(sid, dobj)
        logging.debug(
            slogm('Saving info about drive mapping {} for {}'.format(
                drv_entry.path, sid)))
        self._drive_upsert(drv_entry)

    def get_shortcuts(self, sid):
        '''Return all shortcut rows stored for the given SID.'''
        res = (self.db_session.query(ad_shortcut).filter(
            ad_shortcut.sid == sid).all())
        return res

    def get_printers(self, sid):
        '''Return all printer rows stored for the given SID.'''
        res = (self.db_session.query(printer_entry).filter(
            printer_entry.sid == sid).all())
        return res

    def get_drives(self, sid):
        '''Return all drive-mapping rows stored for the given SID.'''
        res = (self.db_session.query(drive_entry).filter(
            drive_entry.sid == sid).all())
        return res

    def get_hkcu_entry(self, sid, hive_key):
        '''
        Return the HKCU entry for (sid, hive_key), falling back to the
        machine SID's entry when the user has none.
        '''
        # Fixed: query samba_hkcu_preg — the original queried samba_preg
        # while filtering on samba_hkcu_preg columns.
        res = (self.db_session.query(samba_hkcu_preg).filter(
            samba_hkcu_preg.sid == sid).filter(
                samba_hkcu_preg.hive_key == hive_key).first())
        # Try to get the value from machine SID as a default if no option is set.
        if not res:
            machine_sid = self.get_info('machine_sid')
            res = self.db_session.query(samba_hkcu_preg).filter(
                samba_hkcu_preg.sid == machine_sid).filter(
                    samba_hkcu_preg.hive_key == hive_key).first()
        return res

    def filter_hkcu_entries(self, sid, startswith):
        '''Return a query of HKCU entries for sid whose key matches a LIKE pattern.'''
        res = (self.db_session.query(samba_hkcu_preg).filter(
            samba_hkcu_preg.sid == sid).filter(
                samba_hkcu_preg.hive_key.like(startswith)))
        return res

    def get_info(self, name):
        '''Return the stored value for the given info name.'''
        res = (self.db_session.query(info_entry).filter(
            info_entry.name == name).first())
        return res.value

    def get_hklm_entry(self, hive_key):
        '''Return the HKLM entry for the given hive key, or None.'''
        res = (self.db_session.query(samba_preg).filter(
            samba_preg.hive_key == hive_key).first())
        return res

    def filter_hklm_entries(self, startswith):
        '''Return a query of HKLM entries whose key matches a LIKE pattern.'''
        res = (self.db_session.query(samba_preg).filter(
            samba_preg.hive_key.like(startswith)))
        return res

    def wipe_user(self, sid):
        '''Delete every stored row (HKCU, shortcuts, printers, drives) for sid.'''
        self.wipe_hkcu(sid)
        self.wipe_shortcuts(sid)
        self.wipe_printers(sid)
        self.wipe_drives(sid)

    def wipe_shortcuts(self, sid):
        '''Delete all shortcut rows for the given SID.'''
        (self.db_session.query(ad_shortcut).filter(
            ad_shortcut.sid == sid).delete())
        self.db_session.commit()

    def wipe_printers(self, sid):
        '''Delete all printer rows for the given SID.'''
        (self.db_session.query(printer_entry).filter(
            printer_entry.sid == sid).delete())
        self.db_session.commit()

    def wipe_drives(self, sid):
        '''Delete all drive-mapping rows for the given SID.'''
        (self.db_session.query(drive_entry).filter(
            drive_entry.sid == sid).delete())
        self.db_session.commit()

    def wipe_hkcu(self, sid):
        '''Delete all HKCU rows for the given SID.'''
        (self.db_session.query(samba_hkcu_preg).filter(
            samba_hkcu_preg.sid == sid).delete())
        self.db_session.commit()

    def wipe_hklm(self):
        '''Delete every HKLM row.'''
        self.db_session.query(samba_preg).delete()
        self.db_session.commit()
Example #58
0
# Flask-Blogging / site configuration.
app.config["BLOGGING_DISQUS_SITENAME"] = "test"
app.config["BLOGGING_SITEURL"] = "http://localhost:8000"
app.config["BLOGGING_SITENAME"] = "My Site"
app.config["BLOGGING_PERMISSIONS"] = False  # no per-blogger permission checks
app.config["CACHE_TYPE"] = "simple"  # simple in-memory cache backend

# create cache
cache = Cache(app)

# extensions: SQLite-backed post storage wired into the blogging engine
engine = create_engine('sqlite:////tmp/blog.db')
meta = MetaData()
sql_storage = SQLAStorage(engine, metadata=meta)
blog_engine = BloggingEngine(app, sql_storage, cache=cache)
login_manager = LoginManager(app)
meta.create_all(bind=engine)  # create the blog tables if they don't exist


class User(UserMixin):
    """Minimal user object for Flask-Login, identified only by an id."""

    def __init__(self, user_id):
        # Flask-Login's UserMixin only requires an ``id`` attribute.
        self.id = user_id

    def get_name(self):
        """Return the user's display name (hard-coded demo value)."""
        return "Paul Dirac"  # typically the user's name


@identity_loaded.connect_via(app)
def on_identity_loaded(sender, identity):
    """Attach the current user and its UserNeed to each loaded identity."""
    identity.user = current_user
    # Sentinel-based getattr mirrors hasattr: the need is added whenever the
    # attribute exists, even if its value is falsy.
    _missing = object()
    user_id = getattr(current_user, "id", _missing)
    if user_id is not _missing:
        identity.provides.add(UserNeed(user_id))
Example #59
0
                                     expire_on_commit=False)()
else:
    saveddata_meta = None

# Lock controlling any changes introduced to session
sd_lock = threading.RLock()

# Import all the definitions for all our database stuff
# noinspection PyPep8
from eos.db.gamedata import alphaClones, attribute, category, effect, group, item, marketGroup, metaData, metaGroup, queries, traits, unit, dynamicAttributes
# noinspection PyPep8
from eos.db.saveddata import booster, cargo, character, damagePattern, databaseRepair, drone, fighter, fit, implant, implantSet, loadDefaultDatabaseValues, \
    miscData, mutator, module, override, price, queries, skill, targetResists, user

# Import queries
# noinspection PyPep8
from eos.db.gamedata.queries import *
# noinspection PyPep8
from eos.db.saveddata.queries import *

# If using in memory saveddata, you'll want to reflect it so the data structure is good.
if config.saveddata_connectionstring == "sqlite:///:memory:":
    saveddata_meta.create_all()
    pyfalog.info("Running database out of memory.")


def rollback():
    """Roll back the shared saveddata session while holding the session lock."""
    sd_lock.acquire()
    try:
        pyfalog.warning("Session rollback triggered.")
        saveddata_session.rollback()
    finally:
        sd_lock.release()
Example #60
0
def translate_to_persistable(value):
    """Translates a pd.DataFrame value into a value friendly to SQLAlchemy.

    - pd.Timestamp becomes a datetime.date (the time component is dropped)
    - NaN/NaT/None become None
    - everything else passes through unchanged
    """
    # isinstance (instead of an exact type() check) also handles Timestamp
    # subclasses; NaT is not a Timestamp instance, so it falls through to
    # the pd.isna branch and maps to None as before.
    if isinstance(value, pd.Timestamp):
        return value.date()
    if pd.isna(value):
        return None
    return value


def query_cik(cik: str, conn: Connection = prelim_engine) -> pd.DataFrame:
    """Return all rows of the ``cik`` table matching *cik* as a DataFrame."""
    stmt = text("select * from cik where cik = :cik").bindparams(cik=cik)
    return pd.read_sql(stmt, conn)


def save_ciks(conn: Connection = prelim_engine):
    """Loads the cik table from the csv file in the resources directory."""
    # Read every column as a plain string so values round-trip unchanged.
    str_converters = {col.name: str for col in prelim_cik_table.columns}
    df = pd.read_csv("resources/cik.csv", converters=str_converters)
    with conn.begin() as c:
        # Empty the table, then bulk-append the freshly loaded rows.
        c.execute('truncate table cik')
        df.to_sql(prelim_cik_table.name, c, if_exists='append', index=False)


if __name__ == '__main__':
    # Create any missing tables; checkfirst avoids errors if they already exist.
    prelim_metadata.create_all(prelim_engine, checkfirst=True)
    # save_ciks()