Ejemplo n.º 1
0
    def test_query(self):
        """Round-trip an eager-loaded ORM query through the serializer.

        Checks that the deserialized query returns the same rows as the
        original, still eager-loads (single SQL statement), and remains
        composable (join / filter / value / values) afterwards.
        """
        q = Session.query(User).filter(User.name == "ed").options(eagerload(User.addresses))
        eq_(q.all(), [User(name="ed", addresses=[Address(id=2), Address(id=3), Address(id=4)])])

        # dumps(q, -1): pickle with highest protocol; loads() rebinds the
        # query to the given metadata and session.
        q2 = serializer.loads(serializer.dumps(q, -1), users.metadata, Session)

        def go():
            eq_(q2.all(), [User(name="ed", addresses=[Address(id=2), Address(id=3), Address(id=4)])])

        # Eager loading must survive serialization: exactly one statement.
        self.assert_sql_count(testing.db, go, 1)

        eq_(q2.join(User.addresses).filter(Address.email == "*****@*****.**").value(func.count("*")), 1)

        u1 = Session.query(User).get(8)

        q = Session.query(Address).filter(Address.user == u1).order_by(desc(Address.email))
        q2 = serializer.loads(serializer.dumps(q, -1), users.metadata, Session)

        eq_(q2.all(), [Address(email="*****@*****.**"), Address(email="*****@*****.**"), Address(email="*****@*****.**")])

        q = Session.query(User).join(User.addresses).filter(Address.email.like("%fred%"))
        q2 = serializer.loads(serializer.dumps(q, -1), users.metadata, Session)
        eq_(q2.all(), [User(name="fred")])

        eq_(list(q2.values(User.id, User.name)), [(9, u"fred")])
Ejemplo n.º 2
0
        def _(*a, **kw):
            """Wrapper that caches f's result in redis under a generated key.

            Closes over ``f``, ``gen_key``, ``rdb``, ``expire``, ``empty``,
            ``BUILTIN_TYPES``, ``Empty`` from the enclosing decorator (not
            visible in this chunk). Pass ``force=True`` to bypass the cache.
            """
            key, args = gen_key(*a, **kw)
            if not key:
                # No cache key could be derived: call through uncached.
                return f(*a, **kw)
            force = kw.pop("force", False)
            r = rdb.get(key) if not force else None
            if r is None:
                r = f(*a, **kw)
                if r is not None:
                    if not isinstance(r, BUILTIN_TYPES):
                        # Pickle non-builtin values before storing.
                        r = dumps(r)
                    rdb.set(key, r, expire)
                else:
                    # Cache a sentinel so a None result is cached too.
                    r = dumps(empty)
                    rdb.set(key, r, expire)

            try:
                r = loads(r)
            except (TypeError, UnpicklingError):
                # Value was stored unpickled (builtin type) — keep as-is.
                pass
            if isinstance(r, Empty):
                # Translate the cached sentinel back to None.
                r = None
            if isinstance(r, bytes):
                r = r.decode()
            return r
Ejemplo n.º 3
0
    def post(self):
        """Create new transactions from the JSON request body.

        Returns (serialized-body, HTTP-status): 201 on success, 400 with the
        serialized exception on a database error.

        NOTE(review): the outer loop creates one NEW transaction per EXISTING
        row in TransactionsTbl (``transaction`` is never used), so N existing
        transactions produce N new ones — confirm this is intended.
        """
        data = request.get_json();

        try:
            qry_transactions = session.query(TransactionsTbl).join(TransactionsTbl.status).order_by(TransactionsTbl.id.asc()).all()

            list_transactions = []
            for transaction in qry_transactions:
                new_transaction = TransactionsTbl(status_id=1, date_created=now)
                session.add(new_transaction)
                session.flush()

                # Refresh to obtain the database-assigned primary key.
                session.refresh(new_transaction)

                products = [];
                total = 0;
                for product in data['products']:
                    qry_products = session.query(ProductsTbl).filter(ProductsTbl.id == product['id']).first()
                    total += product['amount']*qry_products.price
                    products.append(TransactionProducts(transaction_id=new_transaction.id, product_id=product['id'], amount=product['amount']))
                    # Decrement stock by the purchased amount.
                    new_stock = qry_products.stock - product['amount'];
                    session.query(ProductsTbl).filter(ProductsTbl.id == product['id']).update({'stock': new_stock})

                session.bulk_save_objects(products)
                session.query(TransactionsTbl).filter(TransactionsTbl.id == new_transaction.id).update({'total': float(total)})
                session.commit()

        except (sqlalchemy.exc.SQLAlchemyError, sqlalchemy.exc.DBAPIError) as e:
            return dumps(e), 400

        return dumps({}), 201
Ejemplo n.º 4
0
        def _(*a, **kw):
            """Wrapper caching f's result in redis, keyed also by query string.

            Closes over ``f``, ``gen_key``, ``rdb``, ``expire``, ``empty``,
            ``BUILTIN_TYPES``, ``Empty`` from the enclosing decorator (not
            visible in this chunk). Caching is disabled entirely unless the
            app config enables USE_REDIS; ``force=True`` bypasses the cache.
            """
            if not current_app.config["USE_REDIS"]:
                return f(*a, **kw)
            key, args = gen_key(*a, **kw)
            if not key:
                # No cache key could be derived: call through uncached.
                return f(*a, **kw)
            # Include the request's query string so different query params
            # get distinct cache entries.
            key = key + ":" + request.query_string.decode()
            force = kw.pop("force", False)
            r = rdb.get(key) if not force else None
            if r is None:
                r = f(*a, **kw)
                if r is not None:
                    if not isinstance(r, BUILTIN_TYPES):
                        # Pickle non-builtin values before storing.
                        r = dumps(r)
                    rdb.set(key, r, expire)
                else:
                    # Cache a sentinel so a None result is cached too.
                    r = dumps(empty)
                    rdb.set(key, r, expire)

            try:
                r = loads(r)
            except (TypeError, UnpicklingError):
                # Value was stored unpickled (builtin type) — keep as-is.
                pass
            if isinstance(r, Empty):
                r = None
            return r
Ejemplo n.º 5
0
    def test_query_one(self):
        """Round-trip a joined-eager-load query through the serializer.

        Verifies the deserialized query yields identical results in one SQL
        statement, and that further composition (join/filter/value) and a
        second serialized query (ordered Address filter) still work.
        """
        q = Session.query(User).\
                filter(User.name == 'ed').\
                    options(joinedload(User.addresses))

        q2 = serializer.loads(
                    serializer.dumps(q, -1),
                            users.metadata, Session)
        def go():
            eq_(q2.all(), [
                    User(name='ed', addresses=[Address(id=2),
                    Address(id=3), Address(id=4)])])

        # Eager loading must survive serialization: exactly one statement.
        self.assert_sql_count(testing.db, go, 1)

        eq_(q2.join(User.addresses).filter(Address.email
            == '*****@*****.**').value(func.count('*')), 1)
        u1 = Session.query(User).get(8)
        q = Session.query(Address).filter(Address.user
                == u1).order_by(desc(Address.email))
        q2 = serializer.loads(serializer.dumps(q, -1), users.metadata,
                              Session)
        eq_(q2.all(), [Address(email='*****@*****.**'),
            Address(email='*****@*****.**'),
            Address(email='*****@*****.**')])
Ejemplo n.º 6
0
    def delete(self, id):
        """Delete the product row with the given primary key.

        Returns a (serialized-body, HTTP-status) tuple: 201 on success,
        400 with the serialized exception on a database error.
        """
        try:
            matching = session.query(ProductsTbl).filter(ProductsTbl.id == id)
            matching.delete()
            session.commit()
        except (sqlalchemy.exc.SQLAlchemyError, sqlalchemy.exc.DBAPIError) as exc:
            return dumps(exc), 400
        return dumps({}), 201
Ejemplo n.º 7
0
    def test_query_one(self):
        """Round-trip a joined-eager-load query through the serializer.

        Modern-API variant: uses with_entities/scalar instead of the legacy
        value(), and Session.get() instead of Query.get().
        """
        q = (
            Session.query(User)
            .filter(User.name == "ed")
            .options(joinedload(User.addresses))
        )

        q2 = serializer.loads(serializer.dumps(q, -1), users.metadata, Session)

        def go():
            eq_(
                q2.all(),
                [
                    User(
                        name="ed",
                        addresses=[
                            Address(id=2),
                            Address(id=3),
                            Address(id=4),
                        ],
                    )
                ],
            )

        # Eager loading must survive serialization: exactly one statement.
        self.assert_sql_count(testing.db, go, 1)

        eq_(
            q2.join(User.addresses)
            .filter(Address.email == "*****@*****.**")
            .enable_eagerloads(False)
            .with_entities(func.count(literal_column("*")))
            .scalar(),
            1,
        )
        u1 = Session.get(User, 8)
        q = (
            Session.query(Address)
            .filter(Address.user == u1)
            .order_by(desc(Address.email))
        )
        q2 = serializer.loads(serializer.dumps(q, -1), users.metadata, Session)
        eq_(
            q2.all(),
            [
                Address(email="*****@*****.**"),
                Address(email="*****@*****.**"),
                Address(email="*****@*****.**"),
            ],
        )
Ejemplo n.º 8
0
 def get_data(self):
     """Serialize every mapped table's rows into a dict keyed by class name."""
     return {
         mapped.__name__: dumps(self.db_session.query(mapped).all())
         for mapped in self.get_mapped_classes()
     }
Ejemplo n.º 9
0
 def test_query_two(self):
     """Serialize/deserialize a join+LIKE query; results and values() survive."""
     q = \
         Session.query(User).join(User.addresses).\
            filter(Address.email.like('%fred%'))
     q2 = serializer.loads(serializer.dumps(q, -1), users.metadata, Session)
     eq_(q2.all(), [User(name='fred')])
     eq_(list(q2.values(User.id, User.name)), [(9, 'fred')])
Ejemplo n.º 10
0
    def get(self):
        """List all transactions with their products as a JSON-able dict.

        Returns {'data': [...]} plus a Content-Type header tuple; on a
        database error returns the serialized exception with status 400.
        """
        d_result = {}

        try:
            qry_transactions = session.query(TransactionsTbl).join(TransactionsTbl.status).order_by(TransactionsTbl.id.asc()).all()

            list_transactions = []
            for transaction in qry_transactions:
                # Fetch this transaction's line items joined to their product.
                qry_products = session.query(TransactionProducts).join(TransactionProducts.product).filter(TransactionProducts.transaction_id == transaction.id).order_by(TransactionProducts.id.asc()).all()
                list_products = []
                for product in qry_products:
                    d_product = {
                        'name' : product.product.name,
                        'price' : str(product.product.price),
                        'amount' : product.amount
                    }
                    list_products.append(d_product)
                # Dates/decimals are stringified for JSON serialization.
                d_transact = {
                    'id': transaction.id,
                    'status': transaction.status.name,
                    'date_created': str(transaction.date_created),
                    'date_shipped': str(transaction.date_shipped),
                    'date_arrived': str(transaction.date_arrived),
                    'products': list_products,
                    'total': str(transaction.total)
                }
                list_transactions.append(d_transact)
                d_result.update({'data': list_transactions})
        except (sqlalchemy.exc.SQLAlchemyError, sqlalchemy.exc.DBAPIError) as e:
            return dumps(e), 400

        return d_result, {'Content-Type': 'application/json; character=utf-8'}
Ejemplo n.º 11
0
def save_tables(
    db: SQLAlchemy,
    backup_name: str = "",
    structure: bool = True,
    data: bool = False,
    backup_path: str = BACKUP_PATH,
):
    """Serialize the selected tables' rows into <backup_path>/<backup_name>/*.obj.

    Aborts if the backup directory already exists. On a database error the
    partially written backup directory is removed again.
    """
    # Make a new folder for the backup
    backup_folder = Path("%s/%s" % (backup_path, backup_name))
    try:
        backup_folder.mkdir(parents=True, exist_ok=False)
    except FileExistsError:
        click.echo(
            "Can't save backup, because directory %s/%s already exists." %
            (backup_path, backup_name))
        return

    affected_classes = get_affected_classes(structure, data)
    c = None
    try:
        for c in affected_classes:
            file_path = "%s/%s/%s.obj" % (backup_path, backup_name,
                                          c.__tablename__)

            # Mode "xb": fail rather than overwrite an existing dump file.
            with open(file_path, "xb") as file_handler:
                file_handler.write(dumps(db.session.query(c).all()))
            click.echo("Successfully saved %s/%s." %
                       (backup_name, c.__tablename__))
    except SQLAlchemyError as e:
        click.echo(
            "Can't save table %s because of the following error:\n\n\t%s\n\nCleaning up..."
            % (c.__tablename__, e))
        # Clean up the partial backup so a retry can start fresh.
        rmtree(backup_folder)
        click.echo("Removed directory %s/%s." % (backup_path, backup_name))
Ejemplo n.º 12
0
    def backup_database(self):
        """Serialize every backed-up table's rows; return the list of blobs."""
        tables = (RequestList, Played, Song, Artist, Album, Mistags, Catalog)
        return [dumps(self.db.query(table).all()) for table in tables]
Ejemplo n.º 13
0
    def test_deserailize_colprop(self):
        """Deserializing a column_property query splits shared bind params.

        The original query reuses one bound parameter in two places; after a
        serializer round trip they become two distinct (but equal-valued)
        parameters with different anonymous names.
        """
        TestTable = self.classes.TestTable

        s = scoped_session(sessionmaker())

        expr = s.query(TestTable).filter(TestTable.some_primary_id == 123456)

        expr2 = serializer.loads(serializer.dumps(expr), TestTable.metadata, s)

        # note in the original, the same bound parameter is used twice
        self.assert_compile(
            expr,
            "SELECT test.some_id AS test_some_id, "
            "CAST(left(test.some_id, :left_1) AS INTEGER) AS anon_1, "
            "test.id AS test_id FROM test WHERE "
            "CAST(left(test.some_id, :left_1) AS INTEGER) = :param_1",
            checkparams={"left_1": 6, "param_1": 123456},
        )

        # in the deserialized, it's two separate parameter objects which
        # need to have different anonymous names.  they still have
        # the same value however
        self.assert_compile(
            expr2,
            "SELECT test.some_id AS test_some_id, "
            "CAST(left(test.some_id, :left_1) AS INTEGER) AS anon_1, "
            "test.id AS test_id FROM test WHERE "
            "CAST(left(test.some_id, :left_2) AS INTEGER) = :param_1",
            checkparams={"left_1": 6, "left_2": 6, "param_1": 123456},
        )
Ejemplo n.º 14
0
 def test_query_two(self):
     """Serialize/deserialize a join+LIKE query; results and values() survive."""
     q = Session.query(User).join(User.addresses).\
         filter(Address.email.like('%fred%'))
     q2 = serializer.loads(serializer.dumps(q, -1), users.metadata,
                           Session)
     eq_(q2.all(), [User(name='fred')])
     eq_(list(q2.values(User.id, User.name)), [(9, 'fred')])
Ejemplo n.º 15
0
 def test_columns(self):
     """A serialized Column deserializes to the identical object (identity)."""
     assert (
         serializer.loads(
             serializer.dumps(users.c.name, -1), users.metadata, Session
         )
         is users.c.name
     )
Ejemplo n.º 16
0
 def test_tables(self):
     """A serialized Table deserializes to the identical object (identity)."""
     assert (
         serializer.loads(
             serializer.dumps(users, -1), users.metadata, Session
         )
         is users
     )
Ejemplo n.º 17
0
 def set_to_cache(self, version, items):
     """Store the serialized items and their version marker in the cache."""
     if not self.cache:
         return
     ttl = self.cache_timeout
     self.cache.set(self.sections.data_cache_key, dumps(items), time=ttl)
     self.cache.set(self.sections.version_cache_key, version, time=ttl)
Ejemplo n.º 18
0
 def get_data(self):
     """Serialize all rows of every mapped class, keyed by class name."""
     database = self.db()
     return {
         mapped.__name__: dumps(database.session.query(mapped).all())
         for mapped in self.get_mapped_classes()
     }
Ejemplo n.º 19
0
    def test_orm_join(self):
        """An ORM join round-trips; its sides keep object identity."""
        from sqlalchemy.orm.util import join

        j = join(User, Address, User.addresses)

        j2 = serializer.loads(serializer.dumps(j, -1), users.metadata)
        # The deserialized join resolves back to the very same Table objects.
        assert j2.left is j.left
        assert j2.right is j.right
Ejemplo n.º 20
0
    def test_expression(self):
        """A core select round-trips: same SQL string, bind, and results."""

        expr = select([users]).select_from(users.join(addresses)).limit(5)
        re_expr = serializer.loads(serializer.dumps(expr, -1), users.metadata, None)
        eq_(str(expr), str(re_expr))

        # The deserialized expression re-acquires the metadata's bind.
        assert re_expr.bind is testing.db
        eq_(re_expr.execute().fetchall(), [(7, u"jack"), (8, u"ed"), (8, u"ed"), (8, u"ed"), (9, u"fred")])
Ejemplo n.º 21
0
    def test_annotated_one(self):
        """An annotated join serializes/deserializes under every protocol."""
        j = join(users, addresses)._annotate({"foo": "bar"})
        query = select([addresses]).select_from(j)

        # Compile once so any lazy compilation state is populated first.
        str(query)
        for prot in pickle_protocols():
            pickled_failing = serializer.dumps(j, prot)
            serializer.loads(pickled_failing, users.metadata, None)
Ejemplo n.º 22
0
 def get_data(self):
     """Serialize all rows of every mapped class, keyed by class name."""
     handle = self.db()
     dumped = {}
     for mapped_class in self.get_mapped_classes():
         rows = handle.session.query(mapped_class).all()
         dumped[mapped_class.__name__] = dumps(rows)
     return dumped
Ejemplo n.º 23
0
    def test_aliases(self):
        """A query with an aliased entity and self-join survives the round trip."""
        u7, u8, u9, u10 = Session.query(User).order_by(User.id).all()

        ualias = aliased(User)
        # Self-join: pair each user with every later-id user, capped at id < 9.
        q = Session.query(User, ualias).join((ualias, User.id < ualias.id)).filter(User.id<9).order_by(User.id, ualias.id)

        q2 = serializer.loads(serializer.dumps(q), users.metadata, Session)

        eq_(list(q2.all()), [(u7, u8), (u7, u9), (u7, u10), (u8, u9), (u8, u10)])
Ejemplo n.º 24
0
def backup_everything(session):
    """Serialize every table's rows (reverse dependency order) into a dict."""
    metadata = get_declarative_base().metadata
    return {
        table.name: dumps(session.query(table).all())
        for table in reversed(metadata.sorted_tables)
    }
Ejemplo n.º 25
0
 def _put_remote(self,otype,obj,oid,hid):
     """Insert a pickled object on the remote host owning it.

     Records the remote host for (otype, oid) in self.location, then sends
     a "put" request. Returns True on an "OK" response; otherwise logs the
     error and implicitly returns None (falsy).

     NOTE(review): the debug message concatenates ``remotehost`` with
     strings — assumes Host (or db.get's return) is a str; confirm.
     """
     remotehost = self.datacon.db.get(Host,hid)
     self.location[otype][oid] = remotehost
     res = self.send_request_owner(remotehost, otype.__name__, oid,"put", newobj=dumps(obj))
     logger.debug("[obm]: Inserting remotely at " + remotehost + " the object: " + str(obj))
     if res == "OK":
         return True
     else:
         logger.error(res)
Ejemplo n.º 26
0
 def query_all(self, file_name):
     """Serialize every SpotifyTrack row and write the blob to 'backup.txt'.

     ``file_name`` is kept for interface compatibility but is unused: the
     output path is hard-coded to 'backup.txt', matching original behavior.
     The session is always closed; the file handle is now closed too, even
     when the query raises (the original leaked it on error).
     """
     session = self.Session()
     try:
         # Context manager guarantees the handle is closed on any exit path.
         with open('backup.txt', 'wb') as f:
             q = session.query(SpotifyTrack)
             f.write(dumps(q.all()))
     finally:
         session.close()
Ejemplo n.º 27
0
def dump(destination):
    """Serialize the listed models' rows and write them as JSON to destination.

    NOTE(review): uses the Python 2 ``unicode(..., errors='ignore')``
    builtin — this function cannot run unmodified on Python 3; confirm the
    target runtime.
    """
    dump_models = []  # List of models you want to dump
    serialized = list()
    for model in dump_models:
        print('Dumping {}'.format(model))
        serialized.append(unicode(dumps(db.session.query(model).all()), errors='ignore'))
    with open(destination, 'w') as f:
        f.writelines(json.dumps(serialized))
    print('Done.')
Ejemplo n.º 28
0
 def test_expression(self):
     """A core select round-trips: same SQL string and same result rows."""
     expr = select(users).select_from(users.join(addresses)).limit(5)
     re_expr = serializer.loads(serializer.dumps(expr, -1), users.metadata,
                                None)
     eq_(str(expr), str(re_expr))
     # Execute via the session's connection (modern, unbound-metadata API).
     eq_(
         Session.connection().execute(re_expr).fetchall(),
         [(7, "jack"), (8, "ed"), (8, "ed"), (8, "ed"), (9, "fred")],
     )
Ejemplo n.º 29
0
 def test_expression(self):
     """A core select round-trips: same SQL string, bind, and results."""
     expr = \
         select([users]).select_from(users.join(addresses)).limit(5)
     re_expr = serializer.loads(serializer.dumps(expr, -1),
                                users.metadata, None)
     eq_(str(expr), str(re_expr))
     # The deserialized expression re-acquires the metadata's bind.
     assert re_expr.bind is testing.db
     eq_(re_expr.execute().fetchall(), [(7, 'jack'), (8, 'ed'),
         (8, 'ed'), (8, 'ed'), (9, 'fred')])
Ejemplo n.º 30
0
def main(argv=sys.argv):
    """Serialize menu-related tables and pickle them to ./db_backup.txt.

    argv[1] must be a config file path providing the 'sqlalchemy.' engine
    settings. Each table's rows are serialized individually, collected into
    a dict, and the dict is pickled to disk.
    """
    if len(argv) != 2:
        usage(argv)
    config_uri = argv[1]
    settings = get_appsettings(config_uri)
    engine = engine_from_config(settings, 'sqlalchemy.')
    DBSession.configure(bind=engine)
    serialized_menu_items = dumps(DBSession.query(MenuItem).all())
    serialized_menus = dumps(DBSession.query(Menu).all())
    serialized_allergens = dumps(DBSession.query(Allergen).all())
    serialized_data = {"menus": serialized_menus,
                       "menu_items": serialized_menu_items,
                       "allergens": serialized_allergens}
    # Binary mode is required by pickle; 'with' closes the handle reliably
    # (the original opened text-mode 'w+', never closed it, and shadowed
    # the 'file' builtin).
    with open('./db_backup.txt', 'wb') as backup_file:
        pickle.dump(serialized_data, backup_file)
Ejemplo n.º 31
0
 def test_query_two(self):
     """Serialize/deserialize a join+LIKE query; with_entities still works."""
     q = (
         Session.query(User)
         .join(User.addresses)
         .filter(Address.email.like("%fred%"))
     )
     q2 = serializer.loads(serializer.dumps(q, -1), users.metadata, Session)
     eq_(q2.all(), [User(name="fred")])
     eq_(list(q2.with_entities(User.id, User.name)), [(9, "fred")])
Ejemplo n.º 32
0
def backup_everything(session):
    """Serialize every table's rows into {table_name: blob}, reverse FK order."""
    sorted_tables = get_declarative_base().metadata.sorted_tables
    backup = {}
    for tbl in reversed(sorted_tables):
        rows = session.query(tbl).all()
        backup[tbl.name] = dumps(rows)
    return backup
Ejemplo n.º 33
0
    def test_orm_join(self):
        """An ORM join round-trips: identity of sides and adapter restored."""
        from sqlalchemy.orm.util import join

        j = join(User, Address, User.addresses)

        j2 = serializer.loads(serializer.dumps(j, -1), users.metadata)
        # The deserialized join resolves back to the very same Table objects.
        assert j2.left is j.left
        assert j2.right is j.right
        # Internal adapter chain must have been reconstructed as well.
        assert j2._target_adapter._next
Ejemplo n.º 34
0
def dump(destination):
    """Serialize the listed models' rows and write them as JSON to destination.

    NOTE(review): uses the Python 2 ``unicode(..., errors='ignore')``
    builtin — this function cannot run unmodified on Python 3; confirm the
    target runtime.
    """
    dump_models = []  # List of models you want to dump
    serialized = list()
    for model in dump_models:
        print('Dumping {}'.format(model))
        serialized.append(unicode(dumps(db.session.query(model).all()), errors='ignore'))
    with open(destination, 'w') as f:
        f.writelines(json.dumps(serialized))
    print('Done.')
Ejemplo n.º 35
0
    def put(self, id):
        """Update a transaction's status from the JSON request body.

        Status 2 also stamps date_shipped, status 3 stamps date_arrived
        (``now`` comes from the enclosing module, not visible here).
        Returns (serialized-body, status): 201 on success, 400 on DB error.
        """
        data = request.get_json();

        try:
            d_trasaction = {
                'status_id': data['status']
            }
            if data['status'] == 2:
                d_trasaction['date_shipped'] = now
            elif data['status'] == 3:
                d_trasaction['date_arrived'] = now
            session.query(TransactionsTbl).filter(TransactionsTbl.id == id).update(d_trasaction)
            session.commit()

        except (sqlalchemy.exc.SQLAlchemyError, sqlalchemy.exc.DBAPIError) as e:
            return dumps(e), 400

        return dumps({}), 201
Ejemplo n.º 36
0
    def test_query(self):
        """Round-trip an eager-loaded ORM query through the serializer.

        Checks that the deserialized query returns the same rows, still
        eager-loads (single SQL statement), and remains composable
        (join / filter / value / values) afterwards.
        """
        q = Session.query(User).filter(User.name == 'ed').options(
            eagerload(User.addresses))
        eq_(q.all(), [
            User(name='ed',
                 addresses=[Address(id=2),
                            Address(id=3),
                            Address(id=4)])
        ])

        q2 = serializer.loads(serializer.dumps(q), users.metadata, Session)

        def go():
            eq_(q2.all(), [
                User(name='ed',
                     addresses=[Address(id=2),
                                Address(id=3),
                                Address(id=4)])
            ])

        # Eager loading must survive serialization: exactly one statement.
        self.assert_sql_count(testing.db, go, 1)

        eq_(
            q2.join(User.addresses).filter(
                Address.email == '*****@*****.**').value(func.count('*')), 1)

        u1 = Session.query(User).get(8)

        q = Session.query(Address).filter(Address.user == u1).order_by(
            desc(Address.email))
        q2 = serializer.loads(serializer.dumps(q), users.metadata, Session)

        eq_(q2.all(), [
            Address(email='*****@*****.**'),
            Address(email='*****@*****.**'),
            Address(email='*****@*****.**')
        ])

        q = Session.query(User).join(User.addresses).filter(
            Address.email.like('%fred%'))
        q2 = serializer.loads(serializer.dumps(q), users.metadata, Session)
        eq_(q2.all(), [User(name='fred')])

        eq_(list(q2.values(User.id, User.name)), [(9, u'fred')])
Ejemplo n.º 37
0
    def run(self):
        """Worker loop: mirror ledger transactions into the database.

        Repeatedly catches the DB up to the ledger's latest version in
        batches; when the local version overtakes the ledger by more than a
        safety margin, the DB is gzip-archived, dropped and rebuilt.

        NOTE(review): the two bare ``except:`` clauses swallow everything,
        including KeyboardInterrupt/SystemExit — likely intentional for a
        daemon, but worth narrowing to ``except Exception:``.
        """
        global engine
        while True:
            logger.info('transactions db worker starting')
            engine = create_engine(self.url)
            metadata.create_all(engine)
            try:
                # get latest version in the db
                cur_ver = get_latest_version()
                cur_ver += 1  # TODO: later handle genesis
                logger.info('starting update at version {}'.format(cur_ver))
                # start the main loop
                while True:
                    try:
                        bver = get_latest_version_from_ledger()
                    except:
                        # Ledger unreachable — retry shortly.
                        sleep(1)
                        continue
                    if cur_ver > bver:
                        if cur_ver > bver + 50: # for safety due to typical blockchain behavior
                            sleep(1)
                            continue
                        # Local DB is ahead of the ledger: archive and reset.
                        file_path = '{}_{}.gz'.format(self.db_backup_path, strftime('%Y%m%d%H%M%S'))
                        logger.info('saving database to {}'.format(file_path))
                        with gzip.open(file_path, 'wb') as f:
                            f.write(dumps(engine.execute(select([txs])).fetchall()))
                        metadata.drop_all(engine)
                        metadata.create_all(engine)
                        break

                    # batch update
                    num = min(1000, bver - cur_ver)  # at most 5000 records at once
                    tx_data = get_raw_tx_lst(cur_ver, num)

                    # read records
                    res = parse_raw_tx_lst(*tx_data)
                    if not res:
                        sleep(5)
                        continue

                    # do the insertion
                    engine.execute(txs.insert(), res)

                    # update counter to the latest version we inserted
                    cur_ver = res[-1]['version']
                    logger.debug('update to version: {} - success'.format(cur_ver))

                    # update latest version to next
                    cur_ver += 1

                    # sleep relative to amount of rows fetched so we don't get a 429 error
                    sleep(0.001 * num)

            except:
                # Broad catch keeps the worker alive across any failure.
                logger.exception('Major error in tx_db_worker')
                sleep(2)
Ejemplo n.º 38
0
    def put(self, id):
        """Update an existing product row from the JSON request body.

        Returns (serialized-body, status): 201 on success, 400 with the
        serialized exception on a database error.
        """
        data = request.get_json()

        updated_fields = {
            'name': data['name'],
            'status_id': data['status'],
            'price': str(data['price']),
            'stock': data['stock'],
        }

        try:
            matching = session.query(ProductsTbl).filter(ProductsTbl.id == id)
            matching.update(updated_fields)
            session.commit()
        except (sqlalchemy.exc.SQLAlchemyError,
                sqlalchemy.exc.DBAPIError) as exc:
            return dumps(exc), 400

        return dumps({}), 201
Ejemplo n.º 39
0
def main():
    """Create tables and seed one Grid row with a serialized 25-cell layout."""
    with kingdom.app_context():
        db.metadata.create_all(db.engine)
        grid = GridLayout(25)
        grid.size[5] = 5
        # The layout object is serialized into the Grid row's 'grid' column.
        user = Grid(grid=serializer.dumps(grid), size=25, user_id=1)
        db.session.add(user)
        db.session.commit()
        print('User added.')
Ejemplo n.º 40
0
    def post(self):
        """Restore a .pynance zip backup: deserialize each file and merge rows.

        The uploaded zip's member names select the model whose metadata is
        used for deserialization; 'orders' and 'keys' archives contain lists
        whose items are re-serialized individually before merging. Always
        responds 200 with a JSON success flag.

        NOTE(review): ``loads`` on an uploaded archive deserializes
        untrusted data — a malicious backup can execute arbitrary code.
        """
        backup = None
        for filename in request.files:
            if filename == 'backup':
                try:
                    backup = request.files[filename]
                    if backup: break
                except Exception as e:
                    return jsonify({'succes': False, 'msg': 'Unable to read .pynance file'}), 200

        if backup is None: return jsonify({'succes': False, 'msg': 'Your backup seems to be empty'}), 200

        to_merge = []
        from backend.models.bot import BotModel
        with zipfile.ZipFile(backup) as zf:
            for backup_file in zf.filelist:
                with zf.open(backup_file.filename) as f:
                    data = f.read()

                from backend.models.status import StatusModel
                from backend.models.config import ConfigModel
                from backend.models.orders import OrdersModel
                from backend.models.system import SystemModel
                from backend.models.keys import KeysModel
                if 'bot' in backup_file.filename: to_merge.append(loads(data, BotModel.metadata, db.session))
                elif 'status' in backup_file.filename: to_merge.append(loads(data, StatusModel.metadata, db.session))
                elif 'config' in backup_file.filename: to_merge.append(loads(data, ConfigModel.metadata, db.session))
                elif 'system' in backup_file.filename: to_merge.append(loads(data, SystemModel.metadata, db.session))
                elif 'orders' in backup_file.filename: 
                    for item in loads(data):
                        to_merge.append(loads(dumps(item), OrdersModel.metadata, db.session))
                elif 'keys' in backup_file.filename:
                    for item in loads(data):
                        to_merge.append(loads(dumps(item), KeysModel.metadata, db.session))

        # merge() inserts-or-updates each restored row by primary key.
        for item in to_merge:
            db.session.merge(item)
        db.session.commit()
        bot = BotModel.query.first()
        bot.update_data({'online': False})

        return jsonify({'succes': True, 'msg': 'Backup has been restored!'}), 200
Ejemplo n.º 41
0
    def test_annotated_one(self):
        """An annotated join serializes/deserializes under every protocol."""
        j = join(users, addresses)._annotate({"foo": "bar"})
        query = select([addresses]).select_from(
            j
        )

        # Compile once so any lazy compilation state is populated first.
        str(query)
        for prot in pickle_protocols():
            pickled_failing = serializer.dumps(
                j, prot)
            serializer.loads(pickled_failing, users.metadata, None)
Ejemplo n.º 42
0
    def test_query_three(self):
        """A query over an aliased entity round-trips through the serializer.

        After deserialization the aliased entity is recovered from the
        query's internals so values() can be re-issued against it.
        """
        ua = aliased(User)
        q = \
            Session.query(ua).join(ua.addresses).\
               filter(Address.email.like('%fred%'))
        q2 = serializer.loads(serializer.dumps(q, -1), users.metadata,
                              Session)
        eq_(q2.all(), [User(name='fred')])

        # try to pull out the aliased entity here...
        ua_2 = q2._entities[0].entity_zero.entity
        eq_(list(q2.values(ua_2.id, ua_2.name)), [(9, 'fred')])
Ejemplo n.º 43
0
    def saveToAppLog(self):
        """Persist an ApplicationLog row describing the current action.

        Python 2 code (print statements). Two modes:
        - self.action set: logs the action with outcome suffix derived from
          self.value (truthy value => failure, "E"; else success, "I").
        - otherwise: infers INSERT/UPDATE/DELETE from the session's dirty/
          new/deleted sets, commits self.dao first, and logs its primary key.
        Global ``params`` supplies session, schema and logged-user details.
        """
        if self.action:
            if self.value:
                esito = " ERRATO"
                how = "E"
            else:
                esito = " CORRETTO"
                how = "I"
            message = self.action + esito
        else:
            # Derive the message from what the session is about to flush.
            if params["session"].dirty:
                message = "UPDATE " + self.dao.__class__.__name__
            elif params["session"].new:
                message = "INSERT " + self.dao.__class__.__name__
            elif params["session"].deleted:
                message = "DELETE " + self.dao.__class__.__name__
            else:
                message = "UNKNOWN ACTION"

        when = datetime.datetime.now()
        where = params["schema"]
        whoID = params["usernameLoggedList"][0]
        utentedb = params["usernameLoggedList"][3]
        utente = params["usernameLoggedList"][1]

        if self.action:
            whatstr = self.value
        else:
            # Commit first so the DAO has its primary key assigned.
            salvo = self.commit()
            if salvo:
                how = "I"
            else:
                how = "E"
            mapper = object_mapper(self.dao)
            pk = mapper.primary_key_from_instance(self.dao)
            whatstr = str(pk)

        app = ApplicationLog()
        app.schema = where
        app.message = message
        app.level = how
        print dumps(whatstr)
        app.strvalue = dumps(whatstr)
        app.registrazion_date = when
        app.utentedb = utentedb
        app.id_utente = whoID
        app.pkid = dumps(whatstr)
        print dumps(self.dao)
        app.object = dumps(self.dao)
        params["session"].add(app)
        self.commit()
        print "[LOG] %s id: %s da %s in %s in data %s" % (message, whatstr, utente, where, when.strftime("%d/%m/%Y"))
Ejemplo n.º 44
0
    def test_unicode(self):
        """Expressions over tables/columns with non-ASCII names round-trip."""
        m = MetaData()
        t = Table(ue('\u6e2c\u8a66'), m,
                Column(ue('\u6e2c\u8a66_id'), Integer))

        expr = select([t]).where(t.c[ue('\u6e2c\u8a66_id')] == 5)

        expr2 = serializer.loads(serializer.dumps(expr, -1), m)

        # Compiled SQL must keep the quoted unicode identifiers intact.
        self.assert_compile(
            expr2,
            ue('SELECT "\u6e2c\u8a66"."\u6e2c\u8a66_id" FROM "\u6e2c\u8a66" '
                'WHERE "\u6e2c\u8a66"."\u6e2c\u8a66_id" = :\u6e2c\u8a66_id_1'),
            dialect="default"
        )
Ejemplo n.º 45
0
def _backup_database(filepath):
    """Serialize every mapped table's rows to <filepath>database/<tablename>.

    Iterates the declarative registry, skipping entries without a
    __tablename__ (non-mapped registry values).

    NOTE(review): the files are opened in text mode 'w' — this only works if
    ``dumps`` returns str, not bytes; confirm which serializer is in use.
    """
    table_classes = Base._decl_class_registry.values()
    session = Session()

    for table_class in table_classes:
        if hasattr(table_class, '__tablename__'):
            query = session.query(table_class)
            serialized_data = dumps(query.all())

            new_filepath = filepath + 'database/' + table_class.__tablename__

            with open(new_filepath, 'w') as text_file:
                text_file.write(serialized_data)

    return
Ejemplo n.º 46
0
    def on_after_flush(self, app, session):
        """broadcast models events into the data-models channel

        channel: data-models
        event: <model-identifier>.<event>
        data: JSON representation of model

        <event> is one of ``create``, ``update``, ``delete``

        No-op when the app has no channels configured or the session carries
        no originating request.
        """
        request = session.request
        if not app.channels or not request:
            return
        # One publish per changed instance recorded during the flush.
        for instance, event in session.changes():
            app.channels.publish(
                app.config['CHANNEL_DATAMODEL'],
                '%s.%s' % (instance.__class__.__name__.lower(), event),
                dumps(instance)
            )
Ejemplo n.º 47
0
 def items_to_cache(self, query_id, items):
     """Serialize the items and store them under the query's cache key."""
     if not self.cache_enabled:
         return
     cache_key = self.items_key(query_id)
     self.cache.set(cache_key, dumps(items), time=self.cache_timeout)
Ejemplo n.º 48
0
import os

from tedx import model
from tedx.model import meta, user, place, comment, file, tag, scoring, notification, category

# Script: connect to the tedx MySQL database, query every model's rows,
# serialize the combined list and write it to a file named "dumps" in the
# current working directory. (Credentials in the URL are masked.)
engine = sa.create_engine("mysql://*****:*****@localhost:3306/tedx?charset=utf8")
model.init_model(engine)
# ... define mappers

# pickle the query
results = meta.Session.query(user.User).all()
results.extend(meta.Session.query(user.Follower).all())
results.extend(meta.Session.query(tag.Tag).all())
results.extend(meta.Session.query(tag.User_tag).all())
results.extend(meta.Session.query(place.Place).all())
results.extend(meta.Session.query(tag.Place_tag).all())
results.extend(meta.Session.query(comment.Comment).all())
results.extend(meta.Session.query(file.File).all())
results.extend(meta.Session.query(notification.Notification).all())
results.extend(meta.Session.query(scoring.Place_scoring).all())
results.extend(meta.Session.query(scoring.Comment_scoring).all())
results.extend(meta.Session.query(tag.Comment_tag).all())
results.extend(meta.Session.query(category.Category).all())
results.extend(meta.Session.query(category.User_category).all())
results.extend(meta.Session.query(category.Place_category).all())

dumped_data = dumps(results)

# NOTE(review): text mode 'w' assumes dumps() returns str — confirm.
dumped_file = open(os.path.join(os.getcwd(), "dumps"), 'w')
dumped_file.write(dumped_data)
dumped_file.close()
Ejemplo n.º 49
0
 def set_to_cache(self, version, items):
     """Cache the serialized items together with their version stamp."""
     if self.cache:
         timeout = self.cache_timeout
         payload = dumps(items)
         self.cache.set(self.sections.data_cache_key, payload, time=timeout)
         self.cache.set(self.sections.version_cache_key, version, time=timeout)
Ejemplo n.º 50
0
 def test_attribute(self):
     """A serialized mapped attribute deserializes to the identical object."""
     assert serializer.loads(serializer.dumps(User.name), None, None) is User.name
Ejemplo n.º 51
0
 def __init__(self, expression=None, name=None):
     """Store an optional expression (also serialized into expr_blob) and a name."""
     if expression is not None:
         self.expression = expression
         # Keep a serialized copy alongside the live object.
         self.expr_blob = dumps(expression)
     self.name = name
Ejemplo n.º 52
0
 def test_mapper(self):
     """A serialized Mapper deserializes to the identical object (identity)."""
     user_mapper = class_mapper(User)
     assert serializer.loads(serializer.dumps(user_mapper), None, None) is user_mapper
Ejemplo n.º 53
0
 def test_columns(self):
     """A serialized Column deserializes to the identical object (identity)."""
     assert serializer.loads(serializer.dumps(users.c.name), users.metadata, Session) is users.c.name
Ejemplo n.º 54
0
 def test_tables(self):
     """A serialized Table deserializes to the identical object (identity)."""
     assert serializer.loads(serializer.dumps(users), users.metadata, Session) is users
Ejemplo n.º 55
0
 def test_any(self):
     """A relationship .any() criterion compiles identically after round trip."""
     r = User.addresses.any(Address.email=='x')
     ser = serializer.dumps(r)
     x = serializer.loads(ser, users.metadata)
     eq_(str(r), str(x))
Ejemplo n.º 56
0
def back_up_db():
    meta = MetaData()
    meta.reflect(bind=db.engine)
    for table in reversed(meta.sorted_tables):
        s_d = dumps(db.session.query(table).all())