def test_table_binds(self):
    """Statements against either unbound table route through the binds
    supplied to sessionmaker()."""
    # ensure tables are unbound
    meta2 = sa.MetaData()
    users_unbound = users.tometadata(meta2)
    addresses_unbound = addresses.tometadata(meta2)

    mapper(Address, addresses_unbound)
    mapper(
        User,
        users_unbound,
        properties={
            'addresses': relationship(
                Address,
                backref=backref("user", cascade="all"),
                cascade="all",
            )
        },
    )

    Session = sessionmaker(
        binds={
            users_unbound: self.metadata.bind,
            addresses_unbound: self.metadata.bind,
        }
    )
    session = Session()

    user = User(id=1, name='ed')
    session.add(user)
    eq_(
        session.query(User).filter(User.id == 1).all(),
        [User(id=1, name='ed')],
    )

    session.execute(users_unbound.insert(), params=dict(id=2, name='jack'))
    eq_(
        session.execute(
            users_unbound.select(users_unbound.c.id == 2)
        ).fetchall(),
        [(2, 'jack')],
    )
    eq_(
        session.execute(users_unbound.select(User.id == 2)).fetchall(),
        [(2, 'jack')],
    )

    session.execute(users_unbound.delete())
    eq_(session.execute(users_unbound.select()).fetchall(), [])
    session.close()
def _do_save(self):
    """Drain ``self.write_queue`` and persist the queued items in batches.

    Must not run on the reactor's I/O thread.  ``self.writelock`` is set
    while a batch is collected and written — presumably a simple flag for
    producers, not a real mutex; TODO confirm against the rest of the class.
    """
    assert not isInIOThread()
    while not self.write_queue.empty():
        items = []
        try:
            self.writelock = True
            try:
                # Pull everything currently queued without blocking.
                while True:
                    items.append(self.write_queue.get_nowait())
            except Empty:
                pass
            session = Session()
            try:
                session.add_all(items)
                session.commit()
            except:
                # Roll back the failed batch, then re-raise to the caller.
                session.rollback()
                raise
            finally:
                session.close()
        finally:
            self.writelock = False
def test_deferred_attribute_set(self):
    """test no SELECT emitted when assigning to a deferred mutable
    attribute."""
    mutable_t, Foo = self.tables.mutable_t, self.classes.Foo
    sa.orm.clear_mappers()
    mapper(
        Foo,
        mutable_t,
        properties={"data": sa.orm.deferred(mutable_t.c.data)},
    )

    obj = Foo(data=pickleable.Bar(4, 5), val=u"some val")
    session = Session()
    session.add(obj)
    session.commit()
    session.close()

    obj = session.query(Foo).first()

    def go():
        obj.data = pickleable.Bar(10, 15)

    # plain assignment must not trigger a deferred load
    self.sql_count_(0, go)
    session.commit()
    eq_(obj.data.x, 10)
def test_11_pickle(self):
    """An InstanceState restored from a 1.1-era pickle reports a None
    identity_token, expanded into its identity_key."""
    users = self.tables.users
    mapper(User, users)

    session = Session()
    persisted = User(id=1, name='ed')
    session.add(persisted)
    session.commit()
    session.close()

    manager = instrumentation._SerializeManager.__new__(
        instrumentation._SerializeManager)
    manager.class_ = User

    legacy_state = {
        'class_': User,
        'modified': False,
        'committed_state': {},
        'instance': persisted,
        'manager': manager,
        'key': (User, (1,)),
        'expired_attributes': set(),
        'expired': True,
    }
    state = sa_state.InstanceState.__new__(sa_state.InstanceState)
    state.__setstate__(legacy_state)

    eq_(state.identity_token, None)
    eq_(state.identity_key, (User, (1,), None))
def test_conditional_step_oneline(self):
    """Two one-line conditional lambdas must not collide in the baked
    query cache."""
    User = self.classes.User

    base_bq = self.bakery(lambda s: s.query(User.id, User.name))
    base_bq += lambda q: q.order_by(User.id)

    for _ in range(4):
        for exclude_jack in (False, True):
            bq = base_bq._clone()

            # we were using (filename, firstlineno) as cache key,
            # which fails for this kind of thing!
            bq += (
                (lambda q: q.filter(User.name != "jack"))
                if exclude_jack
                else (lambda q: q.filter(User.name == "jack"))
            )  # noqa

            session = Session(autocommit=True)
            rows = bq(session).all()
            if exclude_jack:
                eq_(rows, [(8, u"ed"), (9, u"fred"), (10, u"chuck")])
            else:
                eq_(rows, [(7, "jack")])
            session.close()
def test_bind_arguments(self):
    """connection() resolves its engine by precedence:
    explicit bind > mapper-level bind > clause > session default."""
    users, Address, addresses, User = (
        self.tables.users,
        self.classes.Address,
        self.tables.addresses,
        self.classes.User,
    )
    mapper(User, users)
    mapper(Address, addresses)

    engine1 = engines.testing_engine()
    engine2 = engines.testing_engine()
    engine3 = engines.testing_engine()

    sess = Session(engine3)
    sess.bind_mapper(User, engine1)
    sess.bind_mapper(Address, engine2)

    # session-level default
    assert sess.connection().engine is engine3
    # an explicit bind always wins
    assert sess.connection(bind=engine1).engine is engine1
    assert sess.connection(mapper=Address, bind=engine1).engine is engine1
    # mapper- and clause-derived binds
    assert sess.connection(mapper=Address).engine is engine2
    assert sess.connection(clause=addresses.select()).engine is engine2
    # mapper beats clause
    assert sess.connection(
        mapper=User, clause=addresses.select()).engine is engine1
    assert sess.connection(
        mapper=User, clause=addresses.select(), bind=engine2
    ).engine is engine2
    sess.close()
def test_instance_lazy_relation_loaders(self):
    """A per-instance loader option (lazyload over a noload default)
    survives pickling/unpickling of the instance."""
    users, addresses = (self.tables.users, self.tables.addresses)
    mapper(
        User,
        users,
        properties={'addresses': relationship(Address, lazy='noload')},
    )
    mapper(Address, addresses)

    session = Session()
    user = User(
        name='ed',
        addresses=[Address(email_address='*****@*****.**')],
    )
    session.add(user)
    session.commit()
    session.close()

    user = session.query(User).options(lazyload(User.addresses)).first()
    clone = pickle.loads(pickle.dumps(user))

    session = Session()
    session.add(clone)
    assert clone.addresses
def test_child_row_switch_two(self):
    """A row deleted and re-inserted by one session while another holds
    a stale copy: committing the stale session must raise
    StaleDataError on dialects with reliable rowcounts, and is simply
    committed otherwise.
    """
    P = self.classes.P
    Session = sessionmaker()

    # TODO: not sure this test is
    # testing exactly what its looking for
    sess1 = Session()
    sess1.add(P(id='P1', data='P version 1'))
    sess1.commit()
    sess1.close()
    p1 = sess1.query(P).first()

    sess2 = Session()
    p2 = sess2.query(P).first()

    sess1.delete(p1)
    sess1.commit()

    # this can be removed and it still passes
    sess1.add(P(id='P1', data='P version 2'))
    sess1.commit()

    p2.data = 'P overwritten by concurrent tx'
    if testing.db.dialect.supports_sane_rowcount:
        assert_raises_message(
            orm.exc.StaleDataError,
            r"UPDATE statement on table 'p' expected to update "
            r"1 row\(s\); 0 were matched.",
            sess2.commit,
        )
    else:
        # bug fix: the original read ``sess2.commit`` without calling
        # it, so nothing was exercised on this branch
        sess2.commit()
def test_map_to_select(self):
    """A mapper over a select() alias works as the base of an
    inheritance hierarchy."""
    Base, Child = self.classes.Base, self.classes.Child
    base, child = self.tables.base, self.tables.child

    base_select = select([base]).alias()
    mapper(
        Base,
        base_select,
        polymorphic_on=base_select.c.type,
        polymorphic_identity="base",
    )
    mapper(Child, child, inherits=Base, polymorphic_identity="child")

    session = Session()

    # 2. use an id other than "1" here so can't rely on
    # the two inserts having the same id
    child_obj = Child(id=12, name="c1")
    session.add(child_obj)
    session.commit()
    session.close()

    child_obj = session.query(Child).one()
    eq_(child_obj.name, "c1")
def test_get_includes_getclause(self):
    """Baked get() rebuilds its cache entry when mapper._get_clause is
    regenerated (issue #3597)."""
    User = self.classes.User
    bq = self.bakery(lambda s: s.query(User))

    for _ in range(5):
        session = Session()
        loaded = bq(session).get(7)
        eq_(loaded.name, 'jack')
        session.close()
    eq_(len(bq._bakery), 2)

    # simulate race where mapper._get_clause
    # may be generated more than once
    from sqlalchemy import inspect
    del inspect(User).__dict__['_get_clause']

    for _ in range(5):
        session = Session()
        loaded = bq(session).get(7)
        eq_(loaded.name, 'jack')
        session.close()
    eq_(len(bq._bakery), 4)
def test_09_pickle(self):
    """An 0.9-format pickled InstanceState restores its expired
    attributes and gains an identity_token slot in its key."""
    users = self.tables.users
    mapper(User, users)

    session = Session()
    session.add(User(id=1, name='ed'))
    session.commit()
    session.close()

    inst = User(id=1, name='ed')
    del inst._sa_instance_state

    state = sa_state.InstanceState.__new__(sa_state.InstanceState)
    legacy_state = {
        'class_': User,
        'modified': False,
        'committed_state': {},
        'instance': inst,
        'callables': {'name': state, 'id': state},
        'key': (User, (1,)),
        'expired': True,
    }
    manager = instrumentation._SerializeManager.__new__(
        instrumentation._SerializeManager)
    manager.class_ = User
    legacy_state['manager'] = manager

    state.__setstate__(legacy_state)
    eq_(state.expired_attributes, {'name', 'id'})

    session = Session()
    session.add(inst)
    eq_(inst.name, 'ed')

    # test identity_token expansion
    eq_(sa.inspect(inst).key, (User, (1, ), None))
def test_11_pickle(self):
    """A 1.1-format pickled state restores with identity_token None,
    which is expanded into the identity_key tuple."""
    users = self.tables.users
    mapper(User, users)

    session = Session()
    stored = User(id=1, name="ed")
    session.add(stored)
    session.commit()
    session.close()

    manager = instrumentation._SerializeManager.__new__(
        instrumentation._SerializeManager
    )
    manager.class_ = User

    legacy = {
        "class_": User,
        "modified": False,
        "committed_state": {},
        "instance": stored,
        "manager": manager,
        "key": (User, (1,)),
        "expired_attributes": set(),
        "expired": True,
    }
    state = sa_state.InstanceState.__new__(sa_state.InstanceState)
    state.__setstate__(legacy)

    eq_(state.identity_token, None)
    eq_(state.identity_key, (User, (1,), None))
def test_join_w_subquery(self):
    """A with_for_update() query used as a subquery can still be
    joined against."""
    User = self.classes.User
    Address = self.classes.Address

    session = Session()
    locked_users = session.query(User).with_for_update().subquery()
    session.query(locked_users).join(Address).all()
    session.close()
def test_inner_joinedload_wo_limit(self):
    """joinedload(innerjoin=True) combines with FOR UPDATE when no
    LIMIT is present."""
    User = self.classes.User
    session = Session()
    session.query(User).options(
        joinedload(User.addresses, innerjoin=True)
    ).with_for_update().all()
    session.close()
def test_deferred_mutate(self):
    """test mutations are detected on a deferred mutable attribute."""
    mutable_t, Foo = self.tables.mutable_t, self.classes.Foo
    sa.orm.clear_mappers()
    mapper(
        Foo,
        mutable_t,
        properties={"data": sa.orm.deferred(mutable_t.c.data)},
    )

    obj = Foo(data=pickleable.Bar(4, 5), val=u"some val")
    session = Session()
    session.add(obj)
    session.commit()
    session.close()

    obj = session.query(Foo).first()

    def go():
        obj.data.x = 10

    # one SELECT: the deferred attribute loads before mutation
    self.sql_count_(1, go)
    session.commit()

    def go():
        eq_(obj.data.x, 10)

    # one SELECT: commit expired the instance, so access reloads it
    self.sql_count_(1, go)
def get_bibtype_id(bibtype):
    """Return the first ``ref.EntryType`` row whose label equals
    *bibtype*, or ``None`` when no such row exists.

    The session is always closed, even if the query raises.
    """
    engine = _get_engine()
    sess = Session(bind=engine)
    try:
        et = sess.query(ref.EntryType).filter(
            ref.EntryType.label == bibtype).first()
    finally:
        # bug fix: previously the session leaked when the query raised
        sess.close()
    return et
def delete_user(self, name, pwd):
    """Delete the user identified by *name*/*pwd*.

    Also removes *name* from the ``followings`` list of every follower
    and from the ``followers`` list of every followed user.

    :returns: 1 on success, 0 if no matching user exists.
    """
    result = 0
    session = Session(self.engine)
    try:
        query = session.query(User).filter(User.name == name)
        query = query.filter(User.pwd == pwd)
        user = query.first()
        if user:
            if user.followers:
                q = session.query(User)
                flrs = q.filter(User.name.in_(user.followers))
                for u in flrs:
                    # drop the deleted user from each follower's followings
                    u.followings.remove(name)
                    qu = q.filter(User.name == u.name)
                    qu.update({"followings": u.followings})
            if user.followings:
                q = session.query(User)
                flgs = q.filter(User.name.in_(user.followings))
                for u in flgs:
                    # drop the deleted user from each followee's followers
                    u.followers.remove(name)
                    qu = q.filter(User.name == u.name)
                    qu.update({"followers": u.followers})
            session.delete(user)
            session.commit()
            result = 1
    finally:
        # bug fix: close the session on all paths, not only success
        session.close()
    return result
def test_09_pickle(self):
    """Restoring an 0.9-format pickled state repopulates the expired
    attributes and expands the identity key with a None token."""
    users = self.tables.users
    mapper(User, users)

    session = Session()
    session.add(User(id=1, name="ed"))
    session.commit()
    session.close()

    inst = User(id=1, name="ed")
    del inst._sa_instance_state

    state = sa_state.InstanceState.__new__(sa_state.InstanceState)
    legacy = {
        "class_": User,
        "modified": False,
        "committed_state": {},
        "instance": inst,
        "callables": {"name": state, "id": state},
        "key": (User, (1,)),
        "expired": True,
    }
    manager = instrumentation._SerializeManager.__new__(
        instrumentation._SerializeManager
    )
    manager.class_ = User
    legacy["manager"] = manager

    state.__setstate__(legacy)
    eq_(state.expired_attributes, {"name", "id"})

    session = Session()
    session.add(inst)
    eq_(inst.name, "ed")

    # test identity_token expansion
    eq_(sa.inspect(inst).key, (User, (1,), None))
def update_user_class(self, name, pwd, rkgs):
    """Recompute a user's class-affinity scores from film rankings.

    :param rkgs: sequence of ``(film_id, rating)`` pairs.
    :returns: 1 when the user exists and was updated, 0 otherwise.
    """
    session = Session(self.engine)
    result = 0
    try:
        user = session.query(User).filter(
            User.name == name, User.pwd == pwd)
        if user.first():
            liked, unliked = [], []
            fids = [r[0] for r in rkgs]
            query = session.query(ClassRanking)
            query = query.filter(ClassRanking.film_id.in_(fids))
            db_rkgs = {(cr.film_id, cr.class_id): cr for cr in query}
            # accumulate per-class deviation between the user's rating
            # and each class's stored rate for the same film
            class1, class2, class3 = [1, 0.0], [2, 0.0], [3, 0.0]
            for fid, rkg in rkgs:
                if rkg:
                    liked.append(fid)
                else:
                    unliked.append(fid)
                class1[1] += float(rkg) - db_rkgs[(fid, 1)].rate
                class2[1] += float(rkg) - db_rkgs[(fid, 2)].rate
                class3[1] += float(rkg) - db_rkgs[(fid, 3)].rate
            # normalize each deviation into an affinity score
            class1[1] = 1.0 - class1[1] / len(rkgs)
            class2[1] = 1.0 - class2[1] / len(rkgs)
            class3[1] = 1.0 - class3[1] / len(rkgs)
            class_ids = [class1, class2, class3]
            user.update({"class_ids": class_ids})
            # NOTE(review): "pending" is assigned the liked list —
            # looks intentional but verify against callers
            user.update({"pending": liked})
            user.update({"liked": liked})
            user.update({"unliked": unliked})
            session.commit()
            result = 1
    finally:
        # bug fix: previously an exception leaked the session
        session.close()
    return result
def send_data():
    """Persist causa/comentario/tipo_corte for an anomaly posted via
    form fields; returns a short status string for the client."""
    bottle_auth.require(fail_redirect='/login')
    expected = set(['anomaly_id', 'comentario', 'causa_id', 'tipo_corte'])
    if expected == set(request.forms.keys()):
        session = Session(engine)
        try:
            anomaly_id = request.forms.get('anomaly_id', False)
            causa_id = request.forms.get("causa_id", False)
            comentario = request.forms.get("comentario", "")
            tipo_corte = request.forms.get("tipo_corte", 0)
            if anomaly_id and causa_id:
                logger.info("anomaly_id {}".format(anomaly_id))
                queryAnomaly = session.query(Anomaly).filter_by(
                    id=anomaly_id)
                if queryAnomaly.count():
                    queryAnomaly.update({
                        'causa_id': causa_id,
                        'comentario_causa': comentario,
                        'tipo_corte': tipo_corte,
                        'timestamp_asignacion': datetime.datetime.now()
                    })
                    session.commit()
                    logger.info("post guardado")
                    return "guardado"
                else:
                    logger.info(
                        "no encontre anomaly {}".format(anomaly_id))
                    return "no encontre anomaly {}".format(anomaly_id)
            else:
                logger.info(
                    "no encontro valor en campos anomaly_id y causa_id")
                return "no encontro valor en campos anomaly_id y causa_id"
        finally:
            # bug fix: the session leaked on every branch except the
            # successful update; close it unconditionally
            session.close()
    else:
        logger.error("post mal generado")
        # bug fix: response said "ggenerado", inconsistent with the log
        return "post mal generado"
def check_log_in_info(self, name, pwd):
    """Return 1 when a user with matching name and password exists,
    else 0."""
    session = Session(self.engine)
    match = (
        session.query(User)
        .filter(User.name == name)
        .filter(User.pwd == pwd)
        .first()
    )
    result = 1 if match else 0
    session.close()
    return result
def test_table_binds(self):
    """Statements for both unbound tables route through the engines
    passed as sessionmaker() binds."""
    Address, addresses, users, User = (
        self.classes.Address,
        self.tables.addresses,
        self.tables.users,
        self.classes.User,
    )

    # ensure tables are unbound
    meta2 = sa.MetaData()
    users_unbound = users.tometadata(meta2)
    addresses_unbound = addresses.tometadata(meta2)

    mapper(Address, addresses_unbound)
    mapper(
        User,
        users_unbound,
        properties={
            "addresses": relationship(
                Address,
                backref=backref("user", cascade="all"),
                cascade="all",
            )
        },
    )

    Session = sessionmaker(
        binds={
            users_unbound: self.metadata.bind,
            addresses_unbound: self.metadata.bind,
        }
    )
    session = Session()

    user = User(id=1, name="ed")
    session.add(user)
    eq_(
        session.query(User).filter(User.id == 1).all(),
        [User(id=1, name="ed")],
    )

    session.execute(
        users_unbound.insert(), params={"id": 2, "name": "jack"})
    eq_(
        session.execute(
            users_unbound.select(users_unbound.c.id == 2)
        ).fetchall(),
        [(2, "jack")],
    )
    eq_(
        session.execute(users_unbound.select(User.id == 2)).fetchall(),
        [(2, "jack")],
    )

    session.execute(users_unbound.delete())
    eq_(session.execute(users_unbound.select()).fetchall(), [])
    session.close()
def fill_genre_table(self):
    """Populate the genre table with ids 1..N taken from
    Genre.__genres__."""
    session = Session(self.engine)
    for genre_id, gname in enumerate(Genre.__genres__, start=1):
        genre = Genre(gname)
        genre.id = genre_id
        session.add(genre)
    session.commit()
    session.close()
def fill_user_class_table(self):
    """Seed four default UserClass rows with ids 0 through 3."""
    session = Session(self.engine)
    for class_id in range(4):
        user_class = UserClass("Default %s" % class_id)
        user_class.id = class_id
        session.add(user_class)
    session.commit()
    session.close()
def _test_baked_lazy_loading(self, set_option):
    """Exercise baked lazy loading across a matrix of filter/join
    combinations, asserting both result rows and SQL statement counts.

    :param set_option: when True, ``baked_lazyload()`` is applied as
        an explicit option on ``User.addresses``.
    """
    User, Address = self.classes.User, self.classes.Address

    base_bq = self.bakery(
        lambda s: s.query(User))

    if set_option:
        base_bq += lambda q: q.options(baked_lazyload(User.addresses))

    base_bq += lambda q: q.order_by(User.id)
    assert_result = self.static.user_address_result

    # loop several times so later passes hit the baked cache
    for i in range(4):
        for cond1, cond2 in itertools.product(
                *[(False, True) for j in range(2)]):
            bq = base_bq._clone()

            sess = Session()

            # cond1 selects which users; cond2 adds a join against an
            # aggregate subquery
            if cond1:
                bq += lambda q: q.filter(User.name == 'jack')
            else:
                bq += lambda q: q.filter(User.name.like('%ed%'))
            if cond2:
                ct = func.count(Address.id).label('count')
                subq = sess.query(
                    ct,
                    Address.user_id).group_by(Address.user_id).\
                    having(ct > 2).subquery()
                bq += lambda q: q.join(subq)

            # expected rows and statement counts per combination
            if cond2:
                if cond1:
                    def go():
                        result = bq(sess).all()
                        eq_([], result)
                    self.assert_sql_count(testing.db, go, 1)
                else:
                    def go():
                        result = bq(sess).all()
                        eq_(assert_result[1:2], result)
                    self.assert_sql_count(testing.db, go, 2)
            else:
                if cond1:
                    def go():
                        result = bq(sess).all()
                        eq_(assert_result[0:1], result)
                    self.assert_sql_count(testing.db, go, 2)
                else:
                    def go():
                        result = bq(sess).all()
                        eq_(assert_result[1:3], result)
                    self.assert_sql_count(testing.db, go, 3)

            sess.close()
def _fixture(self):
    """Insert ten identical Parent rows, then leave a clean slate."""
    Parent = self.classes.Parent
    session = Session()
    session.add_all(
        [
            Parent(data1='d1', data2='d2', data3='d3', data4='d4')
            for _ in range(10)
        ]
    )
    session.commit()
    session.close()
def traverse(self, engine):
    """Visit every persisted instance of ``self.klass``, applying each
    child spec to it; only valid on the root node."""
    assert not self.parent, "Can only call traverse() from the root."
    session = Session(engine)
    try:
        for obj in session.query(self.klass):
            for child in self.children.values():
                child._traverse(obj)
    finally:
        session.close()
def persist(self, connection):
    """Insert this Order unless a row with the same orderid already
    exists (in which case a notice is printed instead)."""
    session = Session(connection)
    try:
        existing = session.query(Order).filter(
            Order.orderid == self.orderid).first()
        if existing:
            print("The data is already present in the table.")
        else:
            session.add(self)
            session.commit()
    finally:
        # bug fix: guarantee the session closes on every path,
        # including query/commit failures
        session.close()
def _fixture(self):
    """Return a closed session plus four transient User objects; the
    rows with ids 7 and 8 were loaded first so their identities are in
    play."""
    User = self.classes.User
    session = Session()
    u1 = User(id=1, name='u1')
    u2 = User(id=2, name='u2')
    u3 = User(id=7, name='u3')
    u4 = User(id=8, name='u4')
    session.query(User).filter(User.id.in_([7, 8])).all()
    session.close()
    return session, [u1, u2, u3, u4]
def get_or_create_pl(model, item):
    '''Fetch an existing row matching *item*'s unique fields — updating
    it with the remaining column values — or create it when absent.

    Lookup tries individually-unique columns first, then any
    UniqueConstraint declared in ``__table_args__``.
    '''
    # column-name -> value maps derived from the model's table definition
    get_params = lambda model, item: {
        c.name: item.get(c.name)
        for c in model.__table__.columns if c.name != 'id'}
    get_unique = lambda model, item: {
        c.name: item.get(c.name)
        for c in model.__table__.columns
        if c.name != 'id' and getattr(c, 'unique')}
    # bug fix: iterate the columns of each UniqueConstraint (targ)
    # rather than unconditionally reading __table_args__[0].columns
    get_unique_together = lambda model, item: [
        {c.name: item.get(c.name) for c in targ.columns}
        for targ in model.__table_args__
        if isinstance(targ, sqlalchemy.UniqueConstraint)]

    session = Session(bind=ENGINE)
    instance = None
    unique = get_unique(model, item)
    for k, v in unique.iteritems():
        query = session.query(model).filter_by(**{k: v})
        instance = query.first()
        if instance:
            break
    if not instance:
        unique_together = get_unique_together(model, item)
        for params in unique_together:
            query = session.query(model).filter_by(**params)
            instance = query.first()
            if instance:
                break

    kwargs = get_params(model, item)
    params = dict(
        (k, v) for k, v in kwargs.iteritems()
        if not isinstance(v, ClauseElement))
    if not instance:
        # params.update(defaults)
        instance = model(**params)
    else:
        # refresh the existing row with the incoming values
        for k, v in params.iteritems():
            setattr(instance, k, v)
    try:
        session.add(instance)
        session.commit()
    except Exception:
        session.close()
        raise
    session.refresh(instance)  # Refreshing before session close
    session.close()
    return instance
def station():
    """Return every station identifier as a JSON array."""
    # Create our session (link) from Python to the DB
    db_session = Session(engine)
    rows = db_session.query(Station.station).all()
    db_session.close()
    return jsonify(rows)
def go():
    """Open a session, force a connection checkout, then close."""
    session = Session(testing.db)
    session.connection()
    session.close()
def unemploymentData():
    """Return unemployment-claim rows as JSON, filtered by optional
    ``start_date``, ``end_date`` and comma-separated ``state_abbr``
    query-string parameters.
    """
    start_date = request.args.get('start_date')
    end_date = request.args.get('end_date')
    stateparam = request.args.get("state_abbr")
    session = Session(engine)
    if not start_date:
        # query the min of all file_week_ended entries if no date is
        # given in that parameter
        # NOTE(review): this keeps the Query object itself and uses it
        # as a scalar subquery in the filters below — confirm intended
        min_start_date = session.query(
            func.min(unemployment.file_week_ended))
        start_date = min_start_date
    if not end_date:
        # query the max of all file_week_ended entries if no date is
        # given in that parameter
        max_end_date = session.query(
            func.max(unemployment.file_week_ended))
        end_date = max_end_date
    if not stateparam:
        results = session.query(unemployment).filter(
            unemployment.file_week_ended >= start_date).filter(
            unemployment.file_week_ended <= end_date)
    if stateparam:
        # debug tracing left in place intentionally
        print("---------------------------")
        print("Whats in State:", stateparam)
        print("Whats it's type:", type(stateparam))
        print("---------------------------")
        stateparam = stateparam.split(',')
        print("Whats in State after split:", stateparam)
        print("What type is it now?", type(stateparam))
        print("---------------------------")
        if isinstance(stateparam, list):
            print("Are you making it to this line?")
            # this should make an array of states valid and handle the
            # single-state case
            results = session.query(unemployment).filter(
                unemployment.file_week_ended >= start_date).filter(
                unemployment.file_week_ended <= end_date).filter(
                unemployment.state_abbr.in_(stateparam)).all()
    session.close()
    data = []
    # serialize each row into a plain dict for jsonify
    for result in results:
        data.append({
            "state": result.state,
            "state_abbr": result.state_abbr,
            "file_week_ended": result.file_week_ended,
            "initial_claims": result.initial_claims,
            "reflecting_week_ended": result.reflecting_week_ended,
            "continued_claims": result.continued_claims,
            "covered_employment": result.covered_employment,
            "insured_unemployment_rate": result.insured_unemployment_rate
        })
    return jsonify(data)
def temp_start_end(start, end):
    """Fetch the temp data that matches the path variable supplied
    by the user, or a 404 if not.

    Returns min/max/avg of ``Measurement.tobs`` between *start* and
    *end* (both ``YYYY-MM-DD``), with a 404 for malformed or
    out-of-range dates.
    """
    # make sure the date format is correct
    try:
        # convert inputs to dates
        # start date
        start_date_convert = dt.datetime.strptime(start, '%Y-%m-%d')
        query_date_start = start_date_convert.date()
        # end date
        end_date_convert = dt.datetime.strptime(end, '%Y-%m-%d')
        query_date_end = end_date_convert.date()
    # if date format is wrong
    except ValueError:
        return jsonify({"error": "Date format incorrect"}), 404

    # Create our session (link) from Python to the DB
    session = Session(engine)

    # find first date in the database:
    first_date = session.query(Measurement.date).order_by(
        Measurement.date).first()
    # source: https://stackoverflow.com/questions/23324266/converting-string-to-date-object-without-time-info
    # Create datetime object and then convert to date object
    first_date_convert = dt.datetime.strptime(first_date[0], '%Y-%m-%d')
    first_date_convert = first_date_convert.date()

    # find last date in the database:
    last_date = session.query(Measurement.date).order_by(
        Measurement.date.desc()).first()
    # Create datetime object and then convert to date object
    last_date_convert = dt.datetime.strptime(last_date[0], '%Y-%m-%d')
    last_date_convert = last_date_convert.date()

    # determine if user date is within database range
    # NOTE(review): bitwise | on booleans acts like non-short-circuit
    # "or" here
    if (query_date_start < first_date_convert) | (
            query_date_start > last_date_convert) | (
            query_date_end < first_date_convert) | (
            query_date_end > last_date_convert):
        # close session
        session.close()
        # return error message
        return jsonify({"error": "Date not found."}), 404
    else:
        # lowest temp recorded for most active station
        lowest_temp = session.query(func.min(Measurement.tobs)).\
            filter(Measurement.date >= query_date_start).\
            filter(Measurement.date <= query_date_end).all()
        # highest temp recorded for most active station
        highest_temp = session.query(func.max(Measurement.tobs)).\
            filter(Measurement.date >= query_date_start).\
            filter(Measurement.date <= query_date_end).all()
        # average temp recorded for most active station
        average_temp = session.query(func.avg(Measurement.tobs)).\
            filter(Measurement.date >= query_date_start).\
            filter(Measurement.date <= query_date_end).all()
        # close session
        session.close()
        # add results to a dictionary
        temp_info_dict = {
            'TMax': highest_temp[0][0],
            'TMin': lowest_temp[0][0],
            'TAvg': round(average_temp[0][0], 2)
        }
        # jsonify dictionary result and return
        return jsonify(temp_info_dict)
#!/usr/bin/python3
""" Script that lists all State objects from a database """
if __name__ == "__main__":
    import sys

    from model_state import Base, State
    from sqlalchemy import create_engine
    from sqlalchemy.orm import Session

    # credentials and database name come from the command line
    url = 'mysql+mysqldb://{}:{}@localhost/{}'.format(
        sys.argv[1], sys.argv[2], sys.argv[3])
    engine = create_engine(url, pool_pre_ping=True)
    Base.metadata.create_all(engine)

    session = Session(engine)
    for state in session.query(State).order_by(State.id).all():
        print("{}: {}".format(state.id, state.name))
    session.close()
def test_conditional_step(self):
    """Test a large series of conditionals and assert that
    results remain correct between all of them
    within a series of loops.
    """
    User = self.classes.User

    base_bq = self.bakery(lambda s: s.query(User.id, User.name))
    base_bq += lambda q: q.order_by(User.id)

    # repeat so later iterations exercise the baked cache
    for i in range(4):
        for cond1, cond2, cond3, cond4 in itertools.product(
                *[(False, True) for j in range(4)]):
            bq = base_bq._clone()
            # cond1/cond2/cond3 choose the filter and join shape
            if cond1:
                bq += lambda q: q.filter(User.name != "jack")
                if cond2:
                    bq += lambda q: q.join(User.addresses)
                else:
                    bq += lambda q: q.outerjoin(User.addresses)
            elif cond3:
                bq += lambda q: q.filter(User.name.like("%ed%"))
            else:
                bq += lambda q: q.filter(User.name == "jack")
            # cond4 wraps the query in a COUNT over from_self()
            if cond4:
                bq += lambda q: q._from_self().with_entities(
                    func.count(User.id))

            sess = Session(autocommit=True)
            result = bq(sess).all()

            # expected values for every cond combination
            if cond4:
                if cond1:
                    if cond2:
                        eq_(result, [(4, )])
                    else:
                        eq_(result, [(5, )])
                elif cond3:
                    eq_(result, [(2, )])
                else:
                    eq_(result, [(1, )])
            else:
                if cond1:
                    if cond2:
                        eq_(
                            result,
                            [(8, "ed"), (8, "ed"), (8, "ed"),
                             (9, "fred")],
                        )
                    else:
                        eq_(
                            result,
                            [
                                (8, "ed"),
                                (8, "ed"),
                                (8, "ed"),
                                (9, "fred"),
                                (10, "chuck"),
                            ],
                        )
                elif cond3:
                    eq_(result, [(8, "ed"), (9, "fred")])
                else:
                    eq_(result, [(7, "jack")])

            sess.close()
def test_subquery_eagerloading(self):
    """Baked queries with subqueryload options produce correct rows and
    SQL counts across a matrix of filter/join combinations."""
    User = self.classes.User
    Address = self.classes.Address
    Order = self.classes.Order

    self.bakery = baked.bakery()
    base_bq = self.bakery(lambda s: s.query(User))

    base_bq += lambda q: q.options(
        subqueryload(User.addresses), subqueryload(User.orders))
    base_bq += lambda q: q.order_by(User.id)

    assert_result = [
        User(
            id=7,
            addresses=[Address(id=1, email_address="*****@*****.**")],
            orders=[Order(id=1), Order(id=3), Order(id=5)],
        ),
        User(
            id=8,
            addresses=[
                Address(id=2, email_address="*****@*****.**"),
                Address(id=3, email_address="*****@*****.**"),
                Address(id=4, email_address="*****@*****.**"),
            ],
        ),
        User(
            id=9,
            addresses=[Address(id=5)],
            orders=[Order(id=2), Order(id=4)],
        ),
        User(id=10, addresses=[]),
    ]

    # repeat so later iterations hit the baked cache
    for i in range(4):
        for cond1, cond2 in itertools.product(
                *[(False, True) for j in range(2)]):
            # fix: removed leftover debug print("HI----")
            bq = base_bq._clone()
            sess = Session()

            if cond1:
                bq += lambda q: q.filter(User.name == "jack")
            else:
                bq += lambda q: q.filter(User.name.like("%ed%"))
            if cond2:
                ct = func.count(Address.id).label("count")
                subq = (
                    sess.query(ct, Address.user_id)
                    .group_by(Address.user_id)
                    .having(ct > 2)
                    .subquery()
                )
                bq += lambda q: q.join(subq)

            # expected rows and statement counts per combination
            if cond2:
                if cond1:
                    def go():
                        result = bq(sess).all()
                        eq_([], result)
                    self.assert_sql_count(testing.db, go, 1)
                else:
                    def go():
                        result = bq(sess).all()
                        eq_(assert_result[1:2], result)
                    self.assert_sql_count(testing.db, go, 3)
            else:
                if cond1:
                    def go():
                        result = bq(sess).all()
                        eq_(assert_result[0:1], result)
                    self.assert_sql_count(testing.db, go, 3)
                else:
                    def go():
                        result = bq(sess).all()
                        eq_(assert_result[1:3], result)
                    self.assert_sql_count(testing.db, go, 3)
            sess.close()
def request_user_obj(login):
    """Fetch the User row matching *login*, or None if absent."""
    engine = create_engine('sqlite:///info_data_base.db', echo=True)
    session = Session(bind=engine)
    try:
        return session.query(User).filter_by(login=login).first()
    finally:
        session.close()
def request_user_avatar(login):
    """Return the avatar value for *login*, or None when no such user
    exists."""
    engine = create_engine('sqlite:///info_data_base.db', echo=True)
    session = Session(bind=engine)
    try:
        row = session.query(User.avatar).filter(
            User.login == login).first()
    finally:
        session.close()
    # bug fix: .first() returns None for a missing user; the original
    # indexed it unconditionally and raised TypeError
    return row[0] if row is not None else None
def mortalities(state):
    """Return JSON containing average CMS measure scores plus race and
    rural/urban demographic percentages for *state* ("all" disables the
    state filter).
    """
    # Get the measure scores
    # Create session (link) from Python to the DB
    session = Session(engine)
    """Return a list of a dicionary for mortalites by state"""
    # Query all for mortality
    query = session.query(Cms.measure_name, func.avg(Cms.score))
    # check for the all condition and filter if valid state passed
    if state != "all":
        query = query.filter(Cms.state == state)
    # group by measure name for uniqueness
    query = query.group_by(Cms.measure_name)
    # Get all the results
    results = query.all()
    session.close()

    cms_data = []
    for measure, score in results:
        cms_dict = {}
        cms_dict["measure"] = measure
        cms_dict["score"] = score
        cms_data.append(cms_dict)

    # Get the average race percent
    # Create session (link) from Python to the DB
    session = Session(engine)
    """Return a list of a dicionary for race by state"""
    # Query all for races
    query = session.query(
        func.avg(Cms.percent_white), func.avg(Cms.percent_black))
    # check for the all condition and filter if valid state passed
    if state != "all":
        query = query.filter(Cms.state == state)
    # print(query.statement.compile())
    # Get all the results
    results = query.first()
    session.close()

    demo_data = []
    demo_dict = {}
    demo_dict["name"] = "White"
    demo_dict["percent"] = results[0]
    demo_data.append(demo_dict)
    demo_dict = {}
    demo_dict["name"] = "Black"
    demo_dict["percent"] = results[1]
    demo_data.append(demo_dict)

    # Get the average urban / rural percent
    # Create session (link) from Python to the DB
    session = Session(engine)
    """Return a list of a dicionary for population by state"""
    # Query all for population
    query = session.query(func.sum(Cms.total_population))
    # Grab rural numbers only less than or equal to 50000
    query = query.filter(Cms.total_population <= 50000)
    # check for the all condition and filter if valid state passed
    if state != "all":
        query = query.filter(Cms.state == state)
    # print(query.statement.compile())
    # Get all the results
    results = query.first()
    session.close()
    rural_total = 0
    if not results[0] is None:
        rural_total = int(results[0])

    # Get the average urban / rural percent
    # Create session (link) from Python to the DB
    session = Session(engine)
    """Return a list of a dicionary for population by state"""
    # Query all for population
    query = session.query(func.sum(Cms.total_population))
    # Grab urban numbers only greater than 50000
    query = query.filter(Cms.total_population > 50000)
    # check for the all condition and filter if valid state passed
    if state != "all":
        query = query.filter(Cms.state == state)
    # print(query.statement.compile())
    # Get all the results
    results = query.first()
    session.close()
    urban_total = 0
    if not results[0] is None:
        urban_total = int(results[0])

    total_population = rural_total + urban_total
    # avoid division by zero when no population rows matched
    if total_population == 0:
        total_population = 1

    demo_dict = {}
    demo_dict["name"] = "Rural"
    demo_dict["percent"] = (rural_total / total_population) * 100
    demo_data.append(demo_dict)
    demo_dict = {}
    demo_dict["name"] = "Urban"
    demo_dict["percent"] = (urban_total / total_population) * 100
    demo_data.append(demo_dict)

    return_data = {}
    return_data["measures"] = cms_data
    return_data["demo"] = demo_data
    return jsonify(return_data)