def test_w_mapper_versioning(self):
    """History versioning combined with the mapper's own optimistic
    concurrency counter: a second session holding a stale version id
    must fail on flush with StaleDataError."""
    class SomeClass(Versioned, self.Base, ComparableEntity):
        __tablename__ = "sometable"

        id = Column(Integer, primary_key=True)
        name = Column(String(50))

    # Point the mapper's version_id_col at the history 'version' column
    # so ordinary UPDATEs are guarded by it.
    SomeClass.__mapper__.version_id_col = SomeClass.__table__.c.version

    self.create_tables()
    sess = self.session
    sc = SomeClass(name="sc1")
    sess.add(sc)
    sess.commit()

    # Load the same row in a second session sharing the same bind.
    s2 = Session(sess.bind)
    sc2 = s2.query(SomeClass).first()
    sc2.name = "sc1modified"

    # First session commits first and bumps the version to 2 ...
    sc.name = "sc1modified_again"
    sess.commit()

    eq_(sc.version, 2)

    # ... so the second session's pending UPDATE carries a stale version.
    assert_raises(orm_exc.StaleDataError, s2.flush)
def test_update_from_multitable_same_names(self):
    """Multi-table UPDATE where both entities own a column literally
    named 'samename': each attribute must target its own table."""
    Document = self.classes.Document
    User = self.classes.User

    s = Session()

    # Correlated UPDATE restricted to user id 2; no in-session
    # synchronization requested.
    s.query(Document).\
        filter(User.id == Document.user_id).\
        filter(User.id == 2).update({
            Document.samename: 'd_samename',
            User.samename: 'u_samename'
        }, synchronize_session=False)
    eq_(
        s.query(User.id, Document.samename, User.samename).
        filter(User.id == Document.user_id).
        order_by(User.id).all(),
        [
            (1, None, None),
            (1, None, None),
            (2, 'd_samename', 'u_samename'),
            (2, 'd_samename', 'u_samename'),
            (3, None, None),
            (3, None, None),
        ]
    )
def test_delete_against_metadata(self):
    """Bulk delete issued against the Table object rather than the
    mapped class still clears all rows."""
    User = self.classes.User
    users_table = self.tables.users

    session = Session()
    # No in-session state needs reconciling here.
    session.query(users_table).delete(synchronize_session=False)
    eq_(session.query(User).count(), 0)
def test_one_to_many_on_m2o(self):
    """A natural-primary-key change on the parent must cascade to the
    children's FK column when passive_updates=False on the
    many-to-one backref (ORM does the update, not the database)."""
    Node, nodes = self.classes.Node, self.tables.nodes

    mapper(Node, nodes, properties={
        'children': relationship(
            Node,
            backref=sa.orm.backref(
                'parentnode',
                remote_side=nodes.c.name,
                passive_updates=False),
        )})

    sess = Session()
    n1 = Node(name='n1')
    sess.add(n1)
    n2 = Node(name='n11', parentnode=n1)
    n3 = Node(name='n12', parentnode=n1)
    n4 = Node(name='n13', parentnode=n1)
    sess.add_all([n2, n3, n4])
    sess.commit()

    # Renaming the parent rewrites each child's 'parent' column value.
    n1.name = 'new n1'
    sess.commit()
    eq_(['new n1', 'new n1', 'new n1'],
        [n.parent for n in sess.query(Node).filter(
            Node.name.in_(['n11', 'n12', 'n13']))])
def test_illegal_operations(self):
    """Query.update()/delete() must refuse to run after methods that
    change the statement shape (limit/offset, ordering, grouping,
    distinct, joins, select_from, from_self)."""
    User = self.classes.User
    Address = self.classes.Address

    s = Session()

    # (query, regex fragment expected in the error message)
    for q, mname in (
        (s.query(User).limit(2), r"limit\(\)"),
        (s.query(User).offset(2), r"offset\(\)"),
        (s.query(User).limit(2).offset(2), r"limit\(\)"),
        (s.query(User).order_by(User.id), r"order_by\(\)"),
        (s.query(User).group_by(User.id), r"group_by\(\)"),
        (s.query(User).distinct(), r"distinct\(\)"),
        (s.query(User).join(User.addresses),
            r"join\(\), outerjoin\(\), select_from\(\), or from_self\(\)"),
        (s.query(User).outerjoin(User.addresses),
            r"join\(\), outerjoin\(\), select_from\(\), or from_self\(\)"),
        (s.query(User).select_from(Address),
            r"join\(\), outerjoin\(\), select_from\(\), or from_self\(\)"),
        (s.query(User).from_self(),
            r"join\(\), outerjoin\(\), select_from\(\), or from_self\(\)"),
    ):
        assert_raises_message(
            exc.InvalidRequestError,
            r"Can't call Query.update\(\) or Query.delete\(\) when "
            "%s has been called" % mname,
            q.update,
            {'name': 'ed'})
        assert_raises_message(
            exc.InvalidRequestError,
            r"Can't call Query.update\(\) or Query.delete\(\) when "
            "%s has been called" % mname,
            q.delete)
def test_invocation_systemwide_loaders(self):
    """bake_lazy_loaders() installs BakedLazyLoader globally; after
    unbake_lazy_loaders() + clear_mappers(), new mappings use the
    stock lazy loader again."""
    baked.bake_lazy_loaders()
    try:
        User, Address = self._o2m_fixture()

        sess = Session()
        q = sess.query(User).options(lazyload(User.addresses))
        with mock.patch.object(BakedLazyLoader, "_emit_lazyload") as el:
            u1 = q.first()
            u1.addresses
            # invoked: the baked loader handled the lazy load, receiving
            # the instance's state as its second positional argument.
            is_(
                el.mock_calls[0][1][1],
                u1._sa_instance_state
            )
    finally:
        # Always restore the global loader strategy for other tests.
        baked.unbake_lazy_loaders()

    clear_mappers()
    User, Address = self._o2m_fixture()
    sess = Session()
    q = sess.query(User).options(lazyload(User.addresses))
    with mock.patch.object(BakedLazyLoader, "_emit_lazyload") as el:
        u1 = q.first()
        u1.addresses
        # not invoked
        eq_(el.mock_calls, [])
def test_orm_bundles(n):
    """Load lightweight "bundle" objects using the ORM."""
    session = Session(engine)
    customer_bundle = Bundle(
        "customer", Customer.id, Customer.name, Customer.description
    )
    query = session.query(customer_bundle).yield_per(10000).limit(n)
    for _row in query:
        pass
def test_get_pk_w_null(self):
    """test the re-implementation of logic to do get with IS NULL."""

    class AddressUser(object):
        pass
    # Map against an outer join so the addresses half of the composite
    # primary key can legitimately come back as NULL.
    mapper(
        AddressUser,
        self.tables.users.outerjoin(self.tables.addresses),
        properties={
            "id": self.tables.users.c.id,
            "address_id": self.tables.addresses.c.id
        }
    )

    bq = self.bakery(lambda s: s.query(AddressUser))

    sess = Session()

    def go():
        u1 = bq(sess).get((10, None))
        eq_(u1.name, 'chuck')
    # First get() misses the identity map: exactly one SELECT.
    self.assert_sql_count(testing.db, go, 1)

    u1 = sess.query(AddressUser).get((10, None))  # noqa

    def go():
        u2 = bq(sess).get((10, None))
        eq_(u2.name, 'chuck')
    # Object is now in the identity map: no SQL emitted.
    self.assert_sql_count(testing.db, go, 0)
def test_invocation_per_mapper(self):
    """test that BakedLazyLoader is getting invoked with the
    "baked_select" lazy setting.

    """
    User, Address = self._o2m_fixture(lazy="baked_select")

    sess = Session()
    # An explicit lazyload() option overrides the mapper-level
    # "baked_select" setting, so the baked loader must stay idle.
    q = sess.query(User).options(lazyload(User.addresses))
    with mock.patch.object(BakedLazyLoader, "_emit_lazyload") as el:
        u1 = q.first()
        u1.addresses
        # not invoked
        eq_(el.mock_calls, [])

    sess = Session()
    # Without options, the mapper-level setting applies.
    q = sess.query(User)
    with mock.patch.object(BakedLazyLoader, "_emit_lazyload") as el:
        u1 = q.first()
        u1.addresses
        # invoked: second positional argument is the instance's state.
        is_(
            el.mock_calls[0][1][1],
            u1._sa_instance_state
        )
def create_user(dbsession: Session, registry: Registry, email: str = EMAIL, password: str = PASSWORD, admin: bool = False) -> User:
    """A helper function to create normal and admin users for tests.

    :param admin: If True run
        :py:class:`websauna.system.user.usermixin.SiteCreator` login and
        set the user to admin group.
    """
    user = User(email=email)

    if password:
        # Store only the hashed form of the password.
        hasher = registry.getUtility(IPasswordHasher)
        user.hashed_password = hasher.hash_password(password)

    user.user_registration_source = "dummy"
    dbsession.add(user)
    dbsession.flush()

    user.username = user.generate_username()
    user.activated_at = now()
    assert user.can_login()

    if admin:
        # First user bootstraps the site and joins the admin group.
        site_creator = get_site_creator(registry)
        site_creator.init_empty_site(dbsession, user)

    return user
def test_any_wpoly(self):
    """of_type() against a with_polymorphic alias used inside any():
    the EXISTS subquery must select from the polymorphic selectable."""
    ParentThing, DataContainer, Job, SubJob = \
        self.classes.ParentThing,\
        self.classes.DataContainer,\
        self.classes.Job,\
        self.classes.SubJob

    # Aliased polymorphic entity covering Job + SubJob.
    Job_P = with_polymorphic(Job, SubJob, aliased=True)

    s = Session()
    q = s.query(Job).join(DataContainer.jobs).\
        filter(
            DataContainer.jobs.of_type(Job_P).\
            any(Job_P.id < Job.id)
        )
    self.assert_compile(q,
        "SELECT job.id AS job_id, job.type AS job_type, "
        "job.container_id "
        "AS job_container_id "
        "FROM data_container "
        "JOIN job ON data_container.id = job.container_id "
        "WHERE EXISTS (SELECT 1 "
        "FROM (SELECT job.id AS job_id, job.type AS job_type, "
        "job.container_id AS job_container_id, "
        "subjob.id AS subjob_id, subjob.attr AS subjob_attr "
        "FROM job LEFT OUTER JOIN subjob ON job.id = subjob.id) AS anon_1 "
        "WHERE data_container.id = anon_1.job_container_id AND job.id > anon_1.job_id)"
    )
def test_not_none(self): Graph, Edge, Point = (self.classes.Graph, self.classes.Edge, self.classes.Point) # current contract. the composite is None # when hasn't been populated etc. on a # pending/transient object. e1 = Edge() assert e1.end is None sess = Session() sess.add(e1) # however, once it's persistent, the code as of 0.7.3 # would unconditionally populate it, even though it's # all None. I think this usage contract is inconsistent, # and it would be better that the composite is just # created unconditionally in all cases. # but as we are just trying to fix [ticket:2308] and # [ticket:2309] without changing behavior we maintain # that only "persistent" gets the composite with the # Nones sess.flush() assert e1.end is not None
def test_set_composite_attrs_via_selectable(self):
    """Composite attributes assigned through a selectable-mapped class
    persist into the correct underlying table columns."""
    Values, CustomValues, values, Descriptions, descriptions = (
        self.classes.Values,
        self.classes.CustomValues,
        self.tables.values,
        self.classes.Descriptions,
        self.tables.descriptions,
    )

    session = Session()
    desc = Descriptions(
        custom_descriptions=CustomValues('Color', 'Number'),
        values=[
            Values(custom_values=CustomValues('Red', '5')),
            Values(custom_values=CustomValues('Blue', '1')),
        ],
    )
    session.add(desc)
    session.commit()

    # Verify raw table contents, bypassing the ORM.
    eq_(testing.db.execute(descriptions.select()).fetchall(),
        [(1, 'Color', 'Number')])
    eq_(testing.db.execute(values.select()).fetchall(),
        [(1, 1, 'Red', '5'), (2, 1, 'Blue', '1')])
def test_is_modified_passive_on(self):
    """is_modified(passive=True) must never emit SQL, whether or not
    the instance has pending changes."""
    User, Address = self._default_mapping_fixture()

    s = Session()
    u = User(name='fred', addresses=[Address(email_address='foo')])
    s.add(u)
    s.commit()

    # Touch the primary key so the post-commit state is loaded before
    # the zero-SQL assertions below run.
    u.id
    def go():
        assert not s.is_modified(u, passive=True)
    self.assert_sql_count(
        testing.db,
        go,
        0
    )

    u.name = 'newname'
    def go():
        assert s.is_modified(u, passive=True)
    # Still zero SQL even with a pending attribute change.
    self.assert_sql_count(
        testing.db,
        go,
        0
    )
def test_move_persistent_clean(self):
    """Moving a clean persistent instance to a fresh Session leaves it
    unmodified and reference-cycle free."""
    source_session, user = self._persistent_fixture()
    source_session.close()

    target_session = Session()
    target_session.add(user)

    self._assert_no_cycle(user)
    self._assert_not_modified(user)
def test_hybrid_descriptor_three(self):
    """A hybrid property that simply returns another mapped column
    behaves as that column at class level and under aliasing."""

    class Point(object):
        def __init__(self, x, y):
            self.x, self.y = x, y

        @hybrid_property
        def x_alone(self):
            # Delegates straight to the mapped 'x' attribute.
            return self.x

    self._fixture(Point)
    alias = aliased(Point)

    eq_(str(Point.x_alone), "Point.x")
    eq_(str(alias.x_alone), "AliasedClass_Point.x")

    # Class-level access yields literally the same attribute object.
    assert Point.x_alone is Point.x

    # Same alias on both sides compares the alias against itself.
    eq_(str(alias.x_alone == alias.x), "point_1.x = point_1.x")

    # A second alias gets its own anonymized table name.
    a2 = aliased(Point)
    eq_(str(a2.x_alone == alias.x), "point_1.x = point_2.x")

    sess = Session()

    self.assert_compile(
        sess.query(alias).filter(alias.x_alone > Point.x),
        "SELECT point_1.id AS point_1_id, point_1.x AS point_1_x, "
        "point_1.y AS point_1_y FROM point AS point_1, point "
        "WHERE point_1.x > point.x"
    )
def test_batch_interaction(self):
    """test batching groups same-structured, primary key
    present statements together.
    """

    t = self.tables.t

    class T(fixtures.ComparableEntity):
        pass
    mapper(T, t)

    sess = Session()
    sess.add_all([
        # No PK given: each row is inserted individually (below).
        T(data='t1'),
        T(data='t2'),
        # PK present: same statement shape, batched together.
        T(id=3, data='t3'),
        T(id=4, data='t4'),
        T(id=5, data='t5'),
        # SQL-expression value changes the statement, breaking the batch.
        T(id=6, data=func.lower('t6')),
        T(id=7, data='t7'),
        T(id=8, data='t8'),
        # Extra column 'def_' forms another distinct statement shape.
        T(id=9, data='t9', def_='def2'),
        T(id=10, data='t10', def_='def3'),
        T(id=11, data='t11'),
    ])

    self.assert_sql_execution(
        testing.db,
        sess.flush,
        CompiledSQL(
            "INSERT INTO t (data) VALUES (:data)",
            {'data': 't1'}
        ),
        CompiledSQL(
            "INSERT INTO t (data) VALUES (:data)",
            {'data': 't2'}
        ),
        # Batched executemany: a list of parameter sets.
        CompiledSQL(
            "INSERT INTO t (id, data) VALUES (:id, :data)",
            [{'data': 't3', 'id': 3},
                {'data': 't4', 'id': 4},
                {'data': 't5', 'id': 5}]
        ),
        CompiledSQL(
            "INSERT INTO t (id, data) VALUES (:id, lower(:lower_1))",
            {'lower_1': 't6', 'id': 6}
        ),
        CompiledSQL(
            "INSERT INTO t (id, data) VALUES (:id, :data)",
            [{'data': 't7', 'id': 7}, {'data': 't8', 'id': 8}]
        ),
        CompiledSQL(
            "INSERT INTO t (id, data, def_) VALUES (:id, :data, :def_)",
            [{'data': 't9', 'id': 9, 'def_': 'def2'},
                {'data': 't10', 'id': 10, 'def_': 'def3'}]
        ),
        CompiledSQL(
            "INSERT INTO t (id, data) VALUES (:id, :data)",
            {'data': 't11', 'id': 11}
        ),
    )
def test_mysql_read(self):
    """with_for_update(read=True) renders LOCK IN SHARE MODE on MySQL."""
    User = self.classes.User
    session = Session()
    query = session.query(User.id).with_for_update(read=True)
    self.assert_compile(
        query,
        "SELECT users.id AS users_id FROM users LOCK IN SHARE MODE",
        dialect="mysql",
    )
def test_unknown_legacy_lock_mode(self):
    """An unrecognized with_lockmode() string raises ArgumentError."""
    User = self.classes.User
    session = Session()
    assert_raises_message(
        exc.ArgumentError,
        "Unknown with_lockmode argument: 'unknown_mode'",
        session.query(User.id).with_lockmode,
        'unknown_mode',
    )
def test_postgres_for_no_key_nowait_update(self):
    """key_share=True with nowait renders FOR NO KEY UPDATE NOWAIT on
    PostgreSQL."""
    User = self.classes.User
    session = Session()
    query = session.query(User.id).with_for_update(
        key_share=True, nowait=True)
    self.assert_compile(
        query,
        "SELECT users.id AS users_id FROM users FOR NO KEY UPDATE NOWAIT",
        dialect="postgresql",
    )
def test_oracle_update(self):
    """Default with_for_update() renders plain FOR UPDATE on Oracle."""
    User = self.classes.User
    session = Session()
    query = session.query(User.id).with_for_update()
    self.assert_compile(
        query,
        "SELECT users.id AS users_id FROM users FOR UPDATE",
        dialect="oracle",
    )
def test_postgres_read(self):
    """with_for_update(read=True) renders FOR SHARE on PostgreSQL."""
    User = self.classes.User
    session = Session()
    query = session.query(User.id).with_for_update(read=True)
    self.assert_compile(
        query,
        "SELECT users.id AS users_id FROM users FOR SHARE",
        dialect="postgresql",
    )
def upgrade():
    """Renumber Salad.position into a dense 0-based sequence, ordered
    by the current position values."""
    session = Session(bind=op.get_bind())
    salads = session.query(Salad).order_by(Salad.position)
    for index, salad in enumerate(salads):
        salad.position = index
    session.commit()
def test_seven(self):
    """from_self() of a two-entity (Parent, Sub2) query, then joining
    onward from the inner Sub2 entity to ep1/ep2."""
    Parent, Base1, Base2, Sub1, Sub2, EP1, EP2 = self._classes()

    s = Session()
    self.assert_compile(
        # adding Sub2 to the entities list helps it,
        # otherwise the joins for Sub2.ep1/ep2 don't have columns
        # to latch onto.  Can't really make it better than this
        s.query(Parent, Sub2).join(Parent.sub1).\
        join(Sub1.sub2).from_self().\
        join(Sub2.ep1).
        join(Sub2.ep2),
        "SELECT anon_1.parent_id AS anon_1_parent_id, "
        "anon_1.parent_data AS anon_1_parent_data, "
        "anon_1.sub2_id AS anon_1_sub2_id, "
        "anon_1.base2_id AS anon_1_base2_id, "
        "anon_1.base2_base1_id AS anon_1_base2_base1_id, "
        "anon_1.base2_data AS anon_1_base2_data, "
        "anon_1.sub2_subdata AS anon_1_sub2_subdata "
        "FROM (SELECT parent.id AS parent_id, parent.data AS parent_data, "
        "sub2.id AS sub2_id, "
        "base2.id AS base2_id, "
        "base2.base1_id AS base2_base1_id, "
        "base2.data AS base2_data, "
        "sub2.subdata AS sub2_subdata "
        "FROM parent JOIN (base1 JOIN sub1 ON base1.id = sub1.id) "
        "ON parent.id = sub1.parent_id JOIN "
        "(base2 JOIN sub2 ON base2.id = sub2.id) "
        "ON base1.id = base2.base1_id) AS anon_1 "
        "JOIN ep1 ON anon_1.base2_id = ep1.base2_id "
        "JOIN ep2 ON anon_1.base2_id = ep2.base2_id"
    )
def _two_obj_fixture(self):
    """Persist two engineers — dilbert reports to wally — and return
    the session holding them."""
    wally = Engineer(name='wally')
    dilbert = Engineer(name='dilbert', reports_to=wally)

    sess = Session()
    sess.add_all([wally, dilbert])
    sess.commit()
    return sess
def get(session: orm.Session, discord_id=None, user_id=None, twitch_id=None, create=True):
    """
    :param session:
    :param discord_id:
    :param user_id:
    :param twitch_id:
    :param create:
    :rtype: roboto.model.User
    :return:
    """
    if not (discord_id or user_id or twitch_id):
        return None

    # Lookup precedence: user_id, then twitch_id (normalized to lower
    # case), then discord_id.
    if user_id:
        col, val = User.user_id, user_id
    elif twitch_id:
        col, val = User.twitch_id, twitch_id.lower()
    else:
        col, val = User.discord_id, discord_id

    existing = session.query(User).filter(col == val).first()
    if existing:
        return existing

    if not create:
        return None

    user = User()
    setattr(user, col.key, val)
    session.add(user)
    log.debug("Creating new user: {}".format(user))
    return user
def test_create_several_scenario_nodes(fresh_database_config, scenario_manager):    # pylint: disable=invalid-name
    """Test that several scenario nodes can be created by library.

    1. Create 2 scenario node with scenario manager.
    2. Check type of the returned value.
    3. Check that records for scenario node and node state are created.
    """
    # One non-ASCII name deliberately exercises unicode handling.
    specs = [
        NewScenarioNodeSpec(name=u'первый'),
        NewScenarioNodeSpec(name=u'second'),
    ]
    nodes = scenario_manager.create_scenario_nodes(specs)

    assert len(nodes) == 2, "Wrong number of scenario nodes has been returned"
    assert all(isinstance(node, ScenarioNode) for node in nodes), (
        "Wrong type returned by create_scenario_node method"
    )

    # Verify directly against the database, not through the manager.
    engine = create_engine(fresh_database_config.get_connection_string())
    session = Session(bind=engine)

    node_records = session.query(ScenarioNodeRecord).all()
    assert len(node_records) == 2, "Wrong number of scenario node records"

    created_data = set()
    for record in node_records:
        # Exactly one state row must exist per node (one() raises otherwise).
        state = session.query(ScenarioNodeStateRecord).filter_by(node_id=record.node_id).one()
        created_data.add(state.name)

    expected_data = set(spec.name for spec in specs)
    assert created_data == expected_data, "Wrong states have been created in the database"
def test_not_supported_by_dialect_should_just_use_update(self):
    """Dialects without share-lock support fall back to plain
    FOR UPDATE when read=True is requested."""
    User = self.classes.User
    session = Session()
    query = session.query(User.id).with_for_update(read=True)
    self.assert_compile(
        query,
        "SELECT users.id AS users_id FROM users FOR UPDATE",
        dialect=default.DefaultDialect(),
    )
def update_song_id3(engine, song_name, new_name='', new_artist='', new_album=''):
    """Update a song's database row and the ID3 tags of its file.

    Only fields given a non-empty value are changed.  Database column
    names map directly to ID3 frame names except ``name``, which is
    stored as ``title`` in ID3.

    :param engine: SQLAlchemy engine bound to the song database.
    :param song_name: current ``Song.name`` used to locate the row.
    :param new_name: new song name, or '' to leave unchanged.
    :param new_artist: new artist, or '' to leave unchanged.
    :param new_album: new album, or '' to leave unchanged.
    """
    session = Session(bind=engine)
    song = session.query(Song).filter(Song.name == song_name)
    try:
        song_path = song.one().path
    except Exception:
        # No (or ambiguous) matching row: fall back to the default file.
        # NOTE(review): consider narrowing to NoResultFound /
        # MultipleResultsFound — a bare Exception also hides DB errors.
        song_path = './song.mp3'

    updated = {}
    if new_name:
        updated['name'] = new_name
    if new_artist:
        updated['artist'] = new_artist
    if new_album:
        updated['album'] = new_album

    # Nothing to change: skip the bulk UPDATE (an empty values dict is
    # not a valid bulk update) and leave the ID3 tags untouched.
    if not updated:
        return

    song.update(updated)
    session.commit()

    id3_tags = EasyID3(song_path)
    for key, value in updated.items():
        if key == 'name':
            key = 'title'  # ID3 calls the song name "title"
        id3_tags[key] = value
    id3_tags.save()
def test_postgres_update_of_entity(self):
    """with_for_update(of=<entity>) renders FOR UPDATE OF <table> on
    PostgreSQL."""
    User = self.classes.User
    session = Session()
    query = session.query(User.id).with_for_update(of=User)
    self.assert_compile(
        query,
        "SELECT users.id AS users_id FROM users FOR UPDATE OF users",
        dialect="postgresql",
    )
def getallactors(db: Session = Depends(get_db)):
    """Return every Actor row."""
    return db.query(models.Actor).all()
def get_person_by_email(db: Session, email: str):
    """Look up a single Person by exact email; None when absent."""
    matching = db.query(models.Person).filter(models.Person.email == email)
    return matching.first()
def mark_person_emailed(db: Session, email: str):
    """Set signup_email_success on the person with the given email.

    Fix: previously an unknown email crashed with AttributeError on
    None; the call is now a no-op when no matching person exists.
    """
    person = db.query(models.Person).filter(models.Person.email == email).first()
    if person is None:
        # No matching row — nothing to mark.
        return
    person.signup_email_success = True
    db.commit()
def get_person_by_id(db: Session, person_id: int):
    """Look up a single Person by primary key; None when absent."""
    query = db.query(models.Person).filter(models.Person.id == person_id)
    return query.first()
def edit_actor(id, request: schemas.Actor, db: Session = Depends(get_db)):
    """Update the actor with the given id from the request payload.

    Returns the literal string "updated" (original API contract)
    whether or not a row matched.  Fix: removed the leftover
    ``print(request)`` debug statement from the request handler.
    """
    actor_query = db.query(models.Actor).filter(models.Actor.id == id)
    # Bulk UPDATE with the validated payload's fields.
    actor_query.update(request.dict())
    db.commit()
    return "updated"
def post_actors(request: schemas.Actor, db: Session = Depends(get_db)):
    """Create a new Actor from the request payload and return it."""
    actor = models.Actor(actor_name=request.actor_name)
    db.add(actor)
    db.commit()
    # Reload server-generated fields (e.g. the primary key).
    db.refresh(actor)
    return actor
def getactorbyid(id: int, db: Session = Depends(get_db)):
    """Return the Actor with the given id, or None."""
    query = db.query(models.Actor).filter(models.Actor.id == id)
    return query.first()
def weather(start_date):
    """Min/max/avg temperature per date from start_date onward,
    serialized as JSON."""
    session = Session(engine)
    stats = (
        session.query(
            Measurement.date,
            func.min(Measurement.tobs),
            func.max(Measurement.tobs),
            func.avg(Measurement.tobs),
        )
        .filter(Measurement.date >= start_date)
        .group_by(Measurement.date)
        .all()
    )
    session.close()
    return jsonify(stats)
from sqlalchemy.orm import Session
from sqlalchemy import create_engine, func
# Fix: automap_base is used below but was not imported in this block.
# NOTE(review): assumes this is the top of the file — if the import
# exists elsewhere, the duplicate is harmless.
from sqlalchemy.ext.automap import automap_base
from flask import Flask, jsonify

# Fix: 'dt' is used below but was not imported in this block.
import datetime as dt

##############################################################
# Database Setup
##############################################################
engine = create_engine("sqlite:///Resources/hawaii.sqlite")

Base = automap_base()
Base.prepare(engine, reflect=True)

Measurement = Base.classes.measurement
Station = Base.classes.station

session = Session(engine)

# Finding the date a year from the last date in the sqlite file.
end_date = session.query(Measurement.date).order_by(
    Measurement.date.desc()).first()
year_ago = dt.date(2017, 8, 23) - dt.timedelta(days=365)
session.close()

print(end_date)
print(year_ago)

##############################################################
# Flask Setup
##############################################################
app = Flask(__name__)
def get_nodes_and_make_polygon(drive_time_query_id):
    """Build an alpha-shape polygon from the drive-time nodes of the
    given query, store it, and tag intersecting bridges with the
    query id.  Returns True on completion."""
    print(f'[{datetime.now()}] get_nodes_and_make_polygon(): Setting up sqlalchemy')
    engine = create_engine(db_url_from_env(), echo=False)
    session = Session(engine)
    metadata = MetaData()
    metadata.reflect(
        engine,
        only=[
            'routing_drivetimenode',
            'routing_drivetimepolygon',
            'bridges_newyorkbridge',
        ]
    )
    Base = automap_base(metadata=metadata)
    Base.prepare()

    # Reflecting on the metadata collects the routing_drivetimenode table,
    # the routing_drivetimepolygon table, and their related tables
    # (routing_drivetimequery and ways_vertices_pgr)
    DriveTimeNode = Base.classes.routing_drivetimenode
    DriveTimePolygon = Base.classes.routing_drivetimepolygon
    DriveTimeQuery = Base.classes.routing_drivetimequery
    WaysVerticesPgr = Base.classes.ways_vertices_pgr
    NewYorkBridge = Base.classes.bridges_newyorkbridge

    # Get the routing_drivetimequery object that matches the message and the
    # associated drivetimenodes
    drive_time_query = session.query(DriveTimeQuery).get(drive_time_query_id)
    drive_time_nodes = session.query(DriveTimeNode).filter(
        DriveTimeNode.routing_drivetimequery == drive_time_query
    ).all()
    print(f'[{datetime.now()}] Display name: {drive_time_query.display_name}')

    # Make a polygon object from the nodes
    print(
        f'[{datetime.now()}] get_nodes_and_make_polygon(): ' +
        f'Processing {len(drive_time_nodes)} nodes'
    )
    points = [loads(str(dtn.the_geom), hex=True) for dtn in drive_time_nodes]
    polygon = to_polygon(points, alpha=30)

    # Commit the results to the database
    # NOTE(review): the 0.005-degree buffer below is applied both here and
    # in the intersect query; presumably a smoothing/tolerance margin.
    new_drive_time_polygon = DriveTimePolygon(
        the_geom='SRID=4326;'+polygon.buffer(0.005).wkt,
        drive_time_query_id=drive_time_query_id,
        created_time=datetime.now(),
        edited_time=datetime.now(),
    )
    session.add(new_drive_time_polygon)
    session.flush()
    session.commit()
    print(f'[{datetime.now()}] get_nodes_and_make_polygon(): Committed polygon to db')

    print(f'[{datetime.now()}] Running intersect query on NewYorkBridge objects')
    bridges = session.query(NewYorkBridge).filter(
        NewYorkBridge.the_geom.ST_Intersects('SRID=4326;'+polygon.buffer(0.005).wkt)
    ).all()

    print(f'[{datetime.now()}] Iterating through {len(bridges)} bridges')
    for b in bridges:
        # Re-fetch each bridge and merge this query id into its list,
        # de-duplicating via set().
        bridge = session.query(NewYorkBridge).filter(NewYorkBridge.id == b.id).first()
        drive_time_queries = bridge.drive_time_queries
        bridge.drive_time_queries = list(set(drive_time_queries + [drive_time_query_id]))
        session.add(bridge)

    # Mark the query as processed.
    drive_time_query.polygon_pending = False
    session.add(drive_time_query)
    print(
        f'[{datetime.now()}] get_nodes_and_make_polygon(): Set DriveTimeQuery' +
        f'.polygon_pending to {drive_time_query.polygon_pending}'
    )
    session.flush()
    session.commit()
    print(f'[{datetime.now()}] get_nodes_and_make_polygon(): Committed bridges to db')
    return True
def get_topics(db: Session, skip: int = 0, limit: int = 100):
    """Return a page of Topics rows using skip/limit pagination."""
    page = db.query(models.Topics).offset(skip).limit(limit)
    return page.all()
def tobs():
    """Return observed temperatures since `year_ago`, ordered by date.

    Fix: the Session was never closed, leaking a connection on every
    request; close it before serializing (matching the pattern used by
    the sibling routes, e.g. weather()).
    """
    session = Session(engine)
    tobs_query = session.query(Measurement.date, Measurement.tobs).\
        filter(Measurement.date >= year_ago).\
        order_by(Measurement.date).all()
    session.close()
    return jsonify(tobs_query)
def setUp(self):
    """Per-test fixtures: a fresh declarative base plus a session with
    version-history tracking enabled."""
    self.Base = declarative_base()
    self.session = Session(engine)
    versioned_session(self.session)
def get_choices(db: Session, question_id: int):
    """Return all Choices rows belonging to the given question."""
    query = db.query(models.Choices).filter(
        models.Choices.q_id == question_id)
    return query.all()
from sqlalchemy import create_engine, func
from sqlalchemy.ext.automap import automap_base
from sqlalchemy.orm import Session
from sqlalchemy.pool import SingletonThreadPool
from flask import Flask, jsonify
import pymysql
import datetime as dt
import json

# Make PyMySQL masquerade as MySQLdb for SQLAlchemy's mysql dialect.
pymysql.install_as_MySQLdb()

# check_same_thread=False lets the single sqlite connection be shared
# across Flask's handler threads.
engine = create_engine(
    "sqlite:///Resources/hawaii.sqlite?check_same_thread=False")
session = Session(bind=engine)

# reflect an existing database into a new model
Base = automap_base()
# reflect the tables
Base.prepare(engine, reflect=True)
print(Base.classes.keys())

# Save references to each table
Measurement = Base.classes.measurement
Station = Base.classes.station

# Flask
app = Flask(__name__)

#* Use FLASK to create your routes.
def get_question(db: Session, quest_id: int):
    """Return the Questions row with the given id, or None."""
    query = db.query(models.Questions).filter(
        models.Questions.id == quest_id)
    return query.first()
# NOTE(review): the assignment below is the tail of
# ElementTreeMarshal.__set__, whose "def" line precedes this excerpt.
        document._element = element

    def __delete__(self, document):
        """Descriptor delete: drop the cached ElementTree and clear the
        document's node collection."""
        del document._element
        document._nodes = []

# override Document's "element" attribute with the marshaller.
Document.element = ElementTreeMarshal()

# PART V - Basic Persistence Example

line = "\n--------------------------------------------------------"

# save to DB
session = Session(e)

# get ElementTree documents
for file in ("test.xml", "test2.xml", "test3.xml"):
    # Resolve each sample file relative to this script's directory.
    filename = os.path.join(os.path.dirname(__file__), file)
    doc = ElementTree.parse(filename)
    session.add(Document(file, doc))

print("\nSaving three documents...", line)
session.commit()
print("Done.")

print("\nFull text of document 'text.xml':", line)
document = session.query(Document).filter_by(filename="test.xml").first()
ElementTree.dump(document.element)
class TestVersioning(TestCase, AssertsCompiledSQL): __dialect__ = "default" def setUp(self): self.session = Session(engine) self.Base = declarative_base() versioned_session(self.session) def tearDown(self): self.session.close() clear_mappers() self.Base.metadata.drop_all(engine) def create_tables(self): self.Base.metadata.create_all(engine) def test_plain(self): class SomeClass(Versioned, self.Base, ComparableEntity): __tablename__ = "sometable" id = Column(Integer, primary_key=True) name = Column(String(50)) self.create_tables() sess = self.session sc = SomeClass(name="sc1") sess.add(sc) sess.commit() sc.name = "sc1modified" sess.commit() assert sc.version == 2 SomeClassHistory = SomeClass.__history_mapper__.class_ eq_( sess.query(SomeClassHistory).filter( SomeClassHistory.version == 1).all(), [SomeClassHistory(version=1, name="sc1")], ) sc.name = "sc1modified2" eq_( sess.query(SomeClassHistory).order_by( SomeClassHistory.version).all(), [ SomeClassHistory(version=1, name="sc1"), SomeClassHistory(version=2, name="sc1modified"), ], ) assert sc.version == 3 sess.commit() sc.name = "temp" sc.name = "sc1modified2" sess.commit() eq_( sess.query(SomeClassHistory).order_by( SomeClassHistory.version).all(), [ SomeClassHistory(version=1, name="sc1"), SomeClassHistory(version=2, name="sc1modified"), ], ) sess.delete(sc) sess.commit() eq_( sess.query(SomeClassHistory).order_by( SomeClassHistory.version).all(), [ SomeClassHistory(version=1, name="sc1"), SomeClassHistory(version=2, name="sc1modified"), SomeClassHistory(version=3, name="sc1modified2"), ], ) def test_w_mapper_versioning(self): class SomeClass(Versioned, self.Base, ComparableEntity): __tablename__ = "sometable" use_mapper_versioning = True id = Column(Integer, primary_key=True) name = Column(String(50)) self.create_tables() sess = self.session sc = SomeClass(name="sc1") sess.add(sc) sess.commit() s2 = Session(sess.bind) sc2 = s2.query(SomeClass).first() sc2.name = "sc1modified" sc.name = "sc1modified_again" 
sess.commit() eq_(sc.version, 2) assert_raises(orm_exc.StaleDataError, s2.flush) def test_from_null(self): class SomeClass(Versioned, self.Base, ComparableEntity): __tablename__ = "sometable" id = Column(Integer, primary_key=True) name = Column(String(50)) self.create_tables() sess = self.session sc = SomeClass() sess.add(sc) sess.commit() sc.name = "sc1" sess.commit() assert sc.version == 2 def test_insert_null(self): class SomeClass(Versioned, self.Base, ComparableEntity): __tablename__ = "sometable" id = Column(Integer, primary_key=True) boole = Column(Boolean, default=False) self.create_tables() sess = self.session sc = SomeClass(boole=True) sess.add(sc) sess.commit() sc.boole = None sess.commit() sc.boole = False sess.commit() SomeClassHistory = SomeClass.__history_mapper__.class_ eq_( sess.query(SomeClassHistory.boole).order_by( SomeClassHistory.id).all(), [(True, ), (None, )], ) eq_(sc.version, 3) def test_deferred(self): """test versioning of unloaded, deferred columns.""" class SomeClass(Versioned, self.Base, ComparableEntity): __tablename__ = "sometable" id = Column(Integer, primary_key=True) name = Column(String(50)) data = deferred(Column(String(25))) self.create_tables() sess = self.session sc = SomeClass(name="sc1", data="somedata") sess.add(sc) sess.commit() sess.close() sc = sess.query(SomeClass).first() assert "data" not in sc.__dict__ sc.name = "sc1modified" sess.commit() assert sc.version == 2 SomeClassHistory = SomeClass.__history_mapper__.class_ eq_( sess.query(SomeClassHistory).filter( SomeClassHistory.version == 1).all(), [SomeClassHistory(version=1, name="sc1", data="somedata")], ) def test_joined_inheritance(self): class BaseClass(Versioned, self.Base, ComparableEntity): __tablename__ = "basetable" id = Column(Integer, primary_key=True) name = Column(String(50)) type = Column(String(20)) __mapper_args__ = { "polymorphic_on": type, "polymorphic_identity": "base", } class SubClassSeparatePk(BaseClass): __tablename__ = "subtable1" id = 
column_property(Column(Integer, primary_key=True), BaseClass.id) base_id = Column(Integer, ForeignKey("basetable.id")) subdata1 = Column(String(50)) __mapper_args__ = {"polymorphic_identity": "sep"} class SubClassSamePk(BaseClass): __tablename__ = "subtable2" id = Column(Integer, ForeignKey("basetable.id"), primary_key=True) subdata2 = Column(String(50)) __mapper_args__ = {"polymorphic_identity": "same"} self.create_tables() sess = self.session sep1 = SubClassSeparatePk(name="sep1", subdata1="sep1subdata") base1 = BaseClass(name="base1") same1 = SubClassSamePk(name="same1", subdata2="same1subdata") sess.add_all([sep1, base1, same1]) sess.commit() base1.name = "base1mod" same1.subdata2 = "same1subdatamod" sep1.name = "sep1mod" sess.commit() BaseClassHistory = BaseClass.__history_mapper__.class_ SubClassSeparatePkHistory = ( SubClassSeparatePk.__history_mapper__.class_) SubClassSamePkHistory = SubClassSamePk.__history_mapper__.class_ eq_( sess.query(BaseClassHistory).order_by(BaseClassHistory.id).all(), [ SubClassSeparatePkHistory( id=1, name="sep1", type="sep", version=1), BaseClassHistory(id=2, name="base1", type="base", version=1), SubClassSamePkHistory( id=3, name="same1", type="same", version=1), ], ) same1.subdata2 = "same1subdatamod2" eq_( sess.query(BaseClassHistory).order_by( BaseClassHistory.id, BaseClassHistory.version).all(), [ SubClassSeparatePkHistory( id=1, name="sep1", type="sep", version=1), BaseClassHistory(id=2, name="base1", type="base", version=1), SubClassSamePkHistory( id=3, name="same1", type="same", version=1), SubClassSamePkHistory( id=3, name="same1", type="same", version=2), ], ) base1.name = "base1mod2" eq_( sess.query(BaseClassHistory).order_by( BaseClassHistory.id, BaseClassHistory.version).all(), [ SubClassSeparatePkHistory( id=1, name="sep1", type="sep", version=1), BaseClassHistory(id=2, name="base1", type="base", version=1), BaseClassHistory(id=2, name="base1mod", type="base", version=2), SubClassSamePkHistory( id=3, name="same1", 
type="same", version=1),
                SubClassSamePkHistory(
                    id=3, name="same1", type="same", version=2),
            ],
        )

    # Three-level joined-table inheritance: each level gets its own
    # *_history table, and the generated history query must join all
    # three history tables on BOTH id and version.
    def test_joined_inheritance_multilevel(self):
        class BaseClass(Versioned, self.Base, ComparableEntity):
            __tablename__ = "basetable"

            id = Column(Integer, primary_key=True)
            name = Column(String(50))
            type = Column(String(20))

            __mapper_args__ = {
                "polymorphic_on": type,
                "polymorphic_identity": "base",
            }

        class SubClass(BaseClass):
            __tablename__ = "subtable"

            # local pk is merged with the base pk via column_property
            id = column_property(Column(Integer, primary_key=True), BaseClass.id)
            base_id = Column(Integer, ForeignKey("basetable.id"))
            subdata1 = Column(String(50))

            __mapper_args__ = {"polymorphic_identity": "sub"}

        class SubSubClass(SubClass):
            __tablename__ = "subsubtable"

            id = Column(Integer, ForeignKey("subtable.id"), primary_key=True)
            subdata2 = Column(String(50))

            __mapper_args__ = {"polymorphic_identity": "subsub"}

        self.create_tables()
        SubSubHistory = SubSubClass.__history_mapper__.class_
        sess = self.session
        q = sess.query(SubSubHistory)
        # pin the exact SQL: all three history tables joined on id+version
        self.assert_compile(
            q,
            "SELECT "
            "subsubtable_history.id AS subsubtable_history_id, "
            "subtable_history.id AS subtable_history_id, "
            "basetable_history.id AS basetable_history_id, "
            "subsubtable_history.changed AS subsubtable_history_changed, "
            "subtable_history.changed AS subtable_history_changed, "
            "basetable_history.changed AS basetable_history_changed, "
            "basetable_history.name AS basetable_history_name, "
            "basetable_history.type AS basetable_history_type, "
            "subsubtable_history.version AS subsubtable_history_version, "
            "subtable_history.version AS subtable_history_version, "
            "basetable_history.version AS basetable_history_version, "
            "subtable_history.base_id AS subtable_history_base_id, "
            "subtable_history.subdata1 AS subtable_history_subdata1, "
            "subsubtable_history.subdata2 AS subsubtable_history_subdata2 "
            "FROM basetable_history "
            "JOIN subtable_history "
            "ON basetable_history.id = subtable_history.base_id "
            "AND basetable_history.version = subtable_history.version "
            "JOIN subsubtable_history ON subtable_history.id = "
            "subsubtable_history.id AND subtable_history.version = "
            "subsubtable_history.version",
        )

        ssc = SubSubClass(name="ss1", subdata1="sd1", subdata2="sd2")
        sess.add(ssc)
        sess.commit()
        # no history rows until the first mutation
        eq_(sess.query(SubSubHistory).all(), [])
        ssc.subdata1 = "sd11"
        ssc.subdata2 = "sd22"
        sess.commit()
        # one history row holding the pre-mutation state at version 1
        eq_(
            sess.query(SubSubHistory).all(),
            [
                SubSubHistory(
                    name="ss1",
                    subdata1="sd1",
                    subdata2="sd2",
                    type="subsub",
                    version=1,
                )
            ],
        )
        eq_(
            ssc,
            SubSubClass(name="ss1", subdata1="sd11", subdata2="sd22", version=2),
        )

    # Joined inheritance: the `changed` timestamp written to the base
    # history table and to the sub history table must agree.
    def test_joined_inheritance_changed(self):
        class BaseClass(Versioned, self.Base, ComparableEntity):
            __tablename__ = "basetable"

            id = Column(Integer, primary_key=True)
            name = Column(String(50))
            type = Column(String(20))

            __mapper_args__ = {
                "polymorphic_on": type,
                "polymorphic_identity": "base",
            }

        class SubClass(BaseClass):
            __tablename__ = "subtable"

            id = Column(Integer, ForeignKey("basetable.id"), primary_key=True)

            __mapper_args__ = {"polymorphic_identity": "sep"}

        self.create_tables()

        BaseClassHistory = BaseClass.__history_mapper__.class_
        SubClassHistory = SubClass.__history_mapper__.class_

        sess = self.session
        s1 = SubClass(name="s1")
        sess.add(s1)
        sess.commit()

        s1.name = "s2"
        sess.commit()

        # read the raw `changed` column of each history table directly
        actual_changed_base = sess.scalar(
            select([BaseClass.__history_mapper__.local_table.c.changed]))
        actual_changed_sub = sess.scalar(
            select([SubClass.__history_mapper__.local_table.c.changed]))

        # querying through either history mapper yields the same timestamp
        h1 = sess.query(BaseClassHistory).first()
        eq_(h1.changed, actual_changed_base)
        eq_(h1.changed, actual_changed_sub)

        h1 = sess.query(SubClassHistory).first()
        eq_(h1.changed, actual_changed_base)
        eq_(h1.changed, actual_changed_sub)

    # Single-table inheritance: base and subclass history rows share one
    # history table; a UNIQUE subclass column must not break history writes.
    def test_single_inheritance(self):
        class BaseClass(Versioned, self.Base, ComparableEntity):
            __tablename__ = "basetable"

            id = Column(Integer, primary_key=True)
            name = Column(String(50))
            type = Column(String(50))

            __mapper_args__ = {
                "polymorphic_on": type,
                "polymorphic_identity": "base",
            }

        class SubClass(BaseClass):
            subname = Column(String(50), unique=True)

            __mapper_args__ = {"polymorphic_identity": "sub"}

        self.create_tables()
        sess = self.session

        b1 = BaseClass(name="b1")
        sc = SubClass(name="s1", subname="sc1")

        sess.add_all([b1, sc])
        sess.commit()

        b1.name = "b1modified"

        BaseClassHistory = BaseClass.__history_mapper__.class_
        SubClassHistory = SubClass.__history_mapper__.class_

        eq_(
            sess.query(BaseClassHistory).order_by(
                BaseClassHistory.id, BaseClassHistory.version).all(),
            [BaseClassHistory(id=1, name="b1", type="base", version=1)],
        )

        sc.name = "s1modified"
        b1.name = "b1modified2"

        eq_(
            sess.query(BaseClassHistory).order_by(
                BaseClassHistory.id, BaseClassHistory.version).all(),
            [
                BaseClassHistory(id=1, name="b1", type="base", version=1),
                BaseClassHistory(
                    id=1, name="b1modified", type="base", version=2),
                SubClassHistory(id=2, name="s1", type="sub", version=1),
            ],
        )

        # test the unique constraint on the subclass
        # column
        sc.name = "modifyagain"
        sess.flush()

    # A UNIQUE column on the versioned table itself: successive updates
    # must keep bumping the version without constraint violations.
    def test_unique(self):
        class SomeClass(Versioned, self.Base, ComparableEntity):
            __tablename__ = "sometable"

            id = Column(Integer, primary_key=True)
            name = Column(String(50), unique=True)
            data = Column(String(50))

        self.create_tables()
        sess = self.session
        sc = SomeClass(name="sc1", data="sc1")
        sess.add(sc)
        sess.commit()

        sc.data = "sc1modified"
        sess.commit()

        assert sc.version == 2

        sc.data = "sc1modified2"
        sess.commit()

        assert sc.version == 3

    # Changing a many-to-one relationship (i.e. its FK column) creates a
    # new version, and the old FK value is captured in history.
    def test_relationship(self):
        class SomeRelated(self.Base, ComparableEntity):
            __tablename__ = "somerelated"

            id = Column(Integer, primary_key=True)

        class SomeClass(Versioned, self.Base, ComparableEntity):
            __tablename__ = "sometable"

            id = Column(Integer, primary_key=True)
            name = Column(String(50))
            related_id = Column(Integer, ForeignKey("somerelated.id"))
            related = relationship("SomeRelated", backref="classes")

        SomeClassHistory = SomeClass.__history_mapper__.class_

        self.create_tables()
        sess = self.session
        sc = SomeClass(name="sc1")
        sess.add(sc)
        sess.commit()

        assert sc.version == 1

        sr1 = SomeRelated()
        sc.related = sr1
        sess.commit()

        assert sc.version == 2

        eq_(
            sess.query(SomeClassHistory).filter(
                SomeClassHistory.version == 1).all(),
            [SomeClassHistory(version=1, name="sc1", related_id=None)],
        )

        sc.related = None

        eq_(
            sess.query(SomeClassHistory).order_by(
                SomeClassHistory.version).all(),
            [
                SomeClassHistory(version=1, name="sc1", related_id=None),
                SomeClassHistory(version=2, name="sc1", related_id=sr1.id),
            ],
        )

        assert sc.version == 3

    # Mutations on the *other* side of a backref must NOT bump the
    # versioned object's version (the versioned row itself is unchanged).
    def test_backref_relationship(self):
        class SomeRelated(self.Base, ComparableEntity):
            __tablename__ = "somerelated"

            id = Column(Integer, primary_key=True)
            name = Column(String(50))
            related_id = Column(Integer, ForeignKey("sometable.id"))
            related = relationship("SomeClass", backref="related")

        class SomeClass(Versioned, self.Base, ComparableEntity):
            __tablename__ = "sometable"

            id = Column(Integer, primary_key=True)

        self.create_tables()
        sess = self.session
        sc = SomeClass()
        sess.add(sc)
        sess.commit()

        assert sc.version == 1

        sr = SomeRelated(name="sr", related=sc)
        sess.add(sr)
        sess.commit()

        assert sc.version == 1

        sr.name = "sr2"
        sess.commit()

        assert sc.version == 1

        sess.delete(sr)
        sess.commit()

        assert sc.version == 1

    # INSERT followed by UPDATE within one transaction, via two flushes:
    # both operations version correctly.
    def test_create_double_flush(self):
        class SomeClass(Versioned, self.Base, ComparableEntity):
            __tablename__ = "sometable"

            id = Column(Integer, primary_key=True)
            name = Column(String(30))
            other = Column(String(30))

        self.create_tables()

        sc = SomeClass()
        self.session.add(sc)
        self.session.flush()
        sc.name = "Foo"
        self.session.flush()

        assert sc.version == 2

    # Mutating a plain (un-renamed) column is captured in history.
    def test_mutate_plain_column(self):
        class Document(self.Base, Versioned):
            __tablename__ = "document"

            id = Column(Integer, primary_key=True, autoincrement=True)
            name = Column(String, nullable=True)
            # attribute name differs from the DB column name ("description")
            description_ = Column("description", String, nullable=True)

        self.create_tables()
        document = Document()
        self.session.add(document)
        document.name = "Foo"
        self.session.commit()
        document.name = "Bar"
        self.session.commit()

        DocumentHistory = Document.__history_mapper__.class_
        v2 = self.session.query(Document).one()
        v1 = self.session.query(DocumentHistory).one()
        self.assertEqual(v1.id, v2.id)
        self.assertEqual(v2.name, "Bar")
        self.assertEqual(v1.name, "Foo")

    # Mutating a column whose attribute name differs from its DB column
    # name is also captured in history.
    def test_mutate_named_column(self):
        class Document(self.Base, Versioned):
            __tablename__ = "document"

            id = Column(Integer, primary_key=True, autoincrement=True)
            name = Column(String, nullable=True)
            description_ = Column("description", String, nullable=True)

        self.create_tables()
        document = Document()
        self.session.add(document)
        document.description_ = "Foo"
        self.session.commit()
        document.description_ = "Bar"
        self.session.commit()

        DocumentHistory = Document.__history_mapper__.class_
        v2 = self.session.query(Document).one()
        v1 = self.session.query(DocumentHistory).one()
        self.assertEqual(v1.id, v2.id)
        self.assertEqual(v2.description_, "Bar")
        self.assertEqual(v1.description_, "Foo")

    def test_unique_identifiers_across_deletes(self):
        """Ensure unique integer values are used for the primary table.

        Checks whether the database assigns the same identifier twice
        within the span of a table. SQLite will do this if
        sqlite_autoincrement is not set (e.g. SQLite's AUTOINCREMENT
        flag).
        """

        class SomeClass(Versioned, self.Base, ComparableEntity):
            __tablename__ = "sometable"

            id = Column(Integer, primary_key=True)
            name = Column(String(50))

        self.create_tables()
        sess = self.session
        sc = SomeClass(name="sc1")
        sess.add(sc)
        sess.commit()

        sess.delete(sc)
        sess.commit()

        sc2 = SomeClass(name="sc2")
        sess.add(sc2)
        sess.commit()

        SomeClassHistory = SomeClass.__history_mapper__.class_

        # only one entry should exist in the history table; one()
        # ensures that
        scdeleted = sess.query(SomeClassHistory).one()

        # If sc2 has the same id that deleted sc1 had,
        # it will fail when modified or deleted
        # because of the violation of the uniqueness of the primary key on
        # sometable_history
        ne_(sc2.id, scdeleted.id)

        # If previous assertion fails, this will also fail:
        sc2.name = "sc2 modified"
        sess.commit()
def get_users(db: Session, skip: int = 0, limit: int = 100):
    """Fetch a page of users.

    Skips the first ``skip`` rows and returns at most ``limit`` rows.
    """
    user_query = db.query(models.User)
    page = user_query.offset(skip).limit(limit)
    return page.all()
# set up engine = create_engine("sqlite:///resources/hawaii.sqlite") # reflect database Base = automap_base() # reflect tables Base.prepare(engine, reflect=True) # Table References Station = Base.classes.station Measurement = Base.classes.measurement # creates session link from python to database session = Session(engine) # set up Flask app = Flask(__name__) # flask Routes @app.route("/") def landing(): return( f"<a href='/api/v1.0/precipitation'>Precipitation</a><br/>" f"<a href='/api/v1.0/stations'>Stations</a><br/>" f"<a href='/api/v1.0/tobs'>Temps from All Stations</a><br/>" f"<a href='/api/v1.0/precipitation'>Precipitation</a><br/>" f"/api/v1.0/-ymd-start_date<br/>" f"api/v1.0/-ymd-start/-ymd-end<br/>")
def _fixture(self):
    """Map Foo against version_table with version counting enabled and
    return a new Session for the test to drive.
    """
    Foo = self.classes.Foo
    version_table = self.tables.version_table

    mapper(Foo, version_table, version_id_col=version_table.c.version_id)
    return Session()
def get_user_by_email(db: Session, email: str):
    """Return the first user whose e-mail matches exactly, or None."""
    by_email = models.User.email == email
    return db.query(models.User).filter(by_email).first()
def get_items(db: Session, skip: int = 0, limit: int = 100) -> List[models.Item]:
    """Fetch a page of items: skip ``skip`` rows, return at most ``limit``."""
    item_query = db.query(models.Item)
    window = item_query.offset(skip).limit(limit)
    return window.all()
def test_child_row_switch_two(self):
    """Two sessions race on the same versioned row 'P1'.

    sess1 deletes and re-creates the row; sess2 then tries to UPDATE its
    stale copy.  On dialects that report accurate rowcounts this must
    raise StaleDataError; elsewhere the commit is simply executed.
    """
    P = self.classes.P

    Session = sessionmaker()

    # TODO: not sure this test is
    # testing exactly what its looking for
    sess1 = Session()
    sess1.add(P(id='P1', data='P version 1'))
    sess1.commit()
    sess1.close()

    p1 = sess1.query(P).first()

    sess2 = Session()
    p2 = sess2.query(P).first()

    sess1.delete(p1)
    sess1.commit()

    # this can be removed and it still passes
    sess1.add(P(id='P1', data='P version 2'))
    sess1.commit()

    p2.data = 'P overwritten by concurrent tx'
    if testing.db.dialect.supports_sane_rowcount:
        assert_raises_message(
            orm.exc.StaleDataError,
            r"UPDATE statement on table 'p' expected to update "
            r"1 row\(s\); 0 were matched.",
            sess2.commit)
    else:
        # BUG FIX: this was `sess2.commit` — a bare attribute access that
        # never invoked commit, so the fallback branch tested nothing.
        sess2.commit()
def get_tenants(db_session: Session, skip: int = 0, limit: int = 100):
    """Return at most ``limit`` tenants after skipping the first ``skip``."""
    paged = db_session.query(Tenant).offset(skip).limit(limit)
    return paged.all()
def get_user(db: Session, user_id: int) -> models.User:
    """Fetch a single user by primary key; None when no row matches."""
    by_id = models.User.id == user_id
    return db.query(models.User).filter(by_id).first()
from flask import Flask, jsonify # ------------------------------------------------------------- # Database Setup engine = create_engine("sqlite:///Resources/hawaii.sqlite") # Reflect an existing database and tables Base = automap_base() Base.prepare(engine, reflect=True) # Save reference to the tables Measurement = Base.classes.measurement Station = Base.classes.station # ------------------------------------------------------ session = Session(engine) # station_max = session.query(Measurement.station, func.count(Measurement.station)).group_by( # Measurement.station).order_by(func.count(Measurement.station).desc()).all() # # create DataFrame using data # df = pd.DataFrame(station_max, columns=['station', 'count']) # max_df = df.loc[df['count'].idxmax()] # # find the last date in the database # last_date = session.query(Measurement.date).order_by( # Measurement.date.desc()).first()
def get_tenant_by_name(db_session: Session, name: str):
    """Get a tenant by name."""
    query = db_session.query(Tenant).filter(Tenant.name == name)
    return query.first()
def import_products(date: datetime.date, products: IO[str], session: Session):
    """Parse a JSON product feed and persist one Product row per entry.

    It seems this data source uses 'p' as a unit of price and this must
    be converted to '£', hence the division by 100.
    """
    entries = json.loads(s=products.read())
    for entry in entries:
        record = Product(
            date=date,
            sku=entry['Sku'],
            price=entry['Price'] / 100,
        )
        session.add(record)
    session.commit()
def get_tenant(db_session: Session, id: int):
    """Fetch one tenant by primary key; None if absent.

    NOTE: the parameter name ``id`` shadows the builtin, but it is part
    of the public signature (callers may pass it by keyword), so it stays.
    """
    match = Tenant.id == id
    return db_session.query(Tenant).filter(match).first()