def test_one_to_many_on_o2m(self):
    """A primary-key (name) change on the parent cascades to the
    children's FK column with passive_updates=False on the o2m
    relationship (the ORM emits the child UPDATEs itself).

    NOTE(review): appears to duplicate a later test of the same name
    in this file — confirm and deduplicate.
    """
    Node, nodes = self.classes.Node, self.tables.nodes
    mapper(
        Node,
        nodes,
        properties={
            "children": relationship(
                Node,
                backref=sa.orm.backref("parentnode", remote_side=nodes.c.name),
                passive_updates=False
            )
        },
    )
    sess = Session()
    n1 = Node(name="n1")
    n1.children.append(Node(name="n11"))
    n1.children.append(Node(name="n12"))
    n1.children.append(Node(name="n13"))
    sess.add(n1)
    sess.commit()
    n1.name = "new n1"
    sess.commit()
    # each child's FK column ("parent") must reflect the renamed parent
    eq_(n1.children[1].parent, "new n1")
    eq_(
        ["new n1", "new n1", "new n1"],
        [n.parent for n in sess.query(Node).filter(Node.name.in_(["n11", "n12", "n13"]))],
    )
def create_user(dbsession:Session, registry:Registry, email:str=EMAIL, password:str=PASSWORD, admin:bool=False) -> User:
    """Create a test user, flush it, and optionally promote it to admin.

    :param dbsession: active database session the user is flushed into
    :param registry: component registry used to look up the password hasher
    :param email: user's email address
    :param password: plaintext password; hashed if truthy
    :param admin: If True run :py:class:`websauna.system.user.usermixin.SiteCreator` login and set the user to admin group.
    :return: the flushed User instance
    """
    user = User(email=email)
    if password:
        user.hashed_password = registry.getUtility(IPasswordHasher).hash_password(password)
    user.user_registration_source = "dummy"
    dbsession.add(user)
    dbsession.flush()
    user.username = user.generate_username()
    user.activated_at = now()
    assert user.can_login()
    if admin:
        # First user, make it admin
        get_site_creator(registry).init_empty_site(dbsession, user)
    return user
def test_not_none(self): Graph, Edge, Point = (self.classes.Graph, self.classes.Edge, self.classes.Point) # current contract. the composite is None # when hasn't been populated etc. on a # pending/transient object. e1 = Edge() assert e1.end is None sess = Session() sess.add(e1) # however, once it's persistent, the code as of 0.7.3 # would unconditionally populate it, even though it's # all None. I think this usage contract is inconsistent, # and it would be better that the composite is just # created unconditionally in all cases. # but as we are just trying to fix [ticket:2308] and # [ticket:2309] without changing behavior we maintain # that only "persistent" gets the composite with the # Nones sess.flush() assert e1.end is not None
def test_before_flush_affects_dirty(self):
    """Objects mutated inside a before_flush hook are included in that
    same flush: on the second flush the persistent 'u1' row is renamed
    to 'u1 modified' by the hook."""
    users, User = self.tables.users, self.classes.User
    mapper(User, users)

    def before_flush(session, flush_context, objects):
        # mutate every object currently in the identity map during flush
        for obj in list(session.identity_map.values()):
            obj.name += " modified"

    sess = Session(autoflush=True)
    event.listen(sess, 'before_flush', before_flush)
    u = User(name='u1')
    sess.add(u)
    sess.flush()
    # first flush: u was still pending when the hook ran, so 'u1'
    # is unchanged
    eq_(sess.query(User).order_by(User.name).all(),
        [User(name='u1')]
    )
    sess.add(User(name='u2'))
    sess.flush()
    sess.expunge_all()
    # second flush: the hook saw the now-persistent u1 and renamed it
    eq_(sess.query(User).order_by(User.name).all(),
        [
            User(name='u1 modified'),
            User(name='u2')
        ]
    )
def test_set_composite_attrs_via_selectable(self):
    """Composite values assigned on objects mapped to selectables are
    persisted into the correct underlying table columns."""
    Values, CustomValues, values, Descriptions, descriptions = (
        self.classes.Values,
        self.classes.CustomValues,
        self.tables.values,
        self.classes.Descriptions,
        self.tables.descriptions)
    session = Session()
    d = Descriptions(
        custom_descriptions=CustomValues('Color', 'Number'),
        values=[
            Values(custom_values=CustomValues('Red', '5')),
            Values(custom_values=CustomValues('Blue', '1'))
        ]
    )
    session.add(d)
    session.commit()
    # composite columns land in the descriptions row...
    eq_(
        testing.db.execute(descriptions.select()).fetchall(),
        [(1, 'Color', 'Number')]
    )
    # ...and in each related values row
    eq_(
        testing.db.execute(values.select()).fetchall(),
        [(1, 1, 'Red', '5'), (2, 1, 'Blue', '1')]
    )
def test_explicit_expunge_deleted(self):
    """After flushing a delete and explicitly expunging the object,
    was_deleted() stays True and the object stays fully detached,
    even across a subsequent rollback."""
    users, User = self.tables.users, self.classes.User
    mapper(User, users)
    sess = Session()
    sess.add(User(name='x'))
    sess.commit()
    u1 = sess.query(User).first()
    sess.delete(u1)
    sess.flush()
    # deleted + flushed: no longer "in" the session but still owned by it
    assert was_deleted(u1)
    assert u1 not in sess
    assert object_session(u1) is sess
    sess.expunge(u1)
    # after expunge the object is fully detached
    assert was_deleted(u1)
    assert u1 not in sess
    assert object_session(u1) is None
    sess.rollback()
    # rollback does not resurrect the expunged object
    assert was_deleted(u1)
    assert u1 not in sess
    assert object_session(u1) is None
def test_move_persistent_clean(self):
    """Moving a clean persistent object into a fresh session leaves it
    unmodified and introduces no reference cycle."""
    session, user = self._persistent_fixture()
    session.close()
    fresh_session = Session()
    fresh_session.add(user)
    self._assert_no_cycle(user)
    self._assert_not_modified(user)
def test_is_modified_passive_off(self):
    """as of 0.8 no SQL is emitted for is_modified()
    regardless of the passive flag"""
    User, Address = self._default_mapping_fixture()
    s = Session()
    u = User(name='fred', addresses=[
        Address(email_address='foo')])
    s.add(u)
    s.commit()
    u.id  # touch the PK so it is loaded before the SQL-count block

    def go():
        assert not s.is_modified(u)
    self.assert_sql_count(
        testing.db,
        go,
        0
    )
    s.expire_all()
    u.name = 'newname'
    # can't predict result here
    # deterministically, depending on if
    # 'name' or 'addresses' is tested first
    mod = s.is_modified(u)
    addresses_loaded = 'addresses' in u.__dict__
    assert mod is not addresses_loaded
def test_is_modified_passive_on(self):
    """is_modified(passive=True) emits no SQL, both before and after a
    pending attribute change."""
    User, Address = self._default_mapping_fixture()
    s = Session()
    u = User(name='fred', addresses=[Address(email_address='foo')])
    s.add(u)
    s.commit()
    u.id  # touch the PK so it is loaded before the SQL-count blocks

    def go():
        assert not s.is_modified(u, passive=True)
    self.assert_sql_count(testing.db, go, 0)

    u.name = 'newname'

    def go():
        assert s.is_modified(u, passive=True)
    self.assert_sql_count(testing.db, go, 0)
def create_scheduled_text(text_data):
    """Persist a ScheduledText built from *text_data* and return it.

    Expected keys: phone_number, route_number, stop_id, hour, minute,
    ampm ('am'/'pm'), max_minutes.  A random 6-digit validation code is
    attached.  Raises on commit failure after rolling back.
    """
    engine = create_engine(settings['ENGINE_STRING'])
    session = Session(engine)
    try:
        st = ScheduledText()
        validation_code = randint(100000, 999999)
        st.phone_number = text_data["phone_number"]
        st.route_number = text_data["route_number"]
        st.stop_id = text_data["stop_id"]
        st.hour = int(text_data["hour"])
        st.minute = int(text_data["minute"])
        # 12-hour -> 24-hour conversion; previously "12 pm" became hour
        # 24 and "12 am" stayed 12 — 12pm stays 12, 12am becomes 0
        if text_data["ampm"] == "pm" and st.hour != 12:
            st.hour += 12
        elif text_data["ampm"] == "am" and st.hour == 12:
            st.hour = 0
        st.max_minutes = text_data["max_minutes"]
        st.validation_code = validation_code
        session.add(st)
        session.commit()
    except Exception:
        # was a bare `except:` which also intercepted SystemExit /
        # KeyboardInterrupt; roll back and re-raise the original error
        session.rollback()
        raise
    # NOTE(review): session is intentionally left open so the returned
    # instance's attributes stay loadable — confirm callers expect this
    return st
def test_auto_detach_on_gc_session(self):
    """An object bound to a garbage-collected session can be added to a
    new session once the old session is gone."""
    users, User = self.tables.users, self.classes.User
    mapper(User, users)
    sess = Session()
    u1 = User(name='u1')
    sess.add(u1)
    sess.commit()
    # can't add u1 to the new Session,
    # it already belongs to sess
    s2 = Session()
    assert_raises_message(
        sa.exc.InvalidRequestError,
        r".*is already attached to session",
        s2.add, u1
    )
    # garbage collect sess
    del sess
    gc_collect()
    # s2 lets it in now despite u1 having
    # session_key
    s2.add(u1)
    assert u1 in s2
def test_cast_type(self):
    """JSON column values are queryable via int/text casts and astext,
    including after an in-place value change."""
    Json = self.classes.Json
    s = Session(testing.db)
    j = Json(json={'field': 10})
    s.add(j)
    s.commit()
    jq = s.query(Json).filter(Json.int_field == 10).one()
    eq_(j.id, jq.id)
    jq = s.query(Json).filter(Json.text_field == '10').one()
    eq_(j.id, jq.id)
    jq = s.query(Json).filter(Json.json_field.astext == '10').one()
    eq_(j.id, jq.id)
    # non-matching text cast finds nothing
    jq = s.query(Json).filter(Json.text_field == 'wrong').first()
    is_(jq, None)
    j.json = {'field': True}
    s.commit()
    # JSON booleans render as the text 'true'
    jq = s.query(Json).filter(Json.text_field == 'true').one()
    eq_(j.id, jq.id)
def test_map_to_select(self):
    """A mapper over a select() alias participates in polymorphic
    inheritance and round-trips a Child row."""
    Base, Child = self.classes.Base, self.classes.Child
    base, child = self.tables.base, self.tables.child
    base_select = select([base]).alias()
    mapper(
        Base,
        base_select,
        polymorphic_on=base_select.c.type,
        polymorphic_identity="base",
    )
    mapper(Child, child, inherits=Base, polymorphic_identity="child")
    sess = Session()
    # 2. use an id other than "1" here so can't rely on
    # the two inserts having the same id
    c1 = Child(id=12, name="c1")
    sess.add(c1)
    sess.commit()
    sess.close()
    c1 = sess.query(Child).one()
    eq_(c1.name, "c1")
def test_persistence_states(self):
    """inspect() state flags walk transient -> pending -> persistent ->
    detached as the object moves through add/flush/expunge."""
    User = self.classes.User
    u1 = User(name='ed')
    insp = inspect(u1)
    # freshly constructed: transient
    eq_(
        (insp.transient, insp.pending, insp.persistent, insp.detached),
        (True, False, False, False)
    )
    s = Session(testing.db)
    s.add(u1)
    # added but not flushed: pending
    eq_(
        (insp.transient, insp.pending, insp.persistent, insp.detached),
        (False, True, False, False)
    )
    s.flush()
    # flushed: persistent
    eq_(
        (insp.transient, insp.pending, insp.persistent, insp.detached),
        (False, False, True, False)
    )
    s.expunge(u1)
    # expunged after flush: detached
    eq_(
        (insp.transient, insp.pending, insp.persistent, insp.detached),
        (False, False, False, True)
    )
def get(session: orm.Session, discord_id=None, user_id=None, twitch_id=None, create=True):
    """Look up a User by one identifier, optionally creating it on miss.

    Identifier priority: user_id, then twitch_id (lowercased), then
    discord_id.

    :param session: active ORM session
    :param discord_id: discord identifier
    :param user_id: primary user id
    :param twitch_id: twitch login, matched case-insensitively
    :param create: when True, create and return a new User on a miss
    :rtype: roboto.model.User
    :return: matching (or newly created) User, or None
    """
    if not any([discord_id, user_id, twitch_id]):
        return None
    if user_id:
        column, value = User.user_id, user_id
    elif twitch_id:
        column, value = User.twitch_id, twitch_id.lower()
    else:
        column, value = User.discord_id, discord_id
    found = session.query(User).filter(column == value).first()
    if found:
        return found
    if not create:
        return None
    user = User()
    setattr(user, column.key, value)
    session.add(user)
    log.debug("Creating new user: {}".format(user))
    return user
def test_one_to_many_on_m2o(self):
    """Primary-key (name) change cascades to children when
    passive_updates=False is configured on the m2o backref side."""
    Node, nodes = self.classes.Node, self.tables.nodes
    mapper(Node, nodes, properties={
        'children': relationship(Node,
                                 backref=sa.orm.backref('parentnode',
                                                        remote_side=nodes.c.name,
                                                        passive_updates=False),
                                 )})
    sess = Session()
    n1 = Node(name='n1')
    sess.add(n1)
    n2 = Node(name='n11', parentnode=n1)
    n3 = Node(name='n12', parentnode=n1)
    n4 = Node(name='n13', parentnode=n1)
    sess.add_all([n2, n3, n4])
    sess.commit()
    n1.name = 'new n1'
    sess.commit()
    # each child's FK column ("parent") must follow the renamed parent
    eq_(['new n1', 'new n1', 'new n1'],
        [n.parent for n in sess.query(Node).filter(
            Node.name.in_(['n11', 'n12', 'n13']))])
def test_one_to_many_on_o2m(self):
    """Parent primary-key (name) change cascades to children with
    passive_updates=False on the o2m relationship.

    NOTE(review): appears to duplicate an earlier test of the same name
    in this file — confirm and deduplicate.
    """
    Node, nodes = self.classes.Node, self.tables.nodes
    mapper(Node, nodes, properties={
        'children': relationship(Node,
                                 backref=sa.orm.backref('parentnode',
                                                        remote_side=nodes.c.name),
                                 passive_updates=False
                                 )})
    sess = Session()
    n1 = Node(name='n1')
    n1.children.append(Node(name='n11'))
    n1.children.append(Node(name='n12'))
    n1.children.append(Node(name='n13'))
    sess.add(n1)
    sess.commit()
    n1.name = 'new n1'
    sess.commit()
    # children's FK column ("parent") must reflect the new parent name
    eq_(n1.children[1].parent, 'new n1')
    eq_(['new n1', 'new n1', 'new n1'],
        [n.parent for n in sess.query(Node).filter(
            Node.name.in_(['n11', 'n12', 'n13']))])
def test_scalar(self):
    """@validates on a scalar attribute can transform values and raise
    to reject them; a rejected set leaves the prior value in place."""
    users = self.tables.users
    canary = Mock()

    class User(fixtures.ComparableEntity):
        @validates('name')
        def validate_name(self, key, name):
            canary(key, name)
            ne_(name, 'fred')
            return name + ' modified'

    mapper(User, users)
    sess = Session()
    u1 = User(name='ed')
    eq_(u1.name, 'ed modified')
    # 'fred' is rejected because ne_ raises inside the validator
    assert_raises(AssertionError, setattr, u1, "name", "fred")
    eq_(u1.name, 'ed modified')
    eq_(canary.mock_calls, [call('name', 'ed'), call('name', 'fred')])
    sess.add(u1)
    sess.commit()
    # RHS constructor also runs the validator, yielding 'ed modified'
    eq_(
        sess.query(User).filter_by(name='ed modified').one(),
        User(name='ed')
    )
def test_child_row_switch_two(self):
    """Concurrent delete/re-insert of a PK in one session while a second
    session updates its stale copy: the second commit must raise
    StaleDataError (0 rows matched) on dialects with sane rowcounts.
    """
    P = self.classes.P
    Session = sessionmaker()
    # TODO: not sure this test is
    # testing exactly what its looking for
    sess1 = Session()
    sess1.add(P(id='P1', data='P version 1'))
    sess1.commit()
    sess1.close()
    p1 = sess1.query(P).first()
    sess2 = Session()
    p2 = sess2.query(P).first()
    sess1.delete(p1)
    sess1.commit()
    # this can be removed and it still passes
    sess1.add(P(id='P1', data='P version 2'))
    sess1.commit()
    p2.data = 'P overwritten by concurrent tx'
    if testing.db.dialect.supports_sane_rowcount:
        assert_raises_message(
            orm.exc.StaleDataError,
            r"UPDATE statement on table 'p' expected to update "
            r"1 row\(s\); 0 were matched.",
            sess2.commit
        )
    else:
        # bug fix: was `sess2.commit` (bare attribute access) — the
        # commit was never actually executed on this branch
        sess2.commit()
def test_instance_lazy_relation_loaders(self):
    """Per-query loader options (lazyload) attached to an instance
    survive pickling: the unpickled copy can still load the
    relationship despite the mapper-level 'noload' default."""
    users, addresses = (self.tables.users, self.tables.addresses)
    mapper(User, users, properties={
        'addresses': relationship(Address, lazy='noload')
    })
    mapper(Address, addresses)
    sess = Session()
    u1 = User(name='ed', addresses=[
        Address(
            email_address='*****@*****.**',
        )
    ])
    sess.add(u1)
    sess.commit()
    sess.close()
    # query-level lazyload() overrides the mapper-level 'noload'
    u1 = sess.query(User).options(
        lazyload(User.addresses)
    ).first()
    u2 = pickle.loads(pickle.dumps(u1))
    sess = Session()
    sess.add(u2)
    assert u2.addresses
def test_collection(self):
    """@validates on a collection fires once per append and can reject
    individual elements by raising."""
    users, addresses, Address = (self.tables.users,
                                 self.tables.addresses,
                                 self.classes.Address)
    canary = Mock()

    class User(fixtures.ComparableEntity):
        @validates('addresses')
        def validate_address(self, key, ad):
            canary(key, ad)
            assert '@' in ad.email_address
            return ad

    mapper(User, users, properties={
        'addresses': relationship(Address)}
    )
    mapper(Address, addresses)
    sess = Session()
    u1 = User(name='edward')
    a0 = Address(email_address='noemail')
    # rejected: no '@' in the address
    assert_raises(AssertionError, u1.addresses.append, a0)
    a1 = Address(id=15, email_address='*****@*****.**')
    u1.addresses.append(a1)
    # the validator saw both the rejected and the accepted append
    eq_(canary.mock_calls, [call('addresses', a0), call('addresses', a1)])
    sess.add(u1)
    sess.commit()
    eq_(
        sess.query(User).filter_by(name='edward').one(),
        User(name='edward', addresses=[Address(email_address='*****@*****.**')])
    )
def test_09_pickle(self):
    """An InstanceState pickled under the 0.9 format ('callables' dict
    plus an 'expired' flag) restores into the modern
    expired_attributes representation."""
    users = self.tables.users
    mapper(User, users)
    sess = Session()
    sess.add(User(id=1, name='ed'))
    sess.commit()
    sess.close()
    inst = User(id=1, name='ed')
    del inst._sa_instance_state
    state = sa_state.InstanceState.__new__(sa_state.InstanceState)
    # hand-built 0.9-era __getstate__ payload
    state_09 = {
        'class_': User,
        'modified': False,
        'committed_state': {},
        'instance': inst,
        'callables': {'name': state, 'id': state},
        'key': (User, (1,)),
        'expired': True}
    manager = instrumentation._SerializeManager.__new__(
        instrumentation._SerializeManager)
    manager.class_ = User
    state_09['manager'] = manager
    state.__setstate__(state_09)
    eq_(state.expired_attributes, {'name', 'id'})
    sess = Session()
    sess.add(inst)
    eq_(inst.name, 'ed')
    # test identity_token expansion
    eq_(sa.inspect(inst).key, (User, (1, ), None))
def test_continue_flushing_on_commit(self):
    """test that post-flush actions get flushed also if
    we're in commit()"""
    users, User = self.tables.users, self.classes.User
    mapper(User, users)
    sess = Session()
    to_flush = [User(name='ed'), User(name='jack'), User(name='wendy')]

    @event.listens_for(sess, "after_flush_postexec")
    def add_another_user(session, ctx):
        # keep adding pending users so commit() must loop its flush
        if to_flush:
            session.add(to_flush.pop(0))

    x = [1]

    # intentionally shadows the handler name above; noqa silences the
    # redefinition warning
    @event.listens_for(sess, "after_commit")  # noqa
    def add_another_user(session):
        x[0] += 1

    sess.add(to_flush.pop())
    sess.commit()
    eq_(x, [2])
    # all three users must have been flushed by the single commit
    eq_(
        sess.scalar(select([func.count(users.c.id)])),
        3
    )
def test_noload_append(self):
    # test that a load of User.addresses is not emitted
    # when flushing an append
    User, Address = self._user_address_fixture()
    sess = Session()
    u1 = User(name="jack", addresses=[Address(email_address="a1")])
    sess.add(u1)
    sess.commit()
    u1_id = u1.id
    sess.expire_all()
    u1.addresses.append(Address(email_address='a2'))
    # expect exactly the parent-row refresh plus the INSERT — no SELECT
    # of the addresses collection
    self.assert_sql_execution(
        testing.db,
        sess.flush,
        CompiledSQL(
            "SELECT users.id AS users_id, users.name AS users_name "
            "FROM users WHERE users.id = :param_1",
            lambda ctx: [{"param_1": u1_id}]),
        CompiledSQL(
            "INSERT INTO addresses (user_id, email_address) "
            "VALUES (:user_id, :email_address)",
            lambda ctx: [{'email_address': 'a2', 'user_id': u1_id}]
        )
    )
def test_dirty_state_transferred_deep_nesting(self):
    """Dirty state recorded in an inner nested (SAVEPOINT) transaction
    propagates outward on commit and is restored on rollback."""
    User, users = self.classes.User, self.tables.users
    mapper(User, users)
    s = Session(testing.db)
    u1 = User(name='u1')
    s.add(u1)
    s.commit()
    nt1 = s.begin_nested()
    nt2 = s.begin_nested()
    u1.name = 'u2'
    # not recorded in either savepoint until flushed
    assert attributes.instance_state(u1) not in nt2._dirty
    assert attributes.instance_state(u1) not in nt1._dirty
    s.flush()
    # flush records it against the innermost savepoint only
    assert attributes.instance_state(u1) in nt2._dirty
    assert attributes.instance_state(u1) not in nt1._dirty
    s.commit()
    # committing the inner savepoint transfers the dirty state outward
    assert attributes.instance_state(u1) in nt2._dirty
    assert attributes.instance_state(u1) in nt1._dirty
    s.rollback()
    # rolling back the outer savepoint expires and restores 'u1'
    assert attributes.instance_state(u1).expired
    eq_(u1.name, 'u1')
def test_09_pickle(self):
    """An InstanceState pickled under the 0.9 format ('callables' dict
    plus an 'expired' flag) restores into the modern
    expired_attributes representation.

    NOTE(review): appears to duplicate an earlier single-quoted copy of
    this test in the file — confirm and deduplicate.
    """
    users = self.tables.users
    mapper(User, users)
    sess = Session()
    sess.add(User(id=1, name="ed"))
    sess.commit()
    sess.close()
    inst = User(id=1, name="ed")
    del inst._sa_instance_state
    state = sa_state.InstanceState.__new__(sa_state.InstanceState)
    # hand-built 0.9-era __getstate__ payload
    state_09 = {
        "class_": User,
        "modified": False,
        "committed_state": {},
        "instance": inst,
        "callables": {"name": state, "id": state},
        "key": (User, (1,)),
        "expired": True,
    }
    manager = instrumentation._SerializeManager.__new__(
        instrumentation._SerializeManager
    )
    manager.class_ = User
    state_09["manager"] = manager
    state.__setstate__(state_09)
    eq_(state.expired_attributes, {"name", "id"})
    sess = Session()
    sess.add(inst)
    eq_(inst.name, "ed")
    # test identity_token expansion
    eq_(sa.inspect(inst).key, (User, (1,), None))
def test_11_pickle(self):
    """An InstanceState pickled under the 1.1 format restores with a
    None identity_token expanded into the identity key."""
    users = self.tables.users
    mapper(User, users)
    sess = Session()
    u1 = User(id=1, name="ed")
    sess.add(u1)
    sess.commit()
    sess.close()
    manager = instrumentation._SerializeManager.__new__(
        instrumentation._SerializeManager
    )
    manager.class_ = User
    # hand-built 1.1-era __getstate__ payload
    state_11 = {
        "class_": User,
        "modified": False,
        "committed_state": {},
        "instance": u1,
        "manager": manager,
        "key": (User, (1,)),
        "expired_attributes": set(),
        "expired": True,
    }
    state = sa_state.InstanceState.__new__(sa_state.InstanceState)
    state.__setstate__(state_11)
    eq_(state.identity_token, None)
    eq_(state.identity_key, (User, (1,), None))
def test_warning_on_using_inactive_session_rollback_evt(self):
    """State changes made inside an after_rollback handler on a
    non-active transaction are discarded, with a warning."""
    users, User = self.tables.users, self.classes.User
    mapper(User, users)
    sess = Session()
    u1 = User(id=1, name='u1')
    sess.add(u1)
    sess.commit()
    u3 = User(name='u3')

    @event.listens_for(sess, "after_rollback")
    def evt(s):
        # added while the transaction is being torn down; must be dropped
        sess.add(u3)

    sess.add(User(id=1, name='u2'))  # duplicate PK forces a flush failure

    def go():
        assert_raises(
            orm_exc.FlushError, sess.flush
        )
    assert_warnings(go,
                    ["Session's state has been changed on a "
                     "non-active transaction - this state "
                     "will be discarded."],
                    )
    assert u3 not in sess
def _persist(self, json_str):
    """Parse *json_str* and store a PixelEvent row.

    Falls back to account/customer id 0 when the JSON values are
    missing or not integers.  No-op when json_str is None.  The
    session is always closed, even on failure.
    """
    logger = logging.getLogger(__name__)
    if json_str is None:
        return
    engine = create_engine(DB_CONN)
    Session = sessionmaker(bind=engine)
    session = Session()
    try:
        json_obj = json.loads(json_str)
        # dummy id's if none provided. TODO FIX AFTER PROTOTYPE
        # getting an element from json object returns a 'list', not a
        # single element.  .get() avoids a KeyError on an absent key
        # (previously json_obj['accountId'] would raise).
        acct_id = json_obj.get('accountId')
        if acct_id is None or not self.represents_int(acct_id[0]):
            # typo fix: was "Seeting"
            logger.debug("PixelEventLogger._persist: account Id provided was invalid. Setting to default value 0")
            account_id = 0
        else:
            account_id = acct_id[0]
        cust_id = json_obj.get('customerId')
        if cust_id is None or not self.represents_int(cust_id[0]):
            logger.debug("PixelEventLogger._persist: customer Id provided was invalid. Setting to default value 0")
            customer_id = 0
        else:
            customer_id = cust_id[0]
        logger.debug("PixelEventLogger._persist: Saving pixel event..." + json_str)
        pixevent = PixelEvent(account_id=account_id, customer_id=customer_id, doc=json_obj)
        session.add(pixevent)
        session.commit()
    finally:
        session.close()  # previously leaked on every call
def test_11_pickle(self):
    """An InstanceState pickled under the 1.1 format restores with a
    None identity_token expanded into the identity key.

    NOTE(review): appears to duplicate an earlier double-quoted copy of
    this test in the file — confirm and deduplicate.
    """
    users = self.tables.users
    mapper(User, users)
    sess = Session()
    u1 = User(id=1, name='ed')
    sess.add(u1)
    sess.commit()
    sess.close()
    manager = instrumentation._SerializeManager.__new__(
        instrumentation._SerializeManager)
    manager.class_ = User
    # hand-built 1.1-era __getstate__ payload
    state_11 = {
        'class_': User,
        'modified': False,
        'committed_state': {},
        'instance': u1,
        'manager': manager,
        'key': (User, (1,)),
        'expired_attributes': set(),
        'expired': True}
    state = sa_state.InstanceState.__new__(sa_state.InstanceState)
    state.__setstate__(state_11)
    eq_(state.identity_token, None)
    eq_(state.identity_key, (User, (1,), None))
def create_сompetence(db: Session, сompetence: schemas.СompetenceCreate):
    """Insert a Competence row from the create-schema and return it.

    WARNING: the function name, parameter name, and schema class name
    contain the Cyrillic letters 'с'/'С' (U+0441/U+0421), not Latin
    'c'/'C'.  They are kept byte-identical here because callers
    reference these exact identifiers, but they should be renamed
    project-wide to Latin spellings.
    """
    # internal local renamed to the Latin spelling
    db_competence = models.Competence(**сompetence.dict())
    db.add(db_competence)
    db.commit()
    db.refresh(db_competence)
    return db_competence
def create(request: schemas.User, db: Session):
    """Persist a new User (with bcrypt-hashed password) and return it."""
    new_user = models.User(
        name=request.name,
        email=request.email,
        password=Hash.bcrypt(request.password),
    )
    db.add(new_user)
    db.commit()
    db.refresh(new_user)
    return new_user
def admin_authorize(config, url):
    """Grant a local user CLI access.

    Loads the Joule configuration at *config*, ensures the database
    schemas/tables exist, creates (or reuses) a USER-type master entry
    for the invoking OS user, derives the node URL (explicit *url*
    argument, proxy default, or scheme/host/port from the config), and
    saves a local node entry granting access.
    """
    # expensive imports so only execute if the function is called
    from joule.services import load_config
    from joule.models import (Base, master)
    from sqlalchemy import create_engine
    from sqlalchemy.orm import Session
    parser = configparser.ConfigParser()
    # load the Joule configuration file
    try:
        with open(config, 'r') as f:
            parser.read_file(f, config)
        config = load_config.run(custom_values=parser)
    except FileNotFoundError:
        raise click.ClickException(
            "Cannot load joule configuration file at [%s]" % config)
    except PermissionError:
        raise click.ClickException(
            "Cannot read joule configuration file at [%s] (run as root)" % config)
    except errors.ConfigurationError as e:
        raise click.ClickException("Invalid configuration: %s" % e)
    # create a connection to the database
    engine = create_engine(config.database)
    with engine.connect() as conn:
        conn.execute('CREATE SCHEMA IF NOT EXISTS data')
        conn.execute('CREATE SCHEMA IF NOT EXISTS metadata')
    Base.metadata.create_all(engine)
    db = Session(bind=engine)
    # resolve the invoking user, preferring the sudo caller
    if 'SUDO_USER' in os.environ:
        username = os.environ["SUDO_USER"]
    else:
        username = os.environ["LOGNAME"]
    # NOTE(review): `nodes` is unused below; the call presumably acts as
    # a validity check on the local node store — confirm
    try:
        nodes = api.get_nodes()
    except ValueError as e:
        raise click.ClickException(str(e))
    # check if this name is associated with a master entry
    my_master = db.query(master.Master). \
        filter(master.Master.type == master.Master.TYPE.USER). \
        filter(master.Master.name == username).first()
    if my_master is None:
        # create a new master entry
        my_master = master.Master()
        my_master.key = master.make_key()
        my_master.type = master.Master.TYPE.USER
        my_master.name = username
        db.add(my_master)
    # if a url is specified use it
    if url is not None:
        joule_url = url
    # if the Joule server is not hosting a TCP server use the
    # default proxy address
    elif config.ip_address is None:
        joule_url = "https://localhost/joule"
    # otherwise use the the server information in the config file
    else:
        if config.security is not None and config.security.cafile != "":
            addr = config.name
        elif config.ip_address != "0.0.0.0":
            addr = config.ip_address
        else:
            addr = "127.0.0.1"
        if config.security is None:
            scheme = "http"
        else:
            scheme = "https"
        joule_url = "%s://%s:%d" % (scheme, addr, config.port)
    my_node = api.create_tcp_node(joule_url, my_master.key, config.name)
    api.save_node(my_node)
    db.commit()
    db.close()
    click.echo("Access to node [%s] granted to user [%s]" % (config.name, username))
def insert_data(cls):
    """Seed one A holding two Bs, each of which holds a single C."""
    A, B, C = cls.classes('A', 'B', 'C')
    session = Session()
    root = A(bs=[B(cs=[C()]), B(cs=[C()])])
    session.add(root)
    session.commit()
async def send_message(
    chat_id: int,
    message: Message,
    pool: Pool = Depends(get_pool),
    current_user: UserResponse = Depends(get_current_verified_user),
    db: Session = Depends(get_db),
):
    """Send a message to a chat.

    DEMO connectors store the message locally only; real connectors
    forward it through the connector pool and persist the echoed
    message id.  Raises 404 when the chat does not belong to the
    current user.
    """
    chat = crud.get_chat(db, current_user.user_id, chat_id)
    if not chat:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail=f"The chat with id {chat_id} does not exist.",
        )
    # NOTE(review): assumes every chat has at least one contact — confirm
    connector_id = chat.contacts[0].contact.connector_id
    connector = crud.get_connector(db, current_user.user_id, connector_id)
    if connector.connector_type == "DEMO":
        # demo connectors: store locally, nothing is actually sent
        new_message = models.Message(
            chat_id=chat_id,
            contact_id=connector.connector_user_id,
            message=json.dumps(message.message.__dict__),
            sent_datetime=datetime.now(),
        )
        db.add(new_message)  # add is ok here
        db.commit()
    else:
        # forward through the connector service
        sent = await pool.get(
            f"{connector.connector_type}/{connector.connector_id}/{str(uuid.uuid4())}/send_message",
            {
                "chat_id": chat.internal_id,
                "message": message.message.__dict__
            },
        )
        # This should never be null
        contact = db.query(models.Contact).filter(
            models.Contact.connector_id == connector_id,
            models.Contact.is_self).first()
        new_message = models.Message(
            message_id=crud.get_message_id(connector.connector_id,
                                           sent.get("message_id"), chat_id),
            internal_id=sent.get("message_id"),
            chat_id=chat_id,
            contact_id=contact.contact_id,
            message=json.dumps(message.message.__dict__),
            sent_datetime=datetime.now(),
        )
        try:
            db.merge(new_message)
            db.commit()
        except sqlalchemy.exc.IntegrityError:
            # happens when the service sends the message already back to
            # the server via push api
            pass
    return MessageResponse(
        message_id=new_message.message_id,
        contact_id=new_message.contact_id,
        sent_datetime=new_message.sent_datetime,
        message=message.message.__dict__,
    )
def add_userinproject(db: Session, userinproject: schemas.UserInProject):
    """Create and return a UserInProject membership row."""
    row = models.UserInProject(**userinproject.dict())
    db.add(row)
    db.commit()
    db.refresh(row)
    return row
def create_user(db: Session, user: schemas.UserCreate):
    """Insert a User from the create-schema and return the refreshed row."""
    record = models.User(**user.dict())
    db.add(record)
    db.commit()
    db.refresh(record)
    return record
def create_project(db: Session, project: schemas.ProjectCreate):
    """Insert a Project from the create-schema and return the refreshed row."""
    record = models.Project(**project.dict())
    db.add(record)
    db.commit()
    db.refresh(record)
    return record
# NOTE(review): this chunk starts mid-script — `query`, `session`,
# `User` and `Address` are defined earlier in the file.
print(query.all())
"""
+-------------------------------------------------------------------------+
| 3. Array indexes will OFFSET to that index and limit by one
+-------------------------------------------------------------------------+
"""
result = session.query(User).order_by(User.id)[1]
print(result)
"""
+-------------------------------------------------------------------------+
| 3. Demonstrating relationship user to many addresses
+-------------------------------------------------------------------------+
"""
jack = User(name="jack", full_name="Jack Bean")
session.add(jack)
# jack.id is still None here (not yet flushed); the relationship
# assignment below wires the FK via the ORM regardless
jack.addresses = [
    Address(email_address="*****@*****.**", user_id=jack.id),
    Address(email_address="*****@*****.**", user_id=jack.id),
    Address(email_address="*****@*****.**", user_id=jack.id),
]
print(jack.addresses)
# backref: each address points back at its owner
assert jack.addresses[0].user == jack
session.commit()  # addresses also get committed
"""
+-------------------------------------------------------------------------+
| 4. Changing a relationship owner
+-------------------------------------------------------------------------+
"""
def create_vacancy(db: Session, vacancy: schemas.VacancyCreate):
    """Insert a Vacancy from the create-schema and return the refreshed row."""
    record = models.Vacancy(**vacancy.dict())
    db.add(record)
    db.commit()
    db.refresh(record)
    return record
def after_pay(db: Session, order: Order):
    """Mark *order* as paid and persist the change.

    Returns the refreshed order.  (Previously returned None even though
    the row was refreshed; existing callers that ignore the return
    value are unaffected.)
    """
    order.paid = True
    db.add(order)
    db.commit()
    db.refresh(order)
    return order
def create_blog(request: schemas.Blog, db: Session = Depends(get_db)):
    """Create a Blog row from the request payload and return it.

    NOTE(review): user_id is hard-coded to 1 — presumably a placeholder
    until auth wiring lands; confirm before production use.
    """
    blog = models.Blog(title=request.title, body=request.body, user_id=1)
    db.add(blog)
    db.commit()
    db.refresh(blog)
    return blog
Base = declarative_base()


class Sensors(Base):
    """ORM model for one BME sensor reading (BME_DATA table)."""
    __tablename__ = 'BME_DATA'
    id = Column(Integer, primary_key=True)
    TIME_STAMP = Column(DATETIME)
    TEMPERATURE = Column(NUMERIC)
    GAS = Column(NUMERIC)
    HUMIDITY = Column(NUMERIC)
    PRESSURE = Column(NUMERIC)
    ALTITUDE = Column(NUMERIC)


# NOTE(review): create_all is passed `conn` while the session binds to
# `engine` (both defined earlier in the file) — confirm they refer to
# the same database.
Base.metadata.create_all(conn)
# Creating dummy data set for test
# ----------------------------------
now = datetime.datetime.now()
test = Sensors(TIME_STAMP=(now), TEMPERATURE="75", GAS='100', HUMIDITY='50',
               PRESSURE='1000', ALTITUDE='1000')
# Use session object to communicate with the db
session = Session(bind=engine)
# Add test to the current session
session.add(test)
# Commit test to the database
session.commit()
def task_create(db: Session, task: schemas.TaskCreate):
    """Insert a Task (name only) and return the refreshed instance."""
    record = models.Task(name=task.name)
    db.add(record)
    db.commit()
    db.refresh(record)
    return record
def insert_data(cls, connection):
    """Seed a single Person row (id=3, 'jill') via the given connection."""
    session = Session(connection)
    session.add(cls.classes.Person(id=3, first_name="jill"))
    session.commit()
def add(self, session: Session):
    """Stage this instance in *session* and commit immediately."""
    session.add(self)
    session.commit()
class TestSerialize(unittest.TestCase):
    """Tests for the model `to_dict` serializer (exclude, nesting,
    hybrid attributes).

    NOTE(review): setUp stores the masked password '******' while the
    nested expectations use 'pass1' — the literals look redacted in
    this copy of the file; confirm against the canonical source.
    """

    @classmethod
    def setUpClass(cls):
        # one shared in-memory SQLite engine for the whole class
        cls.engine = create_engine('sqlite:///:memory:', echo=False)

    def setUp(self):
        # fresh schema plus fixture rows before every test
        self.session = Session(self.engine)
        Base.metadata.create_all(self.engine)
        user_1 = User(name='Bill u1', id=1, password='******')
        self.session.add(user_1)
        self.session.commit()
        user_2 = User(name='Alex u2', id=2, password='******')
        self.session.add(user_2)
        self.session.commit()
        post_11 = Post(
            id=11,
            body='Post 11 body.',
            archived=True
        )
        post_11.user = user_1
        self.session.add(post_11)
        self.session.commit()
        comment_11 = Comment(
            id=11,
            body='Comment 11 body',
            user=user_1,
            post=post_11,
            rating=1
        )
        self.session.add(comment_11)
        self.session.commit()

    def tearDown(self):
        Base.metadata.drop_all(self.engine)

    def test_serialize_single(self):
        # excluded keys are dropped from the flat dict
        result = self.session.query(User)\
            .first()\
            .to_dict(exclude=['password'])
        expected = {
            'id': 1,
            'name': 'Bill u1'
        }
        self.assertDictEqual(result, expected)

    def test_serialize_list(self):
        result = [user.to_dict(exclude=['password'])
                  for user in self.session.query(User).all()]
        expected = [
            {
                'id': 1,
                'name': 'Bill u1'
            },
            {
                'id': 2,
                'name': 'Alex u2'
            },
        ]
        self.assertListEqual(expected, result)

    def test_serialize_nested(self):
        # nested=True expands the related user and comments
        result = self.session.query(Post).first().to_dict(nested=True)
        expected = {
            'id': 11,
            'body': 'Post 11 body.',
            'archived': True,
            'user_id': 1,
            'user': {
                'id': 1,
                'name': 'Bill u1',
                'password': 'pass1'
            },
            'comments': [
                {
                    'id': 11,
                    'body': 'Comment 11 body',
                    'user_id': 1,
                    'post_id': 11,
                    'rating': 1,
                }
            ]
        }
        self.assertDictEqual(result, expected)

    def test_serialize_single__with_hybrid(self):
        # hybrid_attributes=True includes computed properties
        result = self.session.query(User)\
            .first()\
            .to_dict(hybrid_attributes=True, exclude=['password'])
        expected = {
            'id': 1,
            'name': 'Bill u1',
            'posts_count': 1
        }
        self.assertDictEqual(result, expected)

    def test_serialize_nested__with_hybrid(self):
        result = self.session.query(Post).first().to_dict(nested=True, hybrid_attributes=True)
        expected = {
            'id': 11,
            'body': 'Post 11 body.',
            'archived': True,
            'user_id': 1,
            'user': {
                'id': 1,
                'name': 'Bill u1',
                'posts_count': 1,
                'password': 'pass1'
            },
            'comments': [
                {
                    'id': 11,
                    'body': 'Comment 11 body',
                    'user_id': 1,
                    'post_id': 11,
                    'rating': 1,
                }
            ]
        }
        self.assertDictEqual(result, expected)
def create_sku(sku: schemas.SKURequestBody, db: Session):
    """Insert a SKU from the request body and return the refreshed row."""
    record = models.SKU(**sku.dict())
    db.add(record)
    db.commit()
    db.refresh(record)
    return record
def update_info(db_session: Session, *, farm: Farm, info: FarmInfo):
    """Replace *farm*'s info payload, persist, and return the refreshed row."""
    # direct attribute assignment instead of setattr() with a literal name
    farm.info = info
    db_session.add(farm)
    db_session.commit()
    db_session.refresh(farm)
    return farm
def set(
    cls,
    key: str,
    value: Any,
    task_id: str,
    dag_id: str,
    execution_date: Optional[datetime.datetime] = None,
    session: Session = NEW_SESSION,
    *,
    run_id: Optional[str] = None,
    map_index: int = -1,
) -> None:
    """:sphinx-autoapi-skip:

    Store an XCom value for the given task/DAG run, replacing any
    existing entry with the same (key, run_id, task_id, dag_id).
    Exactly one of *execution_date* (deprecated) or *run_id* must be
    provided; the former is resolved to a run_id via a DagRun lookup.
    """
    from airflow.models.dagrun import DagRun

    if not exactly_one(execution_date is not None, run_id is not None):
        raise ValueError("Exactly one of run_id or execution_date must be passed")

    if run_id is None:
        # deprecated path: resolve execution_date -> (dag_run_id, run_id)
        message = "Passing 'execution_date' to 'XCom.set()' is deprecated. Use 'run_id' instead."
        warnings.warn(message, DeprecationWarning, stacklevel=3)
        try:
            dag_run_id, run_id = (
                session.query(DagRun.id, DagRun.run_id)
                .filter(DagRun.dag_id == dag_id, DagRun.execution_date == execution_date)
                .one()
            )
        except NoResultFound:
            raise ValueError(f"DAG run not found on DAG {dag_id!r} at {execution_date}") from None
    elif run_id == IN_MEMORY_RUN_ID:
        # sentinel run that has no DagRun row
        dag_run_id = -1
    else:
        dag_run_id = session.query(DagRun.id).filter_by(dag_id=dag_id, run_id=run_id).scalar()
        if dag_run_id is None:
            raise ValueError(f"DAG run not found on DAG {dag_id!r} with ID {run_id!r}")

    value = cls.serialize_value(
        value=value,
        key=key,
        task_id=task_id,
        dag_id=dag_id,
        run_id=run_id,
        map_index=map_index,
    )

    # Remove duplicate XComs and insert a new one.
    session.query(cls).filter(
        cls.key == key,
        cls.run_id == run_id,
        cls.task_id == task_id,
        cls.dag_id == dag_id,
    ).delete()
    new = cast(Any, cls)(  # Work around Mypy complaining model not defining '__init__'.
        dag_run_id=dag_run_id,
        key=key,
        value=value,
        run_id=run_id,
        task_id=task_id,
        dag_id=dag_id,
    )
    session.add(new)
    session.flush()
def createUser(request: schemas.Blog, db: Session):
    """Persist a new User (with hashed password) and return it."""
    record = models.User(
        username=request.username,
        email=request.email,
        password=Hash.passwordHash(request.password),
    )
    db.add(record)
    db.commit()
    db.refresh(record)
    return record
def create(db: Session, request: schemas.BlogBase):
    """Insert a Blog (title, body) and return the refreshed instance."""
    blog = models.Blog(title=request.title, body=request.body)
    db.add(blog)
    db.commit()
    db.refresh(blog)
    return blog
def test_coerce_none(self):
    """A None value round-trips through commit as None (no coercion)."""
    instance = Foo(data=None)
    session = Session()
    session.add(instance)
    session.commit()
    eq_(instance.data, None)
def add(self, sess: Session, **kwargs) -> Entity:
    """Create a new entity, register it with *sess*, apply **kwargs via
    set_attribute, and return the entity."""
    new_entity = self.entity_cls()
    sess.add(new_entity)
    self.set_attribute(sess, new_entity, False, **kwargs)
    return new_entity
def create_location(db: Session, location: schemas.LocationBase):
    """Insert a Location (address only) and return the refreshed row."""
    record = models.Location(address=location.address)
    db.add(record)
    db.commit()
    db.refresh(record)
    return record
async def create_user(db: Session, item: schema.User):
    """Insert a User from the schema payload and return the refreshed row.

    NOTE: the db calls are synchronous; async is kept only to preserve
    the existing interface.
    """
    record = db_model.User(**item.dict())
    db.add(record)
    db.commit()
    db.refresh(record)
    return record
class SqlAlchemyUpdateTest(TestCase):
    """UPDATE statement generation against the Crate/draco dialect,
    using a faked DB-API cursor to capture the SQL and parameters."""

    def setUp(self):
        self.engine = sa.create_engine('dracodb://')
        self.base = declarative_base(bind=self.engine)

        class Character(self.base):
            __tablename__ = 'characters'
            name = sa.Column(sa.String, primary_key=True)
            age = sa.Column(sa.Integer)
            obj = sa.Column(Object)
            # timestamp refreshed automatically on every UPDATE
            ts = sa.Column(sa.DateTime, onupdate=datetime.utcnow)

        self.character = Character
        self.session = Session()

    @patch('crate.client.connection.Cursor', FakeCursor)
    def test_onupdate_is_triggered(self):
        char = self.character(name='Arthur')
        self.session.add(char)
        self.session.commit()
        now = datetime.utcnow()
        # fake the SELECT issued to refresh the expired instance
        fake_cursor.fetchall.return_value = [('Arthur', None)]
        fake_cursor.description = (
            ('characters_name', None, None, None, None, None, None),
            ('characters_ts', None, None, None, None, None, None),
        )
        char.age = 40
        self.session.commit()
        expected_stmt = ("UPDATE characters SET age = ?, "
                         "ts = ? WHERE characters.name = ?")
        args, kwargs = fake_cursor.execute.call_args
        stmt = args[0]
        args = args[1]
        self.assertEqual(expected_stmt, stmt)
        self.assertEqual(40, args[0])
        # the onupdate timestamp must parse and be newer than `now`
        dt = datetime.strptime(args[1], '%Y-%m-%dT%H:%M:%S.%fZ')
        self.assertTrue(isinstance(dt, datetime))
        self.assertTrue(dt > now)
        self.assertEqual('Arthur', args[2])

    @patch('crate.client.connection.Cursor', FakeCursor)
    def test_bulk_update(self):
        """
        Checks whether bulk updates work correctly
        on native types and Crate types.
        """
        before_update_time = datetime.utcnow()
        self.session.query(self.character).update({
            # change everyone's name to Julia
            self.character.name: 'Julia',
            self.character.obj: {
                'favorite_book': 'Romeo & Juliet'
            }
        })
        self.session.commit()
        expected_stmt = ("UPDATE characters SET "
                         "name = ?, obj = ?, ts = ?")
        args, kwargs = fake_cursor.execute.call_args
        stmt = args[0]
        args = args[1]
        self.assertEqual(expected_stmt, stmt)
        self.assertEqual('Julia', args[0])
        self.assertEqual({'favorite_book': 'Romeo & Juliet'}, args[1])
        # the onupdate timestamp is included in the bulk UPDATE too
        dt = datetime.strptime(args[2], '%Y-%m-%dT%H:%M:%S.%fZ')
        self.assertTrue(isinstance(dt, datetime))
        self.assertTrue(dt > before_update_time)
def create_brewer(db: Session, brewer: schemas.BrewerCreate):
    """Insert a Brewer (name only) and return the refreshed instance."""
    record = models.Brewer(name=brewer.name)
    db.add(record)
    db.commit()
    db.refresh(record)
    return record
#!/usr/bin/python3
""" task 11 """
from sys import argv
from sqlalchemy import create_engine
from model_state import State, Base
from sqlalchemy.orm import Session

if __name__ == "__main__":
    """ func """
    # connection URL built from CLI args: user, password, database
    db_url = 'mysql+mysqldb://{}:{}@localhost/{}'.format(
        argv[1], argv[2], argv[3])
    engine = create_engine(db_url, pool_pre_ping=True)
    Base.metadata.create_all(engine)
    session = Session(engine)
    louisiana = State(name="Louisiana")
    session.add(louisiana)
    session.commit()
    # id is populated by the flush performed during commit
    print("{}".format(louisiana.id))
    session.close()
def create_user(db: Session, user: schemas.UserBase):
    """Insert a User (phone, name) and return the refreshed instance."""
    record = models.User(phone=user.phone, name=user.name)
    db.add(record)
    db.commit()
    db.refresh(record)
    return record