def _get_instances(task, config: ConfigHolder, session: sqlalchemy.orm.Session):
    # Return the graph/job instances selected by `config`: either via an
    # interactive, hand-written query (manual mode) or by composing filters
    # from the config fields.
    if config.manual_query:
        query = None
        print(
            "Manual query chosen. Please fill a query. After finishing the query just end ipython.\n\
Query result must be of type Graph or TaskJobs!")
        # Drop into IPython; the user is expected to assign `query` there.
        embed()
        assert query is not None, "query must be filled!"
        # Persist the manual statement so the run can be reproduced later.
        # NOTE(review): SQLAlchemy's `Query.statement` is a property, not a
        # method — `query.statement()` looks like it calls the statement
        # object; confirm this is intended.
        session.add(
            Config(task=task, value=query.statement(), param="statement"))
        return query.all()
    # Base query: all graphs, or the jobs of one task joined to their graphs.
    if config.task_id is None:
        query = session.query(Graph)
    else:
        query = session.query(TaskJobs).join(Graph).filter(
            TaskJobs.task_id == config.task_id)
    # Optional bounds on vertex count (n) and edge count (m).
    if config.min_n is not None:
        query = query.filter(Graph.vert_amount >= config.min_n)
    if config.max_n is not None:
        query = query.filter(Graph.vert_amount <= config.max_n)
    if config.min_m is not None:
        query = query.filter(Graph.edge_amount >= config.min_m)
    if config.max_m is not None:
        query = query.filter(Graph.edge_amount <= config.max_m)
    # Optional restriction to specific instance types.
    if config.instance_types:
        query = query.filter(Graph.i_type.in_(config.instance_types))
    # Cap the number of results if requested; slicing executes the query.
    if config.max_amount is not None:
        query = query[:config.max_amount]
    return query[:]
def test_relationship(ssn: sa.orm.Session):
    """ Test getting historical relationship values through an InstanceHistoryProxy """
    # Fixtures: two users, one article authored by the first
    ssn.add(User(id=1, name='John', age=18))
    ssn.add(User(id=2, name='Jack', age=18))
    ssn.add(Article(id=1, title='Python', author_id=1))
    ssn.commit()

    # Load both users
    john = ssn.query(User).get(1)
    jack = ssn.query(User).get(2)

    # Load the article and snapshot its history
    article: Article = ssn.query(Article).get(1)
    old_article: Article = InstanceHistoryProxy(article)  # noqa

    # Trigger the relationship load, then confirm history sees it too
    assert article.author == john
    assert old_article.author == john

    # Reassign the relationship; the snapshot must keep the old author
    article.author = jack
    assert old_article.author == john

    # The snapshot must survive a flush as well
    ssn.flush()
    assert old_article.author == john
async def feed(limit: Optional[int] = Query(15),
               page: Optional[int] = Query(1),
               current_user: User = Depends(get_current_user),
               db: sa.orm.Session = Depends(get_db)):
    """
    Get user feed

    :param limit: records on page limit
    :param page: page number
    :param current_user: Current authenticated user
    :param db: Session instance
    :return: Posts related to tags user subscribed
    """
    # Materialize the page of posts ONCE. The original left `posts` as a lazy
    # Query object and iterated it twice (id-collection and the result loop),
    # executing the same SQL statement twice.
    posts = db.query(Post)\
        .join(PostTag, PostTag.post_id == Post.id)\
        .join(UserTag, UserTag.tag_id == PostTag.tag_id)\
        .filter(
            UserTag.user_id == current_user.id
        ) \
        .order_by(sa.desc(PostTag.created_at)) \
        .limit(limit)\
        .offset(limit * (page - 1))\
        .all()
    # One query for all tags of the fetched posts (avoids per-post queries)
    tags = db.query(
        PostTag.tag_id,
        PostTag.post_id,
        Tag.title
    )\
        .join(Tag, PostTag.tag_id == Tag.id)\
        .filter(PostTag.post_id.in_([p.id for p in posts])).all()
    result = []
    for post in posts:
        result.append({
            'id': post.id,
            'title': post.title,
            'preview_text': post.preview_text,
            'text': post.text,
            'cover': post.cover,
            'created_at': post.created_at,
            'updated_at': post.updated_at,
            'author': AuthorModel(first_name=post.author.first_name,
                                  last_name=post.author.last_name),
            # Attach only the tags belonging to this post
            'tags': [{
                'id': t[0],
                'title': t[2]
            } for t in tags if t.post_id == post.id]
        })
    return result
def get_by_id(
    asset_id: int = Path(..., ge=1),
    session: sa.orm.Session = sess,
):
    """Fetch a single Asset by primary key; returns None when it does not exist."""
    return (
        session.query(db.models.Asset)
        .filter(db.models.Asset.id == asset_id)
        .first()
    )
def get_by_id(
    fund_id: int = Path(..., ge=1),
    session: sa.orm.Session = sess,
):
    """Fetch a single Fund by primary key; returns None when it does not exist."""
    return (
        session.query(db.models.Fund)
        .filter(db.models.Fund.id == fund_id)
        .first()
    )
def qy_post_or_page_by_id(session: sa.orm.Session, pk: str, auth=False):
    """Look up one blog entry by numeric id or alias.

    Unauthenticated callers (auth=False) only see public entries.
    Returns None when nothing matches.
    """
    query = session.query(Blog).filter(
        sa.or_(Blog._id == pk, Blog.alias == pk))
    if not auth:
        # Hide non-public entries from anonymous visitors
        query = query.filter(Blog.status == Blog.eMeta.status_of_public)
    return query.one_or_none()
def get_tweet_by_id(tweet_id: int, session: sqlalchemy.orm.Session):
    """Return the Tweet with the given id, or None when no such row exists.

    :type tweet_id: int
    :param tweet_id:
    :type session: sqlalchemy.orm.Session
    """
    matching = session.query(Tweet).filter(Tweet.tweetID == tweet_id)
    return matching.first()
def process_triggers(session: sqlalchemy.orm.Session,
                     characters: List[Character], area: models.Area):
    """
    Find triggers for an area, print them, and potentially resolve them.
    """
    # Unresolved triggers for this area whose requirements fit the party:
    # every character meets the minimum level, and the trigger's party_size
    # bound is at least the party's size.
    triggers = session.query(models.Trigger).filter(
        (~models.Trigger.resolved)
        & (models.Trigger.area_id == area.id)
        & (models.Trigger.party_lvl_min <= min(c.level for c in characters))
        & (models.Trigger.party_size >= len(characters))
    ).all()
    events = [t.event for t in triggers]
    for event in events:
        print('Event: {}'.format(event.name))
        print(event.description)
        # Re-prompt until the user gives a well-formed yes/no answer.
        while 1:
            try:
                resolved_bool = prompt_bool('Resolved ?', True)
            except MalformedBoolException:
                continue
            break
        if resolved_bool:
            # Mark every trigger attached to this event as resolved.
            for t in event.triggers:
                t.resolved = True
    # TODO(jalex): Does this commit the triggers attached to the events?
    session.add_all(events)
    session.commit()
def get_user_by_id(user_id: int, session: sqlalchemy.orm.Session):
    """Return the User with the given id, or None when no such user exists.

    :type user_id: int
    :param user_id:
    :type session: sqlalchemy.orm.Session
    """
    matching = session.query(User).filter(User.userID == user_id)
    return matching.first()
def _get_pokemon(self, session: sa.orm.Session, pokemon_id):
    """Load a Pokemon row by primary key, raising PokemonNotInDbError when absent."""
    found = session.query(models.Pokemon).get(pokemon_id)
    if found:
        return found
    raise PokemonNotInDbError(
        f"No Pokemon with id={pokemon_id!r} exists in our database.\n"
        f"Did you remember to import it first using POST /pokemon endpoint?"
    )
def get_by_name(name: str, session: sa.orm.Session = sess):
    """Fetch an Account by its unique name.

    :raises HTTPException: 404 when no account with that name exists.
    """
    acc = (
        session.query(models.Account)
        .filter(models.Account.name == name)
        .first()
    )
    if acc is None:
        # Fixed grammar in the user-facing message ("doesn't exists" -> "doesn't exist")
        msg = f"Account {name} doesn't exist"
        raise HTTPException(status_code=404, detail=msg)
    return acc
def init_characters(session: sqlalchemy.orm.Session) -> CharacterGroups:
    """
    Initialize the party's characters.

    Returns:
        Character groups.
    """
    characters = []
    # Prompt for character names until the user enters a blank line.
    while 1:
        name = input('Character name: ')
        if not name:
            break
        chars = session.query(
            models.Character).filter(models.Character.name == name).all()
        if not chars:
            # Unknown name: offer to create a brand-new character.
            try:
                new_char = prompt_bool('New character?', False)
            except MalformedBoolException:
                continue
            if not new_char:
                continue
            player_name = input('Player name: ')
            char = models.Character(name=name, player_name=player_name)
        else:
            # Several characters may share a name; let the user pick one.
            try:
                idx = prompt_choices(
                    chars, lambda i, char: ' {} - {} ({})'.format(
                        i, char.name, char.player_name))
            except MalformedInputException:
                continue
            char = chars[idx]
        # Prompt for level, defaulting to the stored level (or 1 for new chars).
        default_lvl = char.level or 1
        try:
            char.level = int(
                input('Level: [{}] '.format(default_lvl)) or default_lvl)
        except ValueError:
            # Bad input: the character is NOT added; re-prompt from the top.
            print('Supply integer level.')
            continue
        characters.append(char)
    character_groups = init_locations(session, characters)
    session.add_all(x for sl in character_groups for x in sl)
    session.commit()
    return character_groups
def update_asset(
    asset_id: int,
    asset_update: asset_create_class,
    session: sa.orm.Session = sess,
):
    """Overwrite every field of an existing Asset with the submitted values."""
    stored = session.query(db.models.Asset).get(asset_id)
    for name, val in asset_update.dict().items():
        setattr(stored, name, val)
    db.main.try_to_commit(session)
    # Reload so server-side defaults/triggers are reflected in the response
    session.refresh(stored)
    return stored
def get_releases_from_db(db_session: sqlalchemy.orm.Session,
                         breakpoint_version: int) -> typing.List[shipit_api.common.models.Release]:
    """
    SELECT *
    FROM shipit_api_releases as r
    WHERE cast(split_part(r.version, '.', 1) as int) > 20;
    """
    Release = shipit_api.common.models.Release
    # Major version number extracted from "X.Y.Z".
    # Using cast and split_part is postgresql specific.
    major_version = sqlalchemy.cast(
        sqlalchemy.func.split_part(Release.version, ".", 1),
        sqlalchemy.Integer,
    )
    return (
        db_session.query(Release)
        .filter(Release.status == "shipped")
        .filter(major_version >= breakpoint_version)
        .all()
    )
def cancel_transaction(
    transaction_id: int,
    session: sa.orm.Session = sess,
):
    "Creates a transaction that reverse the original"
    # Locate the transaction to be reversed
    transaction = session.query(db.models.Transaction).get(transaction_id)
    if transaction is None:
        msg = f"Transaction {transaction_id=} not found"
        raise HTTPException(status_code=404, detail=msg)

    # Mark it cancelled and build one negating entry per original entry
    transaction.cancel = True
    entries = []
    for original_entry in transaction.entries:
        original_entry.cancel = True
        reversal = EntryCreate(
            account_id=original_entry.account_id,
            value=-original_entry.value,
            asset_id=original_entry.asset_id,
            quantity=-original_entry.quantity,
        )
        entries.append(db.models.Entry(
            **reversal.dict(),
            datetime=transaction.datetime,
            cancel=True,
            fund_id=transaction.fund_id,
        ))

    # Assemble the reversing transaction itself
    canceling = db.models.Transaction(
        timestamp=dt.datetime.utcnow(),
        datetime=transaction.datetime,
        value=transaction.value,
        description=f"Cancel: {transaction_id}",
        entries=entries,
        cancel=True,
        fund_id=transaction.fund_id,
    )

    # Persist and return
    session.add(canceling)
    db.main.try_to_commit(session)
    return canceling
def random_recipe(s: sa.orm.Session, *, n: int = 1) -> Iterable[Recipe]:
    """
    Get `n` random recipes.

    Parameters
    ----------
    s : sqlalchemy.orm.Session
        database session to bind objects
    n : int = [default: 1]
        number of recipes to return
    """
    # BUGFIX: the annotation was `Iterable(Recipe)`, which *calls*
    # typing.Iterable and raises TypeError the moment the function is
    # defined; generic types are subscripted, not called.
    q = s.query(Recipe)\
        .order_by(sa.func.random())\
        .limit(n)
    return iter(q.all())
def test_property(ssn: sa.orm.Session):
    """ Test getting historical @property values through an InstanceHistoryProxy """
    # Fixture: one user
    ssn.add(User(id=1, name='John', age=18))
    ssn.commit()

    # Load the user and snapshot its history
    user: User = ssn.query(User).get(1)
    old_user: User = InstanceHistoryProxy(user)  # noqa

    # Computed @property works on both the live object and the snapshot
    assert user.age_in_100_years == 118
    assert old_user.age_in_100_years == 118

    # Mutating the underlying column must not leak into the snapshot's property
    user.age = 20
    assert old_user.age_in_100_years == 118
async def send_field_payload(self, session: sqlalchemy.orm.Session, msg: Dict[str, Any]):
    """Resolve the object referenced by *msg* and let each model extension send its payload."""
    logger.debug(f"send_field_payload:{msg}")
    # breakpoint()
    # Dynamically resolve the mapped class named in the message
    module = importlib.import_module(msg['package_name'])
    klass = getattr(module, msg['class_name'])
    pkname = msg['id_field']
    # Load exactly one row whose primary-key column matches the message value
    obj = session.query(klass).filter(
        getattr(klass, pkname) == msg[pkname]).one()
    extensions = get_model_extensions_for_obj(obj)
    logger.debug(f"model extension: {extensions}")
    # fieldname = msg['field_name']
    for extension in extensions:
        if not extension.send_payload_fn:
            continue
        await extension.send_payload_fn(obj, self.websocket, session)
def _select_new_area(session: sqlalchemy.orm.Session) -> models.Area:
    """ Choose an area from user input. """
    name = input('What is the name of the area? ')
    # BUGFIX: materialize with .all(). A Query object defines no __bool__,
    # so the original `if not areas:` was always False and the
    # "no areas found" retry path could never trigger.
    areas = session.query(models.Area).filter(models.Area.name == name).all()
    if not areas:
        print('No areas found by that name. Please try again.')
        return _select_new_area(session)
    idx = prompt_choices(
        areas,
        lambda i, area: ' {} - {} ({})'.format(i, area.name, area.full_name()))
    return areas[idx]
def _gen_sql_of_public_post(session: sa.orm.Session,
                            type_=Blog.eMeta.type_of_post,
                            keyword=None,
                            *args,
                            **kwargs):
    """Build a query over blog entries, newest first, optionally keyword-filtered."""
    qy = session.query(Blog).order_by(Blog.add_date.desc())
    if type_ == Blog.eMeta.type_of_page:
        qy = qy.filter(Blog.type == Blog.eMeta.type_of_page)
    else:
        # Everything that is not a page: plain articles and markdown posts
        wanted_types = [Blog.eMeta.type_of_article, Blog.eMeta.type_of_markdown]
        qy = qy.filter(Blog.type.in_(wanted_types))
    if keyword:
        # Substring match against either body or title
        pattern = "%{}%".format(keyword)
        qy = qy.filter(sa.or_(Blog.content.like(pattern),
                              Blog.title.like(pattern)))
    return qy
def delete_fund(
    fund_id: int = Path(..., ge=1),
    session: sa.orm.Session = sess,
):
    """
    Deletes a temporary fund

    Only temporary funds can be deleted, otherwise you will get an 403 error
    When a fund is deleted, all of its transactions and entries are deleted too
    """
    fund = session.query(db.models.Fund).get(fund_id)
    if fund is None:
        raise HTTPException(
            status_code=404, detail=f"Fund {fund_id=} not found")
    if not fund.temporary:
        raise HTTPException(
            status_code=403, detail="Only temporary funds can be deleted")
    session.delete(fund)
    db.main.try_to_commit(session)
    return None
def test_columns(ssn: sa.orm.Session):
    """ Simple test of InstanceHistoryProxy with columns """
    # Fixture: one user
    ssn.add(User(id=1, name='John', age=18))
    ssn.commit()

    # Load the user and snapshot its history
    user: User = ssn.query(User).get(1)
    old_user: User = InstanceHistoryProxy(user)  # noqa

    def assert_history_intact():
        # The snapshot must keep the originally committed column values
        assert old_user.id == 1
        assert old_user.name == 'John'
        assert old_user.age == 18

    # Mutate every column
    user.id = 1000
    user.name = 'CHANGED'
    user.age = 1800
    assert_history_intact()

    # The snapshot must survive a flush too
    ssn.flush()
    assert_history_intact()
def loadall(ssn: sa.orm.Session, Model: type):
    """ Load all rows from a table, ordered by primary key """
    return ssn.query(Model).order_by(*primary_key_columns(Model))
def tweets_with_other_data_generator(session: sqlalchemy.orm.Session):
    """Yield every tweet whose other_data column is populated (not NULL).

    :type session: sqlalchemy.orm.Session
    """
    yield from session.query(Tweet).filter(Tweet.other_data.isnot(None))
async def handle_push(connection: Connection,
                      session: sqlalchemy.orm.Session) -> Optional[int]:
    # Handle a client "push" over the websocket: validate the client's version
    # against the server's, resolve unique-constraint conflicts, apply the
    # pushed operations, record them under a new Version row, and report the
    # new version id back to the client.
    # NOTE(review): annotated -> Optional[int], but the success path returns a
    # dict {'new_version_id': ...} — confirm which contract callers expect.
    msgs_got = 0
    version: Optional[Version] = None
    async for msg in connection.socket:
        msgs_got += 1
        msg_json = json.loads(msg)
        pushmsg = PushMessage(msg_json)
        # print(f"pushmsg: {msg}")
        if not pushmsg.operations:
            logger.warn("empty operations list in client PushMessage")
        for op in pushmsg.operations:
            logger.info(f"operation: {op}")
        # await connection.socket.send(f"answer is:{msg}")
        logger.info(f"message key={pushmsg.key}")
        latest_version_id = core.get_latest_version_id(session=session)
        logger.info(
            f"** version on server:{latest_version_id}, version in pushmsg:{pushmsg.latest_version_id}"
        )
        # Version handshake: the client must push on top of the server's
        # latest version. A stale client gets PullSuggested; anything else
        # is rejected outright.
        if latest_version_id != pushmsg.latest_version_id:
            exc = f"version identifier isn't the latest one; " \
                  f"incoming: {pushmsg.latest_version_id}, on server:{latest_version_id}"
            if latest_version_id is None:
                logger.warn(exc)
                raise PushRejected(exc)
            if pushmsg.latest_version_id is None:
                logger.warn(exc)
                raise PullSuggested(exc)
            if pushmsg.latest_version_id < latest_version_id:
                logger.warn(exc)
                raise PullSuggested(exc)
            raise PushRejected(exc)
        if not pushmsg.islegit(session):
            raise PushRejected("message isn't properly signed")

        # Notify registered pre-push hooks.
        for listener in before_push:
            listener(session, pushmsg)

        # I) detect unique constraint conflicts and resolve them if possible
        unique_conflicts = find_unique_conflicts(pushmsg, session)
        conflicting_objects = set()
        for uc in unique_conflicts:
            obj = uc['object']
            conflicting_objects.add(obj)
            # overwrite the conflicting columns with the incoming values
            for key, value in zip(uc['columns'], uc['new_values']):
                setattr(obj, key, value)
        for obj in conflicting_objects:
            make_transient(obj)  # remove from session
        # Delete the conflicting rows from the database, one bulk DELETE per
        # model class, then reinsert the (now transient) corrected objects.
        for model in set(type(obj) for obj in conflicting_objects):
            pk_name = get_pk(model)
            pks = [
                getattr(obj, pk_name) for obj in conflicting_objects
                if type(obj) is model
            ]
            session.query(model).filter(getattr(model, pk_name).in_(pks)). \
                delete(synchronize_session=False)  # remove from the database
        session.add_all(conflicting_objects)  # reinsert
        session.flush()

        # II) perform the operations
        operations = [
            o for o in pushmsg.operations if o.tracked_model is not None
        ]
        post_operations: List[Tuple[Operation, SQLClass, Optional[SQLClass]]] = []
        try:
            op: Operation
            for op in operations:
                (obj, old_obj) = await op.perform_async(
                    pushmsg, session, pushmsg.node_id, connection.socket)
                if obj is not None:
                    # if the op has been skipped, it wont be appended for post_operation handling
                    post_operations.append((op, obj, old_obj))
                    # acknowledge each applied operation to the client
                    resp = dict(type="info",
                                op=dict(
                                    row_id=op.row_id,
                                    version=op.version,
                                    command=op.command,
                                    content_type_id=op.content_type_id,
                                ))
                    call_after_tracking_fn(session, op, obj)
                    await connection.socket.send(json.dumps(resp))
        except OperationError as e:
            logger.exception(
                "Couldn't perform operation in push from node %s.",
                pushmsg.node_id)
            raise PushRejected("at least one operation couldn't be performed",
                               *e.args)

        # III) insert a new version
        if post_operations:  # only if operations have been done -> create the new version
            version = Version(created=datetime.datetime.now(),
                              node_id=pushmsg.node_id)
            session.add(version)

        # IV) insert the operations, discarding the 'order' column
        accomplished_operations = [
            op for (op, obj, old_obj) in post_operations
        ]
        for op in sorted(accomplished_operations, key=attr('order')):
            new_op = Operation()
            # copy every persisted attribute except the client-side 'order'
            for k in [k for k in properties_dict(op) if k != 'order']:
                setattr(new_op, k, getattr(op, k))
            session.add(new_op)
            new_op.version = version
            session.flush()

        # run per-operation post hooks
        for op, obj, old_obj in post_operations:
            op.call_after_operation_fn(session, obj, old_obj)
        # from woodmaster.model.sql.model import WoodPile, Measurement
        # orphans = session.query(Measurement).filter(Measurement.woodpile_id == None).all()
        # print(f"orphans:{orphans}")

        # Notify registered post-push hooks.
        for listener in after_push:
            listener(session, pushmsg)

        # return the new version id back to the client
        logger.info(f"version is: {version}")
        if version:
            await connection.socket.send(
                json.dumps(
                    dict(type="result", new_version_id=version.version_id)))
            return {'new_version_id': version.version_id}
        else:
            # nothing was applied: tell the client and close the socket
            await connection.socket.send(
                json.dumps(dict(type="result", new_version_id=None)))
            logger.info("sent nothing message")
            await connection.socket.close()
    logger.info("push ready")
def get_package_version_link_id_query(
        session: sqlalchemy.orm.Session,
        link: Tuple[int, int]) -> sqlalchemy.orm.query.Query:
    """Query for PackageLink ids joining the given (parent, child) package pair."""
    parent_package_id, child_package_id = link
    id_query = session.query(PackageLink.id)
    return id_query.filter_by(parent_package_id=parent_package_id,
                              child_package_id=child_package_id)
def get_transaction(session: sa.orm.Session = sess):
    """Return every Transaction row in the database."""
    return session.query(db.models.Transaction).all()
def _asset_is_currency(asset_id: int, session: sa.orm.Session):
    """True when the asset exists and its type is currency; False otherwise."""
    asset = session.query(db.models.Asset).get(asset_id)
    return asset is not None and asset.type == AssetTypes.currency
def test_does_not_lose_history(ssn: sa.orm.Session):
    """ Extensive test of InstanceHistoryProxy with query counters and lazy loads """
    assert ssn.autoflush == False, 'this test relies on Session.autoflush=False'
    engine = ssn.get_bind()

    # Prepare
    ssn.add(User(id=1, name='John', age=18))
    ssn.add(Article(id=1, title='Python', author_id=1))
    ssn.commit()

    # === Test 1: ModelHistoryProxy does not lose history when flushing a session
    ssn.expunge_all(
    )  # got to reset; otherwise, the session might reuse loaded objects
    user = ssn.query(User).get(1)
    with ExpectedQueryCounter(engine, 0, 'Expected no queries here'):
        old_user_hist = InstanceHistoryProxy(user)  # issues no queries

    # Modify
    user.name = 'CHANGED'
    # History works
    assert old_user_hist.name == 'John'

    # Flush
    ssn.flush()
    # History is NOT broken!
    assert old_user_hist.name == 'John'

    # Change another column after flush; history is still NOT broken!
    user.age = 1800
    assert old_user_hist.age == 18  # correct

    # Undo
    ssn.rollback()

    # === Test 2: ModelHistoryProxy does not lose history when lazyloading a column
    ssn.expunge_all(
    )  # got to reset; otherwise, the session might reuse loaded objects
    user = ssn.query(User).options(load_only('name')).get(1)
    with ExpectedQueryCounter(engine, 0, 'Expected no queries here'):
        old_user_hist = InstanceHistoryProxy(user)  # issues no queries
    user.name = 'CHANGED'
    assert old_user_hist.name == 'John'

    # Load a column
    with ExpectedQueryCounter(engine, 1, 'Expected 1 lazyload query'):
        user.age  # get an unloaded column

    # History is NOT broken!
    assert old_user_hist.name == 'John'

    # === Test 3: ModelHistoryProxy does not lose history when lazyloading a one-to-many relationship
    ssn.expunge_all(
    )  # got to reset; otherwise, the session might reuse loaded objects
    user = ssn.query(User).get(1)
    with ExpectedQueryCounter(engine, 0, 'Expected no queries here'):
        old_user_hist = InstanceHistoryProxy(user)
    user.name = 'CHANGED'
    assert old_user_hist.name == 'John'  # History works

    # Load a relationship
    with ExpectedQueryCounter(engine, 1, 'Expected 1 lazyload query'):
        list(user.articles)

    # History is NOT broken!
    assert old_user_hist.name == 'John'

    # === Test 4: ModelHistoryProxy does not lose history when lazyloading a one-to-one relationship
    ssn.expunge_all(
    )  # got to reset; otherwise, the session might reuse loaded objects
    article = ssn.query(Article).get(1)
    with ExpectedQueryCounter(engine, 0, 'Expected no queries here'):
        old_article_hist = InstanceHistoryProxy(article)
    article.title = 'CHANGED'
    assert old_article_hist.title == 'Python'  # works

    # Load a relationship
    with ExpectedQueryCounter(engine, 1, 'Expected 1 lazyload query'):
        article.author

    # History is NOT broken!
    assert old_article_hist.title == 'Python'  # works
def get_all(session: sa.orm.Session = sess):
    """Return every Fund row in the database."""
    return session.query(db.models.Fund).all()