def getTest():
    # Build three random 20-question sets (word, confusable and phrase quizzes)
    # and return them together as a single JSON document.
    results = []
    results2 = []
    results3 = []
    questions = models.WordQuize.query.order_by(func.random()).limit(20)
    for q in questions:
        results.append({'id': q.id, 'question': q.question, 'option': q.option})
    questions = models.ConfuseQuize.query.order_by(func.random()).limit(20)
    for q in questions:
        results3.append({'id': q.id, 'question': q.question, 'option': q.option})
    questions = models.PhraseQuize.query.order_by(func.random()).limit(20)
    for q in questions:
        results2.append({'id': q.id, 'question': q.question, 'option': q.option})
    json_result = {'result': results, 'result2': results2, 'result3': results3}
    return json.dumps(json_result, ensure_ascii=False)
def get_request(): """ @return: a random request guarantied to be locked and unique """ session = database.get_session() me = Host.by_name(socket.gethostname(), session) if me.region is not None: # noinspection PyComparisonWithNone,PyPep8 query = session.query(HashRequest).join(HashRequest.host).join(Host.region).filter( and_(HashRequest.locked == False, HashRequest.server != None, Region.id == me.region.id)).order_by(func.random()).with_for_update() else: # noinspection PyComparisonWithNone,PyPep8 query = session.query(HashRequest).join(HashRequest.host).filter( and_(HashRequest.locked == False, HashRequest.server != None, HashRequest.host == me)).order_by(func.random()).with_for_update() try: request = query.first() if request is None: return None request.locked = True session.commit() return request except Exception as error: session.rollback() raise error finally: session.close()
def test_generic_random(self): assert func.random().type == sqltypes.NULLTYPE assert isinstance(func.random(type_=Integer).type, Integer) for ret, dialect in [ ('random()', sqlite.dialect()), ('random()', postgresql.dialect()), ('rand()', mysql.dialect()), ('random()', oracle.dialect()) ]: self.assert_compile(func.random(), ret, dialect=dialect)
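# Hedged illustration, not taken from any snippet in this collection: per the compilation
# test above, the generic func.random() already renders as rand() on MySQL and random()
# elsewhere, so the manual dialect branching used by several snippets below can be wrapped
# in one small helper. `query` is assumed to be an ORM Query bound to a session; the
# helper name is illustrative only.
from sqlalchemy import func

def order_randomly(query):
    # Branch on the bound dialect, as the MySQL/SQLite/PostgreSQL snippets below do.
    dialect_name = query.session.get_bind().dialect.name
    random_fn = func.rand() if dialect_name == 'mysql' else func.random()
    return query.order_by(random_fn)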
def random(self): engine = str(db.engine) if 'postgresql' in engine: # Postgres return Session.query(self).order_by(func.random()).first() elif 'mysql' in engine: # MySQL return Session.query(self).order_by(func.rand()).first() elif 'sqlite' in engine: # sqlite return Session.query(self).order_by(func.random()).first()
def index(self, **kw):
    items = DBSession.query(Item).all()[:9]
    try:
        # PostgreSQL / SQLite spell the function random()
        show_case1 = DBSession.query(Item).order_by(func.random()).all()[:4]
        show_case2 = DBSession.query(Item).order_by(func.random()).all()[:4]
    except Exception:
        # fall back to MySQL's rand() if random() is unavailable
        show_case1 = DBSession.query(Item).order_by(func.rand()).all()[:4]
        show_case2 = DBSession.query(Item).order_by(func.rand()).all()[:4]
    self.render("index.html", items=items, show_case1=show_case1, show_case2=show_case2)
def random(self, n_instances=1, dialect=None): """Returns random model instances. :param n_instances: the number of instances to return :type n_instances: int :param dialect: the engine dialect (the implementation of random differs between MySQL and SQLite among others). By default will look up on the query for the dialect used. If no random function is available for the chosen dialect, the fallback implementation uses total row count to generate random offsets. :type dialect: str :rtype: model instances """ if dialect is None: dialect = self.session.get_bind().dialect.name if dialect == 'mysql': instances = self.order_by(func.rand()).limit(n_instances).all() elif dialect in ['sqlite', 'postgresql']: instances = self.order_by(func.random()).limit(n_instances).all() else: # fallback implementation count = self.count() instances = [ self.offset(randint(0, count - 1)).first() for _ in range(n_instances) ] if len(instances) == 1: return instances[0] return instances
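# Hedged usage sketch for the helper above, assuming it is defined on a custom query class
# attached to the session or models (e.g. via query_class); `session` and `User` are
# placeholder names, not taken from this collection. Note that the fallback branch issues
# one SELECT per requested instance and samples with replacement, since each random offset
# is drawn independently.
random_user = session.query(User).random()
five_users = session.query(User).random(n_instances=5)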
def coverage(self): image = self.images.filter_by(is_coverage=True) if image.count() == 0: image = self.images.order_by(func.random()).limit(1) image = image.first() return image and {'url': image.url, 'id': image.id} or None
def update_scores(self, model, chunksize=10000): # update node scores waypoint_nodes = ( self.session.query( Node, ST_X(cast(Node.loc, Geometry)), ST_Y(cast(Node.loc, Geometry))) .filter(Node.num_ways != 0) .order_by(func.random())) # random order # process nodes in chunks for memory efficiency. # note: normalization of scores is done per chunk, which should be a # reasonable approximation to global normalization when the chunks are # large since the query specifies random ordering for chunk in _grouper(chunksize, waypoint_nodes): nodes, x, y = zip(*chunk) X = np.vstack((x, y)).T scores = model.score_samples(X) for node, score in zip(nodes, scores): node.score = score # update cumulative scores sq = ( self.session.query( Waypoint.id.label('id'), func.sum(Node.score).over( partition_by=Waypoint.way_id, order_by=Waypoint.idx).label('cscore')) .join(Node) .subquery()) (self.session.query(Waypoint) .filter(Waypoint.id == sq.c.id) .update({Waypoint.cscore: sq.c.cscore}))
def get_random_mapobject_subset(self, mapobject_type_name, n): '''Selects a random subset of mapobjects. Parameters ---------- mapobject_type_name: str name of the selected :class:`MapobjectType <tmlib.models.mapobject.MapobjectType>` n: int number of mapobjects that should be selected at random Returns ------- Tuple[int] IDs of selected mapobject ''' with tm.utils.ExperimentSession(self.experiment_id) as session: mapobject_type = session.query(tm.MapobjectType.id).\ filter_by(name=mapobject_type_name).\ one() mapobjects = session.query(tm.Mapobject.id).\ filter_by(mapobject_type_id=mapobject_type.id).\ order_by(func.random()).\ limit(n).\ all() return [m.id for m in mapobjects]
def find(self): recipe = session.query(Recipe).filter(Recipe.done == False).order_by(func.random()).first() if not recipe: abort(500, message="Every recipe was done. Nice.") recipe.done = True session.commit() return recipe
def meet_character(cls, character): """ Get monster for character. """ return cls.query.filter( cls.level >= character.level - 2, cls.level <= character.level + 2 ).order_by(func.random()).first()
def load_projects():
    """Takes 'n' the user specifies and returns n projects from the cluster."""
    number = request.form.get("number")
    cluster_id = request.form.get("id")

    # returns n random projects in the chosen cluster
    cluster_query = modelsession.query(model.Cluster).filter(
        model.Cluster.cluster_num == cluster_id).order_by(
        func.random()).limit(number).all()

    projects = []
    # loop through projects in cluster_query to build a list of the project objects
    for item in cluster_query:
        proj_id = '"' + item.project_id + '"'
        query = modelsession.query(model.Project).filter(
            model.Project.id == proj_id).first()
        projects.append(query)

    # create a JSON object to pass to javascript
    json_list = []
    for project in projects:
        json_list.append({"id": project.id,
                          "title": project.title,
                          "location": project.school.city + ", " + project.school.state,
                          "grade": project.grade_level,
                          "needs": project.fulfillment_trailer})
    return json.dumps(json_list)
def cmd(send, msg, args): """Returns a random line from $nick. Syntax: {command} (--channel <channel>) (nick) """ parser = arguments.ArgParser(args['config']) parser.add_argument('--channel', action=arguments.ChanParser) parser.add_argument('nick', nargs='*') try: cmdargs = parser.parse_args(msg) except arguments.ArgumentException as e: send(str(e)) return quote = args['db'].query(Log.msg, Log.source) nick = ' '.join(cmdargs.nick) if cmdargs.nick else "" if nick: quote = quote.filter(Log.source == nick) else: quote = quote.filter(Log.source != args['botnick']) target = cmdargs.channels[0] if hasattr(cmdargs, 'channels') else args['config']['core']['channel'] quote = quote.filter(or_(Log.type == 'pubmsg', Log.type == 'privmsg', Log.type == 'action'), Log.target == target, func.length(Log.msg) > 5).order_by(func.random()).first() if quote: send("%s -- %s" % quote) elif nick: send("%s isn't very quotable." % nick) else: send("Nobody is very quotable :(")
def get_representative_nodes(): """Get elements with a reasonable distribution of node counts.""" step = 1000 start = 0 stop = 60000 limit = 50 record_names = ['name', 'gridcode', 'node_count'] ranges = [] to_process = [] s = Session() while start < stop: ranges.append([start, start + step]) start += step for ctr, select_range in enumerate(ranges): log.debug(select_range) q = s.query(VectorProcessingUnit.name, Catchment.gridcode, Catchment.node_count).join(Catchment) q = q.filter(Catchment.node_count >= select_range[0]).filter(Catchment.node_count < select_range[1]) q = q.order_by(func.random()).limit(limit) for idx, record in enumerate(q): to_process.append(dict(zip(record_names, record))) s.close() return to_process
def rand(self, sender, *, orig_only=False): ''' A random tweet from someone. Example:: /rand j a random tweet from j :param str sender: single sender :param bool orig_only: original tweets only :return: `Resp` object :rtype: Resp ''' sender = self._to_sender(sender) assert len(sender) == 1, "so many people" sender = sender[0] cond = Tweet.user_id == int(sender) if orig_only: cond = cond & (Tweet.type == 'tweet') one = self.session.query(Tweet).filter(cond)\ .order_by(func.random()).first() if one is None: return Resp(message="nothing indexed") cmd = '/randt' if orig_only else '/rand' keyboard = {"keyboard": [[cmd + ' ' + sender], ['/end']], "selective": True, "resize_keyboard": True} return Resp(message=self.fmt.format_rand(one), keyboard=keyboard, markdown=True)
def birdie_view(request): dbsession = DBSession() userid = authenticated_userid(request) user = dbsession.query(User).filter_by(userid=userid).first() follows = dbsession.query(Follower).filter(Follower.follower == user.id) follows = follows.order_by(Follower.follows.asc()).limit(10) followers = dbsession.query(Follower).filter(Follower.follows == user.id) followers = followers.order_by(Follower.follower.asc()).limit(10) chirpers = [follow.follows for follow in follows] chirpers.append(user.id) chirps = dbsession.query(Chirp).filter(Chirp.author_id.in_(chirpers)) chirps = chirps.order_by(Chirp.timestamp.desc()).limit(30) whomtofollow = dbsession.query(User).order_by(func.random()).limit(10) return { "app_url": request.application_url, "static_url": request.static_url, "userid": userid, "user": user, "elapsed": get_elapsed, "follows": follows, "followers": followers, "user_chirps": False, "whomtofollow": whomtofollow, "chirps": chirps, }
def index(request): project_ids = [ r[0] for r in ( request.db.query(Project.id) .order_by(Project.zscore.desc().nullslast(), func.random()) .limit(5) .all() ) ] release_a = aliased( Release, request.db.query(Release) .distinct(Release.project_id) .filter(Release.project_id.in_(project_ids)) .order_by( Release.project_id, Release.is_prerelease.nullslast(), Release._pypi_ordering.desc(), ) .subquery(), ) trending_projects = ( request.db.query(release_a) .options(joinedload(release_a.project)) .order_by(func.array_idx(project_ids, release_a.project_id)) .all() ) latest_releases = ( request.db.query(Release) .options(joinedload(Release.project)) .order_by(Release.created.desc()) .limit(5) .all() ) counts = dict( request.db.query(RowCount.table_name, RowCount.count) .filter( RowCount.table_name.in_( [ Project.__tablename__, Release.__tablename__, File.__tablename__, User.__tablename__, ] ) ) .all() ) return { "latest_releases": latest_releases, "trending_projects": trending_projects, "num_projects": counts.get(Project.__tablename__, 0), "num_releases": counts.get(Release.__tablename__, 0), "num_files": counts.get(File.__tablename__, 0), "num_users": counts.get(User.__tablename__, 0), }
def get(self):
    sort = request.args.get('sort')
    role = request.args.get('role')
    require = request.args.get('require')
    limit = request.args.get('limit')
    if sort == 'random':
        users = User.query.order_by(func.random())
    else:
        users = User.query.order_by(desc(User.first_name))
    if role:
        roles = role.split(',')
        users = users.filter(User.role.in_(roles))
    if require:
        # Return only users whose listed attributes have at least one element; for example,
        # if require = 'host-applications', only return users with at least one host application.
        requires = require.split(',')
        requires = [getattr(User, lisp_to_snake(require)).any() for require in requires]
        # Chain onto the existing query so the sort order and role filters are preserved.
        users = users.filter(*requires)
    if limit:
        limit = int(limit)
        users = users[:limit]
    else:
        users = users.all()
    tmp = [row2dict(user) for user in users]
    users = {'users': tmp}
    return users
def get_question_bytag(tag_name,limit): """ :param tag_name: :return: """ print(tag_name) max_questions=20 tag = models.Tag.query.filter_by(tag_name=tag_name).first() if tag: try: limit = int(limit) except Exception as e: pass if isinstance(limit,int): if limit > max_questions or limit <=0: limit=max_questions questions = tag.quest.order_by(func.random()).limit(limit).all() elif limit=='all': questions = tag.quest.all() else: questions = [] if len(questions)==1: r = questions[0].to_dict() else: r = [q.to_dict() for q in questions] else: r = [] return make_response(jsonify({"result":r}), 200)
def cmd(send, msg, args): """Reposts a url. Syntax: {command} """ result = args['db'].query(Urls).order_by(func.random()).first() send("%s" % result.url)
def GET(self): session=database.Session() with AutoClose(session) as ac: allnews=session.query(News).options(defer(News.content),defer(News.image)).order_by(News.id.desc()).limit(6).all() project=session.query(Project).options(defer(Project.content)).order_by(func.random()).first() partners=session.query(Partner).all() tpl=WebSiteBasePage.jinja2_env.get_template('Index.html') return tpl.render(news=allnews,project=project,partners=partners)
def get_quote(cls, id=None):
    try:
        if id is None:
            return meta.session.query(cls).order_by(func.random()).limit(1).one()
        return meta.session.query(cls).get(id)
    except Exception:
        meta.session.rollback()
        raise
def random_products(s, start=0, finish=20):
    # Try each dialect's random-ordering expression in turn:
    # random() (PostgreSQL/SQLite), rand() (MySQL), dbms_random.value (Oracle).
    order_by_list = [func.random(), func.rand(), 'dbms_random.value']
    for o in order_by_list:
        try:
            return s.query(Product).order_by(o).offset(0).limit(finish)
        except Exception:
            pass
    return []
def getRandomN(n): res = db_session.query(Meizi).order_by(func.random()).limit(n).all() # meizis = {} # i =0 # for item in res: # meizis[i] = res[i].filename # i+=1 return res
def get_adventure_random_list(adventure_list, request_user):
    # names of the heroes this user has
    subquery_hero_metadata_id = HeroModel.query.\
        with_entities(HeroModel.hero_metadata_id).\
        filter(HeroModel.user_id == request_user.id).\
        filter(HeroModel.visible).\
        subquery()
    hero_names = MetadataHeroModel.query.\
        with_entities(MetadataHeroModel.name).\
        filter(MetadataHeroModel.id.in_(subquery_hero_metadata_id)).\
        all()

    result = []
    # candidates for the weighted random pick
    pick = []
    for adventure in adventure_list:
        # adventures that have already started go straight into the result list
        if adventure.is_start:
            result.append(adventure)
            continue

        # adventures that have not started yet are matched against the appearance rules
        curr_adventure_name = MetadataAdventureModel.query.\
            with_entities(MetadataAdventureModel.name).\
            filter(MetadataAdventureModel.id == adventure.adventure_metadata_id). \
            subquery()

        target = MetadataAdventureModel.query. \
            filter(MetadataAdventureModel.name == curr_adventure_name). \
            filter(and_(MetadataAdventureModel.level <= adventure.max_level,
                        MetadataAdventureModel.level >= adventure.max_level - 3)).\
            filter(or_(MetadataAdventureModel.hero_name_for_appear.in_(hero_names),
                       MetadataAdventureModel.hero_name_for_appear == None)).\
            order_by(func.random()).\
            first()

        if target:
            pick.append(target)
            adventure.adventure_metadata_id = target.id

    # refresh the adventure execution counts (if a day has passed)
    adventure_execution_refresh(adventure_list, request_user)

    # adventures that already started are in the result list, so only the remaining slots need filling
    remain_adventure_count = request_user.adventure_slot - len(result)

    # adventures that have reached their daily execution limit are not added to the list
    while remain_adventure_count != 0 and len(pick) != 0:
        choice = weighted_random(pick,
                                 [adventure.appear_rate for adventure in pick],
                                 sum(adventure.appear_rate for adventure in pick))
        for adventure in adventure_list:
            if adventure.adventure_metadata_id == choice.id:
                if adventure.execution_count < choice.execution_limit:
                    result.append(adventure)
                    remain_adventure_count -= 1
                break
    return result
def get(self): # random for sqlite, rand for mysql state_list = [State.IN_VOTE, State.REQUESTED, State.ACCEPTED, State.DECLINED] bands = Band.query.order_by(func.random()).filter(Band.state.in_(state_list)) return jsonify(bands=[band2json(band) for band in bands], tracks=[track2json(track) for track in Track.query.join(Band).filter( Band.state.in_(state_list))], comments=[comment2json(comment) for comment in Comment.query.join(Band).filter(Band.state.in_(state_list))])
def welcome_page(request): if request.user: return HTTPFound(request.route_url('home')) photos = DBSession.query(Photo).filter_by( is_public=True ).order_by(func.random()).limit(3) return {'photos': photos}
def random(self, for_user: User=None) -> int: query = self._db.query(Request.id).filter(Request.status == "posted") if for_user: query = query.filter(Request.user_id != for_user.id) if for_user.blacklist_filter is not None: query = query.filter(for_user.blacklist_filter) row = query.order_by(func.random()).first() return row[0] if row else None
def get_random_user_agent(): """ Return a random user agent string. Returns the user agent string as JSON of the form:: { "ua": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/33.0.1750.117 Safari/537.36", } """ return jsonify(ua=UserAgent.query.order_by(func.random()).first().string)
def get_random_posts(self, count=10): posts = Post.query.order_by( func.random() ).limit(count).all() post_dict_list = self.data_dict_list_generator( posts, POST_DICT_KEY ) return post_dict_list
def index(request): project_names = [ r[0] for r in (request.db.query(Project.name).order_by( Project.zscore.desc().nullslast(), func.random()).limit(5).all()) ] release_a = aliased( Release, request.db.query(Release).distinct(Release.name).filter( Release.name.in_(project_names)).order_by( Release.name, Release.is_prerelease.nullslast(), Release._pypi_ordering.desc()).subquery(), ) trending_projects = (request.db.query(release_a).options( joinedload(release_a.project)).order_by( func.array_idx(project_names, release_a.name)).all()) latest_releases = (request.db.query(Release).options( joinedload(Release.project)).order_by( Release.created.desc()).limit(5).all()) counts = dict( request.db.query(RowCount.table_name, RowCount.count).filter( RowCount.table_name.in_([ Project.__tablename__, Release.__tablename__, File.__tablename__, User.__tablename__, ])).all()) return { "latest_releases": latest_releases, "trending_projects": trending_projects, "num_projects": counts.get(Project.__tablename__, 0), "num_releases": counts.get(Release.__tablename__, 0), "num_files": counts.get(File.__tablename__, 0), "num_users": counts.get(User.__tablename__, 0), }
def books_per_block(): # new arrivals for book in Books.query.order_by(Books.created_on.desc()).order_by( func.random()).limit(10): f_db.get('books').get('New Arrivals')['New Arrivals'].append(book.id) f_db.commit() # Trending for book in Books.query.order_by(Books.pg_views.desc()).order_by( func.random()).limit(10): f_db.get('books').get('Trending')['Trending'].append(book.id) f_db.commit() # Most Downloaded for book in Books.query.order_by(Books.downloads.desc()).order_by( func.random()).limit(10): f_db.get('books').get('Most Downloaded')['Most Downloaded'].append( book.id) f_db.commit() # Most Searched for book in Books.query.order_by(func.random()).limit(10): f_db.get('books').get('Most Searched')['Most Searched'].append(book.id) f_db.commit() # Editor's choice try: for book in Books.query.filter(Books.language == 'Hindi').order_by( func.random()).limit(10): f_db.get('books').get("Hindi Books")["Hindi Books"].append(book.id) f_db.commit() except Exception as e: print(e) # New inspiring try: for book in Books.query.filter( Books.categories[0] == 'Inspiring').order_by( func.random()).limit(10): f_db.get('books').get('New inspiring')['New inspiring'].append( book.id) f_db.commit() except Exception as e: print(e)
async def news(self): if self.news_cache.get("news"): return jsonify(self.news_cache["news"]) recent = (await Mod.query.where( and_(Mod.verified, Mod.status == ModStatus.released) ).order_by(func.random()).limit(10).gino.first()) featured = (await EditorsChoice.load(mod=Mod).where( EditorsChoice.featured).order_by(EditorsChoice.created_at.desc() ).gino.first()) blog = await get_latest_medium_post(self.core.aioh_sess) recent = ({ "type": 0, "title": recent.title, "body": recent.tagline, "url": f"/mods/{recent.id}", "banner": recent.banner, "id": hash(recent.id), } if recent is not None else None) featured = ({ "type": 1, "title": featured.mod.title, "body": featured.editors_notes, "url": featured.article_url, "banner": featured.mod.banner, "id": hash(featured.mod.id), } if featured is not None else None) blog = {"type": 2, **blog} news = [recent, featured, blog] # Featured and recent may be None if there are no EditorsChoices or Mods respectively. self.news_cache["news"] = news = [x for x in news if x is not None] return jsonify(news)
def get_learnings(how_many=12, sort_order="recent", offset=0): ''' Gather and return some recent definitions ''' order_descending = Definition.creation_date.desc() order_random = func.random() order_alphabetical = Definition.term order_function = order_descending prefix_singluar = "I recently learned the definition for" prefix_plural = "I recently learned definitions for" no_definitions_text = "I haven't learned any definitions yet." if sort_order == "random": order_function = order_random elif sort_order == "alpha": order_function = order_alphabetical if sort_order == "random" or sort_order == "alpha" or offset > 0: prefix_singluar = "I know the definition for" prefix_plural = "I know definitions for" # if how_many is 0, ignore offset and return all results if how_many == 0: definitions = db.session.query(Definition).order_by(order_function).all() # if order is random and there is an offset, randomize the results after the query elif sort_order == "random" and offset > 0: definitions = db.session.query(Definition).order_by(order_descending).limit(how_many).offset(offset).all() random.shuffle(definitions) else: definitions = db.session.query(Definition).order_by(order_function).limit(how_many).offset(offset).all() if not definitions: return no_definitions_text, no_definitions_text wording = prefix_plural if len(definitions) > 1 else prefix_singluar plain_text = "{}: {}".format(wording, ', '.join([item.term for item in definitions])) rich_text = "{}: {}".format(wording, ', '.join([make_bold(item.term) for item in definitions])) return plain_text, rich_text
async def psa_json(request): # On GET requests return a random PSA. if request.method == "GET": # Query the database and get a random PSA. result = database.db_session.query(database.PSA).order_by( func.random()).first() # Handle the result if it was found. if result: # Return the JSON. return response.json({ "id": result.id, "content": result.content, "author": result.author, "date": result.create_on.strftime("%Y-%m-%d %H:%M:%S") }) else: return response.json({"message": "PSA entry not found!"}, status=404)
async def rand(self, ctx: commands.Context, *, args: PlaybackArgumentConverter() = _DEFAULT_PLAYBACK_ARGUMENTS): """ Play a random sound. :param args: The volume/speed of playback, in format v[XX%] s[SS%]. e.g. v50 s100 for 50% sound, 100% speed. """ sound = await self.bot.db.fetch_one( select([sound_names.c.sound_id, sound_names.c.name]) .where( and_( sound_names.c.guild_id == ctx.guild.id, ~sound_names.c.is_alias )) .offset( func.floor( func.random() * select([func.count()]) .select_from(sound_names) .where(sound_names.c.guild_id == ctx.guild.id))) .limit(1) ) log.debug(f'Playing random sound {sound[sound_names.c.name]}.') await ctx.invoke(self.play, sound, args=args)
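# Hedged sketch of the random-OFFSET trick used in the snippet above, reduced to a plain
# synchronous Core query: instead of ORDER BY random() over the whole set, skip a random
# number of rows and take one. `engine`, `conn` and the `items` table are assumed
# placeholders, not names from this collection.
from sqlalchemy import Column, Integer, MetaData, String, Table, func, select

metadata = MetaData()
items = Table("items", metadata,
              Column("id", Integer, primary_key=True),
              Column("name", String))

def pick_random_item(conn):
    # Total row count as a scalar subquery, used to bound the random offset.
    row_count = select([func.count()]).select_from(items).as_scalar()
    stmt = (select([items])
            .offset(func.floor(func.random() * row_count))
            .limit(1))
    return conn.execute(stmt).first()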
def auto_node_selector(CrawledNode, session, refresh_days): """ Automatically get a node to crawl :param CrawledNode: a DB Object :param session: A session object :param refresh_days: Days ago to select from DB :return: A dict containing (id, port, last_crawled, port_name) """ node_to_crawl_info = None refresh_time = datetime.datetime.utcnow().replace(microsecond=0) - \ datetime.timedelta(days=refresh_days) # Get a node that hasn't been crawled in 2 weeks try: # Get a node port that doesn't need check and is active crawled_nodes = session.query(CrawledNode).filter( CrawledNode.last_crawled < refresh_time). \ filter(CrawledNode.needs_check == false(), CrawledNode.active_port == true()). \ order_by(func.random()).limit(1).one_or_none() if crawled_nodes: node_to_crawl_info = { crawled_nodes.node_id: (crawled_nodes.id, crawled_nodes.port, crawled_nodes.last_crawled, crawled_nodes.port_name) } except NoResultFound: print("Nothing to crawl") exit() if node_to_crawl_info is None: print("Nothing to crawl") exit() return node_to_crawl_info
def play_quiz(): body = request.get_json() previous_questions = body.get('previous_questions', []) quiz_category = body.get('quiz_category', None) try: # 0 means all categories quiz_category_id = quiz_category['id'] if quiz_category_id == 0: question_query = Question.query else: question_query = Question.query\ .filter(Question.category == int(quiz_category_id)) questions = question_query\ .filter(Question.id.notin_(previous_questions))\ .order_by(func.random()) # The game is over if questions.count() == 0: return jsonify({ 'success': True, 'status_code': 200, }) else: question = questions.first().format() previous_questions.append(question) print(question) return jsonify({ 'success': True, 'status_code': 200, 'question': question, }) except Exception as e: print(e) abort(422)
def get_cards(game_id): # return cards for this player player_id = request.cookies.get('player_id') # find cards for this player in this game player = PlayerInGame.query.filter_by(player_id=player_id).first() # if player doesn't have cards get random ones if not player.cards: game = Game.query.filter_by(id=game_id).first() random_cards = Card.query.outerjoin(used_card).outerjoin(Game).filter( Card.type == 0).filter( or_(used_card.c.game_id == None, used_card.c.game_id != game_id)).order_by( func.random()).limit(10).all() for random_card in random_cards: player.cards.append(random_card) game.used_cards.append(random_card) db.session.commit() # convert to JSON and return data = [] for card in player.cards: data.append(card.as_json()) return json.dumps(data)
def render_all(): form = SortForm(request.args, meta={'csrf': False}) if not form.validate() and request.args.to_dict(): return redirect(url_for("render_all")) sort_value = request.args.get("sort") if not sort_value or sort_value == "random": teachers = db.session.query(Teacher).order_by(func.random()).all() return render_template("all.html", form=form, teachers=teachers) elif sort_value == "by_rating": teachers = db.session.query(Teacher).order_by( Teacher.rating.desc()).all() return render_template("all.html", form=form, teachers=teachers) elif sort_value == "expensive_first": teachers = db.session.query(Teacher).order_by( Teacher.price.desc()).all() return render_template("all.html", form=form, teachers=teachers) elif sort_value == "cheap_first": teachers = db.session.query(Teacher).order_by(Teacher.price).all() return render_template("all.html", form=form, teachers=teachers)
def review_entity_get(offset=None):
    """Jump to the next entity that needs disambiguation."""
    qa = session.query(Mapping.left_uid.label('uid'),
                       func.sum(Mapping.score).label('num'))
    qa = qa.filter(Mapping.project == project.name)
    qa = qa.filter(Mapping.decided == False)  # noqa
    qa = qa.group_by(Mapping.left_uid)
    qb = session.query(Mapping.right_uid.label('uid'),
                       func.sum(Mapping.score).label('num'))
    qb = qb.filter(Mapping.project == project.name)
    qb = qb.filter(Mapping.decided == False)  # noqa
    qb = qb.group_by(Mapping.right_uid)
    sq = qa.union(qb).subquery()
    q = session.query(sq.c.uid, func.sum(sq.c.num))
    q = q.join(Entity, Entity.uid == sq.c.uid)
    q = q.filter(Entity.active == True)  # noqa
    q = q.group_by(sq.c.uid, Entity.tasked)
    q = q.order_by(Entity.tasked.desc())
    q = q.order_by(func.sum(sq.c.num).desc())
    q = q.order_by(func.random())
    if q.count() == 0:
        return redirect(url_for('.entities'))
    q = q.limit(1)
    return redirect(url_for('.entity', uid=q.scalar()))
def generate_relations():
    print('generating some random relationships')

    # b2b: pick a random sample of bulletins
    sample = Bulletin.query.count() // 2
    bulletins = Bulletin.query.order_by(func.random()).limit(sample)
    for bulletin in bulletins:
        # 0 - 3 related bulletins
        x = Bulletin.query.order_by(func.random()).limit(random.randint(0, 3))
        for b in x:
            bulletin.relate_bulletin(b)
        bulletin.comments = 'added relationship'
        bulletin.create_revision()

    # b2a: pick a random sample of bulletins
    sample = Bulletin.query.count() // 2
    bulletins = Bulletin.query.order_by(func.random()).limit(sample)
    for bulletin in bulletins:
        # 0 - 3 related actors
        x = Actor.query.order_by(func.random()).limit(random.randint(0, 3))
        for a in x:
            bulletin.relate_actor(a)
            a.create_revision()
        bulletin.comments = 'added relationship with actor'
        bulletin.create_revision()

    # a2a: pick a random sample of actors
    sample = Actor.query.count() // 2
    actors = Actor.query.order_by(func.random()).limit(sample)
    for actor in actors:
        # 0 - 3 related actors
        x = Actor.query.order_by(func.random()).limit(random.randint(0, 3))
        for a in x:
            actor.relate_actor(a)
        actor.comments = 'added relationship'
        actor.create_revision()
def random(self, number): return self.order_by(func.random()).limit(number).all()
def get_settings_theme_previews(request: Request) -> dict: """Generate the theme preview page.""" # get the generic/unknown user and a random group to display on the example posts fake_user = request.query(User).filter(User.user_id == -1).one() group = request.query(Group).order_by(func.random()).limit(1).one() fake_link_topic = Topic.create_link_topic(group, fake_user, "Example Link Topic", "https://tildes.net/") fake_text_topic = Topic.create_text_topic(group, fake_user, "Example Text Topic", "No real text") fake_text_topic.content_metadata = { "excerpt": "Lorem ipsum dolor sit amet, consectetur adipiscing elit." } fake_topics = [fake_link_topic, fake_text_topic] # manually add other necessary attributes to the fake topics for fake_topic in fake_topics: fake_topic.topic_id = sys.maxsize fake_topic.tags = ["tag one", "tag two"] fake_topic.num_comments = 123 fake_topic.num_votes = 12 fake_topic.created_time = utc_now() - timedelta(hours=12) # create a fake top-level comment that appears to be written by the user markdown = ( "This is what a regular comment written by yourself would look like.\n\n" "It has **formatting** and a [link](https://tildes.net).") fake_top_comment = Comment(fake_link_topic, request.user, markdown) fake_top_comment.comment_id = sys.maxsize fake_top_comment.created_time = utc_now() - timedelta(hours=12, minutes=30) child_comments_markdown = [ ("This reply has received an Exemplary label. It also has a blockquote:\n\n" "> Hello World!"), ("This is a reply written by the topic's OP with a code block in it:\n\n" "```js\n" "function foo() {\n" " ['1', '2', '3'].map(parseInt);\n" "}\n" "```"), ("This reply is new and has the *Mark New Comments* stripe on its left " "(even if you don't have that feature enabled)."), ] fake_comments = [fake_top_comment] # vary the ID and created_time on each fake comment so CommentTree works properly current_comment_id = fake_top_comment.comment_id current_created_time = fake_top_comment.created_time for markdown in child_comments_markdown: current_comment_id -= 1 current_created_time += timedelta(minutes=5) fake_comment = Comment(fake_link_topic, fake_user, markdown, parent_comment=fake_top_comment) fake_comment.comment_id = current_comment_id fake_comment.created_time = current_created_time fake_comment.parent_comment_id = fake_top_comment.comment_id fake_comments.append(fake_comment) # add other necessary attributes to all of the fake comments for fake_comment in fake_comments: fake_comment.num_votes = 0 fake_tree = CommentTree(fake_comments, CommentTreeSortOption.NEWEST, request.user) # add a fake Exemplary label to the first child comment fake_comments[1].labels = [ CommentLabel(fake_comments[1], fake_user, CommentLabelOption.EXEMPLARY, 1.0) ] # the comment to mark as new is the last one, so set a visit time just before it fake_last_visit_time = fake_comments[-1].created_time - timedelta( minutes=1) return { "theme_options": THEME_OPTIONS, "fake_topics": fake_topics, "fake_comment_tree": fake_tree, "last_visit": fake_last_visit_time, }
def next_question(questions=[], reponses=None):
    # Check the state of the test: stop once the target length is reached.
    if len(questions) >= 50:
        return None
    # Random selection
    if random() < 0.6:
        print("Random pick")
        return db.session.query(Phrase.id_phrase).join(StatsPhrases).filter(
            StatsPhrases.nb_reponses < 20).filter(
            Phrase.statut == "En évaluation").order_by(func.random()).first()
    # Selection via mirtCAT
    else:
        print("mirtCAT pick")
        return mirtcat_next_item(generer_objet_mirtcat(mirt_obj_from_db()),
                                 questions_repondues=questions, reponses=reponses)
def get_random_problem(self, low=1, high=10): q = session.query(Problem_DB)\ .filter(Problem_DB.points.between(low, high))\ .order_by(func.random()).limit(1) if q.count(): return q.first()
def test_get_quiz_question_with_category(self): res = self.client().post('/quizzes', json={'previous_questions': [1,2,3], 'quiz_category': {"id":1}}) data = json.loads(res.data) question = Question.query.filter(~Question.id.in_([1,2,3])).filter(Question.category == 1).order_by(func.random()).first() self.assertEqual(res.status_code, 200) self.assertEqual(data['success'], True) self.assertNotIn(question.id, [1,2,3]) self.assertEqual(question.category, 1)
def random_plant(): """Mix up plants and randomly return the first one.""" plant = Plant.query.order_by(func.random()).first() return plant
def test_match(sphinx_connections):
    MockSphinxModel, session, sphinx_engine = sphinx_connections
    base_query = session.query(MockSphinxModel.id)

    # One Match
    query = session.query(MockSphinxModel.id)
    query = query.filter(MockSphinxModel.name.match("adriel"))
    sql_text = query.statement.compile(sphinx_engine).string
    assert sql_text == "SELECT id \nFROM mock_table \nWHERE MATCH('(@name adriel)')"

    query = session.query(MockSphinxModel.id)
    query = query.filter(func.match(MockSphinxModel.name, "adriel"))
    sql_text = query.statement.compile(sphinx_engine).string
    assert sql_text == "SELECT id \nFROM mock_table \nWHERE MATCH('(@name adriel)')"

    # Escape quote
    query = session.query(MockSphinxModel.id)
    query = query.filter(func.match(MockSphinxModel.name, "adri'el"))
    sql_text = query.statement.compile(sphinx_engine).string
    assert sql_text == "SELECT id \nFROM mock_table \nWHERE MATCH('(@name adri\\'el)')"

    # Escape at symbol
    query = session.query(MockSphinxModel.id)
    query = query.filter(func.match(MockSphinxModel.name, "@username"))
    sql_text = query.statement.compile(sphinx_engine).string
    assert sql_text == "SELECT id \nFROM mock_table \nWHERE MATCH('(@name \\\\@username)')"

    # Escape multiple at symbols
    query = session.query(MockSphinxModel.id)
    query = query.filter(func.match(MockSphinxModel.name, "user @user @name"))
    sql_text = query.statement.compile(sphinx_engine).string
    assert sql_text == "SELECT id \nFROM mock_table \nWHERE MATCH('(@name user \\\\@user \\\\@name)')"

    # Escape brackets
    query = session.query(MockSphinxModel.id)
    query = query.filter(func.match(MockSphinxModel.name, "user )))("))
    sql_text = query.statement.compile(sphinx_engine).string
    assert sql_text == "SELECT id \nFROM mock_table \nWHERE MATCH('(@name user \\\\)\\\\)\\\\)\\\\()')"

    # Function match all
    query = session.query(MockSphinxModel.id)
    query = query.filter(func.match("adriel"))
    sql_text = query.statement.compile(sphinx_engine).string
    assert sql_text == "SELECT id \nFROM mock_table \nWHERE MATCH('adriel')"

    # Function match all with quote
    query = session.query(MockSphinxModel.id)
    query = query.filter(func.match("adri'el"))
    sql_text = query.statement.compile(sphinx_engine).string
    assert sql_text == "SELECT id \nFROM mock_table \nWHERE MATCH('adri\\'el')"

    # Function match all with unicode
    query = session.query(MockSphinxModel.id)
    query = query.filter(func.match(u"miljøet"))
    sql_text = query.statement.compile(sphinx_engine).string
    assert sql_text == u"SELECT id \nFROM mock_table \nWHERE MATCH('miljøet')"

    # Function match specific
    query = session.query(MockSphinxModel.id)
    query = query.filter(func.match("@name adriel"))
    sql_text = query.statement.compile(sphinx_engine).string
    assert sql_text == "SELECT id \nFROM mock_table \nWHERE MATCH('@name adriel')"

    # Function match specific with quote
    query = session.query(MockSphinxModel.id)
    query = query.filter(func.match("@name adri'el"))
    sql_text = query.statement.compile(sphinx_engine).string
    assert sql_text == "SELECT id \nFROM mock_table \nWHERE MATCH('@name adri\\'el')"

    # Function match specific with unicode
    query = session.query(MockSphinxModel.id)
    query = query.filter(func.match(u"@name miljøet"))
    sql_text = query.statement.compile(sphinx_engine).string
    assert sql_text == u"SELECT id \nFROM mock_table \nWHERE MATCH('@name miljøet')"

    # Matching single columns
    query = session.query(MockSphinxModel.id)
    query = query.filter(MockSphinxModel.name.match("adriel"), MockSphinxModel.country.match("US"))
    sql_text = query.statement.compile(sphinx_engine).string
    assert sql_text == "SELECT id \nFROM mock_table \nWHERE MATCH('(@name adriel) (@country US)')"

    # Matching single columns with quotes
    query = session.query(MockSphinxModel.id)
    query = query.filter(MockSphinxModel.name.match("adri'el"), MockSphinxModel.country.match("US"))
    sql_text = query.statement.compile(sphinx_engine).string
    assert sql_text == "SELECT id \nFROM mock_table \nWHERE MATCH('(@name adri\\'el) (@country US)')"

    # Matching single columns with at symbol
    query = session.query(MockSphinxModel.id)
    query = query.filter(MockSphinxModel.name.match("@username"), MockSphinxModel.country.match("US"))
    sql_text = query.statement.compile(sphinx_engine).string
    assert sql_text == "SELECT id \nFROM mock_table \nWHERE MATCH('(@name \\\\@username) (@country US)')"

    # Matching single columns with multiple at symbols
    query = session.query(MockSphinxModel.id)
    query = query.filter(MockSphinxModel.name.match("user @user @name"), MockSphinxModel.country.match("US"))
    sql_text = query.statement.compile(sphinx_engine).string
    assert sql_text == "SELECT id \nFROM mock_table \nWHERE MATCH('(@name user \\\\@user \\\\@name) (@country US)')"

    # Matching single columns with brackets
    query = session.query(MockSphinxModel.id)
    query = query.filter(MockSphinxModel.name.match("user )))("), MockSphinxModel.country.match("US"))
    sql_text = query.statement.compile(sphinx_engine).string
    assert sql_text == "SELECT id \nFROM mock_table \nWHERE MATCH('(@name user \\\\)\\\\)\\\\)\\\\() (@country US)')"

    # Matching through functions
    query = session.query(MockSphinxModel.id)
    query = query.filter(func.match(MockSphinxModel.name, "adriel"), func.match(MockSphinxModel.country, "US"))
    sql_text = query.statement.compile(sphinx_engine).string
    assert sql_text == "SELECT id \nFROM mock_table \nWHERE MATCH('(@name adriel) (@country US)')"

    # Matching with not_
    base_expression = not_(MockSphinxModel.country)
    for expression in (base_expression.match("US"), func.match(base_expression, "US")):
        query = base_query.filter(expression)
        sql_text = query.statement.compile(sphinx_engine).string
        assert sql_text == "SELECT id \nFROM mock_table \nWHERE MATCH('(@!country US)')"

    # Matching multiple columns with or_
    base_expression = or_(MockSphinxModel.name, MockSphinxModel.country)
    for expression in (base_expression.match("US"), func.match(base_expression, "US")):
        query = base_query.filter(expression)
        sql_text = query.statement.compile(sphinx_engine).string
        assert sql_text == "SELECT id \nFROM mock_table \nWHERE MATCH('(@(name,country) US)')"

    # Matching multiple columns with or_ and not_ through functions
    base_expression = not_(or_(MockSphinxModel.name, MockSphinxModel.country))
    for expression in (base_expression.match("US"), func.match(base_expression, "US")):
        query = base_query.filter(expression)
        sql_text = query.statement.compile(sphinx_engine).string
        assert sql_text == "SELECT id \nFROM mock_table \nWHERE MATCH('(@!(name,country) US)')"

    # Mixing and Matching
    query = session.query(MockSphinxModel.id)
    query = query.filter(func.match(MockSphinxModel.name, "adriel"), MockSphinxModel.country.match("US"))
    sql_text = query.statement.compile(sphinx_engine).string
    assert sql_text == "SELECT id \nFROM mock_table \nWHERE MATCH('(@name adriel) (@country US)')"

    # Match with normal filter
    query = session.query(MockSphinxModel.id)
    query = query.filter(func.match(MockSphinxModel.name, "adriel"), MockSphinxModel.country.match("US"), MockSphinxModel.id == 1)
    sql_text = query.statement.compile(sphinx_engine).string
    assert sql_text == "SELECT id \nFROM mock_table \nWHERE MATCH('(@name adriel) (@country US)') AND id = %s"

    # Match with normal filter with unicode
    query = session.query(MockSphinxModel.id)
    query = query.filter(func.match(MockSphinxModel.name, u"miljøet"), MockSphinxModel.country.match("US"), MockSphinxModel.id == 1)
    sql_text = query.statement.compile(sphinx_engine).string
    assert sql_text == u"SELECT id \nFROM mock_table \nWHERE MATCH('(@name miljøet) (@country US)') AND id = %s"

    query = session.query(MockSphinxModel.id)
    query = query.filter(func.random(MockSphinxModel.name))
    sql_text = query.statement.compile(sphinx_engine).string
    assert sql_text == "SELECT id \nFROM mock_table \nWHERE random(name)"
def get_ids_by_home_flag_random(status = 0, num = 100): return db_session.query(SeedIds).filter(SeedIds.home_crawled == status).order_by(func.random()).limit(num).all()
def get_one_proxy(): s = sess_maker() res = s.query(Proxy).filter(Proxy.status == STATUS_OK).order_by(func.random()).limit(1).one() s.close() return res
def get_random(cls): return db.session.query(cls).order_by(func.random()).first()
def order_by_random(self, table: Base): return self.db.session.query(table).order_by(func.random()).first()
def get_random_data(cls, limit=10): return cls.query.order_by(func.random()).limit(limit).all()
def get_random_peers(limit=10): return db.query(Peer).order_by(func.random()).limit(limit)
def get_random_unlabelled_sample(self) -> Sample: return db.query(Sample) \ .filter(Sample.dataset == self.dataset, ~Sample.associations.any()) \ .order_by(db_functions.random()) \ .first()
def __init__(self, verbose=True): init_db() self.verbose = verbose self.meme_repo = db.session.query(RedditMeme.media).order_by( func.random()).all()
def detail(id): """ The individual detail page for gyms that show all of the information we have about a gym item. """ gym = db.session.query(Gyms).get(id) if gym is None: abort(404) image = db.session.query(Images).get(gym.pic_id).pic gym.name = capwords(gym.name) img = unbinary(str(base64.b64encode(image))) # Search for the nearest stores. stores = db.session.query(Stores).all() lat2 = at_get('lat') lng2 = at_get('lng') lat = lat2(gym) lng = lng2(gym) store_list = nsmallest(4, stores, lambda x: real_dist(lat, lng, lat2(x), lng2(x))) images = [] for store in store_list: s_image = db.session.query(Images).get(store.pic_id).pic images.append(unbinary(str(base64.b64encode(s_image)))) # add some workouts workouts = db.session.query(Workouts).filter(or_(Workouts.category == "conditioning exercise")).order_by(func.random()).limit(4).all() return render_template('gyms/gymsdetail.html', gym=gym, pic=img, stores=store_list, workouts=workouts, images=images, key=app.config['EMBED_API'])
def load(session, user): """Approve pairs load handler.""" parser = reqparse.RequestParser() parser.add_argument('vids', None, type=str) parser.add_argument('apsid', None, type=int) args = parser.parse_args() if args.vids is not None: args.vids = [int(vid) for vid in args.vids.split('-')] if len(args.vids) != 2: return 'vids must be two elements', 400 if args.vids is None and args.apsid is None: # pylint: disable-msg=E1101,E1103 vehicle = session.query(database.Vehicle) \ .join(database.Revision) \ .outerjoin(database.ApprovePairToVehicleAssociation) \ .outerjoin(database.ApprovePairSession) \ .group_by(database.Vehicle.id) \ .having(func.bool_and(or_( database.ApprovePairSession.uid != user.id, database.ApprovePairSession.uid == None ))) \ .filter(database.Revision.final == True) \ .filter(database.Vehicle.cropped != None) \ .filter(database.Vehicle.partner_id != None) \ .having(func.count(database.ApprovePairSession.id) < 1) \ .order_by(desc(func.count(database.ApprovePairSession.id))) \ .order_by(func.random()) \ .first() # pylint: enable-msg=E1101,E1103 if vehicle: image1 = os.path.join(APP.config['HOST'], vehicle.cropped) image2 = os.path.join(APP.config['HOST'], vehicle.partner.cropped) labels = { 'vehicle1': { 'vid': vehicle.id, 'image': image1, }, 'vehicle2': { 'vid': vehicle.partner_id, 'image': image2, } } else: labels = None elif args.vids is not None and args.apsid is None: vehicle = session.query(database.Vehicle) \ .filter_by(id=args.vids[0]) \ .one() if vehicle.partner_id != args.vids[1]: return 'not a valid vid pair', 400 image1 = os.path.join(APP.config['HOST'], vehicle.cropped) image2 = os.path.join(APP.config['HOST'], vehicle.partner.cropped) labels = { 'vehicle1': { 'vid': vehicle.id, 'image': image1, }, 'vehicle2': { 'vid': vehicle.partner_id, 'image': image2, } } elif args.apsid is not None: approve_pair_session = session.query(database.ApprovePairSession) \ .filter_by(id=args.apsid) \ .one() image1 = os.path.join(APP.config['HOST'], approve_pair_session.vehicles[0].cropped) image2 = os.path.join(APP.config['HOST'], approve_pair_session.vehicles[1].cropped) labels = { 'vehicle1': { 'vid': approve_pair_session.vehicles[0].id, 'image': image1, }, 'vehicle2': { 'vid': approve_pair_session.vehicles[1].id, 'image': image2, }, 'answer': approve_pair_session.answer } if labels is None: return jsonify(status='empty') else: return jsonify(status='ok', data=labels)
def test(): form = AnswerForm() if 'practice-topics' in session: print('Topics Chosen:', session['practice-topics']) questionArray = [] questionIdArray = [] time = 0 #total time of questions in session for num in session['practice-topics']: q = Question.query.filter_by(topic_no=num).order_by( func.random()).first() print("Question for topic ", num) print(q) questionArray.append(q) questionIdArray.append(q.id) time += q.time #formArray is an array of forms (one form per question) formArray = [] for i in range(len(questionArray)): form = AnswerForm() formArray.append(form) length = len(questionArray) session['practice-length'] = length session.pop('practice-topics') timeLeft = time print("GOING TO ANSWER") print("questionIdArray") print(questionIdArray) return render_template('practice-test.html', formArray=formArray, questionArray=questionArray, questionIdArray=questionIdArray, length=length, timeLeft=time) else: if request.method == "POST": global practiceSessionChoices global practiceSessionQuestionIdList length = session['practice-length'] session.pop('practice-length') print("=================POST========================") print("practiceSessionChoices:", practiceSessionChoices) print(practiceSessionQuestionIdList) for i in range(length): #Data is 1-based index of choice data = request.form.get(str(i)) print("practice-lengthData:", data) if data == None: #no answer pass else: a = Answer() a.question_id = practiceSessionQuestionIdList[i] a.choice_id = practiceSessionChoices[i][int(data) - 1] db.session.add(a) print(a) db.session.commit() session['practice-session-done'] = True return redirect(url_for('practice.result')) return redirect(url_for('practice.select'))