def construct(self, engine):
    h = self.rtree.data
    idx = sa.Index(self.data.name + '_iname_idx', sa.func.upper(self.c.name))

    with engine.begin() as conn:
        conn.execute(DropIndexIfExists(idx))
        self.truncate(conn)
        max_depth = conn.scalar(sa.select([saf.max(h.c.depth)]))

    subtab = sa.select([h.c.child, saf.max(h.c.depth).label("lvl")])\
               .group_by(h.c.child).alias()

    # Process relations by hierarchy, starting with the highest depth.
    # This guarantees that the geometry of member relations is already
    # available for processing the relation geometry.
    if max_depth is not None:
        for level in range(max_depth, 1, -1):
            subset = self.rels.data.select()\
                         .where(subtab.c.lvl == level)\
                         .where(self.rels.c.id == subtab.c.child)
            self.insert_objects(engine, subset)

    # Lastly, process all routes that are nobody's child.
    subset = self.rels.data.select()\
                 .where(self.rels.c.id.notin_(
                     sa.select([h.c.child], distinct=True).as_scalar()))
    self.insert_objects(engine, subset)

    with engine.begin() as conn:
        idx.create(conn)
def _insert_objects(self, conn, subsel=None):
    h = self.rtree.data
    max_depth = conn.scalar(sa.select([saf.max(h.c.depth)]))

    subtab = sa.select([h.c.child, saf.max(h.c.depth).label("lvl")])\
               .group_by(h.c.child).alias()

    # Process relations by hierarchy, starting with the highest depth.
    # This guarantees that the geometry of member relations is already
    # available for processing the relation geometry.
    if max_depth is not None:
        for level in range(max_depth, 1, -1):
            subset = self.rels.data.select()\
                         .where(subtab.c.lvl == level)\
                         .where(self.rels.c.id == subtab.c.child)
            if subsel is not None:
                subset = subset.where(subsel)
            self.insert_objects(conn, subset)

    # Lastly, process all routes that are nobody's child.
    subset = self.rels.data.select()\
                 .where(self.rels.c.id.notin_(
                     sa.select([h.c.child], distinct=True).as_scalar()))
    if subsel is not None:
        subset = subset.where(subsel)
    self.insert_objects(conn, subset)
def get_grade_info(self, all_snapshots):
    grade = all_snapshots.c.grade
    graduation_date = all_snapshots.c.graduation_date
    school_year = all_snapshots.c.school_year
    status = all_snapshots.c.status
    student_lookup = all_snapshots.c.student_lookup
    withdraw_reason = all_snapshots.c.withdraw_reason

    end_grade = sql.case([(db_func.max(grade) > 12, 12)],
                         else_=db_func.max(grade))
    start_grade = sql.case([(db_func.min(grade) > 12, 12)],
                           else_=db_func.min(grade))

    return \
        sql.select([
            student_lookup,
            (end_grade - start_grade).label('num_grades'),
            (db_func.max(school_year) - db_func.min(school_year)).label('num_hs_years'),
            start_grade.label('start_grade'),
            end_grade.label('end_grade'),
            db_func.min(school_year).label('start_year'),
            db_func.max(school_year).label('end_year'),
            db_func.min(graduation_date).label('graduation_date'),
            db_func.array_agg(sql.distinct(status)).label('statuses'),
            db_func.array_agg(sql.distinct(withdraw_reason)).label('withdraw_reasons'),
            db_func.array_agg(sql.distinct(sql.func.substr(withdraw_reason, 1, 7))).label('withdraw_reasons_short'),
        ]).\
        where(
            grade >= 9
        ).\
        group_by(
            student_lookup
        )
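# Hedged side note, not from the original project: where the backend supports
# GREATEST/LEAST (e.g. PostgreSQL), the case() clamp above can be written more
# compactly. Minimal self-contained sketch; table and column names are
# illustrative assumptions.
import sqlalchemy as sa

metadata = sa.MetaData()
snapshots = sa.Table('snapshots', metadata,
                     sa.Column('student_lookup', sa.Integer),
                     sa.Column('grade', sa.Integer))

# LEAST(MAX(grade), 12) caps the aggregate at 12 in a single expression.
end_grade = sa.func.least(sa.func.max(snapshots.c.grade), 12)
query = sa.select([snapshots.c.student_lookup, end_grade.label('end_grade')])\
          .group_by(snapshots.c.student_lookup)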
def handle_save(self):
    data, errors = self.extractData()
    journal_note = data.pop('note')
    now = datetime.now()
    if errors:
        self.flash(_(u'Es ist ein Fehler aufgetreten!'))
        return FAILURE
    number = data.get('number', 0)
    if number:
        session = get_session('ukhvoucher')
        try:
            from sqlalchemy.sql.functions import max
            oid = int(session.query(max(Voucher.oid)).one()[0]) + 1
        except:
            oid = 100000
        from ukhvoucher.models import Generation
        import json
        p = int(session.query(max(Generation.oid)).one()[0] or 0) + 1
        generation = Generation(
            oid=p,
            date=now.strftime('%Y-%m-%d'),
            type=data['kategorie'],
            data=json.dumps('Manuelle Erzeugung'),
            user=self.request.principal.id,
            uoid=oid
        )
        for idx in range(number):
            voucher = Voucher(
                creation_date=datetime.now().strftime('%Y-%m-%d'),
                status=CREATED,
                cat=data['kategorie'],
                user_id=self.context.oid,
                generation_id=p,
                oid=oid)
            oid += 1
            session.add(voucher)
        session.add(generation)

        # journalize
        entry = JournalEntry(
            date=datetime.now().strftime('%Y-%m-%d'),
            userid=self.request.principal.id,
            action=u"Berechtigungsscheine manuell erstellt",
            #action=u"Add:%s" % self.context.model.__label__,
            oid=str(self.context.oid),
            note=journal_note)
        session.add(entry)

        # redirect
        self.flash(_(u"%s Berechtigungsscheine erstellt" % number))
        self.redirect(self.application_url())
        return SUCCESS
    else:
        self.flash(_(u"The demand must be for at least 1 voucher."))
        self.redirect(self.url(self.context))
        return FAILURE
def impacted_hls(self, *args):
    """
    Returns a query over the impacted high-level services.

    @param args: List of elements to fetch in the query.
    @type args: A C{DeclarativeBase} or a list of C{Column}s.
    @return: A C{Query} over the requested elements.
    @rtype: C{sqlalchemy.orm.query.Query}
    """
    from vigilo.models.tables import HighLevelService, \
                                        ImpactedHLS, ImpactedPath

    if not args:
        args = [HighLevelService]

    imp_hls1 = aliased(ImpactedHLS)
    imp_hls2 = aliased(ImpactedHLS)

    subquery = DBSession.query(
        functions.max(imp_hls1.distance).label('distance'),
        imp_hls1.idpath
    ).join(
        (ImpactedPath, ImpactedPath.idpath == imp_hls1.idpath)
    ).filter(ImpactedPath.idsupitem == self.idsupitem
    ).group_by(imp_hls1.idpath).subquery()

    services_query = DBSession.query(*args).distinct(
    ).join(
        (imp_hls2, HighLevelService.idservice == imp_hls2.idhls),
        (subquery, subquery.c.idpath == imp_hls2.idpath),
    ).filter(imp_hls2.distance == subquery.c.distance)

    return services_query
def clean_stale_tasks():
    from maproulette.models import db, Task, Action
    from sqlalchemy.sql.functions import max
    from datetime import datetime, timedelta
    import pytz

    current_time = datetime.now(pytz.utc)
    stale_threshold = current_time - timedelta(hours=1)
    counter = 0
    for task in (
            db.session.query(Task)
            .filter(Task.currentaction.in_(["assigned", "editing"]))
            .join(Task.actions)
            .group_by(Task.id)
            .having(max(Action.timestamp) < stale_threshold)
            .all()):
        task.append_action(Action("available"))
        db.session.add(task)
        print("setting task %s to available" % task.identifier)
        counter += 1
    db.session.commit()
    print("done. %i tasks made available" % counter)
def powa_getstatdata_db():
    base_query = powa_base_statdata_db()
    diffs = get_diffs_forstatdata()
    return (select([column("dbid")] + diffs)
            .select_from(base_query)
            .group_by(column("dbid"))
            .having(max(column("calls")) - min(column("calls")) > 0))
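# Hedged sketch of the counter-diff pattern used above (PoWA samples
# monotonically increasing counters, so MAX - MIN over a time window gives the
# activity within that window). Table and column names are illustrative
# assumptions, not from the original source.
from sqlalchemy import Column, Integer, MetaData, String, Table, select
from sqlalchemy.sql.functions import max, min

metadata = MetaData()
samples = Table('samples', metadata,
                Column('dbid', String),
                Column('calls', Integer))

# Keep only databases whose call counter actually moved within the window.
activity = (select([samples.c.dbid,
                    (max(samples.c.calls) - min(samples.c.calls)).label('calls')])
            .group_by(samples.c.dbid)
            .having(max(samples.c.calls) - min(samples.c.calls) > 0))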
def _build_rating_query(session):
    passed_answers_query = session.query(
        Answer.player_id,
        count('*').label('points'),
        sum_(Answer.tries).label('tries'),
        max(Answer.answer_time).label('last_answer_time')).filter(Answer.passed == True) \
        .group_by(Answer.player_id).subquery()

    hint_count_query = session.query(
        Hint.player_id,
        count('*').label('hint_count')).group_by(Hint.player_id).subquery()

    position_field = dense_rank().over(order_by=[
        passed_answers_query.c.points.desc().nullslast(),
        passed_answers_query.c.tries.nullslast(),
        hint_count_query.c.hint_count.nullsfirst(),
        passed_answers_query.c.last_answer_time.nullslast()
    ]).label('position')

    return position_field, session.query(position_field,
                                         Player,
                                         passed_answers_query.c.points,
                                         passed_answers_query.c.tries,
                                         hint_count_query.c.hint_count,
                                         passed_answers_query.c.last_answer_time) \
        .select_from(Player) \
        .outerjoin(passed_answers_query,
                   Player.player_id == passed_answers_query.c.player_id) \
        .outerjoin(hint_count_query,
                   Player.player_id == hint_count_query.c.player_id) \
        .order_by(position_field)
def testing_function_4(query_module):
    models_module = sqlalchemy_models
    query = query_module.get_query(max(models_module.Author.id))
    rows = query.all()
    result = map(extract_row, rows)
    return str(result)
def dao_get_last_date_template_was_used(template_id, service_id):
    last_date_from_notifications = db.session.query(
        functions.max(Notification.created_at)).filter(
        Notification.service_id == service_id,
        Notification.template_id == template_id,
        Notification.key_type != KEY_TYPE_TEST).scalar()

    if last_date_from_notifications:
        return last_date_from_notifications

    last_date = db.session.query(functions.max(
        FactNotificationStatus.bst_date)).filter(
        FactNotificationStatus.template_id == template_id,
        FactNotificationStatus.key_type != KEY_TYPE_TEST).scalar()

    return last_date
def _nextOrdering(self, blogId):
    '''
    Provides the next ordering.
    '''
    max = self.session().query(fn.max(BlogPostMapped.Order))\
              .filter(BlogPostMapped.Blog == blogId).scalar()
    if max: return max + 1
    return 1
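# Hedged variant of the "next value" idiom above (several snippets in this
# collection use it): scalar() returns None when the table is empty, and
# coalesce() pushes the default into SQL, which also avoids shadowing the
# built-in max. Identifiers are illustrative assumptions, not from the
# original project.
from sqlalchemy import func

def next_ordering(session, BlogPostMapped, blog_id):
    # COALESCE(MAX("Order"), 0) + 1 in a single round trip.
    return session.query(
        func.coalesce(func.max(BlogPostMapped.Order), 0) + 1
    ).filter(BlogPostMapped.Blog == blog_id).scalar()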
def query_for_bulletin_date(engine):
    metadata = sqlalchemy.MetaData(engine)
    with engine.connect() as connection:
        table = sqlalchemy.Table('bitemporal', metadata, autoload=True)
        query = select([max(table.c.bulletin_date)])
        result = connection.execute(query)
        return result.fetchone()[0]
def index():
    s = SESSION()
    """ select user, max(date), count(user) from game
        order by date group by user"""
    qs = s.query(Game.user,
                 max(Game.date).label('recent'),
                 count(Game.user).label('wins'))\
         .filter(Game.date > datetime.now().replace(day=1))\
         .group_by(Game.user).all()
    print(qs)
    return render_template('leaderboard.html', users=qs,
                           game_url=url_for('.new_game'))
def execute(self, message, user, params):

    planet = Planet.load(*params.group(1, 3, 5))
    if planet is None:
        message.reply("No planet with coords %s:%s:%s found" % params.group(1, 3, 5))
        return

    Q = session.query(Scan.scantype, max(Scan.tick), count())
    Q = Q.filter(Scan.planet == planet)
    Q = Q.group_by(Scan.scantype)
    result = Q.all()

    if len(result) < 1:
        message.reply("No scans available on %s:%s:%s" % (planet.x, planet.y, planet.z,))
        return

    prev = []
    for type, latest, number in result:
        prev.append("(%d %s, latest pt%s)" % (number, type, latest,))
    reply = "scans for %s:%s:%s - " % (planet.x, planet.y, planet.z) + ", ".join(prev)
    message.reply(reply)
def scrape_lessons(resume=True, start_id=0):
    if resume:
        last_id = db.session.query(func.max(Lesson.student_id)).all()[0][0]
        if last_id:
            start_id = last_id
    for student in Student.query.filter(Student.id >= start_id).all():
        print(u"{} {} {} {}".format(student.id, student.first_name,
                                    student.last_name, student._class))
        for day in range(5):
            day_width = width / 5
            day_x_left = day * (day_width)
            subdivide_y = 20
            x_values = (day_x_left + int(day_width / 3.2),
                        day_x_left + int(day_width / 2),
                        day_x_left + int(day_width / 1.3))
            print("Day {}".format(day))
            for x in x_values:
                for i in range(subdivide_y):
                    y = i * (height / subdivide_y) + 23
                    click_data = click_basedata.format(
                        school_id=school_id, x=x, y=y, p_id=student.schedule_id)
                    lesson = click(click_data)
                    if lesson:
                        save_lesson(day, student, lesson.text)
def new_text_search_index(self):
    try:
        return self.db_session.query(
            sql_func.max(self.ObservationFact.text_search_index).label(
                'max')).one().max + 1
    except TypeError:
        return 1
def _nextCId(self):
    '''
    Provides the next change Id.
    '''
    max = self.session().query(fn.max(BlogPostMapped.CId)).scalar()
    if max: return max + 1
    return 1
def find_score(filename, id, userid):
    """
    the import here is done inside because otherwise a cyclic import situation
    arises, but as this method runs inside another process the time consumed
    doesn't matter
    :param filename:
    :param id:
    :return:
    """
    from manage import app
    with app.app_context():
        with open(filename) as file:
            length = len(file.read())
        question = Question.query.filter(Question.id == id).first()
        maxS = question.max_score
        print(length, maxS)
        score = ((maxS - length) / maxS) * 100
        if score < 1:
            score = 1
        submission = Submission(user_id=userid, question_id=id,
                                result=True, result_score=score,
                                result_message="Solved")
        db.session.add(submission)
        db.session.commit()
        db.create_all()
        all_submissions = db.session.query(
            functions.max(Submission.result_score)).filter(
            Submission.user_id == userid).group_by(
            Submission.question_id).all()
        user = User.query.filter(User.id == userid).first()
        user.total_score = sum(x[0] for x in all_submissions)
        db.session.commit()
        print("done")
def fetch_tickers(self, time, *, include_expired=False):

    session = self.__session()

    try:
        latest = session.query(
            Ticker.tk_site,
            Ticker.tk_code,
            functions.max(Ticker.tk_time).label('tk_time')).filter(
            Ticker.tk_time <= time,
            or_(
                and_(Ticker.tk_ask.isnot(None), Ticker.tk_ask != self._ZERO),
                and_(Ticker.tk_bid.isnot(None), Ticker.tk_bid != self._ZERO),
                and_(Ticker.tk_ltp.isnot(None), Ticker.tk_ltp != self._ZERO),
            )).group_by(
            Ticker.tk_site,
            Ticker.tk_code,
        ).subquery()

        inst = aliased(Evaluation, name='ev_inst')
        fund = aliased(Evaluation, name='ev_fund')

        results = session.query(Ticker, Product, inst, fund).join(
            latest,
            and_(
                Ticker.tk_site == latest.c.tk_site,
                Ticker.tk_code == latest.c.tk_code,
                Ticker.tk_time == latest.c.tk_time,
            )).join(
            Product,
            and_(
                Product.pr_site == Ticker.tk_site,
                Product.pr_code == Ticker.tk_code,
                or_(
                    Product.pr_expr.is_(None),
                    Product.pr_expr >= time,
                    include_expired,
                ),
            )).outerjoin(
            inst,
            and_(
                inst.ev_site == Product.pr_site,
                inst.ev_unit == Product.pr_inst,
            )).outerjoin(
            fund,
            and_(
                fund.ev_site == Product.pr_site,
                fund.ev_unit == Product.pr_fund,
            )).all()
    finally:
        session.close()

    dto = namedtuple('TickerDto', ('ticker', 'product', 'inst', 'fund'))

    return [dto(*r) for r in results]
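# Hedged sketch (not from the original project) of the "latest row per group"
# pattern the snippet above builds on: take MAX(time) per key in a subquery,
# then join the base table back on the full (key, time) tuple. Table and
# column names are illustrative assumptions.
from sqlalchemy import Column, DateTime, String, and_
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.sql import functions

Base = declarative_base()

class TickerRow(Base):
    __tablename__ = 'tickers'
    site = Column(String, primary_key=True)
    code = Column(String, primary_key=True)
    time = Column(DateTime, primary_key=True)

def fetch_latest(session, cutoff):
    latest = session.query(
        TickerRow.site,
        TickerRow.code,
        functions.max(TickerRow.time).label('time'),
    ).filter(TickerRow.time <= cutoff).group_by(
        TickerRow.site, TickerRow.code).subquery()
    # Joining on (site, code, time) recovers the complete latest rows.
    return session.query(TickerRow).join(latest, and_(
        TickerRow.site == latest.c.site,
        TickerRow.code == latest.c.code,
        TickerRow.time == latest.c.time,
    )).all()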
def caprate(self, attacker=None):
    maxcap = PA.getfloat("roids", "maxcap")
    mincap = PA.getfloat("roids", "mincap")
    if not attacker or not self.value:
        return maxcap
    modifier = (float(self.value) / float(attacker.value)) ** 0.5
    return max(mincap, min(maxcap * modifier, maxcap))
def get_list(cls, **kw):
    # SELECT client.clientid, job_bytes, max_job FROM client
    # LEFT JOIN (SELECT job.clientid, SUM(job.jobbytes) AS job_bytes FROM job
    #            GROUP BY job.clientid) AS vsota ON vsota.clientid = client.clientid
    # LEFT JOIN (SELECT job.clientid, MAX(job.schedtime) AS max_job FROM job
    #            GROUP BY job.clientid) AS last_job ON last_job.clientid = client.clientid;
    sum_stmt = Job.query\
        .with_entities(Job.clientid,
                       func.sum(Job.jobbytes).label('job_sumvolbytes'))\
        .group_by(Job.clientid)\
        .subquery('stmt_sub')
    last_stmt = Job.query\
        .with_entities(Job.clientid,
                       func.max(Job.starttime).label('job_maxschedtime'))\
        .filter(Job.jobstatus == 'T')\
        .group_by(Job.clientid)\
        .subquery('stmt_max')

    objects = cls.query\
        .with_entities(Client, 'job_sumvolbytes', 'job_maxschedtime',
                       func.count(Job.jobid).label('num_jobs'))\
        .outerjoin(Job, Client.clientid == Job.clientid)\
        .outerjoin(sum_stmt, sum_stmt.c.clientid == Client.clientid)\
        .outerjoin(last_stmt, last_stmt.c.clientid == Client.clientid)\
        .group_by(cls, 'job_sumvolbytes', 'job_maxschedtime')\
        .all()

    # ugly hack since sqlite returns strings for job_maxschedtime
    # TODO: report upstream to sqlalchemy
    if DBSession.bind.dialect.name == 'sqlite':
        def convert_datetime(l):
            if l.job_maxschedtime:
                l.job_maxschedtime = datetime.datetime.strptime(
                    l.job_maxschedtime, '%Y-%m-%d %H:%M:%S')
            return l
        objects = map(convert_datetime, objects)
    return objects
def get_top_notebooks(start, end):
    """
    Fetch the most recent notebooks (ordered by reply time, from start to end)
    :param start:
    :param end:
    :return:
    """
    session = Session()
    try:
        # Cross Join
        content_query = session.query(NotebookContent.nid,
                                      f.max(NotebookContent.time).label("t"),
                                      Notebook).\
            group_by(NotebookContent.nid).\
            order_by(sql.desc("t")).\
            filter(Notebook.nid == NotebookContent.nid, Notebook.mode == 2).\
            offset(start).limit(end - start).all()
        notebooks_json = [{
            "id": [nid, notebook.rid],
            "name": notebook.name,
            "desc": notebook.desc,
            "last_reply": str(last_reply)
        } for nid, last_reply, notebook in content_query]
        return dbmsg(data=notebooks_json)
    except:
        traceback.print_exc()
    finally:
        session.close()
def query_ordered(cls):
    # order by most recent last_seen OR key transaction
    newest_date = functions.max(
        functions.coalesce(User.last_seen, 0),
        functions.coalesce(KeyTransaction.start, 0)
    )
    query = Key.query.outerjoin(Key.holder).outerjoin(Key.current_transaction)
    return query.order_by(db.desc(newest_date))
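# Hedged caveat on the snippet above: functions.max with two arguments renders
# as max(a, b), which SQLite accepts as a scalar two-argument max but which
# PostgreSQL and MySQL spell GREATEST. A more portable sketch, reusing the
# models from the snippet above as illustrative assumptions:
from sqlalchemy import func

# GREATEST(COALESCE(last_seen, 0), COALESCE(start, 0)) on backends that have it.
newest_date = func.greatest(
    func.coalesce(User.last_seen, 0),
    func.coalesce(KeyTransaction.start, 0),
)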
def expiry_date_expression(self) -> Function:
    return coalesce(
        max(self.table.c.sat_load_dt).over(
            partition_by=self.columns_in_table(self.table, self.parent.key_columns),
            order_by=self.table.c.sat_load_dt,
            rows=(1, 1)),
        literal_column("CAST('9999-12-31 00:00' AS DATE)"))
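# Hedged sketch of the windowed MAX used above: .over() turns the aggregate
# into a window function, and rows=(1, 1) frames exactly the next row in the
# partition (ROWS BETWEEN 1 FOLLOWING AND 1 FOLLOWING), i.e. the following
# load date. Table and column names are illustrative assumptions.
from sqlalchemy import Column, DateTime, MetaData, String, Table, select
from sqlalchemy.sql.functions import max

metadata = MetaData()
sat = Table('satellite', metadata,
            Column('key', String),
            Column('load_dt', DateTime))

# For each row: the load_dt of the next row for the same key, NULL for the
# most recent row.
next_load = max(sat.c.load_dt).over(
    partition_by=sat.c.key,
    order_by=sat.c.load_dt,
    rows=(1, 1))
query = select([sat.c.key, sat.c.load_dt, next_load.label('expiry_dt')])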
def index():
    if current_user.is_authenticated():
        all_questions = Question.query.all()
        all_submissions = db.session.query(
            Submission.question_id,
            functions.max(Submission.result_score),
            Submission.result_message,
            Submission.result).filter(
            Submission.user_id == current_user.id).group_by(
            Submission.question_id).all()
        return render_template("index.html", all_quest=all_questions,
                               allsubmission=all_submissions)
    flash("you need to login to see the questions", category="warning")
    return redirect(url_for("auth.login"))
def lessonCompletedRead():
    sq_attempts = \
        db.session.query(
            Rs_student_course_enrol.course_index.label('course_index'),
            QuizAttempt.student_id.label('student_id'),
            QuizAttempt.quiz_id.label('quiz_id'),
            max(QuizAttempt.score).label('score')
        )\
        .select_from(Rs_student_course_enrol)\
        .outerjoin(QuizAttempt,
                   QuizAttempt.student_id == Rs_student_course_enrol.student_id)\
        .group_by(
            Rs_student_course_enrol.course_index,
            QuizAttempt.student_id,
            QuizAttempt.quiz_id
        )\
        .subquery()

    query_results = \
        db.session.query(
            Topic.id.label('topic_id'),
            Topic.name.label('topic_name'),
            Lesson.id.label('lesson_id'),
            Lesson.name.label('lesson_name'),
            Quiz.id.label('quiz_id'),
            Quiz.name.label('quiz_name'),
            sq_attempts.c.course_index.label('course_index'),
            sq_attempts.c.student_id.label('student_id'),
            sq_attempts.c.score.label('score'),
            count(Question.id).label('count_questions')
        )\
        .select_from(Topic)\
        .outerjoin(Lesson)\
        .outerjoin(Rs_lesson_quiz_contain)\
        .outerjoin(Quiz)\
        .outerjoin(sq_attempts, Quiz.id == sq_attempts.c.quiz_id)\
        .outerjoin(Rs_quiz_question_contain)\
        .outerjoin(Question)\
        .group_by(
            Topic.id, Topic.name,
            Lesson.id, Lesson.name,
            Quiz.id, Quiz.name,
            sq_attempts.c.course_index,
            sq_attempts.c.student_id,
            sq_attempts.c.score
        )\
        .order_by(
            asc(Topic.id),
            asc(Lesson.id),
            asc(Quiz.id),
            asc(sq_attempts.c.course_index),
            asc(sq_attempts.c.student_id)
        )

    return query_results.all()
def get_stale_assigned_tasks():
    """returns all assigned tasks that are stale"""
    # select t.id from tasks t, actions a where
    # a.task_id = t.id and t.currentaction = 'assigned'
    # group by t.id having now() - max(a.timestamp) < interval '1 day';
    return db.session.query(Task).filter_by(
        currentaction='assigned').join(Task.actions).group_by(
        Task.id).having(max(Action.timestamp) > stale_threshold).all()
def getInvoiceId():
    from ukhvoucher.models import Invoice
    from sqlalchemy.sql.functions import max
    session = get_session('ukhvoucher')
    try:
        oid = int(session.query(max(Invoice.oid)).one()[0]) + 1
    except:
        oid = 100000
    return unicode(oid)
def _setup_next_sequence(cls, *args, **kwargs):
    """Compute the next available PK, based on the 'pk' database field."""
    session = cls.FACTORY_SESSION
    model = cls.FACTORY_FOR
    pk = getattr(model, model.__mapper__.primary_key[0].name)
    max_pk = session.query(max(pk)).one()[0]
    if isinstance(max_pk, int):
        return max_pk + 1 if max_pk else 1
    else:
        return 1
def fetch_transactions(self, start_time, end_time):

    session = self.__session()

    try:
        transactions = session.query(
            Transaction.tx_site,
            Transaction.tx_code,
            functions.count(Transaction.tx_time).label('tx_size'),
            functions.min(Transaction.tx_time).label('tx_time_min'),
            functions.max(Transaction.tx_time).label('tx_time_max'),
            functions.sum(Transaction.tx_inst).label('tx_net_inst'),
            functions.sum(Transaction.tx_fund).label('tx_net_fund'),
            functions.sum(func.abs(Transaction.tx_inst)).label('tx_grs_inst'),
            functions.sum(func.abs(Transaction.tx_fund)).label('tx_grs_fund'),
        ).filter(
            Transaction.tx_time >= start_time,
            Transaction.tx_time < end_time
        ).group_by(
            Transaction.tx_site,
            Transaction.tx_code,
        ).subquery()

        inst = aliased(Evaluation, name='ev_inst')
        fund = aliased(Evaluation, name='ev_fund')

        results = session.query(transactions, Product, inst, fund).join(
            Product,
            and_(
                Product.pr_site == transactions.c.tx_site,
                Product.pr_code == transactions.c.tx_code,
            )).outerjoin(
            inst,
            and_(
                inst.ev_site == Product.pr_site,
                inst.ev_unit == Product.pr_inst,
            )).outerjoin(
            fund,
            and_(
                fund.ev_site == Product.pr_site,
                fund.ev_unit == Product.pr_fund,
            )).all()
    finally:
        session.close()

    dto = namedtuple(
        'TransactionDto',
        ('tx_site', 'tx_code', 'tx_size', 'tx_time_min', 'tx_time_max',
         'tx_net_inst', 'tx_net_fund', 'tx_grs_inst', 'tx_grs_fund',
         'product', 'ev_inst', 'ev_fund'))

    return [dto(*r) for r in results]
def on_get(self, req, resp):
    dates = (self.db_session.query(
        functions.min(ActivityLog.local_time),
        functions.max(ActivityLog.local_time),
    ).all())
    resp.body = WorkdayPublicSchema().dumps({
        'min_date': dates[0][0],
        'max_date': dates[0][1],
    })
def _(context: DefaultExecutionContext):
    result = context.root_connection.execute(  # type: ignore
        sa.select([
            sql_func.coalesce(sql_func.max(class_.serial), 0) + 1
        ]).where(
            class_.client_id == context.get_current_parameters()["client_id"]  # type: ignore
        ))
    try:
        return result.fetchone()[0]
    finally:
        result.close()
def stats(self):
    """
    select host, count(pk), min(created), max(created)
    from responses group by host;
    """
    q = select([
        responses.c.host.label('host'),
        functions.count(responses.c.pk).label('amount'),
        functions.min(responses.c.created),
        functions.max(responses.c.created),
    ]).group_by('host').order_by(desc('amount'))
    return self.db.execute(q).fetchall()
def insert(form, amount):
    now = datetime.datetime.now()
    principal = form.request.principal
    session = get_session('ukhvoucher')
    kat = form._iface.getName()
    cat_vouchers = principal.getVouchers(cat=kat)
    if len(cat_vouchers) > 0:
        form.flash(u'Die Berechtigungsscheine wurde für diese Kategorie bereits erzeugt.',
                   type="info")
        url = form.application_url()
        return SuccessMarker('Success', True, url=url)
    try:
        oid = int(session.query(max(Voucher.oid)).one()[0]) + 1
    except:
        oid = 100000
    try:
        p = int(session.query(max(Generation.oid)).one()[0]) + 1
    except:
        p = 1
    generation = Generation(
        oid=p,
        date=now.strftime('%Y-%m-%d'),
        type=form._iface.getName(),
        data=json.dumps(data),
        user=principal.id,
        uoid=oid
    )
    for i in range(amount):
        oid += 1
        voucher = Voucher(
            oid=oid,
            creation_date=now.strftime('%Y-%m-%d'),
            status=CREATED,
            cat=form._iface.getName(),
            user_id=principal.oid,
            generation_id=p,
        )
        session.add(voucher)
    session.add(generation)
def powa_getwaitdata_db():
    base_query = powa_base_waitdata_db()
    return (select([
                column("dbid"),
                column("event_type"),
                column("event"),
                diff("count")
            ])
            .select_from(base_query)
            .group_by(column("dbid"), column("event_type"), column("event"))
            .having(max(column("count")) - min(column("count")) > 0))
def process_elos(self, session, game_type_cd=None):
    if game_type_cd is None:
        game_type_cd = self.game_type_cd

    # we do not have the actual duration of the game, so use the
    # maximum alivetime of the players instead
    duration = 0
    for d in session.query(sfunc.max(PlayerGameStat.alivetime)).\
            filter(PlayerGameStat.game_id == self.game_id).\
            one():
        duration = d.seconds

    scores = {}
    alivetimes = {}
    for (p, s, a) in session.query(PlayerGameStat.player_id,
                                   PlayerGameStat.score,
                                   PlayerGameStat.alivetime).\
            filter(PlayerGameStat.game_id == self.game_id).\
            filter(PlayerGameStat.alivetime > timedelta(seconds=0)).\
            filter(PlayerGameStat.player_id > 2).\
            all():
        # scores are per second
        scores[p] = s / float(a.seconds)
        alivetimes[p] = a.seconds

    player_ids = scores.keys()

    elos = {}
    for e in session.query(PlayerElo).\
            filter(PlayerElo.player_id.in_(player_ids)).\
            filter(PlayerElo.game_type_cd == game_type_cd).all():
        elos[e.player_id] = e

    # ensure that all player_ids have an elo record
    for pid in player_ids:
        if pid not in elos.keys():
            elos[pid] = PlayerElo(pid, game_type_cd)

    for pid in player_ids:
        elos[pid].k = KREDUCTION.eval(elos[pid].games, alivetimes[pid], duration)
        if elos[pid].k == 0:
            del(elos[pid])
            del(scores[pid])
            del(alivetimes[pid])

    elos = self.update_elos(elos, scores, ELOPARMS)

    # add the elos to the session for committing
    for e in elos:
        session.add(elos[e])

    if game_type_cd == 'duel':
        self.process_elos(session, "dm")
def fetch_positions(self, time):

    session = self.__session()

    try:
        latest = session.query(
            Position.ps_site,
            Position.ps_code,
            functions.max(Position.ps_time).label('ps_time')).filter(
            Position.ps_time <= time).group_by(
            Position.ps_site,
            Position.ps_code,
        ).subquery()

        inst = aliased(Evaluation, name='ev_inst')
        fund = aliased(Evaluation, name='ev_fund')

        results = session.query(Position, Product, inst, fund).join(
            latest,
            and_(
                Position.ps_site == latest.c.ps_site,
                Position.ps_code == latest.c.ps_code,
                Position.ps_time == latest.c.ps_time,
            )).join(
            Product,
            and_(
                Product.pr_site == Position.ps_site,
                Product.pr_code == Position.ps_code,
                or_(
                    Product.pr_expr.is_(None),
                    Product.pr_expr >= time,
                ),
            )).outerjoin(
            inst,
            and_(
                inst.ev_site == Product.pr_site,
                inst.ev_unit == Product.pr_inst,
            )).outerjoin(
            fund,
            and_(
                fund.ev_site == Product.pr_site,
                fund.ev_unit == Product.pr_fund,
            )).all()
    finally:
        session.close()

    dto = namedtuple('PositionDto', ('position', 'product', 'inst', 'fund'))

    return [dto(*r) for r in results]
def __init__(self):
    """Find the date range and instantiate the data dictionary."""
    connect_to_db(app)
    self.min_year = db.session.query(
        cast(func.min(extract('year', MedicalCall.received_dttm)), Integer)
    ).scalar()
    self.max_year = db.session.query(
        cast(func.max(extract('year', MedicalCall.received_dttm)), Integer)
    ).scalar()
    self.data = {}
def update(self, engine):
    firstid = self.segment_table.first_new_id

    with engine.begin() as conn:
        # delete any objects that might have been deleted
        # Note: a relation also might get deleted from this table
        #       because it lost its relevant tags.
        conn.execute(self.data.delete().where(
            self.id_column.in_(self.src.select_modify_delete())))

        # Collect all changed relations in a temporary table
        sel = select([sqlf.func.unnest(self.segment_table.data.c.rels).label("id")],
                     distinct=True)\
                .where(self.segment_table.data.c.id >= firstid)
        if self.hierarchy_table is not None:
            sel = select([self.hierarchy_table.data.c.parent], distinct=True)\
                    .where(self.hierarchy_table.data.c.child.in_(
                        sel.union(self.src.select_add_modify()))).alias()
            hmax = self.hierarchy_table.data.alias()
            crosstab = select([hmax.c.child, sqlf.max(hmax.c.depth).label("lvl")])\
                         .group_by(hmax.c.child).alias()
            sel = select([sel.c.parent.label("id"), crosstab.c.lvl])\
                    .where(sel.c.parent == crosstab.c.child)

        conn.execute('DROP TABLE IF EXISTS __tmp_osgende_routes_updaterels')
        conn.execute(CreateTableAs('__tmp_osgende_routes_updaterels', sel,
                                   temporary=False))
        tmp_rels = Table('__tmp_osgende_routes_updaterels', MetaData(),
                         autoload_with=conn)

        conn.execute(self.data.delete()
                     .where(self.id_column.in_(select([tmp_rels.c.id]))))

    # reinsert those that are not deleted
    if self.hierarchy_table is None:
        inssel = self.src.select_all(self.src.data.c.id.in_(tmp_rels.select()))
        self.insert_objects(engine, inssel)
    else:
        for level in range(6, 0, -1):
            where = self.src.data.c.id.in_(
                select([tmp_rels.c.id]).where(tmp_rels.c.lvl == level))
            self.insert_objects(engine, self.src.select_all(where))

    # drop the temporary table
    tmp_rels.drop(engine)
def progressRead(student_id):
    sq_attempts = \
        db.session.query(
            QuizAttempt.student_id,
            QuizAttempt.quiz_id,
            max(QuizAttempt.score).label('score')
        )\
        .filter_by(student_id=student_id)\
        .group_by(
            QuizAttempt.student_id,
            QuizAttempt.quiz_id
        )\
        .subquery()

    query_results = \
        db.session.query(
            Topic.id.label('topic_id'),
            Topic.name.label('topic_name'),
            Lesson.id.label('lesson_id'),
            Lesson.name.label('lesson_name'),
            Quiz.id.label('quiz_id'),
            Quiz.name.label('quiz_name'),
            sq_attempts.c.student_id.label('sq_attempts_student_id'),
            coalesce(sq_attempts.c.score, 0).label('sq_attempts_score'),
            count(Question.id).label('count_questions')
        )\
        .select_from(Topic)\
        .outerjoin(Lesson)\
        .outerjoin(Rs_lesson_quiz_contain)\
        .outerjoin(Quiz)\
        .outerjoin(sq_attempts, Quiz.id == sq_attempts.c.quiz_id)\
        .outerjoin(Rs_quiz_question_contain)\
        .outerjoin(Question)\
        .group_by(
            Topic.id, Topic.name,
            Lesson.id, Lesson.name,
            Quiz.id, Quiz.name,
            sq_attempts.c.student_id,
            sq_attempts.c.score
        )\
        .order_by(
            asc(Topic.id),
            asc(Lesson.id),
            asc(Quiz.id)
        )

    return query_results.all()
def update(self, engine):
    firstid = self.segment_table.first_new_id

    with engine.begin() as conn:
        # delete any objects that might have been deleted
        # Note: a relation also might get deleted from this table
        #       because it lost its relevant tags.
        conn.execute(self.data.delete().where(
            self.id_column.in_(self.src.select_modify_delete())))

        # Collect all changed relations in a temporary table
        sel = select([sqlf.func.unnest(self.segment_table.data.c.rels).label("id")],
                     distinct=True)\
                .where(self.segment_table.data.c.id >= firstid)
        if self.hierarchy_table is not None:
            sel = select([self.hierarchy_table.data.c.parent], distinct=True)\
                    .where(self.hierarchy_table.data.c.child.in_(
                        sel.union(self.src.select_add_modify()))).alias()
            hmax = self.hierarchy_table.data.alias()
            crosstab = select([hmax.c.child, sqlf.max(hmax.c.depth).label("lvl")])\
                         .group_by(hmax.c.child).alias()
            sel = select([sel.c.parent.label("id"), crosstab.c.lvl])\
                    .where(sel.c.parent == crosstab.c.child)

        conn.execute('DROP TABLE IF EXISTS __tmp_osgende_routes_updaterels')
        conn.execute(CreateTableAs('__tmp_osgende_routes_updaterels', sel,
                                   temporary=False))
        tmp_rels = Table('__tmp_osgende_routes_updaterels', MetaData(),
                         autoload_with=conn)

        conn.execute(self.data.delete()
                     .where(self.id_column.in_(select([tmp_rels.c.id]))))

    # reinsert those that are not deleted
    w = self.segment_table.osmtables.way.data
    self._stm_ways = select([w.c.nodes]).where(w.c.id == bindparam('id'))\
                       .compile(engine)

    if self.hierarchy_table is None:
        inssel = self.src.select_all(self.src.data.c.id.in_(tmp_rels.select()))
        self.insert_objects(engine, inssel)
    else:
        for level in range(6, 0, -1):
            where = self.src.data.c.id.in_(
                select([tmp_rels.c.id]).where(tmp_rels.c.lvl == level))
            self.insert_objects(engine, self.src.select_all(where))

    # drop the temporary table
    tmp_rels.drop(engine)
def get_end_year(self, high_school_gpa):
    student_lookup = high_school_gpa.c.student_lookup
    school_year = high_school_gpa.c.school_year
    num_classes = high_school_gpa.c.num_classes

    return \
        sql.select([
            student_lookup,
            db_func.max(school_year).label('end_year'),
            db_func.sum(num_classes).label('total_classes'),
        ]).\
        group_by(
            student_lookup
        )
def add(self, name, password, _crypt_strength=None):
    if self.name_taken(name):
        raise ValueError('Name already exists')
    if _crypt_strength is None:
        salt = bcrypt.gensalt()
    else:
        salt = bcrypt.gensalt(_crypt_strength)
    db = self.backend._db
    max_id = (db.query(functions.max(self.item_table.id)).one()[0] or 0)
    user = self.item_table(
        id=max_id + 1,
        name=name,
        normalized_name=make_identifier(name),
        password=bcrypt.hashpw(password, salt),
        joined_at=datetime.utcnow(),
    )
    db.add(user)
    db.flush()
    return self.item_class(self.backend, user)
def construct(self, engine):
    """ Fill the table in the hierarchical order in which relations
        appear in the hierarchy table, if applicable.

        This means that we can rely on all subrelations already
        being computed.
    """
    self.truncate(engine)

    w = self.segment_table.osmtables.way.data
    self._stm_ways = select([w.c.nodes]).where(w.c.id == bindparam('id'))\
                       .compile(engine)

    if self.hierarchy_table is None:
        self.insert_objects(engine, self.src.select_all(self.subset))
    else:
        h = self.hierarchy_table.data
        subtab = select([h.c.child, sqlf.max(h.c.depth).label("lvl")])\
                   .group_by(h.c.child).alias()
        for level in range(6, 0, -1):
            subset = self.src.data.select()\
                         .where(subtab.c.lvl == level)\
                         .where(self.src.data.c.id == subtab.c.child)
            self.insert_objects(engine, subset)
def get(self):
    """ List of all projects """
    opts = PROJECTS_OPTS_PARSER.parse_args()
    filters = PROJECT_FILTERS_PARSER.parse_args()
    filters = clean_attrs(filters)

    query = Project.query
    if not current_user.is_authenticated():
        query = query.filter_by(public=True)

    if opts['order'] == 'recent':
        query = (
            query.
            join(Project.jobs, isouter=True).
            group_by(Project).
            order_by(sql_func.max(Job.create_ts).desc().nullslast())
        )

    if filters:
        query = query.filter(*[
            getattr(Project, field) == value
            for field, value in filters.items()
        ])

    marshaler = dict(items=ALL_LIST_ROOT_FIELDS['items'])
    values = dict(items=query.all())

    args = PROJECT_LIST_PARSER.parse_args()
    if args['meta']:
        marshaler['meta'] = ALL_LIST_ROOT_FIELDS['meta']
        values['meta'] = {'total': query.count()}
        values['meta'].update(Project.get_status_summary(filters))

    if args['latest_job']:
        marshaler['items'] = ITEMS_MARSHALER_LATEST_JOB

    return marshal(values, marshaler)
def diff(var):
    return (max(column(var)) - min(column(var))).label(var)
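# Hedged usage sketch for the diff() helper above; the table and column names
# are illustrative assumptions, not from the original source.
from sqlalchemy.sql import column, select, table

query = (select([column("dbid"), diff("calls"), diff("runtime")])
         .select_from(table("statements_history"))
         .group_by(column("dbid")))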
def __init__(self, read_blocktmp_files=True):
    super(IOLoop, self).__init__()
    self.sock = None
    self.out_queue = Queue.Queue()
    self.waiting_for = {}
    self.stored = {}
    self.max_height = multiprocessing.Value(ctypes.c_ulong, 0)
    self.db_session = db.Session()
    self.db_write_loop = DBWriteLoop(self)
    max_height = self.db_session.query(sql_functions.max(db.Block.depth)).scalar()
    if max_height is not None:
        self.max_height.value = max_height
    self.known_blocks = set(
        block.block_hash
        for block in self.db_session.query(db.Block.block_hash).all()
    )
    self._prev_block_hashes = set()
    if read_blocktmp_files:
        for blktmpfilename in glob.glob('blocktmp/*.rawblk'):
            self.db_write_loop.queue_block(blktmpfilename)
            log.info('Reading blockfile %s', blktmpfilename)
            try:
                with open(blktmpfilename, 'rb') as blktmpfile:
                    data = blktmpfile.read()
            except IOError:
                log.exception('IOError reading blockfile %s', blktmpfilename)
                continue
            (msg, _) = protocol.Message.parse(data)
            assert not _, _
            self.known_blocks.add(msg.block_hash)
            self._prev_block_hashes.add(msg.prev_block_hash)
    self.num_blocks = multiprocessing.Value(ctypes.c_ulonglong,
                                            len(self.known_blocks))
    log.info('Block database starting with %r blocks', self.num_blocks.value)
    self.process_queue = Queue.Queue()
    self.process_thread = None
    self.write_thread = None
    self.read_thread = None
    self.shutdown_event = multiprocessing.Event()
    self._internal_shutdown_event = threading.Event()
    self.message_timeout = MESSAGE_TIMEOUT
    self.ping_timing = SECONDS_BETWEEN_PINGS
    self.last_ping = None
    self.last_pong = None
    self.last_message = None
    self.remote_addr = ('10.0.42.253', 8333)
    self.remote_addr = ('127.0.0.1', 8333)
    local_addr = [
        addrs for i, addrs in
        ((i, [addr for addr in addrs[netifaces.AF_INET] if 'peer' not in addr])
         for i, addrs in ((i, netifaces.ifaddresses(i))
                          for i in netifaces.interfaces())
         if netifaces.AF_INET in addrs)
        if addrs
    ][0][0]
    self.local_addr = local_addr['addr']
    self.local_port = 8334
def _compute_first(self, conn):
    cur_id = conn.scalar(select([sqlf.max(self.data.c.id)]))
    if cur_id is None:
        self.first_new_id = 0
    else:
        self.first_new_id = cur_id + 1
def obtener_dodcod(sesion):
    dodcod = sesion.query(max(Dod.dodcod)).scalar()
    if dodcod is None:
        dodcod = 0
    return dodcod
def run(settings, options):
    '''
    run data ingestion process for a maximum of the last 6 months of data.

    Param: datetime object or None.
        If None, ingestion runs on the latest available file in cache_dir.
        If present, ingestion runs on the file with the provided YYYY-MM-DD
        in its filename.
    '''
    cache_dir = settings['cache.dir'] + "/gcp"
    changed = []
    if 'nocacheupdate' not in options:
        changed = update_file_cache(settings)

    if 'rundate' in options:
        # GCP puts data for a given date inside of files labeled for
        # $date, $date - 1-day and $date + 1-day.
        # So, we scan all three files for relevant data needing to be reset.
        rundate = datetime.strptime(options['rundate'], '%Y-%m-%d')
        filename = date_to_filename(rundate)

        runbefore = rundate + relativedelta(days=-1)
        filebefore = date_to_filename(runbefore)

        runafter = rundate + relativedelta(days=1)
        fileafter = date_to_filename(runafter)

        LOG.info("Deleting records with start-date: %s", options['rundate'])
        # delete any existing records and re-ingest
        DBSession.query(GcpLineItem
                        ).filter(GcpLineItem.start_time == options['rundate']
                        ).delete()

        insert_data(filebefore, cache_dir, rundate=rundate)
        insert_data(filename, cache_dir, rundate=rundate)
        insert_data(fileafter, cache_dir, rundate=rundate)
    else:
        # check last insert date, then do import here.
        last_insert, = DBSession.query(functions.max(GcpLineItem.end_time)).one()
        if not last_insert:
            # only import the last 6 months of data, maximum.
            last_insert = datetime.today() - relativedelta(months=7)
        LOG.debug("Last insert: %s", last_insert)

        for filename in os.listdir(cache_dir):
            if filename == 'etags.json':
                continue
            file_date = filename_to_date(filename)
            if not file_date:
                LOG.warn("Skipping %s", filename)
                continue
            if file_date > last_insert:
                insert_data(filename, cache_dir)
                # don't insert the same data twice.
                if filename in changed:
                    changed.pop(changed.index(filename))

        for filename in changed:
            fndate = filename_to_date(filename)
            next_day = datetime.today() + relativedelta(days=1)
            # clear out partial data, then re-insert
            DBSession.query(GcpLineItem
                            ).filter(GcpLineItem.start_time.between(fndate, next_day),
                                     GcpLineItem.end_time.between(fndate, next_day)
                            ).delete(synchronize_session='fetch')
            insert_data(filename, cache_dir)
def hosttree(self, parent_id=None, onlytype="", offset=0, noCache=None):
    """
    Renders one level of the host / host group selection tree.

    @param parent_id: identifier of the parent host group
    @type parent_id: C{int} or None
    """
    # If the parent group identifier is not given, return the list
    # of root groups provided by the get_root_host_groups method.
    if parent_id is None:
        return self.get_root_host_groups()

    # TODO: use a validation schema
    parent_id = int(parent_id)
    offset = int(offset)

    # Check whether the parent group belongs to the groups the user
    # has access to, and return an empty list otherwise.
    is_manager = config.is_manager.is_met(request.environ)
    if not is_manager:
        direct_access = False
        user = get_current_user()
        # Compute the distance of this group relative to the groups
        # on which the user explicitly has permissions.
        #
        # The distance is defined as follows:
        # 0   : the user has explicit rights on this group.
        # > 0 : the user has implicit access to the group.
        # < 0 : the user has no access (the group can only be traversed).
        #
        # Two steps are needed to find the distance. The first one tries
        # to find a distance >= 0, the second one a distance <= 0.

        # Positive distance.
        distance = DBSession.query(
            functions.max(GroupHierarchy.hops)
        ).join(
            (Group, Group.idgroup == GroupHierarchy.idparent),
            (DataPermission, DataPermission.idgroup == Group.idgroup),
            (UserGroup, UserGroup.idgroup == DataPermission.idusergroup),
            (USER_GROUP_TABLE, USER_GROUP_TABLE.c.idgroup == \
                UserGroup.idgroup),
        ).filter(USER_GROUP_TABLE.c.username == user.user_name
        ).filter(Group.grouptype == u'supitemgroup'
        ).filter(GroupHierarchy.idchild == parent_id
        ).scalar()

        if distance is None:
            # Negative distance.
            distance = DBSession.query(
                functions.max(GroupHierarchy.hops)
            ).join(
                (Group, Group.idgroup == GroupHierarchy.idchild),
                (DataPermission, DataPermission.idgroup == Group.idgroup),
                (UserGroup, UserGroup.idgroup == DataPermission.idusergroup),
                (USER_GROUP_TABLE, USER_GROUP_TABLE.c.idgroup == \
                    UserGroup.idgroup),
            ).filter(USER_GROUP_TABLE.c.username == user.user_name
            ).filter(Group.grouptype == u'supitemgroup'
            ).filter(GroupHierarchy.idparent == parent_id
            ).scalar()
            if distance is not None:
                distance = -distance

        if distance is None:
            # No access to this group.
            return dict(groups=[], items=[])

        direct_access = distance >= 0

    limit = int(config.get("max_menu_entries", 20))
    result = {"groups": [], "items": []}

    if not onlytype or onlytype == "group":
        # Fetch the list of groups whose parent identifier
        # was passed as a parameter.
        gh1 = aliased(GroupHierarchy, name='gh1')
        gh2 = aliased(GroupHierarchy, name='gh2')

        db_groups = DBSession.query(
            SupItemGroup
        ).options(lazyload('_path_obj')
        ).distinct(
        ).join(
            (gh1, gh1.idchild == SupItemGroup.idgroup),
        ).filter(gh1.hops == 1
        ).filter(gh1.idparent == parent_id
        ).order_by(SupItemGroup.name.asc())

        if not is_manager and not direct_access:
            # Only display the children of the <parent_id> group
            # such that the user has explicit access to one of the
            # children of one of these groups.
            db_groups = db_groups.join(
                (gh2, gh2.idparent == gh1.idchild),
                (DataPermission, DataPermission.idgroup == gh2.idchild),
                (UserGroup, UserGroup.idgroup == DataPermission.idusergroup),
                (USER_GROUP_TABLE, USER_GROUP_TABLE.c.idgroup == UserGroup.idgroup),
            ).filter(USER_GROUP_TABLE.c.username == user.user_name)

        num_children_left = db_groups.count() - offset
        if offset:
            result["continued_from"] = offset
            result["continued_type"] = "group"
        all_groups = db_groups.limit(limit).offset(offset).all()
        for group in all_groups:
            result["groups"].append({
                'id': group.idgroup,
                'name': group.name,
                'type': "group",
            })
        if num_children_left > limit:
            result["groups"].append({
                'name': _("Next %(limit)s") % {"limit": limit},
                'offset': offset + limit,
                'parent_id': parent_id,
                'type': 'continued',
                'for_type': 'group',
            })

    # Fetch the list of hosts belonging to the group whose
    # identifier was passed as a parameter.
    if ((not onlytype or onlytype == "item") and
            (is_manager or direct_access)):
        db_hosts = DBSession.query(
            Host.idhost,
            Host.name,
        ).join(
            (SUPITEM_GROUP_TABLE,
             SUPITEM_GROUP_TABLE.c.idsupitem == Host.idhost),
        ).filter(SUPITEM_GROUP_TABLE.c.idgroup == parent_id
        ).order_by(Host.name.asc())

        num_children_left = db_hosts.count() - offset
        if offset:
            result["continued_from"] = offset
            result["continued_type"] = "item"
        all_hosts = db_hosts.limit(limit).offset(offset).all()
        for host in all_hosts:
            result["items"].append({
                'id': host.idhost,
                'name': host.name,
                'type': "item",
            })
        if num_children_left > limit:
            result["items"].append({
                'name': _("Next %(limit)s") % {"limit": limit},
                'offset': offset + limit,
                'parent_id': parent_id,
                'type': 'continued',
                'for_type': 'item',
            })

    return result
def latest_order_form(cls, user):
    # MAX() always yields exactly one row, so fetch with scalar() and test the
    # value itself (it is None when the user has no forms).
    form_id = DBSession.query(max(cls.id)).filter(cls.issuedBy == user).scalar()
    return DBSession.query(cls).get(form_id) if form_id else None
def get_max_version(cls, pkg_code):
    # scalar() returns None (rather than a one-row list holding NULL) when
    # there are no matching rows, so the fallback to 1 actually triggers.
    version = DBSession.query(max(cls.version)) \
        .filter(cls.packaging_code == pkg_code) \
        .scalar()
    return version if version else 1
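# Hedged closing note on the three result-fetching idioms used across these
# snippets; `session` and `Model` are illustrative assumptions.
from sqlalchemy import func

# 1. scalar(): returns a single value, or None when the aggregate is NULL.
latest = session.query(func.max(Model.version)).scalar()

# 2. one()[0]: raises unless exactly one row comes back, then unpacks it.
latest = session.query(func.max(Model.version)).one()[0]

# 3. all(): note that MAX() without GROUP BY always yields one row, so the
#    result list is never empty and `if rows:` is not an emptiness test --
#    check the value (rows[0][0]) instead.
rows = session.query(func.max(Model.version)).all()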