def mainpage(page=1):
    """Render the blog main page.

    :param page: 1-based page number (string or int); falls back to 1 on
        a non-numeric value.
    :return: rendered ``main_page.html`` with posts, tag counts, month
        archive and a pagination object.
    """
    try:
        page_number = int(page)
    except ValueError:
        page_number = 1

    # Posts for the requested page, newest first.
    posts = Post.query \
        .order_by(Post.pub_date.desc()) \
        .offset((page_number - 1) * POSTS_PER_PAGE) \
        .limit(POSTS_PER_PAGE)

    pagination_query = Post.query.paginate(page_number, POSTS_PER_PAGE)

    # Tag names with how many posts carry each tag, most-used first.
    # The labeled column object is reused for ORDER BY instead of the
    # deprecated textual form order_by('tagged_count desc').
    tagged_count = func.count(Tag.id).label('tagged_count')
    tags_query = db.session.query(Tag.name, tagged_count) \
        .join(Tag.posts) \
        .group_by(Tag.name) \
        .order_by(tagged_count.desc())

    # One row per (year, month) with a post count, latest 12 months.
    month_query = db.session.query(Post.pub_date,
                                   func.count().label('posts_count')) \
        .group_by(func.strftime('%Y', Post.pub_date),
                  func.strftime('%m', Post.pub_date)) \
        .order_by(Post.pub_date.desc()) \
        .limit(12)

    return render_template('main_page.html', posts=posts, tags=tags_query,
                           months=month_query, pagination=pagination_query)
def list_value_grouped_by_year_month(self, group):
    """Return (date, value) rows: the summed expense-item value per
    "YYYY-MM" month for the given group."""
    month_bucket = func.strftime("%Y-%m", Expense.date_created)
    query = (
        Expense.query
        .join(ExpenseItem)
        .filter(Expense.group_id == group.id)
        .group_by(month_bucket)
        .with_entities(month_bucket.label("date"),
                       func.sum(ExpenseItem.value).label("value"))
    )
    return query.all()
def get_drinks_on_date(self, date):
    '''Gets all the drinks drunk on that date.'''
    # Match year, month and day separately against zero-padded strings,
    # newest entries first.
    query = (
        self.session.query(Log)
        .filter(Log.log_type == Log.TYPES['DRINK'])
        .filter(func.strftime('%Y', Log.date) == "%04d" % date.year)
        .filter(func.strftime('%m', Log.date) == "%02d" % date.month)
        .filter(func.strftime('%d', Log.date) == "%02d" % date.day)
        .order_by(Log.date.desc())
    )
    return query.all()
def get_sum(session, start, end=None):
    """Aggregate nutrition consumed between *start* and *end* and return
    reminder tuples when intake lags behind the time-of-day target.

    :param session: SQLAlchemy session.
    :param start: datetime lower bound (inclusive).
    :param end: datetime upper bound; defaults to ``now``.
    :return: list of ``(diff, priority, message, end)`` tuples.
    :raises: ``.one()`` raises NoResultFound/MultipleResultsFound unless
        the window maps to exactly one aggregated day-row.
    """
    if end is None:
        end = datetime.datetime.now()

    # Sum kcal/protein/water over items in the window; only items with a
    # calculated nutrition record count (isnot(None) is the SQLAlchemy
    # idiom for the former `!= None`).
    item = session.query(
            func.sum(FoodNutrition.kcal).label("kcal"),
            func.sum(FoodNutrition.protein).label("protein"),
            func.sum(FoodNutrition.water).label("water")) \
        .join(Item) \
        .filter(Item.time.between(start, end)) \
        .filter(Item.calc_nutrition.isnot(None)) \
        .group_by(func.strftime("%Y-%m-%d", Item.time)) \
        .one()

    print("{} - {}".format(start, end))
    ln = LocalNutrition(kcal=item.kcal, protein=item.protein, water=item.water)
    print("UNTIL NOW:", ln)

    # Fractional hour of day, e.g. 14:30 -> 14.5; removed the unused
    # local `mins` the original computed alongside this.
    hour = end.minute / 60 + end.hour
    diff = display_part(hour, ln)
    print("DIFF:", diff)

    rets = []
    # More than 250 ml of water behind target -> drink reminder.
    if diff.water <= -250:
        rets.append((diff, 0, "PIJ VODO", end))
    # More than 300 kcal behind -> eat reminder; mention protein when it
    # is lagging too.
    if diff.kcal <= -300:
        txt = "JEJ V NASLEDNJE POL URE"
        if diff.protein <= -5:
            txt = "JEJ BELJAKOVINE V NASLEDNJE POL URE"
        rets.append((diff, 1, txt, end))
    return rets
def api_sectors():
    """JSON endpoint: total disbursements per MTEF sector and fiscal year."""
    # Fiscal year runs July-June: shift the transaction date back six
    # months before taking the year.
    fiscal_year = func.strftime(
        '%Y',
        func.date(models.ActivityFinances.transaction_date,
                  'start of month', '-6 month')).label("fiscal_year")
    sector_totals = db.session.query(
        func.sum(models.ActivityFinances.transaction_value).label(
            "total_disbursement"),
        models.CodelistCode.code,
        models.CodelistCode.name,
        fiscal_year,
    ).join(
        models.Activity,
        models.ActivityFinancesCodelistCode,
        models.CodelistCode,
    ).filter(
        models.ActivityFinances.transaction_type == u"D",
        models.ActivityFinancesCodelistCode.codelist_id == u"mtef-sector",
    ).group_by(
        models.CodelistCode.name,
        models.CodelistCode.code,
        "fiscal_year",
    ).all()
    return jsonify(sectors=[
        {
            "name": row.name,
            "value": round(row.total_disbursement, 2),
            "code": row.code,
            "fy": row.fiscal_year,
        }
        for row in sector_totals])
def last_day(self):
    """Build hourly-averaged readings for the trailing 30 hours.

    Populates and returns ``self.last_day_data``: parallel lists of
    temperatures, humidities and quoted US/Eastern hour labels.
    """
    utc = timezone("UTC")
    eastern = timezone('US/Eastern')
    hours = 30
    now = datetime.utcnow()

    # One averaged row per day+hour bucket within the last 30 hours,
    # newest first.
    readings = db.session.query(
            func.round(func.avg(Reading.temperature)),
            func.round(func.avg(Reading.humidity)),
            Reading.timestamp,
        ) \
        .filter_by(channel=self.channel) \
        .filter(Reading.timestamp > func.datetime('now', '-30 hours')) \
        .group_by(func.strftime('%d %H', Reading.timestamp)) \
        .order_by(Reading.timestamp.desc()) \
        .limit(hours) \
        .all()

    # Quoted hour labels ("3PM") in US/Eastern, oldest first after the
    # reverse.
    est = utc.localize(datetime.utcnow()).astimezone(eastern)
    times = ["\"" + (est - timedelta(hours=i)).strftime("%I%p").lstrip('0')
             + "\"" for i in range(hours)]
    times.reverse()

    temps = [0] * hours
    hums = [0] * hours
    for avg_temp, avg_hum, stamp in readings:
        # Map the reading's age (whole hours via self._hours) onto its
        # display slot; slot 0 is the oldest hour.
        slot = hours - self._hours(now - stamp) - 1
        # assumes temperature is stored in tenths of a degree — TODO confirm
        temps[slot] = avg_temp / 10
        hums[slot] = avg_hum

    self.last_day_data = {
        "temperatures": temps,
        "humidities": hums,
        "timestamps": times,
    }
    return self.last_day_data
def dashboard(cls, author=None):
    """
    return dictionary data for dashboard in administrator panel
    :param author: scc/cxw
    :return: dict
    """
    dash = {}
    total_published_article = db.session.query(func.count(Article.id)).\
        filter(Article.author == author, Article.status == "PUBLISHED").one()
    total_unpublished_article = db.session.query(func.count(Article.id)).\
        filter(Article.author == author, Article.status != "PUBLISHED").one()
    total_words = db.session.query(func.sum(func.length(Article.content))).\
        filter(Article.author == author).one()
    total_unpublished_words = \
        db.session.query(func.sum(func.length(Article.content))).\
        filter(Article.author == author, Article.status != "PUBLISHED").one()
    last_article_date = db.session.query(Article.create_date).\
        filter(Article.author == author, Article.status == "PUBLISHED").\
        order_by(desc(Article.create_date)).first()

    # Per-month (YYYY/MM) article count and word count for published posts.
    df1 = db.session.query(func.strftime("%Y/%m", Article.create_date),
                           func.count(Article.id),
                           func.sum(func.length(Article.content))).\
        filter(Article.author == author, Article.status == "PUBLISHED").\
        group_by(func.strftime("%Y/%m", Article.create_date)).all()

    # Flatten comma-separated tag strings into per-tag usage counts.
    df2 = db.session.query(Article.tags).\
        filter(Article.author == author,
               Article.status == "PUBLISHED").all()
    df2_list = list(itertools.chain(*df2))
    df2_list = [x.split(",") for x in df2_list if x is not None]
    df2_list = list(itertools.chain(*df2_list))
    unique_tag = list(set(df2_list))
    tag_counts = [df2_list.count(x) for x in unique_tag]

    dash.update({
        "total_published_article": total_published_article[0],
        "total_unpublished_article": total_unpublished_article[0],
        "total_words": total_words[0],
        "total_unpublished_words": total_unpublished_words[0],
        # Guard: first() returns None when the author has no published
        # article; the original crashed with AttributeError here.
        "last_article": (last_article_date[0].strftime("%Y-%m-%d")
                         if last_article_date else None),
        # Lists instead of map objects: a Python 3 map is a one-shot
        # iterator and is neither JSON- nor template-iteration safe.
        "df_1_month": [x[0] for x in df1],
        "df_1_counts": [x[1] for x in df1],
        "df_1_words": [x[2] for x in df1],
        "df_2_tags": unique_tag,
        "df_2_tag_counts": tag_counts
    })
    return dash
class Stat(dbs.Model):
    """
    This model is the main Stat model.
    For non time consuming development reasons, only One-to-One relations
    are set.
    The created time is set server-side.
    """
    __tablename__ = 'stat'
    # Primary key.
    id = dbs.Column(dbs.Integer, primary_key=True)
    # One-to-one link to the owning Shortcode.
    shortcodeId = dbs.Column(dbs.Integer, dbs.ForeignKey('shortcode.id'))
    shortcode = dbs.relationship('Shortcode', back_populates='stats',
                                 foreign_keys=[shortcodeId])
    # Creation timestamp rendered server-side via strftime ('%f' keeps
    # fractional seconds). NOTE(review): SQLite-specific default — confirm
    # before targeting another backend.
    created = dbs.Column(dbs.DateTime(timezone=True),
                         server_default=func.strftime('%Y-%m-%d %H:%M:%f',
                                                      'now'))
    # Lazily-created one-to-one child holding redirect counters.
    redirect = dbs.relationship('Redirect', back_populates='stat',
                                uselist=False)

    @classmethod
    def get_stats(cls, shortcode):
        """
        This method retrieves the stats for the provided shortcode.

        :param shortcode: The provided shortcode.
        :type shortcode: str
        :return: The fetched stats for the provided shortcode.
        :rtype: dict

        .. note::
            As the Redirect child for a stat is created in a non-greedy
            way, the logic handling for a not existing Redirect is handled
            in this method.

        :raises:
            ShortcodeNotFound: When the provided shortcode does not exist.
        """
        in_use = Shortcode.check_in_use(shortcode=shortcode)
        if in_use is False:
            raise ShortcodeNotFound
        _stat = cls.query.filter(
            cls.shortcode.has(shortcode=shortcode)).first()
        # No Redirect child yet: the shortcode has never been followed.
        if _stat.redirect is None:
            last_redirect = None
            redirect_count = 0
        else:
            last_redirect = _stat.redirect.lastRedirect.isoformat()
            redirect_count = _stat.redirect.redirectCount
        # Keys are the real column names ('created', 'lastRedirect',
        # 'redirectCount').
        return {
            cls.created.name: _stat.created.isoformat(),
            Redirect.lastRedirect.name: last_redirect,
            Redirect.redirectCount.name: redirect_count
        }
def athlets_by_birthdate(sess, birthdate):
    """ Return all athlets as Athlet class close by 'birthdate' """
    # Find the stored birthdate closest to the target, comparing absolute
    # distance in epoch seconds (strftime '%s').
    nearest = sess.query(Athlete.birthdate).order_by(
        func.abs(
            func.strftime('%s', Athlete.birthdate)
            - func.strftime('%s', birthdate)
        )
    ).limit(1).first()
    if not nearest:
        return None
    # Several athletes may share the closest birthdate; return them all.
    return sess.query(Athlete).filter(
        Athlete.birthdate.in_(nearest)).all()
def visit_range(self, node, parents, context):
    """Translate a parsed range node into a SQLAlchemy filter expression.

    Datetime columns accept TODAY/NOW keywords, short hour-of-day integers
    (optionally ``T``-prefixed), or parseable date strings; int/float
    columns accept plain values. ``*`` on either bound is a wildcard.

    :raises Exception: for unsupported column types, or when both bounds
        are wildcards.
    """
    def get_val(val):
        # '*' is a wildcard -> open bound.
        if val == "*":
            return None
        return val

    def get_dt_val(val):
        # Resolve a raw bound string to a date/datetime or an hour int.
        if val is None:
            return None
        if val.upper() == "TODAY":
            return datetime.datetime.now().date()
        elif val.upper() == "NOW":
            return datetime.datetime.now()
        # Short tokens are hour-of-day numbers, optionally 'T'-prefixed
        # (e.g. "T14" or "14"); anything longer is parsed as a date.
        if len(val) <= 3:
            if val[0] == "T":
                return int(val[1:])
            else:
                return int(val)
        else:
            # presumably dateutil's parser — TODO confirm import site
            return date_parse(val, yearfirst=True)

    column = context["column"]
    column_type = context["column"].type.python_type
    low_t = None
    high_t = None
    if column_type == datetime.datetime:
        low_t = get_dt_val(get_val(node.low.value))
        high_t = get_dt_val(get_val(node.high.value))
        # Hour only part: if either bound resolved to an int, compare the
        # hour component of the column instead of the full datetime.
        if type(low_t) is int or type(high_t) is int:
            column = cast(func.strftime("%H", context["column"]), Integer)
    elif column_type == int or column_type == float:
        low_t = get_val(node.low.value)
        high_t = get_val(node.high.value)
    else:
        raise Exception(
            "Range can only be used with Dates/integer/float " +
            "columns not {}".format(column_type))
    if low_t is None and high_t is None:
        raise Exception("Low and High can't be both Wildcards (*)!")
    elif low_t is None:
        # Open low bound: only constrain from above.
        if node.include_high:
            return column <= high_t
        else:
            return column < high_t
    elif high_t is None:
        # Open high bound: only constrain from below.
        if node.include_low:
            return column >= low_t
        else:
            return column > low_t
    return column.between(low_t, high_t)
def by_hour(request, guild_id):
    """Return JSON rows of {hour, count}: message totals per hour of day
    for the given guild, after applying the shared request filters."""
    session = Session()
    agg_hour = func.strftime('%H', Messages.hour, 'unixepoch').label('agg_hour')
    query = (
        session.query(Messages)
        .with_entities(agg_hour, func.sum(Messages.count).label('count'))
        .group_by('agg_hour')
        .order_by('agg_hour')
    )
    query = _filter(query, int(guild_id), request.query)
    data = [{'hour': row.agg_hour, 'count': row.count} for row in query]
    return Response.json(data)
def login_history(self, user_name: str = None) -> list:
    """Return the login history as (name, formatted time, ip) rows.

    :param user_name: optional client name; when given, only that
        client's logins are returned.
    :return: list of result rows
    """
    query = self.session.query(
        User.name,
        func.strftime('%Y-%m-%d %H:%M', History.time),
        History.ip).join(User)
    if user_name:
        query = query.filter(User.name == user_name)
    # .all() already returns a list; the original copied it element by
    # element with a redundant comprehension.
    return query.all()
def graph_pw():
    """ Get stats for graph pw and power.

    :return: list of {'date', 'total_pw'} dicts, one per time bucket.
    """
    qry = db.session.query(
        func.sum(StatsRigs.pw_gpu).label('total_pw'),
        StatsRigs.created_date)
    # Bucket one row per second. The original format string used '%m'
    # (month) and '%s' (unix epoch) where minutes '%M' and seconds '%S'
    # were intended.
    qry = qry.group_by(
        func.strftime("%Y-%m-%d %H-%M-%S", StatsRigs.created_date))
    items = []
    for res in qry.all():
        items.append({
            'date': str(res.created_date.replace(microsecond=0)),
            'total_pw': str(res.total_pw),
        })
    return items
def last_day(self):
    """Hourly-averaged readings for this channel, newest 30 buckets first."""
    rows = (
        db.session.query(
            func.round(func.avg(Reading.temperature)),
            func.round(func.avg(Reading.humidity)),
            Reading.timestamp,
        )
        .filter_by(channel=self.channel)
        .group_by(func.strftime('%H', Reading.timestamp))
        .order_by(Reading.timestamp.desc())
        .limit(30)
        .all()
    )
    result = []
    for temperature, humidity, stamp in rows:
        result.append({
            "temperature": temperature,
            "humidity": humidity,
            "timestamp": stamp,
        })
    return result
def last_day(self):
    """Return up to 30 hourly-averaged readings for this channel,
    newest bucket first, as dicts of temperature/humidity/timestamp."""
    query = db.session.query(
            func.round(func.avg(Reading.temperature)),
            func.round(func.avg(Reading.humidity)),
            Reading.timestamp) \
        .filter_by(channel=self.channel) \
        .group_by(func.strftime('%H', Reading.timestamp)) \
        .order_by(Reading.timestamp.desc()) \
        .limit(30)
    keys = ("temperature", "humidity", "timestamp")
    return [dict(zip(keys, row)) for row in query.all()]
def get_stop_time_matrix():
    """Aggregate stop dwell times into a 168 x n_stops hour-of-week matrix
    and write it to ``../site/stop_time_matrix.csv``.

    NOTE(review): relies on module-level ``trips``, ``conditions`` and
    ``run`` defined elsewhere in this file — confirm their shapes there.
    """
    # Hour-of-week bucket: weekday (strftime '%w', coerced to a number by
    # SQLite arithmetic) * 24 + the hour parsed from the first two chars
    # of the arrival-time string.
    weekhour = func.strftime('%w', trips.c.tapahtumapaiva) * 24 + \
        func.cast(func.substr(trips.c.tuloaika_time, 1, 2), sa.Integer)
    cols = [trips.c.tulopysakki, weekhour,
            func.sum(trips.c.pysakkiaika).label('stop_sum')]
    conds = and_(*conditions)
    groupcols = [trips.c.tulopysakki, weekhour]
    ts = run(cols, conds, groupcols, n_limit=None)
    # Write to a csv file: one column per stop, one row per hour of week
    # (7 days * 24 hours = 168 rows).
    stops = list(set([val[0] for val in ts]))
    stop_map = {v: idx for (idx, v) in enumerate(stops)}
    mat = np.zeros((168, len(stops)))
    for (stop, wh, val) in ts:
        mat[wh, stop_map[stop]] = val
    with open('../site/stop_time_matrix.csv', 'w') as f:
        f.write(",".join(map(str, stops)) + '\n')
        for i in range(mat.shape[0]):
            f.write(",".join(map(str, mat[i, :])) + '\n')
def graph_rig(id_rig):
    """ Get stats for graph pw and power for one rig.

    :param id_rig: rig id used to filter the stats rows.
    :return: list of {'total_pw', 'total_hash', 'date'} dicts per bucket.
    """
    qry = db.session.query(
        func.sum(StatsRigs.pw_gpu).label('total_pw'),
        func.sum(StatsRigs.hash_gpu).label('total_hash'),
        func.sum(StatsRigs.fan_gpu).label('total_fan'),
        func.sum(StatsRigs.temp_gpu).label('total_temp'),
        StatsRigs.created_date)
    # Bucket one row per second. The original format string used '%m'
    # (month) and '%s' (unix epoch) where minutes '%M' and seconds '%S'
    # were intended.
    qry = qry.group_by(
        func.strftime("%Y-%m-%d %H-%M-%S", StatsRigs.created_date))
    items = []
    for res in qry.filter_by(id_rig=id_rig):
        items.append({
            'total_pw': str(res.total_pw),
            'total_hash': str(res.total_hash),
            #'total_fan': str(res.total_fan),
            #'total_temp': str(res.total_temp),
            'date': str(res.created_date.replace(microsecond=0)),
        })
    return items
def precipitation():
    """JSON mapping of measurement date -> precipitation for 2017."""
    rows = session.query(Measurement.date, Measurement.prcp).filter(
        func.strftime("%Y", Measurement.date) == '2017').all()
    return jsonify(dict(rows))
def api_sectors_C_D():
    """JSON endpoint: per-sector totals of commitments (C), disbursements
    (D) and forward-spend projections (FS), keyed by
    ``{domestic_external}_{fiscal_year}_{sector name}``.
    """
    # Actual transactions per MTEF sector & fiscal year (FY = calendar
    # year of the transaction date shifted back six months).
    query = db.session.query(
        func.sum(models.ActivityFinances.transaction_value).label(
            "total_value"),
        models.ActivityFinances.transaction_type,
        models.CodelistCode.code,
        models.CodelistCode.name,
        models.Activity.domestic_external,
        func.strftime(
            '%Y',
            func.date(models.ActivityFinances.transaction_date,
                      'start of month', '-6 month')).label("fiscal_year")
    ).join(
        models.Activity,
        models.ActivityFinancesCodelistCode,
        models.CodelistCode
    ).filter(
        models.CodelistCode.codelist_code == u"mtef-sector",
        models.CodelistCode.name != u""
    ).group_by(
        models.CodelistCode.name,
        models.CodelistCode.code,
        models.ActivityFinances.transaction_type,
        models.Activity.domestic_external,
        "fiscal_year")
    query = qactivity.filter_activities_for_permissions(query)
    sector_totals = query.all()

    # Forward spend (projections), tagged with a literal "FS" type so it
    # merges into the same accumulator as C/D rows.
    fy_query = db.session.query(
        func.sum(models.ActivityForwardSpend.value).label("total_value"),
        sa.sql.expression.literal("FS").label("transaction_type"),
        models.CodelistCode.code,
        models.CodelistCode.name,
        models.Activity.domestic_external,
        func.strftime(
            '%Y',
            func.date(models.ActivityForwardSpend.period_start_date,
                      'start of month', '-6 month')).label("fiscal_year")
    ).join(
        models.Activity,
        models.ActivityCodelistCode,
        models.CodelistCode
    ).filter(
        models.CodelistCode.codelist_code == u"mtef-sector",
        models.CodelistCode.name != u""
    ).group_by(
        models.CodelistCode.name,
        models.CodelistCode.code,
        "transaction_type",
        models.Activity.domestic_external,
        "fiscal_year")
    fy_query = qactivity.filter_activities_for_permissions(fy_query)
    fy_sector_totals = fy_query.all()

    def append_path(root, paths):
        # Merge one result row into the accumulator, keyed by
        # domestic/external flag, fiscal year and sector name.
        if paths:
            sector = root.setdefault(
                "{}_{}_{}".format(paths.domestic_external,
                                  paths.fiscal_year, paths.name),
                {'Commitments': 0.0,
                 'Disbursements': 0.0,
                 'Disbursement Projection': 0.0})
            sector[{
                "C": "Commitments",
                "D": "Disbursements",
                "FS": "Disbursement Projection"
            }[paths.transaction_type]] = paths.total_value
            sector["name"] = paths.name
            sector["code"] = paths.code
            sector["domestic_external"] = paths.domestic_external
            sector["fy"] = paths.fiscal_year

    root = {}
    for s in sector_totals:
        append_path(root, s)
    for s in fy_sector_totals:
        append_path(root, s)
    # list(...): a dict_values view is not JSON-serializable under
    # Python 3; the original passed it to jsonify directly.
    return jsonify(sectors=list(root.values()))
def dialect_format_date(field, format_string):
    """Render a SQL date-formatting expression for the active dialect.

    :param field: column or expression holding the date.
    :param format_string: strftime-style format string.
    :return: a SQLAlchemy function expression.
    :raises NotImplementedError: for dialects other than sqlite/mysql.
        The original silently returned None here, which surfaced as a
        confusing error far from the cause.
    """
    dialect = db.session.bind.dialect.name
    if dialect == 'sqlite':
        return func.strftime(format_string, field)
    elif dialect == 'mysql':
        return func.date_format(field, format_string)
    raise NotImplementedError(
        "dialect_format_date: unsupported dialect %r" % dialect)
class Redirect(dbs.Model):
    """
    This model is the main Redirect model.
    For non time consuming development reasons, only One-to-One relations
    are set.
    The lastRedirect time is set server-side.
    """
    __tablename__ = 'redirect'
    # Primary key.
    id = dbs.Column(dbs.Integer, primary_key=True)
    # One-to-one link back to the owning Stat.
    statId = dbs.Column(dbs.Integer, dbs.ForeignKey('stat.id'))
    stat = dbs.relationship('Stat', back_populates='redirect',
                            foreign_keys=[statId])
    # Defaults to now() on insert; refreshed via a SQLite strftime
    # expression on every update. NOTE(review): the onupdate expression is
    # SQLite-specific while the server_default is portable — confirm this
    # asymmetry is intended.
    lastRedirect = dbs.Column(dbs.DateTime(timezone=True),
                              server_default=func.now(),
                              onupdate=func.strftime('%Y-%m-%d %H:%M:%f',
                                                     'now'))
    # Number of times the shortcode has been followed.
    redirectCount = dbs.Column(dbs.Integer)

    @classmethod
    def check_in_use(cls, shortcode):
        """
        This method checks if a redirect record for the provided shortcode
        is already in use, by querying the database.

        :param shortcode: The provided shortcode.
        :type shortcode: str
        :return: The in-use status.
        :rtype: bool
        :raises:
            ShortcodeNotFound: When the provided shortcode does not exist.
        """
        _stat = Stat.query.filter(
            Stat.shortcode.has(shortcode=shortcode)).first()
        if _stat is None:
            raise ShortcodeNotFound
        else:
            # A Redirect child exists only after the first redirect.
            _redirect = _stat.redirect
            if _redirect is None:
                return False
            else:
                return True

    @classmethod
    def redirect(cls, shortcode):
        """
        This method handles the redirect by incrementing the redirectCount
        for the Redirect record and returning the attached FQDN domain for
        the provided shortcode.

        :param shortcode: The provided shortcode.
        :type shortcode: str
        :return: The related FQDN domain.
        :rtype: str
        """
        in_use = cls.check_in_use(shortcode=shortcode)
        if in_use is True:
            # Existing record: bump the counter in place.
            _redirect = Stat.query.filter(
                Stat.shortcode.has(shortcode=shortcode)).first().redirect
            _redirect.redirectCount += 1
        else:
            # First redirect for this shortcode: create the child record.
            _redirect = cls(stat=Stat.query.filter(
                Stat.shortcode.has(shortcode=shortcode)).first(),
                redirectCount=1)
        dbs.session.add(_redirect)
        dbs.session.commit()
        return _redirect.stat.shortcode.url.url
def tobs():
    """JSON list of temperature observations recorded during 2017."""
    year_is_2017 = func.strftime("%Y", Measurement.date) == '2017'
    observations = session.query(Measurement.tobs).filter(year_is_2017).all()
    return jsonify(observations)