def get_query(self, date_from, date_to):
    """Build a query of cumulative per-day counts of titled videos.

    For each distinct ``Video.date_added`` date inside
    ``[date_from, date_to]``, returns the running total of videos with a
    non-NULL ``link_title`` added on all strictly earlier dates.
    """
    from rockpack.mainsite.services.video.models import Video
    # Distinct dates on which any video was added within the window.
    dates = readonly_session.query(
        func.date(Video.date_added).distinct().label('date_added')
    ).filter(
        Video.date_added.between(date_from, date_to)
    ).subquery()
    # Per-day counts of titled videos (all time, not window-restricted).
    counts = readonly_session.query(
        func.date(Video.date_added).label('date_added'),
        func.count().label('count')
    ).filter(
        Video.link_title.isnot(None)
    ).group_by(
        func.date(Video.date_added)
    ).subquery()
    # Cross join restricted to strictly-earlier count rows; summing per
    # window date yields the cumulative total up to (not including) it.
    query = readonly_session.query(
        dates.c.date_added,
        func.sum(counts.c.count)
    ).filter(
        counts.c.date_added < dates.c.date_added
    ).group_by(
        dates.c.date_added
    )
    return query
def count_last_week_data(brand_arr):
    """Aggregate last week's daily coupon statistics (Monday-Sunday).

    Returns three 7-element lists indexed 0-6 (Mon-Sun):
      - expire_data: coupons whose expiry date is the day after each day
        (expiry = coupon creation time + issue period + 1 day)
      - get_ticket_data: coupons claimed on each day for the given brands
      - callback_data: claimed coupons already used ('usedit') per day

    Steps: take today's date, step back to last Monday, then loop over
    the seven days of that week.

    :param brand_arr: iterable of brand ids to restrict the counts to
    :return: (expire_data, get_ticket_data, callback_data)
    """
    import datetime
    expire_data = [0, 0, 0, 0, 0, 0, 0]
    get_ticket_data = [0, 0, 0, 0, 0, 0, 0]
    callback_data = [0, 0, 0, 0, 0, 0, 0]
    today = datetime.date.today()
    last_monday = today - timedelta(days=today.weekday() + 7)
    # BUG FIX: range(0, 6) only covered Monday-Saturday, leaving the
    # Sunday slot (index 6) permanently zero; iterate all 7 days.
    for i in range(7):
        index_day = last_monday + timedelta(days=i)
        expire_data[i] += int(GetTicketRecord.query.filter(
            GetTicketRecord.get_expire_date == index_day + timedelta(days=1)).count())
        get_ticket_data[i] += int(
            GetTicketRecord.query.filter(func.date(GetTicketRecord.create_at) == index_day).filter(
                GetTicketRecord.discount.has(Discount.brand_id.in_(brand_arr))).count())
        callback_data[i] += int(GetTicketRecord.query.filter(
            func.date(GetTicketRecord.create_at) == index_day,
            GetTicketRecord.status == 'usedit').filter(
            GetTicketRecord.discount.has(Discount.brand_id.in_(brand_arr))).count())
    return expire_data, get_ticket_data, callback_data
def filter_date(self, query, appstruct):
    """Restrict the query to the requested year and/or date range."""
    year = appstruct.get('year')
    range_start = appstruct.get('date_range_start')
    range_end = appstruct.get('date_range_end')
    unset = (None, colander.null)
    if range_start not in unset:
        query = query.filter(func.date(Activity.datetime) >= range_start)
    if range_end not in unset:
        query = query.filter(func.date(Activity.datetime) <= range_end)
    # The year filter only applies when no explicit range was given.
    no_range = range_start in unset and range_end in unset
    if no_range and year not in (None, colander.null, -1):
        query = query.filter(func.extract('YEAR', Activity.datetime) == year)
    return query
def do_job(self):
    """
    Print a day-grouped PrettyTable of tasks started in the last
    `self.args.days` days, with per-day subtotals and an overall total.

    Roughly equivalent SQL:
    SELECT *, coalesce(end_time,now)-start_time FROM task t, subject s WHERE t.subject_id=s.id AND start_time > GetDate() - @days ORDER BY start_time
    :return:
    """
    session = DBSession()
    now = datetime.now()
    # julianday() (SQLite) yields fractional days; the difference * 86400
    # is elapsed seconds.  Open tasks (NULL end_time) fall back to `now`.
    time_worked = (func.julianday(func.coalesce(Task.end_time,now)) - func.julianday(Task.start_time)) * 86400
    query = session.query(Task.start_time, func.coalesce(Task.end_time, now), time_worked, Subject.title, Task.title) \
        .filter(Subject.id==Task.subject_id) \
        .filter(func.date(Task.start_time) > func.date('now', '-%s day' % self.args.days)) \
        .order_by(Task.start_time)
    print '\n'
    table = PrettyTable(['Start', 'End', 'Time', 'Subject', 'Title'])
    table.align["Title"] = "l"
    total_time = 0
    day_total = 0
    last_date = None
    for row in query:
        if last_date == None:
            last_date = row[0].date()
        # When the calendar date changes, flush a subtotal row for the
        # previous day before continuing.
        if row[0].date() != last_date:
            table.add_row([
                '', '', timedelta(seconds=round(day_total)), '', ''
            ])
            last_date = row[0].date()
            day_total = 0
        day_total += row[2]
        total_time += row[2]
        table.add_row([
            row[0],
            row[1],
            timedelta(seconds=round(row[2])),
            row[3],
            row[4],
        ])
    # Subtotal for the final day (never emitted inside the loop).
    if day_total > 0:
        table.add_row([
            '', '', timedelta(seconds=round(day_total)), '', ''
        ])
    print table
    print 'Total Work time: %s' % timedelta(seconds=total_time)
    print
def complete():
    """Record a completed survey: session, submission, and responses."""
    form = MeaningForm()
    if not form.validate_on_submit():
        return render_template('survey.html', form=form)
    # Reuse today's session if one already exists (newest first).
    today = strftime("%Y-%m-%d")
    current = Sessions.query.filter(
        func.date(Sessions.created) == func.date(today)
    ).order_by(desc(Sessions.created)).first()
    if current is None:
        # No session yet today -- start one.
        current = Sessions()
        db_session.add(current)
        db_session.commit()
    submission = Submissions(session_id=current.id, ip=request.remote_addr)
    db_session.add(submission)
    db_session.commit()
    # One response row per form field, skipping the CSRF token.
    for field in form:
        if field.name == "csrf_token":
            continue
        db_session.add(Responses(
            submission_id=submission.id,
            phrase=field.label.text,
            value=field.data
        ))
    db_session.commit()
    return render_template('survey.html', success=True)
def admin_weekly_checkins(date, grade=None):
    """Check-ins within the working week containing `date`, ordered by
    timestamp; optionally restricted to users of the given grade."""
    week = week_magic(date)
    within_week = and_(func.date(Checkin.checkin_timestamp) >= week[0],
                       func.date(Checkin.checkin_timestamp) <= week[4])
    query = Checkin.query
    if grade is not None:
        query = query.filter(Checkin.user.has(grade=grade))
    return query.filter(within_week).order_by('Checkin.checkin_timestamp')
def get_summary(value_class):
    """Return chart rows for `value_class` since Jan 1 of this year:
    summed values for accumulating classes plus averaged values for
    non-accumulating ones, each as {"name": date, "value": number}.
    """
    # Accumulating classes (accum_flag true): per-period SUM.
    # NOTE(review): grouped by func.month() but labelled/ordered by
    # func.date() -- the "date" shown per row is DB-chosen within the
    # month; confirm this is intentional.
    q = db_session.query(
        func.date(SR_Values.datetime).label("date")
        , func.sum(SR_Values.value).label("daily_value")
    ).filter(SR_Classes.id == SR_Values.value_class_id
    ).filter(SR_Classes.accum_flag == true()
    ).filter(SR_Classes.value_class == value_class
    ).filter(SR_Values.datetime > datetime.datetime(datetime.datetime.now().year, 1, 1)
    ).group_by(SR_Classes.value_class, func.month(SR_Values.datetime)
    ).order_by(SR_Classes.value_class, func.date(SR_Values.datetime))
    print q
    rows = [{
        "name": x.date
        , "value": x.daily_value
    } for x in q.all()]
    # Non-accumulating classes (accum_flag false): per-period AVG,
    # appended to the same result list.
    q = db_session.query(
        func.date(SR_Values.datetime).label("date")
        , func.avg(SR_Values.value).label("daily_value")
    ).filter(SR_Classes.id == SR_Values.value_class_id
    ).filter(SR_Classes.accum_flag == false()
    ).filter(SR_Classes.value_class == value_class
    ).filter(SR_Values.datetime > datetime.datetime(datetime.datetime.now().year, 1, 1)
    ).group_by(SR_Classes.value_class, func.month(SR_Values.datetime)
    ).order_by(SR_Classes.value_class, func.date(SR_Values.datetime))
    rows.extend([{
        "name": x.date
        , "value": x.daily_value
    } for x in q.all()])
    print rows
    return rows
def getDiaryEntries(_from, _to):
    """Fetch diary entries dated on `_from` (when `_to` is None) or
    within the inclusive [_from, _to] date range, oldest first.
    """
    session = pos.database.session()
    query = session.query(DiaryEntry)
    if _to is None:
        query = query.filter(func.date(DiaryEntry.date) == func.date(_from))
    else:
        # BUG FIX: `&` binds tighter than `>=`/`<=`, so the original
        # unparenthesized expression parsed as a chained comparison and
        # built the wrong filter.  Parenthesize both comparisons.
        query = query.filter(
            (func.date(DiaryEntry.date) >= func.date(_from)) &
            (func.date(DiaryEntry.date) <= func.date(_to))
        )
    query = query.order_by(DiaryEntry.date.asc())
    return query.all()
def getTickets(u, _from, _to, show):
    """Tickets for user `u` paid via a method in `show`, closed on
    `_from` (when `_to` is None) or within the inclusive [_from, _to]
    range; ordered by close asc, open asc, paid desc.
    """
    session = pos.database.session()
    query = session.query(Ticket).filter(
        (Ticket.user == u) & Ticket.payment_method.in_(show))
    if _to is None:
        query = query.filter(func.date(Ticket.date_close) == func.date(_from))
    else:
        # BUG FIX: `&` binds tighter than `>=`/`<=`; the original's
        # unparenthesized expression chained the comparisons incorrectly.
        query = query.filter(
            (func.date(Ticket.date_close) >= func.date(_from)) &
            (func.date(Ticket.date_close) <= func.date(_to))
        )
    query = query.order_by(Ticket.date_close.asc(), Ticket.date_open.asc(),
                           Ticket.date_paid.desc())
    return query.all()
def get_total_usage():
    """Per-day drop counts for everyone except the 'openhouse' account."""
    per_day = DBSession.query(
        func.count('*'),
        func.date(DropLog.time)
    ).filter(
        DropLog.username != 'openhouse'
    ).group_by(
        func.date(DropLog.time)
    ).all()
    return process_usage(per_day)
def getTickets(_from, _to=None):
    """Tickets closed on `_from` (when `_to` is None) or within the
    inclusive [_from, _to] date range; ordered by close asc, open asc,
    paid desc.
    """
    session = pos.database.session()
    query = session.query(Ticket)
    if _to is None:
        query = query.filter(func.date(Ticket.date_close) == func.date(_from))
    else:
        # BUG FIX: parenthesize both comparisons -- `&` binds tighter
        # than `>=`/`<=`, so the original built the wrong expression.
        query = query.filter(
            (func.date(Ticket.date_close) >= func.date(_from)) &
            (func.date(Ticket.date_close) <= func.date(_to))
        )
    query = query.order_by(Ticket.date_close.asc(), Ticket.date_open.asc(),
                           Ticket.date_paid.desc())
    return query.all()
def dau():
    """Daily active user counts as a charts-style JSON table."""
    session = get_session()
    rows = [['Day', 'Active Users']]
    for record in session.query(
            func.date(Activity.moment),
            func.count(Activity.user_id.distinct())).\
            group_by(func.date(Activity.moment)).all():
        rows.append(list(record))
    return json.dumps(rows)
def calendar(year=SimpleDate.getNumYearToday(), month=SimpleDate.getNumMonthToday()):
    """Render the monthly spending calendar.

    Builds, for every day of the month and every category, the summed
    spending amount, plus per-category totals and an overall total.

    NOTE(review): the defaults are evaluated once at import time, so a
    long-running process keeps serving the month current at startup.
    """
    # TODO: refactor duplicated code!!!
    num_days = SimpleDate.getNumDaysInMonth(month)
    # BUG FIX: range(1, num_days) omitted the last day of every month.
    days = range(1, num_days + 1)
    categories = SpendingCategory.query.order_by(SpendingCategory.order.asc()).all()
    sums = {}
    descriptions = {}
    totals = {}
    total = 0.0
    for cat in categories:
        totals[cat.id] = 0.0
    for day in days:
        sums[day] = {}
        descriptions[day] = ""
        for cat in categories:
            # The month-range filters are redundant with the exact-day
            # equality below but kept from the original query.
            objs = db.session.query(Spending)\
                .filter(func.date(Spending.date) >= datetime.date(year, month, 1))\
                .filter(func.date(Spending.date) <= datetime.date(year, month, num_days))\
                .filter(Spending.category == cat)\
                .filter(func.date(Spending.date) == datetime.date(year, month, day)).all()
            sums[day][cat.id] = 0.0
            for obj in objs:
                totals[cat.id] += obj.amount
                total += obj.amount
                sums[day][cat.id] += obj.amount
                if obj.name != "":
                    descriptions[day] += "(" + obj.name + ")"
    form = SpendingForm()
    return render_template('calendar.html', form=form, categories=categories,
                           prefix=url_for('calendar'), month=month, year=year,
                           today_day=SimpleDate.getNumDayToday(),
                           today_month=SimpleDate.getNumMonthToday(),
                           today_year=SimpleDate.getNumYearToday(),
                           days=days, sums=sums, totals=totals, total=total,
                           desc=descriptions)
def client_sale_report(dbapi, start, end):
    """Yield one SaleReportByDate per (seller, day) for NEW sales in
    [start, end] that carry a real payment format."""
    sale_day = func.date(NSale.timestamp)
    rows = dbapi.db_session.query(
        NSale.seller_ruc,
        sale_day,
        func.sum(NSale.pretax_amount_usd),
        func.sum(NSale.tax_usd),
        func.count(NSale.uid)
    ).filter(
        NSale.timestamp >= start,
        NSale.timestamp <= end,
        NSale.status == Status.NEW,
        NSale.payment_format != 'None'
    ).group_by(sale_day, NSale.seller_ruc)
    for ruc, date, sale, tax, count in list(rows):
        yield SaleReportByDate(
            timestamp=date,
            ruc=ruc,
            sale_pretax_usd=sale,
            tax_usd=tax,
            count=count,
        )
def notebook(context, request):
    """Assemble the notebook view: tasks, urgent suggestions, history."""
    tasks = Task.query().filter_by(notebook=context).order_by('name').all()
    urgent = [t for t in tasks if t.emergency >= Task.EMERGENCY_THRESHOLD]
    urgent.sort(key=attrgetter('emergency'), reverse=True)
    # Minutes already spent today by this user (or unattributed runs).
    spent_today = (DBSession.query(func.sum(Execution.length))
                   .join('task').filter(Task.notebook == context)
                   .filter(or_(Execution.executor == request.user,
                               Execution.executor == None))
                   .filter(func.date(Execution.time) == datetime.date.today())
                   .scalar()) or 0
    # TODO this could be configurable
    budget = 15 - spent_today
    suggested_something = False
    # Greedily suggest the most urgent tasks until the daily budget runs out.
    for task in urgent:
        if budget <= 0:
            break
        task.suggested = True
        budget -= task.mean_execution
        suggested_something = True
    last_executions = (Execution.query()
                       .join('task').filter(Task.notebook == context)
                       .order_by(Execution.time.desc()).limit(5).all())
    return {
        'notebook': context,
        'tasks': tasks,
        'urgent_tasks': urgent,
        'urgent_tasks_time': int(sum(t.mean_execution for t in urgent)),
        'last_executions': last_executions,
        'suggested_something': suggested_something,
    }
def run(self):
    """Execute the drop/destroy aggregation and store the results.

    Sums dropped and destroyed item quantities per EveType per killmail
    date, applying any region/type/category/time filters that were set
    on the instance, and populates self.query, self.columns and
    self.results.
    """
    q = self.session.query(EveType.name,func.sum(Drop.dropped).label('dropped'),\
        func.sum(Drop.destroyed).label('destroyed'),func.date(KillMail.timestamp).label('date')).\
        join(Drop,Drop.evetype_id == EveType.id).\
        join(KillMail, KillMail.id == Drop.killmail_id).\
        join(SolarSystem, SolarSystem.id == KillMail.solarsystem_id).\
        join(Region, Region.id == SolarSystem.region_id).\
        join(EveGroup,EveGroup.id == EveType.group_id).\
        join(EveCategory,EveCategory.id == EveGroup.category_id)
    # Each filter is applied only when its criterion was configured.
    if(self.region_ids):
        q = q.filter(Region.id.in_(self.region_ids))
    if(self.type_ids):
        q = q.filter(EveType.id.in_(self.type_ids))
    if(self.category_ids):
        q = q.filter(EveCategory.id.in_(self.category_ids))
    if (self.after):
        q = q.filter(KillMail.timestamp >= self.after)
    if (self.before):
        q = q.filter(KillMail.timestamp <= self.before)
    # NOTE(review): desc('date') inside group_by mirrors the order_by
    # (grouping by type id + date, newest date first per type) -- the
    # desc() wrapper in GROUP BY looks unusual; confirm intent.
    q = q.group_by(EveType.id,desc('date'))
    q = q.order_by(EveType.id,desc('date'))
    self.query = q
    self.columns = q.column_descriptions
    self.results = q.all()
def index(self):
    """Render a cohort-retention table.

    Users are bucketed by the week/month of their join date (the
    cohort); for each subsequent interval the cell shows the share of
    the cohort still active as "percent (count)".
    """
    from gviz_data_table import Table
    from rockpack.mainsite.services.user.models import User, UserActivity, UserAccountEvent
    # Activity source: explicit user activity or account events.
    if request.args.get('activity') == 'activity':
        activity_model, activity_date = UserActivity, UserActivity.date_actioned
    else:
        activity_model, activity_date = UserAccountEvent, UserAccountEvent.event_date
    try:
        interval_count = int(request.args['interval_count'])
    except Exception:
        interval_count = 10
    interval = request.args.get('interval')
    if interval not in ('week', 'month'):
        interval = 'week'
    # Cohort key is the week/month number of the join date; the label is
    # the earliest join date within the cohort.
    cohort = func.date_part(interval, User.date_joined)
    cohort_label = func.min(func.date(User.date_joined))
    # Interval offset of each activity relative to the user's cohort.
    active_interval = (func.date_part(interval, activity_date) - cohort).label('active_interval')
    q = readonly_session.query(User).filter(
        User.date_joined > LAUNCHDATE, User.refresh_token != '')
    # Optional demographic filters from the query string.
    if request.args.get('gender') in ('m', 'f'):
        q = q.filter(User.gender == request.args['gender'])
    if request.args.get('locale') in app.config['ENABLED_LOCALES']:
        q = q.filter(User.locale == request.args['locale'])
    if request.args.get('age') in ('13-18', '18-25', '25-35', '35-45', '45-55'):
        age1, age2 = map(int, request.args['age'].split('-'))
        q = q.filter(between(
            func.age(User.date_of_birth),
            text("interval '%d years'" % age1),
            text("interval '%d years'" % age2)
        ))
    # (cohort, interval offset) -> distinct active user count.
    active_users = dict(
        ((c, int(w)), u) for c, w, u in q.join(
            activity_model,
            (activity_model.user == User.id) &
            (activity_date >= User.date_joined)
        ).group_by(cohort, active_interval).values(
            cohort, active_interval,
            func.count(func.distinct(activity_model.user))
        )
    )
    table = Table(
        [dict(id='cohort', type=date)] +
        [dict(id='%s%d' % (interval, i), type=str) for i in range(interval_count)]
    )
    # One row per cohort: label plus "percent (count)" cells; empty
    # string when no users from the cohort were active in the interval.
    totals = q.group_by(cohort).order_by(cohort)
    for c, l, t in totals.values(cohort, cohort_label, func.count()):
        data = []
        for i in range(interval_count):
            a = active_users.get((c, i), '')
            data.append(a and '%d%% (%d)' % (ceil(a * 100.0 / t), a))
        table.append([l] + data)
    return self.render('admin/retention_stats.html', data=table.encode())
def index_old(self):
    """Render aggregate lifetime/active-day retention statistics.

    For three user sets (all, lifetime > 1 day, lifetime > 7 days),
    reports user count plus max/avg/stddev of lifetime (days between
    first and last activity) and of distinct active days.
    """
    from gviz_data_table import Table
    from rockpack.mainsite.services.user.models import User, UserActivity
    user_count = readonly_session.query(func.count(User.id)).\
        filter(User.refresh_token != '').scalar()
    header = ('user count', 'max lifetime', 'avg lifetime',
              'stddev lifetime', 'max active days', 'avg active days',
              'stddev active days')
    # Days between a user's first and last recorded activity.
    lifetime = func.date_part('days', func.max(UserActivity.date_actioned) -
                              func.min(UserActivity.date_actioned)).label('lifetime')
    # Number of distinct calendar days with any activity.
    active_days = func.count(func.distinct(func.date(
        UserActivity.date_actioned))).label('active_days')
    activity = readonly_session.query(UserActivity.user, lifetime, active_days).\
        group_by(UserActivity.user)
    ctx = {}
    for key, having_expr in ('all', None), ('1day', lifetime > 1), ('7day', lifetime > 7):
        # Aggregate the per-user stats across the (optionally HAVING-
        # filtered) population in a single derived-table query.
        data = activity.having(having_expr).from_self(
            func.count('*'),
            func.max(lifetime), func.avg(lifetime), func.stddev_samp(lifetime),
            func.max(active_days), func.avg(active_days), func.stddev_samp(active_days)
        ).one()
        table = Table([
            dict(id='metric', type=str),
            dict(id='value', type=float),
            dict(id='%', type=str),
        ])
        # Only the first metric (user count) carries a percentage cell.
        pdata = ('%d%%' % (data[0] * 100 / user_count),) + ('',) * 6
        table.extend(zip(*(header, map(float, data), pdata)))
        ctx['ret_%s_data' % key] = table.encode()
    return self.render('admin/retention_stats_old.html', **ctx)
def api_outlimit_stations(request, propid, startdate, enddate, limit):
    """Stations whose daily maximum observation of property `propid`
    reached `limit` within [startdate, enddate]; returns plain dicts."""
    session = Session()
    # Dates arrive as 'YYYY-MM-DD' strings.
    y, m, d = startdate.split('-')
    startdate = datetime(int(y), int(m), int(d))
    y, m, d = enddate.split('-')
    enddate = datetime(int(y), int(m), int(d))
    observations = session.query(func.max(Observation.value),
                                 Station.municipality, Station.lat,
                                 Station.lng, func.date(Observation.date)).\
        join(Station).join(Property).\
        filter(Observation.date.between(startdate, enddate),
               Property.name == propid).\
        group_by(func.date(Observation.date), Station.code).\
        having(func.max(Observation.value) >= float(limit)).all()
    resp = []
    for value, municipality, lat, lng, obs_date in observations:
        resp.append({
            'lat': lat,
            'lng': lng,
            'date': obs_date.isoformat(),
            'municipality': municipality,
            'value': value,
        })
    session.close()
    return resp
def _get_host_service_list_daily_average_from_hours(self, start, end):
    """Daily average uptime per host-service entity, computed from the
    hourly ('h') aggregation rows between `start` and `end`."""
    daily_uptime = func.avg(HostService.avg_Uptime).label('dailyUptime')
    query = self.__mysql_session.query(HostService, daily_uptime)
    query = query.group_by(HostService.entityId)
    query = query.group_by(func.date(HostService.timestampId))
    query = query.filter(HostService.aggregationType == 'h')
    query = query.filter(HostService.timestampId >= start,
                         HostService.timestampId <= end)
    return query.all()
def session_data(session_date=None):
    """Load call records for `session_date` into an in-memory sqlite DB
    and return the stored rows whose start date matches that day.

    :param session_date: date to query; defaults to 2017-05-01
    :return: list of SlaStorage rows for the requested date
    """
    if not session_date:
        session_date = datetime.today().date().replace(year=2017, month=5, day=1)
    session_factory = sessionmaker()
    db = create_engine('sqlite:///:memory:', echo=False)
    Base.metadata.bind = db
    # Creates the table information; must happen before session use.
    Base.metadata.create_all()
    session = session_factory(bind=db)
    # Copy records from the upstream data source into the local db.
    for call_id, call_data_dict in test(query_date=session_date.strftime('%Y-%m-%d')).items():
        call_data = SlaStorage(
            id=call_id,
            start=call_data_dict.pop('Start Time'),
            end=call_data_dict.pop('End Time'),
            unique_id1=call_data_dict.pop('Unique Id1'),
            unique_id2=call_data_dict.pop('Unique Id2'),
            data=call_data_dict
        )
        session.add(call_data)
    # Single commit after the loop instead of one per row.
    session.commit()
    # BUG FIX: the original filtered on func.date(session_date) -- a bare
    # expression over a constant, not a comparison against any column --
    # so it did not restrict the rows at all.  Compare the record's
    # start date to the requested date instead.
    return session.query(SlaStorage).filter(
        func.date(SlaStorage.start) == session_date).all()
def insert_box_score(self, box_score, session):
    """Insert one player box score, resolving team, player, and game.

    The game is matched on either the box-score date or the following
    day (presumably to absorb timezone offsets in stored start times --
    confirm), between the two teams in either home/away arrangement.
    On lookup failure, ambiguity, or duplicate insert the transaction
    is rolled back and the score's key is printed to stdout.
    """
    team_object = session.query(Team).filter(Team.abbreviation == box_score.team).one()
    opponent = session.query(Team).filter(Team.abbreviation == box_score.opponent).one()
    try:
        player_object = session.query(Player).filter(and_(
            Player.first_name == box_score.first_name,
            Player.last_name == box_score.last_name,
            Player.team == team_object.id)).one()
        game = session.query(Game).filter(or_(
            func.date(Game.start_time) == box_score.date,
            func.date(Game.start_time) == box_score.date + timedelta(days=1))).filter(and_(
            or_(Game.home_team == team_object.id, Game.away_team == team_object.id),
            or_(Game.home_team == opponent.id, Game.away_team == opponent.id))).one()
        get_or_create(
            session, BoxScore,
            player=player_object.id,
            game=game.id,
            seconds_played=box_score.seconds_played,
            made_field_goals=box_score.field_goals,
            attempted_field_goals=box_score.field_goal_attempts,
            made_three_point_field_goals=box_score.three_point_field_goals,
            attempted_three_point_field_goals=box_score.three_point_field_goal_attempts,
            made_free_throws=box_score.free_throws,
            attempted_free_throws=box_score.free_throw_attempts,
            offensive_rebounds=box_score.offensive_rebounds,
            defensive_rebounds=box_score.defensive_rebounds,
            total_rebounds=box_score.total_rebounds,
            assists=box_score.assists,
            steals=box_score.steals,
            blocks=box_score.blocks,
            turnovers=box_score.turnovers,
            fouls_committed=box_score.personal_fouls,
            points=box_score.points,
            # DraftKings fantasy scoring formula.
            draftkings_points=float(box_score.points) +
                float(box_score.three_point_field_goals) * 0.5 +
                float(box_score.total_rebounds) * 1.25 +
                float(box_score.assists) * 1.5 +
                float(box_score.steals) * 2 +
                float(box_score.blocks) * 2 -
                float(box_score.turnovers) * 0.5
        )
    except (NoResultFound, MultipleResultsFound, IntegrityError):
        print box_score.date, box_score.team, box_score.opponent
        session.rollback()
def get_list_of_tours_by_date(start_date_):
    """
    Return a list of tours whose start date is equal to start_date_.

    :param start_date_: start date of query
    :return: tuple list of name, and id
    """
    starts_that_day = func.date(Tour.start_datetime) == start_date_
    return database.session.query(Tour).filter(starts_that_day).all()
def _get_per_region_host_service_list(self, start, end, service_type, aggregation_type):
    """One row per host-service entity per day for the given service and
    aggregation type, restricted to [start, end]."""
    query = (self.__mysql_session.query(HostService)
             .group_by(HostService.entityId)
             .group_by(func.date(HostService.timestampId))
             .filter(HostService.serviceType == service_type)
             .filter(HostService.aggregationType == aggregation_type)
             .filter(HostService.timestampId >= start,
                     HostService.timestampId <= end))
    return query.all()
def profile(username):
    """Render a user's profile with their links and click statistics.

    For each of the user's short URLs, computes the all-time click
    count and a per-day breakdown over the last 8 days (today first).
    """
    user = User.query.filter_by(username=username).first()
    if user is None:
        flash('User {} not found.'.format(username))
        return redirect(url_for('index'))
    else:
        links = Link.query.join(User, (User.id == Link.user_id)).\
            filter(User.username == user.username).\
            order_by(Link.timestamp.desc())
        # The last 8 calendar days, most recent first.
        daysAgo = []
        for x in xrange(8):
            daysAgo.append(
                datetime.date(datetime.utcnow() - timedelta(days=x)))
        # A list of the user's short URLs and long URLs
        listOfLinksQuery = Link.query.\
            join(User, (User.id == Link.user_id)).\
            filter(User.username == user.username).\
            order_by(Link.timestamp.desc())
        listOfKeysShortURL = [c.shorturl for c in listOfLinksQuery]
        listOfLongURL = [c.longurl for c in listOfLinksQuery]
        # All-time click count for each short URL, in link order.
        totalClicksPerLink = []
        for i in xrange(0, len(listOfKeysShortURL)):
            totalClicksPerLink.append(
                int(Click.query.filter(
                    Click.shorturl == listOfKeysShortURL[i]).count()))
        # A list of total clicks for each short URL
        # Broken down by each day of the week, starting with the most recent
        weeklyCounts = [[] for x in xrange(len(listOfKeysShortURL))]
        for key, value in enumerate(listOfKeysShortURL):
            for j in xrange(8):
                weeklyCounts[key].append(
                    int(Click.query.
                        filter(Click.shorturl == value).
                        filter(func.date(Click.timestamp) == daysAgo[j]).
                        count()))
        # Absolute short URLs for display.
        listOfFullShortURL = [str(request.url_root + 's/' + x)
                              for x in listOfKeysShortURL]
        listOfTimestamps = [datetime.date(link.timestamp) for link in links]
        # Parallel lists zipped into one row per link for the template.
        masterList = zip(listOfLongURL, listOfFullShortURL,
                         listOfKeysShortURL, totalClicksPerLink,
                         weeklyCounts, listOfTimestamps)
        return render_template("user.html",
                               title='Home',
                               user=g.user,
                               links=masterList)
def logs():
    """Render today's daily-time-record log page."""
    today_dt = date.today()
    staff_members = Staff.query.all()
    todays_records = DailyTimeRecord.query.filter(
        func.date(DailyTimeRecord.time_in_am) == today_dt).all()
    return render_template('logs.html', user=current_user,
                           allDtr=todays_records, allStaff=staff_members)
def not_yet_updated(codes, class_):
    """Return the subset of `codes` that `class_` has not updated today."""
    assert class_ in (ShortName, SpecialTreatment)
    today = pd.Timestamp('today').date()
    with session_scope() as sess:
        query = sess.query(class_.code).filter(
            func.date(class_.last_updated) == today).distinct()
        already_updated = {row[0] for row in query.all()}
    return set(codes) - already_updated
def mark_date_unavailable(date, reason="No reason specified."):
    """
    Marks a given date unavailable for reservation for a given reason.
    """
    assert isinstance(reason, str)
    assert isinstance(date, datetime.date)
    # Normalise datetimes down to a plain date.
    if isinstance(date, datetime.datetime):
        only_date = date.date()
    else:
        only_date = date
    existing = (
        DB.session.query(models.UnavailableDate)
        .filter(func.date(models.UnavailableDate.date) == func.date(only_date))
        .first()
    )
    if existing is None:
        DB.session.add(models.UnavailableDate(only_date, reason))
    else:
        # Already recorded -- just refresh the reason.
        existing.reason = reason
    DB.session.commit()
def filter_date(self, query, appstruct):
    """Restrict workshops to those with a timeslot starting on the
    requested date (no-op when no date was supplied)."""
    wanted_date = appstruct.get('date')
    if wanted_date is None:
        return query
    starts_on_date = models.Workshop.timeslots.any(
        func.date(models.Timeslot.start_time) == wanted_date
    )
    return query.filter(starts_on_date)
def fetch_todays_total_message_count(service_id):
    """Count today's non-test notifications for a service (0 if none)."""
    row = db.session.query(
        func.count(Notification.id).label('count')
    ).filter(
        Notification.service_id == service_id,
        Notification.key_type != KEY_TYPE_TEST,
        func.date(Notification.created_at) == datetime.utcnow().date()
    ).first()
    if row is None:
        return 0
    return row.count
def format_venues(venue):
    """Summarise a venue together with its number of upcoming shows."""
    upcoming = db.session.query(Show).filter(
        Show.venue_id == venue.id,
        func.date(Show.start_time) >= datetime.today())
    return {
        "id": venue.id,
        "name": venue.name,
        "num_upcoming_shows": len(list(upcoming))
    }
def ticket_sales_end_mail():
    """Send ticket-sales-end notification emails.

    Notifies organisers of published, still-running events whose ticket
    sales ended yesterday, end tomorrow, or end in exactly one week.
    """
    current_time = datetime.datetime.now()
    current_day = datetime.date.today()
    last_day = current_day - datetime.timedelta(days=1)
    # Equivalent to the original `current_day - timedelta(days=-1)` etc.
    next_day = current_day + datetime.timedelta(days=1)
    next_week = current_day + datetime.timedelta(days=7)

    def events_with_sales_ending_on(day):
        # Published, non-deleted, still-running events with at least one
        # live ticket whose sales end exactly on `day`.  The original
        # repeated this query three times verbatim.
        return (Event.query.filter_by(
            state='published', deleted_at=None).filter(
                Event.ends_at > current_time,
                Event.tickets.any(
                    and_(
                        Ticket.deleted_at == None,  # noqa: E711 (SQL IS NULL)
                        func.date(Ticket.sales_ends_at) == day,
                    )),
            ).all())

    for event in events_with_sales_ending_on(last_day):
        emails = get_emails_for_sales_end_email(event)
        send_email_ticket_sales_end(event, emails)
    for event in events_with_sales_ending_on(next_day):
        emails = get_emails_for_sales_end_email(event)
        send_email_ticket_sales_end_tomorrow(event, emails)
    for event in events_with_sales_ending_on(next_week):
        emails = get_emails_for_sales_end_email(event)
        send_email_ticket_sales_end_next_week(event, emails)
def get_item_usage_over_time(id_, session, override=False):
    """Daily usage of one item versus the total loadout population.

    For each match date after 2019-09-13, counts the distinct
    non-primary players who equipped item `id_` in its category slot,
    and the distinct non-primary players overall, returning
    {'data': [{'date', 'count', 'total'}, ...]}.  Results are served
    from the redis cache unless `override` is set.
    """
    result = ItemStatsWrapper.get_redis_result_if_exists(
        "items_get_item_usage_over_time_", id_)
    if not override and result is not None:
        return result
    # RLGarage category id -> the Loadout column holding that slot's item.
    category_map = {
        1: Loadout.car,
        2: Loadout.wheels,
        3: Loadout.boost,
        4: Loadout.topper,
        5: Loadout.antenna,
        6: Loadout.skin,
        9: Loadout.trail,
        10: Loadout.goal_explosion,
        11: Loadout.banner,
        12: Loadout.engine_audio
    }
    category = RLGarageAPI().get_item(id_)['category']
    loadout_item = category_map[category]
    date = func.date(Game.match_date)
    # Distinct (date, player) pairs that used this item (numerator).
    inner = session.query(date.label('date'), Loadout.player, loadout_item) \
        .join(Game, Loadout.game == Game.hash) \
        .distinct(Loadout.player, date) \
        .filter(loadout_item == id_) \
        .filter(Loadout.player != Game.primary_player) \
        .filter(date > datetime.date(2019, 9, 13)) \
        .filter(date < datetime.date.fromtimestamp(datetime.datetime.utcnow().timestamp())) \
        .group_by(date, Loadout.player, loadout_item) \
        .subquery()
    # Distinct (date, player) pairs overall (denominator).
    inner2 = session.query(date.label('date'), Loadout.player) \
        .join(Game, Loadout.game == Game.hash) \
        .filter(Loadout.player != Game.primary_player) \
        .filter(date > datetime.date(2019, 9, 13)) \
        .filter(date < datetime.date.fromtimestamp(datetime.datetime.utcnow().timestamp())) \
        .distinct(Loadout.player, date) \
        .group_by(date, Loadout.player) \
        .subquery()
    # Collapse each to per-date player counts.
    stats = session.query(inner.c.date, func.count(inner.c.player).label('count')) \
        .group_by(inner.c.date).subquery()
    stats2 = session.query(inner2.c.date, func.count(inner2.c.player).label('count')) \
        .group_by(inner2.c.date).subquery()
    # Join numerator and denominator per date, ordered chronologically.
    final = session.query(stats.c.date, stats.c.count, stats2.c.count) \
        .join(stats2, stats.c.date == stats2.c.date).order_by(stats.c.date)
    data = {
        'data': [{
            'date': r[0],
            'count': r[1],
            'total': r[2]
        } for r in final.all()]
    }
    ItemStatsWrapper.set_redis_result_if_exists(
        "items_get_item_usage_over_time_", id_, data)
    return data
def results(date):
    """Show aggregated survey results, overall or for today's session.

    NOTE(review): the `date` argument is only compared to "overall";
    the session lookup always uses today's date -- confirm intent.
    """
    sesh_name = None
    responses = []
    sesh_date = strftime("%Y-%m-%d")
    if date == "overall":
        responses = Responses.query.all()
        sesh_date = "Overall"
    else:
        sesh = Sessions.query.\
            filter(func.date(Sessions.created) == func.date(sesh_date)).\
            order_by(desc(Sessions.created)).first()
        if sesh is not None:
            sesh_name = sesh.name
            subs = Submissions.query.filter(Submissions.session == sesh)
            responses = Responses.query.filter(
                Responses.submission_id.in_([sub.id for sub in subs]))
    # Group response values by phrase, preserving first-seen order.
    agg_phrases = []
    for res in responses:
        bucket = None
        for phrase_item in agg_phrases:
            if phrase_item['phrase'] == res.phrase:
                bucket = phrase_item
                break
        if bucket is None:
            bucket = {"phrase": res.phrase, "values": []}
            agg_phrases.append(bucket)
        bucket['values'].append(res.value)
    # Attach min/max/avg/count for each phrase.
    for phrase_item in agg_phrases:
        values = phrase_item['values']
        phrase_item['min'] = min(values)
        phrase_item['max'] = max(values)
        phrase_item['avg'] = format(sum(values) / len(values), '.2f')
        phrase_item['count'] = len(values)
    return render_template('results.html', phrases=agg_phrases,
                           date=sesh_date, sesh_name=sesh_name)
def action_notify_count(self, action='vote'):
    """Number of unread notification groups of `action` for this user,
    where a group is one (day, target, target-type) combination."""
    from ..message.models import Notification
    grouped = db.session.query(func.count(Notification.id)).\
        filter(Notification.unread == True,
               Notification.receive_id == self.id,
               Notification.action == action).\
        group_by(func.date(Notification.date_created),
                 Notification.target,
                 Notification.target_type)
    return grouped.count()
def max_devices_by_date(cls, organization_id, since, until, data_collectors):
    """Per-date maximum of the hourly device counts for an organization."""
    hourly = cls.group_by_hour(organization_id, since, until, data_collectors).subquery()
    selectable = hourly.select().with_only_columns([
        func.date(hourly.c.hour).label('date'),
        func.max(hourly.c.devices_count).label('max_devices')
    ])
    # Collapse the hourly rows to one row per calendar date.
    final = selectable.group_by(selectable.c.date).order_by(selectable.c.date)
    return db.session.execute(final).fetchall()
def my_expenses():
    """List the current user's expenses, optionally date-filtered."""
    expense_search = current_user.my_expenses()
    form = ListExpenseForm()
    if form.validate_on_submit():
        # Narrow to the inclusive [date_from, date_to] window.
        date_from = form.date_from.data
        date_to = form.date_to.data
        expense_search = expense_search.filter(
            func.date(Expense.date) >= date_from,
            func.date(Expense.date) <= date_to)
    page = request.args.get('page', 1, type=int)
    pagination = expense_search.paginate(
        page, current_app.config['TASKS_PER_PAGE'], False)
    return render_template('expenses/my_expenses.html', title='My Expenses',
                           expenses=pagination.items, pagination=pagination,
                           form=form)
def index():
    """Sales dashboard: paginated orders plus today's quantity/amount."""
    page = request.args.get('page', 1, type=int)
    orders = ProductOrder.query.filter(
        ProductOrder.sales_by == current_user.id).order_by(
        ProductOrder.id.desc()).paginate(page=page, per_page=20)
    # "Today" in Japan Standard Time, formatted to match func.date().
    today = datetime.now(JST).strftime('%Y-%m-%d')
    mine_today = (ProductOrder.sales_by == current_user.id,
                  func.date(ProductOrder.date) == today)
    # Quantity excludes item 901; the amount total does not.
    sum_qty_today = db.session.query(func.sum(ProductOrder.qty)).\
        filter(*mine_today).filter(ProductOrder.item != 901).scalar()
    sum_amount_today = db.session.query(
        func.sum(ProductOrder.price * ProductOrder.qty)).\
        filter(*mine_today).scalar()
    return render_template('index.html', orders=orders, today=today,
                           sum_qty_today=sum_qty_today,
                           sum_amount_today=sum_amount_today)
def my_tasks():
    """List the current user's tasks, optionally filtered by date range."""
    task_search = current_user.my_tasks()
    form = ListTaskForm()
    if form.validate_on_submit():
        # Narrow to the inclusive [date_from, date_to] window.
        date_from = form.date_from.data
        date_to = form.date_to.data
        task_search = task_search.filter(
            func.date(Task.date) >= date_from,
            func.date(Task.date) <= date_to)
    page = request.args.get('page', 1, type=int)
    pagination = task_search.paginate(
        page, current_app.config['TASKS_PER_PAGE'], False)
    return render_template('tasks/my_tasks.html', title='My Tasks',
                           tasks=pagination.items, pagination=pagination,
                           form=form)
def setPrescriptionStatus(idPrescription):
    """Update a prescription's status, notes and/or concilia flag from request JSON.

    Returns HTTP 400 when the prescription does not exist; otherwise commits
    via tryCommit and returns its result.
    """
    data = request.get_json()
    user = User.find(get_jwt_identity())
    dbSession.setSchema(user.schema)
    # Timestamps recorded below use Brazil time.
    os.environ['TZ'] = 'America/Sao_Paulo'
    p = Prescription.query.get(idPrescription)
    if (p is None):
        return {
            'status': 'error',
            'message': 'Prescrição Inexistente!'
        }, status.HTTP_400_BAD_REQUEST
    if 'status' in data.keys():
        p.status = data.get('status', None)
        p.update = datetime.today()
        if p.agg:
            # Aggregated prescription: propagate the new status to every other
            # non-conciliation prescription of the same admission and segment
            # whose [date, expire] window contains this prescription's date.
            # `== None` is intentional SQLAlchemy (renders IS NULL).
            db.session.query(Prescription)\
                .filter(Prescription.admissionNumber == p.admissionNumber)\
                .filter(Prescription.status != p.status)\
                .filter(Prescription.idSegment == p.idSegment)\
                .filter(Prescription.concilia == None)\
                .filter(between(func.date(p.date), func.date(Prescription.date), func.date(Prescription.expire)))\
                .update({
                    'status': p.status,
                    'update': datetime.today(),
                    'user': user.id
                }, synchronize_session='fetch')
        else:
            Prescription.checkPrescriptions(p.admissionNumber, p.date, p.idSegment, user.id)
    if 'notes' in data.keys():
        p.notes = data.get('notes', None)
        p.notes_at = datetime.today()
    if 'concilia' in data.keys():
        # Only the first character of the concilia value is stored.
        concilia = data.get('concilia', 's')
        p.concilia = str(concilia)[:1]
        p.user = user.id
    return tryCommit(db, str(idPrescription), user.permission())
def activity():
    """Daily like totals between date_from and date_to query args (default: today)."""
    today = str(datetime.date.today())
    date_from = request.args.get("date_from", today)
    date_to = request.args.get("date_to", today)
    # Malformed dates silently fall back to today.
    if not is_valid_date(date_from):
        date_from = today
    if not is_valid_date(date_to):
        date_to = today
    day = func.date(Like.created_at)
    rows = (db.session.query(
                label("date", day),
                label('likes', func.sum(cast(Like.liked, sqlalchemy.Integer))))
            .filter(day >= date_from)
            .filter(day <= date_to)
            .group_by(day)
            .all())
    return {"activity": rows}
def get_model_api_keys(service_id, id=None):
    """Fetch one active key by id, or every key of the service that has no
    expiry or expired within the last seven days."""
    if id:
        return ApiKey.query.filter_by(id=id, service_id=service_id, expiry_date=None).one()
    cutoff = datetime.utcnow() - timedelta(days=7)
    # `== None` is intentional SQLAlchemy (renders IS NULL).
    return ApiKey.query.filter(
        ApiKey.service_id == service_id,
        or_(ApiKey.expiry_date == None, func.date(ApiKey.expiry_date) > cutoff),  # noqa
    ).all()
def step_info_with_output(session, step_name, project_name=None, sample_name=None, time_from=None, time_to=None,
                          container_name=None, artifact_udfs=None):
    """
    Get a join of output artifact UDFs, for all step of the provided name - filterable to a project, sample name,
    container name or date range.
    """
    # Base query: process + input artifact + sample + project for the named step type.
    q = session.query(t.Process.luid, t.Process.daterun, t.ProcessIOTracker.inputartifactid, t.Sample.name,
                      t.Project.name, t.Artifact.luid)\
        .join(t.Process.type)\
        .join(t.Process.processiotrackers) \
        .join(t.ProcessIOTracker.artifact) \
        .join(t.Artifact.samples)\
        .join(t.Sample.project)\
        .filter(t.ProcessType.displayname == step_name)
    if artifact_udfs:
        # Restrict to outputs carrying any of the requested, non-null UDFs and
        # add the UDF name/value columns to the result.
        output_artifact = aliased(t.Artifact)
        q = q.join(t.ProcessIOTracker.output)\
            .join(output_artifact, t.OutputMapping.outputartifactid == output_artifact.artifactid) \
            .join(output_artifact.udfs) \
            .filter(t.ArtifactUdfView.udfname.in_(artifact_udfs)) \
            .filter(t.ArtifactUdfView.udfvalue != None)
        q = q.add_columns(t.ArtifactUdfView.udfname, t.ArtifactUdfView.udfvalue)
    if container_name:
        # NOTE(review): t.Container is filtered here but never joined in this
        # function — confirm the join is implicit in the schema, otherwise this
        # produces a cross join or an error.
        q = q.filter(t.Container.name == container_name)
    if time_from:
        q = q.filter(t.Process.daterun > func.date(time_from))
    if time_to:
        q = q.filter(t.Process.daterun < func.date(time_to))
    # Exclusive bounds on daterun; project/sample filters applied by the shared helper.
    q = add_filters(q, project_name=project_name, sample_name=sample_name)
    return q.all()
def get_list(self, args):
    """Return a page of active conversions matching the optional filters.

    'type': 'object',
    'properties': {
        'page': {'type': 'string'},
        'per_page': {'type': 'string'},
        'sort': {'type': 'string'},
        'filter': {'type': 'string'},
        'status': {'type': 'string'},
        'publisher_name': {'type': 'string'},
        'offer_alias': {'type': 'string'},
        'from_date': {'type': 'string'},
        'to_date': {'type': 'string'},
    }

    Returns: (conversions, count, page, per_page)
    """
    page = helper.get_page_from_args(args)
    per_page = helper.get_size_from_args(args)
    limit = (page - 1) * per_page
    base_query = m.Conversion.query.filter(m.Conversion.active)
    if 'publisher_name' in args:
        publisher_name = args.get('publisher_name')
        base_query = base_query.filter(
            m.Conversion.publisher_name == publisher_name)
    if 'status' in args:
        status = args.get('status')
        base_query = base_query.filter(m.Conversion.status == status)
    if 'offer_alias' in args:
        alias = args.get('offer_alias')
        offer = m.Offer.query.filter(m.Offer.alias == alias).first()
        if offer is None:
            # BUG FIX: previously `offer.id` raised AttributeError for an
            # unknown alias. No offer means no conversions can match.
            return [], 0, page, per_page
        base_query = base_query.filter(m.Conversion.offer_id == offer.id)
    if 'from_date' in args and 'to_date' in args:
        from sqlalchemy import and_, func
        # Timestamps arrive as unix seconds; compare on the DATE portion only.
        from_date = helper.datetime_from_timestamp(
            int(args.get('from_date'))).strftime('%Y-%m-%d')
        to_date = helper.datetime_from_timestamp(int(
            args.get('to_date'))).strftime('%Y-%m-%d')
        base_query = base_query.filter(
            and_(
                func.date(m.Conversion.created_at) <= to_date,
                func.date(m.Conversion.created_at) >= from_date))
    # NOTE(review): count is hard-coded; presumably should be
    # base_query.count() — confirm what callers expect before changing.
    count = 1
    conversions = base_query.limit(per_page).offset(limit).all()
    return conversions, count, page, per_page
def tobs():
    """JSON list of temperature observations from the last two years of data."""
    today = date.today()
    two_years_ago = date(today.year - 2, today.month, today.day)
    rows = (session.query(Measurement.tobs)
            .filter(func.date(Measurement.date) >= two_years_ago)
            .group_by(Measurement.date)
            .all())
    return jsonify(list(rows))
def get_stats_7days_history(self):
    """Hourly (bucket, score, count) statistics of proxy history over the last 7 days."""
    cutoff = (datetime.datetime.now() + datetime.timedelta(days=-7)).strftime('%Y-%m-%d')
    hour_bucket = func.strftime("%Y-%m-%d %H:00", ProxyHistory.updatetime)
    return (self.session.query(hour_bucket, ProxyHistory.score, func.count())
            .filter(func.date(ProxyHistory.updatetime) > cutoff)
            .group_by(hour_bucket, ProxyHistory.score)
            .all())
def get(self):
    """Return the requesting user's unused, unexpired coupons as JSON.

    Python 2 handler (print statement, `except Exception, e` syntax).
    """
    self.set_header("Content-Type", "application/json")
    self.set_header('Access-Control-Allow-Origin', '*')
    ret = {}
    user_id = self.get_argument('user_id', '')
    mongo_logger = get_mongo_logger()
    now = dt.datetime.now()
    try:
        session = Session()
        # Unused coupons expiring today or later, soonest-expiring first.
        results = session.query(UserCoupon) \
            .filter(UserCoupon.user_id == user_id) \
            .filter(func.date(UserCoupon.expire_date) >= now.date()) \
            .filter(UserCoupon.used == 0) \
            .order_by(UserCoupon.expire_date) \
            .all()
        print "request user coupon sql query"
        user_coupons = []
        for result in results:
            user_coupon = {}
            user_coupon['id'] = result.id
            user_coupon['issue_date'] = dt.datetime.strftime(
                result.issue_date, '%Y.%m.%d')
            user_coupon['expire_date'] = dt.datetime.strftime(
                result.expire_date, '%Y.%m.%d')
            user_coupon['discount_price'] = result.discount_price
            user_coupon['description'] = result.description
            user_coupon['title'] = result.title
            user_coupons.append(user_coupon)
        ret['response'] = user_coupons
        self.set_status(Response.RESULT_OK)
        mongo_logger.debug('request user coupon', extra={'user_id': user_id})
        # NOTE(review): `ret` is built but not written to the response in this
        # visible span — confirm a later statement or base handler emits it.
    except Exception, e:
        # Any failure: roll back and report a generic MySQL error.
        session.rollback()
        add_err_message_to_response(ret, err_dict['err_mysql'])
        self.set_status(Response.RESULT_SERVERERROR)
        print_err_detail(e)
        mongo_logger.error('failed to request user coupon', extra={
            'user_id': user_id, 'err': str(e)
        })
def get(self, job_id):
    """Failure-type breakdowns for a job over four windows.

    Returns a list of {'status_type': ..., 'data': [{'name', 'value'}, ...]}
    for today, yesterday, the past week, and overall.
    """
    print('JOB ID = {}'.format(job_id))
    today = datetime.datetime.today()
    yesterday = today - datetime.timedelta(1)
    last_week = today - datetime.timedelta(7)

    base_query = db.session.query(func.count().label('count'),
                                  model.JobRun.failure_type).group_by(
        model.JobRun.failure_type)

    def bucket(status_type, *date_filters):
        # One chart series: apply the window filters plus the job filter and
        # package each (failure_type, count) row for the chart.
        rows = base_query.filter(*date_filters,
                                 model.JobRun.job_id == job_id).all()
        return {'status_type': status_type,
                'data': [{'name': row.failure_type, 'value': row.count}
                         for row in rows]}

    # The four windows were previously four copies of the same query code.
    chart_data = [
        bucket('today', func.date(model.JobRun.date) == func.date(today)),
        bucket('yesterday', func.date(model.JobRun.date) == func.date(yesterday)),
        bucket('week',
               func.date(model.JobRun.date) <= func.date(today),
               func.date(model.JobRun.date) >= func.date(last_week)),
        bucket('overall'),
    ]
    return chart_data
def get_object_list(cls, db_session, page_num=1, filter_=None):
    """Paginated, non-deleted orders (newest first), optionally filtered by
    creation-date range and status."""
    query = (db_session.query(
        cls.model.id, cls.model.email, cls.model.order_no, cls.model.status,
        cls.model.size, cls.model.quantity, cls.model.total,
        cls.model.remote_ip, cls.model.created_time)
        .filter(cls.model.status != OrderStatusEnum.deleted)
        .order_by(cls.model.id.desc()))
    if filter_ is not None:
        if filter_['start_date']:
            query = query.filter(
                func.date(cls.model.created_time) >= filter_['start_date'])
        if filter_['end_date']:
            query = query.filter(
                func.date(cls.model.created_time) <= filter_['end_date'])
        if filter_['status']:
            # Status arrives as an enum member name.
            query = query.filter(
                cls.model.status == getattr(OrderStatusEnum, filter_['status']))
    return paginate(query, page_num, Config.NUM_PER_PAGE)
def get_precomputed_shanghai_holidays():
    """Weekday (Mon-Fri) non-trading dates since the market opened."""
    sess = get_session('szsh')
    rows = sess.query(func.date(TradingCalendar.日期)).filter(
        TradingCalendar.交易日 == 0,
    ).all()
    sess.close()
    weekend = ('Saturday', 'Sunday')
    # Rows are 1-tuples; keep only dates that fall on a weekday.
    return [d for (d,) in rows if pd.Timestamp(d).day_name() not in weekend]
def get_on_this_day_events(db: Session = Depends(get_db)) -> Dict[str, Any]:
    """Today's 'on this day' Wikipedia events, fetching and caching them if absent."""
    try:
        payload = (db.query(WikipediaEvents).filter(
            func.date(WikipediaEvents.date_inserted) == date.today()).one())
    except NoResultFound:
        # Nothing cached for today yet — fetch and insert it.
        # (Must stay before SQLAlchemyError: NoResultFound is its subclass.)
        payload = insert_on_this_day_data(db)
    except (SQLAlchemyError, AttributeError) as e:
        logger.error(f'on this day failed with error: {e}')
        payload = {'events': [], 'wikipedia': 'https://en.wikipedia.org/'}
    return payload
def date(self, tz=None):
    """DATE of the wrapped expression, localized to tz.

    SQLite has no timezone support, so there the zone's current UTC offset is
    applied as a minutes modifier; other dialects convert with timezone().
    """
    tz = get_timezone(tz)
    now = datetime.datetime.now()
    if str(db.engine.url).startswith('sqlite'):
        minutes_offset = round(tz.utcoffset(now).total_seconds() / 60)
        return func.date(self.expr, f'{minutes_offset} minutes')
    return func.DATE(func.timezone(tz.tzname(now), self.expr))
def get_list_of_tours_by_place_and_date(place, date):
    """
    Return a list of tours whose place is equal to place (case-insensitive)
    and whose start date is equal to date.
    :param place: place of tour
    :param date: start date of query
    :return: list of matching Tour objects
    """
    place_matches = func.lower(Tour.place) == func.lower(place)
    starts_on_date = func.date(Tour.start_datetime) == date
    return database.session.query(Tour).filter(place_matches, starts_on_date).all()
def wpu():
    """Top-10 most active users (by impressions) for each ISO week.

    Python 2 code (.iteritems()). Returns zip(week_ids, top10 lists).
    """
    session = get_session()
    # week_id -> user_id -> total impressions that week
    weekly_active_users = defaultdict(lambda: defaultdict(int))
    for value in session.query(func.date(Impression.moment), Splash.user_id, func.count(Splash.splash_id)).\
            join(Splash, Impression.splash_id == Splash.splash_id).\
            group_by(func.date(Impression.moment), Splash.user_id).all():
        day = datetime.strptime(value[0], '%Y-%m-%d').isocalendar()
        week_id = str(day[0]) + '_' + str(day[1])  # "<ISO year>_<ISO week>"
        user_id = str(value[1])
        daily_impressions = int(value[2])
        weekly_active_users[week_id][user_id] += daily_impressions
    # NOTE(review): this sort key compares the inner defaultdicts themselves
    # (arbitrary ordering on Python 2, TypeError on Python 3) — presumably
    # plain sorted(weekly_active_users) by week id was intended; confirm.
    weeks = sorted(weekly_active_users, key=lambda key: weekly_active_users[key])
    results = []
    for week in weeks:
        results.append(nlargest(10, weekly_active_users[week].iteritems(), itemgetter(1)))
    return zip(weeks, results)
def index():
    """Render the demo page with narr_erc rows queried three different ways."""
    # A handful of rows; the template iterates over this list of objects.
    sample_rows = narr_erc.query.limit(5).all()
    row_at_lat = narr_erc.query.filter_by(lat='39.2549').first()
    rows_by_date = narr_erc.query.filter(
        func.date(narr_erc.date) <= '1979-01-02').all()
    return render_template(
        'add_user_fwp.html',
        narr_erc_all=sample_rows,
        narr_erc_lat=row_at_lat,
        narr_erc_date=rows_by_date)
def create_report(self, year, months, stations, magnitudes):
    """Build a base64-encoded CSV of hourly measurement data for the given year.

    Validation rules:
      - year is mandatory;
      - empty months / stations / magnitudes mean "all".
    Holidays are left-joined on the measurement's calendar date.
    """
    if not year:
        raise Exception('Year must be specified')
    data = db.session.query(
        Measurement.datetime, Measurement.town_id, Measurement.magnitude_id,
        Measurement.station_id, Measurement.data, Holiday.name,
        Holiday.scope).filter(
            db.extract('year', Measurement.datetime) == year).join(
                Holiday,
                func.date(Holiday.date) == func.date(Measurement.datetime),
                isouter=True).order_by(Measurement.datetime, Measurement.town_id)
    if len(months) > 0:
        data = data.filter(
            db.extract('month', Measurement.datetime).in_(months))
    if len(stations) > 0:
        data = data.filter(Measurement.station_id.in_(stations))
    if len(magnitudes) > 0:
        data = data.filter(Measurement.magnitude_id.in_(magnitudes))
    all_measurements = data.all()
    result, keys = self.__get_hourly_report_data(all_measurements)
    memory_stream = io.StringIO()
    dict_writer = csv.DictWriter(memory_stream, keys)
    dict_writer.writeheader()
    # BUG FIX: previously wrote the undefined name `measurement_list`
    # (NameError); `result` is the row list produced above.
    dict_writer.writerows(result)
    encoded = base64.b64encode(memory_stream.getvalue().encode('utf-8'))
    return ReportDto(encoded)
def group_frequency_measurements():
    """Groups all frequency measurements by day and calculates minimum, maximum and average.

    For each (day, user) group strictly before today: compute min/max/avg rate,
    store a FrequencyMeasurementDaily row, delete the raw rows, then commit
    everything once at the end.
    """
    now = datetime.datetime.now()
    # Midnight today; only fully elapsed days are rolled up.
    today = datetime.datetime(now.year, now.month, now.day)
    # NOTE(review): selecting full entities with GROUP BY relies on the backend
    # (MySQL/SQLite) returning one arbitrary representative row per
    # (date, user_id) bucket — confirm the dialect permits this.
    groups = (
        FrequencyMeasurement.query.filter(FrequencyMeasurement.timestamp < today)
        .group_by(func.date(FrequencyMeasurement.timestamp), FrequencyMeasurement.user_id)
        .all()
    )
    for group in groups:
        # Aggregate stats for this representative's (date, user) bucket.
        measurements_query_data = (
            db.session.query(
                func.min(FrequencyMeasurement.rate).label("min"),
                func.max(FrequencyMeasurement.rate).label("max"),
                func.avg(FrequencyMeasurement.rate).label("avg"),
            )
            .filter(
                func.date(FrequencyMeasurement.timestamp) == group.timestamp.date(),
                FrequencyMeasurement.user_id == group.user_id,
            )
            .first()
        )
        measurement = FrequencyMeasurementDaily()
        measurement.date = group.timestamp.date()
        measurement.user_id = group.user_id
        measurement.rate_min = measurements_query_data[0]
        measurement.rate_max = measurements_query_data[1]
        measurement.rate_avg = measurements_query_data[2]
        db.session.add(measurement)
        # Remove the raw rows that were just aggregated.
        FrequencyMeasurement.query.filter(
            func.date(FrequencyMeasurement.timestamp) == group.timestamp.date(),
            FrequencyMeasurement.user_id == group.user_id,
        ).delete(synchronize_session="fetch")
        print(measurement)
    # Single commit covers all adds and deletes above.
    db.session.commit()
def precipitation():
    """Return the JSON representation of your dictionary using date as the key
    and prcp as the value (daily precipitation sums for the final year of data)."""
    session = Session(engine)
    # Most recent date present in the dataset (a 1-tuple row).
    latest_date = session.query(Measurement.date).order_by(Measurement.date.desc()).first()
    year_ago = dt.date(2017, 8, 23) - dt.timedelta(days=365)
    sel = [Measurement.date, func.sum(Measurement.prcp)]
    # BUG FIX: the original compared against the *string* 'latest_date' rather
    # than the variable, and and_() wrapped only the first condition. Both
    # bounds now go into and_() and the real latest date is used.
    results = session.query(*sel).filter(and_(
        func.date(Measurement.date) >= year_ago,
        func.date(Measurement.date) <= latest_date[0]
    )).group_by(Measurement.date).order_by(Measurement.date).all()
    # Package each (date, summed precipitation) row for the JSON response.
    all_precipitation = []
    for date, prcp in results:
        precipitation_dict = {}
        precipitation_dict["date"] = date
        precipitation_dict["precipitation"] = prcp
        all_precipitation.append(precipitation_dict)
    return jsonify(all_precipitation)
def get_daily_attacks(self):
    """Chart daily connection counts, skipping days with a single connection."""
    rows = (session.query(func.count(Connections.id).label('counts'),
                          Connections.created_at)
            .group_by(func.date(Connections.created_at))
            .all())
    labels = []
    data = []
    for count, created in rows:
        if count > 1:  # days with only one connection are not plotted
            labels.append(str(created.date()))
            data.append(count)
    self.chartjs_results("Title", labels, data)