def get_users_with_expirations(
        aggregation_date: datetime.date = None,
        expiration_lookahead: int = 30
) -> List[str]:
    """Return ids of users whose paid subscription expires within the
    lookahead window.

    :param aggregation_date: reference date; defaults to the current UTC date,
        resolved at call time.
    :param expiration_lookahead: number of days ahead to look for expirations.
    :return: list of user ids (one entry per user).
    """
    # BUG FIX: the original default was `datetime.utcnow().date()`, which is
    # evaluated once at import time and then frozen for the process lifetime.
    if aggregation_date is None:
        aggregation_date = datetime.utcnow().date()

    predplatne_mysql_mappings = get_sqlalchemy_tables_w_session(
        'MYSQL_CRM_CONNECTION_STRING', 'MYSQL_CRM_DB',
        ['payments', 'subscriptions']
    )
    mysql_predplatne_session = predplatne_mysql_mappings['session']
    payments = predplatne_mysql_mappings['payments']
    subscriptions = predplatne_mysql_mappings['subscriptions']

    try:
        relevant_users = mysql_predplatne_session.query(
            subscriptions.c['user_id']
        ).join(
            payments,
            payments.c['subscription_id'] == subscriptions.c['id']
        ).filter(
            and_(
                payments.c['status'] == 'paid',
                # Expires within the lookahead window ...
                func.datediff(subscriptions.c['end_time'], aggregation_date)
                <= expiration_lookahead,
                # ... but has not expired yet.
                func.datediff(subscriptions.c['end_time'], aggregation_date) > 0,
            )
        ).group_by(
            subscriptions.c['user_id']
        )
        return [user_id[0] for user_id in relevant_users.all()]
    finally:
        # Release the session even when the query raises.
        mysql_predplatne_session.close()
def get_rx_count_within_cycle_between_dates_in_store(
        cls, start, end, store_id, cycle):
    """Count distinct, non-cancelled prescriptions (excluding 'other' rx
    types) tied to non-cancelled orders placed in the given store between
    `start` and `end`, restricted to the customer-lifetime cycle given by
    `cycle` (bounds expressed in days since the customer was created).
    """
    from application.models.reception import Reception
    from application.models.customer import Customer

    rx_count = db.session.query(
        func.count(distinct(Reception.id))
    ).outerjoin(
        Customer, Customer.id == Reception.customer_id
    ).outerjoin(
        Order, Order.customer_id == Customer.id
    ).filter(
        and_(
            db.func.date(Order.created_on) >= start,
            db.func.date(Order.created_on) <= end,
            Order.store_id == store_id,
            Order.status != 'cancelled',
            Reception.status != 'cancelled',
            Reception.rx_type != 'other'))

    # Days between the order and the customer's registration.
    customer_age_days = func.datediff(Order.created_on, Customer.created_on)
    if len(cycle) == 1:
        rx_count = rx_count.filter(customer_age_days >= cycle[0])
    elif len(cycle) > 1:
        rx_count = rx_count.filter(
            and_(customer_age_days >= cycle[0],
                 customer_age_days < cycle[1]))

    return rx_count.scalar()
def render_user(user_id):
    """Render the personal dashboard for a user.

    Non-boss users get execution statistics (average task completion time,
    average deadline-margin ratio, contribution percentage); boss users get
    authorship/closership task overviews instead. Statistics are computed
    with MySQL date functions when the app runs on MySQL, otherwise with
    raw SQLite JULIANDAY() queries.
    """
    user = db.session.query(User).get_or_404(user_id)
    if not user.is_boss:
        non_executed_tasks = user.non_executed_tasks
        # MySQL supports DATEDIFF(); SQLite needs the JULIANDAY() fallback below.
        if 'mysql' in current_app.config['SQLALCHEMY_DATABASE_URI']:
            # Average days from task creation to completion for this executor.
            avg_execute_time = db.session.query(func.round(func.avg(
                func.datediff(Task.completed_on, Task.created_on)), 2)) \
                .filter(Task.executor_id == user_id) \
                .scalar()
            # Average fraction of the deadline window left when the request
            # was executed (only non-denied requests).
            ratio = db.session.query(func.round(func.avg(
                (func.datediff(Task.deadline, Task.created_on) -
                 func.datediff(Request.executed_on, Task.created_on)) /
                func.datediff(Task.deadline, Task.created_on)), 2)) \
                .join(Request, Task.requests) \
                .filter(Request.denied_on.is_(None),
                        Task.executor_id == user_id) \
                .scalar()
        else:
            # SQLite path: same statistics expressed with JULIANDAY() day numbers.
            query = text(
                'SELECT ROUND(AVG(CAST((JULIANDAY(t.completed_on) - JULIANDAY(t.created_on)) AS Integer)), 2) '
                'FROM tasks t WHERE t.executor_id = :user_id')
            avg_execute_time = db.engine.execute(query, {
                'user_id': user_id
            }).scalar()
            ratio_query = (
                'SELECT ROUND(AVG((CAST(JULIANDAY(t.deadline) - JULIANDAY(t.created_on) AS REAL) - '
                'CAST(JULIANDAY(r.executed_on) - JULIANDAY(t.created_on) AS REAL)) / '
                'CAST(JULIANDAY(t.deadline)- JULIANDAY(t.created_on) AS REAL)), 2) '
                'FROM tasks t '
                'JOIN requests r ON r.task_id = t.id '
                'WHERE r.denied_on IS NULL AND t.executor_id = :user_id')
            ratio = db.engine.execute(ratio_query, {
                'user_id': user_id
            }).scalar()
        try:
            # Share (in %) of all completed tasks that this user executed.
            user_contribution = round(
                len(user.executionship) / db.session.query(func.count(
                    Task.id)).filter(~Task.completed_on.is_(None)).scalar()
                * 100, 2)
        except ZeroDivisionError:
            # No completed tasks in the system yet.
            user_contribution = None
        return render_template('user.html', title="Личный кабинет", user=user,
                               avg_execute_time=avg_execute_time,
                               non_executed_tasks=non_executed_tasks[0:5],
                               user_contribution=user_contribution,
                               ratio=ratio)
    else:
        # Boss view: count of tasks not yet done plus review queues.
        all_tasks = db.session.query(func.count(
            Task.id)).filter(Task.status != 'выполнена').scalar()
        authorship_tasks = user.authorship
        closership_tasks = user.closership
        tasks_on_consider = user.get_tasks_on_consider()
        return render_template('user.html', title="Личный кабинет", user=user,
                               authorship_tasks=authorship_tasks,
                               closership_tasks=closership_tasks,
                               top_urgent_tasks=tasks_on_consider[0:5],
                               all_tasks=all_tasks,
                               count_tasks_on_consider=len(tasks_on_consider))
def get_appt_count_within_cycle_between_dates_in_store(
        cls, start, end, store_id, cycle):
    """Count distinct in-store/follow-up appointments tied to non-cancelled
    orders placed in the given store between `start` and `end`, restricted to
    the customer-lifetime cycle given by `cycle` (bounds in days since the
    customer was created).
    """
    from application.models.appointment import Appointment
    from application.models.customer import Customer

    appt_count = db.session.query(
        func.count(distinct(Appointment.id))
    ).outerjoin(
        Customer, Customer.id == Appointment.customer_id
    ).outerjoin(
        Order, Order.customer_id == Customer.id
    ).filter(
        and_(
            db.func.date(Order.created_on) >= start,
            db.func.date(Order.created_on) <= end,
            Order.store_id == store_id,
            Order.status != 'cancelled',
            or_(Appointment.type == 'instore',
                Appointment.type == 'followup')))

    # Days between the order and the customer's registration.
    customer_age_days = func.datediff(Order.created_on, Customer.created_on)
    if len(cycle) == 1:
        appt_count = appt_count.filter(customer_age_days >= cycle[0])
    elif len(cycle) > 1:
        appt_count = appt_count.filter(
            and_(customer_age_days >= cycle[0],
                 customer_age_days < cycle[1]))

    return appt_count.scalar()
def reports(self):
    """Render the merchant 'Sales Dashboard' summary page.

    Builds per-program, per-day aggregates of orders and impressions for the
    current user (``c.user``) within the ``c.lowerDate``..``c.upperDate``
    window and renders the account summary template.

    NOTE(review): the tail (``self.__Temp`` / ``render``) is placed at
    function level so the page renders even with no programs — confirm
    against the original layout.
    """
    c.title = "Sales Dashboard"
    c.items = c.user.programs
    if len(c.items) > 0:
        # Sentinel value used in place of NULL timestamps on Impression rows
        # (presumably — verify how nullToken is populated).
        nullToken = c.nullToken
        """Each row is (program_id, date, numPurchases, numAffiliatePurchases, affiliateTotal, purchaseTotal """
        #JOIN was unnecessary
        #c.orders = Session_.query(Order.product_id, func.datediff(Order.date, c.lowerDate), func.count('*').label('total'), func.sum(Order.isReturned, type_=Integer).label('numReturns'), func.count(Order.affiliate_user_id).label('numAffiliatePurchases')).join((Product, and_(Order.seller_user_id==c.user.id, Order.product_id==Product.id))).filter(Order.date >= c.lowerDate).filter(Order.date < c.upperDate).group_by(Order.product_id).group_by(Order.date).all()
        # Per-program, per-day order aggregates for this merchant; the date is
        # expressed as a day offset from c.lowerDate via DATEDIFF.
        c.orders = Session_.query(
            Order.program_id,
            func.datediff(Order.date, c.lowerDate),
            func.count('*').label('numPurchases'),
            func.count(Order.affiliate_user_id).label('numAffiliatePurchases'),
            func.sum(case([(Order.affiliate_user_id != None, Order.amount)], else_=0)).label('affiliateTotal'),
            func.sum(Order.amount).label('purchaseTotal'),
        ).filter(Order.merchant_user_id==c.user.id).filter(Order.date >= c.lowerDate).filter(Order.date <= c.upperDate).group_by(Order.program_id).group_by(Order.date).all()
        #c.days = Session_.query(func.datediff(Transaction.date, c.lowerDate), func.sum(Transaction.amount)).filter(Transaction.user_id==c.user.id).filter(Transaction.date >= c.lowerDate).filter(Transaction.date < c.upperDate).group_by(Transaction.date).all()
        """SINGLE PRODUCT FOR OWNER"""
        #c.orders = Session_.query(Order.product_id, func.datediff(Order.date, c.lowerDate), func.count('*').label('total'), func.sum(Order.isReturned, type_=Integer).label('numReturns'), func.count(Order.affiliate_user_id).label('numAffiliatePurchases')).filter(Order.product_id==c.product.id).filter(Order.date >= c.lowerDate).filter(Order.date < c.upperDate).group_by(Order.product_id).group_by(Order.date).all()
        #c.impressions = Session_.query(Impression.product_id, func.datediff(Impression.date, c.lowerDate), func.count('*').label('total'), func.sum(case([(Impression.affiliate_ts != nullToken, 1)],else_=0)), func.sum(case([(Impression.order_ts != nullToken, 1)],else_=0)), func.sum(case([(and_(Impression.affiliate_ts != nullToken, Impression.order_ts != nullToken), 1)],else_=0)).label('buyConversions'), func.sum(case([(and_(Impression.purchase_ts != nullToken, Impression.order_ts != nullToken), 1)],else_=0)).label('purchaseConversions')).filter(Impression.product_id==c.product.id).filter(Impression.date >= c.lowerDate).filter(Impression.date < c.upperDate).group_by(Impression.product_id).group_by(Impression.date).all()
        """Each row is (product_id, date, affiliateViews, conversionTime)"""
        # Per-program, per-day impression aggregates: affiliate view count and
        # average seconds from affiliate view to purchase.
        c.impressions = Session_.query(
            Impression.program_id,
            func.datediff(Impression.date, c.lowerDate),
            func.sum(case([(Impression.affiliate_ts != nullToken, 1)],else_=0)),
            func.avg(case([(and_(Impression.purchase_ts != nullToken, Impression.affiliate_ts != nullToken), func.time_to_sec(func.timediff(Impression.purchase_ts,Impression.affiliate_ts)))],else_=0), ).label('purchaseConversions')
        ).join((Program, and_(c.user.id==Program.merchant_user_id, Impression.program_id==Program.id))).filter(Impression.date >= c.lowerDate).filter(Impression.date <= c.upperDate).group_by(Impression.program_id).group_by(Impression.date).all()
        logging.info(c.impressions)
    self.__Temp(c)
    return render('/account/accountSummary.mak')
def _perform_sql(self):
    """Build (and log) the query selecting unlocked monitored objects whose
    last check is due according to their configured frequency.

    NOTE(review): the query is only logged, never executed or returned here —
    confirm the caller expects no result from this method.
    """
    # FIX: the original built a `filter` dict (shadowing the builtin) that was
    # never used anywhere; it has been removed.
    # Difference in days between last_check and now (MySQL DATEDIFF semantics).
    check_age = func.datediff(self.tables.monitor_obs.last_check, func.now())
    query = self.session.query(self.tables.monitor_obs)\
        .add_column(check_age.label("test"))\
        .filter(self.tables.monitor_obs.locked == False)\
        .filter(check_age <= self.tables.monitor_obs.frequency) \
        .order_by('last_check')
    logger.debug(query)
def received_to_delivered(self, year):
    """Calculate monthly averages (in days) from sample received to delivered.

    Yields one dict per application category with per-month averages,
    restricted to diagnostic-priority customers and samples received within
    the given year.

    :param year: year used to derive the received_at window bounds.
    """
    query = (self.session.query(
        models.Application.category.label("category"),
        func.month(models.Sample.received_at).label("month_no"),
        (func.avg(
            func.datediff(models.Sample.delivered_at,
                          models.Sample.received_at)).label("average")),
    ).join(
        models.Sample.customer,
        models.Sample.application_version,
        models.ApplicationVersion.application,
    ).filter(
        models.Customer.priority == "diagnostic",
        models.Sample.received_at > self.get_from_date(year),
        models.Sample.received_at < self.get_until_date(year),
        # BUG FIX: `delivered_at is not None` was a Python identity check on
        # the Column object — always True, so undelivered samples were never
        # filtered out in SQL. Use isnot() to emit `IS NOT NULL`.
        models.Sample.delivered_at.isnot(None),
    ).group_by(
        models.Application.category,
        func.month(models.Sample.received_at),
    ))

    # groupby relies on the SQL result being ordered by category first;
    # rows within one category are folded into a month -> average map.
    for category, results in groupby(query, key=lambda result: result.category):
        averages = {
            MONTHS[result.month_no]: float(result.average) if result.average else None
            for result in results
        }
        yield {
            "name": category,
            "results": {
                month: averages.get(month) or None for month in MONTHS.values()
            },
        }
def get_dag_duration_info() -> List[DagDurationInfo]:
    '''Get duration of currently running DagRuns, one entry per dag_id.

    :return: list of DagDurationInfo with the longest-running duration
        (in seconds, except for the 'default' driver which yields an interval)
        per running DAG.
    '''
    driver = Session.bind.driver  # pylint: disable=no-member
    durations = {
        # BUG FIX: the subtraction must happen between the two julianday()
        # values; the original computed
        # julianday(current_timestamp - julianday(start_date)).
        'pysqlite': (func.julianday(func.current_timestamp())
                     - func.julianday(DagRun.start_date)) * 86400.0,
        'mysqldb': func.timestampdiff(text('second'), DagRun.start_date,
                                      func.now()),
        'pyodbc': func.sum(func.datediff(text('second'), DagRun.start_date,
                                         func.now())),
        'default': func.now() - DagRun.start_date
    }
    duration = durations.get(driver, durations['default'])

    sql_res = Session.query(  # pylint: disable=no-member
        DagRun.dag_id,
        func.max(duration).label('duration')).group_by(
            DagRun.dag_id).filter(DagRun.state == State.RUNNING).all()

    res = []
    for i in sql_res:
        # sqlite/mysql expressions already yield seconds; the default branch
        # returns a timedelta-like interval, so take .seconds from it.
        if driver in ('mysqldb', 'pysqlite'):
            dag_duration = i.duration
        else:
            dag_duration = i.duration.seconds
        res.append(DagDurationInfo(dag_id=i.dag_id, duration=dag_duration))
    return res
def delete_inactive_link():
    """Delete links whose age (in days) has reached their configured lifetime."""
    with app.app_context():
        # BUG FIX: the original matched the lifetime with `==`, so a link was
        # only deleted on the exact day it expired; if the scheduler missed
        # that day the link lived forever. `>=` removes everything past expiry.
        delete_query = Link.__table__.delete()\
            .where(func.datediff(func.now(), Link.generated_day)
                   >= Link.hash_lifetime)
        db.session.execute(delete_query)
        db.session.commit()
        print('scheduler works')
def calc_duration_changes(max):
    """Calculate duration of changes (time from created to updated).

    This will print summary stats about the duration of the changes
    in the review system, and will show some of them.

    Parameters
    ----------

    max: int
        Max number of changes to show.
    """
    # NOTE: `max` shadows the builtin; kept for interface compatibility.
    # Changes that were updated after creation, longest-lived first.
    res = session.query(
        label ("number", DB.Change.number),
        label ("start", DB.Change.created),
        label ("finish", DB.Change.updated),
        ) \
        .filter (DB.Change.created < DB.Change.updated) \
        .order_by (desc (func.datediff(DB.Change.updated, DB.Change.created)))
    cases = res.limit(max).all()
    for case in cases:
        # Python 2 print statement — this module predates Python 3.
        print str(case.number) + ": " + str(case.start) + \
            " (start), " + str(case.finish) + " (finish) Duration: " + \
            str(case.finish - case.start)
def received_to_prepped(self):
    """Calculate monthly averages (in days) from sample received to prepped.

    Yields one dict per application category with a list of per-month
    averages, restricted to diagnostic-priority customers and samples
    received after 2016-12-31.
    """
    query = (self.session.query(
        models.Application.category.label('category'),
        func.month(models.Sample.received_at).label('month_no'),
        (func.avg(
            func.datediff(models.Sample.prepared_at,
                          models.Sample.received_at)).label('average')),
    ).join(
        models.Sample.customer,
        models.Sample.application_version,
        models.ApplicationVersion.application,
    ).filter(
        models.Customer.priority == 'diagnostic',
        models.Sample.received_at > dt.datetime(2016, 12, 31)
    ).group_by(
        # BUG FIX: `category` is selected and later grouped on in Python, but
        # the SQL grouped only by month — collapsing all categories into one
        # row per month (and invalid under ONLY_FULL_GROUP_BY). Group by
        # category as well, mirroring received_to_delivered().
        models.Application.category,
        func.month(models.Sample.received_at),
    ))

    # groupby relies on the SQL result being ordered by category first.
    for category, results in groupby(query, key=lambda result: result.category):
        averages = {
            MONTHS[result.month_no]: float(result.average) if result.average else None
            for result in results
        }
        yield {
            'category': category,
            'results': [{
                'month': month,
                'average': averages.get(month) or None,
            } for month in MONTHS.values()]
        }
def check_newer_dates(max):
    """Check that dates related to a change are newer than creation date.

    This will print summary stats about dates that are not correct,
    and will show at most max cases.

    Parameters
    ----------

    max: int
        Max number of cases to show among those violating the check.
    """
    # NOTE: `max` shadows the builtin; kept for interface compatibility.
    # Changes whose creation date is (incorrectly) later than their update
    # date, worst offenders first.
    res = session.query(
        label ("number", DB.Change.number),
        label ("created", DB.Change.created),
        label ("updated", DB.Change.updated)
        ) \
        .filter (DB.Change.created > DB.Change.updated) \
        .order_by (desc (func.datediff(DB.Change.created, DB.Change.updated)))
    cases = res.limit(max).all()
    for case in cases:
        # Python 2 print statement — this module predates Python 3.
        print str(case.number) + ": " + str(case.created) + \
            " (created), " + str(case.updated) + " (updated) Mismatch: " + \
            str(case.created - case.updated) + ")"
    print "Total number of mismatchs: " + str(res.count())
def get_users_with_expirations(
        aggregation_date: datetime.date = None
) -> pd.DataFrame:
    """Return users whose subscription expires within EVENT_LOOKAHEAD days
    of `aggregation_date` and who have not already renewed.

    :param aggregation_date: reference date; defaults to the current UTC
        date, resolved at call time (the original default was evaluated once
        at import time and then frozen).
    :return: DataFrame with `user_id` and `outcome_date` (latest end_time,
        cast to a date) per user.
    """
    if aggregation_date is None:
        aggregation_date = datetime.utcnow().date()

    predplatne_mysql_mappings = get_sqlalchemy_tables_w_session(
        'MYSQL_CRM_CONNECTION_STRING', 'MYSQL_CRM_DB',
        ['payments', 'subscriptions'])
    mysql_predplatne_session = predplatne_mysql_mappings['session']
    payments = predplatne_mysql_mappings['payments']
    subscriptions = predplatne_mysql_mappings['subscriptions']

    try:
        # Paid-like subscriptions expiring inside the lookahead window.
        # FIX: `user_id` was selected twice in the original column list.
        relevant_subscriptions = mysql_predplatne_session.query(
            subscriptions.c['user_id'],
            subscriptions.c['end_time'],
            subscriptions.c['start_time']).join(
                payments,
                payments.c['subscription_id'] == subscriptions.c['id']).filter(
                    and_(
                        payments.c['status'].in_(
                            ['paid', 'prepaid', 'family', 'upgrade']),
                        func.datediff(subscriptions.c['end_time'],
                                      aggregation_date) <= EVENT_LOOKAHEAD,
                        func.datediff(subscriptions.c['end_time'],
                                      aggregation_date) > 0,
                    )).subquery('relevant_users')

        # We will be filtering out users that renewed before: anti-join on a
        # follow-up subscription starting within the expiring one's lifetime
        # and before the aggregation date.
        relevant_users = mysql_predplatne_session.query(
            relevant_subscriptions.c['user_id'],
            func.max(relevant_subscriptions.c['end_time']).cast(DATE).label(
                'outcome_date')).outerjoin(
                    subscriptions,
                    and_(
                        subscriptions.c['user_id'] ==
                        relevant_subscriptions.c['user_id'],
                        subscriptions.c['start_time'] >
                        relevant_subscriptions.c['start_time'],
                        subscriptions.c['start_time'] <=
                        relevant_subscriptions.c['end_time'],
                        subscriptions.c['start_time'] <= aggregation_date)
                ).filter(
                    # No matching renewal row survived the outer join.
                    subscriptions.c['user_id'] == None
                ).group_by(
                    relevant_subscriptions.c['user_id'])

        return pd.read_sql(relevant_users.statement,
                           relevant_users.session.bind)
    finally:
        # Release the session even when the query raises.
        mysql_predplatne_session.close()
def get_payment_history_features(end_time: datetime):
    """Build per-user payment-history features as of `end_time`.

    Combines, per user:
      * `clv`: sum of all 'paid' payment amounts created up to `end_time`
      * `days_since_last_subscription` / `last_subscription_end`: distance
        from `end_time` to the user's latest subscription end before it

    Returns a pandas DataFrame; `clv` and `days_since_last_subscription` are
    cast to float. Users with payments but no ended subscription keep NaN in
    the outer-joined columns.
    """
    predplatne_mysql_mappings = get_sqlalchemy_tables_w_session(
        'MYSQL_CONNECTION_STRING', 'predplatne', ['payments', 'subscriptions']
    )

    mysql_predplatne_session = predplatne_mysql_mappings['session']
    payments = predplatne_mysql_mappings['payments']
    subscriptions = predplatne_mysql_mappings['subscriptions']

    # Customer lifetime value: total paid amount per user up to end_time.
    clv = mysql_predplatne_session.query(
        func.sum(payments.c['amount']).label('clv'),
        payments.c['user_id']
    ).filter(
        and_(
            payments.c['created_at'] <= end_time,
            payments.c['status'] == 'paid'
        )
    ).group_by(
        payments.c['user_id']
    ).subquery()

    # Recency of the latest subscription that ended before end_time.
    days_since_last_subscription = mysql_predplatne_session.query(
        func.datediff(end_time, func.max(subscriptions.c['end_time'])).label('days_since_last_subscription'),
        func.max(subscriptions.c['end_time']).label('last_subscription_end'),
        subscriptions.c['user_id']
    ).filter(
        subscriptions.c['end_time'] <= end_time
    ).group_by(
        subscriptions.c['user_id']
    ).subquery()

    # Left-join recency onto CLV so paying users without an ended
    # subscription are kept.
    user_payment_history_query = mysql_predplatne_session.query(
        clv.c['clv'],
        clv.c['user_id'],
        days_since_last_subscription.c['days_since_last_subscription'],
        days_since_last_subscription.c['last_subscription_end']
    ).outerjoin(
        days_since_last_subscription,
        clv.c['user_id'] == days_since_last_subscription.c['user_id']
    )

    user_payment_history = pd.read_sql(
        user_payment_history_query.statement,
        user_payment_history_query.session.bind
    )

    # Decimal columns come back as objects; normalize to float for modelling.
    user_payment_history['clv'] = user_payment_history['clv'].astype(float)
    user_payment_history['days_since_last_subscription'] = user_payment_history[
        'days_since_last_subscription'
    ].astype(float)

    mysql_predplatne_session.close()

    return user_payment_history
def list(page_nr=1, search=None):
    """Show the paginated vacancy list, optionally filtered by a search term.

    Readers only see vacancies that are currently open; users with write
    access see everything. NOTE(review): the function name shadows the
    builtin `list`; kept because it is the public view name.
    """
    if not ModuleAPI.can_read('vacancy'):
        return abort(403)

    # Order the vacancies in such a way that vacancies that are new
    # or almost expired, end up on top: progress through the start..end
    # window is scaled to 0..100, and distance from the midpoint (50)
    # is used as the sort key.
    order = func.abs(
        (100 * (func.datediff(Vacancy.start_date, func.current_date()) /
                func.datediff(Vacancy.start_date, Vacancy.end_date))) - 50)

    if search is not None:
        # Search matches title, company name, workload or contract type.
        vacancies = Vacancy.query.join(Company). \
            filter(or_(Vacancy.title.like('%' + search + '%'),
                       Company.name.like('%' + search + '%'),
                       Vacancy.workload.like('%' + search + '%'),
                       Vacancy.contract_of_service.like('%' + search + '%'))) \
            .order_by(order.desc())
        if not ModuleAPI.can_write('vacancy'):
            # Readers only see vacancies whose window includes "now".
            vacancies = vacancies.filter(
                and_(Vacancy.start_date < datetime.utcnow(),
                     Vacancy.end_date > datetime.utcnow()))
        vacancies = vacancies.paginate(page_nr, 15, False)
        return render_template('vacancy/list.htm', vacancies=vacancies,
                               search=search, path=FILE_FOLDER,
                               title="Vacatures")

    if ModuleAPI.can_write('vacancy'):
        vacancies = Vacancy.query.join(Company).order_by(order.desc())
    else:
        vacancies = Vacancy.query.order_by(order.desc()) \
            .filter(and_(Vacancy.start_date < datetime.utcnow(),
                         Vacancy.end_date > datetime.utcnow()))
    vacancies = vacancies.paginate(page_nr, 15, False)
    return render_template('vacancy/list.htm', vacancies=vacancies,
                           search="", path=FILE_FOLDER, title="Vacatures")
def datos_familias():
    """Return [total families, adults (>25y), youths (13-25y), children (<13y)]."""
    from datetime import date
    today = date.today()

    # BUG FIX: MySQL DATEDIFF() returns a difference in *days*, but the
    # original compared it against ages in *years* (25, 13) — so anyone older
    # than 25 days counted as an adult. Convert the year thresholds to days.
    DAYS_PER_YEAR = 365.25
    edad_en_dias = func.datediff(today, Miembro.fecha_nac)

    total_familias = db.session.query(Familia).count()
    total_adultos = db.session.query(Miembro)\
        .filter(edad_en_dias > 25 * DAYS_PER_YEAR)\
        .count()
    total_jovenes = db.session.query(Miembro)\
        .filter(and_(edad_en_dias <= 25 * DAYS_PER_YEAR,
                     edad_en_dias >= 13 * DAYS_PER_YEAR))\
        .count()
    total_ninios = db.session.query(Miembro)\
        .filter(edad_en_dias < 13 * DAYS_PER_YEAR)\
        .count()
    return [total_familias, total_adultos, total_jovenes, total_ninios]
def count_all_within_cycle_between_dates_in_store(cls, start, end, store_id,
                                                  cycle):
    """Count the class's non-cancelled, non-history records created in the
    given store between `start` and `end`, restricted to the customer-lifetime
    cycle given by `cycle` (bounds in days since the customer was created).
    """
    from application.models.customer import Customer

    total = db.session.query(func.count(cls.id)).outerjoin(
        Customer, Customer.id == cls.customer_id).filter(
            and_(
                db.func.date(cls.created_on) >= start,
                db.func.date(cls.created_on) <= end,
                cls.store_id == store_id,
                cls.status != 'cancelled',
                cls.history_order == 0))

    # Days between the record and the customer's registration.
    customer_age_days = func.datediff(cls.created_on, Customer.created_on)
    if len(cycle) == 1:
        total = total.filter(customer_age_days >= cycle[0])
    elif len(cycle) > 1:
        total = total.filter(
            and_(customer_age_days >= cycle[0],
                 customer_age_days < cycle[1]))

    return total.scalar()
def affiliate(self):
    """Render the 'Affiliate Dashboard' summary page.

    Builds per-product, per-day aggregates of orders and impressions for the
    current affiliate user within the ``c.lowerDate``..``c.upperDate`` window
    and renders the account summary template.

    NOTE(review): the tail (``self.__Temp`` / ``render``) is placed at
    function level so the page renders even with no affiliate items — confirm
    against the original layout.
    """
    c.title = "Affiliate Dashboard"
    c.items = c.user.itemsAffiliate
    if len(c.items) > 0:
        # Sentinel value used in place of NULL timestamps on Impression rows
        # (presumably — verify how nullToken is populated).
        nullToken = c.nullToken
        """Each row is (product_id, date, total, numReturns, numAffiliatePurchases """
        #JOIN was unnecessary
        #c.orders = Session_.query(Order.product_id, func.datediff(Order.date, c.lowerDate), func.count('*').label('total'), func.sum(Order.isReturned, type_=Integer).label('numReturns'), func.count(Order.affiliate_user_id).label('numAffiliatePurchases')).join((Product, and_(Order.affiliate_user_id==c.user.id, Order.product_id==Product.id))).filter(Order.date >= c.lowerDate).filter(Order.date < c.upperDate).group_by(Order.product_id).group_by(Order.date).all()
        # Per-product, per-day order aggregates for this affiliate; the date
        # is expressed as a day offset from c.lowerDate via DATEDIFF.
        c.orders = Session_.query(
            Order.product_id,
            func.datediff(Order.date, c.lowerDate),
            func.count('*').label('total'),
            func.sum(Order.isReturned, type_=Integer).label('numReturns'),
            func.count(Order.affiliate_user_id).label('numAffiliatePurchases')
        ).filter(Order.affiliate_user_id==c.user.id).filter(Order.date >= c.lowerDate).filter(Order.date < c.upperDate).group_by(Order.product_id).group_by(Order.date).all()
        """SINGLE PRODUCT FOR AFFILIATE"""
        #c.orders = Session_.query(Order.product_id, func.datediff(Order.date, c.lowerDate), func.count('*').label('total'), func.sum(Order.isReturned, type_=Integer).label('numReturns'), func.count(Order.affiliate_user_id).label('numAffiliatePurchases')).filter(Order.affiliate_user_id==c.user.id).filter(Order.product_id==c.product.id).filter(Order.date >= c.lowerDate).filter(Order.date < c.upperDate).group_by(Order.product_id).group_by(Order.date).all()
        #c.impressions = Session_.query(Impression.product_id, func.datediff(Impression.date, c.lowerDate), func.count('*').label('total'), func.sum(case([(Impression.affiliate_ts != nullToken, 1)],else_=0)), func.sum(case([(Impression.order_ts != nullToken, 1)],else_=0)), func.sum(case([(and_(Impression.affiliate_ts != nullToken, Impression.order_ts != nullToken), 1)],else_=0)).label('buyConversions'), func.sum(case([(and_(Impression.purchase_ts != nullToken, Impression.order_ts != nullToken), 1)],else_=0)).label('purchaseConversions')).filter(c.user.id==Impression.affiliate_user_id).filter(Impression.product_id==c.product.id).filter(Impression.date >= c.lowerDate).filter(Impression.date < c.upperDate).group_by(Impression.product_id).group_by(Impression.date).all()
        """Each row is (product_id, date, total, affiliateViews, orderViews, buyConversions, purchaseConversions)"""
        # Per-product, per-day impression aggregates with view/conversion
        # counts, derived from which timestamp fields are non-null.
        c.impressions = Session_.query(
            Impression.product_id,
            func.datediff(Impression.date, c.lowerDate),
            func.count('*').label('total'),
            func.sum(case([(Impression.affiliate_ts != nullToken, 1)],else_=0)),
            func.sum(case([(Impression.order_ts != nullToken, 1)],else_=0)),
            func.sum(case([(and_(Impression.affiliate_ts != nullToken, Impression.order_ts != nullToken), 1)],else_=0)).label('buyConversions'),
            func.sum(case([(and_(Impression.purchase_ts != nullToken, Impression.order_ts != nullToken), 1)],else_=0)).label('purchaseConversions')
        ).filter(c.user.id==Impression.affiliate_user_id).filter(Impression.date >= c.lowerDate).filter(Impression.date <= c.upperDate).group_by(Impression.product_id).group_by(Impression.date).all()
        logging.info(c.impressions)
    self.__Temp(c, True)
    return render('/account/accountSummary.mak')
def check_upload (diff): """Check upload time of first revision with created time for change. For each change, the upload time of the first revision (patchset) is matched against the created time for the change. Those changes with more than diff mins. of difference are shown. Parameters ---------- diff: int Minutes of difference considered. """ revs = session.query(label ("daterev", func.min(DB.Revision.date)), label ("change_id", DB.Revision.change_id), label ("number", DB.Change.number)) \ .filter (DB.Revision.change_id == DB.Change.uid) \ .group_by("change_id") \ .subquery() res = session.query( label ("number", revs.c.number), label ("created", DB.Change.created), label ("daterev", revs.c.daterev) ) \ .filter(and_( func.abs(func.timediff( DB.Change.created, revs.c.daterev) > timedelta (minutes = diff)), DB.Change.uid == revs.c.change_id)) \ .order_by (func.datediff(DB.Change.created, revs.c.daterev), func.timediff(DB.Change.created, revs.c.daterev)) messages = res.all() for message in messages: print "Change " + str(message.number) + ": " + \ str(message.created - message.daterev) + \ " -- " + str(message.created) + " (created), " + \ str(message.daterev) + " (first revision)" print "Total changes with discrepancy: " + str (len(messages))
def get_dag_duration_info():
    '''Get duration of currently running DagRuns, one row per dag_id.

    :return: list of (dag_id, duration) rows for DAGs in the RUNNING state,
        using the duration expression appropriate for the DB driver.
    '''
    driver = Session.bind.driver  # pylint: disable=no-member
    durations = {
        # BUG FIX: the subtraction must happen between the two julianday()
        # values; the original computed
        # julianday(current_timestamp - julianday(start_date)).
        'pysqlite': (func.julianday(func.current_timestamp())
                     - func.julianday(DagRun.start_date)) * 86400.0,
        # BUG FIX: MySQL/MSSQL expect the raw datetime column here; the
        # original passed the SQLite-only julianday() expression to
        # TIMESTAMPDIFF/DATEDIFF.
        'mysqldb': func.timestampdiff(text('second'), DagRun.start_date,
                                      func.now()),
        'pyodbc': func.sum(func.datediff(text('second'), DagRun.start_date,
                                         func.now())),
        'default': func.now() - DagRun.start_date
    }
    duration = durations.get(driver, durations['default'])

    return Session.query(
        DagRun.dag_id,
        func.max(duration).label('duration')).group_by(
            DagRun.dag_id).filter(DagRun.state == State.RUNNING).all()
def monthly_account():
    """Print and return each sailor's payment for the month.

    The charge per reservation is the boat's rent multiplied by the number
    of days borrowed (DATEDIFF of return and start dates); charges are summed
    per sailor. Roughly equivalent to:

        SELECT s.sname, s.sid, t.payment
        FROM sailors s
        JOIN (SELECT r.sid,
                     SUM(b.rent * DATEDIFF(r.return_d, r.start_d)) AS payment
              FROM boats b, reserves r
              WHERE b.bid = r.bid
              GROUP BY r.sid) t ON t.sid = s.sid
    """
    # session = Session()
    print('-----Monthly Payment-----')

    # Per-sailor total: rent * days borrowed, summed over all reservations.
    per_sailor_totals = (
        session.query(
            Reserves.sid.label('sid'),
            func.sum(
                Boats.rent
                * func.datediff(Reserves.return_d, Reserves.start_d)
            ).label('payment'))
        .filter(Boats.bid == Reserves.bid)
        .group_by(Reserves.sid)
        .subquery())

    monthly_payments = (
        session.query(Sailors.sname, Sailors.sid, per_sailor_totals.c.payment)
        .filter(Sailors.sid == per_sailor_totals.c.sid))

    for row in monthly_payments:
        print('Sailor "{}" (#{}) owes ${} dollars this month'.format(
            row.sname, row.sid, row.payment))
    return monthly_payments
def get_out_of_stack():
    """Return all ONLINE trades whose sell_time is at least DAYS days old."""
    age_in_days = func.datediff(func.now(), TTrade.sell_time)
    stale_trades = session.query(TTrade).filter(
        and_(age_in_days >= DAYS,
             TTrade.status == ONLINE))
    return stale_trades.all()
def get_rentals_sum_by_vehicle_id(id):
    """Summarize the rentals of one vehicle: count, total distance, total cost.

    Cost rule (computed in SQL, mirroring get_rentals_by_vehicle_id): daily
    rentals ("D") cost 100 per calendar day (DATEDIFF + 1, so same-day
    rentals count as one day); otherwise the cost equals the driven distance.

    :param id: vehicle id (name shadows the builtin; kept for compatibility).
    :return: dict with total_rentals, total_distance and total_cost.
    """
    rentals_summary = db.session.query(
        Rentals.id,
        Rentals.vehicle_id,
        (Rentals.odometer_end - Rentals.odometer_start).label('distance'),
        Rentals.rental_type,
        func.IF(
            Rentals.rental_type == "D",
            (func.datediff(Rentals.date_end, Rentals.date_start) + 1) * 100,
            (Rentals.odometer_end - Rentals.odometer_start)
        ).label('rental_cost')
    ).filter(Rentals.vehicle_id == id).all()

    # IDIOM FIX: the original accumulated count/distance/cost with a manual
    # counter loop; len()/sum() express the same totals directly (both yield
    # 0 for a vehicle with no rentals).
    return {
        "total_rentals": len(rentals_summary),
        "total_distance": sum(rental.distance for rental in rentals_summary),
        "total_cost": sum(rental.rental_cost for rental in rentals_summary),
    }
# Report how many rows the preceding query (defined earlier in the file)
# returned, and dump them.
print(f"fetched: {len(results)}")
for instance in results:
    print(instance)

# fetch the last 10 day EOD prices for State Bank of India
results = (RoutingSession.session.query(EquitiesIndiaBse.EodTimeSeries).filter(
    EquitiesIndiaBse.EodTimeSeries.CODE == code).order_by(
        EquitiesIndiaBse.EodTimeSeries.TIME_STAMP.desc()).limit(10).all())

print("last 10 day EOD prices:")
for instance in results:
    print(instance)

# fetch the last 24 quarter EPS for State Bank of India
# The KEY ilike filters select the "diluted ... after ..." EPS line items;
# the DATEDIFF < 100 days filter keeps only quarterly (not annual) periods.
results = (RoutingSession.session.query(
    EquitiesIndiaBse.CorporateResults).filter(
        and_(
            EquitiesIndiaBse.CorporateResults.CODE == code,
            EquitiesIndiaBse.CorporateResults.KEY.ilike('%diluted%'),
            EquitiesIndiaBse.CorporateResults.KEY.ilike('%after%'),
            func.datediff(
                text('day'),
                EquitiesIndiaBse.CorporateResults.PERIOD_BEGIN,
                EquitiesIndiaBse.CorporateResults.PERIOD_END) < 100)).order_by(
                    EquitiesIndiaBse.CorporateResults.PERIOD_END.desc()).limit(
                        24).all())

print("the last 24 quarter EPS: ")
for instance in results:
    print(instance)
def get_global_context(start_time, end_time):
    """Build a daily 'global context' DataFrame between start_time and end_time.

    For each date it carries rolling sums over the current day plus the
    previous 7 days of: paid payment counts/amounts (CRM DB) and article
    pageviews (Beam DB), joined on date.
    """
    beam_mysql_mappings = get_sqlalchemy_tables_w_session(
        'MYSQL_BEAM_CONNECTION_STRING', 'MYSQL_BEAM_DB', ['article_pageviews']
    )

    mysql_beam_session = beam_mysql_mappings['session']
    article_pageviews = beam_mysql_mappings['article_pageviews']

    predplatne_mysql_mappings = get_sqlalchemy_tables_w_session(
        'MYSQL_CRM_CONNECTION_STRING', 'MYSQL_CRM_DB', ['payments']
    )

    mysql_predplatne_session = predplatne_mysql_mappings['session']
    payments = predplatne_mysql_mappings['payments']

    # We create two subqueries using the same data to merge twice in order to
    # get rolling sum in mysql
    def get_payments_filtered():
        # Daily totals of paid payments within the window.
        payments_filtered = mysql_predplatne_session.query(
            payments.c['created_at'].cast(DATE).label('date'),
            func.count(payments.c['id']).label('payment_count'),
            func.sum(payments.c['amount']).label('sum_paid')
        ).filter(
            payments.c['created_at'] >= start_time,
            payments.c['created_at'] <= end_time,
            payments.c['status'] == 'paid'
        ).group_by(
            'date'
        ).subquery()

        return payments_filtered

    def get_article_pageviews_filtered():
        # Daily totals of article pageviews within the window.
        article_pageviews_filtered = mysql_beam_session.query(
            article_pageviews.c['time_from'].cast(DATE).label('date'),
            func.sum(article_pageviews.c['sum']).label('article_pageviews'),
        ).filter(
            article_pageviews.c['time_from'] >= start_time,
            article_pageviews.c['time_from'] <= end_time
        ).group_by(
            'date'
        ).subquery()

        return article_pageviews_filtered

    payments_filtered_1 = get_payments_filtered()
    payments_filtered_2 = get_payments_filtered()

    # Self-join on a 0..7 day lag to emulate a rolling-window sum in MySQL.
    payments_context = mysql_predplatne_session.query(
        payments_filtered_1.c['date'].label('date'),
        func.sum(payments_filtered_2.c['payment_count']).label('payment_count'),
        func.sum(payments_filtered_2.c['sum_paid']).label('sum_paid')
    ).join(
        payments_filtered_2,
        func.datediff(payments_filtered_1.c['date'], payments_filtered_2.c['date']).between(0, 7)
    ).group_by(
        payments_filtered_1.c['date']
    ).order_by(
        payments_filtered_1.c['date']
    ).subquery()

    article_pageviews_filtered_1 = get_article_pageviews_filtered()
    article_pageviews_filtered_2 = get_article_pageviews_filtered()

    # Same rolling-window trick for pageviews.
    article_pageviews_context = mysql_beam_session.query(
        article_pageviews_filtered_1.c['date'].label('date'),
        func.sum(article_pageviews_filtered_2.c['article_pageviews']).label('article_pageviews_count'),
    ).join(
        article_pageviews_filtered_2,
        func.datediff(article_pageviews_filtered_1.c['date'], article_pageviews_filtered_2.c['date']).between(0, 7)
    ).group_by(
        article_pageviews_filtered_1.c['date']
    ).order_by(
        article_pageviews_filtered_1.c['date']
    ).subquery()

    # Combine payment and pageview context rows on matching dates.
    context_query = mysql_predplatne_session.query(
        payments_context.c['date'],
        payments_context.c['payment_count'],
        payments_context.c['sum_paid'],
        article_pageviews_context.c['article_pageviews_count']
    ).join(
        article_pageviews_context,
        article_pageviews_context.c['date'] == payments_context.c['date']
    )

    context = pd.read_sql(
        context_query.statement,
        context_query.session.bind
    )

    mysql_predplatne_session.close()
    mysql_beam_session.close()

    return context
absent_date=datetime.date(2014, 1, 3), severity_points=2), Absenteeism(emp_id=2, absent_date=datetime.date(2014, 1, 4), severity_points=3) ]) # SELECTで出す型 abs2 = aliased(Absenteeism) sum_scalar = func.sum( case([(abs2.emp_id == None, Absenteeism.severity_points)], else_=0)) q = Query([Absenteeism.emp_id, sum_scalar])\ .outerjoin(abs2, and_(Absenteeism.emp_id == abs2.emp_id, Absenteeism.absent_date - abs2.absent_date == 1))\ .group_by(Absenteeism.emp_id)\ .having(sum_scalar >= 40) print(q) # UPDATEで効率よくいく型 # UPDATE中にJOINが使え無いようなのでできない・・ for absentee in session.query(Absenteeism).join( abs2, and_(Absenteeism.emp_id == abs2.emp_id, func.datediff(Absenteeism.absent_date, abs2.absent_date) == 1)).all(): absentee.severity_points = 0 q = Query(Absenteeism).join(Calendar, Absenteeism.absent_date == Calendar.cal_date)\ .group_by(Absenteeism.emp_id).having(func.sum(Absenteeism.severity_points) >= 40) print(q)
session.add_all([ Absenteeism(emp_id=1, absent_date=datetime.date(2014, 1, 1), severity_points=4), Absenteeism(emp_id=1, absent_date=datetime.date(2014, 1, 2), severity_points=2), Absenteeism(emp_id=1, absent_date=datetime.date(2014, 1, 3), severity_points=3), Absenteeism(emp_id=2, absent_date=datetime.date(2014, 1, 1), severity_points=1), Absenteeism(emp_id=2, absent_date=datetime.date(2014, 1, 3), severity_points=2), Absenteeism(emp_id=2, absent_date=datetime.date(2014, 1, 4), severity_points=3) ]) # SELECTで出す型 abs2 = aliased(Absenteeism) sum_scalar = func.sum(case([(abs2.emp_id == None, Absenteeism.severity_points)], else_=0)) q = Query([Absenteeism.emp_id, sum_scalar])\ .outerjoin(abs2, and_(Absenteeism.emp_id == abs2.emp_id, Absenteeism.absent_date - abs2.absent_date == 1))\ .group_by(Absenteeism.emp_id)\ .having(sum_scalar >= 40) print(q) # UPDATEで効率よくいく型 # UPDATE中にJOINが使え無いようなのでできない・・ for absentee in session.query(Absenteeism).join(abs2, and_( Absenteeism.emp_id == abs2.emp_id, func.datediff(Absenteeism.absent_date, abs2.absent_date) == 1)).all(): absentee.severity_points = 0 q = Query(Absenteeism).join(Calendar, Absenteeism.absent_date == Calendar.cal_date)\ .group_by(Absenteeism.emp_id).having(func.sum(Absenteeism.severity_points) >= 40) print(q)
def age(cls):
    """SQL expression for the age in years.

    BUG FIX: DATEDIFF(year, start, end) returns end - start, so the
    original argument order (utcnow(), birthday) yielded a *negative*
    number of years for any past birthday. The birthday must be the
    start argument and "now" the end argument.
    """
    return func.datediff(text('year'), cls.birthday, utcnow())
def get_rentals_by_vehicle_id(id):
    """Return the vehicle's rentals, newest first, serialized with the schema.

    Each row carries the driven distance and the rental cost: daily rentals
    ("D") cost 100 per calendar day (DATEDIFF + 1), otherwise the cost
    equals the driven distance.
    """
    # Cost/distance expressions computed in SQL.
    distance_expr = (Rentals.odometer_end - Rentals.odometer_start)
    cost_expr = func.IF(
        Rentals.rental_type == "D",
        (func.datediff(Rentals.date_end, Rentals.date_start) + 1) * 100,
        (Rentals.odometer_end - Rentals.odometer_start))

    rentals = db.session.query(
        Rentals.id,
        Rentals.date_start,
        Rentals.odometer_start,
        Rentals.odometer_end,
        distance_expr.label('distance'),
        Rentals.date_end,
        Rentals.rental_type,
        cost_expr.label('rental_cost')
    ).filter(
        Rentals.vehicle_id == id
    ).order_by(
        desc(Rentals.date_start)
    ).all()

    return rentals_schema_more.jsonify(rentals)