def get_archive_list(self, **kwargs):
    """Build and return the paginated archive listing of closed events.

    Recognised ``kwargs``: start_dt, end_dt, client_id, external_id,
    not_in_archive (all optional filters), plus whatever
    ``get_paginated`` reads (presumably page/per-page — confirm there).

    Relies on MySQL's IF() to choose the org-structure of the moving
    action when one exists, else that of the received action.
    """
    # Stash filter parameters on self; the _join_*/set_base_query helpers
    # read them when assembling self.query.
    self.start_dt = kwargs.get('start_dt')
    self.end_dt = kwargs.get('end_dt')
    self.flt_client_id = safe_int(kwargs.get('client_id'))
    self.flt_external_id = kwargs.get('external_id')
    self.flt_not_in_archive = safe_bool(kwargs.get('not_in_archive'))
    # Archive listing always restricts to closed events.
    self.flt_closed_only = True
    self.set_base_query()
    self._join_latest_location()
    self._join_location_org_structure()
    self._join_event_archive()
    self._join_quality_control()
    self.query = self.query.with_entities(
        self.BaseEvent,
        # Department short name: prefer the moving's org structure,
        # fall back to the received action's one.
        func.IF(self.MovingAction.id.isnot(None),
                self.LocationOSfromMoving.shortName,
                self.LocationOSfromReceived.shortName).label('os_name'),
        self.EventArchive.id.label('archive_id'),
        self.EventArchive.archive.label('archive'),
        self.QualityControlAction).order_by(
        # Newest stay first, by moving (or admission) start date.
        func.IF(self.MovingAction.id.isnot(None),
                self.MovingAction_begDate,
                self.ReceivedAction_begDate).desc()).options(
        contains_eager(self.BaseEvent.client),
    )
    return self.get_paginated(kwargs)
def expiration(cls):
    """SQL expression for the effective expiration timestamp.

    If the row is answered, the due time is pushed back by the file's
    interval (seconds); otherwise the stored due time is used as-is.

    NOTE: built on MySQL's IF() and TIMESTAMPADD() — revisit if the
    backend ever changes.
    """
    cols = cls.__table__.c
    extended_due = func.TIMESTAMPADD(
        text('SECOND'), File.__table__.c.interval, cols.due)
    return func.IF(cols.answered, extended_due, cols.due)
def best(cls):
    """Lower bound of the Wilson score interval as a SQL expression.

    Scaled to 0-100; z = 1.2816 corresponds to roughly 80% confidence.
    Rows with no votes at all are mapped to 0 via MySQL's IF().
    """
    z = 1.281551565545
    total = cls.upvotes + cls.downvotes
    positive = cls.upvotes * 1.0 / total
    center = positive + z * z / (2 * total)
    spread = z * func.sqrt(
        positive * (1 - positive) / total + z * z / (4 * total * total))
    denominator = 1 + z * z / total
    return func.IF(total == 0, 0, 100 * (center - spread) / denominator)
def _filter_by_location_os(self):
    """Restrict the query to events located in ``flt_org_struct_id``.

    No-op when the filter is unset.  The org structure is taken from the
    moving action when one exists, otherwise from the received action
    (MySQL IF()).
    """
    if self.flt_org_struct_id is None:
        return
    self._join_location_org_structure()
    location_matches = func.IF(
        self.MovingAction.id.isnot(None),
        self.LocationOSfromMoving.id == self.flt_org_struct_id,
        self.LocationOSfromReceived.id == self.flt_org_struct_id)
    self.query = self.query.filter(location_matches)
def _filter_by_status(self):
    """Apply the hospital-stay status filter (``self.hosp_status``).

    Statuses come from HospStateStatus: transferred, current, received,
    leaved.  The report window is [self.start_dt, self.end_dt).
    """
    if self.hosp_status == HospStateStatus.transferred[0]:
        # Transferred: Action.endDate between beg_date and end_date, the
        # "transferred to department" field is non-empty, and the stay
        # department equals the user's current department.
        self._join_movings_transfered_through()
        self.query = self.query.filter(
            self.q_movings_transfered_through.c.event_id.isnot(None))
    else:
        # All other statuses require at least a moving or an admission.
        self.query = self.query.filter(
            or_(self.MovingAction.id.isnot(None),
                self.ReceivedAction.id.isnot(None)),
        )
        self._filter_by_location_os()
        # Current: the moving's Action.endDate is empty or after end_date,
        # stay department equals the user's current department.  Also show
        # patients admitted to the department but not yet placed in a bed.
        if self.hosp_status == HospStateStatus.current[0]:
            self.query = self.query.filter(
                func.IF(
                    self.MovingAction.id.isnot(None),
                    or_(self.MovingAction_endDate.is_(None),
                        self.MovingAction_endDate >= self.end_dt),
                    or_(self.ReceivedAction_endDate.is_(None),
                        self.ReceivedAction_endDate >= self.end_dt)))
        # Received: Action.begDate of the moving (or admission) within
        # [beg_date, end_date); endDate unrestricted; stay department
        # equals the user's current department.
        elif self.hosp_status == HospStateStatus.received[0]:
            self.query = self.query.filter(
                func.IF(
                    self.MovingAction.id.isnot(None),
                    and_(self.MovingAction_begDate >= self.start_dt,
                         self.MovingAction_begDate < self.end_dt),
                    and_(self.ReceivedAction_begDate >= self.start_dt,
                         self.ReceivedAction_begDate < self.end_dt),
                ))
        # Leaved: Action.endDate between beg_date and end_date, the
        # "transferred to department" field is empty, and a discharge
        # summary ("Leaved" action) must exist.
        elif self.hosp_status == HospStateStatus.leaved[0]:
            self._join_leaved()
            self._join_moving_os_transfer()
            self.query = self.query.filter(
                self.LeavedAction.id.isnot(None),
                self.MovingOrgStructTransfer.id.is_(None),
                and_(self.MovingAction_endDate >= self.start_dt,
                     self.MovingAction_endDate < self.end_dt))
def lifetime():
    """Render lifetime win/loss statistics for every player."""
    # Named aggregate expressions keep the column list readable.
    racks_won = func.sum(MatchUp.MyPlayerActual)
    racks_lost = func.sum(MatchUp.OpponentActual)
    win_count = func.sum(func.IF(MatchUp.WinLose == 'W', 1, 0))
    summary_rows = (
        db.session.query(
            Player.idplayer,
            Player.FirstName,
            Player.Surname,
            Player.Active,
            func.count(MatchUp.idmatchup).label('MatchesPlayed'),
            racks_won.label('ActRacksWon'),
            racks_lost.label('ActRacksLost'),
            (racks_won / (racks_lost + racks_won)).label('RackPct'),
            (win_count / func.count(MatchUp.WinLose)).label('WinPct'))
        .join(MatchUp, MatchUp.Player_ID == Player.idplayer)
        .group_by(Player.idplayer)
        .all())
    return render_template('lifetime.html', summary=summary_rows)
def magic_time_completed(cls):
    """
    SQL expression for the effective completion time.

    Returns ``time_completed`` when it is set (truthy in SQL), otherwise
    falls back to ``time_created``.  Uses MySQL's IF().

    NOTE(review): the previous docstring described USD price math and
    appears to have been copy-pasted from another hybrid property; this
    expression only deals with timestamps.
    """
    return func.IF(cls.time_completed, cls.time_completed, cls.time_created)
def get_latest_hosps(self, start_dt, end_dt, history, **kwargs):
    """Build and return the paginated list of latest hospitalisations.

    Parameters:
        start_dt, end_dt: report window bounds.
        history: when truthy, location filtering by window is skipped
            (see ``_filter_by_latest_location``).
    Recognised ``kwargs``: hosp_status, org_struct_id, client_id,
    exec_person_id, external_id, plus pagination args read by
    ``get_paginated``.
    """
    self.start_dt = start_dt
    self.end_dt = end_dt
    self.history = history
    # Filter parameters consumed by the _filter_*/_join_* helpers.
    self.hosp_status = safe_int(kwargs.get('hosp_status'))
    self.flt_org_struct_id = safe_int(kwargs.get('org_struct_id'))
    self.flt_client_id = safe_int(kwargs.get('client_id'))
    self.flt_exec_person_id = safe_int(kwargs.get('exec_person_id'))
    self.flt_external_id = kwargs.get('external_id')
    self.set_base_query()
    self._filter_by_latest_location()
    self._filter_by_status()
    self._join_location_org_structure()
    self._join_hosp_bed()
    self.query = self.query.with_entities(
        self.BaseEvent, self.MovingAction, self.ReceivedAction,
        # Department name: from the moving's org structure when a moving
        # exists, else from the received action's one (MySQL IF()).
        func.IF(self.MovingAction.id.isnot(None),
                self.LocationOSfromMoving.name,
                self.LocationOSfromReceived.name).label('os_name'),
        self.MovingOSHB.name.label('hosp_bed_name'))
    # Newest stay first, by moving (or admission) start date.
    self.query = self.query.order_by(
        func.IF(self.MovingAction.id.isnot(None),
                self.MovingAction_begDate,
                self.ReceivedAction_begDate).desc())
    self.query = self.query.options(
        contains_eager(self.BaseEvent.client),
        lazyload('*'),
        joinedload(self.BaseEvent.execPerson),
        # Load only the action attributes that will be used later.
        Load(self.MovingAction
             ).load_only('id', 'begDate', 'endDate', 'status', 'event_id',
                         'person_id', 'createPerson_id').joinedload(
            'actionType').load_only('class_'),
        Load(self.MovingAction).contains_eager('event'),
        Load(self.ReceivedAction).load_only(
            'id', 'begDate', 'endDate', 'status', 'event_id', 'person_id',
            'createPerson_id').joinedload('actionType').load_only(
            'class_'),
        Load(self.ReceivedAction).contains_eager('event'))
    return self.get_paginated(kwargs)
def report_open_close_passbook_month(passbook_type, from_month, to_month):
    """Daily counts of opened and closed passbooks for one passbook type.

    ``from_month``/``to_month`` are date strings; both are extended to
    midnight timestamps and the range is inclusive at both ends
    (SQL BETWEEN).  A passbook counts as closed on a withdrawal that
    leaves a zero balance.
    """
    start_ts = from_month + ' 00:00:00'
    end_ts = to_month + ' 00:00:00'
    day = TransactionSlip.transaction_date.cast(Date)
    # COUNT ignores NULL, so IF(..., 1, None) counts matching rows only.
    opened = func.COUNT(func.IF(
        TransactionSlip.transaction_type == TransactionType.OPEN_PASSBOOK,
        1, None))
    closed = func.COUNT(func.IF(
        and_(TransactionSlip.transaction_type == TransactionType.WITHDRAW,
             Passbook.balance_amount == 0),
        1, None))
    query = (
        db.session.query(
            day.label('transaction_date'),
            opened.label('passbook_open'),
            closed.label('passbook_close'))
        .join(Passbook, Passbook.id == TransactionSlip.passbook_id)
        .join(PassbookTypes, PassbookTypes.id == Passbook.passbook_type_id)
        .filter(PassbookTypes.id == passbook_type)
        .filter(TransactionSlip.transaction_date.between(start_ts, end_ts))
        .group_by(day))
    return query.all()
def previous_month(reference_date):
    """Return *reference_date* shifted one calendar month into the past.

    The day of month is clamped to the last day of the previous month
    (e.g. 2024-03-31 -> 2024-02-29) and the year rolls over correctly in
    January.  The original ``date(year, month - 1, day)`` raised
    ``ValueError`` in both of those cases.
    """
    end_of_prev_month = reference_date.replace(day=1) - dtt.timedelta(days=1)
    return end_of_prev_month.replace(
        day=min(reference_date.day, end_of_prev_month.day))


def query_builder(dimension, limit=None):
    """Build a per-*dimension* report of loaded ads.

    Sums ``fact_processed_ads.loaded_ads`` over two windows ending today:
    the last 7 days and the last calendar month.

    Parameters:
        dimension: dimension table to group by (must join the fact table).
        limit: optional row cap for the resulting SELECT.
    """
    reference_date = dtt.date.today()
    last_week = reference_date - dtt.timedelta(7)
    last_month = previous_month(reference_date)

    reference_date_id = reference_date.strftime("%Y%m%d")
    last_week_id = last_week.strftime("%Y%m%d")
    last_month_id = last_month.strftime("%Y%m%d")

    def window_sum(low_date_id):
        # Sum loaded_ads only for rows with date_id in [low_date_id, today].
        return func.sum(
            func.IF(
                between(fact_processed_ads.c.date_id,
                        low_date_id, reference_date_id),
                fact_processed_ads.c.loaded_ads,
                0))

    return select([
        dimension.c.name,
        window_sum(last_week_id),
        window_sum(last_month_id)
    ], group_by=dimension.c.name).\
        select_from(fact_processed_ads.join(dimension)).\
        limit(limit)
def _filter_by_latest_location(self):
    """Keep only events whose latest location overlaps the report window.

    In history mode no window filtering is applied — every location
    passes through.
    """
    self._join_latest_location()
    if not self.history:
        self.query = self.query.filter(
            func.IF(
                self.MovingAction.id.isnot(None),
                # the moving falls within the time interval
                and_(
                    self.MovingAction_begDate < self.end_dt,
                    or_(self.MovingAction_endDate.is_(None),
                        self.start_dt <= self.MovingAction_endDate)),
                # the admission falls within the time interval
                and_(
                    self.ReceivedAction_begDate < self.end_dt,
                    or_(self.ReceivedAction_endDate.is_(None),
                        self.start_dt <= self.ReceivedAction_endDate))))
def dmdri():
    """Render the DMDRI day page (day 1) with paginated activities.

    Reads ``page`` from the query string (falling back to 1 on invalid
    input), pages five activities at a time and flags whether the
    current user attends any activity of the day.
    """
    form = LoginForm(request.form, csrf_enabled=False)
    day_id = 1
    try:
        page = int(request.args.get('page', 1))
    except ValueError:
        page = 1
    per_page = 5
    offset = (page - 1) * per_page
    # Removed: an unused ``participating_count`` subquery whose only
    # reference was commented-out dead code.
    # Per-activity flag: is the current user among the attendees?
    attending_query = db.session.query(
        Attending.activity_id,
        func.IF(Attending.user_id == get_user_id(), True, False)
            .label('is_attending'))\
        .group_by(Attending.activity_id)\
        .subquery('attending_check')
    activities = db.session.query(Activity, attending_query)\
        .select_from(Activity)\
        .outerjoin(attending_query, Activity.id == attending_query.c.activity_id)\
        .filter(Activity.day_id == day_id)\
        .limit(per_page)\
        .offset(offset)
    all_activities = Activity.query.filter(Activity.day_id == day_id).all()
    attending = any(user.id == get_user_id()
                    for activity in all_activities
                    for user in activity.participants)
    pagination = Pagination(page=page, total=len(all_activities), per_page=per_page,
                            search=False, css_framework='bootstrap3',
                            display_msg='viser aktivitet <b>{start}</b> til <b>{end}</b> av <b>{total}</b> aktiviteter')
    return render_template("public/dmdri.html", activities=activities,
                           attending=attending, per_page=per_page,
                           pagination=pagination, day_id=day_id,
                           time_now=datetime.now(), form=form)
def aktivitetsdagen():
    """Render the activity-day page (day 2) with attendance flags."""
    login_form = LoginForm(request.form, csrf_enabled=False)
    day_id = 2
    # Per-activity flag: is the current user among the attendees?
    attendance_check = (
        db.session.query(
            Attending.activity_id,
            func.IF(Attending.user_id == get_user_id(), True, False)
                .label('is_attending'))
        .group_by(Attending.activity_id)
        .subquery('attending_check'))
    activities = (
        db.session.query(Activity, attendance_check)
        .select_from(Activity)
        .outerjoin(attendance_check,
                   Activity.id == attendance_check.c.activity_id)
        .filter(Activity.day_id == day_id))
    # True if the current user participates in any of the day's activities.
    attending = any(
        participant.id == get_user_id()
        for row in activities
        for participant in row.Activity.participants)
    return render_template("public/festningslekene.html",
                           activities=activities, attending=attending,
                           day_id=day_id, time_now=datetime.now(),
                           form=login_form)
def history(player_id=None):
    """Show a player's match history page with per-rank stats and a
    win/loss bar chart per season.

    Uses ``player_id`` when given, otherwise the logged-in user's player;
    redirects home when neither is available.
    """
    if current_user.Player_ID is None and player_id is None:
        return redirect(url_for('main.index'))
    if player_id:
        p = player_id
    else:
        p = current_user.Player_ID
    # Per-opponent-rank aggregates for the selected player.
    player_detail = db.session.query(Player.Surname, MatchUp.OpponentRank,
                                     func.count(MatchUp.idmatchup).label('MatchesPlayed'),
                                     func.sum(MatchUp.MyPlayerActual).label('ActRacksWon'),
                                     func.sum(MatchUp.OpponentActual).label('ActRacksLost'),
                                     (func.sum(MatchUp.MyPlayerActual) /
                                      (func.sum(MatchUp.OpponentActual) + func.sum(MatchUp.MyPlayerActual))).label('RackPct'),
                                     (func.sum(func.IF(MatchUp.WinLose == 'W', 1, 0)) /
                                      func.count(MatchUp.WinLose)).label('WinPct')). \
        join(MatchUp, MatchUp.Player_ID == Player.idplayer).group_by(MatchUp.OpponentRank). \
        filter_by(Player_ID=p).order_by(MatchUp.OpponentRank).all()
    # Per-season wins/losses used to build the stacked bar chart.
    player_graph = db.session.query(Season.SeasonName, Season.SeasonStart, Season.idseason,
                                    func.count(MatchUp.idmatchup).label('MatchesPlayed'),
                                    func.sum(func.IF(MatchUp.WinLose == 'W', 1, 0)).label('Wins'),
                                    func.sum(func.IF(MatchUp.WinLose == 'L', 1, 0)).label('Loses'),
                                    (func.sum(func.IF(MatchUp.WinLose == 'W', 1, 0)) /
                                     func.count(MatchUp.WinLose)).label('WinPct'),
                                    Player.idplayer). \
        join(Match, Season.idseason == Match.Season_ID). \
        join(MatchUp, Match.idmatch == MatchUp.Match_ID). \
        join(Player, MatchUp.Player_ID == Player.idplayer). \
        filter_by(idplayer=p). \
        group_by(Player.idplayer, Season.SeasonName, Season.SeasonStart, Season.idseason). \
        order_by(Season.SeasonStart).all()
    # Configure horizontal bar chart of player history against the wire
    chart = pygal.HorizontalStackedBar(show_legend=False, tooltip_fancy_mode=False,
                                       print_values=True, width=800, height=300)
    labels = []
    wins = []
    loses = []
    # Create series lists for the chart.add command.
    # PYGAL_URL is localhost:5000 for testing, main URL for AWS.
    # Each bar segment links to the drilldown view for that season.
    for season in player_graph:
        labels.append(season.SeasonName)
        wins.append({
            'value': int(season.Wins),
            'color': 'MediumSeaGreen',
            'xlink': {
                'href': '{}{}'.format(
                    current_app.config['PYGAL_URL'],
                    url_for('main.drilldown', player_id=season.idplayer,
                            season_id=season.idseason, win_loss='W')),
                'target': '_parent'
            },
            'label': "Pct {:.0%}".format(season.WinPct)
        })
        loses.append({
            'value': int(season.Loses),
            'color': 'Orange',
            'xlink': {
                'href': '{}{}'.format(
                    current_app.config['PYGAL_URL'],
                    url_for('main.drilldown', player_id=season.idplayer,
                            season_id=season.idseason, win_loss='L')),
                'target': '_parent'
            },
            'label': "Pct {:.0%}".format(1 - season.WinPct)
        })
    # Build the graph itself
    chart.x_labels = labels
    chart.add("Wins", wins)
    chart.add("Loses", loses)
    chart = chart.render_data_uri()
    # Per-own-rank aggregates (same measures as player_detail, grouped
    # by the player's rank instead of the opponent's).
    player_summary = db.session.query(Player.Surname, MatchUp.MyPlayerRank,
                                      func.count(MatchUp.idmatchup).label('MatchesPlayed'),
                                      func.sum(MatchUp.MyPlayerActual).label('ActRacksWon'),
                                      func.sum(MatchUp.OpponentActual).label('ActRacksLost'),
                                      (func.sum(MatchUp.MyPlayerActual) /
                                       (func.sum(MatchUp.OpponentActual) + func.sum(MatchUp.MyPlayerActual))).label('RackPct'),
                                      (func.sum(func.IF(MatchUp.WinLose == 'W', 1, 0)) /
                                       func.count(MatchUp.WinLose)).label('WinPct')). \
        join(MatchUp, MatchUp.Player_ID == Player.idplayer).group_by(MatchUp.MyPlayerRank).filter_by(Player_ID=p).all()
    player = Player.query.filter_by(idplayer=p).first()
    # Chronological list of individual matches for the history table.
    player_history = db.session.query(Match, MatchUp). \
        join(MatchUp, Match.idmatch == MatchUp.Match_ID). \
        filter(MatchUp.Player_ID == p). \
        order_by(Match.MatchDate).all()
    return render_template('history.html', history=player_history, player=player,
                           details=player_detail, summary=player_summary, chart=chart)
def get_rentals_by_vehicle_id(id):
    """Return all rentals of one vehicle as JSON, newest first.

    Cost rule (MySQL IF()): daily rentals ("D") cost 100 per started day
    (datediff + 1); all other rentals cost the driven distance.
    """
    distance = Rentals.odometer_end - Rentals.odometer_start
    daily_cost = (func.datediff(Rentals.date_end, Rentals.date_start) + 1) * 100
    cost = func.IF(Rentals.rental_type == "D", daily_cost, distance)
    rows = (db.session.query(
                Rentals.id, Rentals.date_start, Rentals.odometer_start,
                Rentals.odometer_end, distance.label('distance'),
                Rentals.date_end, Rentals.rental_type,
                cost.label('rental_cost'))
            .filter(Rentals.vehicle_id == id)
            .order_by(desc(Rentals.date_start))
            .all())
    return rentals_schema_more.jsonify(rows)
def create(tallySheetId):
    """Create the latest PRE-34-PD tally sheet version for a polling
    division, deriving first/second/third preference counts.

    First preferences come from the locked PRE-30-PD rows; second and
    third preferences are summed from PRE-34 preference rows.  Marks the
    version complete only when every candidate row had usable counts.
    """
    tallySheet, tallySheetVersion = TallySheet.create_latest_version(
        tallySheetId=tallySheetId, tallySheetCode=TallySheetCodeEnum.PRE_34_PD)
    polling_division_id = tallySheet.submission.areaId
    query = db.session.query(
        Election.Model.electionId,
        Area.Model.areaId,
        ElectionCandidate.Model.candidateId,
        ElectionCandidate.Model.qualifiedForPreferences,
        func.sum(TallySheetVersionRow_PRE_30_PD.Model.count).label(
            "firstPreferenceCount"),
        # SUM skips NULL, so IF(..., count, NULL) sums matching rows only.
        func.sum(
            func.IF(
                TallySheetVersionRow_PRE_34_preference.Model.preferenceNumber == 2,
                TallySheetVersionRow_PRE_34_preference.Model.preferenceCount,
                None)).label("secondPreferenceCount"),
        func.sum(
            func.IF(
                TallySheetVersionRow_PRE_34_preference.Model.preferenceNumber == 3,
                TallySheetVersionRow_PRE_34_preference.Model.preferenceCount,
                None)).label("thirdPreferenceCount"),
    ).join(
        Submission.Model,
        Submission.Model.areaId == Area.Model.areaId
    ).join(
        Election.Model,
        Election.Model.electionId == Area.Model.electionId
    ).join(
        ElectionCandidate.Model,
        or_(ElectionCandidate.Model.electionId == Election.Model.electionId,
            ElectionCandidate.Model.electionId ==
            Election.Model.parentElectionId)
    ).join(
        TallySheet.Model,
        and_(TallySheet.Model.tallySheetId == Submission.Model.submissionId,
             TallySheet.Model.tallySheetCode.in_([
                 TallySheetCodeEnum.PRE_30_PD, TallySheetCodeEnum.PRE_34_I_RO
             ]))
    ).join(
        TallySheetVersionRow_PRE_30_PD.Model,
        and_(TallySheetVersionRow_PRE_30_PD.Model.tallySheetVersionId ==
             Submission.Model.lockedVersionId,
             TallySheetVersionRow_PRE_30_PD.Model.candidateId ==
             ElectionCandidate.Model.candidateId),
        isouter=True
    ).join(
        TallySheetVersionRow_PRE_34_preference.Model,
        and_(TallySheetVersionRow_PRE_34_preference.Model.tallySheetVersionId ==
             Submission.Model.lockedVersionId,
             TallySheetVersionRow_PRE_34_preference.Model.candidateId ==
             ElectionCandidate.Model.candidateId),
        isouter=True
    ).filter(
        Area.Model.areaId == polling_division_id,
        # ElectionCandidate.Model.qualifiedForPreferences == True
    ).group_by(
        ElectionCandidate.Model.candidateId, Submission.Model.areaId
    ).order_by(
        ElectionCandidate.Model.candidateId, Submission.Model.areaId
    ).all()
    is_complete = True
    for row in query:
        # BUGFIX: the original tested ``(a and b) is not None``, which
        # only checks the last truthy operand (e.g. a zero second count
        # with a missing third count slipped through); test each value
        # explicitly.  A leftover debug print was also removed.
        if row.candidateId is not None and row.firstPreferenceCount is not None:
            tallySheetVersion.add_row(electionId=row.electionId,
                                      candidateId=row.candidateId,
                                      preferenceNumber=1,
                                      preferenceCount=row.firstPreferenceCount,
                                      areaId=polling_division_id)
            if row.qualifiedForPreferences is True:
                if (row.secondPreferenceCount is not None
                        and row.thirdPreferenceCount is not None):
                    tallySheetVersion.add_row(
                        electionId=row.electionId,
                        candidateId=row.candidateId,
                        preferenceNumber=2,
                        preferenceCount=row.secondPreferenceCount,
                        areaId=polling_division_id)
                    tallySheetVersion.add_row(
                        electionId=row.electionId,
                        candidateId=row.candidateId,
                        preferenceNumber=3,
                        preferenceCount=row.thirdPreferenceCount,
                        areaId=polling_division_id)
                else:
                    is_complete = False
        else:
            is_complete = False
    if is_complete:
        tallySheetVersion.set_complete()
    db.session.commit()
    return TallySheetVersionSchema().dump(tallySheetVersion).data
def update_station_fee_statistics(session, statistics_session,
                                  specific_date=None, station_id=None, scope=0):
    """Recompute per-station fee statistics rows (statistics_type=0).

    Aggregates, per scoped date bucket (scope 0 = day, 1 = month,
    2 = year — see ``scoped_date``): delivery fees (Fee.type == 1),
    routine fees (Fee.type == 2) and the total money of settlement
    vouchers, then upserts rows in StatisticsStationFee.

    Parameters:
        session: source-database session.
        statistics_session: statistics-database session (committed here).
        specific_date: optional single date to restrict the rebuild to.
        station_id: optional single station to restrict the rebuild to.
        scope: bucket granularity, see above.
    """
    # First collect every date that has either fees or vouchers.
    # (scope == 0 additionally orders by date descending; the union is
    # de-duplicated either way.)
    if station_id:
        if scope == 0:
            fee_dates = session.query(models.Fee.date) \
                .filter(models.Fee.station_id == station_id).order_by(models.Fee.date.desc())
            voucher_dates = session.query(func.DATE(models.FirmSettlementVoucher.create_time)) \
                .filter(models.FirmSettlementVoucher.station_id == station_id) \
                .order_by(models.FirmSettlementVoucher.create_time.desc())
            dates = fee_dates.union(voucher_dates).distinct().all()
        else:
            fee_dates = session.query(models.Fee.date) \
                .filter(models.Fee.station_id == station_id)
            voucher_dates = session.query(func.DATE(models.FirmSettlementVoucher.create_time)) \
                .filter(models.FirmSettlementVoucher.station_id == station_id)
            dates = fee_dates.union(voucher_dates).distinct().all()
    else:
        if scope == 0:
            fee_dates = session.query(models.Fee.date).order_by(
                models.Fee.date.desc())
            voucher_dates = session.query(func.DATE(models.FirmSettlementVoucher.create_time)) \
                .order_by(models.FirmSettlementVoucher.create_time.desc())
            dates = fee_dates.union(voucher_dates).distinct().all()
        else:
            fee_dates = session.query(models.Fee.date)
            voucher_dates = session.query(
                func.DATE(models.FirmSettlementVoucher.create_time))
            dates = fee_dates.union(voucher_dates).distinct().all()
    dates = [date[0] for date in dates]
    fee_statistics = statistics_session.query(models_statistics.StatisticsStationFee) \
        .filter_by(statistics_type=0)
    if station_id:
        fee_statistics = fee_statistics.filter_by(station_id=station_id)
    if specific_date:
        fee_statistics = fee_statistics.filter_by(
            statistics_date=specific_date)
    fee_statistics_list = fee_statistics.all()
    # Index the statistics rows that already exist, keyed "date:station".
    fee_sta_dict = {
        "{}:{}".format(s.statistics_date, s.station_id): s
        for s in fee_statistics_list
    }
    # For the fee statistics we need three values: purchase cost,
    # delivery/freight fees and routine daily fees.  (This mirrors the
    # real-time update logic in the financial module, moved into a shared
    # helper.)
    fee_sums = session.query(
        models.Fee.date,
        func.sum(func.IF(models.Fee.type == 1, models.Fee.money, 0)).label("delivery"),
        func.sum(func.IF(models.Fee.type == 2, models.Fee.money, 0)).label("routine"),
        # NOTE(review): station_id is selected but not in the GROUP BY
        # below — under MySQL loose GROUP BY an arbitrary row's value is
        # returned when a date spans several stations; verify intent.
        models.Fee.station_id)
    if station_id:
        fee_sums = fee_sums.filter(models.Fee.station_id == station_id)
    if specific_date:
        fee_sums = fee_sums.filter(models.Fee.date == specific_date)
    fee_sums = fee_sums.group_by(models.Fee.date).all()
    vouchers = session.query(models.FirmSettlementVoucher, models.AllocationOrder, models.PurchaseOrderGoods) \
        .join(models.AllocationOrder,
              models.AllocationOrder.id == models.FirmSettlementVoucher.allocation_order_id) \
        .join(models.PurchaseOrderGoods,
              models.PurchaseOrderGoods.id == models.AllocationOrder.purchase_order_goods_id)
    if station_id:
        vouchers = vouchers.filter(
            models.FirmSettlementVoucher.station_id == station_id)
    if specific_date:
        vouchers = vouchers.filter(
            func.DATE(models.FirmSettlementVoucher.create_time) == specific_date)
    vouchers = vouchers.all()
    # Compute the settled piece counts per allocation order.
    allocation_order_ids = {order.id for _, order, _ in vouchers}
    allocated_amount_sums = session.query(models.AllocationOrderGoods.order_id,
                                          func.sum(models.AllocationOrderGoods.allocated_amount)) \
        .filter(models.AllocationOrderGoods.order_id.in_(allocation_order_ids)) \
        .group_by(models.AllocationOrderGoods.order_id) \
        .all()
    allocated_amount_sum_dict = {
        data[0]: data[1]
        for data in allocated_amount_sums
    }

    def scoped_date(date_param):
        # Render the date at the requested granularity (day/month/year).
        if scope == 0:
            str_result = TimeFunc.time_to_str(date_param, "date")
        elif scope == 1:
            str_result = TimeFunc.time_to_str(date_param, "year")
        else:
            str_result = TimeFunc.time_to_str(date_param, "year_only")
        return str_result

    # Total money of pending settlement vouchers per date bucket.
    voucher_sum_dict = defaultdict(int)
    voucher_station_dict = {}
    for voucher, allocation, purchase_goods in vouchers:
        amount = allocated_amount_sum_dict.get(allocation.id, 0)
        # Purchase price pro-rated by piece count:
        # allocated / purchased * subtotal.
        total_money = check_float(amount / purchase_goods.actual_amount * purchase_goods.subtotal)
        date = scoped_date(voucher.create_time)
        voucher_sum_dict[date] += total_money
        voucher_station_dict[date] = voucher.station_id
    # Sum the fees within the requested scope.
    fee_summary_dict = {}
    # NOTE(review): the loop variable below shadows the ``station_id``
    # parameter; harmless today (the parameter is no longer read after
    # this point) but fragile — consider renaming.
    for date, delivery, routine, station_id in fee_sums:
        date = scoped_date(date)
        if date not in fee_summary_dict:
            fee_summary_dict[date] = {
                "delivery_sum": 0,
                "routine_sum": 0,
                "station_id": station_id
            }
        fee_summary_dict[date]["delivery_sum"] += delivery or 0
        fee_summary_dict[date]["routine_sum"] += routine or 0
    result_dates = {scoped_date(date) for date in dates}
    for date in result_dates:
        voucher_sum = voucher_sum_dict.get(date, 0)
        fee_summary = fee_summary_dict.get(date)
        # Covers both days with purchases and days with only other fees.
        # NOTE(review): if a date has neither a voucher station nor a fee
        # summary this raises TypeError on the None subscript — presumably
        # unreachable because every date came from one of the two sources.
        station_id = voucher_station_dict.get(
            date) or fee_summary["station_id"]
        delivery_sum = check_float(
            fee_summary["delivery_sum"]) if fee_summary else 0
        routine_sum = check_float(
            fee_summary["routine_sum"]) if fee_summary else 0
        key = "{}:{}".format(date, station_id)
        statistics = fee_sta_dict.get(key)
        if not statistics:
            new_fee_statistic = models_statistics.StatisticsStationFee(
                statistics_date=date,
                statistics_type=0,
                station_id=station_id,
                voucher_sum=voucher_sum,
                delivery_sum=delivery_sum,
                routine_sum=routine_sum)
            statistics_session.add(new_fee_statistic)
            statistics_session.flush()
            fee_sta_dict[key] = new_fee_statistic
        else:
            statistics.routine_sum = routine_sum
            statistics.delivery_sum = delivery_sum
            statistics.voucher_sum = voucher_sum_dict.get(date, 0)
    statistics_session.commit()
def create(tallySheetId):
    """Create the latest PRE-34-AI (all-island) tally sheet version,
    aggregating preference counts per electoral district.

    First/second/third preference counts are summed from the locked
    PRE-34-ED preference rows of every electoral district mapped to the
    submission's country area.  Marks the version complete only when
    every candidate row had usable counts.
    """
    tallySheet, tallySheetVersion = TallySheet.create_latest_version(
        tallySheetId=tallySheetId, tallySheetCode=TallySheetCodeEnum.PRE_34_AI)
    electoral_districts = db.session.query(
        AreaMap.Model.electoralDistrictId).filter(
            AreaMap.Model.countryId == tallySheet.submission.areaId).group_by(
                AreaMap.Model.electoralDistrictId).all()
    electoral_district_ids = [
        electoral_district.electoralDistrictId
        for electoral_district in electoral_districts
    ]

    def preference_sum(preference_number, label_name):
        # SUM skips NULL, so IF(..., count, NULL) sums matching rows only.
        return func.sum(
            func.IF(
                TallySheetVersionRow_PRE_34_preference.Model.preferenceNumber
                == preference_number,
                TallySheetVersionRow_PRE_34_preference.Model.preferenceCount,
                None)).label(label_name)

    query = db.session.query(
        Area.Model.areaId,
        ElectionCandidate.Model.candidateId,
        ElectionCandidate.Model.qualifiedForPreferences,
        Submission.Model.electionId,
        preference_sum(1, "firstPreferenceCount"),
        preference_sum(2, "secondPreferenceCount"),
        preference_sum(3, "thirdPreferenceCount"),
    ).join(
        Submission.Model,
        Submission.Model.areaId == Area.Model.areaId
    ).join(
        Election.Model,
        Election.Model.electionId == Area.Model.electionId
    ).join(
        ElectionCandidate.Model,
        or_(ElectionCandidate.Model.electionId == Election.Model.electionId,
            ElectionCandidate.Model.electionId ==
            Election.Model.parentElectionId)
    ).join(
        TallySheet.Model,
        and_(TallySheet.Model.tallySheetId == Submission.Model.submissionId,
             TallySheet.Model.tallySheetCode == TallySheetCodeEnum.PRE_34_ED)
    ).join(
        TallySheetVersionRow_PRE_34_preference.Model,
        and_(TallySheetVersionRow_PRE_34_preference.Model.tallySheetVersionId ==
             Submission.Model.lockedVersionId,
             TallySheetVersionRow_PRE_34_preference.Model.candidateId ==
             ElectionCandidate.Model.candidateId),
        isouter=True
    ).filter(
        Area.Model.areaId.in_(electoral_district_ids)
    ).group_by(
        ElectionCandidate.Model.candidateId, Area.Model.areaId
    ).order_by(
        ElectionCandidate.Model.candidateId, Area.Model.areaId
    ).all()
    is_complete = True
    for row in query:
        # BUGFIX: the original tested ``(a and b) is not None``, which
        # only checks the last truthy operand (a zero second count with a
        # missing third count slipped through); test each value explicitly.
        if row.candidateId is not None and row.firstPreferenceCount is not None:
            tallySheetVersion.add_row(electionId=row.electionId,
                                      candidateId=row.candidateId,
                                      preferenceNumber=1,
                                      preferenceCount=row.firstPreferenceCount,
                                      areaId=row.areaId)
            if row.qualifiedForPreferences is True:
                if (row.secondPreferenceCount is not None
                        and row.thirdPreferenceCount is not None):
                    tallySheetVersion.add_row(
                        electionId=row.electionId,
                        candidateId=row.candidateId,
                        preferenceNumber=2,
                        preferenceCount=row.secondPreferenceCount,
                        areaId=row.areaId)
                    tallySheetVersion.add_row(
                        electionId=row.electionId,
                        candidateId=row.candidateId,
                        preferenceNumber=3,
                        preferenceCount=row.thirdPreferenceCount,
                        areaId=row.areaId)
                else:
                    is_complete = False
        else:
            is_complete = False
    if is_complete:
        tallySheetVersion.set_complete()
    db.session.commit()
    return TallySheetVersionSchema().dump(tallySheetVersion).data
def __call__(self, user_ids, session):
    """
    Parameters:
        user_ids : list of mediawiki user ids to find edit for
        session : sqlalchemy session open on a mediawiki database

    Returns:
        dictionary from user ids to a dictionary of the form:
        {
            'threshold': 1 for True, 0 for False,
            'time_to_threshold': number in hours or None,
            'censored': 1 for True, 0 for False
        }
    """
    threshold_hours = int(self.threshold_hours.data)
    threshold_secs = threshold_hours * 3600
    number_of_edits = int(self.number_of_edits.data)
    Revision2 = aliased(Revision, name='r2')
    # Self-join: for each revision, count how many of the same user's
    # revisions have an earlier-or-equal timestamp — i.e. the revision's
    # ordinal ("number") in that user's edit history.
    ordered_revisions = session \
        .query(
            Revision.rev_user,
            Revision.rev_timestamp,
            label('number', func.count()),
        ) \
        .join(MediawikiUser) \
        .join(Page) \
        .join(
            Revision2,
            and_(
                Revision.rev_user == Revision2.rev_user,
                Revision.rev_timestamp >= Revision2.rev_timestamp
            )
        ) \
        .group_by(Revision.rev_user) \
        .group_by(Revision.rev_timestamp) \
        .filter(Page.page_namespace.in_(self.namespaces.data)) \
        .filter(
            # only revisions made within threshold_secs of registration
            func.unix_timestamp(Revision.rev_timestamp) -
            func.unix_timestamp(MediawikiUser.user_registration) <= threshold_secs
        )
    o_r = self.filter(ordered_revisions, user_ids).subquery()
    # Outer-join users against their number_of_edits-th revision: a match
    # means they reached the threshold within the time window.
    metric = session.query(
        MediawikiUser.user_id,
        label(
            Threshold.id,
            func.IF(o_r.c.rev_timestamp != None, 1, 0)
        ),
        label(
            Threshold.time_to_threshold_id,
            # hours from registration to the threshold edit, NULL if not reached
            func.IF(
                o_r.c.rev_timestamp != None,
                (func.unix_timestamp(o_r.c.rev_timestamp) -
                 func.unix_timestamp(MediawikiUser.user_registration)) / 3600,
                None
            )
        ),
        # censored: the observation window has not yet elapsed for users
        # who have not reached the threshold
        label(CENSORED, func.IF(
            o_r.c.rev_timestamp != None,
            0,
            func.IF(
                func.unix_timestamp(MediawikiUser.user_registration) +
                threshold_secs > func.unix_timestamp(func.now()),
                1,
                0
            )
        ))
    ) \
        .outerjoin(
            o_r,
            and_(
                MediawikiUser.user_id == o_r.c.rev_user,
                o_r.c.number == number_of_edits))
    metric = self.filter(metric, user_ids, MediawikiUser.user_id)
    return {
        u.user_id: {
            Threshold.id: u.threshold,
            Threshold.time_to_threshold_id: u.time_to_threshold,
            CENSORED: u.censored,
        }
        for u in metric.all()
    }
def online_count(cls):
    """SQL aggregate counting contracts flagged as online.

    Maps the boolean to 1/0 with MySQL's IF() before summing.
    """
    online_as_int = func.IF(Contract.online, 1, 0)
    return func.sum(online_as_int)
IF ( ISNULL(s1.salary), 0, s1.salary ) ) AS difference FROM salaries s LEFT JOIN salaries s1 ON s.emp_no = s1.emp_no AND YEAR (s1.from_date) = YEAR (s.from_date) - 1 WHERE s.emp_no = 10001 """ sql_data = [(d.emp_no, d.from_date, d.to_date, d.salary, d.last_salary, d.difference) for d in session.execute(sql)] '''使用 sqlalchemy 方式进行查询''' s1 = aliased(Salary) s2 = aliased(Salary) alchemy_data = session.query(s1.emp_no, s1.from_date, s1.to_date, s1.salary, func.IF(func.isnull(s2.salary), 0, s2.salary).label("last_salary"), (s1.salary - (func.IF(func.isnull(s2.salary), 0, s2.salary))).label("difference")).\ outerjoin(s2, and_(s2.emp_no==s1.emp_no, func.year(s2.from_date)==func.year(s1.from_date)-1)).\ filter(s1.emp_no==10001).all() '''比较两个结果,应该是True''' for d in zip(sql_data, alchemy_data): print(d) print('第四例结果是:{}'.format(operator.eq(sql_data, alchemy_data))) '''-------------------------------------------------------------------------------------------------''' session.commit() session.close()
def get_rentals_sum_by_vehicle_id(id):
    """Aggregate rental count, distance and cost for one vehicle.

    Cost rule (MySQL IF(), mirrors the per-rental listing): daily
    rentals ("D") cost 100 per started day (datediff + 1), other rentals
    cost the driven distance.

    Returns a dict with total_rentals, total_distance and total_cost;
    all zeros when the vehicle has no rentals.
    """
    rows = db.session.query(
        Rentals.id,
        Rentals.vehicle_id,
        (Rentals.odometer_end - Rentals.odometer_start).label('distance'),
        Rentals.rental_type,
        func.IF(Rentals.rental_type == "D",
                (func.datediff(Rentals.date_end, Rentals.date_start) + 1) * 100,
                (Rentals.odometer_end - Rentals.odometer_start))
            .label('rental_cost')) \
        .filter(Rentals.vehicle_id == id).all()
    # Built-in aggregation replaces the original manual counter loop;
    # sum()/len() handle the empty case naturally.
    return {
        "total_rentals": len(rows),
        "total_distance": sum(row.distance for row in rows),
        "total_cost": sum(row.rental_cost for row in rows),
    }
def main(inicio,fin,ci):
    """Build and export the tray ("bandeja") of teaching-hour novelties.

    Configuration files (read from the current directory):
        bandejas.cfg -- database connection parameters.
        config.cfg   -- general configuration parameters.

    Execution example:
        $ python bj.py --inicio='2018-05-01' --fin='2018-05-14'

    - Novelties are taken in the period [inicio, fin): the start date is
      included, the end date is not.  Novelties are designation
      registrations, designation ceases, annulments and moves to
      substitute ("suplencia") status.
    - For every person (PerId) with novelties in the period, the whole
      history of registrations/ceases is loaded, capped at 01/03 of the
      year corresponding to the given start date.

    :param inicio: period start, 'YYYY-MM-DD' string (inclusive).
    :param fin: period end, 'YYYY-MM-DD' string (exclusive).
    :param ci: optional national id (cedula); when given, only that
        person's novelties are processed.
    """
    # NOTE(review): yaml.load without an explicit Loader is unsafe on
    # untrusted files; yaml.safe_load would be preferable -- confirm.
    with open('bandejas.cfg', 'r') as ymlfile:
        cdb = yaml.load(ymlfile)
    with open('config.cfg', 'r') as ymlfile:
        cfg = yaml.load(ymlfile)

    # One engine per database: personnel model, input tray, output tray.
    engine = create_engine('mysql+pymysql://'+cdb['personal']['usr']+':'+cdb['personal']['password']+'@'+cdb['personal']['host']+'/'+cdb['personal']['bd'])
    engine_bandeja_in = create_engine('mysql+pymysql://'+cdb['bandeja_in']['usr']+':'+cdb['bandeja_in']['password']+'@'+cdb['bandeja_in']['host']+'/'+cdb['bandeja_in']['bd'])
    engine_bandeja_out = create_engine('mysql+pymysql://'+cdb['bandeja_out']['usr']+':'+cdb['bandeja_out']['password']+'@'+cdb['bandeja_out']['host']+'/'+cdb['bandeja_out']['bd'])

    puestos_funcion = cfg['puestos_funcion'] # the posts considered direct teaching

    # Bind parameters shared by the queries below.
    parametros = {}
    parametros['p1d']=dt.date(int(inicio.split('-')[0]),int(inicio.split('-')[1]),int(inicio.split('-')[2]))
    parametros['p2d']=dt.date(int(fin.split('-')[0]),int(fin.split('-')[1]),int(fin.split('-')[2]))
    # Designations starting at/after this cap (the month following 'fin')
    # are not let through.
    parametros['tope']=dt.date(int(fin.split('-')[0])+(1 if (fin.split('-')[1]=='12') else 0),1 if (fin.split('-')[1]=='12') else int(fin.split('-')[1])+1,1)
    # The substitute-leave causes of interest.
    suplcausales = cfg['suplcausales']
    # Start of the school year: March 1st of the year of 'inicio'
    # (previous year when 'inicio' falls before March).
    parametros['inicioLectivo'] = dt.datetime(int(inicio.split('-')[0])-(1 if inicio.split('-')[1]<'03' else 0), 03, 01)

    # Reflect the Personal model's tables from the database.
    metadata = sa.MetaData()
    relaciones_laborales = sa.Table('RELACIONES_LABORALES',metadata,autoload=True, autoload_with=engine)
    anulaciones = sa.Table('ANULACIONES', metadata,autoload=True,autoload_with=engine)
    funciones_relacion_laboral = sa.Table('FUNCIONES_RELACION_LABORAL', metadata, autoload=True, autoload_with=engine)
    funciones_asignadas = sa.Table('FUNCIONES_ASIGNADAS', metadata, autoload=True, autoload_with=engine)
    sillas = sa.Table('SILLAS', metadata, autoload=True, autoload_with=engine)
    cargas_horarias = sa.Table('CARGAS_HORARIAS', metadata, autoload=True, autoload_with=engine)
    silla_grupo_materia = sa.Table('SILLAGRUPOMATERIA', metadata, autoload=True, autoload_with=engine)
    puestos = sa.Table('PUESTOS', metadata, autoload=True, autoload_with=engine)
    denominaciones_cargo= sa.Table('DENOMINACIONES_CARGOS', metadata, autoload=True, autoload_with=engine)
    suplencias = sa.Table('SUPLENCIAS', metadata, autoload=True, autoload_with=engine)
    funciones_agrup_lin = sa.Table('FUNCION_AGRUP_LIN', metadata, autoload=True, autoload_with=engine)
    # Reflect the Personas tables.
    personas = sa.Table('PERSONAS', metadata, schema="Personas", autoload=True, autoload_with=engine)
    personas_documentos = sa.Table('PERSONASDOCUMENTOS', metadata, schema="Personas", autoload=True, autoload_with=engine)
    # Reflect the subjects table from Estudiantil.
    asignaturas_materias = sa.Table('ASIGNATURAS_MATERIAS', metadata, schema="Estudiantil", autoload=True, autoload_with=engine)
    # Load the subjects from Estudiantil.
    query_asignaturas_materias = sa.select([asignaturas_materias])
    df_asignaturas_materias = pd.read_sql_query(query_asignaturas_materias, engine, params=parametros)

    # Load siap's dependency data (AS400 id -> Siap id mapping).
    tabla_institucional = sa.Table('tabla_institucional',metadata, autoload=True, autoload_with=engine_bandeja_in)
    query_tabla_institucional = sa.select([tabla_institucional.c.DEP_AS400.label('dependid'),tabla_institucional.c.DEP_DBC.label('dependidSiap')]).select_from(tabla_institucional);
    df_tabla_institucional = pd.read_sql_query(query_tabla_institucional, engine_bandeja_in, params=parametros)

    # Load the function groups used to identify support hours (HAP), POB,
    # POP, workshop teachers ("talleristas") and code 68.
    query_funciones_cargo = sa.select([funciones_agrup_lin])
    df_funciones_cargo = pd.read_sql_query(query_funciones_cargo, engine,params=parametros)
    df_funciones_hap = df_funciones_cargo.loc[df_funciones_cargo.Funcion_Agrup_Cab_Id==1, 'FuncionId']
    df_funciones_POB = df_funciones_cargo.loc[df_funciones_cargo.Funcion_Agrup_Cab_Id==8, 'FuncionId']
    df_funciones_POP = df_funciones_cargo.loc[df_funciones_cargo.Funcion_Agrup_Cab_Id==7, 'FuncionId']
    df_funciones_68 = df_funciones_cargo.loc[df_funciones_cargo.Funcion_Agrup_Cab_Id==5, 'FuncionId']
    df_funciones_talleristas = df_funciones_cargo.loc[df_funciones_cargo.Funcion_Agrup_Cab_Id==9, 'FuncionId']
    df_coordinadores_especiales = df_funciones_cargo.loc[df_funciones_cargo.Funcion_Agrup_Cab_Id==10,'FuncionId']

    # Novelties: designated RLs whose start, cease, or late-registered
    # start/cease falls inside the tray period.
    query_novedades = sa.select(
        [relaciones_laborales.c.PersonalPerId, relaciones_laborales.c.RelLabId]
    ).select_from(
        relaciones_laborales.join(puestos)
    ).where(
        (puestos.c.PuestoFuncionId.in_(puestos_funcion)) &
        # designated RL
        (relaciones_laborales.c.PersonalPerId <> None) &
        (
            # starts within the tray period
            (
                (relaciones_laborales.c.RelLabFchIniActividades >= sa.bindparam('p1d')) &
                (relaciones_laborales.c.RelLabFchIniActividades < sa.bindparam('p2d'))
            ) |
            # or ends within the tray period
            (
                (relaciones_laborales.c.RelLabCeseFchReal >= sa.bindparam('p1d')) &
                (relaciones_laborales.c.RelLabCeseFchReal < sa.bindparam('p2d'))
            ) |
            # or the start was registered late, within the period
            (
                (relaciones_laborales.c.RelLabDesignFchAlta >= sa.bindparam('p1d')) &
                (relaciones_laborales.c.RelLabDesignFchAlta < sa.bindparam('p2d'))
            ) |
            # or the cease was registered late, within the period
            (
                (relaciones_laborales.c.RelLabCeseFchAlta >= sa.bindparam('p1d')) &
                (relaciones_laborales.c.RelLabCeseFchAlta < sa.bindparam('p2d'))
            )
        )
    )
    df_novedades = pd.read_sql_query(query_novedades, engine, params=parametros)

    # Load the annulments of the period.
    query_anulaciones_periodo = sa.select(
        [relaciones_laborales.c.PersonalPerId,relaciones_laborales.c.RelLabId, anulaciones.c.AnulacionFchAlta]
    ).select_from(
        anulaciones.join(relaciones_laborales, cast(anulaciones.c.AnulacionValorPkTabla,Integer)==relaciones_laborales.c.RelLabId).join(puestos)
    ).where(
        (anulaciones.c.AnulacionFchAlta >= sa.bindparam('p1d')) &
        (anulaciones.c.AnulacionFchAlta < sa.bindparam('p2d')) &
        (anulaciones.c.AnulacionTipoNombre=='DESIGNACION') &
        (puestos.c.PuestoFuncionId.in_(puestos_funcion))
    )
    df_anulaciones_periodo = pd.read_sql(query_anulaciones_periodo,engine,params=parametros)

    rlt = aliased(relaciones_laborales) # RLs of the incumbents ("titulares")
    rls = aliased(relaciones_laborales) # RLs of the substitutes ("suplentes")

    # PerIds with novelties or events in the tray period (or the one given).
    if ci!=None:
        # A cedula was passed: only that person's novelties matter.
        query_perid = sa.select([personas_documentos.c.PerId]).select_from(personas_documentos).where((personas_documentos.c.PaisCod=='UY')&(personas_documentos.c.DocCod=='CI')&(personas_documentos.c.PerDocId==ci))
        set_perids_novedades = pd.read_sql_query(query_perid, engine, params=parametros)['PerId'].unique().tolist()
    else:
        # Load the substitutions ("suplencias") of the period.
        query_suplencias = sa.select(
            [rlt.c.PersonalPerId,suplencias.c.RelLabId,func.GREATEST(cast(suplencias.c.SuplFchAlta,Date),rlt.c.RelLabFchIniActividades).label('SuplFchAlta'),suplencias.c.SuplCausId,rlt.c.RelLabFchIniActividades,rlt.c.RelLabCeseFchReal,rls.c.RelLabAnulada.label('RelLabAnuladaS'),rls.c.RelLabFchIniActividades.label('RelLabFchIniActividadesS'),rls.c.RelLabCeseFchReal.label('RelLabCeseFchRealS')]
        ).select_from(
            rlt.join(puestos).join(suplencias, suplencias.c.RelLabId==rlt.c.RelLabId).join(rls, rls.c.RelLabId==suplencias.c.SuplRelLabId)
        ).where(
            (puestos.c.PuestoFuncionId.in_(puestos_funcion)) &
            (suplencias.c.SuplCausId.in_(suplcausales)) &
            (rlt.c.RelLabAnulada==0) &
            ((rlt.c.RelLabFchIniActividades < rlt.c.RelLabCeseFchReal) | (rlt.c.RelLabCeseFchReal==None)) &
            # the rls may be annulled; in that case the novelty is flagged
            # in RelLabCeseFchAlta
            (
                # the substitution start falls in the tray period:
                ((func.GREATEST(cast(suplencias.c.SuplFchAlta,Date),rlt.c.RelLabFchIniActividades) < sa.bindparam('p2d')) &
                 (func.GREATEST(cast(suplencias.c.SuplFchAlta,Date),rlt.c.RelLabFchIniActividades) >= sa.bindparam('p1d'))
                ) |
                # or the substitution start was modified in the tray period:
                ((cast(suplencias.c.Suplencias_FchUltAct,Date) < sa.bindparam('p2d')) &
                 (cast(suplencias.c.Suplencias_FchUltAct,Date) >= sa.bindparam('p1d'))
                ) |
                # or the substitution end falls in the tray period:
                (((rls.c.RelLabCeseFchReal < sa.bindparam('p2d')) | (rls.c.RelLabCeseFchReal==None)) &
                 (rls.c.RelLabCeseFchReal >= sa.bindparam('p1d'))
                ) |
                # or the substitution end was modified or annulled in the tray period:
                ((rls.c.RelLabCeseFchAlta < sa.bindparam('p2d')) &
                 (rls.c.RelLabCeseFchAlta >= sa.bindparam('p1d'))
                )
            )
        )
        df_suplencias = pd.read_sql_query(query_suplencias, engine, params=parametros)
        set_perids_novedades = df_novedades['PersonalPerId'].append(df_anulaciones_periodo['PersonalPerId']).append(df_suplencias['PersonalPerId']).unique().tolist()

    if len(set_perids_novedades) == 0: # no cedulas to process
        return

    ## Take the history of the perids with novelties.
    # Basic history join.
    j3 = rlt.join(puestos).join(funciones_relacion_laboral).join(funciones_asignadas).join(sillas).join(silla_grupo_materia, sillas.c.SillaId==silla_grupo_materia.c.SillaId, isouter=True).join(asignaturas_materias, sillas.c.MateriaId==asignaturas_materias.c.MateriaId, isouter=True)
    # Substitutions join.
    jsupl = suplencias.join(rls, ((rls.c.RelLabId==suplencias.c.SuplRelLabId) & (rls.c.RelLabAnulada==0) & (suplencias.c.SuplCausId.in_(suplcausales))))
    # Clone of the substitutions join, used to find the NEXT substitution.
    supl_siguiente = aliased(suplencias) # substitution consecutive to the current one
    rls_siguiente = aliased(relaciones_laborales)
    jsupl_siguiente = supl_siguiente.join(rls_siguiente, ((rls_siguiente.c.RelLabId==supl_siguiente.c.SuplRelLabId) & (rls_siguiente.c.RelLabAnulada==0) & (supl_siguiente.c.SuplCausId.in_(suplcausales))))
    # Clone of the substitutions join, used to assert there is no
    # INTERMEDIATE substitution between the current one and the next.
    supl_intermedia = aliased(suplencias) # substitution consecutive to the current one
    rls_intermedia = aliased(relaciones_laborales)
    jsupl_intermedia = supl_intermedia.join(rls_intermedia, ((rls_intermedia.c.RelLabId==supl_intermedia.c.SuplRelLabId) & (rls_intermedia.c.RelLabAnulada==0) & (supl_intermedia.c.SuplCausId.in_(suplcausales))))

    # Basic history of the perids with novelties; does NOT include RLs
    # moved down to substitution.
    query_historia_rl = sa.select(
        [rlt.c.PersonalPerId, puestos.c.PuestoFuncionId,rlt.c.RelLabId,rlt.c.RelLabDesignCaracter,rlt.c.RelLabCicloPago,rlt.c.RelLabFchIniActividades, rlt.c.RelLabCeseFchReal, rlt.c.CauBajCod,silla_grupo_materia.c.GrupoMateriaId,sillas.c.TurnoId, sillas.c.SillaDependId,funciones_relacion_laboral.c.FuncRelLabCantHrs,sillas.c.FuncionId,rlt.c.RelLabAnulada,puestos.c.PuestoAsignId,asignaturas_materias.c.AsignId]
    ).select_from(
        j3.join(jsupl, ((rlt.c.RelLabId==suplencias.c.RelLabId)), isouter=True)
    ).where(
        (rlt.c.RelLabFchIniActividades >= sa.bindparam('inicioLectivo')) &
        (rlt.c.PersonalPerId.in_(set_perids_novedades)) &
        (puestos.c.PuestoFuncionId.in_(puestos_funcion)) &
        (suplencias.c.RelLabId==None)
    )
    df_historia_rl = pd.read_sql_query(query_historia_rl, engine, params=parametros)
    df_historia_rl.loc[:,'Origen']=['df_historia_rl']
    # Change the subject number of the Coordinations.
    df_historia_rl.loc[df_historia_rl['AsignId']==90,['AsignId','RelLabDesignCaracter']] = [75,'I']
    # Change the subject number of AAM.
    df_historia_rl.loc[df_historia_rl['AsignId']==98,'AsignId'] = 77

    # SUBSTITUTIONS
    # Each move down to substitution implies (walking them in date order):
    # (1) add a record from the substitution end to the original end
    #     (step 2 may later change its cease),
    # (2) cease the active RL at the substitution start date,
    # (3) when the leave cause warrants it, create a record (registration)
    #     for the paid substitution period.

    # (1) Registrations injected into the tray for the period after each leave.
    query_alta_luego_de_suplencia = sa.select(
        [rlt.c.PersonalPerId,puestos.c.PuestoFuncionId,rlt.c.RelLabId,rlt.c.RelLabDesignCaracter,rlt.c.RelLabCicloPago,func.GREATEST(rlt.c.RelLabFchIniActividades,func.ADDDATE(rls.c.RelLabCeseFchReal,1)).label('RelLabFchIniActividades'),func.IF(supl_siguiente.c.SuplId==None,rlt.c.RelLabCeseFchReal,cast(supl_siguiente.c.SuplFchAlta,Date)).label('RelLabCeseFchReal'),func.IF(supl_siguiente.c.SuplId==None,rlt.c.CauBajCod,'50').label('CauBajCod'),silla_grupo_materia.c.GrupoMateriaId,sillas.c.TurnoId,sillas.c.SillaDependId,funciones_relacion_laboral.c.FuncRelLabCantHrs,sillas.c.FuncionId,rlt.c.RelLabAnulada,puestos.c.PuestoAsignId,asignaturas_materias.c.AsignId]
    ).select_from(
        jsupl.join(j3, ((rlt.c.RelLabId==suplencias.c.RelLabId) & (rlt.c.RelLabAnulada==0))).join(jsupl_siguiente, ((supl_siguiente.c.RelLabId==rlt.c.RelLabId) & (supl_siguiente.c.SuplId<>suplencias.c.SuplId) & (supl_siguiente.c.SuplFchAlta>=suplencias.c.SuplFchAlta)), isouter=True).join(jsupl_intermedia, ((supl_intermedia.c.RelLabId==rlt.c.RelLabId) & (supl_intermedia.c.SuplId<>suplencias.c.SuplId) & (supl_intermedia.c.SuplFchAlta>=suplencias.c.SuplFchAlta) & (supl_intermedia.c.SuplId<>supl_siguiente.c.SuplId) & (supl_intermedia.c.SuplFchAlta<=supl_siguiente.c.SuplFchAlta)), isouter=True)
    ).where(
        (rlt.c.RelLabFchIniActividades >= sa.bindparam('inicioLectivo')) &
        (rlt.c.PersonalPerId.in_(set_perids_novedades)) &
        (puestos.c.PuestoFuncionId.in_(puestos_funcion)) &
        (rls.c.RelLabCeseFchReal<>None) &
        (supl_intermedia.c.SuplId==None) &
        ((supl_siguiente.c.SuplId==None) | ((rls.c.RelLabCeseFchReal<>None) & (cast(supl_siguiente.c.SuplFchAlta,Date) > rls.c.RelLabCeseFchReal))) &
        (func.ADDDATE(rls.c.RelLabCeseFchReal,1) < func.IF(supl_siguiente.c.SuplId==None,rlt.c.RelLabCeseFchReal,cast(supl_siguiente.c.SuplFchAlta,Date)))
    )
    df_alta_luego_de_suplencia = pd.read_sql_query(query_alta_luego_de_suplencia, engine, params=parametros)
    df_alta_luego_de_suplencia.loc[:,'Origen']=['df_alta_luego_de_suplencia']

    # (2) Registration injected for the period before the first leave.
    query_primera_suplencia = sa.select(
        [rlt.c.PersonalPerId,puestos.c.PuestoFuncionId,rlt.c.RelLabId,rlt.c.RelLabDesignCaracter,rlt.c.RelLabCicloPago,rlt.c.RelLabFchIniActividades,cast(suplencias.c.SuplFchAlta,Date).label('RelLabCeseFchReal'),literal_column('50').label('CauBajCod'),silla_grupo_materia.c.GrupoMateriaId,sillas.c.TurnoId,sillas.c.SillaDependId,funciones_relacion_laboral.c.FuncRelLabCantHrs,sillas.c.FuncionId,rlt.c.RelLabAnulada,puestos.c.PuestoAsignId, asignaturas_materias.c.AsignId]
    ).select_from(
        jsupl.join(j3, ((rlt.c.RelLabId==suplencias.c.RelLabId) & (rlt.c.RelLabAnulada==0))).join(jsupl_intermedia, ((supl_intermedia.c.RelLabId==rlt.c.RelLabId) & (supl_intermedia.c.SuplId<>suplencias.c.SuplId) & (supl_intermedia.c.SuplFchAlta<=suplencias.c.SuplFchAlta)), isouter=True)
    ).where(
        (rlt.c.RelLabFchIniActividades >= sa.bindparam('inicioLectivo')) &
        (rlt.c.PersonalPerId.in_(set_perids_novedades)) &
        (puestos.c.PuestoFuncionId.in_(puestos_funcion)) &
        (supl_intermedia.c.SuplId==None) &
        (rlt.c.RelLabFchIniActividades < cast(suplencias.c.SuplFchAlta,Date))
    )
    df_primera_suplencia = pd.read_sql_query(query_primera_suplencia, engine, params=parametros)
    df_primera_suplencia.loc[:,'Origen']=['df_primera_suplencia']

    # (3) Registrations injected into the tray for the leave period itself,
    # when it is a Medical Board or a secondment ("pase en comisión").
    query_alta_suplencia_paga = sa.select(
        [rlt.c.PersonalPerId,puestos.c.PuestoFuncionId,rlt.c.RelLabId,rlt.c.RelLabDesignCaracter,rlt.c.RelLabCicloPago,func.GREATEST(rlt.c.RelLabFchIniActividades,func.ADDDATE(cast(suplencias.c.SuplFchAlta,Date),1)).label('RelLabFchIniActividades'),func.IFNULL(rls.c.RelLabCeseFchReal,rlt.c.RelLabFchFinPrevista).label('RelLabCeseFchReal'),literal_column('50').label('CauBajCod'),silla_grupo_materia.c.GrupoMateriaId,sillas.c.TurnoId,sillas.c.SillaDependId,funciones_relacion_laboral.c.FuncRelLabCantHrs,sillas.c.FuncionId,rlt.c.RelLabAnulada,puestos.c.PuestoAsignId,asignaturas_materias.c.AsignId,suplencias.c.SuplCausId]
    ).select_from(
        jsupl.join(j3, ((rlt.c.RelLabId==suplencias.c.RelLabId) & (rlt.c.RelLabAnulada==0)))
    ).where(
        (rlt.c.RelLabFchIniActividades >= sa.bindparam('inicioLectivo')) &
        (rlt.c.PersonalPerId.in_(set_perids_novedades)) &
        (puestos.c.PuestoFuncionId.in_(puestos_funcion)) &
        (suplencias.c.SuplCausId.in_([16, 17, 162])) &
        (func.GREATEST(rlt.c.RelLabFchIniActividades,func.ADDDATE(cast(suplencias.c.SuplFchAlta,Date),1)) <= func.IFNULL(rls.c.RelLabCeseFchReal,rlt.c.RelLabFchFinPrevista))
    )
    df_alta_suplencia_paga = pd.read_sql_query(query_alta_suplencia_paga, engine, params=parametros)
    df_alta_suplencia_paga.loc[:,'Origen']=['df_alta_suplencia_paga']
    # Medical Boards (Juntas Médicas) go with subject 162:
    df_alta_suplencia_paga.loc[df_alta_suplencia_paga['SuplCausId']==162,['AsignId','CauBajCod']] = [162, 66]
    # Secondments WITHIN ANEP go with dependency 8902.
    df_alta_suplencia_paga.loc[df_alta_suplencia_paga['SuplCausId']==16,['SillaDependId','CauBajCod']] = [8902, 66]
    # Secondments OUTSIDE SECUNDARIA go with dependency 8901.
    df_alta_suplencia_paga.loc[df_alta_suplencia_paga['SuplCausId']==17,['SillaDependId','CauBajCod']] = [8901, 66]
    del df_alta_suplencia_paga['SuplCausId']

    # Assemble the full history from the four sources.
    df_historia_completa = pd.concat([df_historia_rl,df_primera_suplencia,df_alta_luego_de_suplencia,df_alta_suplencia_paga],axis=0)
    df_historia_completa = df_historia_completa.rename(columns = {'RelLabFchIniActividades':'falta','RelLabCeseFchReal':'fcese','SillaDependId':'dependid'})
    df_historia_completa = df_historia_completa.reset_index(drop=True)
    # NOTE(review): the result of this merge is discarded (not assigned),
    # so the statement is a no-op -- confirm whether an assignment was intended.
    df_historia_completa.merge(df_anulaciones_periodo, on='RelLabId', how='left')
    df_anulaciones_a_eliminar = df_anulaciones_periodo[df_anulaciones_periodo['RelLabId'].isin(df_novedades['RelLabId'])]
    # Drop the annulled rows from the history.
    df_historia_completa = df_historia_completa[df_historia_completa['RelLabId'].isin(df_anulaciones_a_eliminar['RelLabId'])==False]

    # Fetch the personal data (PerId -> document number).
    query_personas = sa.select([personas.c.PerId.label('PersonalPerId'),personas_documentos.c.PerDocId]).select_from(personas.join(personas_documentos)).where((personas_documentos.c.PaisCod=='UY')&(personas_documentos.c.DocCod=='CI')&(personas.c.PerId.in_(set_perids_novedades)))
    df_personas = pd.read_sql_query(query_personas, engine, params=parametros)
    df_historia_completa = df_historia_completa.merge(df_personas, on='PersonalPerId', how='left')

    # Add subject 151 to all rows that lack one (falls back to the post's
    # subject, then to cfg['asignid_otros']).
    df_historia_completa.loc[((df_historia_completa['AsignId'].isnull()) & (df_historia_completa['PuestoAsignId'].notnull())),'AsignId']=df_historia_completa['PuestoAsignId']
    df_historia_completa.loc[(df_historia_completa['AsignId'].isnull()),'AsignId']=cfg['asignid_otros']
    df_historia_completa = df_historia_completa.loc[:,['PerDocId','dependid','AsignId','RelLabCicloPago','RelLabDesignCaracter','FuncRelLabCantHrs','falta','fcese','CauBajCod','GrupoMateriaId','FuncionId','RelLabAnulada','PersonalPerId','RelLabId']]
    # Hard-coded attributes.
    df_historia_completa['PerDocTpo']='DO'
    # Character/subject overrides per special function group.
    df_historia_completa.loc[df_historia_completa['FuncionId'].isin(df_funciones_hap.tolist()),'RelLabDesignCaracter']=cfg['caracter_horas_apoyo']
    df_historia_completa.loc[df_historia_completa['FuncionId'].isin(df_funciones_hap.tolist()),'AsignId']=cfg['asignid_horas_apoyo']
    df_historia_completa.loc[df_historia_completa['FuncionId'].isin(df_funciones_POB.tolist()),'RelLabDesignCaracter']=cfg['caracter_pob']
    df_historia_completa.loc[df_historia_completa['FuncionId'].isin(df_funciones_POB.tolist()),'AsignId']=cfg['asignid_pob']
    df_historia_completa.loc[df_historia_completa['FuncionId'].isin(df_funciones_POP.tolist()),'RelLabDesignCaracter']=cfg['caracter_pop']
    df_historia_completa.loc[df_historia_completa['FuncionId'].isin(df_funciones_POP.tolist()),'AsignId']=cfg['asignid_pop']
    df_historia_completa.loc[df_historia_completa['FuncionId'].isin(df_funciones_68.tolist()),'RelLabDesignCaracter']=cfg['caracter_68']
    df_historia_completa.loc[df_historia_completa['FuncionId'].isin(df_funciones_68.tolist()),'AsignId']=cfg['asignid_68']
    df_historia_completa.loc[df_historia_completa['FuncionId'].isin(df_funciones_talleristas.tolist()),'RelLabDesignCaracter']=cfg['caracter_talleristas']
    df_historia_completa.loc[df_historia_completa['FuncionId'].isin(df_funciones_talleristas.tolist()),'AsignId']=cfg['asignid_talleristas']
    df_historia_completa.loc[df_historia_completa['FuncionId'].isin(df_coordinadores_especiales.tolist()),'RelLabDesignCaracter']=cfg['caracter_especiales']
    df_historia_completa.loc[df_historia_completa['FuncionId'].isin(df_coordinadores_especiales.tolist()),'AsignId']=cfg['asignid_especiales']
    # Coordination rows (AsignId 75) with a cease get the coordination cause.
    df_historia_completa.loc[(df_historia_completa['AsignId']==75) & (df_historia_completa['fcese'].notnull()),'CauBajCod']=cfg['causal_coordinacion']
    df_historia_completa.loc[(df_historia_completa['RelLabAnulada']==1),'CauBajCod']=cfg['causal_anulacion']
    # Constant columns required by the output table.
    df_historia_completa['PerDocPaisCod']='UY'
    df_historia_completa['HorClaCurTpo']=''
    df_historia_completa['HorClaCur']=''
    df_historia_completa['HorClaArea']=''
    df_historia_completa['HorClaAnio']=0
    df_historia_completa['HorClaHorTope']=0
    df_historia_completa['HorClaObs']=''
    df_historia_completa['HorClaNumInt']=0
    df_historia_completa['HorClaParPreCod']=0
    df_historia_completa['HorClaCompPor']=0
    df_historia_completa['HorClaCompPor']=0
    df_historia_completa['HorClaLote']=0
    df_historia_completa['HorClaAudUsu']=0
    df_historia_completa['HorClaMod']=0
    df_historia_completa['HorClaEmpCod']=1
    df_historia_completa['HorClaCarNum']=0
    df_historia_completa['DesFchCarga']= date.today()
    df_historia_completa['Resultado']='PE'
    df_historia_completa['Mensaje']=''
    df_historia_completa['HorClaFchLib']=df_historia_completa['fcese']
    df_historia_completa.loc[(df_historia_completa['CauBajCod'].isnull()),'CauBajCod']=0
    del df_historia_completa['FuncionId']
    del df_historia_completa['PersonalPerId']

    # Transform the dependency to its Siap id.
    df_historia_completa=df_historia_completa.merge(df_tabla_institucional)
    del df_historia_completa['dependid'] # drop dependid; dependidSiap is used from here on
    # Filter out rows with fcese < falta.
    df_historia_completa = df_historia_completa.loc[(df_historia_completa['fcese']>=df_historia_completa['falta'])| (df_historia_completa['fcese'].isnull())]
    # Filter out rows with falta >= tope.
    df_historia_completa = df_historia_completa.loc[df_historia_completa['falta']<parametros['tope']]
    # Filter out rows with zero hours.
    df_historia_completa = df_historia_completa.loc[df_historia_completa['FuncRelLabCantHrs']>0]
    if ci!=None:
        # A cedula was passed as a parameter: keep only that person's history.
        df_historia_completa = df_historia_completa.loc[df_historia_completa['PerDocId']==ci]
    # Rename the fields to the columns of siap's ihorasclase table and export.
    df_historia_completa = df_historia_completa.rename(columns = {'PerDocId':'PerDocNum','RelLabDesignCaracter':'HorClaCar','RelLabCicloPago':'HorClaCic','falta':'HorClaFchPos','fcese':'HorClaFchCese','CauBajCod':'HorClaCauBajCod','GrupoMateriaId':'HorClaGrupo','dependidSiap':'HorClaInsCod','FuncRelLabCantHrs':'HorClaHor','AsignId':'HorClaAsiCod','RelLabAnulada':'HorClaBajLog'})
    df_historia_completa.to_sql(name='ihorasclase', con=engine_bandeja_out, if_exists= 'append', index=False)
def online_size(cls):
    """SQL aggregate expression totalling file sizes of online contracts."""
    size_when_online = func.IF(Contract.online, File.__table__.c.size, 0)
    return func.sum(size_when_online)
def create(tallySheetId):
    """Create the latest PRE-34-PD tally sheet version for the given sheet.

    First-preference counts come from the locked PRE-30-PD rows; second
    and third preference counts are summed from the locked PRE-34
    preference rows.  The version is marked complete only when every
    qualified candidate has all three counts.

    :param tallySheetId: id of the tally sheet to version.
    :return: serialized tally sheet version data.
    """
    tallySheet, tallySheetVersion = TallySheet.create_latest_version(
        tallySheetId=tallySheetId,
        tallySheetCode=TallySheetCodeEnum.PRE_34_PD)

    # Aggregate, per candidate and area, the three preference counts.
    query = db.session.query(
        Election.Model.electionId,
        Area.Model.areaId,
        ElectionCandidate.Model.candidateId,
        # NULL-safe first-preference total from PRE-30-PD rows.
        func.sum(
            func.IF(TallySheetVersionRow_PRE_30_PD.Model.count == None, 0,
                    TallySheetVersionRow_PRE_30_PD.Model.count)).label(
                        "firstPreferenceCount"),
        # Second preferences from PRE-34 preference rows.
        func.sum(
            func.IF(
                and_(TallySheetVersionRow_PRE_34_preference.Model.
                     preferenceNumber == 2),
                TallySheetVersionRow_PRE_34_preference.Model.preferenceCount,
                0)).label("secondPreferenceCount"),
        # Third preferences from PRE-34 preference rows.
        func.sum(
            func.IF(
                and_(TallySheetVersionRow_PRE_34_preference.Model.
                     preferenceNumber == 3),
                TallySheetVersionRow_PRE_34_preference.Model.preferenceCount,
                0)).label("thirdPreferenceCount"),
    ).join(
        Submission.Model,
        Submission.Model.areaId == Area.Model.areaId
    ).join(
        Election.Model,
        Election.Model.electionId == Area.Model.electionId
    ).join(
        # Candidates of the election itself or of its parent election.
        ElectionCandidate.Model,
        or_(
            ElectionCandidate.Model.electionId == Election.Model.electionId,
            ElectionCandidate.Model.electionId ==
            Election.Model.parentElectionId)
    ).join(
        TallySheet.Model,
        and_(
            TallySheet.Model.tallySheetId == Submission.Model.submissionId,
            TallySheet.Model.tallySheetCode.in_([
                TallySheetCodeEnum.PRE_30_PD,
                TallySheetCodeEnum.PRE_34_I_RO
            ]))
    ).join(
        # Outer join: only the locked PRE-30-PD version's rows contribute.
        TallySheetVersionRow_PRE_30_PD.Model,
        and_(
            TallySheetVersionRow_PRE_30_PD.Model.tallySheetVersionId ==
            Submission.Model.lockedVersionId,
            TallySheetVersionRow_PRE_30_PD.Model.candidateId ==
            ElectionCandidate.Model.candidateId),
        isouter=True
    ).join(
        # Outer join: only the locked PRE-34 preference rows contribute.
        TallySheetVersionRow_PRE_34_preference.Model,
        and_(
            TallySheetVersionRow_PRE_34_preference.Model.tallySheetVersionId ==
            Submission.Model.lockedVersionId,
            TallySheetVersionRow_PRE_34_preference.Model.candidateId ==
            ElectionCandidate.Model.candidateId),
        isouter=True
    ).filter(
        Area.Model.areaId == tallySheet.submission.areaId,
        ElectionCandidate.Model.qualifiedForPreferences == True
    ).group_by(
        ElectionCandidate.Model.candidateId,
        Submission.Model.areaId
    ).order_by(
        ElectionCandidate.Model.candidateId,
        Submission.Model.areaId
    ).all()

    is_complete = True  # TODO:Change other reports to validate like this

    for row in query:
        # A candidate contributes rows only when all three counts are present;
        # any gap leaves the version incomplete.
        if row.candidateId is not None and row.firstPreferenceCount is not None and row.secondPreferenceCount is not None and row.thirdPreferenceCount is not None:
            tallySheetVersion.add_row(electionId=row.electionId,
                                      candidateId=row.candidateId,
                                      preferenceNumber=1,
                                      preferenceCount=row.firstPreferenceCount)
            tallySheetVersion.add_row(
                electionId=row.electionId,
                candidateId=row.candidateId,
                preferenceNumber=2,
                preferenceCount=row.secondPreferenceCount)
            tallySheetVersion.add_row(electionId=row.electionId,
                                      candidateId=row.candidateId,
                                      preferenceNumber=3,
                                      preferenceCount=row.thirdPreferenceCount)
        else:
            is_complete = False

    if is_complete:
        tallySheetVersion.set_complete()

    db.session.commit()

    return TallySheetVersionSchema().dump(tallySheetVersion).data
def _get_count(text, if_clause):
    """helper to create query below

    Builds a labeled SUM(IF(condition, 1, 0)) column, i.e. a conditional
    row count.  (The 'text' parameter name shadows sqlalchemy's ``text``;
    kept for caller compatibility.)
    """
    conditional_one = func.IF(if_clause, 1, 0)
    return label(text, func.SUM(conditional_one))
def get_circuit_all_other(self, house_id):
    """
    Get and store all other unmonitored circuits total and by
    interval from database.

    'All other' is whole-house usage minus every individually monitored
    circuit.  Each monitored column is wrapped in IF(col IS NOT NULL, ...)
    so a missing reading counts as 0 instead of nulling out the total.
    All values are divided by 1000 -- presumably Wh -> kWh; confirm.
    """
    # Monitored circuit columns, subtracted in the original expression's
    # order (left-associative, so results are identical to the long form).
    monitored_circuits = [
        'water_heater', 'ashp', 'water_pump', 'dryer', 'washer',
        'dishwasher', 'stove', 'refrigerator', 'living_room',
        'aux_heat_bedrooms', 'aux_heat_living', 'study', 'barn',
        'basement_west', 'basement_east', 'ventilation',
        'ventilation_preheat', 'kitchen_recept_rt']
    actual = func.sum(EnergyHourly.used)/1000
    for circuit_name in monitored_circuits:
        column = getattr(EnergyHourly, circuit_name)
        actual = actual - func.sum(func.IF(column != None, column/1000, 0))

    self.base_query = db_session.\
        query(label('actual', actual)).\
        filter(EnergyHourly.house_id == house_id).\
        filter(or_(EnergyHourly.device_id == 5,
                   EnergyHourly.device_id == 10))
    self.base_query = self.base_query.\
        add_columns(label('date', func.min(EnergyHourly.date)))
    self.filter_query_by_date_range(EnergyHourly)

    totals = self.base_query.one()
    self.json_totals = {'actual': str(totals.actual)}

    items = self.group_query_by_interval(EnergyHourly)
    self.json_items = []
    for item in items:
        data = {'date': self.format_date(item.date),
                'actual': str(item.actual)}
        self.json_items.append(data)

    self.json_circuit = {'circuit_id': 'all_other',
                         'name': self.get_circuit_info('all_other')['name'],
                         'description':
                             self.get_circuit_info('all_other')['description']}
def fraction(cls):
    """SQL expression: online_time / total_time as a float, 0 when total_time is 0."""
    numerator = func.cast(cls.online_time, Float)
    denominator = func.cast(cls.total_time, Float)
    total_is_nonzero = func.ABS(cls.total_time) > 0
    return func.IF(total_is_nonzero, numerator / denominator, 0)
def sqlalchemy_num_or_zero(column):
    """SQL expression yielding *column*, or 0 when the column is NULL."""
    zero_when_null = func.IF(column == None, 0, column)
    return zero_when_null
def online(self):
    """SQL expression: TRUE when at least one related contract is online."""
    any_contract_online = func.sum(Contract.online) > 0
    return func.IF(any_contract_online, true(), false())