def compra_mensual():
    """Build a reusable aliased subquery of monthly purchase totals.

    One row per (year-month, article): a ``periodo`` label, a composed
    ``articulo`` description and the summed purchased quantity.

    :return: the SELECT aliased as ``compra_mensual``
    """
    compras = Compra.mapper.mapped_table
    items = ArticuloCompra.mapper.mapped_table
    articulos = Articulo.mapper.mapped_table
    marcas = Marca.mapper.mapped_table

    # "YYYY-M" period label derived from the purchase date.
    periodo = func.concat(
        func.year(compras.c.fecha), "-", func.month(compras.c.fecha)
    ).label("periodo")
    # Human-readable article description: name, brand, size and unit.
    articulo = func.concat(
        articulos.c.descripcion, " ",
        marcas.c.denominacion, " ",
        articulos.c.cantidad, " ",
        articulos.c.unidad_medida,
    ).label("articulo")
    cantidad = func.sum(items.c.cantidad).label("cantidad")

    joined = compras.join(items).join(articulos).join(marcas)
    query = select(
        [periodo, articulo, cantidad],
        from_obj=joined,
        group_by=[
            func.year(compras.c.fecha),
            func.month(compras.c.fecha),
            items.c.articulo_id,
        ],
    )
    return query.alias("compra_mensual")
def get_billing_data_per_resource_per_project(year, project_id, resource, output_type):
    """Return an aggregated billing query for one resource of one project.

    :param year: calendar year to report on
    :param project_id: project whose usage is summed
    :param resource: resource-type filter applied to ``Usage.resource_type``
    :param output_type: granularity — 'day', 'week', 'month', or any other
        unit accepted by SQL ``EXTRACT``
    :return: an un-executed SQLAlchemy query
    """
    common_filters = (func.extract('year', Usage.usage_date) == year,
                      Usage.project_id == project_id,
                      Usage.resource_type == resource)
    if output_type == 'day':
        billing_data = db_session.query(
            func.unix_timestamp(Usage.usage_date), func.sum(Usage.cost),
            Usage.usage_value, Usage.measurement_unit). \
            filter(*common_filters). \
            group_by(func.unix_timestamp(Usage.usage_date))
    elif output_type == 'week':
        # BUG FIX: previously grouped by func.month() while selecting the
        # week number, which collapsed all weeks of a month into one row.
        billing_data = db_session.query(
            Usage.project_id, func.extract(output_type, Usage.usage_date),
            func.sum(Usage.cost)). \
            filter(*common_filters). \
            group_by(func.extract(output_type, Usage.usage_date))
    else:
        # Group by the same expression that is selected: identical to the
        # old behaviour for 'month', and now correct for any other unit.
        billing_data = db_session.query(
            func.extract(output_type, Usage.usage_date),
            func.sum(Usage.cost)). \
            filter(*common_filters). \
            group_by(func.extract(output_type, Usage.usage_date))
    return billing_data
def user_active_stats(time_based='hour'): """ 用户激活统计 :return: """ # 按小时统计 if time_based == 'hour': start_time, end_time = get_current_day_time_ends() hours = get_hours(False) hours_zerofill = get_hours() result = dict(zip(hours, [0] * len(hours))) rows = db.session \ .query(func.hour(User.create_time).label('hour'), func.count(User.id)) \ .filter(User.create_time >= time_local_to_utc(start_time), User.create_time <= time_local_to_utc(end_time), User.status_active == STATUS_ACTIVE_OK) \ .group_by('hour') \ .limit(len(hours)) \ .all() result.update(dict(rows)) return [(hours_zerofill[i], result[hour]) for i, hour in enumerate(hours)] # 按日期统计 if time_based == 'date': start_time, end_time = get_current_month_time_ends() today = datetime.today() days = get_days(year=today.year, month=today.month, zerofill=False) days_zerofill = get_days(year=today.year, month=today.month) result = dict(zip(days, [0] * len(days))) rows = db.session \ .query(func.day(User.create_time).label('date'), func.count(User.id)) \ .filter(User.create_time >= time_local_to_utc(start_time), User.create_time <= time_local_to_utc(end_time), User.status_active == STATUS_ACTIVE_OK) \ .group_by('date') \ .limit(len(days)) \ .all() result.update(dict(rows)) return [(days_zerofill[i], result[day]) for i, day in enumerate(days)] # 按月份统计 if time_based == 'month': start_time, end_time = get_current_year_time_ends() months = get_months(False) months_zerofill = get_months() result = dict(zip(months, [0] * len(months))) rows = db.session \ .query(func.month(User.create_time).label('month'), func.count(User.id)) \ .filter(User.create_time >= time_local_to_utc(start_time), User.create_time <= time_local_to_utc(end_time), User.status_active == STATUS_ACTIVE_OK) \ .group_by('month') \ .limit(len(months)) \ .all() result.update(dict(rows)) return [(months_zerofill[i], result[month]) for i, month in enumerate(months)]
def dqchecks_exce_oracle_excel():
    """Aggregate Oracle-bashing manifest figures over a posted date range.

    Reads ``start_date``, ``end_date`` and ``period`` (day/month/year)
    from the POSTed form, then builds ``cdr_dict``: for each CDR type,
    three lists (manifest, t1, variance) aligned with the distinct
    periods found in the range.
    """
    if request.method == "POST":
        start_date = request.form["start_date"]
        end_date = request.form["end_date"]
        period_select = request.form["period"]
    # Grouping expression for the requested granularity.
    if period_select == "day":
        period = manifest_oracle_monitoring.file_date
    elif period_select == "month":
        period = func.month(manifest_oracle_monitoring.file_date)
    elif period_select == "year":
        period = func.year(manifest_oracle_monitoring.file_date)
    date_range = and_(manifest_oracle_monitoring.file_date >= start_date,
                      manifest_oracle_monitoring.file_date <= end_date)
    # Distinct periods in range (one output slot each).
    dates = db.session.query(period).filter(date_range).group_by(period).all()
    lookup = db.session.query(
        period,
        manifest_oracle_monitoring.cdr_type,
        func.sum(manifest_oracle_monitoring.ocs_manifest),
        func.sum(manifest_oracle_monitoring.t1_oracle),
        func.sum(manifest_oracle_monitoring.variance)).filter(
            date_range).group_by(
                period, manifest_oracle_monitoring.cdr_type).all()
    len_date = len(dates)
    dates = [d[0] for d in dates]
    cdr_dict = {}
    for row in lookup:
        # Lazily create the per-CDR slot, then record the sums at the index
        # of this row's period.  (The original if/else branches performed
        # the exact same insert_cdr call; only the initialisation differed.
        # An unused local ``cdr_types`` list was also removed.)
        if row.cdr_type not in cdr_dict:
            cdr_dict[row.cdr_type] = {
                "manifest": init_list(len_date),
                "t1": init_list(len_date),
                "variance": init_list(len_date),
            }
        insert_cdr(cdr_dict[row.cdr_type], dates.index(row[0]),
                   row[2], row[3], row[4])
def index(self):
    """Render the analytics page with per-day revenue for the current month.

    Sums Bill.total_price per calendar date, filtered to the current
    month number, and hands the template a list of [day, total] pairs.
    """
    current_month = datetime.datetime.today().month
    daily_revenue = db.session.query(
        func.sum(Bill.total_price).label('sum'),
        Bill.order_time,
    ).filter(
        func.month(Bill.order_time) == current_month,
    ).group_by(
        func.date(Bill.order_time),
    ).all()
    # [day-of-month, revenue] pairs, sorted via the module's myFunc key.
    day_totals = [[row[1].day, int(row[0])] for row in daily_revenue]
    day_totals.sort(key=myFunc)
    return self.render('admin/analytics.html', lst=day_totals)
def update_forecasts(self, acc):
    """Recompute transaction clusters and spending predictions for *acc*.

    Looks back at most ~6 months of transactions (aligned to a full
    month), matches them against the account's existing clusters,
    persists newly found clusters, then stores one Prediction row per
    day-of-month — empty rows when history is too thin (fewer than 10
    transactions per month on average).

    NOTE(review): uses ``xrange`` — this function is Python 2 code.
    """
    session = object_session(acc)
    # Earliest known transaction date for this account.
    start_date = session.query(func.min(Transaction.rdate)).filter(
        Transaction.id_account == acc.id).one()[0]
    if start_date is None:
        return
    # Start from a full month.
    if start_date.day > 4:
        start_date = next_month(start_date)
    # Never look back more than ~6 months (aligned to the 1st).
    start_date = max(start_date,
                     (datetime.date.today() - timedelta(days=6 * 31)).replace(day=1))
    records = acc.transactions.filter(
        Transaction.rdate >= start_date).order_by(Transaction.rdate).all()
    if len(records) == 0:
        return
    if not session.domain in self.main_cats:
        self.build_categories_cache(session)
    # Calculate clusters
    clusterer = TransactionClusterer(records, self.main_cats[session.domain])
    clusterer.find_clusters(acc, datetime.date.today())
    # do not consider unplanned transactions
    # (`!= None` is intentional: SQLAlchemy renders it as IS NOT NULL)
    for c in acc.clusters.filter(TransactionsCluster.next_date != None):
        #self.logger.debug('%%%% update_projection iteration on cluster %s' % c.wording.encode('utf-8', 'replace'))
        cluster_records = c.transactions
        ids = [tr.id for tr in cluster_records]
        cluster = clusterer.find_cluster(ids)
        if cluster is not None:
            #self.logger.debug('cluster found')
            if cluster.add_records(
                    cluster_records
            ) >= 1 and cluster.next_date <= datetime.date.today():
                # if the cluster was not full, we should retry to find any missing transaction
                cluster.find_missing_transaction(acc, datetime.date.today())
            cluster.refresh()
        elif c.enabled:
            #self.logger.debug('cluster not found')
            cluster = clusterer.add_old_cluster(c, cluster_records, acc,
                                                datetime.date.today())
            if cluster is None:
                c.enabled = False
            else:
                # Handled here — don't re-create it in the "new clusters" pass.
                clusterer.clusters.remove(cluster)
        if c.enabled:
            # Sync the stored cluster row with the freshly computed values.
            c.mean_amount = cluster.mean_amount
            c.median_increment = cluster.median_increment
            c.next_date = cluster.next_date
            c.wording = cluster.wording
            c.id_category = cluster.category_id
            for tr in cluster.records:
                if tr.id_cluster is None:
                    tr.id_cluster = c.id
    # Remaining clusters are new: persist them and tag their transactions.
    for cluster in clusterer.clusters:
        c = TransactionsCluster(id_account=acc.id,
                                mean_amount=cluster.mean_amount,
                                median_increment=cluster.median_increment,
                                next_date=cluster.next_date,
                                wording=cluster.wording,
                                id_category=cluster.category_id)
        session.add(c)
        session.flush()  # assigns c.id before tagging the transactions
        for record in cluster.records:
            record.id_cluster = c.id
    # Check if there is enough data to calculate prediction.
    count = session.query(func.count('*').label('nb')).filter(
        Transaction.id_account == acc.id).group_by(
            func.year(Transaction.rdate), func.month(Transaction.rdate))
    avg = session.query(func.avg(count.subquery().columns.nb)).scalar()
    if avg is None or avg < 10:
        # Not enough history: store empty predictions for all 31 day slots.
        for d in xrange(31):
            session.merge(Prediction(id_account=acc.id, day=d))
    else:
        # Calculate prediction
        prediction = PredictionCalculator()
        prediction.add_transactions(
            records, ignore_after=datetime.date.today().replace(day=1))
        prediction.compute_averages()
        for d in xrange(31):
            mean_amount, std_amount = prediction.get_prediction_still_to_be_spent(
                d + 1)
            session.merge(
                Prediction(id_account=acc.id,
                           day=d,
                           mean_amount=mean_amount,
                           std_amount=std_amount))
def sales_orders_order_stats(time_based='hour'):
    """Closed sales-order statistics.

    Counts sales orders with ``status_order == STATUS_ORDER_OK`` bucketed
    by the creation time.

    :param time_based: 'hour'  -> today, one bucket per hour
                       'date'  -> current month, one bucket per day
                       'month' -> current year, one bucket per month
    :return: list of (zero-filled label, count) tuples; implicitly None
        for any other ``time_based`` value
    """
    # Base filter shared by all branches; time-range filters are appended.
    condition = [SalesOrder.status_order == STATUS_ORDER_OK]
    # Hourly statistics (current day)
    if time_based == 'hour':
        start_time, end_time = get_current_day_time_ends()
        hours = get_hours(False)      # raw bucket keys
        hours_zerofill = get_hours()  # zero-padded display labels
        # Default every bucket to 0 so empty hours still appear.
        result = dict(zip(hours, [0] * len(hours)))
        condition.extend(
            [
                SalesOrder.create_time >= time_local_to_utc(start_time),
                SalesOrder.create_time <= time_local_to_utc(end_time)
            ]
        )
        rows = db_bearing.session \
            .query(func.hour(SalesOrder.create_time).label('hour'),
                   func.count(SalesOrder.id)) \
            .filter(*condition) \
            .group_by('hour') \
            .limit(len(hours)) \
            .all()
        result.update(dict(rows))
        return [(hours_zerofill[i], result[hour]) for i, hour in enumerate(hours)]
    # Daily statistics (current month)
    if time_based == 'date':
        start_time, end_time = get_current_month_time_ends()
        today = datetime.today()
        days = get_days(year=today.year, month=today.month, zerofill=False)
        days_zerofill = get_days(year=today.year, month=today.month)
        result = dict(zip(days, [0] * len(days)))
        condition.extend(
            [
                SalesOrder.create_time >= time_local_to_utc(start_time),
                SalesOrder.create_time <= time_local_to_utc(end_time)
            ]
        )
        rows = db_bearing.session \
            .query(func.day(SalesOrder.create_time).label('date'),
                   func.count(SalesOrder.id)) \
            .filter(*condition) \
            .group_by('date') \
            .limit(len(days)) \
            .all()
        result.update(dict(rows))
        return [(days_zerofill[i], result[day]) for i, day in enumerate(days)]
    # Monthly statistics (current year)
    if time_based == 'month':
        start_time, end_time = get_current_year_time_ends()
        months = get_months(False)
        months_zerofill = get_months()
        result = dict(zip(months, [0] * len(months)))
        condition.extend(
            [
                SalesOrder.create_time >= time_local_to_utc(start_time),
                SalesOrder.create_time <= time_local_to_utc(end_time)
            ]
        )
        rows = db_bearing.session \
            .query(func.month(SalesOrder.create_time).label('month'),
                   func.count(SalesOrder.id)) \
            .filter(*condition) \
            .group_by('month') \
            .limit(len(months)) \
            .all()
        result.update(dict(rows))
        return [(months_zerofill[i], result[month]) for i, month in enumerate(months)]
def supplier_end_user_stats(time_based='hour'):
    """End-user (terminal customer) statistics.

    Counts suppliers of type TYPE_COMPANY_FINAL_USER bucketed by their
    creation time: per hour of today, per day of the current month, or
    per month of the current year, depending on ``time_based``.

    :param time_based: 'hour', 'date' or 'month'
    :return: list of (zero-filled label, count) tuples; implicitly None
        for any other ``time_based`` value
    """
    base_filters = [Supplier.company_type == TYPE_COMPANY_FINAL_USER]

    if time_based == 'hour':
        # Hourly buckets for the current day.
        begin, end = get_current_day_time_ends()
        bucket_keys = get_hours(False)
        bucket_labels = get_hours()
        counts = dict.fromkeys(bucket_keys, 0)
        filters = base_filters + [
            Supplier.create_time >= time_local_to_utc(begin),
            Supplier.create_time <= time_local_to_utc(end),
        ]
        records = db.session \
            .query(func.hour(Supplier.create_time).label('hour'),
                   func.count(Supplier.id)) \
            .filter(*filters) \
            .group_by('hour') \
            .limit(len(bucket_keys)) \
            .all()
        counts.update(dict(records))
        return [(bucket_labels[idx], counts[key])
                for idx, key in enumerate(bucket_keys)]

    if time_based == 'date':
        # Daily buckets for the current month.
        begin, end = get_current_month_time_ends()
        now = datetime.today()
        bucket_keys = get_days(year=now.year, month=now.month, zerofill=False)
        bucket_labels = get_days(year=now.year, month=now.month)
        counts = dict.fromkeys(bucket_keys, 0)
        filters = base_filters + [
            Supplier.create_time >= time_local_to_utc(begin),
            Supplier.create_time <= time_local_to_utc(end),
        ]
        records = db.session \
            .query(func.day(Supplier.create_time).label('date'),
                   func.count(Supplier.id)) \
            .filter(*filters) \
            .group_by('date') \
            .limit(len(bucket_keys)) \
            .all()
        counts.update(dict(records))
        return [(bucket_labels[idx], counts[key])
                for idx, key in enumerate(bucket_keys)]

    if time_based == 'month':
        # Monthly buckets for the current year.
        begin, end = get_current_year_time_ends()
        bucket_keys = get_months(False)
        bucket_labels = get_months()
        counts = dict.fromkeys(bucket_keys, 0)
        filters = base_filters + [
            Supplier.create_time >= time_local_to_utc(begin),
            Supplier.create_time <= time_local_to_utc(end),
        ]
        records = db.session \
            .query(func.month(Supplier.create_time).label('month'),
                   func.count(Supplier.id)) \
            .filter(*filters) \
            .group_by('month') \
            .limit(len(bucket_keys)) \
            .all()
        counts.update(dict(records))
        return [(bucket_labels[idx], counts[key])
                for idx, key in enumerate(bucket_keys)]
def dashboard():
    """Cash-flow dashboard.

    Collects per-account balances (raw SQL) plus four movement sets —
    realised debits/credits (effective date set) and forecast
    debits/credits (no effective date yet) — grouped by description and
    date, and renders them on the cash-flow template.
    """
    # Per-account balance: credits summed positively, debits negatively.
    sql = text('''SELECT DESCRICAO, SUM(VALOR) AS TOTAL FROM (
        SELECT CONT.DESCRICAO, SUM(LANC.VALOR_REALIZADO) AS VALOR
        FROM LANCAMENTOS AS LANC
        LEFT JOIN CONTAS AS CONT ON LANC.CONTAS_ID = CONT.ID
        WHERE TIPO_MOVIMENTACAO = 'C'
        GROUP BY CONT.DESCRICAO
        UNION
        SELECT CONT.DESCRICAO, SUM(LANC.VALOR_REALIZADO)*-1 AS VALOR
        FROM LANCAMENTOS AS LANC
        LEFT JOIN CONTAS AS CONT ON LANC.CONTAS_ID = CONT.ID
        WHERE TIPO_MOVIMENTACAO = 'D'
        GROUP BY CONT.DESCRICAO
    ) AS A GROUP BY DESCRICAO ''')
    result = db.engine.execute(sql)
    saldo_contas = [row for row in result]
    # Realised movements: an effective date exists.
    # IDIOM FIX: `.isnot(None)` / `.is_(None)` replace `!= None` / `== None`
    # — they emit the same IS [NOT] NULL SQL but are the documented
    # SQLAlchemy forms and don't trip E711-style linting.
    realizado_debito = Lancamentos.query.with_entities(
        Lancamentos.descricao, Lancamentos.data_efetivacao,
        func.sum(Lancamentos.valor_realizado).label('total_realizado'),
        func.month(Lancamentos.data_efetivacao).label('mes')).filter(
            Lancamentos.tipo_movimentacao == 'D',
            Lancamentos.data_efetivacao.isnot(None)).group_by(
                Lancamentos.descricao, Lancamentos.data_efetivacao).all()
    realizado_credito = Lancamentos.query.with_entities(
        Lancamentos.descricao, Lancamentos.data_efetivacao,
        func.sum(Lancamentos.valor_realizado).label('total_realizado'),
        func.month(Lancamentos.data_efetivacao).label('mes')).filter(
            Lancamentos.tipo_movimentacao == 'C',
            Lancamentos.data_efetivacao.isnot(None)).group_by(
                Lancamentos.descricao, Lancamentos.data_efetivacao).all()
    # Forecast movements: not yet effective (no effective date).
    previsto_debito = Lancamentos.query.with_entities(
        Lancamentos.descricao, Lancamentos.data_prevista,
        func.sum(Lancamentos.valor_previsto).label('total_previsto'),
        func.month(Lancamentos.data_prevista).label('mes')).filter(
            Lancamentos.tipo_movimentacao == 'D',
            Lancamentos.data_efetivacao.is_(None)).group_by(
                Lancamentos.descricao, Lancamentos.data_prevista).all()
    previsto_credito = Lancamentos.query.with_entities(
        Lancamentos.descricao, Lancamentos.data_prevista,
        func.sum(Lancamentos.valor_previsto).label('total_previsto'),
        func.month(Lancamentos.data_prevista).label('mes')).filter(
            Lancamentos.tipo_movimentacao == 'C',
            Lancamentos.data_efetivacao.is_(None)).group_by(
                Lancamentos.descricao, Lancamentos.data_prevista).all()
    return render_template('fluxocaixa/dashboard.html',
                           titulo='Fluxo caixa',
                           previsto_debito=previsto_debito,
                           previsto_credito=previsto_credito,
                           realizado_debito=realizado_debito,
                           realizado_credito=realizado_credito,
                           saldo_contas=saldo_contas)
def dqchecks_overview_oracle_js():
    """JSON endpoint feeding the Oracle DQ-checks overview chart.

    POST supplies start_date/end_date/period from the form; GET defaults
    to the last four months at daily granularity.  Returns, per period in
    the range, the summed variance for each CDR type as parallel lists.
    """
    if request.method == "POST":
        start_date = request.form["start_date"]
        end_date = request.form["end_date"]
        period_select = request.form["period"]
    elif request.method == "GET":
        date_today = date.today()
        start_date = date_today - relativedelta(months=4)
        end_date = date_today
        period_select = "day"
    # Grouping expression for the chosen granularity.
    if period_select == "day":
        period = manifest_oracle_monitoring.file_date
    elif period_select == "month":
        period = func.month(manifest_oracle_monitoring.file_date)
    elif period_select == "year":
        period = func.year(manifest_oracle_monitoring.file_date)
    # Distinct periods in range — one chart x-axis point each.
    dates = db.session.query(period).filter(
        and_(manifest_oracle_monitoring.file_date >= start_date,
             manifest_oracle_monitoring.file_date <= end_date)).group_by(period).all()
    # Summed variance per (period, cdr_type).
    variances = db.session.query(
        period, manifest_oracle_monitoring.cdr_type,
        func.sum(manifest_oracle_monitoring.variance)).filter(
            and_(manifest_oracle_monitoring.file_date >= start_date,
                 manifest_oracle_monitoring.file_date <= end_date)).group_by(
                     period, manifest_oracle_monitoring.cdr_type).all()
    len_date = len(dates)
    # One parallel list per CDR type, pre-sized to the number of periods.
    date_list = init_list(len_date)
    variance_com = init_list(len_date)
    variance_vou = init_list(len_date)
    variance_first = init_list(len_date)
    variance_mon = init_list(len_date)
    variance_cm = init_list(len_date)
    variance_adj = init_list(len_date)
    variance_data = init_list(len_date)
    variance_voice = init_list(len_date)
    variance_sms = init_list(len_date)
    variance_clr = init_list(len_date)
    for i, d in enumerate(dates):
        date_list[i] = format_date(d[0], period_select)
        # NOTE(review): O(len(dates) * len(variances)) scan — fine for
        # small ranges; index variances by (period, cdr_type) if it grows.
        for v in variances:
            if v.cdr_type == "com" and v[0] == d[0]:
                variance_com[i] = str(v[2])
            elif v.cdr_type == "vou" and v[0] == d[0]:
                variance_vou[i] = str(v[2])
            elif v.cdr_type == "first" and v[0] == d[0]:
                variance_first[i] = str(v[2])
            elif v.cdr_type == "mon" and v[0] == d[0]:
                variance_mon[i] = str(v[2])
            elif v.cdr_type == "cm" and v[0] == d[0]:
                variance_cm[i] = str(v[2])
            elif v.cdr_type == "adj" and v[0] == d[0]:
                variance_adj[i] = str(v[2])
            elif v.cdr_type == "data" and v[0] == d[0]:
                variance_data[i] = str(v[2])
            elif v.cdr_type == "voice" and v[0] == d[0]:
                variance_voice[i] = str(v[2])
            elif v.cdr_type == "sms" and v[0] == d[0]:
                variance_sms[i] = str(v[2])
            elif v.cdr_type == "clr" and v[0] == d[0]:
                variance_clr[i] = str(v[2])
    result_set = {
        "date_list": date_list,
        "variance_com": variance_com,
        "variance_vou": variance_vou,
        "variance_first": variance_first,
        "variance_mon": variance_mon,
        "variance_cm": variance_cm,
        "variance_adj": variance_adj,
        "variance_data": variance_data,
        "variance_voice": variance_voice,
        "variance_sms": variance_sms,
        "variance_clr": variance_clr,
    }
    return jsonify(result_set)
def dqchecks_hive_excel():
    """Build and stream an XLSX report bashing OCS manifest counts against
    Hive T1 counts, per CDR type, over a POSTed date range.

    Form fields: ``start_date``, ``end_date`` and ``period``
    (day/month/year).  For every CDR type a 4-column table (Date,
    MANIFEST, T1, VARIANCE) is laid out, up to four tables side by side
    before wrapping to a new band of rows.
    """
    if request.method == "POST":
        start_date = request.form["start_date"]
        end_date = request.form["end_date"]
        period_select = request.form["period"]
    # Grouping expression for the chosen granularity.
    if period_select == "day":
        period = manifest_hive_monitoring.file_date
    elif period_select == "month":
        period = func.month(manifest_hive_monitoring.file_date)
    elif period_select == "year":
        period = func.year(manifest_hive_monitoring.file_date)
    # Distinct periods in range -> one table row per period.
    dates = db.session.query(period).filter(
        and_(manifest_hive_monitoring.file_date >= start_date,
             manifest_hive_monitoring.file_date <= end_date)).group_by(period).all()
    lookup = db.session.query(
        period, manifest_hive_monitoring.cdr_type,
        func.sum(manifest_hive_monitoring.ocs_manifest),
        func.sum(manifest_hive_monitoring.t1_hive),
        func.sum(manifest_hive_monitoring.variance)).filter(
            and_(manifest_hive_monitoring.file_date >= start_date,
                 manifest_hive_monitoring.file_date <= end_date)).group_by(
                     period, manifest_hive_monitoring.cdr_type).all()
    len_date = len(dates)
    dates = [d[0] for d in dates]
    cdr_dict = {}
    for l in lookup:
        # Lazily create the per-CDR slot (the original if/else branches
        # performed the exact same insert_cdr call; unused locals
        # cdr_types / x_lim / row were also removed).
        if l.cdr_type not in cdr_dict:
            cdr_dict[l.cdr_type] = {
                "manifest": init_list(len_date),
                "t1": init_list(len_date),
                "variance": init_list(len_date)
            }
        insert_cdr(cdr_dict[l.cdr_type], dates.index(l[0]), l[2], l[3], l[4])
    # Workbook is assembled entirely in memory.
    output = io.BytesIO()
    workbook = Workbook()
    workbook_name = "Manifest Hive Bashing {}".format(
        datetime.now().strftime("%Y-%m-%d %H-%M-%S"))
    ws = workbook.create_sheet('Manifest vs T1 Bashing Validation', 0)
    greenFill = PatternFill(start_color='AEEA00',
                            end_color='AEEA00',
                            fill_type='solid')
    x_pos = 1   # left column of the current table
    y_pos = 1   # current write row
    temp_y = 1  # top row of the current band of tables
    for c in cdr_dict.keys():
        # --- header: merged "Date" cell plus a merged CDR title row ---
        ws.cell(row=y_pos, column=x_pos,
                value="Date").alignment = Alignment(horizontal='center')
        # ws.cell(row=y_pos, column=x_pos, value="Date").fill = greenFill
        ws.merge_cells(start_row=y_pos, start_column=x_pos,
                       end_row=y_pos + 1, end_column=x_pos)
        ws.cell(row=y_pos, column=x_pos + 1,
                value=c).alignment = Alignment(horizontal='center')
        ws.merge_cells(start_row=y_pos, start_column=x_pos + 1,
                       end_row=y_pos, end_column=x_pos + 3)
        ws.cell(row=y_pos + 1, column=x_pos + 1,
                value="MANIFEST").alignment = Alignment(horizontal='center')
        ws.cell(row=y_pos + 1, column=x_pos + 2,
                value="T1").alignment = Alignment(horizontal='center')
        ws.cell(row=y_pos + 1, column=x_pos + 3,
                value="VARIANCE").alignment = Alignment(horizontal='center')
        # --- one data row per period ---
        # NOTE(review): d.strftime assumes 'day' granularity; 'month'/'year'
        # periods are ints from func.month/func.year — confirm upstream.
        for i, d in enumerate(dates):
            ws.cell(row=y_pos + 2, column=x_pos, value=d.strftime("%m/%d/%y"))
            ws.cell(row=y_pos + 2, column=x_pos + 1,
                    value=cdr_dict[c]["manifest"][i])
            ws.cell(row=y_pos + 2, column=x_pos + 2,
                    value=cdr_dict[c]["t1"][i])
            ws.cell(row=y_pos + 2, column=x_pos + 3,
                    value=cdr_dict[c]["variance"][i])
            y_pos += 1
        # --- advance to the next table slot (tables 5 columns apart,
        #     wrap to a new band after the fourth) ---
        if x_pos + 3 < 19:
            y_pos = temp_y
            x_pos += 5
        else:
            y_pos = temp_y + (len(dates) + 3)
            temp_y = y_pos
            x_pos = 1
    workbook.save(output)
    output.seek(0)
    filename = workbook_name
    # BUG FIX: the XLSX MIME type requires the "vnd." prefix; the debug
    # pprint.pprint(cdr_dict) call was also removed.
    return Response(
        output,
        mimetype=
        "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet",
        headers={
            "Content-Disposition":
            "attachment;filename={}.xlsx".format(filename)
        })
def payroll_report():
    """Display the payroll report built from all database records.

    Pay periods are bi-weekly: the 1st-15th and the 16th-to-month-end of
    each month.  Amounts are ``Worklog.hours * Payscale.hourly_rate``
    summed per employee per half-month.
    """
    # NOTE(review): `func.year(...) + '/' + ...` relies on the SQL backend
    # concatenating with `+`; on MySQL `+` is arithmetic, so confirm the
    # target dialect actually yields the 'YYYY/M/...' label (func.concat
    # would be the portable form).
    # First half of the month: 1st -> 15th.
    biweekly_first = db.session.query(Worklog.employee_id,
        func.year(Worklog.date)+'/'+func.month(Worklog.date)+'/1 - '+\
        func.year(Worklog.date)+'/'+func.month(Worklog.date)+'/15',
        func.sum(Worklog.hours * Payscale.hourly_rate))\
        .filter(extract('day', Worklog.date) < 16 )\
        .join(Payscale, Worklog.job_group==Payscale.job_group)\
        .order_by(Worklog.employee_id, Worklog.date)\
        .group_by(Worklog.employee_id, func.year(Worklog.date), func.month(Worklog.date))
    # Second half: 16th -> 'monthend' placeholder (real day# inserted below).
    biweekly_second = db.session.query(Worklog.employee_id,
        func.year(Worklog.date)+'/'+func.month(Worklog.date)+'/16 - '+\
        func.year(Worklog.date)+'/'+func.month(Worklog.date)+'/monthend',
        func.sum(Worklog.hours * Payscale.hourly_rate))\
        .filter(extract('day', Worklog.date) > 15 )\
        .join(Payscale, Worklog.job_group==Payscale.job_group)\
        .order_by(Worklog.employee_id, Worklog.date)\
        .group_by(Worklog.employee_id, func.year(Worklog.date), func.month(Worklog.date))
    # Combine them; for the second half fill in the correct month end.
    records_all = []
    # First half of month is already correct (1st -> 15th).
    for r in biweekly_first:
        records_all.append(r)
    # Total days for each month (leap-year February handled below).
    days_in_month = {
        '1': '31', '2': '28', '3': '31', '4': '30', '5': '31', '6': '30',
        '7': '31', '8': '31', '9': '30', '10': '31', '11': '30', '12': '31'
    }
    # BUG FIX: raw string — '\d' inside a plain string is an invalid escape
    # sequence (DeprecationWarning, SyntaxWarning on Python 3.12+).
    regex = re.compile(r'^(\d+)/.*/(\d+)/monthend$')
    # Second half of month needs the 'monthend' placeholder replaced.
    for r in biweekly_second:
        pay_period = r[1]
        year_month = regex.findall(pay_period)
        if year_month:
            [(year, month)] = year_month
            num_days = days_in_month[month]
            if month == '2' and isleap(int(year)):
                num_days = '29'
            # Rebuild the record with the correct month-end day number.
            r = (r[0], r[1].replace('monthend', num_days), r[2])
        records_all.append(r)
    return render_template('report.html', n_rows=len(records_all),
                           report_data=records_all)