def get_success_rate():
    """Return per-batch test pass/fail statistics as a JSON string.

    Queries the 15 most recent batches (by ``batch_number`` descending) and
    reports, for each batch: a HH:MM:SS timestamp, total/pass/fail counts,
    and the pass rate rounded to two decimal places.

    Returns:
        str: JSON payload with keys ``testresult``, ``count``, ``msg`` and
        ``code`` (1 on success); on any error, only ``msg`` (the error text)
        and ``code`` (0).
    """
    response = {}
    try:
        # One row per batch: (batch_number, total, passed, failed, pass_rate).
        # MySQL IF(cond, True, None) yields NULL for non-matching rows, and
        # COUNT() skips NULLs, which makes it a conditional count.
        ret_list = db.session.query(
            Testresult.batch_number,
            func.count(Testresult.id),
            func.count(func.if_(Testresult.test_result == 'pass', True, None)),
            func.count(func.if_(Testresult.test_result == 'fail', True, None)),
            func.count(func.if_(Testresult.test_result == 'pass', True, None)) /
            func.count(Testresult.id),
        ).group_by(Testresult.batch_number).order_by(
            db.desc(Testresult.batch_number)).limit(15).all()
        info_list = [{
            # batch_number begins with a unix timestamp; the first 10 chars
            # are the seconds part — TODO confirm this format with the writer.
            'time': time.strftime("%H:%M:%S", time.localtime(int(ret[0][0:10]))),
            'total': ret[1],
            'pass': ret[2],
            'fail': ret[3],
            # Quantize to 2 decimals, then convert; float(Decimal) is exact
            # here, the former str() round-trip was redundant.
            'rate': float(Decimal(ret[4]).quantize(Decimal('0.00'))),
        } for ret in ret_list]
        response['testresult'] = info_list
        response['count'] = len(info_list)
        response['msg'] = "Get case result success!"
        response['code'] = 1
    except Exception as e:
        # API-style error envelope: code 0 plus the error message.
        response['msg'] = str(e)
        response['code'] = 0
    return json.dumps(response)
def test_on_duplicate_key_update_preserve_order(self):
    """ON DUPLICATE KEY UPDATE applies its column list left to right, so a
    later expression referencing an earlier column sees the NEW value."""
    foos = self.tables.foos
    with testing.db.connect() as conn:
        conn.execute(
            insert(foos, [
                dict(id=1, bar="b", baz="bz"),
                dict(id=2, bar="b", baz="bz2"),
            ])
        )

        stmt = insert(foos)
        not_yet_updated = foos.c.updated_once == False

        # bar is assigned before updated_once flips, so the guard still
        # holds when bar's IF() runs and the new value is taken.
        bar_then_flag = stmt.on_duplicate_key_update([
            ("bar", func.if_(not_yet_updated, func.values(foos.c.bar), foos.c.bar)),
            ("updated_once", func.if_(not_yet_updated, True, foos.c.updated_once)),
        ])
        # updated_once flips first, so bar's IF() sees the flag already set
        # and keeps the old bar value.
        flag_then_bar = stmt.on_duplicate_key_update([
            ("updated_once", func.if_(not_yet_updated, True, foos.c.updated_once)),
            ("bar", func.if_(not_yet_updated, func.values(foos.c.bar), foos.c.bar)),
        ])

        # Updating bar succeeds with bar listed first.
        conn.execute(bar_then_flag, dict(id=1, bar="ab"))
        eq_(
            conn.execute(foos.select().where(foos.c.id == 1)).fetchall(),
            [(1, "ab", "bz", True)],
        )

        # With the flag listed first the bar update is a no-op.
        conn.execute(flag_then_bar, dict(id=2, bar="ab"))
        eq_(
            conn.execute(foos.select().where(foos.c.id == 2)).fetchall(),
            [(2, "b", "bz2", True)],
        )
def _order_by(self, query, joins, sort_joins, sort_field, sort_desc):
    """Apply ordering, special-casing the dotted ``code`` column.

    Codes look like ``1``, ``1.1``, ``1.10``, ``10.1`` — they sort wrongly
    both numerically and lexicographically, so we pad them out to dotted
    quads and let MySQL's INET_ATON produce a sortable integer.
    """
    query, joins = super(AffiliationView, self)._order_by(
        query, joins, sort_joins, sort_field, sort_desc)
    if sort_field.name != 'code':
        return query, joins
    # Pad '10.2' -> '10.2.0.0' and bare '10' -> '10.0.0.0', then compare
    # the codes as if they were IPv4 addresses.
    has_dot = func.instr(sort_field, '.') > 0
    padded = func.if_(has_dot,
                      func.concat(sort_field, '.0.0'),
                      func.concat(sort_field, '.0.0.0'))
    ordering = func.inet_aton(padded)
    if sort_desc:
        ordering = desc(ordering)
    # Drop the ordering the superclass installed and use ours instead.
    query = query.order_by(None).order_by(ordering)
    return query, joins
def _order_by(self, query, joins, sort_joins, sort_field, sort_desc):
    """Order the query; the ``code`` column needs special treatment.

    Entries such as ``1``, ``1.2``, ``1.10`` and ``10.1`` do not sort
    correctly as numbers or as strings, so each code is treated as the
    prefix of a dotted-quad IP address and ranked via MySQL INET_ATON.
    """
    query, joins = super(AffiliationView, self)._order_by(
        query, joins, sort_joins, sort_field, sort_desc)
    if sort_field.name == 'code':
        # '10.2' gains '.0.0', bare '10' gains '.0.0.0', so both become
        # full dotted quads that INET_ATON can turn into sortable ints.
        ip_like = func.if_(
            func.instr(sort_field, '.') > 0,
            func.concat(sort_field, '.0.0'),
            func.concat(sort_field, '.0.0.0'))
        key = func.inet_aton(ip_like)
        # Replace any ordering set by the superclass with the code key.
        query = query.order_by(None).order_by(desc(key) if sort_desc else key)
    return query, joins
def get(self):
    """Return portfolio summary counts: all portfolios, mine, favorites.

    Each entry carries a display name, a status key, and a count.
    """
    user_id = session.get('user_id')
    # Single scan over live PortfolioInfo rows: total count plus a
    # conditional sum of the rows created by the current user.
    count_all, count_my = db.session.query(
        func.count().label('all'),
        func.sum(func.if_(PortfolioInfo.create_user_id == user_id, 1, 0)),
    ).filter(PortfolioInfo.is_del == 0).first()
    favorite = db.session.query(func.count()).filter(
        FavoritePortfolio.user_id == user_id).scalar()
    rows = (
        ("全部", "all", count_all),
        ("我的", "my", count_my),
        ("关注组合", "favorite", favorite),
    )
    return {
        'data': [{"name": name, "status": key, "count": try_2_float(cnt)}
                 for name, key, cnt in rows]
    }
def test_if(self):
    """func.if_ renders as a three-argument if() call."""
    predicate = literal(1) > literal(2)
    rendered = self.compile(
        func.if_(predicate, text('a'), text('b')),
        literal_binds=True,
    )
    self.assertEqual(rendered, 'if(1 > 2, a, b)')
def test_on_duplicate_key_update_preserve_order(self):
    """Show that the update list order matters: ON DUPLICATE KEY UPDATE
    replaces old values left to right, so later expressions already see
    the new values of earlier columns."""
    foos = self.tables.foos
    with testing.db.connect() as conn:
        conn.execute(insert(foos, [
            dict(id=1, bar='b', baz='bz'),
            dict(id=2, bar='b', baz='bz2'),
        ]))

        stmt = insert(foos)
        cond = foos.c.updated_once == False

        def guarded(new_value, current):
            # Take the new value only while the row is still unmarked.
            return func.if_(cond, new_value, current)

        stmt1 = stmt.on_duplicate_key_update([
            ('bar', guarded(func.values(foos.c.bar), foos.c.bar)),
            ('updated_once', guarded(True, foos.c.updated_once)),
        ])
        stmt2 = stmt.on_duplicate_key_update([
            ('updated_once', guarded(True, foos.c.updated_once)),
            ('bar', guarded(func.values(foos.c.bar), foos.c.bar)),
        ])

        # bar listed first: its guard is evaluated before the flag flips,
        # so the update goes through.
        conn.execute(stmt1, dict(id=1, bar='ab'))
        eq_(
            conn.execute(foos.select().where(foos.c.id == 1)).fetchall(),
            [(1, 'ab', 'bz', True)],
        )

        # flag listed first: bar's guard now fails and bar is untouched.
        conn.execute(stmt2, dict(id=2, bar='ab'))
        eq_(
            conn.execute(foos.select().where(foos.c.id == 2)).fetchall(),
            [(2, 'b', 'bz2', True)])
def test_underscores(self):
    """A trailing underscore is stripped: func.if_() compiles to if()."""
    expr = func.if_()
    self.assert_compile(expr, "if()")
def get_subscription_data(max_time: datetime, min_time: datetime = None) -> pd.DataFrame:
    """Load CRM subscription rows, enriched with payment and upgrade info,
    into a cleaned pandas DataFrame.

    Builds four subqueries over the MySQL CRM schema (subscription types,
    payments aggregated per subscription, upgraded subscriptions, upgrade
    detail), outer-joins them onto subscriptions, then post-processes the
    frame (drops zero-length / refunded / pre-2015 rows, fills defaults,
    coerces dtypes).

    Args:
        max_time: upper bound applied via get_table_created_at_filters.
        min_time: optional lower bound for the same filters.

    Returns:
        pd.DataFrame ordered by user_id and start_time.
    """
    predplatne_mysql_mappings = get_sqlalchemy_tables_w_session(
        'MYSQL_CRM_CONNECTION_STRING', 'MYSQL_CRM_DB', [
            'payments', 'subscriptions', 'subscription_types',
            'subscription_upgrades'
        ])
    mysql_predplatne_session = predplatne_mysql_mappings['session']
    payments = predplatne_mysql_mappings['payments']
    subscriptions = predplatne_mysql_mappings['subscriptions']
    subscription_types = predplatne_mysql_mappings['subscription_types']
    subscription_upgrades = predplatne_mysql_mappings['subscription_upgrades']

    # Access-level flags per subscription type; placeholder "no_subscription"
    # types are excluded.
    subscription_id_access_level = mysql_predplatne_session.query(
        subscription_types.c['id'].label("subscription_type_id"),
        subscription_types.c['name'].label("name"),
        subscription_types.c['code'].label("code"),
        subscription_types.c['length'].label("length"),
        subscription_types.c['price'].label("price"),
        subscription_types.c['web'].label("is_web"),
        subscription_types.c['mobile'].label("is_mobile"),
        subscription_types.c['club'].label("is_club"),
        subscription_types.c['print'].label("is_print"),
        subscription_types.c['print_friday'].label("is_print_friday")).filter(
            subscription_types.c['no_subscription'] != True).subquery()

    # Payments rolled up per subscription; payment_status becomes a
    # comma-joined GROUP_CONCAT string of all statuses.
    payments_grouped_filtered = mysql_predplatne_session.query(
        payments.c['subscription_id'].label("subscription_id"),
        func.count(payments.c['id']).label("payment_count"),
        func.sum(payments.c['amount']).label("amount"),
        func.sum(payments.c['additional_amount']).label("additional_amount"),
        func.if_(func.sum(payments.c['recurrent_charge']) > 0, True,
                 False).label("is_recurrent_charge"),
        func.group_concat(
            payments.c['status']).label("payment_status")).filter(
                and_(
                    payments.c['subscription_id'].isnot(None),
                    *get_table_created_at_filters(
                        payments, max_time, min_time))).group_by(
                            payments.c['subscription_id']).subquery()

    # Flag: this subscription was the base of at least one upgrade.
    subscriptions_upgraded_subs = mysql_predplatne_session.query(
        subscription_upgrades.c['base_subscription_id'].label(
            "base_subscription_id"),
        func.if_(
            func.count(subscription_upgrades.c['id']) > 0, True,
            False).label("is_upgraded")).group_by(
                subscription_upgrades.c['base_subscription_id']).filter(
                    *get_table_created_at_filters(
                        subscription_upgrades, max_time, min_time)).subquery()

    # Upgrade detail keyed by the *upgraded* subscription id (joined below).
    subscriptions_upgrades = mysql_predplatne_session.query(
        subscription_upgrades.c['base_subscription_id'].label(
            "base_subscription_id"),
        subscription_upgrades.c['upgraded_subscription_id'].label(
            "upgraded_subscription_id"),
        subscription_upgrades.c['type'].label("upgrade_type"),
        func.if_(
            func.count(subscription_upgrades.c['id']) > 0, True,
            False).label("is_upgrade")).filter(
                and_(*get_table_created_at_filters(
                    subscription_upgrades, max_time, min_time))).group_by(
                    subscription_upgrades.c['base_subscription_id'],
                    subscription_upgrades.c['upgraded_subscription_id'],
                    subscription_upgrades.c['type']).subquery()

    subscriptions_data = mysql_predplatne_session.query(
        subscriptions.c['created_at'].label('created_at'),
        subscriptions.c['user_id'].label("user_id"),
        subscriptions.c['id'].label("subscription_id"),
        subscriptions.c['start_time'].label("start_time"),
        subscriptions.c['end_time'].label("end_time"),
        subscriptions.c['length'].label("length"),
        subscriptions.c['is_recurrent'].label("is_recurrent"),
        subscriptions.c['is_paid'].label("is_paid"),
        subscriptions.c['type'].label("subscription_type"),
        subscriptions.c['subscription_type_id'].label("subscription_type_id"),
        payments_grouped_filtered.c['payment_count'].label("payment_count"),
        payments_grouped_filtered.c['amount'].label("amount"),
        payments_grouped_filtered.c['additional_amount'].label(
            "additional_amount"),
        payments_grouped_filtered.c['is_recurrent_charge'].label(
            "is_recurrent_charge"),
        payments_grouped_filtered.c['payment_status'].label("payment_status"),
        subscription_id_access_level.c['name'].label("sub_type_name"),
        subscription_id_access_level.c['code'].label("sub_type_code"),
        subscription_id_access_level.c['length'].label("sub_type_length"),
        subscription_id_access_level.c['price'].label("sub_type_price"),
        subscription_id_access_level.c['is_web'].label("sub_web_access"),
        subscription_id_access_level.c['is_mobile'].label(
            "sub_standard_access"),
        subscription_id_access_level.c['is_club'].label("sub_club_access"),
        subscription_id_access_level.c['is_print'].label("sub_print_access"),
        subscription_id_access_level.c['is_print_friday'].label(
            "sub_print_friday_access"),
        subscriptions_upgraded_subs.c['is_upgraded'].label("is_upgraded"),
        subscriptions_upgrades.c['base_subscription_id'].label(
            "base_subscription_id"),
        subscriptions_upgrades.c['upgrade_type'].label("upgrade_type"),
        subscriptions_upgrades.c['is_upgrade'].label("is_upgrade"),
    ).join(
        payments_grouped_filtered,
        subscriptions.c['id'] ==
        payments_grouped_filtered.c['subscription_id'],
        isouter=True).join(
            subscription_id_access_level,
            subscriptions.c['subscription_type_id'] ==
            subscription_id_access_level.c['subscription_type_id'],
            isouter=True).join(
                subscriptions_upgraded_subs,
                subscriptions.c['id'] ==
                subscriptions_upgraded_subs.c['base_subscription_id'],
                isouter=True).join(
                    subscriptions_upgrades,
                    subscriptions.c['id'] ==
                    subscriptions_upgrades.c['upgraded_subscription_id'],
                    isouter=True).filter(
                        and_(
                            *get_table_created_at_filters(
                                subscriptions, max_time, min_time),
                            # NOTE(review): filtering an outer-joined column
                            # with IN drops rows where the join produced NULL
                            # (subscriptions with no payments) — yet fillna
                            # below expects 'no_payment' rows. Confirm intent.
                            payments_grouped_filtered.c['payment_status'].in_([
                                'paid', 'prepaid', 'family', 'upgrade'
                            ]))).order_by(subscriptions.c['user_id'],
                                          subscriptions.c['start_time'])

    subscriptions_data_merged = pd.read_sql(subscriptions_data.statement,
                                            subscriptions_data.session.bind)

    # Drop zero-length subscriptions (start == end).
    subscriptions_data_merged = subscriptions_data_merged.loc[
        subscriptions_data_merged["start_time"] !=
        subscriptions_data_merged["end_time"]].copy()
    # Drop rows whose concatenated payment statuses mention form/refund.
    subscriptions_data_merged = subscriptions_data_merged.loc[
        ~subscriptions_data_merged["payment_status"].str.
        contains("form|refund", na=False)].copy()
    # Keep only subscriptions starting from 2015 onward.
    subscriptions_data_merged = subscriptions_data_merged.loc[
        subscriptions_data_merged["start_time"] >= datetime(
            2015, 1, 1, 0, 0, 0)].copy()

    # Defaults for subscriptions the outer joins found no match for;
    # base_subscription_id falls back to the row's own subscription_id.
    column_fillna = {
        'payment_count': 0,
        'amount': 0.0,
        'additional_amount': 0.0,
        'is_recurrent_charge': False,
        'payment_status': 'no_payment',
        'is_upgraded': False,
        'base_subscription_id': subscriptions_data_merged['subscription_id'],
        'upgrade_type': 'none',
        'is_upgrade': False,
    }
    subscriptions_data_merged.fillna(column_fillna, inplace=True)

    # Final dtypes; boolean-ish flags are kept as float columns.
    column_types = {
        'user_id': int,
        'subscription_id': int,
        'length': int,
        'is_recurrent': float,
        'is_paid': float,
        'subscription_type': str,
        'subscription_type_id': int,
        'payment_count': int,
        'amount': float,
        'additional_amount': float,
        'is_recurrent_charge': float,
        'payment_status': str,
        'sub_type_name': str,
        'sub_type_code': str,
        'sub_type_length': int,
        'sub_type_price': float,
        'sub_web_access': float,
        'sub_standard_access': float,
        'sub_club_access': float,
        'sub_print_access': float,
        'sub_print_friday_access': float,
        'is_upgraded': float,
        'base_subscription_id': int,
        'upgrade_type': str,
        'is_upgrade': float
    }
    subscriptions_data_merged = subscriptions_data_merged.astype(column_types)

    mysql_predplatne_session.close()

    return subscriptions_data_merged
def test_on_duplicate_key_update_preserve_order(self):
    """Column order in on_duplicate_key_update matters: assignments run
    left to right, so a later IF() already sees earlier new values."""
    foos = self.tables.foos
    with testing.db.connect() as conn:
        conn.execute(insert(foos, [
            dict(id=1, bar="b", baz="bz"),
            dict(id=2, bar="b", baz="bz2"),
        ]))
        stmt = insert(foos)
        fresh_row = foos.c.updated_once == False

        # bar first: the updated_once guard is still False when bar updates.
        update_bar_then_mark = stmt.on_duplicate_key_update([
            ("bar", func.if_(fresh_row, func.values(foos.c.bar), foos.c.bar)),
            ("updated_once", func.if_(fresh_row, True, foos.c.updated_once)),
        ])
        # mark first: updated_once flips before bar's guard is evaluated.
        mark_then_update_bar = stmt.on_duplicate_key_update([
            ("updated_once", func.if_(fresh_row, True, foos.c.updated_once)),
            ("bar", func.if_(fresh_row, func.values(foos.c.bar), foos.c.bar)),
        ])

        # Expect bar updated when its assignment precedes the flag.
        conn.execute(update_bar_then_mark, dict(id=1, bar="ab"))
        eq_(conn.execute(foos.select().where(foos.c.id == 1)).fetchall(),
            [(1, "ab", "bz", True)])

        # Expect a no-op bar update when the flag is assigned first.
        conn.execute(mark_then_update_bar, dict(id=2, bar="ab"))
        eq_(conn.execute(foos.select().where(foos.c.id == 2)).fetchall(),
            [(2, "b", "bz2", True)])
def get(self, status):
    """Return one page of portfolio listings with each portfolio's latest
    NAV, owner username and favorite flag.

    Args:
        status: 'my' (own portfolios), 'all' (public + own) or
            'star' (marked as TODO below).

    Raises:
        KeyError: for any other status value.
    """
    args = paginate_parser.parse_args()
    page_no = args['page_no']
    count = args['count']
    user_id = session.get('user_id')
    # Latest trade date per portfolio.
    date_latest_query = db.session.query(
        PortfolioValueDaily.pl_id,
        func.max(PortfolioValueDaily.trade_date).label(
            'trade_date_max')).group_by(
                PortfolioValueDaily.pl_id).subquery('date_latest')
    # Latest NAV row per portfolio (the row matching that latest date).
    nav_latest_query = db.session.query(
        PortfolioValueDaily.pl_id, PortfolioValueDaily.trade_date,
        PortfolioValueDaily.rr, PortfolioValueDaily.nav).filter(
            PortfolioValueDaily.pl_id == date_latest_query.c.pl_id,
            PortfolioValueDaily.trade_date ==
            date_latest_query.c.trade_date_max).subquery('nav_latest')
    # Paged query over portfolio info joined with the latest NAV.
    if status == 'my':
        filter_c = PortfolioInfo.create_user_id == user_id
    elif status == 'all':
        filter_c = or_(PortfolioInfo.access_type == 'public',
                       PortfolioInfo.create_user_id == user_id)
    elif status == 'star':
        # TODO: starred portfolios.
        # NOTE(review): this references FavoriteCompare although the query
        # below joins FavoritePortfolio — looks like a copy-paste leftover;
        # confirm before relying on the 'star' branch.
        filter_c = not_(func.isnull(FavoriteCompare.update_time))
    else:
        filter_c = None
    if filter_c is None:
        raise KeyError('status 参数错误 status = %s' % status)
    else:
        # favorite = 1 iff the current user has a FavoritePortfolio row.
        pagination = PortfolioInfo.query.outerjoin(
            nav_latest_query,
            PortfolioInfo.pl_id == nav_latest_query.c.pl_id).add_columns(
                nav_latest_query.c.trade_date,
                nav_latest_query.c.rr,
                nav_latest_query.c.nav,
            ).outerjoin(User).add_columns(User.username).outerjoin(
                FavoritePortfolio,
                and_(PortfolioInfo.pl_id == FavoritePortfolio.pl_id,
                     FavoritePortfolio.user_id == user_id)).add_columns(
                         func.if_(
                             func.isnull(FavoritePortfolio.update_time),
                             0, 1).label('favorite')).filter(
                                 filter_c).paginate(page_no, count)
    logger.debug('%d / %d 页 %d / %d 条数据', pagination.page,
                 pagination.pages, len(pagination.items), pagination.total)
    ret_dic_list = [{
        'pl_id': data.PortfolioInfo.pl_id,
        'name': data.PortfolioInfo.name,
        'date_from': date_2_str(data.PortfolioInfo.date_from),
        'date_to': date_2_str(data.PortfolioInfo.date_to),
        'status': data.PortfolioInfo.status,
        'desc': data.PortfolioInfo.desc,
        'create_user_id': data.PortfolioInfo.create_user_id,
        'username': data.username,
        'favorite': data.favorite,
        'trade_date': date_2_str(data.trade_date),
        'rr': try_2_float(data.rr),
        'nav': try_2_float(data.nav),
        'access_type': data.PortfolioInfo.access_type,
    } for data in pagination.items]
    ret_dic = {
        'page': pagination.page,
        'pages': pagination.pages,
        'count': len(pagination.items),
        'total': pagination.total,
        'has_prev': pagination.has_prev,
        'has_next': pagination.has_next,
        'data': ret_dic_list,
    }
    return ret_dic
def get(self, status):
    """Return one page of portfolio-comparison listings with aggregate
    result stats, owner username and a favorite flag.

    Args:
        status: 'my', 'all', 'star', 'verified' (complete_rate >= 1) or
            'unverified' (complete_rate NULL or < 1).

    Raises:
        KeyError: for any other status value.
    """
    args = paginate_parser.parse_args()
    page_no = args['page_no']
    count = args['count']
    user_id = session.get('user_id')
    logger.debug('get_cmp_list user_id:%s', user_id)
    # complete_rate is a computed label, so the verified/unverified split
    # must go through HAVING (see having_c below), not WHERE.
    if status == 'my':
        filter_c = PortfolioCompareInfo.create_user_id == user_id
        having_c = None
    elif status == 'all':
        filter_c = or_(PortfolioCompareInfo.create_user_id == user_id,
                       PortfolioCompareInfo.access_type == 'public')
        having_c = None
    elif status == 'star':
        filter_c = and_(
            or_(PortfolioCompareInfo.create_user_id == user_id,
                PortfolioCompareInfo.access_type == 'public'),
            not_(func.isnull(FavoriteCompare.update_time)))
        having_c = None
    elif status == 'verified':
        filter_c = or_(PortfolioCompareInfo.create_user_id == user_id,
                       PortfolioCompareInfo.access_type == 'public')
        having_c = column('complete_rate') >= 1
    elif status == 'unverified':
        filter_c = or_(PortfolioCompareInfo.create_user_id == user_id,
                       PortfolioCompareInfo.access_type == 'public')
        having_c = or_(
            column('complete_rate').is_(None),
            column('complete_rate') < 1)
    else:
        raise KeyError('status 参数错误 status = %s' % status)
    # Aggregate comparison results per cmp_id; complete_rate measures how
    # far trade dates have progressed through the [date_from, date_to] span.
    # favorite = 1 iff the current user has a FavoriteCompare row.
    query = PortfolioCompareInfo.query.outerjoin(
        PortfolioCompareResult
    ).group_by(PortfolioCompareResult.cmp_id).add_columns(
        func.count().label('tot_count'),
        func.min(
            PortfolioCompareResult.trade_date).label('trade_date_min'),
        func.max(
            PortfolioCompareResult.trade_date).label('trade_date_max'),
        func.sum(PortfolioCompareResult.result).label('fit_count'),
        (func.sum(PortfolioCompareResult.result) /
         func.count()).label('fit_rate'),
        ((func.max(PortfolioCompareResult.trade_date) -
          PortfolioCompareInfo.date_from) /
         (PortfolioCompareInfo.date_to - PortfolioCompareInfo.date_from)
         ).label('complete_rate')).outerjoin(User).add_columns(
             User.username).outerjoin(
                 FavoriteCompare,
                 and_(
                     PortfolioCompareInfo.cmp_id == FavoriteCompare.cmp_id,
                     FavoriteCompare.user_id == user_id)).add_columns(
                         func.if_(
                             func.isnull(FavoriteCompare.update_time),
                             0, 1).label('favorite')).filter(
                                 and_(filter_c,
                                      PortfolioCompareInfo.is_del == 0))
    if having_c is None:
        pagination = query.paginate(page_no, count)
    else:
        pagination = query.having(having_c).paginate(page_no, count)
    logger.debug('%d / %d 页 %d / %d 条数据', pagination.page,
                 pagination.pages, len(pagination.items), pagination.total)
    ret_dic_list = [{
        'cmp_id': data.PortfolioCompareInfo.cmp_id,
        'name': data.PortfolioCompareInfo.name,
        'status': data.PortfolioCompareInfo.status,
        'params': data.PortfolioCompareInfo.params,
        'desc': data.PortfolioCompareInfo.desc,
        'date_from': date_2_str(data.PortfolioCompareInfo.date_from),
        'date_to': date_2_str(data.PortfolioCompareInfo.date_to),
        'trade_date_min': date_2_str(data.trade_date_min),
        'trade_date_max': date_2_str(data.trade_date_max),
        'create_user_id': data.PortfolioCompareInfo.create_user_id,
        'username': data.username,
        'favorite': data.favorite,
        'complete_rate': try_2_float(data.complete_rate),
    } for data in pagination.items]
    ret_dic = {
        'page': pagination.page,
        'pages': pagination.pages,
        'count': len(pagination.items),
        'total': pagination.total,
        'has_prev': pagination.has_prev,
        'has_next': pagination.has_next,
        'data': ret_dic_list,
    }
    return ret_dic
def op_dura_business_list():
    """Return per-business-type call-duration statistics as a paged result.

    For each business type: count of quality-checked calls, average QC
    score, average call duration, and the ratio of effective to total
    duration; sortable by any of those columns, optionally filtered by a
    call start-time window from the request form.
    """
    cur_page, per_page = PageForm.fetch_page_param(
        PageForm().validate_for_api())
    column_name, column_order = ColumnSortForm.fetch_column_param(
        ColumnSortForm().validate_for_api())
    form = StatOpDurationForm().validate_for_api()
    # Avoid mixing join and outerjoin in one query — express the outer join
    # as a subquery instead. with_labels() prefixes the column names with
    # their table, e.g. qi_info_traffic_call_id.
    traffic_score = db_v1.session.query(QiInfoTraffic, QiScoreCall).outerjoin(
        QiScoreCall,
        and_(QiInfoTraffic.call_id == QiScoreCall.call_id,
             QiScoreCall.is_deleted == 0)).filter(
                 QiInfoTraffic.is_deleted == 0).with_labels().subquery(
                     'traffic_score')
    grp_q = db_v1.session.query(
        QiInfoBusinessType.id,
        QiInfoBusinessType.type_name,
        # Conditional count: IF() yields NULL for calls that have not
        # finished quality checking, and COUNT() skips NULLs.
        func.count(
            func.if_(
                traffic_score.c.qi_info_traffic_file_status >=
                TrafficFileStatus.FILE_QUALITY_FINISH.value, 1,
                None)).label('cnt_qi'),
        func.avg(traffic_score.c.qi_score_call_qc_score).label('avg_score'),
        func.avg(QiCallDuration.duration).label('duration'),
        (func.avg(QiCallDuration.effective_duration) /
         func.avg(QiCallDuration.duration)).label('rate_effe'),
    ).filter(
        QiInfoBusinessType.type_code ==
        QiResultsBusinessAnalysis.business_type,
        traffic_score.c.qi_info_traffic_call_id ==
        QiResultsBusinessAnalysis.call_id,
        traffic_score.c.qi_info_traffic_call_id == QiCallDuration.call_id,
        QiInfoBusinessType.is_deleted == 0,
        QiResultsBusinessAnalysis.is_deleted == 0,
        QiCallDuration.is_deleted == 0,
    ).group_by(QiInfoBusinessType.type_code)
    # Optional call start-time window from the form (unix timestamps).
    if form.call_time_left.data:
        grp_q = grp_q.filter(QiCallDuration.call_start_time >=
                             datetime.fromtimestamp(form.call_time_left.data))
    if form.call_time_right.data:
        grp_q = grp_q.filter(QiCallDuration.call_start_time <=
                             datetime.fromtimestamp(form.call_time_right.data))
    # Wrap the aggregate in a subquery before ordering: the labels become
    # plain columns, which keeps the sorting code simple and robust.
    grp_q = grp_q.subquery('grp_q')
    q = db_v1.session.query(grp_q)
    # Sorting: map the requested column name to its subquery column,
    # falling back to id via dict.get().
    column_obj_map = {
        'cnt_qi': grp_q.c.cnt_qi,
        'avg_score': grp_q.c.avg_score,
        'duration': grp_q.c.duration,
        'rate_effe': grp_q.c.rate_effe,
    }
    if column_order == 'ascending':
        q = q.order_by(asc(column_obj_map.get(column_name, grp_q.c.id)))
    else:
        q = q.order_by(desc(column_obj_map.get(column_name, grp_q.c.id)))
    rvs = pager(q, page=cur_page, per_page=per_page)
    vms = []
    for rv in rvs.items:
        rv_dict = {c: getattr(rv, c, None) for c in rv._fields}
        vm = {}
        vm['id'] = rv_dict['id']
        vm['type_name'] = rv_dict['type_name']
        # safe_cast_* helpers absorb NULL aggregates via try/except.
        vm['cnt_qi'] = safe_cast_int(rv_dict['cnt_qi'])
        vm['avg_score'] = round(safe_cast_float(rv_dict['avg_score']), 1)
        # msec2time converts the millisecond average into a time string.
        vm['duration'] = str(msec2time(rv_dict['duration']))
        vm['rate_effe'] = "%.1f%%" % (safe_cast_float(rv_dict['rate_effe']) *
                                      100)
        vms.append(vm)
    return PageResultSuccess(msg='通话时长详情表-通话时长分布',
                             data=vms,
                             page=rvs.page_view())