def admin_parent_details_views(request, user_id):
    """Admin: fetch parent account user details."""
    client = UserProfile.objects.filter(id=user_id).first()
    group = client.groups.first()
    client.group_name = group
    user_perm_code_list = PermUser.user_perm(client)
    user_perm_list = Perm.objects.filter(code__in=user_perm_code_list)
    user_perm, user_perm_type = handle_perm(user_perm_list)
    all_perm = Perm.objects.filter(content_type__contains='CLIENT')
    all_perm, all_perm_type = handle_perm(all_perm)
    client.company = client.company if client.company else ''
    client.mobile = client.mobile if client.mobile else ''
    client.active_type = af.IS_ACTIVE if client.is_active else af.IS_NOT_ACTIVE
    body = {
        'username_list': [client.username]
    }
    res = APIUrl.post_link('user_query', body)
    user_query = res.get('user_query', {})
    user_info = user_query.get(client.username, {})
    secret_id = user_info.get('api_secret_id', '')
    secret_key = user_info.get('api_secret_key', '')
    api_create_time = user_info.get('api_create_time', '')
    if api_create_time:
        api_create_time = timestamp_to_str(api_create_time)
    api_open = user_info.get('api_open', 0)
    api_info = list()
    if secret_id and secret_key:
        api_info_dict = {
            'secret_id': secret_id,
            'secret_key': secret_key,
            'create_time': api_create_time,
            'status': api_open,
            'type': _(af.COMMON)
        }
        api_info.append(api_info_dict)
    client.api_info = api_info
    res = {
        'client': client,
        'user_perm': user_perm,
        'user_perm_type': user_perm_type,
        'all_perm': all_perm,
        'all_perm_type': all_perm_type
    }
    return render(request, 'user_accounts/admin_parent_details_views.html', res)
def parent_secret_info_views(request):
    """User API secret information."""
    user = request.user
    body = {
        'username_list': [user.username]
    }
    res = APIUrl.post_link('user_query', body)
    user_query = res.get('user_query', {})
    user_info = user_query.get(user.username, {})
    secret_id = user_info.get('api_secret_id', '')
    secret_key = user_info.get('api_secret_key', '')
    api_create_time = user_info.get('api_create_time', '')
    if api_create_time:
        api_create_time = timestamp_to_str(api_create_time)
    api_open = user_info.get('api_open', 0)
    api_info = list()
    if secret_id and secret_key:
        api_info_dict = {
            'secret_id': secret_id,
            'secret_key': secret_key,
            'create_time': api_create_time,
            'status': api_open,
            'type': _(af.COMMON)
        }
        api_info.append(api_info_dict)
    user.api_info = api_info
    res = {
        'user': user
    }
    return render(request, 'user/user_secret_info_details.html', res)
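# The API-secret lookup above duplicates the one in admin_parent_details_views.
# A minimal sketch of a shared helper the two views could call instead; the
# helper name _build_api_info is hypothetical, and everything it uses
# (APIUrl.post_link, timestamp_to_str, af.COMMON, _) already appears in this
# module.
def _build_api_info(username):
    """Return a one-element list with the user's API credential dict, or []."""
    body = {'username_list': [username]}
    res = APIUrl.post_link('user_query', body)
    user_info = res.get('user_query', {}).get(username, {})
    secret_id = user_info.get('api_secret_id', '')
    secret_key = user_info.get('api_secret_key', '')
    api_create_time = user_info.get('api_create_time', '')
    if api_create_time:
        api_create_time = timestamp_to_str(api_create_time)
    if not (secret_id and secret_key):
        return []
    return [{
        'secret_id': secret_id,
        'secret_key': secret_key,
        'create_time': api_create_time,
        'status': user_info.get('api_open', 0),
        'type': _(af.COMMON),
    }]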
def user_download_ip_list(request, domain_id, start_time, end_time):
    """Parent account: download blocked attack source IPs."""
    msg = ''
    status = False
    is_en = False
    if request.LANGUAGE_CODE == 'en':
        is_en = True
    res = {
        'status': status,
        'msg': msg
    }
    provider = 'QINGSONG'
    try:
        start_time = int_check(start_time)
        if start_time is None:
            msg = af.PARAME_ERROR
            assert False
        end_time = int_check(end_time)
        if end_time is None:
            msg = af.PARAME_ERROR
            assert False
        start_time = timestamp_to_str(start_time, _format='%Y-%m-%d')
        end_time = timestamp_to_str(end_time, _format='%Y-%m-%d')
        domain_obj = Domain.objects.filter(id=domain_id).first()
        if not domain_obj:
            msg = af.DOMAIN_NOT_EXIST
            assert False
        channel = '%s://%s' % (domain_obj.protocol, domain_obj.domain)
        body = {
            'channel': channel,
            'start_day': start_time,
            'end_day': end_time,
        }
        api_res = APIUrl.post_link('waf_defense_statistics', body)
        excel_name = '%s-ip_list.xls' % domain_obj.domain
        if api_res[provider]['return_code'] == 0:
            api_res = api_res[provider]
            detail_data = api_res.get('data', {})
            ip_list = detail_data.get('ip_list', [])
            sheet_name = 'ip-list'
            row, excel_path, worksheet, workbook = make_base_excel(
                excel_name, sheet_name, channel, start_time, end_time)
            row += 3
            # Header row: data rows below write ip / ip_address / count.
            worksheet.write(row, 0, label='ip')
            worksheet.write(row, 1, label='ip_address')
            worksheet.write(row, 2, label='count')
            for v in ip_list:
                row += 1
                ip = v.get('ip', '')
                ip_address = v.get('ip_address', '')
                if is_en:
                    if ip_address in COUNTRY_NAME_CONF:
                        ip_address = COUNTRY_NAME_CONF[ip_address]
                    else:
                        ip_address = 'Unknown'
                cnt = v.get('cnt', '')
                worksheet.write(row, 0, label=ip)
                worksheet.write(row, 1, label=ip_address)
                worksheet.write(row, 2, label=cnt)
            workbook.save(excel_path)
    except AssertionError:
        excel_name = 'error_documents'
        excel_path = make_error_file(excel_name, _(msg))
    response = StreamingHttpResponse(file_iterator(excel_path))
    response['Content-Type'] = 'application/octet-stream'
    response['Content-Disposition'] = 'attachment;filename="{0}"'.format(
        excel_name)
    return response
def user_download_time_cnt(request, domain_id, start_time, end_time):
    """Parent account: download blocked attack counts by time (Excel)."""
    msg = ''
    status = False
    res = {
        'status': status,
        'msg': msg
    }
    provider = 'QINGSONG'
    domain_obj = Domain.objects.filter(id=domain_id).first()
    try:
        if not domain_obj:
            msg = af.DOMAIN_NOT_EXIST
            assert False
        excel_name = '%s-time_cut.xls' % domain_obj.domain
        start_time = int_check(start_time)
        if start_time is None:
            msg = af.PARAME_ERROR
            assert False
        end_time = int_check(end_time)
        if end_time is None:
            msg = af.PARAME_ERROR
            assert False
        start_time = timestamp_to_str(start_time, _format='%Y-%m-%d')
        end_time = timestamp_to_str(end_time, _format='%Y-%m-%d')
        channel = '%s://%s' % (domain_obj.protocol, domain_obj.domain)
        body = {
            'channel': channel,
            'start_day': start_time,
            'end_day': end_time,
        }
        api_res = APIUrl.post_link('waf_defense_statistics', body)
        if api_res[provider]['return_code'] == 0:
            api_res = api_res[provider]
            sheet_name = 'time-cut'
            row, excel_path, worksheet, workbook = make_base_excel(
                excel_name, sheet_name, channel, start_time, end_time)
            detail_data = api_res.get('data', {})
            # time_cnt maps 'YYYYMMDD' keys to a list of hourly buckets.
            time_cnt_data = detail_data.get('time_cnt', {})
            row += 3
            worksheet.write(row, 0, label='time')
            worksheet.write(row, 1, label='count')
            time_str_list = list(time_cnt_data.keys())
            time_str_list.sort()
            for time_key in time_str_list:
                data_list = time_cnt_data[time_key]
                day_str = '%s-%s-%s' % (
                    time_key[:4], time_key[4:6], time_key[6:8])
                for v in data_list:
                    row += 1
                    cnt = v.get('cnt', 0)
                    name = int(v.get('name'))
                    name = '0%s' % name if name < 10 else str(name)
                    start = '%s:00' % name
                    end = '%s:59' % name
                    time_str = '%s %s - %s %s' % (day_str, start, day_str, end)
                    worksheet.write(row, 0, label=time_str)
                    worksheet.write(row, 1, label=cnt)
            workbook.save(excel_path)
    except AssertionError:
        excel_name = 'error_documents'
        excel_path = make_error_file(excel_name, _(msg))
    response = StreamingHttpResponse(file_iterator(excel_path))
    response['Content-Type'] = 'application/octet-stream'
    response['Content-Disposition'] = 'attachment;filename="{0}"'.format(
        excel_name)
    return response
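# A minimal sketch of the hour-range label built inside the loop above
# ('HH:00 - HH:59' with zero padding), factored out for readability; the
# helper name _hour_range_label is hypothetical.
def _hour_range_label(day_str, hour):
    """Return e.g. '2024-01-01 09:00 - 2024-01-01 09:59' for hour=9."""
    hh = '%02d' % int(hour)
    return '%s %s:00 - %s %s:59' % (day_str, hh, day_str, hh)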
def user_download_log(request, domain_id, atk_ip, start_time, end_time, log_rows):
    """Parent account: download WAF logs.

    Sample 'waf_report' response:
    {'action': 'waf_report', 'message': 'success',
     'data': {'cur_page': 70, 'log_rows': 1381, 'waf_log': [], 'page_cnt': 70},
     'return_code': 0}
    """
    msg = ''
    status = False
    page_size = 2000
    log_list = []
    is_en = False
    if request.LANGUAGE_CODE == 'en':
        is_en = True
    provider = 'QINGSONG'
    try:
        log_rows = int_check(log_rows)
        if log_rows is None:
            msg = af.PARAME_ERROR
            assert False
        page_count = log_rows // page_size
        domain_id = int(domain_id)
        domain_obj = Domain.objects.filter(id=domain_id).first()
        if not domain_obj:
            msg = af.DOMAIN_NOT_EXIST
            assert False
        channel = '%s://%s' % (domain_obj.protocol, domain_obj.domain)
        body = {
            'channel': channel,
        }
        if start_time and end_time:
            start_time = int_check(start_time)
            if start_time is None:
                msg = af.PARAME_ERROR
                assert False
            end_time = int_check(end_time)
            if end_time is None:
                msg = af.PARAME_ERROR
                assert False
            base_format = '%Y-%m-%d %H:%M'
            start_time = timestamp_to_str(start_time, _format=base_format)
            end_time = timestamp_to_str(end_time, _format=base_format)
            body['start_time'] = start_time
            body['end_time'] = end_time
        if atk_ip != '-':
            body['atk_ip'] = atk_ip
        # Build one request body per page and fetch all pages concurrently.
        body_list = []
        for count in range(0, page_count + 1):
            body['page'] = count
            body['page_size'] = page_size
            body_list.append(deepcopy(body))
        loop = asyncio.new_event_loop()
        asyncio.set_event_loop(loop)
        p0 = time.time()
        r = loop.run_until_complete(asyncio.wait(
            [APIUrl.doPostAio('waf_log_list', b) for b in body_list]))
        p1 = time.time()
        print(f'user_download_log[len: {len(body_list)}.] takeTime: {p1 - p0}')
        results = r[0]
        n = 0
        for i in results:
            try:
                result = json.loads(i.result())
                data = result.get(provider).get('data')
                if result.get(provider).get('return_code') == 0:
                    log_list.extend(data['waf_log'])
                    print(f'user_download_log[No.{n}] cur_page: {data["cur_page"]}|| '
                          f'log_rows: {data["log_rows"]}')
            except json.decoder.JSONDecodeError:
                print(f'user_download_log[jsonError.] error: {i.result()}|| '
                      f'info: {r[1]}|| traceback: {traceback.format_exc()}')
            n += 1
        loop.close()
        # ---origins as below (previous synchronous, page-by-page version)
        # for count in range(0, page_count+1):
        #     body['page'] = count
        #     body['page_size'] = page_size
        #     api_res = APIUrl.post_link('waf_log_list', body)
        #     if api_res[provider]['return_code'] == 0:
        #         api_res = api_res[provider]
        #         waf_log = api_res.get('data', {}).get('waf_log', [])
        #         log_list.extend(waf_log)
        csv_rows = [['waf_channel:', channel],
                    ['start_time:', start_time],
                    ['end_time:', end_time],
                    [], []]
        csv_header = ['log_time', 'atk_ip', 'tar_url', 'atk_type', 'rule_id']
        csv_rows.append(csv_header)
        csv_name = '%s-log.csv' % domain_obj.domain
        for v in log_list:
            log_time = v.get('log_time', '')
            atk_ip = v.get('atk_ip', '')
            target_url = v.get('target_url', '')
            # target_url = unquote(target_url)
            atk_type = v.get('atk_type', '')
            # if not is_en:
            #     atk_type = WAF_ATTACK_TYPE[atk_type]
            rule_id = v.get('ruleid', '')
            csv_rows.append([log_time, atk_ip, target_url, atk_type, rule_id])
        csv_path = make_base_csv(csv_name, csv_rows)
    except AssertionError:
        csv_name = 'error_documents'
        csv_path = make_error_file(csv_name, _(msg))
    # response = StreamingHttpResponse(file_iterator(csv_path))
    try:
        response = FileResponse(open(csv_path, 'rb'))
        response['Content-Type'] = 'application/octet-stream'
        response['Content-Disposition'] = f'attachment;filename="{csv_name}"'
        os.remove(csv_path)
        print(f'user_download_log[FileDeleteDone.] csv_path: {csv_path}')
        return response
    except Exception:
        print(f'user_download_log[Error.] error: {traceback.format_exc()}')
        raise Http404
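# A minimal sketch of the page fan-out above written with asyncio.gather,
# which preserves result order and surfaces per-request exceptions explicitly.
# It assumes, as the view does, that APIUrl.doPostAio('waf_log_list', body)
# is a coroutine returning the raw JSON text of one page; the helper name
# _fetch_waf_log_pages is hypothetical.
def _fetch_waf_log_pages(body_list, provider='QINGSONG'):
    """Fetch all page bodies concurrently and return the merged waf_log list."""
    async def _gather():
        return await asyncio.gather(
            *[APIUrl.doPostAio('waf_log_list', b) for b in body_list],
            return_exceptions=True)

    log_list = []
    loop = asyncio.new_event_loop()
    try:
        for raw in loop.run_until_complete(_gather()):
            if isinstance(raw, Exception):
                print(f'_fetch_waf_log_pages[requestError.] {raw!r}')
                continue
            try:
                page = json.loads(raw).get(provider, {})
            except json.decoder.JSONDecodeError:
                print(f'_fetch_waf_log_pages[jsonError.] {raw!r}')
                continue
            if page.get('return_code') == 0:
                log_list.extend(page.get('data', {}).get('waf_log', []))
    finally:
        loop.close()
    return log_list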
def user_download_rule_list(request, domain_id, start_time, end_time):
    """Parent account: download attack type and URL statistics."""
    msg = ''
    status = False
    res = {
        'status': status,
        'msg': msg
    }
    provider = 'QINGSONG'
    try:
        start_time = int_check(start_time)
        if start_time is None:
            msg = af.PARAME_ERROR
            assert False
        end_time = int_check(end_time)
        if end_time is None:
            msg = af.PARAME_ERROR
            assert False
        start_time = timestamp_to_str(start_time, _format='%Y-%m-%d')
        end_time = timestamp_to_str(end_time, _format='%Y-%m-%d')
        domain_obj = Domain.objects.filter(id=domain_id).first()
        if not domain_obj:
            msg = af.DOMAIN_NOT_EXIST
            assert False
        channel = '%s://%s' % (domain_obj.protocol, domain_obj.domain)
        body = {
            'channel': channel,
            'start_day': start_time,
            'end_day': end_time,
        }
        api_res = APIUrl.post_link('waf_defense_statistics', body)
        excel_name = '%s-rule_list.xls' % domain_obj.domain
        if api_res[provider]['return_code'] == 0:
            api_res = api_res[provider]
            detail_data = api_res.get('data', {})
            sheet_name = 'rule-list'
            url_list = detail_data.get('url_list', [])
            row, excel_path, worksheet, workbook = make_base_excel(
                excel_name, sheet_name, channel, start_time, end_time)
            row += 3
            worksheet.write(row, 0, label='type')
            worksheet.write(row, 1, label='count')
            worksheet.write(row, 2, label='proportion(%)')
            rule_list = detail_data.get('rule_list', [])
            for v in rule_list:
                row += 1
                name = v.get('name', '')
                cnt = v.get('cnt', '')
                pro = v.get('pro', '')
                worksheet.write(row, 0, label=name)
                worksheet.write(row, 1, label=cnt)
                worksheet.write(row, 2, label=pro)
            row += 5
            worksheet.write(row, 0, label='url')
            worksheet.write(row, 1, label='count')
            # Widen the first column to fit the longest URL seen so far.
            base_rote = 40
            for v in url_list:
                row += 1
                site_url = v.get('site_url', '')
                site_url = unquote(site_url)
                if len(site_url) > base_rote:
                    first_col = worksheet.col(0)
                    first_col.width = 256 * len(site_url)
                    base_rote = len(site_url)
                cnt = v.get('cnt', '')
                worksheet.write(row, 0, label=site_url)
                worksheet.write(row, 1, label=cnt)
            workbook.save(excel_path)
    except AssertionError:
        excel_name = 'error_documents'
        excel_path = make_error_file(excel_name, _(msg))
    response = StreamingHttpResponse(file_iterator(excel_path))
    response['Content-Type'] = 'application/octet-stream'
    response['Content-Disposition'] = 'attachment;filename="{0}"'.format(
        excel_name)
    return response
def admin_cert_detail_views(request, cert_name, user_id):
    """Admin: certificate detail page."""
    cert_detail = {}
    provider_list = Provider.objects.all()
    provider_dict_list = []
    for i in provider_list:
        provider_dict = {'name': i.name, 'code': i.code}
        provider_dict_list.append(provider_dict)
    try:
        if user_id:
            user_id = int_check(user_id)
            if user_id is None:
                msg = af.PARAME_ERROR
                assert False
        body = {'cert_name': cert_name, 'user_id': user_id}
        api_res = APIUrl.post_link('ssl_cert_detail', body)
        return_code = api_res.get('return_code', 0)
        if return_code != 0:
            assert False
        cert_detail = api_res.get('cert_detail', {})
        relation_list = cert_detail['relation_list']
        relation_result = []
        for i in relation_list:
            domain = i.get('domain', '')
            status = i.get('status', [])
            status_list = list(set(status))
            # If every node reports DOMAIN_SERVING the domain is serving;
            # otherwise surface one of the non-serving statuses.
            if CDNConf.DOMAIN_SERVING in status_list:
                status_list.pop(status_list.index(CDNConf.DOMAIN_SERVING))
            if not status_list:
                status_name = CDNConf.DOMAIN_SERVING
            else:
                status_value = status_list[0]
                status_name = status_value
            result_dict = {'domain': domain, 'status': status_name}
            relation_result.append(result_dict)
        cert_detail['relation_list'] = relation_result
        log_list = cert_detail['log_list']
        temp_list = []
        for i in log_list:
            create_time = i.get('create_time', 0)
            create_time = timestamp_to_str(create_time)
            temp_log = copy.deepcopy(i)
            temp_log['create_time'] = create_time
            opt_result_dict = i.get('opt_result', {})
            opt_list = []
            for p in opt_result_dict:
                temp = {'opt': p, 'result': opt_result_dict[p]}
                opt_list.append(temp)
            temp_log['opt_result'] = opt_list
            temp_list.append(temp_log)
        temp_list = sorted(temp_list, key=lambda x: x['create_time'],
                           reverse=True)
        cert_detail['log_list'] = temp_list
    except AssertionError:
        pass
    res = {
        'cert_detail': cert_detail,
        'cert_status': CertConf.CERT_STATUS,
        'cert_from': CertConf.CERT_FROM,
        'domain_status': CDNConf.DOMAIN_STATUS,
        'opt_send_status': CertConf.OPT_SEND_STATUS,
        'provider_list': provider_dict_list
    }
    return render(request, 'cert/admin_cert_detail.html', res)
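# A minimal sketch of the status-collapsing rule used above, factored out for
# clarity; the helper name _collapse_domain_status is hypothetical and only
# relies on CDNConf.DOMAIN_SERVING from this module.
def _collapse_domain_status(status_list):
    """Return DOMAIN_SERVING if every node is serving, else a non-serving status."""
    others = [s for s in set(status_list) if s != CDNConf.DOMAIN_SERVING]
    return others[0] if others else CDNConf.DOMAIN_SERVING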
def client_download_status_code_trend(request, start_time, end_time, domain_ids):
    """Client: download status code distribution data."""
    msg = ''
    status = False
    base_code = CDNConf.STATUS_CODE_TREND
    try:
        user = request.user
        msg, start_time, end_time = handle_req_time(start_time, end_time)
        if msg:
            assert False
        if domain_ids == '-':
            domain_query = Domain.objects.filter(user=user)
        else:
            domain_ids = domain_ids.split(',')
            domain_query = Domain.objects.filter(id__in=domain_ids)
        domain_list = [i.domain for i in domain_query]
        opts = []
        __, __, __, all_trend_data, __ = get_domain_status_code(
            user.id, domain_list, start_time, end_time, opts)
        excel_name = '%s-status_code_trend.xls' % user.username
        sheet_name = 'status_code_trend'
        start_time = timestamp_to_str(start_time)
        end_time = timestamp_to_str(end_time)
        row, excel_path, worksheet, workbook = make_base_excel(
            excel_name, sheet_name, domain_list, start_time, end_time)
        base_row = row + 2
        worksheet.write(base_row, 0, label='status_code')
        worksheet.write(base_row, 1, label='total')
        worksheet.write(base_row, 2, label='Percentage')
        row += 3
        # all_trend_data looks like {'2xx': 2528195, '3xx': 41, '4xx': 1, '5xx': 0};
        # the per-opt variant looks like {'CC': {'2xx': 2528195, '3xx': 41, ...}}.
        sum_req = 0
        for i in all_trend_data:
            sum_req += all_trend_data[i]
        for code in base_code:
            num = all_trend_data[code]
            # Guard against an empty period (sum_req == 0).
            ratio = '%.4f' % (num / sum_req * 100 if sum_req else 0)
            worksheet.write(row, 0, label=code)
            worksheet.write(row, 1, label=num)
            worksheet.write(row, 2, label='{}%'.format(ratio))
            row += 1
        workbook.save(excel_path)
    except AssertionError:
        excel_name = 'error_documents'
        excel_path = make_error_file(excel_name, _(msg))
    response = StreamingHttpResponse(file_iterator(excel_path))
    response['Content-Type'] = 'application/octet-stream'
    response['Content-Disposition'] = 'attachment;filename="{0}"'.format(
        excel_name)
    return response
def client_download_cdn_flux(request, start_time, end_time, domain_ids):
    """Client: download billing (traffic and bandwidth) data."""
    msg = ''
    status = False
    try:
        user = request.user
        msg, start_time, end_time = handle_req_time(start_time, end_time)
        if msg:
            assert False
        if domain_ids == '-':
            domain_query = Domain.objects.filter(user=user)
        else:
            domain_ids = domain_ids.split(',')
            domain_query = Domain.objects.filter(id__in=domain_ids)
        domain_list = [i.domain for i in domain_query]
        opts = []
        (all_flux_list, sum_cdn_flux, sum_src_flux, max_cdn, max_src,
         table_data, opt_result) = get_domain_flux(
            user.id, domain_list, start_time, end_time, opts)
        excel_name = '%s-flux_data.xls' % user.username
        sheet_name = 'Detailed Traffic Bandwidth'
        start_time = timestamp_to_str(start_time)
        end_time = timestamp_to_str(end_time)
        row, excel_path, worksheet, workbook = make_base_excel(
            excel_name, sheet_name, domain_list, start_time, end_time)
        row += 1
        worksheet.write(row, 0, label='Peak bandwidth (Mbps)')
        worksheet.write(row, 1, label=max_cdn)
        row += 1
        worksheet.write(row, 0, label='Source peak bandwidth (Mbps)')
        worksheet.write(row, 1, label=max_src)
        row += 1
        worksheet.write(row, 0, label='Total flow (MB)')
        worksheet.write(row, 1, label=sum_cdn_flux)
        row += 1
        worksheet.write(row, 0, label='Total source flow (MB)')
        worksheet.write(row, 1, label=sum_src_flux)
        row += 2
        worksheet.write(row, 0, label='Time')
        worksheet.write(row, 1, label='bandwidth (Mbps)')
        worksheet.write(row, 2, label='flow (MB)')
        row += 1
        for i in all_flux_list:
            time_key = i.get('time_key', '')
            cdn_data = i.get('cdn_data', 0)
            # Convert MB in a 300-second bucket to Mbps (MB * 8 bits / 300 s).
            cdn_bandwidth = cdn_data / 300 * 8
            worksheet.write(row, 0, label=time_key)
            worksheet.write(row, 1, label=cdn_bandwidth)
            worksheet.write(row, 2, label=cdn_data)
            row += 1
        workbook.save(excel_path)
    except AssertionError:
        excel_name = 'error_documents'
        excel_path = make_error_file(excel_name, _(msg))
    response = StreamingHttpResponse(file_iterator(excel_path))
    response['Content-Type'] = 'application/octet-stream'
    response['Content-Disposition'] = 'attachment;filename="{0}"'.format(
        excel_name)
    return response
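# A minimal sketch of the flow-to-bandwidth conversion used in the loop above
# and in admin_download_cdn_flux below: each bucket holds the MB transferred
# over a 300-second interval, so the average bandwidth is MB * 8 / 300 Mbps.
# The helper name _flux_mb_to_mbps is hypothetical.
def _flux_mb_to_mbps(flux_mb, interval_seconds=300):
    """Average bandwidth in Mbps for flux_mb megabytes over one interval."""
    return flux_mb * 8 / interval_seconds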
def admin_download_cdn_flux(request, start_time, end_time, user_id, domain_ids, opts):
    """Admin: download billing (traffic and bandwidth) data."""
    msg = ''
    status = False
    try:
        user_id = int_check(user_id)
        if user_id is None:
            msg = af.PARAME_ERROR
            assert False
        user = UserProfile.objects.filter(id=user_id).first()
        msg, start_time, end_time = handle_req_time(start_time, end_time)
        if msg:
            assert False
        domain_ids = domain_ids.split(',')
        domain_query = Domain.objects.filter(id__in=domain_ids)
        domain_list = [i.domain for i in domain_query]
        opts = opts.split(',')
        (all_flux_list, sum_cdn_flux, sum_src_flux, max_cdn, max_src,
         table_data, opt_result) = get_domain_flux(
            user_id, domain_list, start_time, end_time, opts)
        excel_name = '%s-flux_data.xls' % user.username
        sheet_name = 'Detailed Traffic Bandwidth'
        start_time = timestamp_to_str(start_time)
        end_time = timestamp_to_str(end_time)
        row, excel_path, worksheet, workbook = make_base_excel(
            excel_name, sheet_name, domain_list, start_time, end_time)
        row += 1
        worksheet.write(row, 0, label='Peak bandwidth (Mbps)')
        worksheet.write(row, 1, label=max_cdn)
        row += 1
        worksheet.write(row, 0, label='Source peak bandwidth (Mbps)')
        worksheet.write(row, 1, label=max_src)
        row += 1
        worksheet.write(row, 0, label='Total flow (MB)')
        worksheet.write(row, 1, label=sum_cdn_flux)
        row += 1
        worksheet.write(row, 0, label='Total source flow (MB)')
        worksheet.write(row, 1, label=sum_src_flux)
        row += 2
        worksheet.write(row, 0, label='Time')
        worksheet.write(row, 1, label='bandwidth (Mbps)')
        worksheet.write(row, 2, label='flow (MB)')
        base_title_row = row
        base_title_col = 3
        row += 1
        for i in all_flux_list:
            time_key = i.get('time_key', '')
            cdn_data = i.get('cdn_data', 0)
            cdn_bandwidth = cdn_data / 300 * 8
            worksheet.write(row, 0, label=time_key)
            worksheet.write(row, 1, label=cdn_bandwidth)
            worksheet.write(row, 2, label=cdn_data)
            row += 1
        for opt in opt_result:
            title_row = base_title_row
            bandwidth_title = '%s bandwidth' % opt
            bandwidth_col = base_title_col
            worksheet.write(title_row, bandwidth_col, label=bandwidth_title)
            flow_title = '%s flow' % opt
            flow_col = base_title_col + 1
            worksheet.write(title_row, flow_col, label=flow_title)
            for i in opt_result[opt]:
                title_row += 1
                cdn_data = i.get('cdn_data', 0)
                cdn_bandwidth = cdn_data / 300 * 8
                worksheet.write(title_row, bandwidth_col, label=cdn_bandwidth)
                worksheet.write(title_row, flow_col, label=cdn_data)
            # Each opt occupies two columns (bandwidth + flow).
            base_title_col += 2
        workbook.save(excel_path)
    except AssertionError:
        excel_name = 'error_documents'
        excel_path = make_error_file(excel_name, _(msg))
    response = StreamingHttpResponse(file_iterator(excel_path))
    response['Content-Type'] = 'application/octet-stream'
    response['Content-Disposition'] = 'attachment;filename="{0}"'.format(
        excel_name)
    return response
def admin_download_status_code_trend(request, start_time, end_time, user_id, domain_ids, opts):
    """Admin: download status code distribution data."""
    msg = ''
    status = False
    base_code = CDNConf.STATUS_CODE_TREND
    try:
        user_id = int_check(user_id)
        if user_id is None:
            msg = af.PARAME_ERROR
            assert False
        user = UserProfile.objects.filter(id=user_id).first()
        msg, start_time, end_time = handle_req_time(start_time, end_time)
        if msg:
            assert False
        domain_ids = domain_ids.split(',')
        domain_query = Domain.objects.filter(id__in=domain_ids)
        domain_list = [i.domain for i in domain_query]
        opts = opts.split(',')
        _, _, _, all_trend_data, opt_trend_data = get_domain_status_code(
            user_id, domain_list, start_time, end_time, opts)
        excel_name = '%s-status_code_trend.xls' % user.username
        sheet_name = 'status_code_trend'
        start_time = timestamp_to_str(start_time)
        end_time = timestamp_to_str(end_time)
        row, excel_path, worksheet, workbook = make_base_excel(
            excel_name, sheet_name, domain_list, start_time, end_time)
        base_row = row + 2
        worksheet.write(base_row, 0, label='status_code')
        worksheet.write(base_row, 1, label='total')
        worksheet.write(base_row, 2, label='Percentage')
        opt_col = 3
        opt_list = []
        for opt in opt_trend_data:
            worksheet.write(base_row, opt_col, label='{} total'.format(opt))
            worksheet.write(base_row, opt_col + 1,
                            label='{} percentage'.format(opt))
            opt_list.append(opt)
            # Each opt occupies two columns (total + percentage).
            opt_col += 2
        row += 3
        # all_trend_data looks like {'2xx': 2528195, '3xx': 41, '4xx': 1, '5xx': 0};
        # opt_trend_data looks like {'CC': {'2xx': 2528195, '3xx': 41, '4xx': 1, '5xx': 0}}.
        sum_req = 0
        for i in all_trend_data:
            sum_req += all_trend_data[i]
        for code in base_code:
            num = all_trend_data[code]
            # Guard against an empty period (sum_req == 0).
            ratio = '%.4f' % (num / sum_req * 100 if sum_req else 0)
            worksheet.write(row, 0, label=code)
            worksheet.write(row, 1, label=num)
            worksheet.write(row, 2, label='{}%'.format(ratio))
            col = 3
            for opt in opt_list:
                opt_num = opt_trend_data[opt][code]
                worksheet.write(row, col, label=opt_num)
                opt_ratio = '%.4f' % (opt_num / num * 100 if num else 0)
                worksheet.write(row, col + 1, label='{}%'.format(opt_ratio))
                col += 2
            row += 1
        workbook.save(excel_path)
    except AssertionError:
        excel_name = 'error_documents'
        excel_path = make_error_file(excel_name, _(msg))
    response = StreamingHttpResponse(file_iterator(excel_path))
    response['Content-Type'] = 'application/octet-stream'
    response['Content-Disposition'] = 'attachment;filename="{0}"'.format(
        excel_name)
    return response
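# A minimal sketch of the safe percentage formatting shared by the two
# status-code views above; the helper name _percent_str is hypothetical.
def _percent_str(part, total):
    """Format part/total as a percentage string with four decimals."""
    ratio = part / total * 100 if total else 0
    return '%.4f%%' % ratio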